-
Notifications
You must be signed in to change notification settings - Fork 0
/
gen_single_channel_acc_trigs_segdb.py
150 lines (115 loc) · 5.41 KB
/
gen_single_channel_acc_trigs_segdb.py
1
2
3
4
5
6
7
8
9
10
11
12
13
14
15
16
17
18
19
20
21
22
23
24
25
26
27
28
29
30
31
32
33
34
35
36
37
38
39
40
41
42
43
44
45
46
47
48
49
50
51
52
53
54
55
56
57
58
59
60
61
62
63
64
65
66
67
68
69
70
71
72
73
74
75
76
77
78
79
80
81
82
83
84
85
86
87
88
89
90
91
92
93
94
95
96
97
98
99
100
101
102
103
104
105
106
107
108
109
110
111
112
113
114
115
116
117
118
119
120
121
122
123
124
125
126
127
128
129
130
131
132
133
134
135
136
137
138
139
140
141
142
143
144
145
146
147
148
149
150
import numpy as np
from glue.ligolw import utils, ligolw, lsctables
from glue.lal import LIGOTimeGPS
from glue import lal
import os
from optparse import OptionParser
from gwpy.timeseries import TimeSeries
from glue import datafind
import pysegdb
def read_command_line():
    """Parse command-line options and return the run configuration.

    Returns a tuple:
        (channel, ifo, frames, outdir, gps_start_time, gps_end_time,
         padding, DQFlag)
    where `ifo` is normalized to a single letter ('L' or 'H') and
    `frames` is the matching raw frame type ('L1_R' or 'H1_R').
    """
    parser = OptionParser(
        version = "Name: Overflow Trigger Generator",
        usage = "%prog --gps-start-time --gps-end-time --channel --ifo --outdir --seg-list",
        description = "Makes triggers for overflows from a single channel"
    )
    parser.add_option("-s", "--gps-start-time", help="GPS start time", type='float')
    parser.add_option("-e", "--gps-end-time", help="GPS end time", type='float')
    parser.add_option("-c", "--channel", metavar = "channel", help="Channel name.")
    parser.add_option("-i", "--ifo", metavar = "ifo", help="IFO, L or H")
    parser.add_option("-o", "--outdir", metavar = "outdir", help = "base output directory")
    parser.add_option("-p", "--padding", metavar = "padding", default = "0", type = "float", help = "padding at end of science segment to avoid lock loss saturations")
    parser.add_option("-f", "--science-flag", metavar="science_flag", type="str", help="Flag used to determine science mode")
    opts, _ = parser.parse_args()
    # Normalize the IFO spelling: accept 'L1'/'H1' as well as 'L'/'H'.
    ifo = opts.ifo
    if ifo in ('L1', 'H1'):
        ifo = ifo[0]
    # Any IFO other than 'L' falls through to the Hanford frame type,
    # matching the original if/else behavior.
    frames = 'L1_R' if ifo == 'L' else 'H1_R'
    return (opts.channel, ifo, frames, opts.outdir, opts.gps_start_time,
            opts.gps_end_time, opts.padding, opts.science_flag)
def cumu_seg_test(x, y, z):
    """Return True when an overflow segment starts at z's timestamp.

    The channel is a cumulative counter: if two consecutive samples are
    equal (x == y) and the next one jumps (y < z), the jump marks the
    beginning of an overflow segment, and the caller records the
    timestamp attached to z.  The chained comparison expresses both
    conditions in one boolean, returned directly (no if/else needed).
    """
    return x == y < z
def generate_triggers(channel, gps_start, gps_end, ifo, frames, outdir, segments, padding):
    """Find overflow transitions in `channel` and write them as burst triggers.

    Reads `channel` from raw frames over [gps_start, gps_end - padding],
    flags every sample where the cumulative overflow counter starts to jump
    (see cumu_seg_test), and writes the trigger times into a SnglBurstTable
    XML file under `outdir`.

    Parameters:
        channel   -- full channel name; the output layout assumes the form
                     "L1:SYS-..." (channel[:2] is the IFO tag) -- TODO confirm
        gps_start, gps_end -- segment boundaries (GPS seconds)
        ifo       -- single-letter IFO ('L' or 'H')
        frames    -- frame type to query from datafind (e.g. 'L1_R')
        outdir    -- base output directory
        segments  -- accepted for interface compatibility; unused here
        padding   -- seconds trimmed off the segment end

    Returns None.  Side effect: writes an .xml.gz trigger file (or nothing
    if no transitions are found).
    """
    # Trim `padding` seconds off the end to avoid lock-loss saturations
    # right at the science-segment boundary.
    pad_gps_end = gps_end - padding
    # Single-argument print(...) is valid in both Python 2 and Python 3.
    print("Processing segment: " + str(gps_start) + " - " + str(pad_gps_end))
    # Locate the raw frame files and read the channel data.
    connection = datafind.GWDataFindHTTPConnection()
    cache = connection.find_frame_urls(ifo, frames, int(gps_start), int(gps_end), urltype='file')
    data = TimeSeries.read(cache, channel, start=int(gps_start), end=int(pad_gps_end))
    time_vec = data.times.value
    # We want the times when the channel switches from a normal state to an
    # overflowing state.  The first and last samples are skipped because we
    # cannot tell whether the channel was already overflowing before our
    # first data point (or continues after the last one).
    trigger_vec = []
    n_samples = np.size(data, 0)
    for j in range(1, n_samples - 1):
        if cumu_seg_test(data[j - 1], data[j], data[j + 1]):
            trigger_vec.append(time_vec[j + 1])
    if not trigger_vec:
        print("No triggers found for " + str(channel))
        return
    print("Found triggers for " + str(channel))
    # Convert the trigger timestamps to LIGOTimeGPS for the XML tables.
    trig_times = [LIGOTimeGPS(float(t)) for t in trigger_vec]
    # Mocked-up frequency and SNR values so the burst-table schema is
    # satisfied; downstream tools only care about the peak times.
    freqs = np.empty(np.size(trigger_vec))
    freqs.fill(100)
    snrs = np.empty(np.size(trigger_vec))
    snrs.fill(10)
    sngl_burst_table_up = lsctables.New(lsctables.SnglBurstTable,
                                        ["peak_time", "peak_time_ns", "peak_frequency", "snr"])
    for t, f, s in zip(trig_times, freqs, snrs):
        row = sngl_burst_table_up.RowType()
        row.set_peak(t)
        row.peak_frequency = f
        row.snr = s
        sngl_burst_table_up.append(row)
    xmldoc_up = ligolw.Document()
    xmldoc_up.appendChild(ligolw.LIGO_LW())
    xmldoc_up.childNodes[0].appendChild(sngl_burst_table_up)
    # Output layout: <outdir>/<ifo tag>/<channel name>/<first 5 GPS digits>/
    directory_up = os.path.join(outdir, channel[:2], channel[3:], str(gps_start)[:5])
    if not os.path.exists(directory_up):
        os.makedirs(directory_up)
    filename = (channel[:2] + "-" + channel[3:6] + "_" + channel[7:] + "_ADC-" +
                str(gps_start) + "-" + str(gps_end - gps_start) + ".xml.gz")
    utils.write_filename(xmldoc_up, os.path.join(directory_up, filename), gz=True)
if __name__ == "__main__":
    # Parse options exactly once.  (The original called read_command_line()
    # twice back-to-back, discarding the first result and re-parsing argv.)
    (channel, ifo, frames, outdir,
     gps_start_time, gps_end_time, padding, DQFlag) = read_command_line()
    # Query the segment database for active science-mode segments; note the
    # second argument is a duration, not an end time.
    segments = pysegdb.get_active_segments(ifo + '1', gps_start_time,
                                           (gps_end_time - gps_start_time), DQFlag)
    # Process each science segment long enough to survive the padding trim.
    for entry in segments:
        if entry[1] - entry[0] > padding:
            generate_triggers(channel, entry[0], entry[1], ifo, frames,
                              outdir, segments, padding)
        else:
            print("Science segment " + str(entry[0]) + " - " + str(entry[1]) +
                  " was shorter than padding window.")