52 from collections
import deque
58 from scipy
import random
63 from pysqlite2
import dbapi2
as sqlite3
77 gobject.threads_init()
83 from ligo
import gracedb
85 print >>sys.stderr,
"warning: gracedb import failed, program will crash if gracedb uploads are attempted"
87 from glue
import iterutils
88 from glue
import segments
89 from glue.ligolw
import ligolw
90 from glue.ligolw
import dbtables
91 from glue.ligolw
import ilwd
92 from glue.ligolw
import lsctables
93 from glue.ligolw
import array
as ligolw_array
94 from glue.ligolw
import param
as ligolw_param
95 from glue.ligolw
import utils
as ligolw_utils
96 from glue.ligolw.utils
import ligolw_sqlite
97 from glue.ligolw.utils
import ligolw_add
98 from glue.ligolw.utils
import process
as ligolw_process
99 from glue.ligolw.utils
import search_summary
as ligolw_search_summary
100 from glue.ligolw.utils
import segments
as ligolw_segments
101 from glue.ligolw.utils
import time_slide
as ligolw_time_slide
103 from pylal
import rate
104 from pylal.datatypes
import LALUnit
105 from pylal.datatypes
import LIGOTimeGPS
106 from pylal.datatypes
import REAL8FrequencySeries
107 from pylal.xlal.datatypes.snglinspiraltable
import from_buffer
as sngl_inspirals_from_buffer
109 from gstlal
import bottle
110 from gstlal
import reference_psd
111 from gstlal
import streamthinca
112 from gstlal
import svd_bank
113 from gstlal
import cbc_template_iir
114 from gstlal
import far
116 lsctables.LIGOTimeGPS = LIGOTimeGPS
def message_new_checkpoint(src, timestamp = None):
	"""
	Build a gstreamer application message carrying a "CHECKPOINT"
	structure with the given timestamp (may be None).  The message is
	meant to be posted on the pipeline bus by src.
	"""
	struct = gst.Structure("CHECKPOINT")
	struct.set_value("timestamp", timestamp)
	return gst.message_new_application(src, struct)
def channel_dict_from_channel_list(channel_list, channel_dict = None):
	"""
	given a list of channels like this ["H1=LSC-STRAIN",
	"H2=SOMETHING-ELSE"] produce a dictionary keyed by ifo of channel
	names.  The default values are LSC-STRAIN for all detectors.

	@channel_list: iterable of "IFO=CHANNEL" strings; any "=" characters
		after the first are stripped from the channel name (legacy
		behaviour of "".join(split("=")[1:]))
	@channel_dict: optional dictionary of defaults, updated in place and
		returned; if None (the default) a fresh copy of the built-in
		LSC-STRAIN defaults is used.  (Previously the default was a
		shared mutable dict literal, so successive calls leaked state
		into each other; the None sentinel fixes that while remaining
		backward-compatible for callers that pass their own dict.)
	"""
	if channel_dict is None:
		channel_dict = {"H1": "LSC-STRAIN", "H2": "LSC-STRAIN", "L1": "LSC-STRAIN", "V1": "LSC-STRAIN"}
	for channel in channel_list:
		ifo = channel.split("=")[0]
		chan = "".join(channel.split("=")[1:])
		channel_dict[ifo] = chan
	# NOTE: the original "return channel_dict" line was lost to
	# corruption; restored here (callers use the return value)
	return channel_dict
def pipeline_channel_list_from_channel_dict(channel_dict, opt = "channel-name"):
	"""
	produce a string of channel name arguments suitable for a pipeline.py
	program that doesn't technically allow multiple options.  For example
	--channel-name=H1=LSC-STRAIN --channel-name=H2=LSC-STRAIN

	The first instrument is emitted bare ("IFO=CHANNEL ") so it attaches
	to the option that precedes this string on the command line; each
	subsequent instrument gets its own "--<opt>=IFO=CHANNEL " argument.

	NOTE(review): the accumulator initialisation, the first-iteration
	branch and the return statement were lost to corruption and have been
	reconstructed to match the two surviving += fragments and the
	docstring example.
	"""
	outstr = ""
	for i, ifo in enumerate(channel_dict):
		if i == 0:
			outstr += "%s=%s " % (ifo, channel_dict[ifo])
		else:
			outstr += "--%s=%s=%s " % (opt, ifo, channel_dict[ifo])
	return outstr
def state_vector_on_off_dict_from_bit_lists(on_bit_list, off_bit_list, state_vector_on_off_dict = None):
	"""
	Produce a dictionary, keyed by ifo, of [on bits, off bits] pairs from
	lists of "IFO=BITS" strings.  BITS may be decimal ("H1=7") or, if the
	decimal parse fails, hexadecimal ("H1=0x7").

	@state_vector_on_off_dict: optional dictionary of defaults, updated
		in place and returned; if None (the default) a fresh copy of
		the built-in defaults is used.  (Previously the default was a
		shared mutable dict literal mutated across calls; the None
		sentinel fixes that while remaining backward-compatible for
		callers that pass their own dict.)
	"""
	if state_vector_on_off_dict is None:
		state_vector_on_off_dict = {"H1": [0x7, 0x160], "L1": [0x7, 0x160], "V1": [0x67, 0x100]}
	for line in on_bit_list:
		ifo = line.split("=")[0]
		bits = "".join(line.split("=")[1:])
		try:
			state_vector_on_off_dict[ifo][0] = int(bits)
		except ValueError:
			# not a decimal literal, assume hexadecimal
			state_vector_on_off_dict[ifo][0] = int(bits, 16)
	for line in off_bit_list:
		ifo = line.split("=")[0]
		bits = "".join(line.split("=")[1:])
		try:
			state_vector_on_off_dict[ifo][1] = int(bits)
		except ValueError:
			# not a decimal literal, assume hexadecimal
			state_vector_on_off_dict[ifo][1] = int(bits, 16)
	return state_vector_on_off_dict
def state_vector_on_off_list_from_bits_dict(bit_dict):
	"""
	Inverse of state_vector_on_off_dict_from_bit_lists():  turn a
	dictionary of [on bits, off bits] pairs keyed by ifo into a pair of
	command-line strings (onstr, offstr).  The first instrument is
	emitted bare ("IFO=BITS ") so it attaches to a preceding option;
	subsequent instruments are emitted with their full option prefix.

	NOTE(review): the accumulator initialisation, first-iteration branch
	and return statement were lost to corruption and reconstructed from
	the surviving += fragments.
	"""
	onstr = ""
	offstr = ""
	for i, ifo in enumerate(bit_dict):
		if i == 0:
			onstr += "%s=%s " % (ifo, bit_dict[ifo][0])
			offstr += "%s=%s " % (ifo, bit_dict[ifo][1])
		else:
			onstr += "--state-vector-on-bits=%s=%s " % (ifo, bit_dict[ifo][0])
			offstr += "--state-vector-off-bits=%s=%s " % (ifo, bit_dict[ifo][1])
	return onstr, offstr
def parse_svdbank_string(bank_string):
	"""
	parses strings of form

	H1:bank1.xml,H2:bank2.xml,L1:bank3.xml

	into a dictionary of bank file names keyed by instrument.  Returns an
	empty dictionary if bank_string is None.  Raises ValueError if an
	instrument appears more than once.

	NOTE(review): the dict initialisation, duplicate check and return
	statements were lost to corruption; reconstructed from the surviving
	guard and the "Only one svd bank per instrument" error message.
	"""
	out = {}
	if bank_string is None:
		return out
	for b in bank_string.split(','):
		ifo, bank = b.split(':')
		if ifo in out:
			raise ValueError("Only one svd bank per instrument should be given")
		out[ifo] = bank
	return out
def parse_iirbank_string(bank_string):
	"""
	parses strings of form

	H1:bank1.xml,H2:bank2.xml,L1:bank3.xml,H2:bank4.xml,...

	into a dictionary of lists of bank files keyed by instrument (an
	instrument may appear more than once).  Returns an empty dictionary
	if bank_string is None.

	NOTE(review): the dict initialisation and return statements were lost
	to corruption; reconstructed around the surviving setdefault() line.
	"""
	out = {}
	if bank_string is None:
		return out
	for b in bank_string.split(','):
		ifo, bank = b.split(':')
		out.setdefault(ifo, []).append(bank)
	return out
242 def parse_bank_files(svd_banks, verbose, snr_threshold = None):
244 given a dictionary of lists of svd template bank file names parse them
245 into a dictionary of bank classes
250 for instrument, filename
in svd_banks.items():
251 for n, bank
in enumerate(
svd_bank.read_banks(filename, contenthandler = LIGOLWContentHandler, verbose = verbose)):
254 bank.template_bank_filename = tempfile.NamedTemporaryFile(suffix =
".gz", delete =
False).name
255 xmldoc = ligolw.Document()
257 xmldoc.appendChild(ligolw.LIGO_LW()).appendChild(bank.sngl_inspiral_table.copy()).extend(bank.sngl_inspiral_table)
258 ligolw_utils.write_filename(xmldoc, bank.template_bank_filename, gz =
True, verbose = verbose)
260 bank.logname =
"%sbank%d" % (instrument, n)
261 banks.setdefault(instrument, []).append(bank)
262 if snr_threshold
is not None:
263 bank.snr_threshold = snr_threshold
267 raise ValueError(
"Could not parse bank files into valid bank dictionary.\n\t- Perhaps you are using out-of-date svd bank files? Please ensure that they were generated with the same code version as the parsing code")
def parse_iirbank_files(iir_banks, verbose, snr_threshold = 4.0):
	# given a dictionary of lists of iir template bank file names parse them
	# into a dictionary of bank classes
	#
	# NOTE(review): lines are missing from this block -- the docstring
	# delimiters, the accumulator initialisation (presumably
	# "banks = {}"), the construction of "bank" (presumably via
	# cbc_template_iir, imported above) and the final "return banks".
	# Recover the full text from revision control before relying on it.
	for instrument, files in iir_banks.items():
		for n, filename in enumerate(files):
			# NOTE(review): the line(s) constructing "bank" from
			# "filename" are missing here
			bank.template_bank_filename = filename
			bank.logname = "%sbank%d" % (instrument,n)
			banks.setdefault(instrument,[]).append(bank)
290 def subdir_from_T050017_filename(fname):
291 path = str(fname.split(
"-")[2])[:5]
# wire array, param and table parsing support into the content handler
# used to read LIGO Light-Weight XML input documents
# NOTE(review): the definition of LIGOLWContentHandler itself is missing
# from this copy of the file (it should appear just above these lines)
ligolw_array.use_in(LIGOLWContentHandler)
ligolw_param.use_in(LIGOLWContentHandler)
lsctables.use_in(LIGOLWContentHandler)
def snr_distribution(size, startsnr):
	"""
	Draw "size" samples from a power-law SNR distribution whose minimum
	value is "startsnr" (inverse of a power(3) variate scaled by
	startsnr).
	"""
	samples = random.power(3, size)
	return startsnr * samples**-1
def noncentrality(snrs, prefactor):
	"""
	Produce a set of noncentrality parameters that scale with snr^2,
	each modulated by "prefactor" times an independent uniform [0, 1)
	random factor.
	"""
	modulation = random.rand(len(snrs))
	return prefactor * modulation * snrs**2
def chisq_distribution(df, non_centralities, size):
	"""
	Produce a set of noncentral chisq values of size len(non_centralities)
	* size, with degrees of freedom given by df; the i-th slice of "size"
	entries is drawn with the i-th noncentrality parameter.

	NOTE(review): the final "return out" was lost to corruption and is
	restored here (without it the function computed the array and threw
	it away).
	"""
	out = numpy.empty((len(non_centralities) * size,))
	for i, nc in enumerate(non_centralities):
		out[i*size:(i+1)*size] = random.noncentral_chisquare(df, nc, size)
	return out
# columns to give the sngl_inspiral table in the output document (a
# subset of the full standard column set; gracedb uploads re-add the
# missing standard columns before upload)
# NOTE(review): referenced later as self.sngl_inspiral_columns, so this
# appears to be a class attribute; the enclosing class statement is
# missing from this copy of the file
sngl_inspiral_columns = ("process_id", "ifo", "end_time", "end_time_ns", "eff_distance", "coa_phase", "mass1", "mass2", "snr", "chisq", "chisq_dof", "bank_chisq", "bank_chisq_dof", "sigmasq", "spin1x", "spin1y", "spin1z", "spin2x", "spin2y", "spin2z", "event_id")
def __init__(self, filename, process_params, comment, instruments, seg, injection_filename = None, time_slide_file = None, tmp_path = None, replace_file = None, verbose = False):
	# Build the skeleton of the output coincs document (XML or SQLite).
	# NOTE(review): many lines are missing from this method (the embedded
	# original line numbers jump); each gap is marked below.  Recover the
	# full text from revision control before editing.
	#
	# closure that constructs a fresh document with identical parameters
	# (used when snapshotting to start a new output file)
	self.get_another = lambda: CoincsDocument(filename = filename, process_params = process_params, comment = comment, instruments = instruments, seg = seg, injection_filename = injection_filename, time_slide_file = time_slide_file, tmp_path = tmp_path, replace_file = replace_file, verbose = verbose)
	# build the in-memory XML document and register this process
	self.xmldoc = ligolw.Document()
	self.xmldoc.appendChild(ligolw.LIGO_LW())
	self.process = ligolw_process.register_to_xmldoc(self.xmldoc, u"gstlal_inspiral", process_params, comment = comment, ifos = instruments)
	# NOTE(review): orphaned keyword fragment -- the multi-line call it
	# belongs to (presumably a search_summary registration) is missing
	lalwrapper_cvs_tag = None,
	# append the (initially empty) tables the analysis fills in
	self.xmldoc.childNodes[-1].appendChild(lsctables.New(lsctables.SnglInspiralTable, columns = self.sngl_inspiral_columns))
	self.xmldoc.childNodes[-1].appendChild(lsctables.New(lsctables.CoincDefTable))
	self.xmldoc.childNodes[-1].appendChild(lsctables.New(lsctables.CoincTable))
	self.xmldoc.childNodes[-1].appendChild(lsctables.New(lsctables.CoincMapTable))
	self.xmldoc.childNodes[-1].appendChild(lsctables.New(lsctables.TimeSlideTable))
	self.xmldoc.childNodes[-1].appendChild(lsctables.New(lsctables.CoincInspiralTable))
	self.xmldoc.childNodes[-1].appendChild(lsctables.New(lsctables.SegmentDefTable, columns = ligolw_segments.LigolwSegmentList.segment_def_columns))
	self.xmldoc.childNodes[-1].appendChild(lsctables.New(lsctables.SegmentSumTable, columns = ligolw_segments.LigolwSegmentList.segment_sum_columns))
	self.xmldoc.childNodes[-1].appendChild(lsctables.New(lsctables.SegmentTable, columns = ligolw_segments.LigolwSegmentList.segment_columns))
	# optionally merge an injection list into the document
	if injection_filename is not None:
		ligolw_add.ligolw_add(self.xmldoc, [injection_filename], contenthandler = LIGOLWContentHandler, verbose = verbose)
	# populate the time_slide table: from a file if given, otherwise a
	# single zero-lag offset vector
	time_slide_table = lsctables.TimeSlideTable.get_table(self.xmldoc)
	if time_slide_file is not None:
		ligolw_add.ligolw_add(self.xmldoc, [time_slide_file], contenthandler = LIGOLWContentHandler, verbose = verbose)
	# NOTE(review): the following line probably sits in an "else:" branch
	# whose header is missing
	time_slide_table.append_offsetvector(dict.fromkeys(instruments, 0.0), self.process)
	# deduplicate offset vectors and remap IDs throughout the document
	time_slide_mapping = ligolw_time_slide.time_slides_vacuum(time_slide_table.as_dict())
	iterutils.inplace_filter(lambda row: row.time_slide_id not in time_slide_mapping, time_slide_table)
	for tbl in self.xmldoc.getElementsByTagName(ligolw.Table.tagName):
		tbl.applyKeyMapping(time_slide_mapping)
	# if the target is an SQLite database, move the document contents
	# into the database and re-point the in-memory document at it
	if filename is not None and filename.endswith('.sqlite'):
		self.working_filename = dbtables.get_connection_filename(filename, tmp_path = tmp_path, replace_file = replace_file, verbose = verbose)
		# NOTE(review): the line opening self.connection is missing
		ligolw_sqlite.insert_from_xmldoc(self.connection, self.xmldoc, preserve_ids = False, verbose = verbose)
		self.xmldoc.removeChild(self.xmldoc.childNodes[-1]).unlink()
		self.xmldoc.appendChild(dbtables.get_xml(self.connection))
		# NOTE(review): gap -- lines missing here
		# recover the process row ID assigned by the database and
		# store it (as an ilwd:char) on both process and search_summary
		(self.process.process_id,), = (self.search_summary.process_id,), = self.connection.cursor().execute("SELECT process_id FROM process WHERE program == ? AND node == ? AND username == ? AND unix_procid == ? AND start_time == ?", (self.process.program, self.process.node, self.process.username, self.process.unix_procid, self.process.start_time)).fetchall()
		self.process.process_id = self.search_summary.process_id = ilwd.ilwdchar(self.process.process_id)
		# NOTE(review): gap -- lines missing before the commit
		self.connection.commit()
def process_id(self):
	"""
	The ID of this document's process table row.
	"""
	proc = self.process
	return proc.process_id
def search_summary_outseg(self):
	"""
	The output segment recorded in this document's search_summary row.
	"""
	summary = self.search_summary
	return summary.out_segment
def add_to_search_summary_outseg(self, seg):
	# grow the search_summary output segment to also span seg
	# NOTE(review): the line initialising out_segs (presumably a
	# segmentlist built from self.search_summary.out_segment) is missing
	if out_segs == [None]:
		# NOTE(review): branch body missing (presumably clears the
		# [None] placeholder before the union below)
	out_segs |= segments.segmentlist([seg])
	self.search_summary.out_segment = out_segs.extent()
def get_next_sngl_id(self):
	"""
	Return the next unused event ID for the sngl_inspiral table.
	"""
	table = self.sngl_inspiral_table
	return table.get_next_id()
def T050017_filename(self, description, extension):
	# construct a T050017-style file name:
	# IFOS-DESCRIPTION-GPSSTART-DURATION.extension
	# NOTE(review): the line initialising "start" and "end" (presumably
	# from the search_summary output segment) is missing
	start, end = int(math.floor(start)), int(math.ceil(end))
	return "%s-%s-%d-%d.%s" % ("".join(sorted(self.process.instruments)), description, start, end - start, extension)
def write_output_file(self, verbose = False):
	# finalize segment lists and process metadata, then write the
	# document out (SQLite branch and XML branch below)
	self.llwsegments.finalize()
	ligolw_process.set_process_end_time(self.process)
	# NOTE(review): lines missing -- presumably the conditional selecting
	# between SQLite and XML output, and the computation of "seg"
	cursor = self.connection.cursor()
	# record output segment, trigger count and process end time in the db
	cursor.execute("UPDATE search_summary SET out_start_time = ?, out_start_time_ns = ?, out_end_time = ?, out_end_time_ns = ? WHERE process_id == ?", (seg[0].seconds, seg[0].nanoseconds, seg[1].seconds, seg[1].nanoseconds, self.search_summary.process_id))
	cursor.execute("UPDATE search_summary SET nevents = (SELECT count(*) FROM sngl_inspiral) WHERE process_id == ?", (self.search_summary.process_id,))
	cursor.execute("UPDATE process SET end_time = ? WHERE process_id == ?", (self.process.end_time, self.process.process_id))
	self.connection.commit()
	dbtables.build_indexes(self.connection, verbose = verbose)
	self.connection.close()
	# NOTE(review): lines missing -- presumably the XML branch header;
	# the code below sorts triggers then writes the XML file
	self.sngl_inspiral_table.sort(lambda a, b: cmp(a.end_time, b.end_time) or cmp(a.end_time_ns, b.end_time_ns) or cmp(a.ifo, b.ifo))
	ligolw_utils.write_filename(self.xmldoc, self.filename, gz = (self.filename or "stdout").endswith(".gz"), verbose = verbose, trap_signals = None)
def __init__(self, filename, process_params, pipeline, instruments, seg, coincidence_threshold, coinc_params_distributions, ranking_data, marginalized_likelihood_file = None, likelihood_files_namedtuple = None, injection_filename = None, time_slide_file = None, comment = None, tmp_path = None, likelihood_snapshot_interval = None, thinca_interval = 50.0, sngls_snr_threshold = None, gracedb_far_threshold = None, gracedb_group = "Test", gracedb_search = "LowMass", gracedb_pipeline = "gstlal", replace_file = True, verbose = False):
	# NOTE(review): most of this constructor is missing from this copy
	# (original line numbers jump); only fragments survive below.
	# serializes access to the output document and live statistics
	self.lock = threading.Lock()
	self.coincs_document = CoincsDocument(filename, process_params, comment, instruments, seg, injection_filename = injection_filename, time_slide_file = time_slide_file, tmp_path = tmp_path, replace_file = replace_file, verbose = verbose)
	# NOTE(review): orphaned keyword fragments -- the multi-line call
	# they belong to (presumably constructing the stream-thinca engine)
	# is missing
	coincidence_threshold = coincidence_threshold,
	thinca_interval = thinca_interval,
	sngls_snr_threshold = sngls_snr_threshold
	# histogram of alert latencies: 22 linear bins spanning 5--205 s
	# plus under/overflow
	self.latency_histogram = rate.BinnedArray(rate.NDBins((rate.LinearPlusOverflowBins(5, 205, 22),)))
def appsink_new_buffer(self, elem):
	# Handle a buffer of triggers arriving at an appsink: record live
	# segments, run stream thinca, and update the statistics.
	# NOTE(review): many lines are missing from this method; gaps marked.
	# pull the trigger buffer and unpack the sngl_inspiral events
	buf = elem.emit("pull-buffer")
	events = sngl_inspirals_from_buffer(buf)
	# the appsink element is named "<instrument>_..."
	instrument = elem.get_name().split("_")[0]
	# segment spanned by this buffer; extend output and trigger segments
	buf_timestamp = LIGOTimeGPS(0, buf.timestamp)
	buf_seg = segments.segment(buf_timestamp, buf_timestamp + LIGOTimeGPS(0, buf.duration))
	self.coincs_document.add_to_search_summary_outseg(buf_seg)
	self.seglistdicts["triggersegments"][instrument] |= segments.segmentlist((buf_seg,))
	# NOTE(review): gap -- presumably a loop over events starts here
	event.process_id = self.coincs_document.process_id
	event.event_id = self.coincs_document.get_next_sngl_id()
	# NOTE(review): gap -- likelihood-snapshot logic partially missing
	self.coinc_params_distributions.finish(verbose = self.verbose)
	# NOTE(review): gap
	# post a checkpoint request on the pipeline bus
	self.pipeline.get_bus().post(message_new_checkpoint(self.pipeline, timestamp = buf_timestamp.ns()))
	# NOTE(review): gap
	if self.likelihood_files_namedtuple.reference_likelihood_file is not None:
		# NOTE(review): gap within this branch
		self.coinc_params_distributions.finish(verbose = self.verbose)
	# NOTE(review): gap
	if ranking_data is None:
		# NOTE(review): branch body missing; the following line is
		# probably in a different branch
	ranking_data.finish(verbose = self.verbose)
	# NOTE(review): gap
	# run stream thinca; events flushed out of the coincidence window
	# contribute to the background statistics
	for event in self.stream_thinca.add_events(self.coincs_document.xmldoc, self.coincs_document.process_id, events, buf_timestamp, fapfar = self.fapfar):
		self.coinc_params_distributions.add_background(self.coinc_params_distributions.coinc_params((event,), None))
	self.coincs_document.commit()
	# NOTE(review): gap
	# record zero-lag coincs in the parameter distributions
	if self.stream_thinca.last_coincs:
		for coinc_event_id, coinc_event in self.stream_thinca.last_coincs.coinc_event_index.items():
			offset_vector = self.stream_thinca.last_coincs.offset_vector(coinc_event.time_slide_id)
			if (coinc_event.likelihood >= far.RankingData.ln_likelihood_ratio_threshold or self.marginalized_likelihood_file is None) and not any(offset_vector.values()):
				self.coinc_params_distributions.add_zero_lag(self.coinc_params_distributions.coinc_params(self.stream_thinca.last_coincs.sngl_inspirals(coinc_event_id), offset_vector))
	# NOTE(review): gap
	# keep only the highest-likelihood coinc from this round
	if self.stream_thinca.last_coincs:
		self.stream_thinca.last_coincs.coinc_event_index = dict([max(self.stream_thinca.last_coincs.coinc_event_index.iteritems(), key = lambda (coinc_event_id, coinc_event): coinc_event.likelihood)])
	# NOTE(review): gap
	# count zero-lag events above threshold in the ranking data
	if self.stream_thinca.last_coincs:
		for coinc_event_id, coinc_event in self.stream_thinca.last_coincs.coinc_event_index.items():
			offset_vector = self.stream_thinca.last_coincs.offset_vector(coinc_event.time_slide_id)
			ifos = self.stream_thinca.last_coincs.coinc_inspiral_index[coinc_event_id].ifos
			if (coinc_event.likelihood is not None and coinc_event.likelihood >= far.RankingData.ln_likelihood_ratio_threshold) and not any(offset_vector.values()):
				self.ranking_data.zero_lag_likelihood_rates[frozenset(lsctables.instrument_set_from_ifos(ifos))][coinc_event.likelihood,] += 1
def record_horizon_distance(self, instrument, timestamp, psd, m1, m2, snr_threshold = 8.0):
	# compute the horizon distance for an (m1, m2) binary at the given
	# SNR threshold from the current PSD and record it, keyed by GPS
	# time, in the instrument's horizon history
	horizon_distance = reference_psd.horizon_distance(psd, m1 = m1, m2 = m2, snr = snr_threshold, f_min = 40.0, f_max = 0.85 * (psd.f0 + (len(psd.data) - 1) * psd.deltaF))
	# guard against pathological PSDs
	assert not (math.isnan(horizon_distance) or math.isinf(horizon_distance))
	# NOTE(review): lines missing -- the two assignments below look like
	# the bodies of a try / except KeyError pair (look up the history,
	# create a fresh NearestLeafTree on first use); confirm against
	# revision control
	horizon_history = self.coinc_params_distributions.horizon_history[instrument]
	horizon_history = self.coinc_params_distributions.horizon_history[instrument] = far.NearestLeafTree()
	horizon_history[float(timestamp)] = horizon_distance
def __get_likelihood_file(self):
	# build an XML document carrying the current likelihood / ranking
	# statistic data, with process and search_summary bookkeeping rows
	xmldoc = ligolw.Document()
	xmldoc.appendChild(ligolw.LIGO_LW())
	process = ligolw_process.register_to_xmldoc(xmldoc, u"gstlal_inspiral", paramdict = {})
	# in/out segments are the extent of all trigger segments seen so far
	search_summary = ligolw_search_summary.append_search_summary(xmldoc, process, ifos = self.seglistdicts["triggersegments"].keys(), inseg = self.seglistdicts["triggersegments"].extent_all(), outseg = self.seglistdicts["triggersegments"].extent_all())
	# NOTE(review): gap -- the statistics payload is presumably appended
	# to the document here
	ligolw_process.set_process_end_time(process)
	# NOTE(review): the "return xmldoc" line is missing
def web_get_likelihood_file(self):
	# serve the current likelihood document through the web interface
	# NOTE(review): lines missing -- presumably lock acquisition and the
	# XML serialization into "output"
	output = StringIO.StringIO()
	outstr = output.getvalue()
	# NOTE(review): the cleanup and "return outstr" lines are missing
# NOTE(review): the "def" line of this method is missing from this copy
# (presumably a flush/finalisation method); the body mirrors the tail of
# appsink_new_buffer() but drains stream thinca instead of adding events.
# flush remaining events; they contribute to the background statistics
for event in self.stream_thinca.flush(self.coincs_document.xmldoc, self.coincs_document.process_id, fapfar = self.fapfar):
	self.coinc_params_distributions.add_background(self.coinc_params_distributions.coinc_params((event,), None))
self.coincs_document.commit()
# record zero-lag coincs in the parameter distributions
if self.stream_thinca.last_coincs:
	for coinc_event_id, coinc_event in self.stream_thinca.last_coincs.coinc_event_index.items():
		offset_vector = self.stream_thinca.last_coincs.offset_vector(coinc_event.time_slide_id)
		if (coinc_event.likelihood >= far.RankingData.ln_likelihood_ratio_threshold or self.marginalized_likelihood_file is None) and not any(offset_vector.values()):
			self.coinc_params_distributions.add_zero_lag(self.coinc_params_distributions.coinc_params(self.stream_thinca.last_coincs.sngl_inspirals(coinc_event_id), offset_vector))
# keep only the highest-likelihood coinc from this round
if self.stream_thinca.last_coincs:
	self.stream_thinca.last_coincs.coinc_event_index = dict([max(self.stream_thinca.last_coincs.coinc_event_index.iteritems(), key = lambda (coinc_event_id, coinc_event): coinc_event.likelihood)])
# count zero-lag events above threshold in the ranking data
if self.stream_thinca.last_coincs:
	for coinc_event_id, coinc_event in self.stream_thinca.last_coincs.coinc_event_index.items():
		offset_vector = self.stream_thinca.last_coincs.offset_vector(coinc_event.time_slide_id)
		ifos = self.stream_thinca.last_coincs.coinc_inspiral_index[coinc_event_id].ifos
		if (coinc_event.likelihood is not None and coinc_event.likelihood >= far.RankingData.ln_likelihood_ratio_threshold) and not any(offset_vector.values()):
			self.ranking_data.zero_lag_likelihood_rates[frozenset(lsctables.instrument_set_from_ifos(ifos))][coinc_event.likelihood,] += 1
def __do_gracedb_alerts(self):
	# Upload sufficiently significant candidates (and, once, the PSDs)
	# to gracedb.
	# NOTE(review): many lines are missing from this method; each gap is
	# marked.  Recover the full text from revision control before editing.
	if self.stream_thinca.last_coincs:
		gracedb_client = gracedb.Client()
		# NOTE(review): gap
		coinc_inspiral_index = self.stream_thinca.last_coincs.coinc_inspiral_index
		# NOTE(review): gap -- psdmessage / gracedb_ids initialisation
		# lines are missing
		for coinc_event in self.stream_thinca.last_coincs.coinc_event_index.values():
			# skip candidates whose combined FAR is missing, above
			# threshold, or NaN
			if coinc_inspiral_index[coinc_event.coinc_event_id].combined_far is None or coinc_inspiral_index[coinc_event.coinc_event_id].combined_far > self.gracedb_far_threshold or numpy.isnan(coinc_inspiral_index[coinc_event.coinc_event_id].combined_far):
				# NOTE(review): branch body missing (presumably
				# "continue")
			# serialize the PSDs once, on the first upload
			if psdmessage is None:
				# NOTE(review): gap
				print >>sys.stderr, "retrieving PSDs from whiteners and generating psd.xml.gz ..."
				# NOTE(review): gap -- the loop over instruments
				# (and psddict initialisation) is missing
				elem = self.pipeline.get_by_name("lal_whiten_%s" % instrument)
				# NOTE(review): gap
				psddict[instrument] = REAL8FrequencySeries(
					# NOTE(review): some constructor arguments
					# are missing from this call
					epoch = LIGOTimeGPS(lal.UTCToGPS(time.gmtime()), 0),
					deltaF = elem.get_property("delta-f"),
					sampleUnits = LALUnit("s strain^2"),
					data = numpy.array(elem.get_property("mean-psd"))
				# NOTE(review): the closing of the constructor
				# call is missing
				psdmessage = StringIO.StringIO()
				reference_psd.write_psd_fileobj(psdmessage, psddict, gz = True, trap_signals = None)
			# build the T050017-style name for the coinc document
			observatories = "".join(sorted(set(instrument[0] for instrument in self.seglistdicts["triggersegments"])))
			instruments = "".join(sorted(self.seglistdicts["triggersegments"]))
			description = "%s_%s_%s_%s" % (instruments, ("%.4g" % coinc_inspiral_index[coinc_event.coinc_event_id].mass).replace(".", "_").replace("-", "_"), self.gracedb_group, self.gracedb_search)
			end_time = int(coinc_inspiral_index[coinc_event.coinc_event_id].get_end())
			filename = "%s-%s-%d-%d.xml" % (observatories, description, end_time, 0)
			# NOTE(review): gap
			print >>sys.stderr, "sending %s to gracedb ..." % filename
			message = StringIO.StringIO()
			xmldoc = self.stream_thinca.last_coincs[coinc_event.coinc_event_id]
			# gracedb expects the full standard sngl_inspiral
			# column set, so re-add every standard column
			sngl_inspiral_table = lsctables.SnglInspiralTable.get_table(xmldoc)
			for standard_column in ("process_id", "ifo", "search", "channel", "end_time", "end_time_ns", "end_time_gmst", "impulse_time", "impulse_time_ns", "template_duration", "event_duration", "amplitude", "eff_distance", "coa_phase", "mass1", "mass2", "mchirp", "mtotal", "eta", "kappa", "chi", "tau0", "tau2", "tau3", "tau4", "tau5", "ttotal", "psi0", "psi3", "alpha", "alpha1", "alpha2", "alpha3", "alpha4", "alpha5", "alpha6", "beta", "f_final", "snr", "chisq", "chisq_dof", "bank_chisq", "bank_chisq_dof", "cont_chisq", "cont_chisq_dof", "sigmasq", "rsqveto_duration", "Gamma0", "Gamma1", "Gamma2", "Gamma3", "Gamma4", "Gamma5", "Gamma6", "Gamma7", "Gamma8", "Gamma9", "spin1x", "spin1y", "spin1z", "spin2x", "spin2y", "spin2z", "event_id"):
				sngl_inspiral_table.appendColumn(standard_column)
			# NOTE(review): gap
			ligolw_utils.write_fileobj(xmldoc, message, gz = False, trap_signals = None)
			# NOTE(review): gap -- the upload call that produces
			# "resp" is missing
			resp_json = resp.json()
			if resp.status != httplib.CREATED:
				print >>sys.stderr, "gracedb upload of %s failed" % filename
			# NOTE(review): the following two lines are probably in
			# an "else:" branch whose header is missing
			print >>sys.stderr, "event assigned grace ID %s" % resp_json["graceid"]
			gracedb_ids.append(resp_json["graceid"])
			# NOTE(review): gap -- debug copy of the uploaded
			# message to a local file
			proc = subprocess.Popen(("/bin/cp", "/dev/stdin", filename), stdin = subprocess.PIPE)
			proc.stdin.write(message.getvalue())
			# NOTE(review): gap
		# attach the serialized PSDs to each uploaded candidate
		if psdmessage is not None:
			filename = "psd.xml.gz"
			for gracedb_id in gracedb_ids:
				resp = gracedb_client.writeLog(gracedb_id, "strain spectral densities", filename = filename, filecontents = psdmessage.getvalue(), tagname = "psd")
				resp_json = resp.json()
				if resp.status != httplib.CREATED:
					print >>sys.stderr, "gracedb upload of %s for ID %s failed" % (filename, gracedb_id)
def do_gracedb_alerts(self):
	# public wrapper around __do_gracedb_alerts()
	# NOTE(review): body missing from this copy (presumably acquires
	# self.lock and delegates)
def __update_eye_candy(self):
	# refresh the latency / SNR / RAM histories used by the web monitor
	# NOTE(review): lines are missing from this method; gaps marked.
	if self.stream_thinca.last_coincs:
		# peak RSS of this process + children, in MB
		self.ram_history.append((time.time(), (resource.getrusage(resource.RUSAGE_SELF).ru_maxrss + resource.getrusage(resource.RUSAGE_CHILDREN).ru_maxrss) / 1048576.))
		# NOTE(review): gap -- latency_val / snr_val initialisation
		# lines are missing
		coinc_inspiral_index = self.stream_thinca.last_coincs.coinc_inspiral_index
		for coinc_event_id, coinc_inspiral in coinc_inspiral_index.items():
			# NOTE(review): gap
			# latency is recorded in the minimum_duration column
			latency = coinc_inspiral.minimum_duration
			# NOTE(review): gap
			if latency_val is None:
				t = float(coinc_inspiral_index[coinc_event_id].get_end())
				latency_val = (t, latency)
			snr = coinc_inspiral_index[coinc_event_id].snr
			if snr >= snr_val[1]:
				t = float(coinc_inspiral_index[coinc_event_id].get_end())
				# NOTE(review): the snr_val update line is missing
		if latency_val is not None:
			self.latency_history.append(latency_val)
		# NOTE(review): gap -- probably a guard on snr_val
		self.snr_history.append(snr_val)
def update_eye_candy(self):
	# public wrapper around __update_eye_candy()
	# NOTE(review): body missing from this copy (presumably acquires
	# self.lock and delegates)
def web_get_latency_histogram(self):
	# stream the latency histogram as "latency count" text lines,
	# skipping the under/overflow bins at either end
	# NOTE(review): a line is missing before the loop (probably
	# "with self.lock:")
	for latency, number in zip(self.latency_histogram.centres()[0][1:-1], self.latency_histogram.array[1:-1]):
		yield "%e %e\n" % (latency, number)
def web_get_latency_history(self):
	# stream the recorded (time, latency) pairs as text lines
	# NOTE(review): lines missing (probably lock acquisition and the
	# loop header binding "time" and "latency")
	yield "%f %e\n" % (time, latency)
def web_get_snr_history(self):
	# stream the recorded (time, snr) pairs as text lines
	# NOTE(review): lines missing (probably lock acquisition and the
	# loop header binding "time" and "snr")
	yield "%f %e\n" % (time, snr)
def web_get_ram_history(self):
	# stream the recorded (time, ram MB) pairs as text lines
	# NOTE(review): lines missing (probably lock acquisition and the
	# loop header binding "time" and "ram")
	yield "%f %e\n" % (time, ram)
def web_get_gracedb_far_threshold(self):
	# report the current gracedb FAR threshold over the web interface
	# NOTE(review): body missing from this copy
def web_set_gracedb_far_threshold(self):
	# set the gracedb FAR threshold from the "rate" HTTP form field
	# NOTE(review): lines missing (probably try/lock handling before and
	# the conversion / response lines after)
	rate = bottle.request.forms["rate"]
def web_get_sngls_snr_threshold(self):
	# report the current singles SNR threshold over the web interface
	# NOTE(review): a line is missing (probably "with self.lock:")
	if self.stream_thinca.sngls_snr_threshold is not None:
		yield "snr=%.17g\n" % self.stream_thinca.sngls_snr_threshold
	# NOTE(review): the "else" branch (presumably yielding "snr=\n")
	# is missing
def web_set_sngls_snr_threshold(self):
	"""
	Bottle handler:  set (or clear) the singles SNR threshold from the
	"snr" HTTP form field.  An empty field clears the threshold.

	BUGFIX: the conversion previously read float(rate) -- "rate" is the
	variable used by the sibling web_set_gracedb_far_threshold() handler
	and is not defined here, so any non-empty submission raised
	NameError.  It now converts the "snr" field that was actually read.

	NOTE(review): lock handling and the branch structure were lost to
	corruption and have been reconstructed; confirm against revision
	control.
	"""
	with self.lock:
		snr_threshold = bottle.request.forms["snr"]
		if snr_threshold:
			self.stream_thinca.sngls_snr_threshold = float(snr_threshold)
			yield "OK: snr=%.17g\n" % self.stream_thinca.sngls_snr_threshold
		else:
			self.stream_thinca.sngls_snr_threshold = None
			yield "OK: snr=\n"
def __write_output_file(self, filename = None, verbose = False):
	# record the accumulated segment lists in the document, then write it
	# NOTE(review): lines missing before the loop (probably statistics
	# finalisation)
	for segtype, seglistdict in self.seglistdicts.items():
		self.coincs_document.llwsegments.insert_from_segmentlistdict(seglistdict, name = segtype, comment = "LLOID")
	# allow the caller to override the output file name
	if filename is not None:
		self.coincs_document.filename = filename
	self.coincs_document.write_output_file(verbose = verbose)
def __write_likelihood_file(self, filename, description, snapshot = False, verbose = False):
	# write the likelihood data atomically: serialize to a temporary file
	# in the target directory, then rename into place
	# NOTE(review): lines are missing at the top of this method
	path, filename = os.path.split(filename)
	tmp_likelihood_file = os.path.join(path, 'tmp_%s' % filename)
	ligolw_utils.write_filename(self.__get_likelihood_file(), tmp_likelihood_file, gz = (filename or "stdout").endswith(".gz"), verbose = verbose, trap_signals = None)
	shutil.move(tmp_likelihood_file, os.path.join(path,filename))
	# NOTE(review): gap -- the lines below are probably guarded by
	# "if snapshot:"
	# NOTE(review): T050017_filename() (defined above) takes only
	# (description, extension); the "verbose" keyword here looks like a
	# bug -- confirm against revision control
	fname = self.coincs_document.T050017_filename(description + '_DISTSTATS', 'xml.gz', verbose = verbose)
	path = subdir_from_T050017_filename(fname)
	shutil.copy(os.path.join(path,filename), os.path.join(path, fname))
def write_output_file(self, filename = None, description = "", verbose = False):
	# write the output document and, if configured, the likelihood file
	# NOTE(review): lines missing (probably lock acquisition and the
	# call to __write_output_file)
	if self.likelihood_files_namedtuple.likelihood_file:
		self.__write_likelihood_file(self.likelihood_files_namedtuple.likelihood_file, description, verbose = verbose)
1044 def snapshot_output_file(self, description, extension, verbose = False):
1046 coincs_document = self.coincs_document.get_another()
1050 fname = self.coincs_document.T050017_filename(description, extension)
1051 fname = os.path.join(subdir_from_T050017_filename(fname), fname)
1053 if self.likelihood_files_namedtuple.likelihood_file:
1054 self.
__write_likelihood_file(self.likelihood_files_namedtuple.likelihood_file, description, snapshot =
True, verbose = verbose)