try:
	from fpconst import NaN, NegInf, PosInf
except ImportError:
	# fpconst is not part of the standard library and might not be
	# available
	NaN = float("nan")
	NegInf = float("-inf")
	PosInf = float("+inf")
import bisect
import copy
import itertools
import math
import multiprocessing
import multiprocessing.queues
import numpy
import random
import sqlite3
import sys
import warnings
from scipy import interpolate
from scipy import optimize
try:
	from scipy.optimize import curve_fit
except ImportError:
	# pre scipy/0.9:  fall back to the copy bundled with gstlal
	from gstlal.curve_fit import curve_fit
from scipy import stats
from scipy.special import ive

sqlite3.enable_callback_tracebacks(True)

from glue import iterutils
from glue.ligolw import ligolw
from glue.ligolw import array as ligolw_array
from glue.ligolw import param as ligolw_param
from glue.ligolw import lsctables
from glue.ligolw import dbtables
from glue.ligolw import utils as ligolw_utils
from glue.ligolw.utils import search_summary as ligolw_search_summary
from glue.ligolw.utils import segments as ligolw_segments
from glue.segmentsUtils import vote
from glue.text_progress_bar import ProgressBar
import lal
import lalsimulation
from pylal import rate
from pylal import snglcoinc
def logiv(v, z):
	# from Abramowitz and Stegun (9.7.1):  for large z, with
	# mu = 4 v^2,
	#
	#	I_v(z) ~ exp(z) / sqrt(2 pi z) * (1 - (mu - 1)/(8 z) + ...)
	#
	# scipy's ive(v, z) = I_v(z) exp(-z), so a = ln I_v(z) - z
	with numpy.errstate(divide = "ignore"):
		a = numpy.log(ive(v, z))
	# for z so large that ive() underflows, use the asymptotic
	# expansion of ln I_v(z) - z instead
	with numpy.errstate(divide = "ignore", invalid = "ignore"):
		b = -math.log(2. * math.pi) / 2. - numpy.log(z) / 2.
		mu = 4. * v**2.
		# clip z away from 0 for the series;  the result is only
		# used where z is large anyway
		zc = numpy.clip(z, 1., PosInf)
		b += numpy.log1p((mu - 1.) / (8. * zc) * (-1. + (mu - 9.) / (16. * zc) * (1. - (mu - 25.) / (24. * zc))))

	return z + numpy.where(z < 1e8, a, b)
def ncx2logpdf(x, k, l):
	return -math.log(2.) - (x + l) / 2. + (k / 4. - .5) * (numpy.log(x) - numpy.log(l)) + logiv(k / 2. - 1., numpy.sqrt(l * x))


def ncx2pdf(x, k, l):
	return numpy.exp(ncx2logpdf(x, k, l))
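
# Illustrative cross-check (a sketch, not part of the module's API):  for
# arguments where scipy does not underflow, ncx2pdf() should agree with
# scipy's non-central chi-squared density;  the logiv()-based form above
# exists so the log-density stays finite much farther into the tails.
#
#	>>> import numpy
#	>>> from scipy import stats
#	>>> x = numpy.array([1., 10., 100.])
#	>>> numpy.allclose(ncx2pdf(x, 2., 50.), stats.ncx2.pdf(x, 2., 50.))
#	True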
def fap_after_trials(p, m):
	"""
	Given the probability, p, that an event occurs, compute the
	probability of at least one such event occurring after m
	independent trials.

	The return value is 1 - (1 - p)^m computed avoiding round-off
	errors and underflows.  m cannot be negative but need not be an
	integer.

	Example:

	>>> fap_after_trials(0.5, 1)
	0.5
	>>> fap_after_trials(0.066967008463192584, 10)
	0.5
	>>> fap_after_trials(0.0069075045629640984, 100)
	0.5
	>>> fap_after_trials(0.00069290700954747807, 1000)
	0.5
	>>> fap_after_trials(0.000069312315846428086, 10000)
	0.5
	>>> fap_after_trials(.000000006931471781576803, 100000000)
	0.5
	>>> fap_after_trials(0.000000000000000069314718055994534, 10000000000000000)
	0.5
	>>> "%.15g" % fap_after_trials(0.1, 21.854345326782834)
	'0.9'
	>>> "%.15g" % fap_after_trials(1e-17, 2.3025850929940458e+17)
	'0.9'
	>>> fap_after_trials(0.1, .2)	# doctest: +ELLIPSIS
	0.02085163763...
	"""
	if m <= 0.:
		raise ValueError("m = %g must be positive" % m)
	if not (0. <= p <= 1.):
		raise ValueError("p = %g must be between 0 and 1 inclusively" % p)
	if p == 1.:
		return 1.

	#
	# 1 - (1 - p)^m = -expm1(m log1p(-p)).  when m p is far below
	# machine precision the result is -m log1p(-p) itself;  when m p
	# is merely small, expand with the binomial theorem,
	#
	#	1 - (1 - p)^m = m p - C(m, 2) p^2 + C(m, 3) p^3 - ...,
	#
	# summing the terms smallest first to control round-off.  the
	# arithmetic raises OverflowError when m is an integer too large
	# to convert to a float;  p is then non-zero, so the probability
	# has converged to certainty.
	#

	try:
		if m * p < 1e-18:
			return -m * math.log1p(-p)
		if m * p < 4.:
			s = []
			term = -1.
			for n in itertools.count():
				term *= (n - m) * p / (n + 1.)
				s.append(term)
				if abs(term) <= 1e-18 * s[0]:
					return sum(reversed(s))
	except OverflowError:
		return 1.

	#
	# otherwise compute x = m log(1 - p).  if x is between -log 2 and
	# 0, 1 - exp(x) suffers cancellation, so sum the Taylor series
	#
	#	1 - exp(x) = -(x + x^2/2! + x^3/3! + ...)
	#
	# smallest terms first;  below -log 2 the direct expression is
	# safe.
	#

	try:
		x = m * math.log1p(-p)
	except OverflowError:
		return 1.
	if x > -0.69314718055994529:
		s = [x]
		term = x
		for n in itertools.count(2):
			term *= x / n
			s.append(term)
			if abs(term) <= -1e-18 * s[0]:
				return -sum(reversed(s))
	return 1. - math.exp(x)

fap_after_trials_arr = numpy.frompyfunc(fap_after_trials, 2, 1)
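
# Usage sketch (hypothetical values):  fap_after_trials_arr() broadcasts
# over numpy arrays, but because it is built with numpy.frompyfunc() it
# returns an object-dtype array that usually wants casting back to float:
#
#	>>> import numpy
#	>>> faps = fap_after_trials_arr(numpy.array([1e-7, 1e-5, 1e-3]), 1000.)
#	>>> faps = faps.astype(float)	# frompyfunc yields dtype=object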


def trials_from_faps(p0, p1):
	"""
	Given the probability, p0, of an event occurring, and the
	probability, p1, of at least one such event being observed after
	some number of independent trials, solve for and return the
	number of trials, m, that relates the two probabilities.  The
	three quantities are related by p1 = 1 - (1 - p0)^m.  Generally
	the return value is not an integer.

	See also fap_after_trials().  Note that if p0 is 0 or 1 then p1
	must be 0 or 1 respectively, and in both cases m is undefined.
	Otherwise if p1 is 1 then inf is returned.
	"""
	assert 0. <= p0 <= 1.
	assert 0. <= p1 <= 1.
	if p0 == 0 or p0 == 1:
		assert p1 == p0
		raise ValueError("m undefined")
	if p1 == 1.:
		return PosInf
	return math.log1p(-p1) / math.log1p(-p0)
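
# Round-trip sketch:  trials_from_faps() inverts fap_after_trials() in m,
# so recovering the trial count from a pair of probabilities should
# reproduce the input (values here are illustrative):
#
#	>>> m = 21.854345326782834
#	>>> p1 = fap_after_trials(0.1, m)
#	>>> trials_from_faps(0.1, p1)	# doctest: +ELLIPSIS
#	21.854...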


class NearestLeafTree(object):
	"""
	A simple binary tree in which look-ups return the value of the
	closest leaf.  Only float objects are supported for the keys and
	values.  Look-ups raise KeyError if the tree is empty.

	Example:

	>>> x = NearestLeafTree()
	>>> x[100.0] = 120.0
	>>> x[104.0] = 100.0
	>>> x[102.0] = 110.0
	>>> x[100.5]
	120.0
	>>> x.to_xml(u"H1").write()	# doctest: +ELLIPSIS
	<Array Type="real_8" Name="H1:nearestleaftree:array">
		...
		<Stream Delimiter=" " Type="Local">
			...
		</Stream>
	</Array>
	"""
	def __init__(self, items = ()):
		"""
		Initialize a NearestLeafTree.

		Example:

		>>> x = NearestLeafTree()
		>>> x = NearestLeafTree([(100., 120.), (104., 100.), (102., 110.)])
		>>> y = {100.: 120., 104.: 100., 102.: 100.}
		>>> x = NearestLeafTree(y.items())
		"""
		self.tree = list(items)
		self.tree.sort()
	def __setitem__(self, x, val):
		"""
		Example:

		>>> x = NearestLeafTree()
		>>> x[100.:200.] = 0.
		>>> x[150.] = 1.
		>>> x
		NearestLeaftree([(100, 0), (150, 1), (200, 0)])
		"""
		if isinstance(x, slice):
			# replace all entries in the requested range of
			# co-ordinates with two entries, one at each
			# boundary, both set to the given value
			if x.step is not None:
				raise ValueError("%s: step not supported" % repr(x))
			if x.start is None:
				if not self.tree:
					raise IndexError("open-ended slice not supported with empty tree")
				x = slice(self.minkey(), x.stop)
			if x.stop is None:
				if not self.tree:
					raise IndexError("open-ended slice not supported with empty tree")
				x = slice(x.start, self.maxkey())
			if x.stop < x.start:
				raise ValueError("%s: bounds out of order" % repr(x))
			lo = bisect.bisect_left(self.tree, (x.start, NegInf))
			hi = bisect.bisect_right(self.tree, (x.stop, PosInf))
			self.tree[lo:hi] = ((x.start, val), (x.stop, val))
		else:
			# replace all entries having the same co-ordinate
			# with this one
			lo = bisect.bisect_left(self.tree, (x, NegInf))
			hi = bisect.bisect_right(self.tree, (x, PosInf))
			self.tree[lo:hi] = ((x, val),)
	def __getitem__(self, x):
		if not self.tree:
			raise KeyError(x)
		if isinstance(x, slice):
			raise ValueError("slices not supported")
		hi = bisect.bisect_right(self.tree, (x, PosInf))
		try:
			x_hi, val_hi = self.tree[hi]
		except IndexError:
			x_hi, val_hi = self.tree[-1]
		if hi == 0:
			x_lo, val_lo = x_hi, val_hi
		else:
			x_lo, val_lo = self.tree[hi - 1]
		# return the value of whichever leaf is closest
		return val_lo if x < x_lo + (x_hi - x_lo) / 2. else val_hi
	def __delitem__(self, x):
		"""
		Example:

		>>> x = NearestLeafTree([(100., 0.), (150., 1.), (200., 0.)])
		>>> del x[150.]
		>>> x
		NearestLeaftree([(100, 0), (200, 0)])
		>>> del x[:]
		>>> x
		NearestLeaftree([])
		"""
		if isinstance(x, slice):
			if x.step is not None:
				raise ValueError("%s: step not supported" % repr(x))
			if x.start is None:
				if not self.tree:
					# no-op
					return
				x = slice(self.minkey(), x.stop)
			if x.stop is None:
				if not self.tree:
					# no-op
					return
				x = slice(x.start, self.maxkey())
			if x.stop < x.start:
				# no-op
				return
			lo = bisect.bisect_left(self.tree, (x.start, NegInf))
			hi = bisect.bisect_right(self.tree, (x.stop, PosInf))
			del self.tree[lo:hi]
		else:
			lo = bisect.bisect_left(self.tree, (x, NegInf))
			if lo >= len(self.tree) or self.tree[lo][0] != x:
				raise KeyError(x)
			del self.tree[lo]
	def __nonzero__(self):
		return bool(self.tree)

	def __iadd__(self, other):
		for x, val in other.tree:
			self[x] = val
		return self

	def keys(self):
		return [x for x, val in self.tree]

	def values(self):
		return [val for x, val in self.tree]

	def items(self):
		return list(self.tree)
	def min(self):
		"""
		Return the minimum value stored in the tree.  This is O(n).
		"""
		if not self.tree:
			raise ValueError("empty tree")
		return min(val for x, val in self.tree)

	def minkey(self):
		"""
		Return the minimum key stored in the tree.  This is O(1).
		"""
		if not self.tree:
			raise ValueError("empty tree")
		return self.tree[0][0]

	def max(self):
		"""
		Return the maximum value stored in the tree.  This is O(n).
		"""
		if not self.tree:
			raise ValueError("empty tree")
		return max(val for x, val in self.tree)

	def maxkey(self):
		"""
		Return the maximum key stored in the tree.  This is O(1).
		"""
		if not self.tree:
			raise ValueError("empty tree")
		return self.tree[-1][0]
	def __contains__(self, x):
		try:
			return bool(self.tree) and self.tree[bisect.bisect_left(self.tree, (x, NegInf))][0] == x
		except IndexError:
			return False

	def __len__(self):
		return len(self.tree)

	def __repr__(self):
		return "NearestLeaftree([%s])" % ", ".join("(%g, %g)" % item for item in self.tree)
	@classmethod
	def from_xml(cls, xml, name):
		return cls(map(tuple, ligolw_array.get_array(xml, u"%s:nearestleaftree" % name).array[:]))

	def to_xml(self, name):
		return ligolw_array.from_array(u"%s:nearestleaftree" % name, numpy.array(self.tree, dtype = "double"))


class HorizonHistories(dict):
	def __iadd__(self, other):
		for key, history in other.iteritems():
			try:
				self[key] += history
			except KeyError:
				self[key] = copy.deepcopy(history)
		return self
	def minkey(self):
		"""
		Return the minimum key stored in the trees.
		"""
		minkeys = tuple(history.minkey() for history in self.values() if history)
		if not minkeys:
			raise ValueError("empty trees")
		return min(minkeys)

	def maxkey(self):
		"""
		Return the maximum key stored in the trees.
		"""
		maxkeys = tuple(history.maxkey() for history in self.values() if history)
		if not maxkeys:
			raise ValueError("empty trees")
		return max(maxkeys)
	def getdict(self, x):
		return dict((key, value[x]) for key, value in self.iteritems())

	def randhorizons(self):
		"""
		Generator yielding a sequence of random horizon distance
		dictionaries chosen by drawing random times uniformly
		distributed between the lowest and highest times recorded
		in the history and returning the dictionary of horizon
		distances for each of those times.
		"""
		x_min = self.minkey()
		x_max = self.maxkey()
		getdict = self.getdict
		rnd = random.uniform
		while 1:
			yield getdict(rnd(x_min, x_max))
	def all(self):
		"""
		Returns a list of the unique sets of horizon distances
		recorded in the histories.
		"""
		# times for which a horizon distance has been recorded in
		# at least one history
		all_x = set(x for value in self.values() for x in value.keys())
		# the unique horizon distance dictionaries at those times
		result = set(frozenset(self.getdict(x).items()) for x in all_x)
		return map(dict, result)
	@classmethod
	def from_xml(cls, xml, name):
		xml = [elem for elem in xml.getElementsByTagName(ligolw.LIGO_LW.tagName) if elem.hasAttribute(u"Name") and elem.Name == u"%s:horizonhistories" % name]
		if len(xml) != 1:
			raise ValueError("document must contain exactly 1 HorizonHistories named '%s'" % name)
		xml, = xml
		keys = [elem.Name.replace(u":nearestleaftree", u"") for elem in xml.getElementsByTagName(ligolw.Array.tagName) if elem.hasAttribute(u"Name") and elem.Name.endswith(u":nearestleaftree")]
		self = cls()
		for key in keys:
			self[key] = NearestLeafTree.from_xml(xml, key)
		return self
	def to_xml(self, name):
		xml = ligolw.LIGO_LW({u"Name": u"%s:horizonhistories" % name})
		for key, value in self.items():
			xml.appendChild(value.to_xml(key))
		return xml


class CoincParams(dict):
	# place-holder class to allow a parameter dictionary to carry
	# attributes as well
	__slots__ = ("horizons",)


class ThincaCoincParamsDistributions(snglcoinc.CoincParamsDistributions):
	ligo_lw_name_suffix = u"gstlal_inspiral_coincparamsdistributions"

	instrument_categories = snglcoinc.InstrumentCategories()

	# SNR threshold below which the (SNR, \chi^2) binnings are zeroed
	# (the value here is an assumption;  it must lie above the lower
	# boundary of the ATanLogarithmicBins below)
	snr_min = 4.0
	@staticmethod
	def quantize_horizon_distances(horizon_distances, log_distance_tolerance = PosInf, min_ratio = 0.1):
		horizon_distance_norm = max(horizon_distances.values())
		assert horizon_distance_norm != 0.
		# with an infinite tolerance, pretend all distances are
		# equal
		if math.isinf(log_distance_tolerance):
			return dict((instrument, 1.) for instrument in horizon_distances)
		# otherwise, express each distance relative to the largest,
		# snap it to a log-spaced grid, and zero the instruments
		# that fall below min_ratio of the largest
		min_distance = min_ratio * horizon_distance_norm
		return dict((instrument, (0. if horizon_distance < min_distance else math.exp(round(math.log(horizon_distance / horizon_distance_norm) / log_distance_tolerance) * log_distance_tolerance))) for instrument, horizon_distance in horizon_distances.items())
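
	# Worked example (illustrative numbers, outputs quoted up to
	# round-off):  with the default infinite tolerance all distances
	# quantize to 1, i.e. the horizon distances are ignored;  with a
	# finite tolerance each distance is expressed relative to the
	# largest and snapped to a log-spaced grid:
	#
	#	>>> q = ThincaCoincParamsDistributions.quantize_horizon_distances
	#	>>> sorted(q({"H1": 120., "L1": 80., "V1": 5.}).items())
	#	[('H1', 1.0), ('L1', 1.0), ('V1', 1.0)]
	#	>>> sorted(q({"H1": 120., "L1": 80., "V1": 5.}, log_distance_tolerance = math.log(2.)).items())
	#	[('H1', 1.0), ('L1', 0.5), ('V1', 0.0)]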
708 "instruments": rate.NDBins((rate.LinearBins(0.5, instrument_categories.max() + 0.5, instrument_categories.max()),)),
709 "H1_snr_chi": rate.NDBins((rate.ATanLogarithmicBins(3.6, 70., 260), rate.ATanLogarithmicBins(.001, 0.5, 200))),
710 "H2_snr_chi": rate.NDBins((rate.ATanLogarithmicBins(3.6, 70., 260), rate.ATanLogarithmicBins(.001, 0.5, 200))),
711 "H1H2_snr_chi": rate.NDBins((rate.ATanLogarithmicBins(3.6, 70., 260), rate.ATanLogarithmicBins(.001, 0.5, 200))),
712 "L1_snr_chi": rate.NDBins((rate.ATanLogarithmicBins(3.6, 70., 260), rate.ATanLogarithmicBins(.001, 0.5, 200))),
713 "V1_snr_chi": rate.NDBins((rate.ATanLogarithmicBins(3.6, 70., 260), rate.ATanLogarithmicBins(.001, 0.5, 200))),
714 "E1_snr_chi": rate.NDBins((rate.ATanLogarithmicBins(3.6, 70., 260), rate.ATanLogarithmicBins(.001, 0.5, 200))),
715 "E2_snr_chi": rate.NDBins((rate.ATanLogarithmicBins(3.6, 70., 260), rate.ATanLogarithmicBins(.001, 0.5, 200))),
716 "E3_snr_chi": rate.NDBins((rate.ATanLogarithmicBins(3.6, 70., 260), rate.ATanLogarithmicBins(.001, 0.5, 200))),
717 "E0_snr_chi": rate.NDBins((rate.ATanLogarithmicBins(3.6, 70., 260), rate.ATanLogarithmicBins(.001, 0.5, 200)))
	def __init__(self, *args, **kwargs):
		super(ThincaCoincParamsDistributions, self).__init__(*args, **kwargs)
		self.horizon_history = HorizonHistories()

	def __iadd__(self, other):
		# add the parameter distribution data and the horizon
		# distance histories
		super(ThincaCoincParamsDistributions, self).__iadd__(other)
		self.horizon_history += other.horizon_history
		return self
	max_cached_snr_joint_pdfs = int(5**3 * 4)
	snr_joint_pdf_cache = {}
	def get_snr_joint_pdf(self, instruments, horizon_distances, verbose = False):
		# key for cache:  order-insensitive set of instruments, and
		# quantized horizon distances
		key = frozenset(instruments), frozenset(self.quantize_horizon_distances(horizon_distances).items())
		try:
			pdf = self.snr_joint_pdf_cache[key][0]
		except KeyError:
			# no entry in the cache for this instrument combo
			# and set of horizon distances, so build one
			if self.snr_joint_pdf_cache:
				age = max(age for ignored, ignored, age in self.snr_joint_pdf_cache.values()) + 1
			else:
				age = 0
			if verbose:
				print >>sys.stderr, "For horizon distances %s" % ", ".join("%s = %.4g Mpc" % item for item in sorted(horizon_distances.items()))
				progressbar = ProgressBar(text = "%s SNR PDF" % ", ".join(sorted(key[0])))
			else:
				progressbar = None
			binnedarray = self.joint_pdf_of_snrs(key[0], dict(key[1]), progressbar = progressbar)
			del progressbar
			# interpolate the logarithm of the PDF
			lnbinnedarray = binnedarray.copy()
			with numpy.errstate(divide = "ignore"):
				lnbinnedarray.array = numpy.log(lnbinnedarray.array)
			pdf = rate.InterpBinnedArray(lnbinnedarray, fill_value = NegInf)
			self.snr_joint_pdf_cache[key] = pdf, binnedarray, age
			# if the cache is full, delete the oldest entry
			while len(self.snr_joint_pdf_cache) > self.max_cached_snr_joint_pdfs:
				del self.snr_joint_pdf_cache[min((age, key) for key, (ignored, ignored, age) in self.snr_joint_pdf_cache.items())[1]]
		return pdf
	def coinc_params(self, events, offsetvector):
		#
		# 2D (SNR, \chi^2/SNR^2) values.  don't allow both H1 and
		# H2 to contribute parameters to the same coinc
		#
		params = CoincParams(("%s_snr_chi" % event.ifo, (event.snr, event.chisq / event.snr**2)) for event in events)
		if "H2_snr_chi" in params and "H1_snr_chi" in params:
			del params["H2_snr_chi"]

		#
		# instrument combination
		#
		params["instruments"] = (ThincaCoincParamsDistributions.instrument_categories.category(event.ifo for event in events),)

		#
		# record the horizon distances at the time of the coinc
		#
		params.horizons = self.horizon_history.getdict(float(events[0].get_end()))

		return params
	def lnP_signal(self, params):
		# evaluate the SNR portion of the signal model:  the joint
		# SNR PDF for the participating instruments given the
		# horizon distances
		snrs = sorted((name.split("_")[0], value[0]) for name, value in params.items() if name.endswith("_snr_chi"))
		snr_pdf = self.get_snr_joint_pdf((instrument for instrument, rho in snrs), params.horizons)
		lnP_signal = snr_pdf(*tuple(rho for instrument, rho in snrs))

		# evaluate the rest of the signal model
		lnP_signal += super(ThincaCoincParamsDistributions, self).lnP_signal(params)

		# mix a small fraction of the noise model into the signal
		# model to regularize the tails:
		# P = 0.99 P_signal + 0.01 P_noise
		lnP_noise = self.lnP_noise(params)
		if math.isinf(lnP_noise) and math.isinf(lnP_signal):
			if lnP_noise < 0. and lnP_signal < 0.:
				return NegInf
			if lnP_noise > 0. and lnP_signal > 0.:
				return PosInf
		lnP_signal += math.log(.99)
		lnP_noise += math.log(0.01)
		return max(lnP_signal, lnP_noise) + math.log1p(math.exp(-abs(lnP_signal - lnP_noise)))
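
	# The final return statement above is the log-sum-exp identity,
	#
	#	log(exp(a) + exp(b)) = max(a, b) + log1p(exp(-|a - b|)),
	#
	# which evaluates the mixture without exponentiating large
	# magnitudes.  Sketch of the check (illustrative values):
	#
	#	>>> import math
	#	>>> a, b = math.log(.99) - 2., math.log(.01) - 9.
	#	>>> lhs = max(a, b) + math.log1p(math.exp(-abs(a - b)))
	#	>>> abs(lhs - math.log(math.exp(a) + math.exp(b))) < 1e-12
	#	True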
	def add_background_prior(self, instruments, n = 1., transition = 23., verbose = False):
		if verbose:
			print >>sys.stderr, "adding tilt to (SNR, \\chi^2) background PDFs ..."
		for instrument in instruments:
			binarr = self.background_rates["%s_snr_chi" % instrument]
			if verbose:
				progressbar = ProgressBar(instrument, max = len(binarr.bins[0]))
			else:
				progressbar = None

			# will need to normalize the result, so use new
			# storage
			new_binarr = rate.BinnedArray(binarr.bins)
			# transition gives a 50/50 mixture of the
			# Gaussian-noise tail and the SNR^-5 tail at
			# SNR = transition
			transition_factor = transition**5. * math.exp(-transition**2. / 2.)
			dchi2_over_snr2s = new_binarr.bins[1].upper() - new_binarr.bins[1].lower()
			dchi2_over_snr2s[numpy.isinf(dchi2_over_snr2s)] = 0.
			for snr, dsnr in zip(new_binarr.bins[0].centres(), new_binarr.bins[0].upper() - new_binarr.bins[0].lower()):
				if math.isinf(dsnr):
					continue
				p = math.exp(-snr**2. / 2.)
				p += transition_factor / snr**5.
				new_binarr[snr,:] += p * dsnr * dchi2_over_snr2s
				if progressbar is not None:
					progressbar.increment()
			# zero the bins below the search threshold
			new_binarr.array[:new_binarr.bins[0][self.snr_min],:] = 0.
			# normalize to the requested count and add to the
			# counts
			new_binarr.array *= n / new_binarr.array.sum()
			binarr += new_binarr
			# include this instrument's events in the
			# instrument combination counts
			self.background_rates["instruments"][self.instrument_categories.category([instrument]),] += n
	def add_instrument_combination_counts(self, segs, verbose = False):
		# populate the instrument combination binning.  assume the
		# single-instrument categories in the background rates
		# "instruments" binning provide the counts of
		# single-detector events
		if verbose:
			print >>sys.stderr, "synthesizing background-like instrument combination probabilities ..."
		coincsynth = snglcoinc.CoincSynthesizer(
			eventlists = dict((instrument, self.background_rates["instruments"][self.instrument_categories.category([instrument]),]) for instrument in segs),
			segmentlists = segs,
			delta_t = 0.005	# seconds;  coincidence window (assumed value)
		)
		# the events are actually distributed over several
		# effectively disjoint coincidence bins;  estimate the
		# number of bins by comparing the predicted and observed
		# zero-lag coincidence counts
		livetime = get_live_time(segs)
		N = 0
		coincidence_bins = 0.
		for instruments in coincsynth.all_instrument_combos:
			predicted_count = coincsynth.rates[frozenset(instruments)] * livetime
			observed_count = self.zero_lag_rates["instruments"][self.instrument_categories.category(instruments),]
			if predicted_count > 0 and observed_count > 0:
				coincidence_bins += (predicted_count / observed_count)**(1. / (len(instruments) - 1))
				N += 1
		assert coincidence_bins > 0.
		coincidence_bins /= N
		if verbose:
			print >>sys.stderr, "\tthere seems to be %g effective disjoint coincidence bin(s)" % coincidence_bins
		assert coincidence_bins >= 1.
		# convert the single-instrument event rates to rates per
		# coincidence bin
		coincsynth.mu = dict((instrument, rate / coincidence_bins) for instrument, rate in coincsynth.mu.items())
		# compute the expected coincidence counts per bin, then
		# scale by the number of bins
		for instruments, count in coincsynth.mean_coinc_count.items():
			self.background_rates["instruments"][self.instrument_categories.category(instruments),] = count * coincidence_bins
	def add_foreground_snrchi_prior(self, instruments, n, prefactors_range, df, verbose = False):
		if verbose:
			print >>sys.stderr, "synthesizing signal-like (SNR, \\chi^2) distributions ..."
		pfs = numpy.linspace(prefactors_range[0], prefactors_range[1], 10)
		for instrument in instruments:
			binarr = self.injection_rates["%s_snr_chi" % instrument]
			if verbose:
				progressbar = ProgressBar(instrument, max = len(binarr.bins[0]))
			else:
				progressbar = None

			# will need to normalize the result, so use new
			# storage
			new_binarr = rate.BinnedArray(binarr.bins)
			chi2_over_snr2s = new_binarr.bins[1].centres()
			dchi2_over_snr2s = new_binarr.bins[1].upper() - new_binarr.bins[1].lower()
			dchi2_over_snr2s[numpy.isinf(dchi2_over_snr2s)] = 0.
			for snr, dsnr in zip(new_binarr.bins[0].centres(), new_binarr.bins[0].upper() - new_binarr.bins[0].lower()):
				if math.isinf(dsnr):
					continue
				snr2 = snr**2.
				# SNR^-4 prior on the SNR distribution
				dsnr_over_snr4 = dsnr / snr**4.
				for chi2_over_snr2, dchi2_over_snr2 in zip(chi2_over_snr2s, dchi2_over_snr2s):
					chi2 = chi2_over_snr2 * snr2 * df
					with numpy.errstate(over = "ignore", divide = "ignore", invalid = "ignore"):
						v = ncx2pdf(chi2, df, pfs * snr2)
					# remove any infs or nans
					v = v[numpy.isfinite(v)]
					assert (v >= 0.).all(), v
					p = v.sum() * dsnr_over_snr4 * dchi2_over_snr2
					assert p >= 0. and not (math.isinf(p) or math.isnan(p)), p
					new_binarr[snr, chi2_over_snr2] = p
				if progressbar is not None:
					progressbar.increment()
			# zero the bins below the search threshold
			new_binarr.array[:new_binarr.bins[0][self.snr_min],:] = 0.
			# normalize to the requested count and add to the
			# counts
			new_binarr.array *= n / new_binarr.array.sum()
			binarr += new_binarr
	def populate_prob_of_instruments_given_signal(self, segs, n = 1., verbose = False):
		assert len(segs) > 1
		# probability that a detectable signal is detectable by
		# each instrument combination
		P = P_instruments_given_signal(self.horizon_history)
		# multiply by the probability that enough instruments were
		# on to form each of those combinations
		P_live = snglcoinc.CoincSynthesizer(segmentlists = segs).P_live
		for instruments in P:
			P[instruments] *= sum(sorted(p for on_instruments, p in P_live.items() if on_instruments >= instruments))
		# renormalize
		total = sum(sorted(P.values()))
		for instruments in P:
			P[instruments] /= total
		# populate the binning
		for instruments, p in P.items():
			self.injection_rates["instruments"][self.instrument_categories.category(instruments),] += n * p
	def _rebuild_interpolators(self):
		super(ThincaCoincParamsDistributions, self)._rebuild_interpolators()

		# the instrument combination "interpolators" are not
		# interpolators:  the bin values are looked up directly by
		# (integer) category, with a place-holder prepended so the
		# 1-based categories index correctly
		def mkinterp(binnedarray):
			with numpy.errstate(divide = "ignore"):
				return numpy.hstack(([NaN], numpy.log(binnedarray.array))).__getitem__
		if "instruments" in self.background_pdf:
			self.background_lnpdf_interp["instruments"] = mkinterp(self.background_pdf["instruments"])
		if "instruments" in self.injection_pdf:
			self.injection_lnpdf_interp["instruments"] = mkinterp(self.injection_pdf["instruments"])
		if "instruments" in self.zero_lag_pdf:
			self.zero_lag_lnpdf_interp["instruments"] = mkinterp(self.zero_lag_pdf["instruments"])
	def pdf_from_rates_instruments(self, key, pdf_dict):
		# instrument combinations are probabilities, not densities.
		# zero the single-instrument categories, which cannot form
		# coincidences
		binnedarray = pdf_dict[key]
		for category in self.instrument_categories.values():
			binnedarray[category,] = 0
		with numpy.errstate(invalid = "ignore"):
			binnedarray.array /= binnedarray.array.sum()
	def pdf_from_rates_snrchi2(self, key, pdf_dict, snr_kernel_width_at_8 = math.sqrt(2) / 4.0, sigma = 10.):
		binnedarray = pdf_dict[key]

		# construct the density estimation kernel
		snr_bins = binnedarray.bins[0]
		snr_per_bin_at_8 = (snr_bins.upper() - snr_bins.lower())[snr_bins[8.]]
		snr_kernel_bins = snr_kernel_width_at_8 / snr_per_bin_at_8
		assert snr_kernel_bins >= 2.5, snr_kernel_bins	# don't let the window get too small
		kernel = rate.gaussian_window(snr_kernel_bins, 5, sigma = sigma)

		# smooth the counts
		rate.filter_array(binnedarray.array, kernel)

		# zero everything below the SNR cut-off
		binnedarray.array[:binnedarray.bins[0][self.snr_min],:] = 0.

		# normalize what remains to be a valid PDF
		with numpy.errstate(invalid = "ignore"):
			binnedarray.to_pdf()

		# if this is the numerator, convert the (SNR,
		# \chi^2/SNR^2) PDF into the PDF of \chi^2/SNR^2
		# conditional on SNR
		if pdf_dict is self.injection_pdf and not numpy.isnan(binnedarray.array).all():
			bin_sizes = binnedarray.bins[1].upper() - binnedarray.bins[1].lower()
			for i in xrange(binnedarray.array.shape[0]):
				nonzero = binnedarray.array[i] != 0
				if not nonzero.any():
					# PDF is 0 in this column
					continue
				norm = numpy.dot(numpy.compress(nonzero, binnedarray.array[i]), numpy.compress(nonzero, bin_sizes))
				assert not math.isnan(norm), "encountered impossible PDF: %s is non-zero in a bin with infinite volume" % key
				binnedarray.array[i] /= norm
	@classmethod
	def from_xml(cls, xml, name):
		self = super(ThincaCoincParamsDistributions, cls).from_xml(xml, name)
		xml = self.get_xml_root(xml, name)
		self.horizon_history = HorizonHistories.from_xml(xml, name)
		prefix = u"cached_snr_joint_pdf"
		for elem in [elem for elem in xml.childNodes if elem.Name.startswith(u"%s:" % prefix)]:
			key = ligolw_param.get_pyvalue(elem, u"key").strip().split(u";")
			key = frozenset(lsctables.instrument_set_from_ifos(key[0].strip())), frozenset((inst.strip(), float(dist.strip())) for inst, dist in (inst_dist.strip().split(u"=") for inst_dist in key[1].strip().split(u",")))
			binnedarray = rate.BinnedArray.from_xml(elem, prefix)
			if self.snr_joint_pdf_cache:
				age = max(age for ignored, ignored, age in self.snr_joint_pdf_cache.values()) + 1
			else:
				age = 0
			lnbinnedarray = binnedarray.copy()
			with numpy.errstate(divide = "ignore"):
				lnbinnedarray.array = numpy.log(lnbinnedarray.array)
			self.snr_joint_pdf_cache[key] = rate.InterpBinnedArray(lnbinnedarray, fill_value = NegInf), binnedarray, age
			while len(self.snr_joint_pdf_cache) > self.max_cached_snr_joint_pdfs:
				del self.snr_joint_pdf_cache[min((age, key) for key, (ignored, ignored, age) in self.snr_joint_pdf_cache.items())[1]]
		return self
	def to_xml(self, name):
		xml = super(ThincaCoincParamsDistributions, self).to_xml(name)
		xml.appendChild(self.horizon_history.to_xml(name))
		prefix = u"cached_snr_joint_pdf"
		for key, (ignored, binnedarray, ignored) in self.snr_joint_pdf_cache.items():
			elem = xml.appendChild(binnedarray.to_xml(prefix))
			elem.appendChild(ligolw_param.new_param(u"key", u"lstring", "%s;%s" % (lsctables.ifos_from_instrument_set(key[0]), u",".join(u"%s=%.17g" % inst_dist for inst_dist in sorted(key[1])))))
		return xml
	@property
	def count_above_threshold(self):
		"""
		Dictionary mapping instrument combination (as a frozenset)
		to number of zero-lag coincs observed.  An additional
		entry with key None stores the total.
		"""
		count_above_threshold = dict((frozenset(self.instrument_categories.instruments(int(round(category)))), count) for category, count in zip(self.zero_lag_rates["instruments"].bins.centres()[0], self.zero_lag_rates["instruments"].array))
		count_above_threshold[None] = sum(sorted(count_above_threshold.values()))
		return count_above_threshold

	@count_above_threshold.setter
	def count_above_threshold(self, count_above_threshold):
		self.zero_lag_rates["instruments"].array[:] = 0.
		for instruments, count in count_above_threshold.items():
			if instruments is not None:
				self.zero_lag_rates["instruments"][self.instrument_categories.category(instruments),] = count
	@property
	def Pinstrument_noise(self):
		P = {}
		for category, p in zip(self.background_pdf["instruments"].bins.centres()[0], self.background_pdf["instruments"].array):
			instruments = frozenset(self.instrument_categories.instruments(int(round(category))))
			if len(instruments) < 2 or not p:
				continue
			P[instruments] = p
		return P

	@property
	def Pinstrument_signal(self):
		P = {}
		for category, p in zip(self.injection_pdf["instruments"].bins.centres()[0], self.injection_pdf["instruments"].array):
			instruments = frozenset(self.instrument_categories.instruments(int(round(category))))
			if len(instruments) < 2 or not p:
				continue
			P[instruments] = p
		return P
	def random_params(self, instruments):
		"""
		Generator that yields an endless sequence of randomly
		generated parameter dictionaries for the given
		instruments.  NOTE:  the parameters will be within the
		domain of the respective binnings but are not drawn from
		the PDF stored in those binnings --- this is not an MCMC
		style sampler.  The return value is a tuple, the first
		element of which is the random parameter dictionary and
		the second is the natural logarithm (up to an arbitrary
		constant) of the PDF from which the parameters have been
		drawn evaluated at the co-ordinates in the parameter
		dictionary.

		Example:

		>>> x = iter(ThincaCoincParamsDistributions().random_params(("H1", "L1", "V1")))

		The sequence is suitable for input to the
		pylal.snglcoinc.LnLikelihoodRatio.samples() log likelihood
		ratio generator.
		"""
		snr_slope = 0.8 / len(instruments)**3

		keys = tuple("%s_snr_chi" % instrument for instrument in instruments)
		base_params = {"instruments": (self.instrument_categories.category(instruments),)}
		horizongen = iter(self.horizon_history.randhorizons()).next
		# P(horizon distance history) = 1 / (duration of history)
		log_P_horizons = -math.log(self.horizon_history.maxkey() - self.horizon_history.minkey())
		coordgens = tuple(iter(self.binnings[key].randcoord(ns = (snr_slope, 1.), domain = (slice(self.snr_min, None), slice(None, None)))).next for key in keys)
		while 1:
			seq = sum((coordgen() for coordgen in coordgens), ())
			params = CoincParams(zip(keys, seq[0::2]))
			params.update(base_params)
			params.horizons = horizongen()
			# the second element is the sum of the log of the
			# co-ordinate PDFs and the horizon history PDF
			yield params, sum(seq[1::2], log_P_horizons)
	def random_sim_params(self, sim, horizon_distance = None, snr_efficiency = 1.0):
		"""
		Generator that yields an endless sequence of randomly
		generated parameter dictionaries drawn from the
		distribution of parameters expected for the given
		injection, which is an instance of a SimInspiral table row
		object (see glue.ligolw.lsctables.SimInspiral for more
		information).  The return value is a tuple, the first
		element of which is the random parameter dictionary and
		the second element is 0.

		See also random_params().

		The sequence is suitable for input to the
		pylal.snglcoinc.LnLikelihoodRatio.samples() log likelihood
		ratio generator.

		Bugs:

		The second element in each tuple in the sequence is merely
		a placeholder, not the natural logarithm of the PDF from
		which the sample has been drawn, as in the case of
		random_params().  Therefore, when used in combination with
		pylal.snglcoinc.LnLikelihoodRatio.samples(), the two
		probability densities computed and returned by that
		generator along with each log likelihood ratio value will
		simply be the probability densities of the signal and
		noise populations at that point in parameter space.  They
		cannot be used to form an importance weighted sampler of
		the log likelihood ratios.
		"""
		#
		# retrieve horizon distances from the history if they were
		# not given explicitly
		#

		if horizon_distance is None:
			horizon_distance = self.horizon_history.getdict(float(sim.get_time_geocent()))

		#
		# compute the nominal SNR in each instrument
		#

		cosi2 = math.cos(sim.inclination)**2.
		gmst = lal.GreenwichMeanSiderealTime(lal.LIGOTimeGPS(0, sim.get_time_geocent().ns()))
		snr_0 = {}
		for instrument, DH in horizon_distance.items():
			fp, fc = lal.ComputeDetAMResponse(lalsimulation.DetectorPrefixToLALDetector(str(instrument)).response, sim.longitude, sim.latitude, sim.polarization, gmst)
			snr_0[instrument] = snr_efficiency * 8. * DH * math.sqrt(fp**2. * (1. + cosi2)**2. / 4. + fc**2. * cosi2) / sim.distance

		#
		# construct SNR and \chi^2/SNR^2 generators
		#

		def snr_gen(snr):
			# the measured SNR is Rician about the nominal SNR
			rvs = stats.ncx2(2., snr**2.).rvs
			math_sqrt = math.sqrt
			while 1:
				yield math_sqrt(rvs())

		def chi2_over_snr2_gen(instrument, snr):
			rates_lnx = numpy.log(self.injection_rates["%s_snr_chi" % instrument].bins[1].centres())
			# construct a CDF from the recorded injection
			# distribution at this SNR
			rates_cdf = self.injection_rates["%s_snr_chi" % instrument][max(snr, self.snr_min),:].cumsum()
			# add a small tilt to break degeneracies then
			# normalize
			rates_cdf += numpy.linspace(0., 0.001 * rates_cdf[-1], len(rates_cdf))
			rates_cdf /= rates_cdf[-1]
			assert not numpy.isnan(rates_cdf).any()

			interp = interpolate.interp1d(rates_cdf, rates_lnx)
			math_exp = math.exp
			random_uniform = random.uniform
			while 1:
				yield math_exp(float(interp(random_uniform(0., 1.))))

		gens = dict(((instrument, "%s_snr_chi" % instrument), (iter(snr_gen(snr)).next, iter(chi2_over_snr2_gen(instrument, snr)).next)) for instrument, snr in snr_0.items())

		#
		# yield a sequence of randomly generated parameters
		#

		while 1:
			params = CoincParams()
			instruments = []
			for (instrument, key), (snr, chi2_over_snr2) in gens.items():
				snr = snr()
				if snr < self.snr_min:
					continue
				params[key] = snr, chi2_over_snr2()
				instruments.append(instrument)
			if len(instruments) < 2:
				continue
			params["instruments"] = (ThincaCoincParamsDistributions.instrument_categories.category(instruments),)
			params.horizons = horizon_distance
			yield params, 0.
	@classmethod
	def joint_pdf_of_snrs(cls, instruments, inst_horiz_mapping, n_samples = 80000, bins = rate.ATanLogarithmicBins(3.6, 120., 100), progressbar = None):
		"""
		Return a BinnedArray representing the joint probability
		density of measuring a set of SNRs from a network of
		instruments.  The inst_horiz_mapping is a dictionary
		mapping instrument name (e.g., "H1") to horizon distance
		(arbitrary units).  n_samples is the number of lines over
		which to calculate the density in the SNR space.  The axes
		of the PDF correspond to the instruments in alphabetical
		order.
		"""
		# get the instrument names in alphabetical order
		instruments = sorted(instruments)
		# get horizon distances and responses in the same order
		DH_times_8 = 8. * numpy.array([inst_horiz_mapping[inst] for inst in instruments])
		resps = tuple(lalsimulation.DetectorPrefixToLALDetector(str(inst)).response for inst in instruments)

		# get horizon distances and responses of the remaining
		# instruments (order doesn't matter as long as both are in
		# the same order)
		DH_times_8_other = 8. * numpy.array([dist for inst, dist in inst_horiz_mapping.items() if inst not in instruments])
		resps_other = tuple(lalsimulation.DetectorPrefixToLALDetector(str(inst)).response for inst in inst_horiz_mapping if inst not in instruments)

		# initialize the PDF array and pre-construct the sequence
		# of (snr, snr_lo, snr_hi) tuples;  the last bin has
		# infinite size so it is dropped
		pdf = rate.BinnedArray(rate.NDBins([bins] * len(instruments)))
		snr_sequence = rate.ATanLogarithmicBins(3.6, 120., 300)
		snr_snrlo_snrhi_sequence = numpy.array(zip(snr_sequence.centres(), snr_sequence.lower(), snr_sequence.upper())[:-1])

		# compute the SNR at which to begin sampling:  a bit below
		# the minimum SNR threshold
		assert type(cls.snr_min) is float
		snr_min = cls.snr_min - 3.0
		assert snr_min > 0.0

		# no-op if one of the instruments that must participate in
		# the coinc is not sensitive
		if DH_times_8.min() == 0.:
			return pdf

		if progressbar is not None:
			progressbar.max = n_samples

		# avoid attribute look-ups in the loop
		acos = math.acos
		random_uniform = random.uniform
		twopi = 2. * math.pi
		pi_2 = math.pi / 2.
		xlal_am_resp = lal.ComputeDetAMResponse
		# the measured SNR is Rician:  the square root of a
		# non-central chi-squared with 2 degrees of freedom
		rice_rvs = lambda x: numpy.sqrt(stats.ncx2.rvs(2., x**2.))
		# the sky locations are drawn at random, so any fixed
		# sidereal time will do
		gmst = 0.0
		for i in xrange(n_samples):
			# select a random sky location and source
			# orientation
			theta = acos(random_uniform(-1., 1.))
			phi = random_uniform(0., twopi)
			psi = random_uniform(0., twopi)
			cosi2 = random_uniform(-1., 1.)**2.

			# F+^2 and Fx^2 for each instrument
			fpfc2 = numpy.array(tuple(xlal_am_resp(resp, phi, pi_2 - theta, psi, gmst) for resp in resps))**2.
			fpfc2_other = numpy.array(tuple(xlal_am_resp(resp, phi, pi_2 - theta, psi, gmst) for resp in resps_other))**2.

			# SNR * D for each instrument
			fpfc_factors = ((1. + cosi2)**2. / 4., cosi2)
			snr_times_D = DH_times_8 * numpy.dot(fpfc2, fpfc_factors)**0.5

			# the most sensitive instrument sets the top of
			# the sampled SNR range
			max_snr_times_D = snr_times_D.max()

			# start at the distance at which the least
			# sensitive participating instrument drops to
			# snr_min
			try:
				start_index = snr_sequence[max_snr_times_D / (snr_times_D.min() / snr_min)]
			except ZeroDivisionError:
				# one of the participating instruments is
				# insensitive along this line of sight
				continue
			# stop at the distance at which one of the
			# non-participating instruments would cross the
			# detection threshold
			if len(DH_times_8_other):
				min_D_other = (DH_times_8_other * numpy.dot(fpfc2_other, fpfc_factors)**0.5).min() / cls.snr_min
				try:
					end_index = snr_sequence[max_snr_times_D / min_D_other] + 1
				except ZeroDivisionError:
					end_index = None
			else:
				end_index = None

			# walk the distances;  each contributes a weight
			# proportional to the volume of its shell
			for D, Dhi, Dlo in max_snr_times_D / snr_snrlo_snrhi_sequence[start_index:end_index]:
				pdf[tuple(rice_rvs(snr_times_D / D))] += Dhi**3. - Dlo**3.

			if progressbar is not None:
				progressbar.increment()
		# make sure the PDF is well-behaved before smoothing
		assert numpy.isfinite(pdf.array).all()

		# density estimation
		rate.filter_array(pdf.array, rate.gaussian_window(*(1.875,) * len(pdf.array.shape)))
		# protect against round-off in the smoothing
		numpy.clip(pdf.array, 0., PosInf, pdf.array)
		# zero the bins below the SNR cut-off
		range_all = slice(None, None)
		range_low = slice(None, pdf.bins[0][cls.snr_min])
		for i in xrange(len(instruments)):
			slices = [range_all] * len(instruments)
			slices[i] = range_low
			pdf.array[slices] = 0.
		# convert the counts to a normalized PDF
		pdf.to_pdf()
		return pdf
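
	# Usage sketch (illustrative horizon distances):  build the joint
	# SNR PDF for an H1, L1 network in which H1 is twice as sensitive
	# as L1;  a small n_samples keeps the sketch fast at the expense
	# of accuracy:
	#
	#	>>> pdf = ThincaCoincParamsDistributions.joint_pdf_of_snrs(("H1", "L1"), {"H1": 120., "L1": 60.}, n_samples = 1000)
	#	>>> density = pdf[8., 8.]	# density at SNR = 8 in both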


def P_instruments_given_signal(horizon_history, n_samples = 500000, min_distance = 0.):
	"""
	Return a dictionary mapping each combination of two or more
	instruments (as a frozenset) to the probability that a signal
	that is detectable by at least two instruments is detectable by
	exactly that combination, averaged over the horizon distance
	history.
	"""
	# check inputs
	if n_samples < 1:
		raise ValueError("n_samples=%d must be >= 1" % n_samples)
	if min_distance < 0.:
		raise ValueError("min_distance=%g must be >= 0" % min_distance)

	# get the instrument names
	names = tuple(horizon_history)
	if not names:
		raise ValueError("horizon_history is empty")
	# get the antenna responses in the same order
	resps = [lalsimulation.DetectorPrefixToLALDetector(str(inst)).response for inst in names]

	# initialize the output:  a dictionary mapping instrument
	# combination to probability (initially all 0)
	result = dict.fromkeys((frozenset(instruments) for n in xrange(2, len(names) + 1) for instruments in iterutils.choices(names, n)), 0.0)

	# function to spit out a random horizon distance dictionary
	rand_horizon_distances = iter(horizon_history.randhorizons()).next

	# a sequence of integers to enumerate the instruments,
	# constructed once outside the loop
	indexes = tuple(range(len(names)))

	# avoid attribute look-ups and arithmetic in the loop
	acos = math.acos
	numpy_array = numpy.array
	numpy_dot = numpy.dot
	numpy_sqrt = numpy.sqrt
	random_uniform = random.uniform
	twopi = 2. * math.pi
	pi_2 = math.pi / 2.
	xlal_am_resp = lal.ComputeDetAMResponse
	# the sky locations are drawn at random, so any fixed sidereal
	# time will do
	gmst = 0.0

	# loop as many times as requested
	for i in xrange(n_samples):
		# retrieve random horizon distances in the same order as
		# the instrument names
		DH = numpy_array(map(rand_horizon_distances().__getitem__, names))

		# select a random sky location, orbital plane inclination
		# and choice of polarization
		theta = acos(random_uniform(-1., 1.))
		phi = random_uniform(0., twopi)
		psi = random_uniform(0., twopi)
		cosi2 = random_uniform(-1., 1.)**2.

		# compute F+^2 and Fx^2 for each instrument
		fpfc2 = numpy_array(tuple(xlal_am_resp(resp, phi, pi_2 - theta, psi, gmst) for resp in resps))**2.

		# ratio of distance to inverse SNR for each instrument
		snr_times_D_over_8 = DH * numpy_sqrt(numpy_dot(fpfc2, ((1. + cosi2)**2. / 4., cosi2)))

		# the volume visible to each instrument, given the
		# requirement that the source be above its SNR threshold,
		# is proportional to
		V_at_snr_threshold = snr_times_D_over_8**3.

		# order the instruments from most to least sensitive
		order = sorted(indexes, key = V_at_snr_threshold.__getitem__, reverse = True)
		ordered_names = tuple(names[i] for i in order)

		# instrument combinations and the volume of space visible
		# to each combination:  a combination of n instruments is
		# limited by its n-th most sensitive member
		instruments = (frozenset(ordered_names[:n]) for n in xrange(2, len(order) + 1))
		V = tuple(V_at_snr_threshold[i] for i in order[1:])
		if V[0] <= min_distance:
			continue

		# for each combination, the probability that a source
		# visible to at least two instruments is visible to that
		# combination
		P = tuple(x / V[0] for x in V)

		# for each combination, the probability that a source is
		# visible to that combination and no other instruments
		for key, p, pnext in zip(instruments, P, P[1:] + (0.,)):
			result[key] += p - pnext
	# normalize to the number of samples
	for key in result:
		result[key] /= n_samples

	# make sure the result is normalized, then remove the residual
	# round-off
	total = sum(sorted(result.values()))
	assert abs(total - 1.) < 1e-13
	for key in result:
		result[key] /= total
	return result
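
# Usage sketch (illustrative horizon histories):  with only two
# instruments recorded, the single two-detector combination carries all
# of the probability:
#
#	>>> hist = HorizonHistories()
#	>>> hist["H1"] = NearestLeafTree([(0., 120.), (100., 110.)])
#	>>> hist["L1"] = NearestLeafTree([(0., 60.), (100., 55.)])
#	>>> P = P_instruments_given_signal(hist, n_samples = 10000)
#	>>> P[frozenset(("H1", "L1"))]
#	1.0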


def binned_log_likelihood_ratio_rates_from_samples(signal_rates, noise_rates, samples, nsamples):
	"""
	Populate signal and noise BinnedArray histograms from a sequence
	of samples (which can be a generator).  The first nsamples
	elements from the sequence are used.  The samples must be a
	sequence of three-element tuples (or sequences) in which the
	first element is a value of the ranking statistic (likelihood
	ratio) and the second and third elements the logs of the
	probabilities of obtaining that value of the ranking statistic
	in the signal and noise populations respectively.
	"""
	exp = math.exp
	isnan = math.isnan
	for ln_lamb, lnP_signal, lnP_noise in itertools.islice(samples, nsamples):
		if isnan(ln_lamb):
			raise ValueError("encountered NaN likelihood ratio")
		if isnan(lnP_signal) or isnan(lnP_noise):
			raise ValueError("encountered NaN signal or noise model probability densities")
		signal_rates[ln_lamb,] += exp(lnP_signal)
		noise_rates[ln_lamb,] += exp(lnP_noise)
	return signal_rates, noise_rates
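
# Usage sketch (hypothetical sample stream):  the histograms are two
# BinnedArray instances sharing the ranking statistic binning, and the
# samples are (ln L, ln P_signal, ln P_noise) triples such as those
# yielded by snglcoinc.LnLikelihoodRatio.samples():
#
#	>>> signal_rates = rate.BinnedArray(RankingData.binnings["ln_likelihood_ratio"])
#	>>> noise_rates = rate.BinnedArray(RankingData.binnings["ln_likelihood_ratio"])
#	>>> samples = iter([(5.2, -1.0, -6.2), (0.1, -3.0, -3.1)])
#	>>> signal_rates, noise_rates = binned_log_likelihood_ratio_rates_from_samples(signal_rates, noise_rates, samples, 2)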


def binned_log_likelihood_ratio_rates_from_samples_wrapper(queue, *args, **kwargs):
	try:
		queue.put(binned_log_likelihood_ratio_rates_from_samples(*args, **kwargs))
	except:
		# let the parent process know the sampler failed
		queue.put(None)
		raise


class RankingData(object):
	ligo_lw_name_suffix = u"gstlal_inspiral_rankingdata"

	#
	# likelihood ratio binning
	#

	binnings = {
		"ln_likelihood_ratio": rate.NDBins((rate.ATanBins(0., 110., 3000),))
	}

	filters = {
		"ln_likelihood_ratio": rate.gaussian_window(8.)
	}

	#
	# threshold below which events are not collected
	#

	ln_likelihood_ratio_threshold = NegInf
	def __init__(self, coinc_params_distributions, instruments, process_id = None, nsamples = 1000000, verbose = False):
		self.background_likelihood_rates = {}
		self.background_likelihood_pdfs = {}
		self.signal_likelihood_rates = {}
		self.signal_likelihood_pdfs = {}
		self.zero_lag_likelihood_rates = {}
		self.zero_lag_likelihood_pdfs = {}
		self.process_id = process_id

		# initialize the binnings, one per instrument combination
		instruments = tuple(instruments)
		for key in [frozenset(ifos) for n in range(2, len(instruments) + 1) for ifos in iterutils.choices(instruments, n)]:
			self.background_likelihood_rates[key] = rate.BinnedArray(self.binnings["ln_likelihood_ratio"])
			self.signal_likelihood_rates[key] = rate.BinnedArray(self.binnings["ln_likelihood_ratio"])
			self.zero_lag_likelihood_rates[key] = rate.BinnedArray(self.binnings["ln_likelihood_ratio"])

		# bail out (used by .from_xml() to get an uninitialized
		# instance)
		if coinc_params_distributions is None:
			return

		# run the samplers, one process per instrument combination
		threads = []
		for key in self.background_likelihood_rates:
			if verbose:
				print >>sys.stderr, "computing ranking statistic PDFs for %s" % ", ".join(sorted(key))
			q = multiprocessing.queues.SimpleQueue()
			p = multiprocessing.Process(target = lambda: binned_log_likelihood_ratio_rates_from_samples_wrapper(
				q,
				self.signal_likelihood_rates[key],
				self.background_likelihood_rates[key],
				snglcoinc.LnLikelihoodRatio(coinc_params_distributions).samples(coinc_params_distributions.random_params(key)),
				nsamples = nsamples
			))
			p.start()
			threads.append((p, q, key))
		# collect the results
		while threads:
			p, q, key = threads.pop(0)
			result = q.get()
			if result is None:
				raise Exception("sampling thread failed")
			self.signal_likelihood_rates[key], self.background_likelihood_rates[key] = result
			p.join()
		if verbose:
			print >>sys.stderr, "done computing ranking statistic PDFs"

		# propagate the relative abundances of the instrument
		# combinations into the noise model
		for instruments, binnedarray in self.background_likelihood_rates.items():
			if binnedarray.array.any():
				binnedarray.array *= coinc_params_distributions.background_rates["instruments"][coinc_params_distributions.instrument_categories.category(instruments),] / binnedarray.array.sum()

		# propagate the instrument combination probabilities into
		# the signal model
		for instruments, binnedarray in self.signal_likelihood_rates.items():
			if binnedarray.array.any():
				binnedarray.array *= coinc_params_distributions.injection_rates["instruments"][coinc_params_distributions.instrument_categories.category(instruments),] / binnedarray.array.sum()

		# compute the combined rates
		self._compute_combined_rates()
	def collect_zero_lag_rates(self, connection, coinc_def_id):
		for instruments, ln_likelihood_ratio in connection.cursor().execute("""
SELECT
	coinc_inspiral.ifos,
	coinc_event.likelihood
FROM
	coinc_inspiral
	JOIN coinc_event ON (
		coinc_event.coinc_event_id == coinc_inspiral.coinc_event_id
	)
WHERE
	coinc_event.coinc_def_id == ?
	AND NOT EXISTS (
		SELECT
			*
		FROM
			time_slide
		WHERE
			time_slide.time_slide_id == coinc_event.time_slide_id
			AND time_slide.offset != 0
	)
""", (coinc_def_id,)):
			assert ln_likelihood_ratio is not None, "null likelihood ratio encountered.  probably coincs have not been ranked"
			self.zero_lag_likelihood_rates[frozenset(lsctables.instrument_set_from_ifos(instruments))][ln_likelihood_ratio,] += 1.

		# update the combined rates
		self._compute_combined_rates()
	def _compute_combined_rates(self):
		# (re-)compute the combined noise and signal rates
		def compute_combined_rates(rates_dict):
			try:
				del rates_dict[None]
			except KeyError:
				pass
			total_rate = rates_dict.itervalues().next().copy()
			# FIXME:  the binnings are not checked for
			# compatibility
			total_rate.array[:] = sum(binnedarray.array for binnedarray in rates_dict.values())
			rates_dict[None] = total_rate
		compute_combined_rates(self.background_likelihood_rates)
		compute_combined_rates(self.signal_likelihood_rates)
		compute_combined_rates(self.zero_lag_likelihood_rates)
	def finish(self, verbose = False):
		self.background_likelihood_pdfs.clear()
		self.signal_likelihood_pdfs.clear()
		self.zero_lag_likelihood_pdfs.clear()
		def build_pdf(binnedarray, filt):
			# copy the counts, smooth and normalize
			pdf = binnedarray.copy()
			rate.filter_array(pdf.array, filt)
			pdf.to_pdf()
			return pdf
		if verbose:
			progressbar = ProgressBar(text = "computing ranking statistic PDFs", max = len(self.background_likelihood_rates) + len(self.signal_likelihood_rates) + len(self.zero_lag_likelihood_rates))
		else:
			progressbar = None
		for key, binnedarray in self.background_likelihood_rates.items():
			assert not numpy.isnan(binnedarray.array).any(), "%s noise model log likelihood ratio counts contain NaNs" % (key if key is not None else "combined")
			self.background_likelihood_pdfs[key] = build_pdf(binnedarray, self.filters["ln_likelihood_ratio"])
			if progressbar is not None:
				progressbar.increment()
		for key, binnedarray in self.signal_likelihood_rates.items():
			assert not numpy.isnan(binnedarray.array).any(), "%s signal model log likelihood ratio counts contain NaNs" % (key if key is not None else "combined")
			self.signal_likelihood_pdfs[key] = build_pdf(binnedarray, self.filters["ln_likelihood_ratio"])
			if progressbar is not None:
				progressbar.increment()
		for key, binnedarray in self.zero_lag_likelihood_rates.items():
			assert not numpy.isnan(binnedarray.array).any(), "%s zero lag log likelihood ratio counts contain NaNs" % (key if key is not None else "combined")
			self.zero_lag_likelihood_pdfs[key] = build_pdf(binnedarray, self.filters["ln_likelihood_ratio"])
			if progressbar is not None:
				progressbar.increment()
	def __iadd__(self, other):
		snglcoinc.CoincParamsDistributions.addbinnedarrays(self.background_likelihood_rates, other.background_likelihood_rates, self.background_likelihood_pdfs, other.background_likelihood_pdfs)
		snglcoinc.CoincParamsDistributions.addbinnedarrays(self.signal_likelihood_rates, other.signal_likelihood_rates, self.signal_likelihood_pdfs, other.signal_likelihood_pdfs)
		snglcoinc.CoincParamsDistributions.addbinnedarrays(self.zero_lag_likelihood_rates, other.zero_lag_likelihood_rates, self.zero_lag_likelihood_pdfs, other.zero_lag_likelihood_pdfs)
		return self
	@classmethod
	def from_xml(cls, xml, name):
		# find the root of the XML tree containing the
		# serialization of this object
		xml, = [elem for elem in xml.getElementsByTagName(ligolw.LIGO_LW.tagName) if elem.hasAttribute(u"Name") and elem.Name == u"%s:%s" % (name, cls.ligo_lw_name_suffix)]
		# create a mostly uninitialized instance
		self = cls(None, (), process_id = ligolw_param.get_pyvalue(xml, u"process_id"))
		# pull out the likelihood count and PDF arrays
		def reconstruct(xml, prefix, target_dict):
			for ba_elem in [elem for elem in xml.getElementsByTagName(ligolw.LIGO_LW.tagName) if elem.hasAttribute(u"Name") and ("_%s" % prefix) in elem.Name]:
				ifo_set = frozenset(lsctables.instrument_set_from_ifos(ba_elem.Name.split("_")[0]))
				target_dict[ifo_set] = rate.BinnedArray.from_xml(ba_elem, ba_elem.Name.split(":")[0])
		reconstruct(xml, u"background_likelihood_rate", self.background_likelihood_rates)
		reconstruct(xml, u"background_likelihood_pdf", self.background_likelihood_pdfs)
		reconstruct(xml, u"signal_likelihood_rate", self.signal_likelihood_rates)
		reconstruct(xml, u"signal_likelihood_pdf", self.signal_likelihood_pdfs)
		reconstruct(xml, u"zero_lag_likelihood_rate", self.zero_lag_likelihood_rates)
		reconstruct(xml, u"zero_lag_likelihood_pdf", self.zero_lag_likelihood_pdfs)
		self._compute_combined_rates()
		return self
	def to_xml(self, name):
		xml = ligolw.LIGO_LW({u"Name": u"%s:%s" % (name, self.ligo_lw_name_suffix)})
		xml.appendChild(ligolw_param.new_param(u"process_id", u"ilwd:char", self.process_id))
		def store(xml, prefix, source_dict):
			for key, binnedarray in source_dict.items():
				if key is not None:
					ifostr = lsctables.ifos_from_instrument_set(key).replace(",", "")
					xml.appendChild(binnedarray.to_xml(u"%s_%s" % (ifostr, prefix)))
		store(xml, u"background_likelihood_rate", self.background_likelihood_rates)
		store(xml, u"background_likelihood_pdf", self.background_likelihood_pdfs)
		store(xml, u"signal_likelihood_rate", self.signal_likelihood_rates)
		store(xml, u"signal_likelihood_pdf", self.signal_likelihood_pdfs)
		store(xml, u"zero_lag_likelihood_rate", self.zero_lag_likelihood_rates)
		store(xml, u"zero_lag_likelihood_pdf", self.zero_lag_likelihood_pdfs)
		return xml


class FAPFAR(object):
	def __init__(self, ranking_stats, livetime = None):
		# livetime can be None, but then only FAPs can be
		# computed, not FARs
		self.livetime = livetime

		# pull out the combined noise model counts and PDF, and
		# the combined observed zero-lag counts
		bgcounts_ba = ranking_stats.background_likelihood_rates[None]
		bgpdf_ba = ranking_stats.background_likelihood_pdfs[None]
		zlagcounts_ba = ranking_stats.zero_lag_likelihood_rates[None]

		# safety checks
		assert not numpy.isnan(bgcounts_ba.array).any(), "log likelihood ratio rates contain NaNs"
		assert not (bgcounts_ba.array < 0.0).any(), "log likelihood ratio rates contain negative values"
		assert not numpy.isnan(bgpdf_ba.array).any(), "log likelihood ratio pdf contains NaNs"
		assert not (bgpdf_ba.array < 0.0).any(), "log likelihood ratio pdf contains negative values"

		# grab the ranks and bin sizes, discarding the bins with
		# infinite volume
		finite_bins = numpy.isfinite(bgcounts_ba.bins.volumes())
		ranks = bgcounts_ba.bins.upper()[0].compress(finite_bins)
		drank = bgcounts_ba.bins.volumes().compress(finite_bins)

		# whittle the counts and PDF arrays down to the same bins
		bgcounts_ba_array = bgcounts_ba.array.compress(finite_bins)
		bgpdf_ba_array = bgpdf_ba.array.compress(finite_bins)
		zlagcounts_ba_array = zlagcounts_ba.array.compress(finite_bins)

		# apply the extinction model to the noise model to account
		# for the effect of clustering on the observed zero-lag
		# distribution
		extinct_bf_pdf = self.extinct(bgcounts_ba_array, bgpdf_ba_array, zlagcounts_ba_array, ranks)

		# cumulative distribution function and its complement.
		# the CCDF is recomputed from the weights directly, which
		# is numerically better than 1 - CDF
		weights = extinct_bf_pdf * drank
		cdf = weights.cumsum()
		cdf /= cdf[-1]
		ccdf = weights[::-1].cumsum()[::-1]
		ccdf /= ccdf[0]

		# safety checks
		assert not numpy.isnan(cdf).any(), "log likelihood ratio CDF contains NaNs"
		assert not numpy.isnan(ccdf).any(), "log likelihood ratio CCDF contains NaNs"
		assert ((0. <= cdf) & (cdf <= 1.)).all(), "log likelihood ratio CDF failed to be normalized"
		assert ((0. <= ccdf) & (ccdf <= 1.)).all(), "log likelihood ratio CCDF failed to be normalized"
		assert (abs(1. - (cdf[:-1] + ccdf[1:])) < 1e-12).all(), "log likelihood ratio CDF + CCDF != 1 (max error = %g)" % abs(1. - (cdf[:-1] + ccdf[1:])).max()

		# build interpolators and record the domain boundaries
		self.cdf_interpolator = interpolate.interp1d(ranks, cdf)
		self.ccdf_interpolator = interpolate.interp1d(ranks, ccdf)
		self.minrank = min(ranks)
		self.maxrank = max(ranks)

		# the total number of zero-lag events is the trials factor
		# for converting single-event FAPs to observed FAPs
		self.zero_lag_total_count = zlagcounts_ba.array.sum()
	def extinct(self, bgcounts_ba_array, bgpdf_ba_array, zlagcounts_ba_array, ranks):
		# complementary cumulative counts:  the number of events
		# with rank >= x
		zero_lag_compcumcount = zlagcounts_ba_array[::-1].cumsum()[::-1]
		bg_compcumcount = bgcounts_ba_array[::-1].cumsum()[::-1]

		# boundaries of the fit region:  above a minimum rank and
		# where the number of surviving zero-lag events is neither
		# saturated nor too small to be informative.  these
		# heuristics are assumptions made to complete the fit
		fit_min_rank = 3.
		fit_min_counts = min(zero_lag_compcumcount[0] / 2., 100.)
		fit_max_counts = zero_lag_compcumcount[0] * 0.9
		rank_range = numpy.logical_and(ranks > fit_min_rank, numpy.logical_and(zero_lag_compcumcount < fit_max_counts, zero_lag_compcumcount > fit_min_counts))
		if fit_min_counts < 100.:
			warnings.warn("There are less than 100 coincidences, extinction effects on background may not be accurately calculated, which will decrease the accuracy of the combined instruments background estimation.")
		if zero_lag_compcumcount.compress(rank_range).size < 1:
			raise ValueError("not enough zero lag data to fit background")

		# interpolators for the observed background counts and
		# PDF as functions of rank
		obs_counts = interpolate.interp1d(ranks, bg_compcumcount)
		bg_pdf_interp = interpolate.interp1d(ranks, bgpdf_ba_array)

		# extinction model:  clustering removes all but the
		# highest-ranked of a group of events, thinning the
		# observed distribution relative to the pre-clustered one
		def extincted_counts(x, N_ratio):
			out = max(zero_lag_compcumcount) * (1. - numpy.exp(-obs_counts(x) * N_ratio))
			out[~numpy.isfinite(out)] = 0.
			return out

		def extincted_pdf(x, N_ratio):
			out = N_ratio * numpy.exp(-obs_counts(x) * N_ratio) * bg_pdf_interp(x)
			out[~numpy.isfinite(out)] = 0.
			return out

		# fit the model to the observed zero-lag distribution over
		# the chosen rank range
		precluster_normalization, precluster_covariance_matrix = curve_fit(
			extincted_counts,
			ranks.compress(rank_range),
			zero_lag_compcumcount.compress(rank_range),
			sigma = zero_lag_compcumcount.compress(rank_range)**.5,
			p0 = [1e-4]	# initial guess (assumption)
		)
		N_ratio = precluster_normalization[0]

		return extincted_pdf(ranks, N_ratio)
	def fap_from_rank(self, rank):
		# the single-event FAP is converted to the FAP of the
		# whole experiment using the zero-lag count as the number
		# of trials
		rank = max(self.minrank, min(self.maxrank, rank))
		fap = float(self.ccdf_interpolator(rank))
		return fap_after_trials(fap, self.zero_lag_total_count)

	def far_from_rank(self, rank):
		assert self.livetime is not None, "cannot compute FAR without livetime"
		# the FAR is the expected number of events above the given
		# rank per unit time:  -N ln(TDP) / T, where TDP is the
		# single-event true-dismissal probability (= 1 - FAP)
		rank = max(self.minrank, min(self.maxrank, rank))
		fap = float(self.ccdf_interpolator(rank))
		tdp = 1. - fap
		try:
			log_tdp = math.log(tdp)
		except ValueError:
			# TDP = 0 --> FAR = +inf
			return PosInf
		if log_tdp >= -1e-9:
			# rare event:  avoid underflow by computing
			# log(TDP) directly from the FAP
			log_tdp = math.log1p(-fap)
		return self.zero_lag_total_count * -log_tdp / self.livetime
	def assign_faps(self, connection):
		# the false_alarm_rate column stores the false-alarm
		# probability
		connection.create_function("fap", 1, self.fap_from_rank)
		connection.cursor().execute("""
UPDATE
	coinc_inspiral
SET
	false_alarm_rate = (
		SELECT
			fap(coinc_event.likelihood)
		FROM
			coinc_event
		WHERE
			coinc_event.coinc_event_id == coinc_inspiral.coinc_event_id
	)
""")

	def assign_fars(self, connection):
		connection.create_function("far", 1, self.far_from_rank)
		connection.cursor().execute("""
UPDATE
	coinc_inspiral
SET
	combined_far = (
		SELECT
			far(coinc_event.likelihood)
		FROM
			coinc_event
		WHERE
			coinc_event.coinc_event_id == coinc_inspiral.coinc_event_id
	)
""")


def gen_likelihood_control_doc(xmldoc, process, coinc_params_distributions, ranking_data, seglists, name = u"gstlal_inspiral_likelihood", comment = None):
	node = xmldoc.childNodes[-1]
	assert node.tagName == ligolw.LIGO_LW.tagName

	if coinc_params_distributions is not None:
		coinc_params_distributions.process_id = process.process_id
		node.appendChild(coinc_params_distributions.to_xml(name))

	if ranking_data is not None:
		ranking_data.process_id = process.process_id
		node.appendChild(ranking_data.to_xml(name))

	llwsegments = ligolw_segments.LigolwSegments(xmldoc)
	llwsegments.insert_from_segmentlistdict(seglists, u"%s:segments" % name, comment = comment)
	llwsegments.finalize(process)

	return xmldoc


def parse_likelihood_control_doc(xmldoc, name = u"gstlal_inspiral_likelihood"):
	coinc_params_distributions = ranking_data = process_id = None
	try:
		coinc_params_distributions = ThincaCoincParamsDistributions.from_xml(xmldoc, name)
	except ValueError:
		pass
	else:
		process_id = coinc_params_distributions.process_id
	try:
		ranking_data = RankingData.from_xml(xmldoc, name)
	except ValueError:
		pass
	else:
		if process_id is None:
			process_id = ranking_data.process_id
	if coinc_params_distributions is None and ranking_data is None:
		raise ValueError("document does not contain likelihood ratio data")
	seglists = ligolw_segments.segmenttable_get_by_name(xmldoc, u"%s:segments" % name).coalesce()
	return coinc_params_distributions, ranking_data, seglists
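
# Round-trip sketch (hypothetical file name):  documents written with
# gen_likelihood_control_doc() can be read back with
# parse_likelihood_control_doc():
#
#	>>> xmldoc = ligolw_utils.load_filename("likelihood.xml.gz", verbose = True)
#	>>> coinc_params, ranking_data, seglists = parse_likelihood_control_doc(xmldoc)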


def get_live_time(seglistdict, verbose = False):
	livetime = float(abs(vote((segs for instrument, segs in seglistdict.items() if instrument != "H2"), 2)))
	if verbose:
		print >>sys.stderr, "Livetime: %.3g s" % livetime
	return livetime


def get_live_time_segs_from_search_summary_table(connection, program_name = "gstlal_inspiral"):
	xmldoc = dbtables.get_xml(connection)
	farsegs = ligolw_search_summary.segmentlistdict_fromsearchsummary(xmldoc, program_name).coalesce()
	xmldoc.unlink()
	return farsegs