Package pylal :: Module grbsummary
[hide private]
[frames] | [no frames]

Source Code for Module pylal.grbsummary

  1  from __future__ import division 
  2   
  3  import sys 
  4  itertools = __import__("itertools")  # absolute import of system-wide itertools 
  5   
  6  import numpy 
  7   
  8  from glue import iterutils 
  9  from glue import segments, segmentsUtils 
 10  from glue.ligolw import ligolw 
 11  from glue.ligolw import table 
 12  from glue.ligolw import lsctables 
 13  from glue.ligolw import utils 
 14  from glue.ligolw.utils import ligolw_add 
 15  from glue.ligolw.utils import search_summary as ligolw_search_summary 
 16  from lal import rate 
 17  from pylal.SnglInspiralUtils import SnglInspiralID_old 
 18   
 19  ############################################################################## 
 20  # Detector threshold function 
 21  ############################################################################## 
 22   
def directional_horizon(ifos, RA, dec, gps_time, horizons=None):
    """
    Return a dictionary of sensitivity numbers for each detector, based on
    a known sky location and an optional input dictionary of inspiral horizon
    distances for a reference source of the user's choice.
    If the horizons dictionary is specified, the returned values are interpreted
    as inspiral horizons in that direction.

    @param ifos: iterable of detector name strings, e.g. ("H1", "L1")
    @param RA: right ascension of the source, radians
    @param dec: declination of the source, radians
    @param gps_time: GPS time at which to evaluate the antenna responses
    @param horizons: optional dict mapping detector name to a reference
        inspiral horizon distance; must have one entry per entry of ifos
    """
    # antenna.response expects a float GPS time; isinstance is the
    # idiomatic spelling of the old type(...) == int check
    if isinstance(gps_time, int):
        gps_time = float(gps_time)

    from pylal import antenna

    # With no horizons supplied, use a unit-strength reference signal
    # (BNS) in every detector so the bare responses are returned.
    if horizons is None:
        horizons = {}
        for det in ifos:
            horizons[det] = 1.0
    else:
        assert len(ifos) == len(horizons)

    # Make a dictionary of average responses, scaled by the horizons.
    # Index [3] of antenna.response is used as the sensitivity figure —
    # presumably the RMS/averaged response; confirm against pylal.antenna.
    resps = {}
    for det in ifos:
        resps[det] = antenna.response(gps_time, RA, dec, 0, 0, 'radians', det)[3] * horizons[det]

    return resps
50
def detector_thresholds(horizons, min_threshold, max_threshold=7.5):
    """
    Map each detector in horizons to an SNR threshold adjusted for its
    directional sensitivity (as produced by directional_horizon).

    Every detector less sensitive than the best one is assigned
    min_threshold; the most sensitive detector(s) have their threshold
    scaled upward in proportion to their horizon relative to the worst
    detector, capped at max_threshold.
    """
    assert min_threshold < max_threshold
    worst = min(horizons.values())
    best = max(horizons.values())
    thresholds = {}
    # assume the lowest threshold belongs in the least sensitive detector
    for det, horizon in horizons.items():
        if horizon < best:
            thresholds[det] = min_threshold
        else:
            # scale the best detector's threshold up, capped at the maximum
            thresholds[det] = min(min_threshold * (horizon / worst),
                                  max_threshold)
    return thresholds
##############################################################################
# Convenience functions
##############################################################################

# Relative sensitivity ranking of the detectors: lower number = more
# sensitive.  Consumed by sensitivity_cmp to order IFO lists.
sensitivity_dict = {"H1": 1, "L1": 2, "H2": 3, "V1": 4, "G1": 5}
def sensitivity_cmp(ifo1, ifo2):
    """
    Provide a comparison operator for IFOs such that they would get sorted
    from most sensitive to least sensitive.

    Returns a negative, zero, or positive integer as ifo1 is more
    sensitive than, equally sensitive to, or less sensitive than ifo2
    (the classic cmp() contract).
    """
    rank1 = sensitivity_dict[ifo1]
    rank2 = sensitivity_dict[ifo2]
    # (a > b) - (a < b) is the portable spelling of cmp(a, b); the cmp
    # builtin does not exist in Python 3.
    return (rank1 > rank2) - (rank1 < rank2)
82 83 ############################################################################## 84 # Segment-manipulation functions 85 ############################################################################## 86
def compute_offsource_segment(analyzable, on_source, padding_time=0,
    min_trials=None, max_trials=None, symmetric=True):
    """
    Compute and return the maximal off-source segment subject to the
    following constraints:

    1) The off-source segment is constrained to lie within a segment from the
       analyzable segment list and to contain the on_source segment.  If
       no such segment exists, return None.
    2) The off-source segment length is a multiple of the on-source segment
       length.  This multiple (minus one for the on-source segment) is called
       the number of trials.  By default, the number of trials is bounded
       only by the availability of analyzable time.

    Optionally:
    3) padding_time is subtracted from the analyzable segments, but added
       back to the off-source segment.  This represents time that is thrown
       away as part of the filtering process.
    4) max_trials caps the number of trials that the off-source segment
       can contain.  The truncation is performed so that the resulting
       off-source segment is as symmetric as possible.
    5) symmetric being True will simply truncate the off-source segment to
       be symmetric about the on-source segment.
    """
    trial_len = abs(on_source)

    # the off-source segment must lie within the single analyzable
    # segment that contains on_source, shrunk by padding_time
    try:
        container = analyzable[analyzable.find(on_source)].contract(padding_time)
    except ValueError:
        return None

    # the contraction may have evicted the on-source segment
    if on_source not in container:
        return None

    # whole trials available before (left) and after (right) on_source
    num_right = (container[1] - on_source[1]) // trial_len
    num_left = (on_source[0] - container[0]) // trial_len

    if (max_trials is not None) and (num_left + num_right > max_trials):
        half = max_trials // 2
        if num_right < half:
            # left side sticks out, so cut it
            num_left = min(max_trials - num_right, num_left)
        elif num_left < half:
            # right side sticks out, so cut it
            num_right = min(max_trials - num_left, num_right)
        else:
            # both sides stick out; cut as symmetrically as possible,
            # letting any odd trial stick out on the right
            num_left = half
            num_right = max_trials - half

    if symmetric:
        num_left = num_right = min(num_left, num_right)

    if (min_trials is not None) and (num_left + num_right < min_trials):
        return None

    return segments.segment((on_source[0] - num_left * trial_len - padding_time,
                             on_source[1] + num_right * trial_len + padding_time))
148
def multi_ifo_compute_offsource_segment(analyzable_dict, on_source, **kwargs):
    """
    Return the off-source segment determined for multiple IFO times along with
    the IFO combo that determined that segment.  Calls
    compute_offsource_segment as necessary, passing all kwargs as necessary.

    @param analyzable_dict: mapping of IFO name to segmentlist of
        analyzable time
    @param on_source: the on-source segment the result must contain
    """
    # sieve down to relevant segments and IFOs; sort IFOs by sensitivity.
    # (.items()/range/sorted-with-key replace the Python-2-only
    # iteritems/xrange/cmp-argument sort with identical behavior.)
    new_analyzable_dict = segments.segmentlistdict()
    for ifo, seglist in analyzable_dict.items():
        try:
            ind = seglist.find(on_source)
        except ValueError:
            # this IFO has no analyzable segment containing on_source
            continue
        new_analyzable_dict[ifo] = segments.segmentlist([seglist[ind]])
    analyzable_ifos = sorted(new_analyzable_dict.keys(),
                             key=sensitivity_dict.__getitem__)

    # now try getting off-source segments; start trying with all IFOs, then
    # work our way to smaller and smaller subsets; exclude single IFOs.
    test_combos = itertools.chain.from_iterable(
        iterutils.choices(analyzable_ifos, n)
        for n in range(len(analyzable_ifos), 1, -1))

    off_source_segment = None
    the_ifo_combo = []
    for ifo_combo in test_combos:
        trial_seglist = new_analyzable_dict.intersection(ifo_combo)
        temp_segment = compute_offsource_segment(trial_seglist, on_source,
                                                 **kwargs)
        if temp_segment is not None:
            off_source_segment = temp_segment
            the_ifo_combo = sorted(ifo_combo)
            break

    return off_source_segment, the_ifo_combo
185
def get_segs_from_doc(doc):
    """
    Return the segments from a document
    @param doc: document containing the desired segments
    """
    # union every instrument's search-summary segments
    seg_dict = ligolw_search_summary.segmentlistdict_fromsearchsummary(doc)
    # iterating the dict yields its keys
    all_segs = seg_dict.union(seg_dict).coalesce()
    # typecast to ints, which are better behaved and faster than LIGOTimeGPS
    return segments.segmentlist(
        segments.segment(int(seg[0]), int(seg[1])) for seg in all_segs)
200 201
def get_exttrig_trials_from_docs(onsource_doc, offsource_doc, veto_files):
    """
    Return a tuple of (off-source time bins, off-source veto mask,
    index of trial that is on source).
    The off-source veto mask is a one-dimensional boolean array where True
    means vetoed.
    @param onsource_doc: Document describing the on-source files
    @param offsource_doc: Document describing the off-source files
    @param veto_files: List of filenames containing vetoes
    """
    # extract the segments; the helper defined in this module is
    # get_segs_from_doc — the previous code called the undefined name
    # get_segs_from_docs, which raised NameError at runtime
    on_segs = get_segs_from_doc(onsource_doc)
    off_segs = get_segs_from_doc(offsource_doc)

    return get_exttrig_trials(on_segs, off_segs, veto_files)
218
def get_exttrig_trials(on_segs, off_segs, veto_files):
    """
    Return a tuple of (off-source time bins, off-source veto mask,
    index of trial that is on source).
    The off-source veto mask is a one-dimensional boolean array where True
    means vetoed.
    @param on_segs: On-source segments
    @param off_segs: Off-source segments
    @param veto_files: List of filenames containing vetoes
    @raise ValueError: if the off-source duration is not a multiple of
        the on-source duration, or if the on-source segment does not
        span exactly one trial
    """
    # Check that offsource length is a multiple of the onsource segment
    # length.  (raise X("msg") is the cross-version form of the old
    # Python-2-only "raise X, msg" statement.)
    trial_len = int(abs(on_segs))
    if abs(off_segs) % trial_len != 0:
        raise ValueError("The provided file's analysis segment is not "
                         "divisible by the fold time.")
    extent = (off_segs | on_segs).extent()

    # generate bins for trials
    num_trials = int(abs(extent)) // trial_len
    trial_bins = rate.LinearBins(extent[0], extent[1], num_trials)

    # incorporate veto files; in trial_veto_mask, True means vetoed.
    # numpy.bool8 was removed in NumPy 1.24; plain bool is equivalent.
    trial_veto_mask = numpy.zeros(num_trials, dtype=bool)
    for veto_file in veto_files:
        # close the veto file promptly instead of leaking the handle
        with open(veto_file) as veto_fileobj:
            new_veto_segs = segmentsUtils.fromsegwizard(veto_fileobj,
                                                        coltype=int)
        if new_veto_segs.intersects(on_segs):
            sys.stderr.write("warning: %s overlaps on-source segment\n"
                             % veto_file)
        trial_veto_mask |= rate.bins_spanned(trial_bins, new_veto_segs).astype(bool)

    # identify onsource trial index
    onsource_mask = rate.bins_spanned(trial_bins, on_segs).astype(bool)
    if sum(onsource_mask) != 1:
        raise ValueError("on-source segment spans more or less than one trial")
    onsource_ind = numpy.arange(len(onsource_mask))[onsource_mask]

    return trial_bins, trial_veto_mask, onsource_ind
258
def get_mean_mchirp(coinc):
    """
    Return the arithmetic mean of the chirp masses of the triggers in
    coinc: the sum of their mchirp values divided by the number of
    participating IFOs.
    """
    total_mchirp = sum(trig.mchirp for trig in coinc)
    return total_mchirp / coinc.numifos
264 265 266 ############################################################################## 267 # XML convenience code 268 ############################################################################## 269
def load_external_triggers(filename):
    """
    Parse filename with ligolw_add and return the ExtTriggersTable
    found in the resulting document.
    """
    xmldoc = ligolw_add.ligolw_add(ligolw.Document(), [filename])
    return lsctables.ExtTriggersTable.get_table(xmldoc)
273
def write_rows(rows, table_type, filename):
    """
    Create an empty LIGO_LW XML document, add a table of table_type,
    insert the given rows, then write the document to a file.
    """
    # build a fresh document holding a single LIGO_LW element
    xmldoc = ligolw.Document()
    ligo_lw = ligolw.LIGO_LW()
    xmldoc.appendChild(ligo_lw)

    # attach an empty table of the requested type and fill it
    tbl = lsctables.New(table_type)
    ligo_lw.appendChild(tbl)
    tbl.extend(rows)

    # write out the document
    utils.write_filename(xmldoc, filename)
290
def load_cache(xmldoc, cache, sieve_pattern, exact_match=False,
    verbose=False):
    """
    Return a parsed and ligolw_added XML document from the files matched by
    sieve_pattern in the given cache.

    @param xmldoc: unused by the current implementation; the returned
        document is freshly created by ligolw_add
    @param cache: a LAL cache object supporting sieve() and
        checkfilesexist()
    @param sieve_pattern: description pattern used to select cache entries
    @param exact_match: passed through to cache.sieve()
    @param verbose: passed through to ligolw_add
    """
    subcache = cache.sieve(description=sieve_pattern, exact_match=exact_match)
    found, missed = subcache.checkfilesexist()
    if len(found) == 0:
        print >>sys.stderr, "warning: no files found for pattern %s" \
            % sieve_pattern

    # turn on event_id mangling: temporarily replace the table's id
    # generator so loaded rows get old-style SnglInspiral event ids;
    # the original generator is saved for restoration below
    old_id = lsctables.SnglInspiralTable.next_id
    lsctables.SnglInspiralTable.next_id = SnglInspiralID_old(0)

    # reduce memory footprint at the expense of speed
    # table.TableStream.RowBuilder = table.InterningRowBuilder

    urls = [c.url for c in found]
    try:
        xmldoc = ligolw_add.ligolw_add(ligolw.Document(), urls, verbose=verbose)
    except ligolw.ElementError:
        # FIXME: backwards compatibility for int_8s SnglInspiralTable event_ids
        # NOTE(review): this mutates module-level lsctables state and
        # retries the load; the mutation is not undone afterwards
        lsctables.SnglInspiralTable.validcolumns["event_id"] = "int_8s"
        lsctables.SnglInspiralID = int
        xmldoc = ligolw_add.ligolw_add(ligolw.Document(), urls, verbose=verbose)

    # turn off event_id mangling: restore the saved id generator
    # (note: not restored if the retry above also raises)
    lsctables.SnglInspiralTable.next_id = old_id

    return xmldoc
323
def get_num_slides(xmldoc):
    """
    Return the value of --num-slides found in the process_params table of
    xmldoc.  If no such entry is found, return 0.
    """
    # don't be too picky about which program had --num-slides
    for param_row in lsctables.ProcessParamsTable.get_table(xmldoc):
        if param_row.param == "--num-slides":
            return int(param_row.value)
    return 0
334 ##################################################################################### 335 #timeslides functions# 336 ##################################################################################### 337
def retrieve_ring_boundaries(xmldoc):
    """
    Return the sorted, disjoint list of thinca rings recorded in the
    search_summary table of xmldoc, with endpoints cast to int.

    @raise ValueError: if any two of the (sorted) rings intersect
    """
    #
    # grab the segment list for any instrument selected at random (they
    # are all the same)
    #

    rings = ligolw_search_summary.segmentlistdict_fromsearchsummary(xmldoc, program = "thinca").popitem()[1]

    #
    # because the input often contains two thinca jobs the rings might
    # be duplicated; use set() to uniqueify them then sort them.
    #

    rings = segments.segmentlist(set(rings))
    rings.sort()

    #
    # check that the (sorted) rings are non-intersecting.
    # raise ValueError("...") is the cross-version spelling of the old
    # Python-2-only "raise ValueError, ..." statement.
    #

    for i in range(len(rings) - 1):
        if rings[i].intersects(rings[i + 1]):
            raise ValueError("non-disjoint thinca rings detected in search_summary table")

    #
    # cast to int to prevent explosions later
    #

    for i, ring in enumerate(rings):
        rings[i] = segments.segment(int(ring[0]), int(ring[1]))

    #
    # done
    #
    return rings
373