Package pylal :: Module followup_page
[hide private]
[frames] | [no frames]

Source Code for Module pylal.followup_page

  1  import sys, os, socket, re 
  2  import glob, math 
  3  from glue import cbcwebpage 
  4  from glue import lal 
  5  from glue import segments 
  6  from pylal import fu_utils 
  7   
  8  # 
  9  # useful functions 
 10  # 
 11   
def parse_plot_cache_for_some_images(cache, basepath, tag='.*.png', brk=None):
    """Scan a plot cache file for image entries matching the regex *tag*.

    cache    -- path to a text cache file, one image file name per line
    basepath -- prefix joined (with '/') onto each matching line
    tag      -- regex matched against the start of each line (re.match)
    brk      -- if true, return only the first (image, thumb) pair found

    Returns a list of (image, thumbnail) path pairs, where the thumbnail
    name is derived by replacing '.png' with '_thumb.png'.  With brk set,
    returns the single first pair instead (raises IndexError if none).
    """
    out = []
    # 'with' closes the handle (previously leaked); iterating the file
    # avoids materializing the whole cache via readlines()
    with open(cache) as cachefh:
        for line in cachefh:
            if re.match(tag, line):
                img = basepath + '/' + line
                out.append((img.strip(),
                            img.replace('.png', '_thumb.png').strip()))
                # stop scanning as soon as the caller's single hit is found
                if brk:
                    return out[0]
    return out
21 22
def parse_plot_cache_for_image(cache, basepath, tag):
    """Return the (image, thumbnail) pair of the first cache entry matching *tag*.

    Bug fix: *tag* was previously ignored and a hard-coded '.*.png' pattern
    was passed instead, so every call returned the same first image no matter
    which plot was requested.  The tag is now embedded in the pattern so it
    matches anywhere in the line, consistent with the '.*%s.*' pattern built
    by the qscan plotting code.
    """
    return parse_plot_cache_for_some_images(cache, basepath,
                                            tag='.*%s.*' % (tag,), brk=True)
25 26
def parse_plot_cache_for_all_images(cache, basepath):
    """Return every (image, thumbnail) pair listed in *cache*.

    Thin convenience wrapper around parse_plot_cache_for_some_images()
    with the default '.*.png' tag and no early exit.
    """
    all_images = parse_plot_cache_for_some_images(cache, basepath)
    return all_images
29 30
def cache_parser(cachefile):
    """Split a followup cache file into coinc info and a LAL cache.

    Lines containing "COINC_INFO" are collected into a dict mapping the
    tag after 'COINC_INFO_' (e.g. an ifo combination) to a list of local
    file paths ('file://localhost' stripped).  All other lines are parsed
    as glue.lal.CacheEntry objects.

    Returns (coinc_dict, cache_entry_list).
    """
    coinc = {}
    out_cache = []
    # 'with' guarantees the file handle is closed (was previously leaked)
    with open(cachefile) as f:
        for l in f:
            if "COINC_INFO" in l:
                c = l.split()
                coinc.setdefault(c[1].replace('COINC_INFO_', ''), []).append(
                    c[4].replace('file://localhost', ''))
            else:
                out_cache.append(lal.CacheEntry(l))
    return coinc, out_cache
41
class Coinc(object):
    """One coincident trigger plus views into the followup result caches."""

    def __init__(self, coinc, search, cache):
        """Parse a COINC_INFO file and pre-filter *cache* by job type.

        coinc  -- path to a COINC_INFO text file (see cache_parser)
        search -- search name; upper-cased into the cache descriptions
        cache  -- list of glue.lal.CacheEntry objects for the followup run
        """
        self.cache = cache
        # set up some useful cache views, one per followup job type
        self.htqscan_cache = self.parse_cache_by_desc("WPIPELINE_FG_HT_" + search.upper())
        self.seismicqscan_cache = self.parse_cache_by_desc("WPIPELINE_FG_SEIS_RDS_" + search.upper())
        #(CVT)
        self.rdsqscan_cache = self.parse_cache_by_desc("WPIPELINE_FG_RDS_" + search.upper())
        self.plotsnrchisq_cache = self.parse_cache_by_desc("PLOTSNRCHISQ_PIPE__" + search.upper())
        self.plotchia_cache = self.parse_cache_by_desc("PLOTCHIATIMESERIES__" + search.upper())
        self.skymap_cache = self.parse_cache_by_desc("PYLAL_PLOT_INSPIRAL_SKYMAP__" + search.upper())
        self.analyze_qscan_ht_cache = self.parse_cache_by_desc("ANALYSEQSCAN.PY_FG_HT_" + search.upper())
        self.analyze_qscan_rds_cache = self.parse_cache_by_desc("ANALYSEQSCAN.PY_FG_RDS_" + search.upper())
        self.analyze_qscan_seis_cache = self.parse_cache_by_desc("ANALYSEQSCAN.PY_FG_SEIS_RDS_" + search.upper())
        self.flag_cache = self.parse_cache_by_desc("FOLLOWUPQUERYDQ.PY__" + search.upper())
        self.veto_cache = self.parse_cache_by_desc("FOLLOWUPQUERYVETO.PY__" + search.upper())

        # 'with' closes the coinc file (was previously leaked)
        with open(coinc) as f:
            line = f.readlines()
        # line 2 (index 1) holds the whitespace-separated coinc summary
        d = line[1].split()
        self.dir = d[0]
        self.rank = d[1]
        self.cfar = d[2]
        self.coincsnr = d[3]
        self.ifos = d[4]
        self.instruments = d[5]
        self.coinctime = d[6]
        self.coincmass = d[7]
        # per-ifo single-detector parameters, keyed by ifo name
        self.time = {}
        self.snr = {}
        self.chisq = {}
        self.mass1 = {}
        self.mass2 = {}
        for l in line[3:]:
            # don't look at ifos not found in coincidence since the
            # parameters are stolen from another ifo: zero everything
            # except the time column
            d = l.split()
            if d[1].strip() not in self.ifos:
                d[3:] = ["0" for i in d[3:]]
            self.time[d[1]] = d[2]
            self.snr[d[1]] = d[3]
            self.chisq[d[1]] = d[4]
            self.mass1[d[1]] = d[5]
            self.mass2[d[1]] = d[6]
84
85 - def parse_cache_by_desc(self, tag, cache=None):
86 if cache is None: cache = self.cache 87 return [l for l in cache if tag in l.description]
88
89 - def parse_cache_by_time_and_ifo(self, time, ifo, cache=None):
90 if cache is None: cache = self.cache 91 return [l for l in cache if float(time) == float(l.segment[0]) and str(ifo) == str(l.observatory)]
92
93 - def write_param_table(self, page):
94 page.add_section("param", "Parameter table for %s" % (self.coinctime,)) 95 params = [["<b>RANK</b>", "<b>CFAR</b>", "<b>TIME</b>", "<b>SNR</b>", "<b>MASS</b>","<b>IFOS</b>","<b>INSTRUMENTS</b>"],[self.rank, self.cfar, self.coinctime, self.coincsnr, self.coincmass, self.ifos, self.instruments]] 96 page.sections["param"].add_table(params, title="Coinc Parameter Table", caption="Coinc parameters for the event", tag="coincparamtable") 97 98 params = [["<b>IFO</b>","<b>TIME</b>", "<b>SNR</b>", "<b>CHISQ</b>", "<b>MASS1</b>", "<b>MASS2</b>"]] 99 for ifo, data in self.time.items(): 100 params.append([ifo, self.time[ifo], self.snr[ifo], self.chisq[ifo], self.mass1[ifo], self.mass2[ifo]]) 101 page.sections["param"].add_table(params, title="Sngl Parameter Table", caption="Sngl parameters for the event", tag="snglparamtable")
102
103 - def add_htqscan(self, page):
104 self._add_qscan(page, self.htqscan_cache, self.analyze_qscan_ht_cache, "h(t)")
105
106 - def add_seismicqscan(self, page):
107 self._add_qscan(page, self.seismicqscan_cache, self.analyze_qscan_seis_cache, "SEISMIC")
108
109 - def add_rdsqscan(self, page):
110 self._add_qscan(page, self.rdsqscan_cache, self.analyze_qscan_rds_cache, "RDS")
111
    def _add_qscan(self, page, thiscache, ancache, name):
        """Add one omega-scan section (named *name*) to *page*.

        thiscache -- cache entries for the omega scan jobs themselves
        ancache   -- cache entries for the matching analyseQscan jobs
        name      -- section key/label, e.g. "h(t)", "SEISMIC", "RDS"

        Does nothing at all if no qscan job is found for any coinc ifo.
        """
        # first pass: bail out before creating the section if no ifo has a
        # qscan result at its trigger time, so we never emit an empty section
        job_list = []
        for ifo, time in self.time.items():
            c = self.parse_cache_by_time_and_ifo(time, ifo, thiscache)
            if not c: continue
            else: job_list.append(c)
        if not job_list: return # get out of here if these jobs were not found

        page.add_section(name, "%s Qscan for %s" % (name, self.coinctime,))
        page.sections[name].div("This section gives the %s omega scans and plots that summarize the significance." % (name,))
        img_col = {}
        # since qscans are already by default on web space, they are handled differently
        for ifo, time in self.time.items():
            c = self.parse_cache_by_time_and_ifo(time, ifo, thiscache)
            if not c: continue # get out of here if these jobs were not found

            ca = self.parse_cache_by_time_and_ifo(time, ifo, ancache)
            # link straight to the qscan output directory on web space
            page.sections[name].add("<a href=%s>LINK TO %s QSCAN</a><br>" % (cbcwebpage.web_path_to_url(c[0].url.replace('file://localhost','')),ifo))
            # the omega configuration file lists which plots were produced;
            # skip this ifo (with a warning) if it cannot be parsed
            confile = '%s/configuration.txt' % (c[0].path,)
            try: qconf = fu_utils.omega_config_parser(confile)
            except ValueError:
                print >>sys.stderr, "File %s could not be parsed" % ( confile, )
                continue
            plots = qconf.to_plot_tuple()
            self._add_qscan_plots(page, plots, c, ca, img_col, ifo)

        self._finish_qscan(page, img_col, name)
139
    def _add_qscan_plots(self, page, plots, c, ca, img_col, ifo):
        """Collect qscan and analyseQscan images for one ifo into *img_col*.

        plots   -- (channel, image-name) tuples from the omega config
        c       -- cache entries for the qscan job (only c[0] is used)
        ca      -- cache entries for the analyseQscan job
        img_col -- dict channel -> {ifo: [image links]}, filled in place
        """
        # get the analyze qscan cache of images
        # NOTE(review): assumes ca contains at least one entry ending in
        # '.cache'; raises IndexError otherwise -- confirm against workflow
        cfile = [l.path for l in ca if l.path.endswith('.cache')][0]
        for i,plot in enumerate(plots):
            img_col.setdefault(plot[0],{})
            # first the qscans: glob the image and its thumbnail on disk
            thumb = plot[1].strip().replace(".png",".thumb.png")
            pat = c[0].url.replace('file://localhost','')+'/' + plot[1]
            img_glob = glob.glob(pat)
            pat = c[0].url.replace('file://localhost','')+'/' + thumb
            thumb_glob = glob.glob(pat)
            # now for the analyze qscan stuff: pull images for this channel
            # (':' mapped to '_' in the file names) out of the cache file
            basename = '/' + os.path.split(cfile)[0].lstrip('/')
            analyze_images = parse_plot_cache_for_some_images(cfile, basename, '.*%s.*' % (plot[0].replace(':','_'),))
            if analyze_images:
                img_glob.extend([im[0] for im in analyze_images])
                thumb_glob.extend([im[1] for im in analyze_images])
            # NOTE(review): zip() silently drops unpaired entries if the image
            # and thumbnail lists differ in length
            for img, thmb in zip(img_glob, thumb_glob):
                img = '/' + img.lstrip('/')
                thmb = '/' + thmb.lstrip('/')
                img_col[plot[0]].setdefault(ifo,[]).append(cbcwebpage._imagelinkcpy(img, thmb)())
161
162 - def _finish_qscan(self, page, img_col, type):
163 for name, plot in sorted(img_col.items()): 164 #FIXME terrible hack to make nice sections 165 if plot: 166 chan = name[:6] 167 try: page.sections[type].sections[chan] 168 except: page.sections[type].add_section(chan,chan) 169 page.sections[type].sections[chan].div('<br><hr><b>%s</b>' % (name,)) 170 for ifo, row in plot.items(): 171 title = '%s %s' % (ifo, name) 172 page.sections[type].sections[chan].add_table([row],title,title + " qscans and plots summarizing significance")
173 174
    def add_plotsnrchisq(self, page):
        """Add the per-ifo SNR/chisq/template time-series plots to *page*.

        Returns without adding anything if any coinc ifo is missing its
        plotsnrchisq job output.
        """
        for ifo, time in self.time.items():
            # Parse plotting codes nearly useless "cache" file
            c = self.parse_cache_by_time_and_ifo(time, ifo, self.plotsnrchisq_cache)
            if not c: return # get out of here if these jobs were not found

        page.add_section("plotsnrchisq", "SNR, Chisq and template time series for %s" % (self.coinctime,))
        img_row = []
        ifo_row = []
        table = []
        for ifo, time in self.time.items():
            # Parse plotting codes nearly useless "cache" file
            c = self.parse_cache_by_time_and_ifo(time, ifo, self.plotsnrchisq_cache)
            cfile = c[0].url.replace('file://localhost','')
            path = '/' + os.path.split(cfile.rstrip('/').lstrip('/'))[0]
            # one image of each kind per ifo, in a fixed display order
            plots = ['snr-','snr_zoom-', 'rsq-', 'rsq_zoom-', 'chisq-', 'chisq_zoom-', 'PSD-', 'fft_of_template_and_asd-', 'template-', 'white_template-']
            plot_list = []
            for plot in plots:
                img, thumb = parse_plot_cache_for_image(cfile, path, plot)
                plot_list.append(cbcwebpage._imagelinkcpy(img,thumb,plot))
            img_row.append(plot_list)
            ifo_row.append(ifo)
        # header row of ifo names, then transpose so each subsequent row
        # holds one plot kind across all ifos
        table.append(ifo_row)
        for row in zip(*img_row): table.append(row)
        page.sections["plotsnrchisq"].add_table(table, "Plots of inspiral stuff", "Plots of snr, snrzoom, rsq, rsqzoom, chisq, chisqzoom, PSD, fft of templates and PSD, template and whitened template by ifo", tag="plotsnrchisq")
201
202 - def add_plotchia(self, page):
203 c = self.parse_cache_by_time_and_ifo(self.coinctime, self.instruments, self.plotchia_cache) 204 if not c: return # if the job didn't finish return 205 206 page.add_section("plotchia", "Coherent Code Plots for %s" % (self.coinctime,)) 207 img_row = [] 208 ifo_row = [] 209 table = [] 210 plot_list = [] 211 212 cfile = c[0].url.replace('file://localhost','') 213 path = '/' + os.path.split(cfile.rstrip('/').lstrip('/'))[0] 214 #try: clist = open(cfile).readlines() 215 #except: 216 # print >>sys.stderr, "couldn't find cachefile %s" % (cfile,) 217 # page.sections["plotchia"].add("<br><b>plot chia job did not finish correctly</b><br>") 218 # return 219 220 for num, plot in enumerate(parse_plot_cache_for_all_images(cfile, path)): 221 plot_list.append(cbcwebpage._imagelinkcpy(plot[0],plot[1],"chia"+str(num))) 222 # group by 3s 223 plot_list = [plot_list[i*3:i*3+3] for i in range(int(math.ceil(len(plot_list) / 3.)))] 224 page.sections["plotchia"].add_table(plot_list, "Plots of coherent inspiral stuff", "all of plotchiatimeseries output", tag="plotchia")
225
226 - def add_skymap(self,page):
227 c = self.parse_cache_by_time_and_ifo(self.coinctime, self.instruments, self.skymap_cache) 228 if not c: return # if the job didn't finish return 229 230 page.add_section("skymap", "Sky Map for %s" % (self.coinctime,)) 231 img_row = [] 232 ifo_row = [] 233 table = [] 234 plot_list = [] 235 236 c = self.parse_cache_by_time_and_ifo(self.coinctime, self.instruments, self.skymap_cache) 237 cfile = c[0].url.replace('file://localhost','') 238 path = '/' + os.path.split(cfile.rstrip('/').lstrip('/'))[0] 239 #try: clist = open(cfile).readlines() 240 #except: 241 # print >>sys.stderr, "couldn't find cachefile %s" % (cfile,) 242 # page.sections["skymap"].add("<br><b>skymap job did not finish correctly</b><br>") 243 # return 244 245 for num, plot in enumerate(parse_plot_cache_for_all_images(cfile, path)): 246 plot_list.append(cbcwebpage._imagelinkcpy(plot[0],plot[1],"skymap"+str(num))) 247 # group by 3s 248 plot_list = [plot_list[i*3:i*3+3] for i in range(int(math.ceil(len(plot_list) / 3.)))] 249 page.sections["skymap"].add_table(plot_list, "Sky map", "lalapps skymap plot", tag="plotchia")
250
251 - def add_checklist(self, page):
252 page.add_section("checklist", "Detection Checklist for %s" % (self.coinctime,)) 253 page.sections["checklist"].add("<a href=https://www.lsc-group.phys.uwm.edu/ligovirgo/cbcnote/followup_%s>DETECTION CHECKLIST FOR %s</a><br>" % (self.coinctime,self.coinctime)) 254 page.sections["checklist"].add('<i>NOTE IF PAGE DOES NOT EXIST CHOOSE "FUCheckListTemplate" FROM THE TEMPLATE SECTION<br>')
255
    def add_dq(self, page):
        """Add data-quality flag and veto tables to *page*.

        Reads the wiki-formatted tables written by the DQ and veto query
        jobs; each table gets a "Job did not finish" note if its file is
        missing from the cache or from disk.
        """
        page.add_section("DQ", "Data Quality for %s" % (self.coinctime,))
        page.sections["DQ"].div("This section gives vetoes and flags that were on")

        # DQ flags table
        ca = self.parse_cache_by_time_and_ifo(self.coinctime, self.instruments, self.flag_cache)
        if ca and os.path.isfile(ca[0].path):
            tab, title = cbcwebpage.wiki_table_parse(ca[0].path)
            #FIXME HACK, may stop working: rewrites the wiki '<rowbgcolor'
            # markup in the first cell into real HTML table-row tags
            for t in tab[0]: t[0] = t[0].replace('<rowbgcolor','</td></tr><tr bgcolor') + '<td>' + t[0]
            page.sections["DQ"].add_table(tab[0], 'dq flags', 'dq flags: Yellow denotes before, red during and green after')
        else: page.sections["DQ"].div("Job did not finish")

        # vetoes table (same markup rewrite as above)
        ca = self.parse_cache_by_time_and_ifo(self.coinctime, self.instruments, self.veto_cache)
        if ca and os.path.isfile(ca[0].path):
            tab, title = cbcwebpage.wiki_table_parse(ca[0].path)
            #FIXME HACK, may stop working
            for t in tab[0]: t[0] = t[0].replace('<rowbgcolor','</td></tr><tr bgcolor') + '<td>' + t[0]
            page.sections["DQ"].add_table(tab[0], 'vetoes', 'vetoes: Yellow denotes before, red during and green after')
        else: page.sections["DQ"].div("Job did not finish")
275