Package pylal :: Module pylal_exttrig_llutils

Source Code for Module pylal.pylal_exttrig_llutils

#!/usr/bin/python

import os, sys, shutil, time
import pickle, glob
import subprocess, commands
import ConfigParser, optparse
import itertools
import urllib
from datetime import datetime

import numpy as np
import matplotlib
matplotlib.use('Agg')

from glue import segments
from glue import segmentsUtils
from glue.ligolw import lsctables
from glue.ligolw import ligolw
from glue.ligolw import table
from glue.ligolw import utils
from pylal.plotsegments import PlotSegmentsPlot
from pylal.grbsummary import multi_ifo_compute_offsource_segment as micos
from pylal import antenna
from pylal.xlal import date
from pylal.xlal.datatypes.ligotimegps import LIGOTimeGPS  # needed in generate_summary below
from pylal import git_version

# the config parser to be used in some of the functions
cp = None
maindir = None

template_trigger_hipe = "./lalapps_trigger_hipe"\
  " --number-buffer-left 8 --number-buffer-right 8"\
  " --verbose --skip-datafind "\
  " --injection-config injectionsWI.ini" \
  " --user-tag onoff"

template_trigger_hipe_inj = "./lalapps_trigger_hipe"\
  " --number-buffer-left 8 --number-buffer-right 8" \
  " --verbose --skip-datafind "\
  " --user-tag inj "\
  " --overwrite-dir"

# list of used IFOs
basic_ifolist = ['H1','H2','L1','V1']

# some predefined colors and the run times of S6
colors = itertools.cycle(['b', 'g', 'r', 'c', 'm', 'y'])

# specify the run times during S6; the end of S6B is adjusted so that
# GRB 100112 falls inside S6B
runtimes = {'A':[931035296,935798487],'B':[937800015,947347215],\
            'C':[949003215,961545615],'D':[961545615, 971654415] }


offset_gps_to_linux = 315964800 # see http://www.epochconverter.com/ for 6 Jan 1980 00:00:00 GPS = 000000000
### template for generating webpages (probably outdated)
total_summary_prefix = """
<body style="color: rgb(0, 0, 0); background-color: rgb(221, 255, 255);" alink="#000099" link="#000099" vlink="#990099">

<h1>Summary of Gamma Ray Burst low-latency results during S6</h1>

<span style="font-weight: bold;"><br><br>
The following table contains a list of Gamma Ray Bursts that occurred during S6, with information about time, position on the sky, as well as duration and redshift (if available). This table has been automatically created by pylal_exttrig_llmonitor (in pylal_exttrig_llutils.py) to show a summary of the low-latency inspiral analysis of the GRBs during S6. A page describing this search can be found in the <a href="https://www.lsc-group.phys.uwm.edu/ligovirgo/cbcnote/S6Plan/090706044855TriggeredSearchLow_Latency_Exttrig_Search#preview">wiki</a>. The page containing Isabel's list of GRB triggers can be found <a href="https://ldas-jobs.ligo.caltech.edu/~xpipeline/S6/grb/online/triggers/S6Agrbs_list.html">here</a>, which might differ from this page. <br><br>

A detailed explanation of the terms, expressions and colors used can be found <a href="s6_exttrig_info.html">here</a>.<br>

Total number of GRBs in this list: %d<br>
Number of GRBs with data: %d <br>
Number of GRBs without data: %d<br>
Number of long GRBs: %d (with data: %d)<br>
Number of short GRBs: %d (with data: %d)<br><br>
Number of completed GRBs: %d (short: %d)<br>
Number of opened GRBs: %d (short: %d)<br><br>

Date of last creation: %s<br><br>

</span><span style="font-weight: bold;">
<br><br>
</div>
<table border="1" cellpadding="2" cellspacing="2">
  <tbody>
  <td style="vertical-align: top; font-weight: bold; font-style: italic; color: rgb(51, 51, 255); background-color: rgb(255, 153, 0);">Nr</td>
  <td style="vertical-align: top; font-weight: bold; font-style: italic; color: rgb(51, 51, 255); background-color: rgb(255, 153, 0);">GRB</td>
  <td style="vertical-align: top; font-weight: bold; font-style: italic; color: rgb(51, 51, 255); background-color: rgb(255, 153, 0);">Status</td>
  <td style="vertical-align: top; font-weight: bold; font-style: italic; color: rgb(51, 51, 255); background-color: rgb(255, 153, 0);">Tag</td>
  <td style="vertical-align: top; font-weight: bold; font-style: italic; color: rgb(51, 51, 255); background-color: rgb(255, 153, 0);">GPS<br>
  <td style="vertical-align: top; font-weight: bold; font-style: italic; color: rgb(51, 51, 255); background-color: rgb(255, 153, 0);">Date<br>
  <td style="vertical-align: top; font-weight: bold; font-style: italic; color: rgb(51, 51, 255); background-color: rgb(255, 153, 0);">redshift<br>
  <td style="vertical-align: top; font-weight: bold; font-style: italic; color: rgb(51, 51, 255); background-color: rgb(255, 153, 0);">duration<br>
  <td style="vertical-align: top; font-weight: bold; font-style: italic; color: rgb(51, 51, 255); background-color: rgb(255, 153, 0);">Coord<br>
  <td style="vertical-align: top; font-weight: bold; font-style: italic; color: rgb(51, 51, 255); background-color: rgb(255, 153, 0);">H1<br>
  <td style="vertical-align: top; font-weight: bold; font-style: italic; color: rgb(51, 51, 255); background-color: rgb(255, 153, 0);">L1<br>
  <td style="vertical-align: top; font-weight: bold; font-style: italic; color: rgb(51, 51, 255); background-color: rgb(255, 153, 0);">V1<br>
  <td style="vertical-align: top; font-weight: bold; font-style: italic; color: rgb(51, 51, 255); background-color: rgb(255, 153, 0);">Sanity<br>
  <td style="vertical-align: top; font-weight: bold; font-style: italic; color: rgb(51, 51, 255); background-color: rgb(255, 153, 0);">Result<br>
  <td style="vertical-align: top; font-weight: bold; font-style: italic; color: rgb(51, 51, 255); background-color: rgb(255, 153, 0);">Box<br>
"""#"
# -----------------------------------------------------
def external_call(command):
    """
    Makes a call to the shell (with the currently set
    environment), waits for completion and returns
    the output and error of the command.
    @param command: command to be executed internally
    @return: a tuple (status, output, error)
    """

    # open the command with the output directed to the pipe
    p = subprocess.Popen(command, shell=True, \
        stdout=subprocess.PIPE, stderr=subprocess.PIPE)

    # wait for the command to complete, and get the output and
    # error-text (if any)
    out, err = p.communicate()

    # get the errorcode, if any; 0 means no error
    errorcode = p.poll()

    return errorcode, out, err
# -----------------------------------------------------
def system_call(item, command, divert_output_to_log = True):
    """
    Makes a system call.
    @param item: a tag specifying the context of the message
        (e.g. the number of the GRB the message is associated with)
        (see also 'info')
    @param command: the command to be executed in the shell
    @param divert_output_to_log: If this flag is set to True the output of the
        given command is automatically put into the log-file.
        If the output of some command itself is further used,
        like science segments, this flag must be set
        to False, so that the output is diverted where it should go.
    """
    l = logfile_name()

    # put the command used into the log file
    info(item, ">>> "+command)

    # and the output (and error) of the command as well
    if divert_output_to_log:
        command_actual = command+' >>%s 2>>%s '%(l,l)
    else:
        command_actual = command +' 2>>%s '%l

    # perform the command
    code, out, err = external_call(command_actual)

    if code>0 and len(err)>0:
        info(item, "ERROR: " +err)

# -----------------------------------------------------
def get_time():
    """
    Returns the current time in human-readable format
    """
    return time.asctime(time.gmtime())

# -----------------------------------------------------
def get_gps_from_asc(date_string, time_string):
    """
    Computes the correct GPS time from the date and time
    as given in text strings.
    @param date_string: date in string format, e.g. 090717
    @param time_string: time in string format, e.g. 19:10:34
    """

    # convert the date and times (as read from the trigger file)
    # into tuples
    a = time.strptime(date_string, "%y%m%d")
    time_list = time_string.split('.')
    b = time.strptime(time_list[0], "%H:%M:%S")
    if len(time_list)==2:
        nsecs = time_list[1]
        nsecs += (9-len(nsecs))*'0'
        nano_seconds = int(nsecs)
    else:
        nano_seconds = 0

    # populate a datetime tuple
    tm = datetime(a[0], a[1], a[2], b[3], b[4], b[5]).timetuple()
    # and pass it, with the last three entries populated as well,
    # to the wrapped XLALUTCToGPS function
    gpstime = date.XLALUTCToGPS(tm)

    return int(gpstime)
# -----------------------------------------------------
def get_main_dir():
    """
    Returns the main directory of the analysis from the
    cp file. If that does not exist, returns the current directory.
    """
    if cp is not None:
        main_dir = cp.get('paths','main')+'/'
    elif maindir is not None:
        main_dir = maindir
    else:
        main_dir = './'
    return main_dir

# -----------------------------------------------------
def logfile_name():
    """
    Returns the name of the logfile; used in 'info' and 'system_call'
    """
    return get_main_dir()+'llmonitor.log'

# -----------------------------------------------------
def info(item, text):
    """
    Prints an info message into the log-file.
    @param item: a tag specifying the context of the message
        (e.g. the number of the GRB the message is associated with)
    @param text: the text to be logged
    """
    msg = get_time() + ' ('+item+'): '+text

    log_file = logfile_name()
    logfile = open(log_file,'a')
    logfile.write(msg+'\n')
    logfile.close()

    print msg

# -----------------------------------------------------
def send_mail(subject, msg, email_addresses = None, extra_addresses = None):
    """
    Function to send an email to a certain address
    @param subject: Subject line of the email
    @param msg: Message body of the email
    @param email_addresses: list of email addresses to which the mail is sent
    @param extra_addresses: extra addresses to which to send the email
    """

    # adjust the message and subject automatically
    message = 'Automatic notification from pylal_exttrig_llmonitor at time '+\
        get_time()+'\n\n'+subject+'\n'+msg
    subject = cp.get('notifications','head') + ': '+subject

    # open file for detailed output message
    tmp_file = '.llmonitor.email'
    f = file(tmp_file,'w')
    f.write(message)
    f.close()

    # select the recipients
    if not email_addresses:
        email_addresses = cp.get('notifications','email').replace(',',' ').split()

    if extra_addresses:
        email_addresses.extend(extra_addresses)

    # send the message to all recipients
    for address in email_addresses:
        command = "mail -s '%s' %s < %s" % (subject, address, tmp_file)
        system_call('email',command)

# -----------------------------------------------------
def notify(grb, dag, message):
    """
    Sends an email notification to all recipients listed
    in the config file.
    @param grb: GRB instance for obtaining some information
    @param dag: the DAG instance the notification refers to
    @param message: the message of the notification
    """

    # construct the subject of the email
    subject = 'Status changed for DAG GRB%s: %s' %\
        (grb.name, message)

    # construct the message for the email
    email_msg = 'Automatic notification from pylal_exttrig_llutils at time %s\n\n'%\
        get_time()
    email_msg += subject+'\n'
    email_msg += 'The analysis dir is %s\n' % grb.analysis_dir
    email_msg += ' and the dagfile is %s\n' % dag.get_outname()

    # send the email to all recipients
    send_mail(subject, email_msg)

    # and note it in the log-file
    info("email"," Email notification sent with the following content: "+\
        email_msg.replace('\n','\n '))

# --------------------------------------
def get_lockname():
    """
    Returns the name of the lock file
    """
    return get_main_dir()+'.llmonitor.lock'

# --------------------------------------
def check_lock():
    """
    Checks if another instance of this code is running.
    See http://code.activestate.com/recipes/546512/
    """
    lockname = get_lockname()
    if os.path.exists(lockname):
        pid = open(lockname, 'r').read().strip()
        pidRunning = commands.getoutput('ls /proc | grep %s' % pid)
        if pidRunning:
            return pid
        else:
            return None

    return None

# --------------------------------------
def set_lock():
    """
    Sets the lock file and writes the PID of this process
    """
    f = open(get_lockname(),'w')
    f.write(str(os.getpid()))
    f.close()

# --------------------------------------
def del_lock():
    """
    Removes the lock file
    """
    if os.path.exists(get_lockname()):
        os.remove(get_lockname())
    info('monitor','Program exited normally')
# --------------------------------------
def get_dag_part(ini_file):
    """
    Gets the dag-name from the ini file.
    This might not be robust, therefore it is
    coded as a separate function which can be changed easily.
    @param ini_file: the name of the ini-file
    @return: the common part of any dag name
    """
    dag_part = ini_file.split('.')[0]
    return dag_part

# --------------------------------------
def check_file(filename):
    """
    Checks the existence of a file and that it is non-zero in size
    (which is useful for segment files...)
    @param filename: name of the file to check
    @return: True or False
    """

    # check the existence
    if not os.path.exists(filename):
        return False

    # check the size
    size = os.path.getsize(filename)
    if size==0:
        return False
    else:
        return True

# -----------------------------------------------------
def get_minimum_scienceseg_length(cp):
    """
    Calculates the minimum science segment length that
    can be used with the data given in the actual ini-file.
    The procedure below is taken from trigger_hipe.
    @param cp: the config parser instance
    """

    # the following is just a copy-and-paste from trigger_hipe
    paddata = int(cp.get('data', 'pad-data'))
    if cp.has_option('data', 'segment-length'):
        n = int(cp.get('data', 'segment-length'))
        s = int(cp.get('data', 'number-of-segments'))
        r = int(cp.get('data', 'sample-rate'))
        o = int(cp.get('inspiral', 'segment-overlap'))
        length = ( n * s - ( s - 1 ) * o ) / r
        overlap = o / r
    elif cp.has_option('data','block-duration'):
        length = int(cp.get('data','block-duration'))
        overlap = int(cp.get('data','segment-duration'))/2
    else:
        raise ValueError, "Cannot find segment information in [data] section of ini file."

    minsciseg = length + 2 * paddata

    # return the result
    return minsciseg
# -----------------------------------------------------
def convert_segxml_to_segtxt(segxmlfile, segtxtfile):
    """
    Converts a segment xml file into a segment text file for convenience.
    """
    # try to open the file
    try:
        doc = utils.load_filename(segxmlfile)
    except:
        raise IOError, "Error reading file %s" % segxmlfile

    # extract the segment list
    segs = table.get_table(doc, "segment")
    seglist = segments.segmentlist(segments.segment(s.start_time, s.end_time) for s in segs)

    # and store it to a file
    segmentsUtils.tosegwizard(file(segtxtfile, 'w'), seglist, header = True)

# --------------------------------------
def read_xmlsegfile(xmlsegfile):
    """
    Function to read a segment list from an xml segment file
    """
    # open the file as a document and extract the segments
    doc = utils.load_filename(xmlsegfile)
    segs = table.get_table(doc, "segment")
    vetolist = segments.segmentlist(segments.segment(s.start_time, s.end_time) for s in segs)
    return vetolist

# --------------------------------------
def update_segment_lists(segdict, timerange, tag = None, outputdir = '.'):
    """
    Function to download the latest segment lists.
    @param segdict: the names of the segments for each IFO
    @param timerange: the timerange the segments should cover
    @param tag: optional parameter indicating the tag (e.g. 'grb090802')
    @param outputdir: optional parameter indicating the output directory.
    """

    # add an underscore in front of the tag
    if tag is not None:
        tag = '_'+tag
    else:
        tag = ''

    # loop over each IFO and the associated segment
    for ifo, seg in segdict.iteritems():

        # create the filenames
        segxmlfile = "%s/segments%s%s.xml" % (outputdir, ifo, tag)
        segtxtfile = "%s/%s-science%s.txt" % (outputdir, ifo, tag)

        if not check_file(segxmlfile):

            cmd = "ligolw_segment_query --database --query-segments --include-segments '%s'"\
                " --gps-start-time %d --gps-end-time %d > %s" %\
                (seg, timerange[0], timerange[1], segxmlfile)

            pas = AnalysisSingleton()
            pas.system(cmd, item = tag[4:], divert_output_to_log = False)

        # 'convert' the data from the xml format to a convenient format
        convert_segxml_to_segtxt(segxmlfile, segtxtfile)

# -----------------------------------------------------
def update_veto_lists(veto_definer, timerange, path = '.', tag = None):
    """
    Function to update the veto files for a given time range
    @param veto_definer: veto definer file to use
    @param timerange: time range to download the vetoes for
    @param path: output path for the segment files [optional]
    @param tag: tag for the output files [optional]
    """

    # add an underscore in front of the tag
    if tag is not None:
        tag = '_'+tag
    else:
        tag = ''

    # prepare the call to get the veto-lists from the database
    pas = AnalysisSingleton()
    cmd = "ligolw_segments_from_cats --database --veto-file=%s --separate-categories "\
        "--gps-start-time %d --gps-end-time %d --output-dir=%s --individual-results"\
        % (veto_definer, timerange[0], timerange[1], path)
    pas.system(cmd, tag[4:])

    # rename the veto files for easier handling
    veto_files = glob.glob('%s/*VETOTIME_CAT*%d*xml'% (path, timerange[0]))
    for filename in veto_files:
        # rename the xml file
        p = filename.split('-')
        newname = "%s-%s%s.xml"%(p[0], p[1], tag)
        shutil.move(filename, newname)

# -----------------------------------------------------
def get_veto_overlaps(segment, xmlsegfile):
    """
    Returns all vetoes from the file 'xmlsegfile' that overlap with the given 'segment'
    """

    # convert the segment into a segments-segment
    testseg = segments.segment(segment)

    # prepare the list of vetoes
    list_vetoes = []

    # load the content of the veto-file
    xmldoc = utils.load_filename(xmlsegfile, gz = False)
    segs = lsctables.SegmentTable.get_table(xmldoc)
    segdefs = lsctables.SegmentDefTable.get_table(xmldoc)

    # create a mapping between the segments and their definitions
    defdict = {}
    for segdef in segdefs:
        defdict[segdef.segment_def_id] = segdef.name

    # loop over each segment
    for seg in segs:

        # need to convert to a segment first ...
        s = segments.segment(seg.start_time, seg.end_time)

        # store the veto details if it overlaps with the
        # given segment (e.g. the onsource segment)
        if testseg.intersects(s):
            id = seg.segment_def_id
            list_vetoes.append([defdict[id], seg.start_time, seg.end_time])

    return list_vetoes

# -----------------------------------------------------
def check_veto_time(used_ifos, list_cat, timerange, path = '.', tag = None):
    """
    Function to check if the given timerange overlaps with some CAT veto
    @param used_ifos: a list of used IFOs for which SCIENCE data is available
    @param list_cat: a list of numbers specifying the categories that should be checked
    @param timerange: the range of time that should be checked
    @param path: output path for the segment files [optional]
    @param tag: tag for the output files [optional]
    """

    pas = AnalysisSingleton()

    # add an underscore in front of the tag
    if tag is not None:
        tag = '_'+tag
    else:
        tag = ''

    # loop over each IFO and check if the onsource overlaps a veto
    clear_ifos = []
    for ifo in used_ifos:

        # loop over all the CATs
        vetoed_ifos = set()
        vetoed_cats = set()
        for cat in list_cat:

            # create the filename
            xmlsegfile = "%s/%s-VETOTIME_CAT%d%s.xml" % \
                (path, ifo, cat, tag)
            vetolist = read_xmlsegfile(xmlsegfile)
            vetolist.coalesce()

            # check for overlaps, and give a detailed list of the veto details
            list_overlaps = get_veto_overlaps(timerange, xmlsegfile)
            for name, segstart, segend in list_overlaps:
                pas.info("   - IFO %s vetoed from %d to %d by CAT%d: %s"%\
                    (ifo, segstart, segend, cat, name), tag[4:])
            if vetolist.intersects_segment(segments.segment(timerange)):
                vetoed_ifos.add(ifo)
                vetoed_cats.add(cat)

        # check if the detector is being vetoed
        if len(vetoed_ifos)==0:
            clear_ifos.append(ifo)
        else:
            pas.info("IFO(s) %s vetoed by CAT(s): %s" %\
                (list(vetoed_ifos), list(vetoed_cats)), tag[4:])

    return clear_ifos

# -----------------------------------------------------
def get_segment_info(pas, timerange, minsciseg, plot_segments_file = None, path = '.', tag = None, segs1 = False):
    """
    Function to get the segment info for a timerange
    @param pas: the AnalysisSingleton instance
    @param timerange: the range of time the SCIENCE segments should be checked
    @param minsciseg: the minimum time length (in seconds) for an analyzable consecutive segment
    @param plot_segments_file: name of the output file for the segment plot [optional]
    @param path: output path for the segment files (NOT the image) [optional]
    @param tag: tag for the files [optional]
    @param segs1: flag to subtract the CAT1 veto times from the science segments beforehand [optional]
    """
    pas = AnalysisSingleton()

    # add an underscore in front of the tag
    if tag is not None:
        tag = '_'+tag
    else:
        tag = ''

    # prepare a segment dict
    segdict = segments.segmentlistdict()

    # get the segment dicts, check ALL ifos
    for ifo in basic_ifolist:
        if not pas.cp.has_option('segments','%s-segments'%ifo.lower()):
            continue
        ifo_segfile = '%s/%s-science%s.txt' % (path, ifo, tag)
        if ifo_segfile is not None:
            tmplist = segmentsUtils.fromsegwizard(open(ifo_segfile))
            segdict[ifo] = segments.segmentlist([s for s in tmplist \
                if abs(s) > minsciseg])

        # in case a CAT1 veto needs to be applied beforehand
        # (i.e. before the data availability check), do it here
        if segs1:

            # create the filename
            xmlsegfile = "%s/%s-VETOTIME_CAT1%s.xml" % \
                (path, ifo, tag)
            vetoes1 = read_xmlsegfile(xmlsegfile)
            vetoes1.coalesce()

            # check for overlaps, and give a detailed list of the veto details
            list_overlaps = get_veto_overlaps(timerange, xmlsegfile)
            for name, segstart, segend in list_overlaps:
                pas.info("   - CAT1 preveto for IFO %s, vetoed from %d to %d: %s"%\
                    (ifo, segstart, segend, name), tag[4:])

            # 'subtract' the CAT1 vetoes from the SCIENCE segments
            segdict[ifo] -= vetoes1

    ifolist = segdict.keys()
    ifolist.sort()

    # create the onsource segment
    onSourceSegment = segments.segment(timerange[0], timerange[1])

    # convert the config strings to integers
    pas = AnalysisSingleton()
    padding_time = int(pas.cp.get('exttrig','padding_time'))
    num_trials = int(pas.cp.get('exttrig','num_trials'))
    symmetric = False
    offSourceSegment, grb_ifolist = micos(segdict, onSourceSegment,\
        padding_time = padding_time, max_trials = num_trials,\
        min_trials = num_trials, symmetric = symmetric)

    grb_ifolist.sort()
    ifo_times = "".join(grb_ifolist)

    # make a plot of the segments if required
    if plot_segments_file:
        plot_segment_info(segdict, onSourceSegment, offSourceSegment, timerange[1]-1, plot_segments_file)

    # return the essential data
    return offSourceSegment, grb_ifolist, ifo_times

# -----------------------------------------------------
def plot_segment_info(segdict, onsource, offsource, centertime, output_filename, plot_offset = 1000, tag = ''):
    """
    Function to plot the segments around a 'centertime'
    @param segdict: dictionary containing the segments of the science data
    @param onsource: the onsource segment
    @param offsource: the offsource segment
    @param centertime: the time at which the origin is set
    @param output_filename: output filename of the plot
    @param plot_offset: additional time (in seconds) on either side of the range
    @param tag: full tag denoting e.g. the GRB (with the underscore before)
    """

    # get some basic information
    pas = AnalysisSingleton()
    num_trials = int(pas.cp.get('exttrig','num_trials'))

    # calculate the times
    length_off_source = num_trials*(abs(onsource))
    plot_offSourceSegment = segments.segment(onsource[0] - length_off_source,
        onsource[1] + length_off_source)

    effective_window = segments.segmentlist([plot_offSourceSegment]).\
        protract(plot_offset)
    effective_segdict = segdict.map(lambda sl: sl & effective_window)

    # create the plot
    plot = PlotSegmentsPlot(centertime)
    plot.add_contents(effective_segdict)
    if offsource:
        plot.set_window(offsource, plot_offset)
    plot.highlight_segment(onsource)
    plot.finalize()
    plot.ax.set_title('Segments for GRB '+tag[4:])
    plot.savefig(output_filename)
    plot.close()

# -----------------------------------------------------
def get_available_ifos(trigger, minscilength, path = '.', tag = '', useold = False, offset = 2000, onsource = None):
    """
    Function for a full-scale check of how many IFOs are available for a given time.
    Requires the cp of the AnalysisSingleton to be set, with fields like
    'segments','[ifo]-segments', 'exttrig','onsource_left', 'exttrig','onsource_right',
    'exttrig','padding_time', 'exttrig','num_trials' and 'exttrig','cvs_veto_definer'.
    """

    trend_ifos = []

    # get the Pylal Analysis Singleton
    pas = AnalysisSingleton()

    # make a cross check
    trial_length = int(pas.cp.get('exttrig','onsource_left')) + int(pas.cp.get('exttrig','onsource_right'))
    padding_time = int(pas.cp.get('exttrig','padding_time'))
    num_trials = int(pas.cp.get('exttrig','num_trials'))
    check_scilength = (num_trials+1)*trial_length + 2*padding_time

    if minscilength != check_scilength:
        raise AssertionError, "Inconsistent science length requirement! "\
            "Actual requirement is %d seconds, while num_trials=%d suggests %d seconds."%\
            (minscilength, num_trials, check_scilength)

    # get the science segment specifier from the config file
    seg_names = {}
    for ifo in basic_ifolist:
        if pas.cp.has_option('segments','%s-segments'%ifo.lower()):
            seg_names[ifo] = pas.cp.get('segments','%s-segments'%ifo.lower())

    # update the science segments around the trigger time
    timerange = [ trigger - offset, trigger + offset]
    update_segment_lists(seg_names, timerange, tag = tag, outputdir = path)

    # check if enough data is available
    if onsource is None:
        onsource = [trigger - int(pas.cp.get('exttrig','onsource_left')), \
            trigger + int(pas.cp.get('exttrig','onsource_right'))]
    offsource, ifolist, ifotimes = get_segment_info(pas, onsource, minscilength, tag = tag, path = path)
    trend_ifos.append(ifolist)

    # check the vetoes if there is enough data
    if len(ifolist)>1:

        # define some time ranges
        deltat = 500
        starttime = offsource[0]-deltat
        endtime = offsource[1]+deltat
        duration = endtime-starttime

        # check if these files are available
        avail = True
        for ifo in ifolist:
            for cat in [1,2,3]:
                xmlsegfile = "%s/%s-VETOTIME_CAT%d_%s.xml" % (path, ifo, cat, tag)
                if not os.path.exists(xmlsegfile): avail = False

        # update the veto list if required or if files are missing
        if not useold or not avail:
            veto_definer_file_url = pas.cp.get('exttrig','cvs_veto_definer')
            veto_definer_file, headers = urllib.urlretrieve(veto_definer_file_url, os.path.basename(veto_definer_file_url))
            update_veto_lists(veto_definer_file, [starttime, endtime], \
                tag = tag, path = path)

        # read all CAT1 veto lists into a dictionary
        segsdict = {}
        for ifo in ifolist:
            xmlsegfile = "%s/%s-VETOTIME_CAT1_%s.xml" % (path, ifo, tag)
            segsdict[ifo] = read_xmlsegfile(xmlsegfile)
            segsdict[ifo].coalesce()

        # do the segment check again, including the CAT1 segs
        outname = 'plot_segments_%s.png' % tag
        offsource, ifolist, ifotimes = get_segment_info(pas, onsource, minscilength, plot_segments_file = outname, \
            segs1 = True, tag = tag, path = path)
        trend_ifos.append(ifolist)

        # check any CAT2/3 interference with the onsource
        new_ifos = check_veto_time(ifolist, [2,3], onsource, tag = tag, path = path)
        nifos = "".join(new_ifos)
        trend_ifos.append(new_ifos)

        # return the list of available IFOs and the offsource segment
        return new_ifos, onsource, offsource, trend_ifos

    else:
        return ifolist, onsource, offsource, trend_ifos
# -----------------------------------------------------
def read_adjusted_onsource(filename):
    """
    Reads the adjusted onsource times for GRBs inspected manually.
    Uses the simple file format.
    """

    grbs = {}
    refdict = {}
    # loop over the lines of the file
    for linex in file(filename):

        # take out the \n and split up the line
        line = linex.replace('\n','')
        w = line.split()

        # reject any inline comments or empty lines
        if len(linex)<3 or linex[0]=='#':

            # fill the reference dict if this happens to be a reference entry
            if 'REF' in linex:
                refdict[int(w[2])] = w[3]
            continue

        # read the information
        name = w[0]
        try:
            start = int(w[1])
            end = int(w[2])
            used = True
        except:
            used = False

        comment = " ".join(w[3:])

        if used:
            grbs[name] = {'onsource':[start, end], 'used':used,\
                'comment':comment}
        else:
            grbs[name] = {'onsource':None, 'used':used, 'comment':comment}

    # return the list of checks and the reference dict
    return grbs, refdict

# -----------------------------------------------------
def read_adjusted_onsource_long(filename):
    """
    Reads the adjusted onsource times for GRBs inspected manually.
    Uses the Jordi-type file format.
    """

    grbs = {}
    # loop over the lines of the file
    for line in file(filename):
        w = line.split()
        if len(w)<7:
            continue

        # read the information
        name = w[1]
        try:
            number = int(name[:6])
        except:
            continue

        try:
            start = int(w[3])
            end = int(w[5])
            used = True
        except:
            used = False

        comment = line[40:].replace('\n','').replace('|','').strip()

        if used:
            grbs[name] = {'onsource':[start, end], 'used':used,\
                'comment':comment}
        else:
            grbs[name] = {'onsource':None, 'used':used, 'comment':comment}

    return grbs

# -----------------------------------------------------
def parse_trigger_list(trigger_file, processed = [], max_number = None, specific_name = None):
    """
    This function parses the GRB list provided by Isabel
    and returns a list of new GRBs.
    @param trigger_file: the name of the trigger file to parse
    @param processed: list of triggers already processed [optional]
    @param max_number: returns at maximum this number of new triggers [optional]
    @param specific_name: will return a list with only the trigger information
        for this specific item, if it is found [optional]
    """

    # prepare the list of new triggers
    counter = 0
    new_triggers = {'name':[], 'ra':[], 'de':[], 'box':[],'gps':[],\
        'duration':[], 'sat':[]}

    # open the file
    for line in file(trigger_file):

        # leave out any empty or commented line
        if len(line)==0 or line[0]=="#":
            continue

        # check if we have reached the maximum number of GRBs
        # to start in this round
        if max_number:
            if counter>=max_number:
                break

        # extract the useful information
        w = line.split()

        # get the name of the trigger
        grb_name = w[0]

        # skip if this GRB has already been processed
        if grb_name in processed:
            continue

        # check if only a certain GRB should be processed
        if specific_name:
            if grb_name!=specific_name:
                continue

        # we found a new GRB!!

        # check out the duration
        try:
            grb_duration = float(w[8])
        except:
            grb_duration = None

        # check out the error box
        try:
            errorbox = float(w[4])
        except:
            errorbox = None

        # convert the time to GPS
        grb_time = w[6]
        grb_date = grb_name[:6]
        grb_gps_time = get_gps_from_asc(grb_date, grb_time)

        # store it temporarily in a dictionary
        new_triggers['name'].append(grb_name)
        new_triggers['ra'].append(float(w[1]))
        new_triggers['de'].append(float(w[2]))
        new_triggers['box'].append(errorbox)
        new_triggers['gps'].append(grb_gps_time)
        new_triggers['duration'].append(grb_duration)
        new_triggers['sat'].append(w[10])
        counter += 1

    return new_triggers
# --------------------------------------
def get_empty_exttrig_row():
    """
    Returns an empty exttrig row
    @return: empty exttrig table row
    """
    row = lsctables.ExtTriggersTable()

    row.process_id = None
    row.det_alts = None
    row.det_band = None
    row.det_fluence = None
    row.det_fluence_int = None
    row.det_name = None
    row.det_peak = None
    row.det_peak_int = None
    row.det_snr = ''
    row.email_time = 0
    row.event_dec = 0.0
    row.event_dec_err = 0.0
    row.event_epoch = ''
    row.event_err_type = ''
    row.event_ra = 0.0
    row.event_ra_err = 0.0
    row.start_time = 0
    row.start_time_ns = 0
    row.event_type = ''
    row.event_z = 0.0
    row.event_z_err = 0.0
    row.notice_comments = ''
    row.notice_id = ''
    row.notice_sequence = ''
    row.notice_time = 0
    row.notice_type = ''
    row.notice_url = ''
    row.obs_fov_dec = 0.0
    row.obs_fov_dec_width = 0.0
    row.obs_fov_ra = 0.0
    row.obs_fov_ra_width = 0.0
    row.obs_loc_ele = 0.0
    row.obs_loc_lat = 0.0
    row.obs_loc_long = 0.0
    row.ligo_fave_lho = 0.0
    row.ligo_fave_llo = 0.0
    row.ligo_delay = 0.0
    row.event_number_gcn = 0
    row.event_number_grb = ''
    row.event_status = 0
    return row

# --------------------------------------
def get_monitor_filename():
    """
    Returns the name of the monitor pickle file
    @return: name of the monitor pickle file
    """
    return get_main_dir()+'llmonitor.pickle'

# --------------------------------------
def read_monitor_list():
    """
    Opens the monitor pickle file (usually llmonitor.pickle)
    and returns its contents.
    @return: list of GRB instances from the pickle file
    """

    monitor_file = get_monitor_filename()
    try:
        monitor_list = pickle.load(file(monitor_file))
    except IOError:
        # create an empty file if it does not exist
        monitor_list = []
        pickle.dump(monitor_list, file(monitor_file,'w'))
    return monitor_list
# --------------------------------------
def write_monitor_list(monitor_list):
    """
    Writes the monitor list to file
    @param monitor_list: list to be written to file
    """
    monitor_file = get_monitor_filename()
    pickle.dump(monitor_list, file(monitor_file,'w'))

# --------------------------------------
def read_grb_from_list(grb_name):
    """
    Returns the object associated with the given GRB.
    @param grb_name: name of the GRB without the leading 'GRB'
    """
    grb_list = read_monitor_list()
    for grb in grb_list:
        if grb.name==grb_name:
            return grb
    return None

# --------------------------------------
def copy_exttrig_nofications():
    """
    Copies all relevant files to the working directory,
    usually from Isabel's directory at CIT
    """
    alert_loc = cp.get('alerts','alert_location')
    main_loc = cp.get('paths','main')
    cmd = 'scp %s %s >> ~/cp.log 2>&1' % (alert_loc, main_loc)
    system_call('monitor', cmd)

# --------------------------------------
def update_durations(monitor_list):
    """
    Reads the local copy of the parsed circular and
    updates any duration information in the monitor_list structure
    @param monitor_list: list of all GRBs and DAGs
    """
    # name of the circular file
    circular_file = cp.get('paths','main')+'/'+cp.get('alerts','circular_file')

    # read all durations from the circular file
    dict_duration = {}
    for line in file(circular_file):
        parts = line.split()
        grb_name = parts[2]
        duration = float(parts[13])

        # store the duration; a value of zero means it is unknown
        if duration>0:
            dict_duration[grb_name] = duration

    # loop over the list of all processed GRBs
    for grb in monitor_list:
        # update the duration information when available
        if grb.name in dict_duration:
            grb.duration = dict_duration[grb.name]

# --------------------------------------
def obtain_results(grb):
    """
    Obtains the result, i.e. the smallest p(c|0)
    @param grb: the GRB structure with all the information in it
    """

    tag = grb.code['onoff'].tag
    path_to_result = '%s/GRB%s/postprocessing_%s/OPENBOX/llsummary_onoff_GRB%s.pickle' %\
        (grb.analysis_dir, grb.name, tag, grb.name)
    if os.path.exists(path_to_result):
        data = pickle.load(file(path_to_result))
    else:
        info(grb.name, "OPENBOX results file %s does not exist! "\
            "Maybe this is a rerun and the --force-rerun option has been forgotten? "%path_to_result)
        return -1

    min_prob = 2.0
    for coinc in data:
        if 'prob' in coinc:
            p = coinc['prob']
            if p<min_prob:
                min_prob = p

    return min_prob

# -----------------------------------------------------
def generate_summary(publish_path, publish_url):
    """
    Generates the summary page, with all sanity and/or openbox results
    properly linked.
    @param publish_path: main path to where to copy the results and files
    @param publish_url: the url identifier of the same path
    """

    def add(table, text):
        return table + '<td>' +str(text)+'</td>'

    def add_linked_value(table, value, ref):
        if value>0:
            if ref>0:
                table = add(table, '<a href="http://gcn.gsfc.nasa.gov/gcn3/%d.gcn3">%.2f</a>' % (ref, value))
            else:
                table = add(table, '%.2f' % value)
        else:
            table = add(table, '&mdash;')
        return table

    def create_col(l):
        f = 1.0
        if colsign==1:
            f = 0.95
        return '%d, %d, %d'%(f*l[0], f*l[1], f*l[2])

    colsign = -1
    # define the colors to use: cyan, red, gray, dark-yellowish
    coldict = {'analong':[153, 255, 255],'anashort':[255,200,200],'nolong':[100,150,150],'noshort':[130,130,70]}

    # read the list of all processed GRBs
    monitor_list = read_monitor_list()

    short_grb_duration = float(cp.get('analysis','max-duration'))

    # get some statistics
    number_short = number_long = number_data = number_nodata = number_long_data = number_short_data = 0
    number_complete_all = number_complete_short = number_opened_all = number_opened_short = 0
    for grb in monitor_list:
        if grb.has_data:
            number_data += 1
            if grb.duration and grb.duration<short_grb_duration:
                number_short += 1
                number_short_data += 1
                if grb.dag['inj'].status == 5:
                    number_complete_short += 1
                if grb.openbox:
                    number_opened_short += 1
            else:
                number_long += 1
                number_long_data += 1

            if grb.dag['onoff'].status==5:
                number_complete_all += 1
                if grb.openbox:
                    number_opened_all += 1
        else:
            number_nodata += 1
            if grb.duration and grb.duration<short_grb_duration:
                number_short += 1
            else:
                number_long += 1

    # bring them into timely order
    time_unsort = [grb.time for grb in monitor_list]
    index = np.argsort(time_unsort)
    num_grb = len(time_unsort)

    table = total_summary_prefix % ( len(monitor_list), number_data, number_nodata, number_long, \
        number_long_data, number_short, number_short_data, \
        number_complete_all, number_complete_short, \
        number_opened_all, number_opened_short, get_time())

    # loop over all GRBs in reverse order
    for number, i in enumerate(index[::-1]):

        grb = monitor_list[i]

        # make the table background coloring
        if grb.duration and grb.duration<short_grb_duration:
            if grb.has_data:
                coldef = create_col(coldict['anashort'])
            else:
                coldef = create_col(coldict['noshort'])
        else:
            if grb.has_data:
                coldef = create_col(coldict['analong'])
            else:
                coldef = create_col(coldict['nolong'])

        colsign = -colsign
        table += '<tr style="background-color: rgb(%s);">' % coldef

        # check if the GRB has some data at all
        if grb.has_data:
            status_onoff = grb.dag['onoff'].get_status()
            status_inj = grb.dag['inj'].get_status()
        else:
            status_onoff = status_inj = 0
        ifos = "".join(grb.ifolist)

        # put the table together
        table = add(table, num_grb - number)
        table = add(table, '<a href="http://grblog.org/grblog.php?view=burst&GRB=%s">%s</a>'%(grb.name, grb.name))
        status_msg = grb.get_html_status()
        table = add(table, status_msg['onoff']+'<br>'+status_msg['inj'])
        try:
            tag_onoff = grb.code['onoff'].get_tag()
        except:
            tag_onoff = 'None'
        try:
            tag_lik = grb.code['inj'].get_tag()
        except:
            tag_lik = 'None'
        table = add(table, tag_onoff+'<br>'+tag_lik)
        table = add(table, grb.time)
        tm = date.XLALGPSToUTC(LIGOTimeGPS(grb.time))
        asctime = time.strftime("%d %b %Y\n%H:%M:%S", tm)
        table = add(table, asctime)
        table = add_linked_value(table, grb.redshift, None )
        table = add_linked_value(table, grb.duration, None)
        table = add(table, '%.2f<br>%.2f' % (grb.ra, grb.de))
        for ifo in basic_ifolist:
            segplot_link = 'GRB%s/plot_segments_grb%s.png'%(grb.name, grb.name)

            if ifo in grb.ifos:
                txt = '<b>%.2f</b>'%grb.qvalues[ifo]
            else:
                txt = '%.2f'%grb.qvalues[ifo]
            table = add(table, '<a href="%s">%s</a>'%(segplot_link, txt))

        if status_onoff==5:

            # add links to the sanity pages
            htmlfile = publish_url+'/GRB%s/pylal_exttrig_llsummary_%s-sanity.html' % (grb.name, grb.name)
            htmlfile_inj = publish_url+'/GRB%s/pylal_exttrig_llsummary_%s-sanity_inj.html' % (grb.name, grb.name)
            if status_inj==5:
                table = add(table, '<a href="%s">onoff</a><br> <a href="%s">inj</a> '%(htmlfile, htmlfile_inj))
            else:
                table = add(table, '<a href="%s">onoff</a><br> &mdash; '%htmlfile)

            # add the link to the box
            if grb.openbox:
                # add the result
                result = obtain_results(grb)
                if result<2:
                    table = add(table, '%.2f'%result)
                else:
                    table = add(table, 'no cand.')

                # and the link to the openbox details
                htmlfile = publish_url+'/GRB%s/OPENBOX/pylal_exttrig_llsummary_%s-OPENBOX.html' % \
                    (grb.name, grb.name)
                htmlfile_inj = publish_url+'/GRB%s/OPENBOX/%s-pylal_exttrig_llsummary_GRB%s_inj-%s.html' %\
                    (grb.name, ifos, grb.name, grb.get_time_string())
                if status_inj==5:
                    table = add(table, '<a href="%s">onoff</a><br> <a href="%s">lik</a> '%(htmlfile, htmlfile_inj))
                else:
                    table = add(table, '<a href="%s">onoff</a><br> &mdash; '%htmlfile)
            else:
                # box closed otherwise
                table = add(table,'box closed')
                table = add(table,'box closed')
        else:
            # no data available if the analysis is not finished
            table = add(table, '&mdash;')
            table = add(table, '&mdash;')
            table = add(table, '&mdash;')

        table += '</tr>'

    # write out the complete html file
    filename = publish_path +'/total_summary.html'
    f = open(filename,'w')
    f.write(table)
    f.close()

# -----------------------------------------------------
def get_code_tag():
    """
    Returns the name of the tag currently stored in an environment variable
    @return: name of the tag
    """
    # get the tag information (which is the actual, most current tag)
    tag = os.getenv('LAL_PYLAL_TAG')
    if not tag:
        del_lock()
        raise EnvironmentError, "Environment variable LAL_PYLAL_TAG is missing, which contains the "\
            "tag of the code used, e.g. s6_exttrig_100119b. This should have been set in the "\
            "lscsource script, called within runmonitor. Please check."
    return tag

# -----------------------------------------------------
def get_env(name, required = False):
    """
    Returns the content of the given environment variable.
    @param name: name of the environment variable
    @param required: if True, raise an error when the variable is not set
    """

    # get the content of the variable
    content = os.getenv(name)

    # is it required?
    if required and not content:
        raise ValueError, "Environment variable '%s' needs to be set!"%name

    return content

# -----------------------------------------------------
# -----------------------------------------------------
class Singleton(object):

    def __new__(cls, *args, **kw):
        if not hasattr(cls, '_instance'):
            orig = super(Singleton, cls)
            cls._instance = orig.__new__(cls, *args, **kw)
        return cls._instance
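# -----------------------------------------------------
# Illustrative sketch (editor's addition): the Singleton base class
# always hands back the same instance, which is what lets the
# AnalysisSingleton below share one configuration and logfile across
# the whole module.
def _example_singleton():
    a = Singleton()
    b = Singleton()
    print a is b    # True: both names refer to the same instance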
# -----------------------------------------------------
# -----------------------------------------------------
class AnalysisSingleton(Singleton):

    # -----------------------------------------------------
    def __init__(self):
        if not hasattr(self, 'init'):
            self.read_basic_setup()

    # -----------------------------------------------------
    def read_basic_setup(self):
        self.init = True

        import socket

        # set all the basic things
        self.hostname = socket.gethostname()
        self.publishing_path = get_env('USER_PUB')
        self.publishing_url = get_env('USER_URL')
        self.cvs = get_env('USER_CVS')
        self.condor_log_path = get_env('USER_LOG')
        self.email = get_env('USER_EMAIL')

    # -----------------------------------------------------
    def set_cp(self, cp):
        self.cp = cp

    # -----------------------------------------------------
    def get_cp(self):
        return self.cp

    # -----------------------------------------------------
    def set_logfile(self, logfile):
        self.logfile = logfile

    # -----------------------------------------------------
    def get_logfile(self):
        return self.logfile

    # -----------------------------------------------------
    def info(self, text, item = None, onscreen = True):
        """
        Puts some information into the logfile.
        @param text: the information to be put out
        @param item: optional text specifying the context of the message (e.g. the GRB name)
        @param onscreen: optional flag to have the message printed to stdout as well
        """

        if item is None:
            msg = get_time() + ': '+text
        else:
            msg = get_time() + ' ('+item+'): '+text

        logfile = open(self.logfile,'a')
        logfile.write(msg+'\n')
        logfile.close()

        # write the message out on the screen as well
        if onscreen:
            print msg

    # -----------------------------------------------------
    def system(self, cmd, item = None, divert_output_to_log = True):
        """
        Makes a system call.
        @param cmd: the command to be executed in the shell
        @param item: a tag specifying the context of the message
            (e.g. the number of the GRB the message is associated with)
            (see also 'info')
        @param divert_output_to_log: If this flag is set to True the output of the
            given command is automatically put into the log-file.
            If the output of some command itself is further used,
            like science segments, this flag must be set
            to False, so that the output is diverted where it should go.
        """

        # put the command used into the log file
        self.info(">>> "+cmd, item)

        # and the output (and error) of the command as well
        if divert_output_to_log:
            command = cmd+' >>%s 2>>%s '%(self.logfile, self.logfile)
        else:
            command = cmd +' 2>>%s '%self.logfile

        # perform the command
        code, out, err = external_call(command)

        # check if there was an error
        if code>0 and len(err)>0:
            self.info("ERROR: " +err, item)

    # --------------------------------------
    def get_lockname(self):
        """
        Returns the name of the lock file
        """
        return os.path.expanduser('~/.llmonitor.lock')

    # --------------------------------------
    def check_lock(self):
        """
        Checks if another instance of this code is running.
        See http://code.activestate.com/recipes/546512/
        """
        lockname = self.get_lockname()
        if os.path.exists(lockname):
            pid = open(lockname, 'r').read().strip()
            pidRunning = commands.getoutput('ls /proc | grep %s' % pid)
            if pidRunning:
                return pid
            else:
                return None

        return None

    # --------------------------------------
    def set_lock(self):
        """
        Sets the lock file and writes the PID of this process
        """
        if self.check_lock() is not None:
            print "ERROR: Program seems to be running"
            sys.exit(0)

        f = file(self.get_lockname(),'w')
        f.write(str(os.getpid()))
        f.close()

    # --------------------------------------
    def del_lock(self):
        """
        Removes the lock file
        """
        if os.path.exists(self.get_lockname()):
            os.remove(self.get_lockname())
        self.info('Program exited normally', 'monitor')


# -----------------------------------------------------
# -----------------------------------------------------
class CodeTagger(object):

    def __init__(self):
        # consider the case that there is no tag (i.e. for testing purposes);
        # then take the branch name (like s6_exttrig)
        if git_version.tag:
            self.tag = git_version.tag
        else:
            self.tag = git_version.branch
        self.verbose = git_version.verbose_msg
        self.id = git_version.id
        self.status = git_version.status

        # make a consistency check
        self.consistency_check()

    def consistency_check(self):
        """
        Makes a consistency check of the tag used
        """
        tag_env = get_code_tag()
        if tag_env != self.tag:
            print "WARNING: The tag from git_version is %s"\
                " while the tag from LAL_PYLAL_TAG is %s."\
                " Will use the latter one."%(self.tag, tag_env)

            self.tag = tag_env

    def get_tag(self):
        if self.tag: return self.tag
        else: return 'None'

# -----------------------------------------------------
# -----------------------------------------------------
# -----------------------------------------------------
class AnalysisDag(object):
    """
    Class to hold and handle an analysis DAG and all
    related information.
    """

    # -----------------------------------------------------
    def __init__(self, name, type, analysis_dir):
        """
        Initializes this class with all the needed information
        @param name: name of the GRB
        @param type: what dag is this? onoff/inj
        #@param stage: stage of the dag, like uberdag or ligolwdag
        #@param inifile: inifile for this DAG
        #@param injfile: injection file for this DAG
        @param analysis_dir: path to the analysis directory
        """

        # store the input data
        self.name = name
        self.type = type
        self.analysis_dir = analysis_dir

        self.dagname = None

        self.status = 0
        self.status_dict = {1:'inspiral',2:'ligolw',3:'postproc'}

        self.code_list = []

    # --------------------------------------
    def set_dagname(self, name):
        """
        Sets the current name of the DAG
        @param name: name of the .dag file
        """
        self.dagname = name

    # --------------------------------------
    def get_outname(self):
        """
        Returns the outname of this DAG
        """
        return self.dagname+'.dagman.out'

    # --------------------------------------
    def get_dagname(self):
        """
        Returns the name of the DAG file
        """
        return self.dagname

    # --------------------------------------
    def get_shname(self):
        """
        Returns the name of the sh file
        """
        basename = self.dagname[:-4]
        return basename+'.sh'

    # --------------------------------------
    def start(self):
        """
        Starts this DAG
        """

        # create the call to start the DAG
        dir = os.path.dirname(self.get_dagname())
        dagname = os.path.basename(self.get_dagname())
        cmd = 'export _CONDOR_DAGMAN_LOG_ON_NFS_IS_ERROR=FALSE;'
        cmd += 'cd %s;' % dir
        cmd += 'condor_submit_dag %s' % dagname
        system_call(self.name, cmd)

        # change the status
        self.status = 1

        # let condor have some time...
        time.sleep(10)

    # --------------------------------------
    def get_status(self):
        return self.status

    # --------------------------------------
    def set_status(self, new_status):
        if new_status<=0:
            del_lock()
            raise ValueError, "The DAG status variable can only be set to positive values"

        self.status = new_status

    # --------------------------------------
    def get_stage_name(self):
        """
        Returns the name of the stage or error of the current DAG.
        """
        status_dict = {1:'inspiral',2:'ligolw',3:'postproc'}
        # check the status number and choose the text
        text = ''
        if self.status==0:
            text = 'Not started'
        elif self.status==5:
            text = 'Complete'
        elif self.status==-6:
            text = 'DAGFILE ERROR'
        else:
            text = status_dict[abs(self.status)]
            if self.status<0:
                text += "ERROR"

        return text

    # --------------------------------------
    def check_status(self, grb):
        """
        Updates the status of this DAG
        and returns the fstat value
        """

        # try to open the dagman.out file
        fstat = 0
        try:
            # read the last line only
            line = file(self.get_outname()).readlines()[-1]
            # check if the status is 0 for a completed DAG;
            # the status can be 1 (error) or 2 (user delete) otherwise (and more)
            if "EXITING WITH STATUS" in line:
                if "EXITING WITH STATUS 0" in line:
                    fstat = 1
                else:
                    fstat = -1
        except IOError:
            fstat = -2

        # change the status if the DAG was running before
        if self.status>0:
            if fstat<0:
                # set the status to error
                self.status = -self.status

                if fstat == -1:
                    notify(grb, self, 'DAG exited on error')
                elif fstat==-2:
                    notify(grb, self, 'DAG file vanished!?')

        # change the status to NON-error if everything is ok
        if fstat>=0 and self.status<0:
            self.status = -self.status

        return fstat
1661 1662 1663 1664 1665 1666 1667 # ----------------------------------------------------- 1668 # -----------------------------------------------------
1669 -class GRB(object):
1670 """ 1671 Class holding all the infos for a GRB and for setting up 1672 a new analysis DAG. 1673 """ 1674 1675 # -----------------------------------------------------
    def __init__(self, grb_name=None, grb_ra=None, grb_de=None, grb_time=None, errorbox=None, sat=None):
        """
        Initializes the GRB class with a basic set of information
        @param grb_name: the name of the GRB without the term 'GRB' (e.g. 070201)
        @param grb_ra: right ascension of this GRB given in degrees
        @param grb_de: declination of this GRB given in degrees
        @param grb_time: GPS trigger time of the GRB
        @param errorbox: size of the errorbox as stated in Isabel's list
        @param sat: name of the satellite providing these data
        """
        self.name = grb_name   # just the number, e.g. 091023C
        self.ra = float(grb_ra)
        self.de = float(grb_de)
        self.time = int(grb_time)

        # additional GRB information
        self.ifos = ''
        self.errorbox = errorbox
        self.duration = None
        self.redshift = None
        self.sat = sat
        self.starttime = None
        self.endtime = None
        self.openbox = False
        self.openbox_fap = None

        # prepare the DAG instances
        self.dag = {'onoff': None, 'inj': None}

        # prepare the code tag handling instances
        self.code = {'inspiral': None, 'onoff': None, 'inj': None}

        # prepare variables for later use
        self.qvalues = {}
        self.offsource_segment = None
        self.onsource_segment = None
        self.ifolist = []

        # create the PAS instance
        self.pas = AnalysisSingleton()
        self.cp = self.pas.get_cp()

        # datafind variables
        self.use_offline_data = True
        #self.type_online = {'H1':self.cp.get('data','channel_online_H1'), 'L1':self.cp.get('data','channel_online_L1'), 'V1':self.cp.get('data','channel_online_V1')}
        self.type_online = None
        self.type_offline = {'H1': 'H1_'+self.cp.get('input','ligo-type'), 'L1': 'L1_'+self.cp.get('input','ligo-type'), 'V1': self.cp.get('input','virgo-type')}
    # -----------------------------------------------------
    def set_paths(self, input_dir=None, main_dir=None,
                  ini_file=None, inj_file=None,
                  config_file=None,
                  condor_log_path=None, log_file=None):
        """
        Set paths for this GRB, like the path to the main directory and the analysis directory
        @param input_dir: path to the CVS directory
        @param main_dir: main directory for the whole online analysis
        @param ini_file: the name of the ini-file for the inspiral analysis
        @param inj_file: name of the ini-file for the injections
        @param config_file: name of the config file used
        @param condor_log_path: path for the condor log files
        @param log_file: name of the llmonitor log file (usually llmonitor.log)
        """
        self.input_dir = input_dir
        self.main_dir = main_dir
        self.inifile = ini_file
        self.injfile = inj_file
        self.condor_log_path = condor_log_path
        self.log_file = log_file
        self.config_file = config_file

        # construct the GRB specific analysis directory
        self.analysis_dir = self.main_dir+'/GRB'+self.name
    # -----------------------------------------------------
    def get_pylal_dir(self):
        return os.getenv('PYLAL_LOCATION')

    # -----------------------------------------------------
    def get_lalapps_dir(self):
        return os.getenv('LALAPPS_LOCATION')

    # -----------------------------------------------------
    def get_glue_dir(self):
        return os.getenv('GLUE_LOCATION')

    # -----------------------------------------------------
    def set_addresses(self, addresses):
        """
        Set addresses for the online notification
        @param addresses: list of email addresses to use
        """
        self.addresses = addresses
    # -----------------------------------------------------
    def get_basic_dagname(self):
        """
        Constructs the basic dag name
        @return: name of the dagfile without the '.dag' extension
        """
        return self.analysis_dir+'/'+self.inifile[:-4]
    # -----------------------------------------------------
    def get_time_string(self):
        """
        Returns the standard suffix used for plots and html files,
        containing the GPS starttime and the length of the processed data.
        Example: 935460888-51000
        @return: standard GPS time suffix for file naming
        """
        timestring = str(self.starttime)+'-'+str(self.endtime-self.starttime)
        return timestring
    # -----------------------------------------------------

    # -----------------------------------------------------
    def create_exttrig_xml_file(self):
        """
        Creates an exttrig xml file with the basic information
        of the GRB in it.
        Data are given as strings or floats.
        """

        #
        # First, create a dummy XML file with the information in it
        #

        # prepare a new XML document
        xmldoc = ligolw.Document()
        xmldoc.appendChild(ligolw.LIGO_LW())
        tbl = lsctables.New(lsctables.ExtTriggersTable)
        xmldoc.childNodes[-1].appendChild(tbl)

        # set the values we need
        row = get_empty_exttrig_row()
        row.event_ra = float(self.ra)
        row.event_dec = float(self.de)
        row.start_time = int(self.time)
        row.event_number_gcn = 9999
        row.event_number_grb = self.name

        # insert into the table
        tbl.extend([row])

        # write out the trigger file
        self.trigger_file = 'grb%s.xml' % self.name
        utils.write_filename(xmldoc, self.trigger_file)
    # -----------------------------------------------------
    def create_call_datafind_online(self, starttime, endtime, ifo, output_location):
        """
        Creates a call to 'lalapps_online_datafind' to find
        the data. To be used only for data from the last ~2 weeks...
        """
        executable = self.get_lalapps_dir()+'/bin/lalapps_online_datafind'

        cmd = "%s --ifo %s --gps-start-time %d --gps-end-time %d --output %s" % \
            (executable, ifo, starttime, endtime, output_location)
        return cmd
    # -----------------------------------------------------
    def create_call_datafind_offline(self, starttime, endtime, ifo, output_location):
        """
        Creates a call to 'ligo_data_find' to find the data
        once it is more than ~2 weeks old. Don't ask me...
        """
        executable = self.get_glue_dir()+'/bin/ligo_data_find --url-type file --lal-cache'

        cmd = "%s --type %s --observatory %s --gps-start-time %d --gps-end-time %d > %s" %\
            (executable, self.type_offline[ifo], ifo[0].upper(), starttime, endtime, output_location)
        return cmd
    # -----------------------------------------------------
    def run_datafind(self):
        """
        Run the datafind command to find the data
        """
        # create the datafind directory
        cache_dir = "%s/GRB%s/datafind/cache" % (self.analysis_dir, self.name)
        cmd = 'mkdir -p '+cache_dir
        system_call(self.name, cmd)

        # get the start and end-time
        starttime = self.offsource_segment[0]
        endtime = self.offsource_segment[1]

        # and run the datafind command for each IFO, putting
        # the cache files directly into the cache directory
        for ifo in basic_ifolist:

            # create common cache-file names; plain '%d' is used here because
            # the space-padded '%10d' would put blanks into the file name for
            # 9-digit GPS times
            output_location = '%s/%s-DATA-%d-%d.cache' % (cache_dir, ifo[0].upper(), starttime, endtime)

            # decide: should I use online data (deleted after a month or so)
            # or do I require offline data? That changes everything...
            if self.use_offline_data:
                cmd = self.create_call_datafind_offline(starttime, endtime, ifo, output_location)
            else:
                cmd = self.create_call_datafind_online(starttime, endtime, ifo, output_location)

            system_call(self.name, cmd, False)
    # -----------------------------------------------------
    def check_data_to_use(self):
        """
        Checks the difference between now and the requested data
        to identify if we can use online data or have to use
        offline data
        """

        # get the start time
        starttime = self.offsource_segment[0]

        # calculate the difference and test; data older than
        # 1000000 s (about 11.6 days) is fetched offline
        timediff = time.time() - offset_gps_to_linux - starttime
        self.use_offline_data = False
        if timediff > 1000000:
            self.use_offline_data = True
    # -----------------------------------------------------
    def update_inifile(self, list_replacements):
        """
        Replaces some of the parameters in the ini-file by
        specific values, given as [section, option, value] triples.
        """

        # read the config ini-file
        config_file = '%s/%s' % (self.analysis_dir, self.inifile)
        pc = ConfigParser.ConfigParser()
        pc.read(config_file)

        # replacement loop
        for replacement in list_replacements:
            pc.set(replacement[0], replacement[1], replacement[2])

        # write out the new ini-file
        cp_file = open(config_file, 'w')
        pc.write(cp_file)
        cp_file.close()
    # -----------------------------------------------------
    def get_hipe_arguments(self):
        """
        Returns the common part of the arguments for the call to
        lalapps_trigger_hipe
        """

        cmd = " --h1-segments H1-science_grb%s.txt" % self.name
        cmd += " --l1-segments L1-science_grb%s.txt" % self.name
        cmd += " --v1-segments V1-science_grb%s.txt" % self.name
        cmd += " --list "+self.trigger_file
        cmd += " --grb "+self.name
        cmd += " --onsource-left "+self.cp.get('analysis','onsource_left')
        cmd += " --onsource-right "+self.cp.get('analysis','onsource_right')
        cmd += " --config-file "+self.inifile
        cmd += " --log-path "+self.condor_log_path
        cmd += " --num-trials "+self.cp.get('analysis','num_trials')
        cmd += " --padding-time "+self.cp.get('analysis','padding_time')
        return cmd
    # -----------------------------------------------------
    def prepare_inspiral_analysis(self):
        """
        Main piece to create and prepare the inspiral analysis
        """
        #
        # Now create directories and copy a bunch of files
        #

        # check whether to use online or offline data
        self.check_data_to_use()

        # Create the main directory; the existence check below is
        # intentionally disabled
        if False and os.path.exists(self.analysis_dir):
            del_lock()
            raise IOError("The directory %s already exists. Please (re)move"
                          " this directory or choose another name for the "
                          "analysis directory" % self.analysis_dir)
        cmd = 'mkdir %s' % self.analysis_dir
        system_call(self.name, cmd)

        # copy the relevant files from the CVS into the analysis directory
        files = glob.glob('%s/*.ini' % self.input_dir)
        self.make_cvs_copy(files, self.analysis_dir)

        # copy the trigger file into the analysis directory
        # NOTE: When the monitor code is handling the analysis properly,
        # this call won't be needed.
        cmd = 'cp %s %s/' % (self.trigger_file, self.analysis_dir)
        system_call(self.name, cmd)

        # make some necessary replacements in the config (ini) file
        list_replacements = [['pipeline', 'user-tag', 'GRB%s' % self.name]]
        if self.use_offline_data:
            list_replacements.append(['input', 'ligo-channel', 'LDAS-STRAIN'])
        self.update_inifile(list_replacements)

        # copy the lalapps executables (lalapps only, because these are
        # static executables)
        cmd = 'cd %s/bin; cp lalapps_coherent_inspiral lalapps_coherentbank \
            lalapps_coire lalapps_frjoin lalapps_inca lalapps_inspinj lalapps_inspiral \
            lalapps_inspiral_hipe lalapps_sire lalapps_thinca lalapps_tmpltbank \
            lalapps_trigbank lalapps_plot_hipe lalapps_trigger_hipe %s' %\
            (self.get_lalapps_dir(), self.analysis_dir)
        system_call(self.name, cmd)

        # link the glue executables
        self.make_links(self.get_glue_dir()+'/bin', self.analysis_dir, ['ligo_data_find','ligolw_add'])

        # set the used code version and create the setup script
        self.code['inspiral'] = CodeTagger()
        self.create_setup_script(self.analysis_dir)

        # move the segment files
        cmd = 'mv %s/*-science_grb%s.txt %s' % (self.main_dir, self.name, self.analysis_dir)
        system_call(self.name, cmd)

        #
        # make the call to trigger_hipe; create the DAG
        #
        cmd = 'cd %s;' % self.analysis_dir
        cmd += template_trigger_hipe
        cmd += self.get_hipe_arguments()
        system_call(self.name, cmd)

        # need to rename the cache-file
        cmd = 'cd %s/GRB%s; mv GRB%s_onoff.cache GRB%s.cache' % \
            (self.analysis_dir, self.name, self.name, self.name)
        system_call(self.name, cmd)

        # call a subfunction to run the datafind command
        self.run_datafind()

        # update the two DAG instances
        self.dag['onoff'] = AnalysisDag(self.name, 'onoff', self.analysis_dir)
        self.dag['inj'] = AnalysisDag(self.name, 'inj', self.analysis_dir)

        dagfile = self.get_basic_dagname()+'_onoff_uberdag.dag'
        self.dag['onoff'].set_dagname(dagfile)
        dagfile = self.get_basic_dagname()+'_inj_uberdag.dag'
        self.dag['inj'].set_dagname(dagfile)
    # -----------------------------------------------------
    def prepare_injection_analysis(self):
        """
        Creates and prepares the injection analysis DAG
        """

        #
        # call to create the injection DAG
        #
        cmd = 'cd %s;' % self.analysis_dir
        cmd += template_trigger_hipe_inj
        cmd += self.get_hipe_arguments()
        cmd += " --injection-config "+self.injfile
        system_call(self.name, cmd)

        # need to unify the two cache files
        cmd = 'cd %s/GRB%s; cat GRB%s_inj.cache >> GRB%s.cache' % \
            (self.analysis_dir, self.name, self.name, self.name)
        system_call(self.name, cmd, False)
    # -----------------------------------------------------
    def prepare_onoff_analysis(self):
        """
        Prepare the onoff directory with all needed files and
        code and prepare the DAG. The DAG is not started here.
        @return: name of the DAG file
        """

        # get the current tag first
        tag = get_code_tag()
        pylal_dir = self.get_pylal_dir()

        # make a consistency check
        test_dir = self.cp.get('paths','lalsuite')+'/'+tag+'.pylal'
        if os.path.normpath(test_dir) != os.path.normpath(pylal_dir):
            del_lock()
            raise NameError("The paths to the pylal directory do not agree. Possible error in the setup scripts. \n"
                            " Name from environment: %s "
                            " Name from tag: %s" % (pylal_dir, test_dir))

        # prepare the postprocessing directory at this stage
        dir_onoff = "%s/GRB%s/postprocessing_%s" % (self.analysis_dir, self.name, tag)
        # check the existence of the directory
        if os.path.exists(dir_onoff):
            info(self.name, " WARNING: The directory %s already exists. Maybe this is a test? "
                 "Then (re)move the directory..." % dir_onoff)
            return None

        system_call(self.name, 'mkdir -p %s/logs' % dir_onoff)

        # copy all needed files from CVS
        files = glob.glob('%s/post*' % self.input_dir)
        self.make_cvs_copy(files, dir_onoff)

        # link the executables directory
        cmd = 'cd %s; ln -s %s/bin executables' % (dir_onoff, pylal_dir)
        system_call(self.name, cmd)

        # set the used code version and create the setup script
        self.code['onoff'] = CodeTagger()
        self.create_setup_script(dir_onoff)

        # create the DAG file
        self.apply_sed_file(dir_onoff, 'postproc.in', 'postproc.dag')

        # return the DAG filename
        dagfile = "%s/postproc.dag" % dir_onoff
        return dagfile
    # -----------------------------------------------------
    def prepare_lik_analysis(self):
        """
        Prepare the likelihood directory with all needed files and
        code and prepare the DAG. The DAG is not started here.
        @return: name of the DAG file
        """

        # get the current tag first
        tag = get_code_tag()

        # prepare the likelihood directory at this stage
        dir_lik = "%s/GRB%s/likelihood_%s" % (self.analysis_dir, self.name, tag)
        if os.path.exists(dir_lik):
            info(self.name, " WARNING: The directory %s already exists. Maybe this is a test? "
                 "Then (re)move the directory..." % dir_lik)
            return None

        system_call(self.name, 'mkdir -p %s/logs' % dir_lik)

        # copy all needed files from CVS
        files = glob.glob('%s/likelihood*' % self.input_dir)
        self.make_cvs_copy(files, dir_lik)

        # link the executables directory
        cmd = 'cd %s; ln -s %s/bin executables' % (dir_lik, self.get_pylal_dir())
        system_call(self.name, cmd)

        # set the used code version and create the setup script
        self.code['inj'] = CodeTagger()
        self.create_setup_script(dir_lik)

        # create the DAG file
        self.apply_sed_file(dir_lik, 'likelihood.in', 'likelihood.dag')

        # return the DAG filename
        dagfile = "%s/likelihood.dag" % dir_lik
        return dagfile
    # -----------------------------------------------------
    def check_analysis_directory(self, dag_key):
        """
        Check whether the dagman.out file for the DAG with 'dag_key'
        exists after a grace period, as a sign that the DAG has started
        """

        dag = self.dag[dag_key]

        # try it ten times, ten seconds apart
        success = False
        for i in range(10):
            time.sleep(10)
            success = os.path.exists(dag.get_outname())
            if success:
                break

        # check the status at the end
        if not success:

            # send an email about this problem
            subject = 'Problems starting condor DAG'
            email_msg = 'The condor DAG %s was not started.\n' % dag.get_dagname()
            send_mail(subject, email_msg)

            # set the status
            if dag.status > 0:
                dag.status = -dag.status

            return -1
        else:
            return 1
    # --------------------------------------
    def make_cvs_copy(self, files, dest_dir):
        """
        Copies all the files given in the list 'files' to
        dest_dir and creates a file 'cvs_versions.txt' in dest_dir
        containing the actual CVS version of the files
        @param files: list of files to be copied from self.input_dir
        @param dest_dir: destination directory
        """

        cvs_rev_file_output = ''
        cmd = 'cp '
        for name in files:

            # collect all files to be copied
            cmd += name + ' '

            # retrieve the version of this file
            basename = os.path.basename(name)
            cmdtmp = "cd %s; cvs status %s " % (os.path.dirname(name), basename)
            code, output, error = external_call(cmdtmp)

            # parse the output
            for line in output.split('\n'):
                if 'Working revision:' in line:
                    rev_work = line.split()[2]

            # add the information to the file-text
            cvs_rev_file_output += '%s %s\n' % (basename, rev_work)

        # call the copy command
        cmd += dest_dir
        system_call(self.name, cmd)

        # create the CVS revision file
        cvs_rev_file_name = dest_dir+'/cvs_versions.txt'
        f = open(cvs_rev_file_name, 'w')
        f.write(cvs_rev_file_output)
        f.close()
    # --------------------------------------
    def get_html_status(self):
        """
        Returns the status of the DAGs of this instance
        in the form of a dictionary.
        """

        status_dict = {}

        # loop over the dags in this instance
        for key, dag in self.dag.iteritems():
            if self.has_data:
                status = dag.get_status()
                text = dag.get_stage_name()
                if status < 0:
                    text = '<font color="#FF0000">%s</font>' % text
                if status == 5:
                    text = '<font color="#00aa00">%s</font>' % text
            else:
                text = 'NoData'

            # set the text
            status_dict[key] = text

        return status_dict
    # -----------------------------------------------------
    def calculate_optimality(self):

        # calculate the antenna factors (for informational purposes only)
        for ifo in basic_ifolist:
            _, _, _, q = antenna.response(self.time, self.ra, self.de,
                                          0.0, 0.0, 'degree', ifo)
            self.qvalues[ifo] = q
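    # Sketch of the outcome (values are hypothetical): after the call,
    # self.qvalues maps each IFO to its antenna sensitivity q at the GRB
    # sky position and time, e.g.
    #
    #   {'H1': 0.72, 'H2': 0.72, 'L1': 0.41, 'V1': 0.18}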
    # -----------------------------------------------------
    def apply_sed_file(self, path, infile, outfile):
        """
        Applies the sed file to a template .in file,
        writing the result to outfile
        """

        # get the sed filename
        sedfile = path+'/sed.file'
        self.create_sed_file(sedfile)

        # run the sed command
        cmd = 'sed -f %s %s/%s > %s/%s' % (sedfile, path, infile, path, outfile)
        system_call(self.name, cmd, False)
    # -----------------------------------------------------
    def create_sed_file(self, sedfile):
        """
        Creates the replacement sed file that will be used later
        on several .in files.
        """

        # get the publishing paths for this GRB
        publishing_path = cp.get('paths','publishing_path')
        html_path = "%s/GRB%s" % (publishing_path, self.name)

        # create the sed file for in-file replacements
        f = open(sedfile, 'w')
        f.write("# created %s\n" % get_time())
        f.write("s/@GRBNAME@/GRB%s/g\n" % self.name)
        f.write("s=@ANALYSISPATH@=%s=g\n" % self.analysis_dir)
        f.write("s/@STARTTIME@/%d/g\n" % self.starttime)
        f.write("s/@ENDTIME@/%d/g\n" % self.endtime)
        f.write("s/@IFOS@/%s/g\n" % self.ifos)
        f.write("s=@LOGPATH@=%s=g\n" % self.condor_log_path)
        f.write("s/@TRIGGERTIME@/%d/g\n" % int(self.time))
        f.write("s/@RIGHTASCENSION@/%f/g\n" % float(self.ra))
        f.write("s/@DECLINATION@/%f/g\n" % float(self.de))
        f.write("s=@OUTPUTPATH@=html=g\n")
        f.write("s=@OPENBOXPATH@=OPENBOX=g\n")
        f.write("s=@HTMLOUTPUT@=%s=g\n" % html_path)
        f.write("s/@LOGNAME@/%s/g\n" % os.getenv("LOGNAME"))
        f.write("s/@BOUNDARIESMC@/%s/g\n" % cp.get('analysis','mc_boundaries'))
        f.write("s/@GRBID@/%s/g\n" % self.name)
        f.write("s=@GRBPICKLE@=%s=g\n" % get_monitor_filename())
        f.write("s=@CONFIGFILE@=%s=g\n" % self.config_file)
        f.write("s/@BOUNDARIESM2@/%s/g\n" % cp.get('analysis','m2_boundaries'))
        vetofiles = ''
        for ifo in self.ifolist:
            vetofiles += ',../../%s-VETOTIME_CAT2_grb%s.txt' % (ifo, self.name)
        f.write("s=@VETOFILES@=%s=g\n" % vetofiles)
        f.write("s/@STATISTIC@/%s/g\n" % cp.get('analysis','statistic'))
        f.close()
    # -----------------------------------------------------
    def get_code_setup(self):
        """
        Returns the setup script used for the current environment
        @return: path to the source file
        """

        # get tag first
        tag = get_code_tag()

        # create the source filename
        source_file = cp.get('paths','lalsuite') + '/'+tag+'.pylal.rc'

        if not os.path.exists(source_file):
            del_lock()
            raise IOError("Source script does not seem to be present: %s. Please check." % source_file)

        return source_file
    # -----------------------------------------------------
    def create_setup_script(self, dest_dir):
        """
        Create a link to the setup script in the given directory
        @param dest_dir: destination directory
        """

        # get the tag information (which is the actual, most current tag)
        source_file = self.get_code_setup()

        # make the link
        cmd = 'cd %s; ln -s %s setup.rc' % (dest_dir, source_file)
        system_call(self.name, cmd)
    # -----------------------------------------------------
    def cleanup(self, path):
        """
        Cleanup of the temporary files stored in the main dir,
        moving them either into the GRB directories
        or into the Auxiliary directory
        @param path: destination path for the files
        """

        cmd = 'mv %s/*grb%s* %s' % (cp.get('paths','main'), self.name, path)
        system_call(self.name, cmd)
        cmd = 'mv %s/*VETOTIME* %s' % (cp.get('paths','main'), path)
        system_call(self.name, cmd)