1 """
This module contains condor jobs / node classes for the SQLite Triggered Follow Up dag
3 """
4
5 __author__ = 'Chad Hanna <channa@phys.lsu.edu>, Cristina Torres <cristina.torres@ligo.org>, Romain Gouaty <gouaty@lapp.in2p3.fr>'
6
7 import sqlite3
8
9 import sys, os, copy, math, math, subprocess, socket, re, string
10 import time as time_method
11 from optparse import *
12 import tempfile
13 import ConfigParser
14 import urlparse
15 from UserDict import UserDict
16
17 sys.path.append('@PYTHONLIBDIR@')
18
19 from glue import segments
20 from glue import segmentsUtils
21 from glue.ligolw import ligolw
22 from glue.ligolw import table
23 from glue.ligolw import lsctables
24
25 from glue.ligolw import utils
26 from glue import pipeline
27 from glue import lal
28
29 from lalapps import inspiral
30 from lalburst import date
31 from pylal.xlal import date as xlaldate
32 from pylal.xlal.datatypes.ligotimegps import LIGOTimeGPS
33
34 lsctables.LIGOTimeGPS = LIGOTimeGPS
35
36
37
38
39
40
42
43 if not os.access(output,os.F_OK): os.makedirs(output)
44 else:
45 if not os.access(output,os.W_OK):
46 print >> sys.stderr, 'path '+output+' is not writable'
47 sys.exit(1)
48
50 if time >= 815155213 and time <= 875232014: return 's5'
51 if time >= 931035296 and time <= 999999999: return 's6'
52 print >>sys.stderr, "COULD NOT DETERMINE SCIENCE RUN from %d" % (int(time),)
53 sys.exit(1)
54
56
57
58
59
60 gps = LIGOTimeGPS(time)
61 start_gps = int(date.utc_midnight(gps))
62 end_gps = start_gps + 86400
63 return str(start_gps),str(end_gps)
64
65
150
183
185 return os.path.split(os.path.abspath(os.environ['HOME']))[0]
186
188 host = socket.getfqdn()
189 return host
190
191
192
193
194
196 qscanList = []
197 cacheList = lal.Cache.fromfile(open(fileName))
198 if not cacheList:
199 return qscanList
200 cacheSelected = cacheList.sieve(description=type,ifos=ifo)
201 if time:
202 if math.floor(float(time)) != math.ceil(float(time)):
203 cacheSelected = cacheSelected.sieve(segment=segments.segment(math.floor(float(time)), math.ceil(float(time))))
204 else:
205 cacheSelected = cacheSelected.sieve(segment=segments.segment(math.floor(float(time))-0.5, math.floor(float(time))+0.5))
206
207 for cacheEntry in cacheSelected:
208 path_output = cacheEntry.path
209 time_output = str(cacheEntry.segment[0])
210 type_output = cacheEntry.description
211 ifo_output = cacheEntry.observatory
212 qscanList.append([path_output,time_output,type_output,ifo_output])
213
214 return qscanList
215
216
217
218
219
220
222 """
223 """
224
227
234
def setupJob(self, name='job', dir= '', tag_base=None, cp=None):
    """
    Common Condor-job setup shared by every follow-up job class.

    Builds the job name from *name*, *tag_base* and the last path
    component of *dir*, creates the output directory tree
    (logs/, Images/, DataProducts/), and points the submit, stdout
    and stderr files at it.  If *cp* carries a
    [condor-memory-requirement] option matching *name*, an equivalent
    Condor Requirements line is added.
    """
    if dir and not os.path.isdir(dir):
        os.mkdir(dir)
    self.name = name + "_" + tag_base + "_" + os.path.split(dir.rstrip('/'))[1]
    if dir:
        self.relPath = dir + '/' + self.name
    else:
        self.relPath = self.name
    self.outputPath = os.getcwd() + '/' + self.relPath + '/'
    self.tag_base = tag_base
    # os.mkdir is not recursive: create the job directory first, then
    # each expected subdirectory (was four copy-pasted if-blocks).
    for subdir in ('', '/logs', '/Images', '/DataProducts'):
        if not os.path.isdir(self.relPath + subdir):
            os.mkdir(self.relPath + subdir)
    # Not every subclass mixes this in after CondorDAGJob.__init__; a
    # missing add_condor_cmd is tolerated.  Narrowed from a bare
    # `except:` so KeyboardInterrupt/SystemExit are no longer swallowed.
    try:
        self.add_condor_cmd('environment',"KMP_LIBRARY=serial;MKL_SERIAL=yes")
    except Exception:
        pass
    self.set_sub_file(self.name+'.sub')
    self.set_stdout_file(self.outputPath+'/logs/'+self.name+'-$(macroid).out')
    self.set_stderr_file(self.outputPath+'/logs/'+self.name+'-$(macroid).err')
    if cp:
        if cp.has_section("condor-memory-requirement") and cp.has_option("condor-memory-requirement",name):
            requirement = cp.getint("condor-memory-requirement",name)
            self.add_condor_cmd("Requirements", "(Memory > " + str(requirement) + ")")
266
267
268 -class qscanJob(pipeline.CondorDAGJob, FUJob):
269 """
270 A qscan job
271 """
def __init__(self, opts, cp, dir='', tag_base=''):
    """
    Vanilla-universe Condor job wrapping the [fu-condor] qscan
    executable; also generates its pre/post helper shell scripts.
    """
    exe = cp.get('fu-condor','qscan').strip()
    self.__executable = exe
    self.__universe = "vanilla"
    self.name = os.path.split(exe.rstrip('/'))[1]
    pipeline.CondorDAGJob.__init__(self, self.__universe, self.__executable)
    self.setupJob(name=self.name, dir=dir, cp=cp, tag_base=tag_base)
    # Helper scripts used as DAG pre/post scripts by qscan nodes.
    self.setup_checkForDir()
    self.setup_rm_lock()
282
285
287
288 checkdir_script = open('checkForDir.sh','w')
289 checkdir_script.write("""#!/bin/bash
290 if [ -d $1/$2 ]
291 then
292 matchingList=$(echo $(find $1 -name $2.bk*))
293 COUNTER=1
294 for file in $matchingList
295 do
296 let COUNTER=COUNTER+1
297 done
298 mv $1/$2 $1/$2.bk.$COUNTER
299 fi
300 """)
301 checkdir_script.close()
302 os.chmod('checkForDir.sh',0755)
303
305 rm_lock_script = open('rmLock.sh','w')
306 rm_lock_script.write("#!/bin/bash\nif [ -e $1 ]\nthen\n\trm $1\nfi")
307 rm_lock_script.close()
308 os.chmod('rmLock.sh',0755)
309
310
311
313 """
314 """
def __init__(self, opts, cp, dir='', tag_base=''):
    """
    Local-universe job that copies the user's grid proxy to proxy.pem
    via the generated getProxy.sh script.
    """
    # The script must exist before the job can point at it.
    self.setup_proxy_script()
    script = "getProxy.sh"
    self.__executable = script
    self.__universe = "local"
    self.name = os.path.split(script.rstrip('/'))[1]
    pipeline.CondorDAGJob.__init__(self, self.__universe, self.__executable)
    self.add_condor_cmd('getenv','True')
    self.setupJob(name=self.name, dir=dir, cp=cp, tag_base=tag_base)
323
325 proxy_script = open('getProxy.sh','w')
326 proxy_script.write("""#!/bin/bash
327 if [ ! -e \"proxy.pem\" ]
328 then
329 file=`grid-proxy-info -path`
330 cp ${file} proxy.pem
331 fi
332 """)
333 proxy_script.close()
334 os.chmod('getProxy.sh',0755)
335
336
337
339 """
340 This class sets up a script to be run as child of the remote scans in order to distribute its results to the appropriate paths. It takes the qscan tarball as input, uncompress it and copy the results to the path specified in cache file.
341 Moreover this job also deletes the temporary remote datafind cache files in order to clean up the followup directory.
342 """
def __init__(self, opts, cp, dir='', tag_base=''):
    """
    Vanilla job running the generated distribRemoteScan_<dir>_<tag>.sh
    script, which unpacks a remote-scan tarball into its final path
    and removes the temporary remote datafind cache.
    """
    self.setup_distrib_script(dir, tag_base)
    script = 'distribRemoteScan_%s_%s.sh' % (dir, tag_base)
    self.__executable = script
    self.__universe = "vanilla"
    self.name = os.path.split(script.rstrip('/'))[1]
    pipeline.CondorDAGJob.__init__(self, self.__universe, self.__executable)
    self.add_condor_cmd('getenv','True')
    self.setupJob(name=self.name, dir=dir, cp=cp, tag_base=tag_base)
353
355 distrib_script = open('distribRemoteScan_'+dir+'_'+tag_base+'.sh','w')
356 distrib_script.write("""#!/bin/bash
357 currentPath=`pwd` ;
358 mv $1 $2/. ;
359 cd $2 ;
360 tar -xzvf $1 ;
361 cd $currentPath ;
362 for figPath in `find $2/$3 -name "*.png" -print` ; do
363 echo $figPath ;
364 thumbPath=`echo $figPath | sed s/.png/.thumb.png/g` ;
365 figDPI=120;
366 fullSize='600x';
367 thumbSize='300x';
368 convert -resize $thumbSize -strip -depth 8 -colors 256 $figPath $thumbPath ;
369 done
370 rm $2/$1 ;
371 if [ -e $4 ]
372 then
373 rm $4 ;
374 fi
375 """)
376 distrib_script.close()
377 os.chmod('distribRemoteScan_'+dir+'_'+tag_base+'.sh',0755)
378
379
380
382 """
383 """
def __init__(self, opts, cp, tag_base='', dir=''):
    """
    Grid-submitted datafind job: runs remoteDatafind_<dir>_<tag>.sh on
    the EGEE grid (Bologna pilot site) and transfers back the
    converted frame cache named by $(macrooutputfile).
    """
    self.setup_df_submission_script(dir, tag_base)
    script = 'remoteDatafind_%s_%s.sh' % (dir, tag_base)
    self.__executable = script
    self.dir = dir
    self.__universe = "vanilla"
    self.name = os.path.split(script.rstrip('/'))[1]
    pipeline.CondorDAGJob.__init__(self, self.__universe, self.__executable)
    self.add_condor_cmd('getenv','True')
    # Ship the grid proxy with the job and bring results back.
    self.add_condor_cmd('input','proxy.pem')
    self.add_condor_cmd('should_transfer_files','yes')
    self.add_condor_cmd('when_to_transfer_output','ON_EXIT_OR_EVICT')
    self.add_condor_cmd('+RunOnEGEEGrid','True')
    self.add_condor_cmd("Requirements","(Arch == \"INTEL\" || Arch == \"X86_64\" ) && ( Pilot_SiteName == \"Bologna\")")
    self.add_condor_cmd('transfer_output_files','$(macrooutputfile)')
    self.setupJob(name=self.name, dir=dir, cp=cp, tag_base=tag_base)
400
402 submit_script = open('remoteDatafind_'+dir+'_'+tag_base+'.sh','w')
403 submit_script.write("""#!/bin/bash
404 . /opt/exp_software/virgo/etc/virgo-env.sh
405 . /opt/glite/etc/profile.d/grid-env.sh
406 export X509_USER_PROXY=`pwd`/proxy.pem
407 /opt/exp_software/virgo/lscsoft/etc/LSCdataFind --observatory $1 --gps-start-time $2 --gps-end-time $3 --url-type file --lal-cache --type $4 --output $5
408 outputCache=$5
409 outputQcache=${outputCache/.cache/.qcache}
410 /storage/gpfs_virgo3/virgo/omega/omega_r3270_glnxa64_binary/bin/convertlalcache $5 %s-%s-$outputQcache
411 """%(dir,tag_base))
412 submit_script.close()
413 os.chmod('remoteDatafind_'+dir+'_'+tag_base+'.sh',0755)
414
415
416
418 """
419 A remote qscan job
420 """
def __init__(self, opts, cp, dir='', tag_base=''):
    """
    Remote qscan job: submits the generated remoteScan_<dir>_<tag>.sh
    to the EGEE grid (Bologna pilot site), transferring the frame
    cache in and the scan tarball out via macro-named files.
    """
    self.setup_submission_script(dir, tag_base)
    script = "remoteScan_"+dir+"_"+tag_base+".sh"
    self.__executable = script
    self.dir = dir
    self.__universe = "vanilla"
    self.name = os.path.split(script.rstrip('/'))[1]
    pipeline.CondorDAGJob.__init__(self, self.__universe, self.__executable)
    self.add_condor_cmd('getenv','True')
    # Grid proxy travels with the job; caches move via file transfer.
    self.add_condor_cmd('input','proxy.pem')
    self.add_condor_cmd('should_transfer_files','yes')
    self.add_condor_cmd('when_to_transfer_output','ON_EXIT_OR_EVICT')
    self.add_condor_cmd('+RunOnEGEEGrid','True')
    self.add_condor_cmd("Requirements","(Arch == \"INTEL\" || Arch == \"X86_64\" ) && ( Pilot_SiteName == \"Bologna\")")
    self.add_condor_cmd('transfer_output_files','$(macrooutputfile)')
    self.add_condor_cmd('transfer_input_files','$(macroinputfile)')
    self.setupJob(name=self.name, dir=dir, cp=cp, tag_base=tag_base)
439
441 submit_script = open('remoteScan_'+dir+'_'+tag_base+'.sh','w')
442 submit_script.write("""#!/bin/bash
443 . /opt/exp_software/virgo/etc/virgo-env.sh
444 . /opt/glite/etc/profile.d/grid-env.sh
445 . /storage/gpfs_virgo3/virgo/omega/omega_env.sh
446 export X509_USER_PROXY=`pwd`/proxy.pem
447 /storage/gpfs_virgo3/virgo/omega/omega_r3270_glnxa64_binary/bin/wpipeline scan -r -c $1 -f $2 -o $3 $4
448
449 tar -czf %s-%s-$4.tgz $3
450 """%(dir,tag_base))
451 submit_script.close()
452 os.chmod('remoteScan_'+dir+'_'+tag_base+'.sh',0755)
453
454
456
def __init__(self,opts,cp,dir='',tag_base=''):
    """
    Vanilla job for the [fu-condor] analyseQscan executable; the
    tag-qualified name is kept for background-type lookups.
    """
    exe = cp.get('fu-condor','analyseQscan').strip()
    self.__executable = exe
    self.__universe = "vanilla"
    self.name = os.path.split(exe.rstrip('/'))[1]
    self.name_for_background = self.name + "_" + tag_base
    pipeline.CondorDAGJob.__init__(self, self.__universe, self.__executable)
    self.add_condor_cmd('getenv','True')
    self.setupJob(name=self.name, dir=dir, cp=cp, tag_base=tag_base)
466
467
468
470
def __init__(self,cp,dir='', tag_base=''):
    """
    Follow-up inspiral job built on inspiral.InspiralJob; a 'head'
    tag swaps in the [fu-condor] inspiral_head executable.
    """
    inspiral.InspiralJob.__init__(self,cp)
    if tag_base == 'head':
        self.set_executable(cp.get('fu-condor','inspiral_head').strip())
    self.name = os.path.split(self.get_executable().rstrip('/'))[1]
    self.setupJob(name=self.name, dir=dir, cp=cp, tag_base=tag_base)
481
482
class skyMapJob(pipeline.CondorDAGJob,FUJob):
    """
    Standard-universe job generating sky map data with the
    [fu-condor] lalapps_skymap executable.
    """
    def __init__(self, options, cp, dir='', tag_base=''):
        """
        Configure the executable and read the [fu-skymap] resolution
        and sample-rate settings used later by the nodes.
        """
        exe = cp.get('fu-condor','lalapps_skymap').strip()
        self.__executable = exe
        self.__universe = "standard"
        pipeline.CondorDAGJob.__init__(self, self.__universe, self.__executable)
        self.add_condor_cmd('getenv','True')
        self.name = os.path.split(exe.rstrip('/'))[1]
        self.setupJob(name=self.name, dir=dir, tag_base=tag_base)
        # Sky grid resolution and the input data sample rate.
        self.ra_res = cp.get("fu-skymap", 'ra-res')
        self.dec_res = cp.get("fu-skymap", 'dec-res')
        self.sample_rate = cp.get("fu-skymap", 'sample-rate')
502
503
505 """
506 Plots the sky map output of lalapps_skymap
507 """
def __init__(self, options, cp, dir='',tag_base=''):
    """
    Vanilla job that plots the sky map output of lalapps_skymap using
    the [fu-condor] pylal_skyPlotJob executable.
    """
    exe = cp.get('fu-condor','pylal_skyPlotJob').strip()
    self.__executable = exe
    self.__universe = "vanilla"
    pipeline.CondorDAGJob.__init__(self, self.__universe, self.__executable)
    self.add_condor_cmd('getenv','True')
    self.name = os.path.split(exe.rstrip('/'))[1]
    self.setupJob(name=self.name, dir=dir, tag_base=tag_base)
    # Same [fu-skymap] settings the map-generation job reads.
    self.ra_res = cp.get("fu-skymap", 'ra-res')
    self.dec_res = cp.get("fu-skymap", 'dec-res')
    self.sample_rate = cp.get("fu-skymap", 'sample-rate')
522
523
525 """
526 A followup plotting job for snr and chisq time series
527 """
def __init__(self, options, cp, dir='', tag_base=''):
    """
    Vanilla plotting job for SNR and chisq time series, using the
    [fu-condor] plotsnrchisq executable.
    """
    exe = cp.get('fu-condor','plotsnrchisq').strip()
    self.__executable = exe
    self.__universe = "vanilla"
    pipeline.CondorDAGJob.__init__(self, self.__universe, self.__executable)
    self.add_condor_cmd('getenv','True')
    self.name = os.path.split(exe.rstrip('/'))[1]
    self.setupJob(name=self.name, tag_base=tag_base, dir=dir)
539
def __init__(self, config_file, dir='', tag_base=''):
    """
    Datafind job plus the cacheconv.sh helper that converts LAL
    caches to qscan caches.

    NOTE: setupJob must run before LSCDataFindJob.__init__, which is
    handed the self.relPath/logs directories setupJob creates.
    """
    self.__executable = config_file.get('condor','datafind').strip()
    self.name = os.path.split(self.__executable.rstrip('/'))[1]
    self.setupJob(name=self.name, tag_base=tag_base, dir=dir)
    pipeline.LSCDataFindJob.__init__(self, self.relPath, self.relPath + '/logs', config_file)
    self.setup_cacheconv(config_file)
560
562
563 convert_script = open('cacheconv.sh','w')
564
565 convert_script.write("""#!/bin/bash
566 %s ${1} ${2}
567 if [ ${3} = \'y\' ]; then
568 cp ${2} .
569 fi
570 """ % string.strip(cp.get('fu-condor','convertcache')))
571 convert_script.close()
572 os.chmod('cacheconv.sh',0755)
573
574
575
577 """
578 This actually launches a default wiki creation job
579 """
def __init__(self,opts,cp,dir='',tag_base=''):
    """
    Launch a default wiki checklist creation job.

    Executable and universe come from [fu-condor] makeCheckListWiki
    and [makeCheckListWiki] universe.  The tag_base argument is
    ignored: the job is always tagged '_all'.
    """
    # Was string.strip(cp.get(...).strip()) — a redundant double strip.
    self.__executable = cp.get("fu-condor","makeCheckListWiki").strip()
    self.name = os.path.split(self.__executable.rstrip('/'))[1]
    self.__universe = cp.get("makeCheckListWiki","universe").strip()
    pipeline.CondorDAGJob.__init__(self,self.__universe,self.__executable)
    self.add_condor_cmd('getenv','True')
    self.setupJob(name=self.name,dir=dir,cp=cp,tag_base='_all')
591
592
593
class makeFollowupPageJob(pipeline.CondorDAGJob,FUJob):
    """
    This actually launches a followup page job
    """
    def __init__(self,opts,cp,dir='',tag_base=''):
        """
        Vanilla-universe job for the [fu-condor] lalapps_followup_page
        executable.  The tag_base argument is ignored: the job is
        always tagged '_all'.
        """
        # Was string.strip(cp.get(...).strip()) — a redundant double strip.
        self.__executable = cp.get("fu-condor","lalapps_followup_page").strip()
        self.name = os.path.split(self.__executable.rstrip('/'))[1]
        self.__universe = "vanilla"
        pipeline.CondorDAGJob.__init__(self,self.__universe,self.__executable)
        self.add_condor_cmd('getenv','True')
        self.setupJob(name=self.name,dir=dir,cp=cp,tag_base='_all')
608
609
610
611
613 """
614 A job which queries the ldbd database holding segment
615 information about the DQ flags.
616 """
617 defaults={"section":"fu-condor",
618 "options":{"universe":"local",
619 "dqflags":"followupQueryDQ.py"}
620 }
621
def __init__(self, opts, cp, dir='', tag_base=""):
    """
    DQ-flag query job: wraps the [fu-condor] dqflags executable
    (default followupQueryDQ.py, local universe).
    """
    # Fill any missing [fu-condor] options from the class defaults.
    self.__conditionalLoadDefaults__(findFlagsJob.defaults,cp)
    exe = cp.get('fu-condor','dqflags').strip()
    self.__executable = exe
    self.__universe = cp.get('fu-condor','universe').strip()
    pipeline.CondorDAGJob.__init__(self, self.__universe, self.__executable)
    self.add_condor_cmd('getenv','True')
    self.name = os.path.split(exe.rstrip('/'))[1]
    self.setupJob(name=self.name, tag_base=tag_base, dir=dir)
633
634
636 """
637 A job instance that queries the segment database for known
638 types of active veto intervals.
639 """
640 defaults={"section":"fu-condor",
641 "options":{"universe":"local",
642 "vetoflags":"followupQueryVeto.py"}
643 }
def __init__(self, opts,cp, dir='', tag_base=""):
    """
    Veto-segment query job: wraps the [fu-condor] vetoflags
    executable (default followupQueryVeto.py, local universe).
    """
    # Fill any missing [fu-condor] options from the class defaults.
    self.__conditionalLoadDefaults__(findVetosJob.defaults,cp)
    exe = cp.get('fu-condor','vetoflags').strip()
    self.__executable = exe
    self.__universe = cp.get('fu-condor','universe').strip()
    pipeline.CondorDAGJob.__init__(self, self.__universe, self.__executable)
    self.add_condor_cmd('getenv','True')
    self.name = os.path.split(exe.rstrip('/'))[1]
    self.setupJob(name=self.name, tag_base=tag_base, dir=dir)
655
656
657
659 """
660 This is a job class which allows us to wrap up the script for
661 creating customized figure of merit(FOM) plots. The script,
662 followupCustomFOM.py, acutally contains a call to
663 ligo_data_find, via a subprocess. This removes our need
664 to have a datafind parent job.
665 """
666 defaults={"section":"fu-condor",
667 "options":{"universe":"vanilla",
668 "customfom":"followupCustomFOM.py"}
669 }
def __init__(self, opts, cp, dir='', tag_base=""):
    """
    Custom figure-of-merit plot job wrapping the [fu-condor]
    customfom executable (default followupCustomFOM.py); the script
    performs its own ligo_data_find call, so no datafind parent is
    required.
    """
    self.__conditionalLoadDefaults__(customFOMPlotJob.defaults,cp)
    exe = cp.get('fu-condor','customfom').strip()
    self.__executable = exe
    self.__universe = cp.get('fu-condor','universe').strip()
    pipeline.CondorDAGJob.__init__(self, self.__universe, self.__executable)
    self.add_condor_cmd('getenv','True')
    self.name = os.path.split(exe.rstrip('/'))[1]
    self.setupJob(name=self.name, tag_base=tag_base, dir=dir)
681
682
683
685 """
686 A job that performs parameter consitency check for a trigger
687 being followed up.
688 """
689 defaults={"section":"fu-condor",
690 "options":{"universe":"local",
691 "effDRatio":"followupRatioTest.py"}
692 }
def __init__(self, opts, cp, dir='', tag_base=""):
    """
    Parameter-consistency (effective distance ratio) test job
    wrapping the [fu-condor] effDRatio executable (default
    followupRatioTest.py).
    """
    self.__conditionalLoadDefaults__(effDRatioJob.defaults,cp)
    exe = cp.get('fu-condor','effDRatio').strip()
    self.__executable = exe
    self.__universe = cp.get('fu-condor','universe').strip()
    pipeline.CondorDAGJob.__init__(self, self.__universe, self.__executable)
    self.add_condor_cmd('getenv','True')
    self.name = os.path.split(exe.rstrip('/'))[1]
    self.setupJob(name=self.name, tag_base=tag_base, dir=dir)
704
705
707 """
708 Generates coherent inspiral data
709 """
710 defaults={
711 "section":"condor",
712 "options":{
713 "universe":"vanilla",
714 "chia":"lalapps_coherent_inspiral"
715 }
716 }
717
def __init__(self, options, cp, dir='', tag_base=''):
    """
    Coherent inspiral (chia) job wrapping the [condor] chia
    executable.  NOTE(review): the class defaults declare a
    "vanilla" universe but this constructor hard-codes "standard" —
    confirm which is intended.
    """
    self.__conditionalLoadDefaults__(followUpChiaJob.defaults,cp)
    exe = cp.get('condor','chia').strip()
    self.__executable = exe
    self.__universe = "standard"
    pipeline.CondorDAGJob.__init__(self, self.__universe, self.__executable)
    self.add_condor_cmd('getenv','True')
    self.name = os.path.split(exe.rstrip('/'))[1]
    self.setupJob(name=self.name, tag_base=tag_base, dir=dir)
730
731
732
733
734
736 """
737 A followup plotting job for coherent inspiral search and null stat timeseries
738 """
739 defaults={
740 "section":"condor",
741 "options":{
742 "universe":"vanilla",
743 "plotchiatimeseries":"plotchiatimeseries"
744 }
745 }
746
def __init__(self, options, cp, dir, tag_base=''):
    """
    Vanilla plotting job for coherent-inspiral and null-statistic
    time series ([fu-condor] plotchiatimeseries).
    """
    exe = cp.get('fu-condor','plotchiatimeseries').strip()
    self.__executable = exe
    self.__universe = "vanilla"
    pipeline.CondorDAGJob.__init__(self, self.__universe, self.__executable)
    self.name = os.path.split(exe.rstrip('/'))[1]
    self.add_condor_cmd('getenv','True')
    self.setupJob(name=self.name, tag_base=tag_base, dir=dir)
757
758
759
760
class mcmcJob(pipeline.CondorDAGJob, FUJob):
    """
    Standard-universe Condor job for an MCMC run ([fu-condor] mcmc).
    """
    def __init__(self,opts,cp,dir='',tag_base=''):
        """
        Configure the mcmc executable and the job's output tree.
        """
        exe = cp.get('fu-condor','mcmc').strip()
        self.__executable = exe
        self.__universe = "standard"
        self.name = os.path.split(exe.rstrip('/'))[1]
        pipeline.CondorDAGJob.__init__(self, self.__universe, self.__executable)
        self.setupJob(name=self.name, dir=dir, cp=cp, tag_base=tag_base)
773
774
775
776
778 """
779 A job to set up a spinmcmc run
780 """
def __init__(self,opts,cp,dir='',tag_base=''):
    """
    Standard-universe job for a spinning-MCMC run
    ([fu-condor] spinmcmc).
    """
    exe = cp.get('fu-condor','spinmcmc').strip()
    self.__executable = exe
    self.__universe = "standard"
    self.name = os.path.split(exe.rstrip('/'))[1]
    pipeline.CondorDAGJob.__init__(self, self.__universe, self.__executable)
    self.setupJob(name=self.name, dir=dir, cp=cp, tag_base=tag_base)
787
788
789
790
791
793 """
794 A job to set up a plotmcmc run
795 """
def __init__(self,opts,cp,dir='',tag_base=''):
    """
    Vanilla job that plots MCMC results ([fu-condor] plotmcmc).
    """
    exe = cp.get('fu-condor','plotmcmc').strip()
    self.__executable = exe
    self.__universe = "vanilla"
    self.name = os.path.split(exe.rstrip('/'))[1]
    pipeline.CondorDAGJob.__init__(self, self.__universe, self.__executable)
    self.add_condor_cmd('getenv','True')
    self.setupJob(name=self.name, dir=dir, cp=cp, tag_base=tag_base)
805
806
807
808
810 """
811 A job to set up a plotspinmcmc run
812 """
def __init__(self,opts,cp,dir='',tag_base=''):
    """
    Vanilla job that plots spinning-MCMC results
    ([fu-condor] plotspinmcmc).
    """
    exe = cp.get('fu-condor','plotspinmcmc').strip()
    self.__executable = exe
    self.__universe = "vanilla"
    self.name = os.path.split(exe.rstrip('/'))[1]
    pipeline.CondorDAGJob.__init__(self, self.__universe, self.__executable)
    self.add_condor_cmd('getenv','True')
    self.setupJob(name=self.name, dir=dir, cp=cp, tag_base=tag_base)
822
823
824
825
826
827
829 """
830 """
831
834
836 self.validNode = True
837
839 self.validNode = False
840
847
849 self.add_var_opt("output-path",job.outputPath)
850 self.add_var_opt("enable-output","")
851
852
854 """
855 QScan node. This node writes its output to the web directory specified in
856 the inifile + the ifo and gps time. For example:
857
858 /archive/home/channa/public_html/followup/htQscan/H1/999999999.999
859
860 The omega scan command line is
861
862 wpipeline scan -r -c H1_hoft.txt -f H-H1_RDS_C03_L2-870946612-870946742.qcache -o QSCAN/foreground-hoft-qscan/H1/870946677.52929688 870946677.52929688
863
864 """
def __init__(self, dag, job, cp, opts, time, ifo, frame_cache, p_nodes=[], type="ht", variety="fg"):
    """
    Build one local omega/qscan DAG node for *ifo* at gps *time*.

    variety selects foreground ("fg", recursive scan) or background
    ("bg"); type is "ht", "rds" or "seismic" and picks the
    [fu-<variety>-<type>-qscan] config section.  The node is only
    added to *dag* when the scan is not remote and not disabled by
    the corresponding command-line option.

    NOTE(review): p_nodes=[] is a mutable default argument; harmless
    here only because the list is never mutated.
    """
    pipeline.CondorDAGNode.__init__(self,job)

    # Labels later matched by analyseQscanNode when wiring parents.
    self.scan_type = variety.upper() + "_" + type.replace("seismic","seis").upper()
    self.scan_ifo = ifo
    # Output-path template: omega/<ifo>/<fg|bg>/<SCIENCE RUN>
    preString="omega/"+ifo+"/%s/"+science_run(time).upper()+""
    if variety == "bg":
        self.add_var_arg('scan')
        preString = preString%("background")
        # oldPreString: legacy layout, assigned but not used below.
        oldPreString="omega/" + science_run(time).upper() + "/background"
    else:
        # Foreground scans run recursively (-r).
        self.add_var_arg('scan -r')
        preString = preString%("foreground")
        oldPreString="omega/" + science_run(time).upper() + "/foreground"
    # Rewrite the configured qscan config path for this science run,
    # and store the rewrite back into cp so later nodes agree.
    config = self.fix_config_for_science_run( cp.get('fu-'+variety+'-'+type+'-qscan', ifo+'config').strip(), time )
    if cp.get('fu-'+variety+'-'+type+'-qscan', ifo+'config').strip() != config:
        cp.set('fu-'+variety+'-'+type+'-qscan',ifo+'config',config)
    self.add_var_arg("-c " + config )

    # Frame-type tag used in the output directory name.
    if type == "ht":
        dataString = figure_out_type(time, ifo, 'hoft')[0]
    else:
        dataString = figure_out_type(time, ifo, 'rds')[0]
    if type == "seismic":
        dataString = dataString + "_SEIS"
    if dataString[:2]!=ifo:
        dataString = ifo + "_" + dataString
    # <utc-midnight>-<utc-midnight+86400> day window for grouping.
    timeString = "-".join(get_day_boundaries(int(time)))
    if cp.has_option('fu-output','output-dir') and cp.get('fu-output','output-dir'):
        output = cp.get('fu-output','output-dir') + '/' + preString + '/' + dataString + '/' + timeString
    else:
        output = os.getcwd() + '/' + preString + '/' + dataString + '/' + timeString

    # mkdir is the module-level recursive-create helper.
    mkdir(output)

    output_path = output+"/"+str(time)
    self.add_var_arg("-o " + output_path)

    self.output_cache = lal.CacheEntry(ifo, job.name.upper(), segments.segment(float(time), float(time)), "file://localhost/"+output_path)

    self.add_var_arg("-f "+frame_cache)

    # Omega's final positional argument is the gps time itself.
    self.add_var_arg(repr(time))

    # Pre: back up a pre-existing scan dir; post: remove omega's lock
    # file so the scan can be re-run.
    self.set_pre_script( "checkForDir.sh %s %s" %(output, str(time)) )

    self.set_post_script( "rmLock.sh %s/%s/lock.txt" %(output, str(time)) )

    if not opts.disable_dag_categories:
        self.set_category(job.name.lower())

    # Remote (grid) ifos are handled by the remote-scan node classes;
    # this node only joins the DAG for local scans that are enabled.
    if not(cp.has_option('fu-remote-jobs','remote-jobs') and job.name in cp.get('fu-remote-jobs','remote-jobs') and cp.has_option('fu-remote-jobs','remote-ifos') and ifo in cp.get('fu-remote-jobs','remote-ifos')):
        for node in p_nodes:
            if node.validNode:
                self.add_parent(node)
        if not (type=="ht" and opts.no_ht_qscan) and not (type=="rds" and opts.no_rds_qscan) and not (type=="seismic" and opts.no_seismic_qscan):
            dag.add_node(self)
            self.validate()
        else:
            self.invalidate()
    else:
        self.invalidate()
930
932 run = science_run(time)
933 config_path = os.path.split(config)
934 out = "/".join([config_path[0], config_path[1].replace('s5',run).replace('s6',run)])
935 return out
936
937
939
940 - def __init__(self, dag, job, cp, opts):
945
946
948
def __init__(self, dag, job, cp, opts, ifo, time, p_nodes=[], type=""):
    """
    Node distributing a remote scan's results: hands the distrib
    script the tarball name, destination path, gps time and the
    temporary datafind cache to delete.

    Argument order is fixed by the generated shell script.
    NOTE(review): assumes p_nodes[0] is the remote scan node
    (name_output_file/output_path) and p_nodes[1] the datafind node
    (localFileName) — confirm against callers.  p_nodes=[] is a
    mutable default argument (never mutated here).
    """
    pipeline.CondorDAGNode.__init__(self,job)
    self.scan_type = type.replace("seismic","seis").upper()
    self.scan_ifo = ifo

    # $1: scan tarball, $2: destination dir, $3: gps time label.
    self.add_var_arg(p_nodes[0].name_output_file)
    self.add_var_arg(p_nodes[0].output_path)
    self.add_var_arg(str(time))

    # $4: temporary remote datafind cache to be removed.
    self.add_var_arg(p_nodes[1].localFileName)

    for node in p_nodes:
        if node.validNode:
            self.add_parent(node)
    dag.add_node(self)
    self.validate()
967
969
def __init__(self, dag, job, cp, opts, ifo, time, data_type="rds", p_nodes=[]):
    """
    Remote datafind node: passes observatory, gps window, frame type
    and output cache name to the remote datafind script, and declares
    the converted qcache as the file Condor transfers back.

    NOTE(review): p_nodes=[] is a mutable default argument (never
    mutated here).
    """
    pipeline.CondorDAGNode.__init__(self,job)

    # Window is half the configured search-time-range on either side,
    # padded by one second.
    type, channel = figure_out_type(time,ifo,data_type)
    q_time = float(cp.get("fu-q-"+data_type+"-datafind",ifo+"-search-time-range"))/2.
    start_time = int( time - q_time - 1.)
    end_time = int( time + q_time + 1.)
    outputFileName = ifo[0]+'-'+type+'-'+str(start_time)+'-'+str(end_time)+'.qcache'

    # Positional args consumed by remoteDatafind_*.sh ($1..$5).
    self.add_var_arg(ifo[0])
    self.add_var_arg(str(start_time))
    self.add_var_arg(str(end_time))
    self.add_var_arg(type)
    self.add_var_arg(outputFileName.replace('qcache','cache'))

    # The script prefixes the qcache with "<dir>-<tag>-"; this is the
    # file transferred back via transfer_output_files.
    self.name_output_file = job.dir + "-" + job.tag_base + "-" + outputFileName
    self.add_macro("macrooutputfile", self.name_output_file)

    if not opts.disable_dag_categories:
        self.set_category(job.name.lower())
    for node in p_nodes:
        if node.validNode:
            self.add_parent(node)
    dag.add_node(self)
    self.validate()
998
999
1000
1002 """
1003 """
def __init__(self, dag, job, cp, opts, time, ifo, p_nodes=[], type="ht", variety="fg"):
    """
    Remote qscan node: feeds the generated remoteScan_*.sh the qscan
    config, input frame cache, output label and gps time, and
    declares the result tarball for transfer back.

    NOTE(review): p_nodes=[] is a mutable default argument (never
    mutated here); assumes p_nodes[0] is the remote datafind node
    providing localFileName — confirm against callers.
    """
    pipeline.CondorDAGNode.__init__(self,job)

    # Labels later matched by analyseQscanNode when wiring parents.
    self.scan_type = variety.upper() + "_" + type.replace("seismic","seis").upper()
    self.scan_ifo = ifo
    # Output-path template: omega/<ifo>/<fg|bg>/<SCIENCE RUN>
    preString="omega/"+ifo+"/%s/"+science_run(time).upper()+""
    if variety == "bg":
        preString = preString%("background")
    else:
        preString = preString%("foreground")
    # $1: qscan configuration file.
    config = cp.get('fu-'+variety+'-'+type+'-qscan', ifo+'config').strip()
    self.add_var_arg( config )

    # Frame-type tag used in the output directory name.
    if type == "ht":
        dataString = figure_out_type(time, ifo, 'hoft')[0]
    else:
        dataString = figure_out_type(time, ifo, 'rds')[0]
    if type == "seismic":
        dataString = dataString + "_SEIS"
    if dataString[:2]!=ifo:
        dataString = ifo + "_" + dataString
    timeString = "-".join(get_day_boundaries(int(time)))
    if cp.has_option('fu-output','output-dir') and cp.get('fu-output','output-dir'):
        output = cp.get('fu-output','output-dir') + '/' + preString + '/' + dataString + '/' + timeString
    else:
        output = os.getcwd() + '/' + preString + '/' + dataString + '/' + timeString

    # Destination the distrib node will unpack into.
    mkdir(output)

    self.output_path = output

    # The wpipeline "executable" name recorded in the cache entry.
    self.output_cache = lal.CacheEntry(ifo, job.name.replace("remoteScan_"+job.dir+"_"+job.tag_base+".sh","wpipeline").upper(), segments.segment(float(time), float(time)), "file://localhost/"+self.output_path+"/"+str(time))

    # $2: frame cache from the remote datafind node, also transferred
    # in via transfer_input_files.
    input_cache_file = p_nodes[0].localFileName
    self.add_var_arg(input_cache_file)
    self.add_macro("macroinputfile", input_cache_file)

    # $3: output subdirectory label; $4: scan time.  NOTE(review):
    # str() and repr() can differ in float precision — confirm both
    # spellings are intended by the remote script.
    self.add_var_arg(str(time))

    self.add_var_arg(repr(time))

    # Tarball produced by the script and transferred back.
    self.name_output_file = job.dir + "-" + job.tag_base + "-" + repr(time) + ".tgz"
    self.add_macro("macrooutputfile", self.name_output_file)

    if not opts.disable_dag_categories:
        self.set_category(job.name.lower())

    for node in p_nodes:
        if node.validNode:
            self.add_parent(node)
    dag.add_node(self)
    self.validate()
1067
1068
1069
1071
def __init__(self, dag, job, cp, opts, time, ifo):
    """
    Node comparing the foreground qscan at *time* for *ifo* against
    the background qscan population, with plotting and threshold
    options drawn from [fu-analyse-qscan].  Becomes a child of every
    valid qscan/distrib node of the matching scan type and ifo.
    """
    pipeline.CondorDAGNode.__init__(self,job)

    name = job.name

    # Derive the data type and a short label from the job name.
    if "SEIS" in name:
        data_type = "rds"
        shortName = "seis_rds"
    elif "HT" in name:
        data_type = "hoft"
        shortName = "ht"
    else:
        data_type = "rds"
        shortName = "rds"

    # Reference channel name without the "IFO:" prefix.
    refChannel = figure_out_type(time, ifo, data_type)[1].split(":")[-1]
    self.add_var_opt('ref-channel',refChannel)
    if cp.has_option('fu-analyse-qscan','generate-qscan-xml'):
        self.add_var_opt('generate-qscan-xml','')
    self.add_var_opt('z-threshold',cp.getfloat('fu-analyse-qscan','z-threshold'))
    if cp.has_option('fu-analyse-qscan','plot-z-distribution'):
        self.add_var_opt('plot-z-distribution','')
        self.add_var_opt('z-min',cp.getfloat('fu-analyse-qscan','z-min'))
        self.add_var_opt('z-max',cp.getfloat('fu-analyse-qscan','z-max'))
        self.add_var_opt('z-bins',cp.getfloat('fu-analyse-qscan','z-bins'))
    if cp.has_option('fu-analyse-qscan','plot-dt-distribution'):
        self.add_var_opt('plot-dt-distribution','')
        # dt ranges are per scan type ("ht-dt-min", "seis-rds-dt-min", ...).
        self.add_var_opt('dt-min',cp.getfloat('fu-analyse-qscan',shortName.replace('_','-') + '-dt-min'))
        self.add_var_opt('dt-max',cp.getfloat('fu-analyse-qscan',shortName.replace('_','-') + '-dt-max'))
        self.add_var_opt('dt-bins',cp.getfloat('fu-analyse-qscan','dt-bins'))
    if cp.has_option('fu-analyse-qscan','plot-z-scattered'):
        self.add_var_opt('plot-z-scattered','')
    if cp.has_option('fu-analyse-qscan','plot-z-scattered') or cp.has_option('fu-analyse-qscan','plot-dt-distribution'):
        self.add_var_opt('ref-channel',refChannel)
    self.add_var_opt('ifo-times',ifo)
    # Scan types are recorded in the cache under the WPIPELINE name.
    self.add_var_opt('type',name.upper().replace("ANALYSEQSCAN.PY","WPIPELINE"))
    self.add_var_opt('short-type',job.name_for_background.upper().replace("ANALYSEQSCAN.PY","WPIPELINE")+'_')
    self.add_var_opt('gps-string',str(time))
    self.add_var_opt('ifo-tag',ifo)
    self.add_var_opt('user-tag',str(time).replace('.','_') + "_" + shortName)

    self.add_var_opt('qscan-cache-foreground',dag.basename+'.cache')

    # Background cache: either configured explicitly per ifo, or
    # located once via figure_out_cache and written back into cp.
    if cp.has_option('fu-analyse-qscan',ifo+'-background-cache'):
        backgroundCache = cp.get('fu-analyse-qscan',ifo+'-background-cache').strip()
    else:
        backgroundCache = figure_out_cache(time,ifo)
        cp.set('fu-analyse-qscan',ifo+'-background-cache',backgroundCache)
    self.add_var_opt('qscan-cache-background',backgroundCache)

    self.output_file_name = "%s-analyseQscan_%s_%s-unspecified-gpstime.cache" % ( ifo, ifo, str(time).replace('.','_') + "_" + shortName)
    self.output_cache = lal.CacheEntry(ifo,job.name.upper(),segments.segment(float(time),float(time)),"file://localhost/"+job.outputPath+'/'+self.output_file_name)

    self.setupPlotNode(job)

    if not opts.disable_dag_categories:
        self.set_category(job.name.lower())

    # Depend on every valid scan node of the same type and ifo.
    for node in dag.get_nodes():
        if isinstance(node,fuQscanNode) or isinstance(node,distribRemoteQscanNode):
            if node.validNode:
                if (node.scan_type in name and node.scan_ifo == ifo):
                    self.add_parent(node)
    # Only join the DAG when this analysis type is enabled and a
    # background cache exists.
    if not (shortName=="ht" and opts.no_ht_analyseQscan) and not (shortName == "rds" and opts.no_rds_analyseQscan) and not (shortName == "seis_rds" and opts.no_seismic_analyseQscan) and backgroundCache:
        dag.add_node(self)
        self.validate()
    else:
        self.invalidate()
1148
1149
1150
1152
def __init__(self, dag, job, cp, opts, ifo, sngl=None, qscan=False, trigger_time=None, data_type="hoft", p_nodes=[]):
    """
    Datafind node used in two modes:
      * qscan=True  -- locate frames around a qscan time, taken from
        sngl.time when sngl is given, otherwise from trigger_time;
      * qscan=False -- locate frames for re-running the inspiral on a
        single trigger; sngl is then mandatory.

    dag, job, cp, opts -- DAG, datafind job, config and command-line options
    ifo          -- interferometer name, e.g. "H1"
    data_type    -- "hoft" or "rds"; selects the config section used
    p_nodes      -- parent nodes to depend on
                    (NOTE(review): mutable default argument; safe here
                    because the list is only iterated, never mutated)
    """
    self.outputFileName = ""
    pipeline.LSCDataFindNode.__init__(self,job)
    if qscan:
        # Qscans may be requested for a trigger or for a bare gps time.
        if sngl: time = sngl.time
        else: time = trigger_time
        self.outputFileName = self.setup_qscan(job, cp, time, ifo, data_type)
    else:
        if not sngl:
            print >> sys.stderr, "argument \"sngl\" should be provided to class fuDataFindNode"
            sys.exit(1)
        self.outputFileName = self.setup_inspiral(job, cp, sngl, ifo)

    self.output_cache = lal.CacheEntry(ifo, job.name.upper(), segments.segment(self.get_start(), self.get_end()), "file://localhost/"+os.path.abspath(self.outputFileName))

    if not opts.disable_dag_categories:
        self.set_category(job.name.lower())

    # Add the node to the DAG only when the job runs locally (or remote
    # scans were explicitly requested) and the relevant --no-* flag is
    # not set; otherwise mark the node invalid so children skip it.
    if not(cp.has_option('fu-remote-jobs','remote-jobs') and job.name in cp.get('fu-remote-jobs','remote-jobs') and cp.has_option('fu-remote-jobs','remote-ifos') and ifo in cp.get('fu-remote-jobs','remote-ifos')) or opts.do_remoteScans:
        for node in p_nodes:
            if node.validNode:
                self.add_parent(node)
        if not (data_type=="hoft" and not qscan and opts.no_insp_datafind) and not (data_type=="hoft" and qscan and opts.no_htQscan_datafind) and not (data_type=="rds" and opts.no_rdsQscan_datafind):
            dag.add_node(self)
            self.validate()
        else:
            self.invalidate()
    else:
        # BUG FIX: was "self.invalidate" (a bare attribute access that
        # never called the method), leaving remote-only nodes marked valid.
        self.invalidate()
1183
def setup_qscan(self, job, cp, time, ifo, data_type):
    """
    Configure this datafind node to fetch the frame span needed for a
    qscan centred on ``time`` and register a post script that converts
    the LAL cache into a qscan-style cache.  Returns the qcache path.
    """
    # Frame type and channel appropriate for this ifo/epoch/data type.
    frame_type, channel = figure_out_type(time, ifo, data_type)
    self.set_type(frame_type)
    # Half of the configured search window, padded by 1s on either side.
    self.q_time = float(cp.get("fu-q-"+data_type+"-datafind", ifo+"-search-time-range")) / 2.
    self.set_observatory(ifo[0])
    self.set_start(int(time - self.q_time - 1.))
    self.set_end(int(time + self.q_time + 1.))

    frame_cache = self.get_output()
    qscan_cache = frame_cache.rstrip("lcf") + "qcache"

    is_remote = (cp.has_option('fu-remote-jobs','remote-jobs')
                 and job.name in cp.get('fu-remote-jobs','remote-jobs')
                 and cp.has_option('fu-remote-jobs','remote-ifos')
                 and ifo in cp.get('fu-remote-jobs','remote-ifos'))
    if is_remote:
        # Query the remote datafind server and remember the local cache
        # file name so it can be retrieved later.
        self.add_var_arg('--server ldr-bologna.phys.uwm.edu')
        post_script_flag = "y"
        self.localFileName = os.path.basename(qscan_cache)
    else:
        self.add_var_arg('')
        post_script_flag = "n"

    self.set_post_script(os.getcwd()+"/cacheconv.sh %s %s %s" % (frame_cache, qscan_cache, post_script_flag))
    return qscan_cache
1207
def setup_inspiral(self, job, cp, sngl, ifo):
    """
    Configure this datafind node to cover the original analysis chunk of
    trigger ``sngl`` padded by 64s on either side, and return the LAL
    cache file name.  (def line reconstructed from the call site in
    __init__ -- confirm against upstream.)
    """
    frame_type, channel = figure_out_type(sngl.get_gps_start_time(), ifo)
    self.set_type(frame_type)
    self.set_observatory(ifo[0])
    # Pad the analysis chunk by 64 seconds on each side.
    self.set_start(sngl.get_gps_start_time() - 64)
    self.set_end(sngl.get_gps_end_time() + 64)
    self.add_var_arg('')
    return self.get_output()
1221
1222
1223
1225
1226
def __init__(self, dag, job, cp, opts, sngl, frame_cache, chia, tag, p_nodes=[]):
    """
    Build a lalapps_inspiral node that re-filters the data around one
    single-ifo trigger, replaying the process params recorded with the
    original analysis but narrowing the trigger window to +/- tlen
    seconds and writing the intermediate filter products needed for
    follow-up plots.

    sngl        -- single-ifo trigger (provides time, ifo, process_params)
    frame_cache -- frame cache substituted for the original one
    chia        -- True when this node feeds the coherent (chia) follow-up;
                   suppresses the single-ifo debug outputs
    tag         -- string used to build the user tag
    p_nodes     -- parent nodes (NOTE(review): mutable default argument;
                   safe here because it is only iterated)
    """
    # Half-width (seconds) of the trigger window around sngl.time.
    tlen = 1.0
    self.output_file_name = ""
    pipeline.CondorDAGNode.__init__(self,job)
    pipeline.AnalysisNode.__init__(self)

    self.set_trig_start( int(sngl.time - tlen + 0.5) )
    self.set_trig_end( int(sngl.time + tlen + 0.5) )
    if not chia:
        # Single-ifo follow-up keeps every intermediate filter product.
        self.add_var_opt("write-snrsq","")
        self.add_var_opt("write-chisq","")
        self.add_var_opt("write-spectrum","")
        self.add_var_opt("write-template","")
    # c-data frames are needed downstream in both modes.
    self.add_var_opt("write-cdata","")

    # Original-run parameters that must not be copied verbatim.
    skipParams = ['minimal-match', 'bank-file', 'user-tag', 'injection-file', 'trig-start-time', 'trig-end-time']

    extension = ".xml"
    # Replay the recorded process params, overriding the handful that
    # must change for a single-trigger re-run.
    for row in sngl.process_params:
        param = row.param.strip("-")
        value = row.value

        if param == 'frame-cache': value = frame_cache
        # Lower the thresholds so filter output is kept regardless of snr.
        if param == 'snr-threshold': value = "0.1"
        if param == 'do-rsq-veto': continue
        if param == 'enable-rsq-veto': continue
        if param == 'chisq-threshold': value = "1.0e+06"
        if param == 'cluster-method': value = 'window'
        if param == 'cluster-window': continue
        if param == 'userTag': continue
        if param == 'user-tag': continue
        if param in skipParams: continue
        if param == 'channel-name':
            # Remember which ifo this re-run belongs to (e.g. "H1").
            self.inputIfo = value[0:2]
            if opts.do_grb:
                # GRB runs may use a different frame type / channel.
                type,channel = figure_out_type(sngl.time,self.inputIfo)
                value = channel
        # NOTE(review): 'injection-file' is in skipParams, so the loop
        # has already continued before this override can run -- this
        # line appears unreachable; confirm intent against upstream.
        if param == 'injection-file': value = sngl.inj_file_name
        if param == 'gps-end-time':
            self.set_end(int(value))
            continue
        if param == 'gps-start-time':
            self.set_start(int(value))
            continue
        if param == 'ifo-tag':
            self.set_ifo_tag(value)
            continue
        self.add_var_opt(param,value)
        if param == 'pad-data':
            self.set_pad_data(int(value))
        if param == 'write-compress':
            # Compressed output changes the expected file extension.
            extension = '.xml.gz'

    # Cluster over the follow-up window and disable the rsq veto.
    self.add_var_opt('cluster-window',str( tlen / 2.))
    self.add_var_opt('disable-rsq-veto',' ')
    # One-row template bank containing only this trigger.
    bankFile = self.write_trig_bank(sngl, 'trig_bank/' + sngl.ifo + '-TRIGBANK_FOLLOWUP_' + repr(sngl.time) + '.xml.gz')
    self.set_bank(bankFile)

    if chia:
        self.set_user_tag( tag.upper() + "_CHIA_FOLLOWUP_" + repr(sngl.time) )
    else:
        self.set_user_tag( tag.upper() + "_FOLLOWUP_" + repr(sngl.time) )

    # Names follow the standard IFO-DESC-START-DURATION convention.
    self.output_file_name = job.outputPath + sngl.ifo + "-INSPIRAL_" + self.get_ifo_tag() + "_" + self.get_user_tag() + "-" + str(self.get_start()) + "-" + str(int(self.get_end())-int(self.get_start())) + extension
    self.outputCache = sngl.ifo + ' ' + 'INSPIRAL' + ' ' + str(self.get_start()) + ' ' + str(int(self.get_end())-int(self.get_start())) + ' ' + self.output_file_name + '\n' + sngl.ifo + ' ' + 'INSPIRAL-FRAME' + ' ' + str(self.get_start()) + ' ' + str(int(self.get_end())-int(self.get_start())) + ' ' + self.output_file_name.replace(extension,".gwf") + '\n'

    self.add_var_opt("output-path",job.outputPath)
    # Cache entries for both the xml result and the c-data frame file.
    self.output_cache = []
    self.output_cache.append(lal.CacheEntry(sngl.ifo, job.name.upper(), segments.segment(float(self.get_start()), float(self.get_end())), "file://localhost/"+self.output_file_name))
    self.output_cache.append(lal.CacheEntry(sngl.ifo, job.name.upper(), segments.segment(float(self.get_start()), float(self.get_end())), "file://localhost/"+self.output_file_name.replace(extension,'.gwf')))

    self.output_frame_file = self.output_file_name.replace(extension,'.gwf')

    if not opts.disable_dag_categories:
        self.set_category(job.name.lower())

    for node in p_nodes:
        if node.validNode:
            self.add_parent(node)
    if not opts.no_inspiral:
        dag.add_node(self)
        self.validate()
    else:
        self.invalidate()
1326
def write_trig_bank(self, sngl, name):
    """
    Write a one-row template bank file containing only the trigger
    ``sngl`` and return its file name.  (def line reconstructed from the
    call site in __init__ -- confirm against upstream.)
    """
    # Best effort: the directory may already exist.
    try:
        os.mkdir('trig_bank')
    except: pass

    xmldoc = ligolw.Document()
    xmldoc.appendChild(ligolw.LIGO_LW())
    llw = xmldoc.childNodes[-1]

    # Empty process_params table plus a single-row sngl_inspiral table.
    llw.appendChild(lsctables.New(lsctables.ProcessParamsTable))
    trig_table = lsctables.New(lsctables.SnglInspiralTable)
    llw.appendChild(trig_table)
    trig_table.append(sngl.row)

    utils.write_filename(xmldoc, name, verbose=False, gz = True)
    return name
1343
1344
class makeCheckListWikiNode(pipeline.CondorDAGNode,FUNode):
    """
    This class is responsible for running a final job which will
    create the default top 10 triggers for each trigger type.
    This will place these files into the publication directory so
    the user can push wiki content onto the CBC wiki.
    (class header and def line reconstructed after extraction loss --
    confirm names against upstream.)
    """
    def __init__(self,dag,job,cp,opts):
        pipeline.CondorDAGNode.__init__(self,job)

        # Where the follow-up results live and which ini file to use.
        self.add_var_opt('followup-directory',cp.get("makeCheckListWiki",
                                                     "location").strip())
        self.add_var_opt('ini-file',cp.get("makeCheckListWiki",
                                           "ini-file").strip())
        if not opts.disable_dag_categories:
            self.set_category(job.name.lower())

        # Depend on everything except the followup page builder.
        for parentNode in dag.get_nodes():
            if not isinstance(parentNode,makeFollowupPageNode):
                self.add_parent(parentNode)
        if not opts.no_makeCheckList:
            dag.add_node(self)
            self.validate()
        else:
            self.invalidate()
1371
1372
class makeFollowupPageNode(pipeline.CondorDAGNode,FUNode):
    """
    This runs the followup page
    """
    def __init__(self,dag,job,cp,opts):
        pipeline.CondorDAGNode.__init__(self,job)

        # The page builder reads the cache of everything produced so far.
        self.add_var_arg('followup_pipe.cache')
        if not opts.disable_dag_categories:
            self.set_category(job.name.lower())

        # Depend on every other node: the page is always built last.
        for parent in dag.get_nodes():
            self.add_parent(parent)
        dag.add_node(self)
        self.validate()
1388
1389
class findFlagsNode(pipeline.CondorDAGNode,FUNode):
    """
    This class is responsible for setting up a node to perform a
    query for the DQ flags for the trigger which is under review.
    EXAMPLE
    followupQueryDQ.py --window=60,15 --trigger-time=929052945 --output-format=moinmoin --segment-url="https://segdb.ligo.caltech.edu:30015" --output-file=dqResults.wiki
    (class header reconstructed after extraction loss -- grounded by the
    findFlagsNode.defaults reference below.)
    """
    defaults={"section":"findFlags",
              "options":{"window":"60,15",
                         "segment-url":"https://segdb.ligo.caltech.edu",
                         "output-format":"moinmoin",
                         "output-file":"dqResults.wiki",
                         "estimate-background":"",
                         "background-location":"automatic"}
              }
    def __init__(self, dag, job, cp, opts, coincEvent=None):
        """
        Build a DQ-flag query node for the candidate in coincEvent.
        """
        self.__conditionalLoadDefaults__(findFlagsNode.defaults,cp)
        pipeline.CondorDAGNode.__init__(self,job)
        self.add_var_opt("trigger-time",coincEvent.time)

        # The wiki output goes into the DataProducts subdirectory.
        oFilename="%s-findFlags_%s_%s.wiki"%(coincEvent.instruments,
                                             coincEvent.ifos,
                                             coincEvent.time)
        self.add_var_opt("output-file",job.outputPath+'/DataProducts/'+oFilename)
        for key in ("segment-url", "output-format", "window"):
            self.add_var_opt(key, cp.get('findFlags', key))
        for key in ("estimate-background", "background-location", "blind"):
            if cp.has_option('findFlags', key):
                self.add_var_opt(key, cp.get('findFlags', key))
        self.output_cache = lal.CacheEntry(coincEvent.ifos, job.name.upper(), segments.segment(float(coincEvent.time), float(coincEvent.time)), "file://localhost/"+job.outputPath+'/DataProducts/'+oFilename)

        # Comma-separated list of the ifos involved in the candidate.
        ifo_names = []
        if hasattr(coincEvent, "sngl_inspiral"):
            for sngl in coincEvent.sngl_inspiral.itervalues():
                ifo_names.append(sngl.ifo)
        elif hasattr(coincEvent, "ifos_list"):
            ifo_names = list(coincEvent.ifos_list)
        self.add_var_opt("ifo-list",",".join(ifo_names))

        if not opts.disable_dag_categories:
            self.set_category(job.name.lower())

        if not opts.no_findFlags:
            dag.add_node(self)
            self.validate()
        else:
            self.invalidate()
1446
1447
class findVetosNode(pipeline.CondorDAGNode,FUNode):
    """
    This class is responsible for creating a node in the dag which
    queries the segment database for veto segments active around
    the trigger time of the candidate.
    Command line example:
    followupQueryVeto.py --window=60,15 --trigger-time=929052945 --output-format=moinmoin --segment-url="https://segdb.ligo.caltech.edu:30015" --output-file=vetoResults.wiki
    (class header reconstructed after extraction loss -- grounded by the
    findVetosNode.defaults reference below.)
    """
    defaults={"section":"findVetoes",
              "options":{"window":"60,15",
                         "segment-url":"https://segdb.ligo.caltech.edu",
                         "output-format":"moinmoin",
                         "output-file":"vetoResults.wiki",
                         "estimate-background":"",
                         "background-location":"automatic"}
              }
    def __init__(self, dag, job, cp, opts, coincEvent=None):
        """
        Build a veto-segment query node for the candidate in coincEvent.
        """
        self.__conditionalLoadDefaults__(findVetosNode.defaults,cp)
        pipeline.CondorDAGNode.__init__(self,job)
        self.add_var_opt("trigger-time",coincEvent.time)

        # The wiki output goes into the DataProducts subdirectory.
        oFilename="%s-findVetos_%s_%s.wiki"%(coincEvent.instruments,
                                             coincEvent.ifos,
                                             coincEvent.time)
        self.add_var_opt("output-file",job.outputPath+'/DataProducts/'+oFilename)
        for key in ("segment-url", "output-format", "window"):
            self.add_var_opt(key, cp.get('findVetoes', key))
        for key in ("estimate-background", "background-location", "blind"):
            if cp.has_option('findVetoes', key):
                self.add_var_opt(key, cp.get('findVetoes', key))
        self.output_cache = lal.CacheEntry(coincEvent.ifos, job.name.upper(), segments.segment(float(coincEvent.time), float(coincEvent.time)), "file://localhost/"+job.outputPath+'/DataProducts/'+oFilename)

        # Comma-separated list of the ifos involved in the candidate.
        ifo_names = []
        if hasattr(coincEvent, "sngl_inspiral"):
            for sngl in coincEvent.sngl_inspiral.itervalues():
                ifo_names.append(sngl.ifo)
        elif hasattr(coincEvent, "ifos_list"):
            ifo_names = list(coincEvent.ifos_list)
        self.add_var_opt("ifo-list",",".join(ifo_names))

        if not opts.disable_dag_categories:
            self.set_category(job.name.lower())
        if not opts.no_findVetoes:
            dag.add_node(self)
            self.validate()
        else:
            self.invalidate()
1504
1505
1506
class customFOMPlotNode(pipeline.CondorDAGNode,FUNode):
    """
    This is a node that corresponds with the job class to whip up
    custom FOMs.  In general each node will have one condor
    changed variable, which is t0 (gps) of trigger.
    (class header reconstructed after extraction loss -- grounded by the
    customFOMPlotNode.defaults reference below.)
    """
    defaults={"section":"customfoms",
              "options":{"plot-windows":"14400,7200",
                         "ifo-list":"L1,H1,V1"}
              }
    def __init__(self, dag, job, cp, opts, coincEvent):
        """
        Takes in a coincEvent object and prepares figure request.
        """
        self.__conditionalLoadDefaults__(customFOMPlotNode.defaults,cp)
        pipeline.CondorDAGNode.__init__(self,job)
        for key in ('plot-windows', 'ifo-list'):
            if cp.has_option('customfoms', key):
                self.add_var_opt(key, cp.get('customfoms', key))
        self.add_var_opt("gps-time",coincEvent.time)
        self.add_var_opt("verbose","")
        self.add_var_opt("output-path",job.outputPath+'/DataProducts/')
        if not opts.disable_dag_categories:
            self.set_category(job.name.lower())

        # Only scheduled on the caltech clusters; the no-findVetoes flag
        # also disables these plots.
        if not opts.no_findVetoes and "ligo.caltech.edu" in get_hostname():
            dag.add_node(self)
            self.validate()
        else:
            self.invalidate()
1538
1539
class effDRatioNode(pipeline.CondorDAGNode,FUNode):
    """
    This Node class performs a parameter consistency check using the
    sites claiming to detect the trigger and the observed
    effective distance at each site.  A command line example is
    below:
    followupRatioTest.py -R /archive/home/ctorres/public_html/DQstuff/ratioTest.pickle -iL1 -jH1 -kV1 -I10 -J10 -K5 -A 1 -B 1 -C 1.0001 -pmoinmoin -o mmTable.wiki
    (class header reconstructed after extraction loss -- grounded by the
    effDRatioNode.defaults reference below.)
    """
    defaults={"section":"effDRatio",
              "options":{"output-file":"effDRatio.wiki",
                         "output-format":"moinmoin",
                         "snr-ratio-test":"/archive/home/ctorres/public_html/DQstuff/ratioTest.pickle"}
              }
    def __init__(self, dag, job, cp, opts, coincEvent=None):
        """
        Build an effective-distance-ratio test node for coincEvent.
        """
        self.__conditionalLoadDefaults__(effDRatioNode.defaults,cp)
        pipeline.CondorDAGNode.__init__(self,job)
        oFilename="%s-effDRatio_%s_%s.wiki"%(coincEvent.instruments,
                                             coincEvent.ifos,
                                             coincEvent.time)
        self.add_var_opt("output-file",job.outputPath+'/DataProducts/'+oFilename)
        self.add_var_opt("output-format",cp.get('effDRatio','output-format'))
        self.add_var_opt("snr-ratio-test",cp.get('effDRatio','snr-ratio-test'))

        # Pass each participating single-ifo trigger as a numbered slot.
        index=1
        for ifo,snglEvent in coincEvent.sngl_inspiral.items():
            if ifo in coincEvent.ifos:
                self.add_var_opt("ifo%i"%(index),snglEvent.ifo)
                self.add_var_opt("snr%i"%(index),snglEvent.snr)
                self.add_var_opt("time%i"%(index),snglEvent.time)
                index=index+1
        # Pad the remaining slots with None up to three -- presumably the
        # executable expects exactly three; confirm against its CLI.
        for rIndex in range(index,3+1):
            self.add_var_opt("ifo%i"%(rIndex),None)
            self.add_var_opt("snr%i"%(rIndex),None)
            self.add_var_opt("time%i"%(rIndex),None)

        if not opts.disable_dag_categories:
            self.set_category(job.name.lower())

        if not opts.no_effectiveRatio:
            dag.add_node(self)
            self.validate()
        else:
            self.invalidate()
1588
1589
1590
1591
1593 """
1594 A C code for computing the sky map
1595 """
def __init__(self,dag,job,cp, opts, coinc, sngl_node_dict, p_nodes=[]):
    """
    Configure the sky map computation for one coincident candidate,
    wiring in the c-data frame and xml outputs of the per-ifo inspiral
    re-run nodes in sngl_node_dict.
    """
    self.ifo_list = ["H1","L1","V1"]

    # Resolution and sample rate come from the job configuration.
    self.ra_res = job.ra_res
    self.dec_res = job.dec_res
    self.sample_rate = job.sample_rate
    pipeline.CondorDAGNode.__init__(self,job)

    self.output_file_name = job.outputPath + str(coinc.time) + ".txt.gz"
    self.add_var_opt("output-file",self.output_file_name)
    self.add_var_opt("ra-res",self.ra_res)
    self.add_var_opt("dec-res",self.dec_res)

    # Default every detector slot to "none"; real inputs are filled below.
    for det in ['h1','h2','l1','v1']:
        self.add_var_opt(det+"-frame-file","none")
        self.add_var_opt(det+"-xml-file","none")
        self.add_var_opt(det+"-channel-name","none")

    self.add_var_opt("sample-rate",coinc.get_sample_rate())

    for det, insp_node in sngl_node_dict.items():
        # NOTE(review): .strip(".gz") strips any of '.', 'g', 'z' from both
        # ends rather than the literal suffix; it works for the names
        # produced by the inspiral nodes but is fragile.
        self.add_var_opt(det.lower()+"-frame-file",insp_node.output_file_name.replace(".xml",".gwf").strip(".gz"))
        self.add_var_opt(det.lower()+"-xml-file",insp_node.output_file_name)
    for det, sngl in coinc.sngl_inspiral_coh.items():
        self.add_var_opt( "%s-channel-name" % (det.lower(),), "%s:CBC-CData_%d" % (det.upper(), int(sngl.row.event_id)) )

    self.output_cache = lal.CacheEntry("".join(coinc.instruments.split(",")), job.name.upper(), segments.segment(float(coinc.time), float(coinc.time)), "file://localhost/"+os.path.abspath(self.output_file_name))

    if not opts.disable_dag_categories:
        self.set_category(job.name.lower())

    for node in p_nodes:
        if node.validNode:
            self.add_parent(node)
    if not opts.no_skymap:
        dag.add_node(self)
        self.validate()
    else:
        self.invalidate()
1641
1642
1644 """
1645 A python code for plotting the sky map
1646 """
def __init__(self, dag, job,cp, opts, coinc, skyMapNode, p_nodes = []):
    """
    Set up the node that plots the sky map produced by skyMapNode for
    the candidate in coinc.  The injected sky position is overlaid when
    the candidate corresponds to a simulation.
    """
    pipeline.CondorDAGNode.__init__(self,job)

    self.add_var_opt("map-data-file",skyMapNode.output_file_name)
    self.add_var_opt("user-tag",str(coinc.time))
    self.add_var_opt("ifo-tag",coinc.ifos)
    self.add_var_opt("ifo-times",coinc.instruments)
    self.add_var_opt("ra-res",str(skyMapNode.ra_res))
    self.add_var_opt("dec-res",str(skyMapNode.dec_res))
    self.add_var_opt("stat-value", str(coinc.combined_far))

    self.setupPlotNode(job)

    # For injections, overlay the true sky location.
    if coinc.sim:
        inj_ra = coinc.sim.longitude
        inj_dec = coinc.sim.latitude
        self.add_var_opt("injection-right-ascension",str(inj_ra))
        self.add_var_opt("injection-declination",str(inj_dec))

    # BUG FIX: an earlier assignment derived a .png name from the sky map
    # file and was immediately overwritten by the cache name below; the
    # dead store has been removed.
    self.output_file_name = "%s-plot_inspiral_skymap_%s_%s-unspecified-gpstime.cache" % ( coinc.instruments, coinc.ifos, str(coinc.time))

    self.output_cache = lal.CacheEntry("".join(coinc.instruments.split(",")), job.name.upper(), segments.segment(float(coinc.time), float(coinc.time)), "file://localhost/"+job.outputPath + '/' + self.output_file_name)

    if not opts.disable_dag_categories:
        self.set_category(job.name.lower())

    for node in p_nodes:
        if node.validNode:
            self.add_parent(node)
    if not opts.no_skymap:
        dag.add_node(self)
        self.validate()
    else:
        self.invalidate()
1686
1688 """
1689 A C code for computing the coherent inspiral statistic.
1690 An example command line is:
1691 lalapps_coherent_inspiral --segment-length 1048576 --dynamic-range-exponent 6.900000e+01 --low-frequency-cutoff 4.000000e+01 --bank-file H1H2-COHBANK_COHERENT_H1H2_PLAYGROUND-823269333-600.xml --sample-rate 4096 --cohsnr-threshold 5.500000e+00 --ifo-tag H1H2 --frame-type LSC-STRAIN --H1-framefile H1-INSPIRAL_COHERENT_H1H2_PLAYGROUND-823269286-2048.gwf --H2-framefile H2-INSPIRAL_COHERENT_H1H2_PLAYGROUND-823268952-2048.gwf --gps-end-time 823269933 --gps-start-time 823269333 --write-cohsnr --write-cohnullstat --write-cohphasediff --write-events --verbose
1692 """
1693
1694
1695
def __init__(self, dag, job, cp, opts, coinc, inspiral_node_dict, chia_node =None, p_nodes = []):
    """
    Build a lalapps_coherent_inspiral node for the candidate in coinc,
    feeding it the c-data frame files produced by the per-ifo inspiral
    re-run nodes in inspiral_node_dict.

    chia_node -- when given, this is the second (followup) pass: the sky
                 search is restricted using the first pass result and the
                 extra debugging time series are written; when None the
                 first (all-sky) pass is configured.
    p_nodes   -- parent nodes (NOTE(review): mutable default argument;
                 safe here because it is only iterated)
    """
    pipeline.CondorDAGNode.__init__(self,job)
    pipeline.AnalysisNode.__init__(self)
    self.output_file_name = ""
    # Any participating trigger supplies the filter parameters shared by
    # all detectors of the original analysis.
    sngl = coinc.sngl_inspiral_coh.values()[0]

    user_tag = "COHERENT-"+str(coinc.time)

    # Re-use the filtering parameters recorded with the original run.
    self.add_var_opt( "segment-length", sngl.get_proc_param('segment-length') )
    self.add_var_opt( "dynamic-range-exponent",sngl.get_proc_param('dynamic-range-exponent') )
    self.add_var_opt( "low-frequency-cutoff", sngl.get_proc_param('low-frequency-cutoff') )
    self.add_var_opt("sample-rate", sngl.get_proc_param('sample-rate') )

    self.add_var_opt("cohsnr-threshold",cp.get('chia','cohsnr-threshold'))
    self.add_var_opt("ra-step",cp.get('chia','ra-step'))
    self.add_var_opt("dec-step",cp.get('chia','dec-step'))
    self.add_var_opt("cdata-length",1.0)
    self.add_var_opt("user-tag",user_tag)
    self.add_var_opt("ifo-tag",coinc.instruments.replace(',',''))
    self.add_var_opt("write-events","")
    self.add_var_opt("write-compress","")
    self.add_var_opt("maximize-over-chirp","")
    self.add_var_opt("followup","")

    # Second pass only: restrict the sky search to the first pass result
    # and write the extra snr / null-statistic time series.
    if chia_node:
        self.add_var_opt("exttrig","")
        self.add_var_opt("chia-file",chia_node.output_file_name)
        self.add_var_opt("write-cohsnr","")
        self.add_var_opt("write-cohnullstat","")
        self.add_var_opt("write-h1h2nullstat","")
        self.add_var_opt("write-cohh1h2snr","")

    # Seconds of data analysed on either side of the trigger time.
    hLengthAnalyzed = 1

    self.add_var_opt("output-path",job.outputPath)

    self.start = int(coinc.time) - int(hLengthAnalyzed)
    self.end = int(coinc.time) + int(hLengthAnalyzed)

    self.add_var_opt("gps-start-time",self.start)
    self.add_var_opt("gps-end-time",self.end)

    # Output names follow IFOS-CHIA_TAG-START-DURATION; the all-sky first
    # pass is distinguished by the -ALLSKY suffix.
    if chia_node:
        self.output_file_name = "%s/%s-CHIA_%s-%d-%d.xml.gz" % (job.outputPath, coinc.instruments.replace(',',''), user_tag, self.start, self.end-self.start )
    else:
        self.output_file_name = "%s/%s-CHIA_%s-%d-%d-ALLSKY.xml.gz" % (job.outputPath, coinc.instruments.replace(',',''), user_tag, self.start, self.end-self.start )
    self.output_frame_file = "%s/%s-CHIA_%s-%d-%d.gwf" % (job.outputPath, coinc.instruments.replace(',',''), user_tag, self.start, self.end-self.start )
    self.netnull_output_frame_file = "%s/%s-CHIA_NULL_STAT_%s-%d-%d.gwf" % (job.outputPath, coinc.instruments.replace(',',''), user_tag, self.start, self.end-self.start )

    # H1H2-specific products always carry the H1H2 prefix.
    self.h1h2null_output_frame_file = "%s/H1H2-CHIA_NULL_STAT_%s-%d-%d.gwf" % (job.outputPath, user_tag, self.start, self.end-self.start )
    self.h1h2coh_output_frame_file = "%s/H1H2-CHIA_COHSNR_%s-%d-%d.gwf" % (job.outputPath, user_tag, self.start, self.end-self.start )

    # Cache entries for the xml result and the two network frame files.
    self.output_cache = []

    self.output_cache.append(lal.CacheEntry("".join(coinc.instruments.split(",")), job.name.upper(), segments.segment(float(coinc.time), float(coinc.time)), "file://localhost/"+os.path.abspath(self.output_file_name)))

    self.output_cache.append(lal.CacheEntry("".join(coinc.instruments.split(",")), job.name.upper(), segments.segment(float(coinc.time), float(coinc.time)), "file://localhost/"+os.path.abspath(self.output_frame_file)))

    self.output_cache.append(lal.CacheEntry("".join(coinc.instruments.split(",")), job.name.upper(), segments.segment(float(coinc.time), float(coinc.time)), "file://localhost/"+os.path.abspath(self.netnull_output_frame_file)))

    # Coherent bank containing the coincident triggers of this candidate.
    bankname = 'trig_bank/%s-COHBANK_FOLLOWUP_%s-%d-%d.xml.gz' % (coinc.instruments.replace(',',''), str(coinc.time), int(coinc.time) - int(hLengthAnalyzed), 2 * int(hLengthAnalyzed))
    bankFile = self.write_trigbank(coinc, bankname)
    self.set_bank(bankFile)

    # Per-ifo c-data frame files from the single inspiral re-runs.
    arg_str = ''
    for ifo,sngl in inspiral_node_dict.items():
        arg_str += " --" + ifo.lower()+"-framefile " + sngl.output_frame_file

    self.add_var_arg(arg_str)

    if not opts.disable_dag_categories:
        self.set_category(job.name.lower())

    for node in p_nodes:
        if node.validNode:
            self.add_parent(node)
    if not opts.no_chia:
        dag.add_node(self)
        self.validate()
    else:
        self.invalidate()
1791
def write_trigbank(self, coinc, name):
    """
    Write a coherent follow-up bank file holding the coincident triggers
    of ``coinc`` plus a one-row search summary covering [self.start,
    self.end), and return its file name.  (def line reconstructed from
    the call site in __init__ -- confirm against upstream.)
    """
    # Best effort: the directory may already exist.
    try:
        os.mkdir('trig_bank')
    except: pass

    xmldoc = ligolw.Document()
    xmldoc.appendChild(ligolw.LIGO_LW())
    llw = xmldoc.childNodes[-1]

    # Empty process_params table expected by downstream readers.
    llw.appendChild(lsctables.New(lsctables.ProcessParamsTable))

    # Single search-summary row describing the analysed span.
    search_summary_table = lsctables.New(lsctables.SearchSummaryTable)
    llw.appendChild(search_summary_table)
    row = search_summary_table.RowType()
    row.process_id = "process:process_id:0"
    row.shared_object = None
    row.lalwrapper_cvs_tag = None
    row.lal_cvs_tag = None
    row.comment = "Awesome"
    row.ifos = coinc.instruments
    analysed = segments.segment(LIGOTimeGPS(self.start,0), LIGOTimeGPS(self.end,0))
    row.set_in(analysed)
    row.set_out(analysed)
    row.nevents = None
    row.nnodes = None
    search_summary_table.append(row)

    # One sngl_inspiral row per participating detector.
    sngl_inspiral_table = lsctables.New(lsctables.SnglInspiralTable)
    llw.appendChild(sngl_inspiral_table)
    for ifo, sngl in coinc.sngl_inspiral_coh.items():
        sngl_inspiral_table.append(sngl.row)

    utils.write_filename(xmldoc, name, verbose=False, gz = True)
    return name
1828
1829
1830
1831
1833 """
1834 Runs an instance of a plotSNRCHISQ followup job
1835 """
def __init__(self, dag, job, cp, opts, sngl, coinc, sngl_node, p_nodes=[]):
    """
    job = A CondorDAGJob that can run an instance of plotSNRCHISQ followup.
    Plots the snr and chisq time series produced by the inspiral re-run
    node ``sngl_node`` in a window around the trigger.
    """
    pipeline.CondorDAGNode.__init__(self,job)
    self.output_file_name = ""
    self.add_var_opt("frame-file",sngl_node.output_frame_file)
    self.add_var_opt("inspiral-xml-file",sngl_node.output_file_name)

    # Plot a 2 second window centred on the trigger time.
    duration = 2.0
    half_window = duration*.5
    self.add_var_opt("plot-width",duration)

    self.add_var_opt("gps",sngl.time)
    self.add_var_opt("gps-start-time",sngl.time-half_window)
    self.add_var_opt("gps-end-time",sngl.time+half_window)

    self.add_var_opt("ifo-times", coinc.instruments)
    self.add_var_opt("ifo-tag", sngl.ifo)

    user_tag = "FOLLOWUP_PLOTSNRCHISQ_" + str(sngl.time)
    self.add_var_opt("user-tag",user_tag)

    self.output_file_name = "%s-plotsnrchisq_pipe_%s_%s-%d-%d.cache" % ( coinc.instruments, sngl.ifo, user_tag, int(sngl.time-half_window), math.ceil(sngl.time+half_window) - int(sngl.time-half_window) )

    self.output_cache = lal.CacheEntry(sngl.ifo, job.name.upper(), segments.segment(float(sngl.time), float(sngl.time)), "file://localhost/"+job.outputPath + '/' + self.output_file_name)

    self.setupPlotNode(job)

    if not opts.disable_dag_categories:
        self.set_category(job.name.lower())

    for node in p_nodes:
        if node.validNode:
            self.add_parent(node)
    if not opts.no_plotsnrchisq:
        dag.add_node(self)
        self.validate()
    else:
        self.invalidate()
1875
1876
1877
1878
1879
1881 """
1882 Runs an instance of a plotChia followup job
1883 """
1884
def __init__(self, dag, job, cp, opts, coinc, chia_node, insp_node_dict, p_nodes=[]):
    """
    job = A CondorDAGJob that can run an instance of plotChiaJob followup.
    Plots the coherent snr / null-statistic time series produced by the
    coherent inspiral node ``chia_node``.
    """
    pipeline.CondorDAGNode.__init__(self,job)
    self.output_file_name = ""
    user_tag = "PLOT_CHIA_" + str(coinc.time)

    # Inputs produced by the coherent inspiral stage.
    self.add_var_opt("chiaXmlFile",chia_node.output_file_name)
    self.add_var_opt("chiaFrameFile",chia_node.output_frame_file)
    self.add_var_opt("cohH1H2SNRFrameFile",chia_node.h1h2coh_output_frame_file)
    self.add_var_opt("H1H2NullStatFrameFile",chia_node.h1h2null_output_frame_file)
    self.add_var_opt("cohNullStatFrameFile",chia_node.netnull_output_frame_file)

    # Plot a 2 second window centred on the candidate.
    window_start = int(coinc.time-1)
    window_end = int(coinc.time+1)
    self.add_var_opt("gps-start-time",window_start)
    self.add_var_opt("gps-end-time",window_end)
    self.add_var_opt("sample-rate",str(coinc.get_sample_rate()))
    self.add_var_opt("user-tag",user_tag)

    ifos = "".join(coinc.ifos.split(","))
    instruments = "".join(coinc.instruments.split(","))
    self.add_var_opt("ifo-tag",ifos)
    self.add_var_opt("ifo-times",instruments)
    self.setupPlotNode(job)

    self.output_file_name = "%s-plotchiatimeseries_%s_%s-%d-%d.cache" % ( instruments, ifos, user_tag, window_start, math.ceil(window_end) - window_start )

    self.output_cache = lal.CacheEntry(instruments, job.name.upper(), segments.segment(float(coinc.time), float(coinc.time)), "file://localhost/"+job.outputPath + '/' + self.output_file_name)

    if not opts.disable_dag_categories:
        self.set_category(job.name.lower())

    for node in p_nodes:
        if node.validNode:
            self.add_parent(node)
    if not opts.no_chia:
        dag.add_node(self)
        self.validate()
    else:
        self.invalidate()

    # Per-ifo c-data frame files from the single inspiral re-runs
    # (arguments are only rendered when the DAG is written, so adding
    # them after dag.add_node is harmless).
    for ifo, insp in insp_node_dict.items():
        self.add_var_arg("--"+ifo.lower()+"-framefile "+ insp.output_frame_file)
1927
1928
1929
1930
1931
1932 -class mcmcNode(pipeline.CondorDAGNode, FUNode):
1933 """
1934 Runs a MCMC job
1935 """
1936 - def __init__(self,dag,job,cp,opts,coinc,frame_cache_list,randomseed,p_nodes,ifo_string=None):
1937 pipeline.CondorDAGNode.__init__(self,job)
1938
1939 time_margin = string.strip(cp.get('fu-mcmc','prior-coal-time-marg'))
1940 iterations = string.strip(cp.get('fu-mcmc','iterations'))
1941 tbefore = string.strip(cp.get('fu-mcmc','tbefore'))
1942 tafter = string.strip(cp.get('fu-mcmc','tafter'))
1943
1944 massmin = string.strip(cp.get('fu-mcmc','massmin'))
1945 massmax = string.strip(cp.get('fu-mcmc','massmax'))
1946 dist90 = string.strip(cp.get('fu-mcmc','dist90'))
1947 dist10 = string.strip(cp.get('fu-mcmc','dist10'))
1948
1949 if ifo_string:
1950 IFOs = frozenset([ifo_string])
1951 self.ifonames = ifo_string
1952 sngl_insp_string = "sngl_inspiral"
1953 else:
1954 IFOs = coinc.ifos_set
1955 self.ifonames = coinc.instruments
1956 sngl_insp_string = "sngl_inspiral_coh"
1957
1958 channelNames = ""
1959 chunk_end_list={}
1960 chunk_start_list={}
1961 for itf in IFOs:
1962 sngl = eval("coinc." + sngl_insp_string + "[\'" + itf + "\']")
1963 for row in sngl.process_params:
1964 param = row.param.strip("-")
1965 value = row.value
1966 if param == 'channel-name':
1967 channel = value
1968 if param == 'gps-end-time':
1969 chunk_end = value
1970 if param == 'gps-start-time':
1971 chunk_start = value
1972 channelNames += channel + ","
1973 chunk_end_list[itf] = int(chunk_end)
1974 chunk_start_list[itf] = int(chunk_start)
1975
1976 if len(IFOs) > 1:
1977 self.ifoRef = coinc.max_trigger_ifo()
1978 else:
1979 self.ifoRef = ifo_string
1980
1981 self.add_var_opt("template",string.strip(cp.get('fu-mcmc','template')))
1982 self.add_var_opt("iterations",iterations)
1983 self.add_var_opt("randomseed",randomseed)
1984 self.add_var_opt("tcenter","%0.3f"%coinc.sngl_inspiral[self.ifoRef].time)
1985 self.add_var_opt("tbefore",tbefore)
1986 self.add_var_opt("tafter",tafter)
1987
1988 tmin = coinc.sngl_inspiral[self.ifoRef].time - float(time_margin)
1989 tmax = coinc.sngl_inspiral[self.ifoRef].time + float(time_margin)
1990 self.add_var_opt("priorparameters","[" + massmin + "," + massmax + "," + str(tmin) + "," + str(tmax) + "," + dist90 + "," + dist10 + "]")
1991
1992 param_mchirp = coinc.sngl_inspiral[self.ifoRef].row.mchirp
1993 param_eta = coinc.sngl_inspiral[self.ifoRef].row.eta
1994 param_distance = coinc.sngl_inspiral[self.ifoRef].row.eff_distance
1995 self.add_var_opt("guess","[" + str(param_mchirp) + "," + str(param_eta) + "," + str(coinc.sngl_inspiral[self.ifoRef].time) + "," + str(param_distance) + "]")
1996
1997 cacheFiles = ""
1998 for frameCache in frame_cache_list:
1999 cacheFiles += frameCache + ","
2000 self.add_var_opt("cachefile","["+cacheFiles.strip(",")+"]")
2001 self.add_var_opt("filechannel","["+channelNames.strip(",")+"]")
2002
2003 psdEstimateStart = ""
2004 psdEstimateEnd = ""
2005 for itf in IFOs:
2006 datainchunk_before = int(coinc.sngl_inspiral[self.ifoRef].time) - 75 - 64 - chunk_start_list[itf]
2007 datainchunk_after = chunk_end_list[itf] - 64 - int(coinc.sngl_inspiral[self.ifoRef].time) - 32
2008 if datainchunk_after > datainchunk_before:
2009 psdEstimateStart += str(int(coinc.sngl_inspiral[self.ifoRef].time) + 32) + ","
2010 psdEstimateEnd += str(chunk_end_list[itf] - 64) + ","
2011 else:
2012 psdEstimateStart += str(chunk_start_list[itf] + 64) + ","
2013 psdEstimateEnd += str(int(coinc.sngl_inspiral[self.ifoRef].time) - 75) + ","
2014
2015 self.add_var_opt("psdestimatestart","["+psdEstimateStart.strip(",")+"]")
2016 self.add_var_opt("psdestimateend","["+psdEstimateEnd.strip(",")+"]")
2017
2018 self.add_var_opt("importanceresample",10000)
2019
2020 self.id = job.name.upper() + '-' + self.ifonames.replace(",","") + '-' + str(int(coinc.coinc_event_id)) + '_' + randomseed
2021 self.outputName = job.outputPath + '/' + self.id
2022 self.add_var_opt("outfilename",self.outputName)
2023
2024 self.start_time = min(chunk_start_list.values())
2025 self.end_time = max(chunk_end_list.values())
2026 self.output_cache = []
2027 self.output_cache.append(lal.CacheEntry(self.ifonames.replace(",",""), job.name.upper(), segments.segment(self.start_time,self.end_time), "file://localhost/"+self.outputName+".csv"))
2028
2029 if not opts.disable_dag_categories:
2030 self.set_category(job.name.lower())
2031
2032 if opts.enable_bayesian:
2033 for node in p_nodes:
2034 if node.validNode:
2035 self.add_parent(node)
2036 dag.add_node(self)
2037 self.validate()
2038 else:
2039 self.invalidate()
2040
2041
2042
2043
2045 """
2046 Runs a SPIN MCMC job
2047 """
def __init__(self,dag,job,cp,opts,coinc,frame_cache_list,p_nodes):
    """
    Set up a spinning-MCMC (lalapps_spinspiral) node for a coincident
    trigger.

    dag              -- the followupDAG being built
    job              -- the CondorDAGJob this node runs under
    cp               -- parsed ConfigParser with a [fu-spinmcmc] section
    opts             -- command-line options (disable_dag_categories,
                        enable_bayesian)
    coinc            -- the coincident trigger to follow up
    frame_cache_list -- list of frame-cache file paths
    p_nodes          -- parent nodes this node must wait for
    """
    pipeline.CondorDAGNode.__init__(self,job)

    # MCMC tuning parameters from the [fu-spinmcmc] section.
    iterations = string.strip(cp.get('fu-spinmcmc','iterations'))
    tbefore = string.strip(cp.get('fu-spinmcmc','tbefore'))
    tafter = string.strip(cp.get('fu-spinmcmc','tafter'))

    # Spin follow-up always runs coherently over the full network.
    IFOs = coinc.ifos_set
    self.ifonames = coinc.instruments
    sngl_insp_string = "sngl_inspiral_coh"

    # Collect, per participating IFO, the channel name and the GPS
    # boundaries of the analysis chunk from the single-inspiral
    # process_params table.
    channelNames = ""
    ifoString = ""
    chunk_end_list={}
    chunk_start_list={}
    for itf in IFOs:
        # Plain attribute lookup instead of eval() on a constructed
        # expression string (identical result, no dynamic code execution).
        sngl = getattr(coinc, sngl_insp_string)[itf]
        for row in sngl.process_params:
            param = row.param.strip("-")
            value = row.value
            if param == 'channel-name':
                channel = value
            if param == 'gps-end-time':
                chunk_end = value
            if param == 'gps-start-time':
                chunk_start = value
        channelNames += channel + ","
        ifoString += itf + ","
        chunk_end_list[itf] = int(chunk_end)
        chunk_start_list[itf] = int(chunk_start)

    # lalapps_spinspiral encodes the detector network as digits:
    # H1 -> 1, L1 -> 2, V1 -> 3.
    ifoString = ifoString.replace("H1","1")
    ifoString = ifoString.replace("L1","2")
    ifoString = ifoString.replace("V1","3")
    self.add_var_opt("network","["+ifoString.strip(",")+"]")

    # Reference IFO: the one holding the loudest trigger in the coincidence.
    self.ifoRef = coinc.max_trigger_ifo()

    self.add_var_opt("nIter",iterations)
    self.add_var_opt("tc","%0.3f"%coinc.sngl_inspiral[self.ifoRef].time)
    self.add_var_opt("beforetc",tbefore)
    self.add_var_opt("aftertc",tafter)

    # Starting guesses taken from the reference trigger's template.
    param_mchirp = coinc.sngl_inspiral[self.ifoRef].row.mchirp
    param_eta = coinc.sngl_inspiral[self.ifoRef].row.eta
    param_distance = coinc.sngl_inspiral[self.ifoRef].row.eff_distance

    self.add_var_opt("mChirp",param_mchirp)
    self.add_var_opt("eta",param_eta)
    self.add_var_opt("dist",param_distance)

    # Comma-separated frame-cache and channel lists in [..] bracket syntax.
    cacheFiles = ""
    for frameCache in frame_cache_list:
        cacheFiles += frameCache + ","
    self.add_var_opt("cache","["+cacheFiles.strip(",")+"]")
    self.add_var_opt("channel","["+channelNames.strip(",")+"]")

    # Unique node id: JOBNAME-IFOS-coinc_event_id
    self.id = job.name.upper() + '-' + self.ifonames.replace(",","") + '-' + str(int(coinc.coinc_event_id))

    self.outputName = job.outputPath + '/' + self.id
    self.add_var_opt("outputPath",job.outputPath)

    # Advertise the output in the DAG-wide cache, spanning the union of
    # the per-IFO analysis chunks.
    self.start_time = min(chunk_start_list.values())
    self.end_time = max(chunk_end_list.values())
    self.output_cache = []
    self.output_cache.append(lal.CacheEntry(self.ifonames.replace(",",""), job.name.upper(), segments.segment(self.start_time,self.end_time), "file://localhost/"+self.outputName))
    if not opts.disable_dag_categories:
        self.set_category(job.name.lower())

    # Only wire the node into the DAG when Bayesian follow-up is enabled.
    if opts.enable_bayesian:
        for node in p_nodes:
            if node.validNode:
                self.add_parent(node)
        dag.add_node(self)
        self.validate()
    else:
        self.invalidate()
2141
2142
2143
2144
2145
2147 """
2148 Runs a plotmcmc job
2149 """
def __init__(self,job,coinc,cp,opts,dag,ifo,ifonames,p_nodes):
    """
    Set up a plotmcmc node that post-processes the MCMC chains written by
    the parent followupmcmc nodes and renders posterior summary plots.

    ifo      -- the IFO whose single-inspiral row supplies the reference
                (trigger template) values overplotted on the posteriors
    ifonames -- comma-separated instrument string used in the node id
    p_nodes  -- the MCMC nodes whose .csv chain files are plotted
    """
    pipeline.CondorDAGNode.__init__(self,job)

    # "sngl" jobs read the single-IFO table; otherwise the coherent one.
    if job.tag_base=="sngl":
        sngl_insp_string = "sngl_inspiral"
    else:
        sngl_insp_string = "sngl_inspiral_coh"

    # Plain attribute lookup instead of eval() on a constructed
    # expression string (identical result, no dynamic code execution).
    sngl = getattr(coinc, sngl_insp_string)[ifo]

    # Optional burn-in length for the chains.
    if cp.has_option('fu-plotmcmc','burnin'):
        burnin = string.strip(cp.get('fu-plotmcmc','burnin'))
        if burnin.strip():
            self.add_var_opt("burnin",burnin)

    plot_routine = string.strip(cp.get('fu-plotmcmc','plot_routine'))
    executable = string.strip(cp.get('fu-plotmcmc','executable'))

    # Reference values from the trigger's template; phi has no template
    # counterpart and is fixed at zero.
    gps = sngl.time
    mchirp = sngl.row.mchirp
    eta = sngl.row.eta
    distance = sngl.row.eff_distance
    phi = "0.0"

    self.add_var_opt("plot-routine",plot_routine)
    self.add_var_opt("executable",executable)
    self.add_var_opt("reference-time",gps)
    self.add_var_opt("reference-mchirp",mchirp)
    self.add_var_opt("reference-eta",eta)
    self.add_var_opt("reference-distance",distance)
    self.add_var_opt("reference-phi",phi)

    # Comma-separated list of the parents' MCMC chain files.
    mcmcfilelist = ""
    for node in p_nodes:
        mcmcfilelist += node.outputName + '.csv,'
    self.add_var_opt("mcmc-file",mcmcfilelist.strip(','))

    # Unique node id: JOBNAME-IFOS-coinc_event_id
    self.id = job.name.upper() + '-' + ifonames.replace(",","") + '-' + str(int(coinc.coinc_event_id))
    self.add_var_opt("identity",self.id)

    self.add_var_opt("output-path",job.outputPath)
    # Time span inherited from the first parent MCMC node.
    self.output_cache = lal.CacheEntry(ifonames.replace(",",""), job.name.upper(), segments.segment(p_nodes[0].start_time,p_nodes[0].end_time), "file://localhost/"+job.outputPath+"/"+self.id)

    if not opts.disable_dag_categories:
        self.set_category(job.name.lower())

    # Only wire the node into the DAG when Bayesian follow-up is enabled.
    if opts.enable_bayesian:
        for node in p_nodes:
            if node.validNode:
                self.add_parent(node)
        dag.add_node(self)
        self.validate()
    else:
        self.invalidate()
2206
2207
2208
2209
2210
2212
def __init__(self,job,coinc,cp,opts,dag,ifo,ifonames,p_nodes):
    """
    Set up a plotspinmcmc node that post-processes the chains written by
    the parent spin-MCMC nodes and renders posterior plots for the
    spinning parameter set.

    ifo      -- the IFO whose coherent single-inspiral row supplies the
                reference values overplotted on the posteriors
    ifonames -- comma-separated instrument string used in the node id
    p_nodes  -- the spin-MCMC nodes whose output files are plotted
    """
    pipeline.CondorDAGNode.__init__(self,job)

    # Spin follow-up always runs on the coherent single-inspiral table.
    sngl_insp_string = "sngl_inspiral_coh"

    # Plain attribute lookup instead of eval() on a constructed
    # expression string (identical result, no dynamic code execution).
    sngl = getattr(coinc, sngl_insp_string)[ifo]

    plot_routine = string.strip(cp.get('fu-plotmcmc','plot_routine'))
    executable = string.strip(cp.get('fu-plotmcmc','executable'))

    # Reference values from the trigger's template; the orientation and
    # spin references have no template counterpart and are fixed
    # placeholders.
    gps = sngl.time
    mchirp = sngl.row.mchirp
    eta = sngl.row.eta
    distance = sngl.row.eff_distance

    phi = "0.0"
    a_spin1 = "0.5"
    cs_th_sp1 = "0.1"
    phi_spin1 = "0.4"
    a_spin2 = "0.5"
    cs_th_sp2 = "0.5"
    phi_spin2 = "0.3"

    self.add_var_opt("plot-routine",plot_routine)
    self.add_var_opt("executable",executable)
    self.add_var_opt("reference-time",gps)
    self.add_var_opt("reference-mchirp",mchirp)
    self.add_var_opt("reference-eta",eta)
    self.add_var_opt("reference-distance",distance)
    self.add_var_opt("reference-phi",phi)
    self.add_var_opt("reference-a_spin1",a_spin1)
    self.add_var_opt("reference-a_spin2",a_spin2)
    self.add_var_opt("reference-phi_spin1",phi_spin1)
    self.add_var_opt("reference-phi_spin2",phi_spin2)
    self.add_var_opt("reference-cs_th_sp1",cs_th_sp1)
    self.add_var_opt("reference-cs_th_sp2",cs_th_sp2)

    # Comma-separated list of the parents' MCMC output files.
    # Fix: append "," after each name — the original concatenated the
    # names with no separator, producing one unusable path whenever there
    # is more than one parent node (compare the analogous loop in the
    # non-spin plotmcmc node, which appends '.csv,').
    mcmcfilelist = ""
    for node in p_nodes:
        mcmcfilelist += node.outputName + ','
    self.add_var_opt("mcmc-file",mcmcfilelist.strip(','))

    # Unique node id: JOBNAME-IFOS-coinc_event_id
    self.id = job.name.upper() + '-' + ifonames.replace(",","") + '-' + str(int(coinc.coinc_event_id))
    self.add_var_opt("identity",self.id)

    self.add_var_opt("output-path",job.outputPath)
    # Time span inherited from the first parent MCMC node.
    self.output_cache = lal.CacheEntry(ifonames.replace(",",""), job.name.upper(), segments.segment(p_nodes[0].start_time,p_nodes[0].end_time), "file://localhost/"+job.outputPath+"/"+self.id)

    if not opts.disable_dag_categories:
        self.set_category(job.name.lower())

    # Only wire the node into the DAG when Bayesian follow-up is enabled.
    if opts.enable_bayesian:
        for node in p_nodes:
            if node.validNode:
                self.add_parent(node)
        dag.add_node(self)
        self.validate()
    else:
        self.invalidate()
2274
2275
2276
2277
2278
2279
def __init__(self, config_file, cp, opts):
    """
    Initialize the follow-up Condor DAG: create the DAG log file in the
    configured log directory, name the .dag file after the ini file, and
    install per-category job throttles from [condor-max-jobs].
    """
    log_path = cp.get('fu-output','log-path').strip()
    # DAG basename = ini-file name with the .ini extension removed.
    self.basename = re.sub(r'\.ini',r'', os.path.split(config_file)[1])
    # Steer tempfile so the log lands in log_path with a recognizable name.
    tempfile.tempdir = log_path
    tempfile.template = self.basename + '.dag.log.'
    # NOTE(review): tempfile.mktemp() is race-prone (the name can be
    # claimed between mktemp() and open()); tempfile.mkstemp() would be
    # the safe equivalent here.
    logfile = tempfile.mktemp()
    # Touch the log file so Condor finds it at submission time.
    fh = open( logfile, "w" )
    fh.close()
    pipeline.CondorDAG.__init__(self,logfile)
    self.set_dag_file(self.basename)
    self.jobsDict = {}
    # Running counter used to hand each node a unique "macroid" macro.
    self.node_id = 0
    # Aggregated list of lal.CacheEntry objects describing node outputs.
    self.output_cache = []
    if not opts.disable_dag_categories:
        for cp_opt in cp.options('condor-max-jobs'):
            self.add_maxjobs_category(cp_opt,cp.getint('condor-max-jobs',cp_opt))
2297
2299 self.node_id += 1
2300 node.add_macro("macroid", self.node_id)
2301 pipeline.CondorDAG.add_node(self, node)
2302 try: self.output_cache.extend(node.output_cache)
2303 except:
2304 try: self.output_cache.append(node.output_cache)
2305 except: pass
2306
2308 self.write_sub_files()
2309 self.write_dag()
2310 self.write_script()
2311 self.write_output_cache()
2312
2314 f = open(self.basename+".cache",'w')
2315 for c in self.output_cache:
2316 f.write(str(c)+'\n')
2317 f.close()
2318
2319
2320
2321
2324 cp = ConfigParser.ConfigParser()
2325 self.cp = cp
2326 self.time_now = "_".join([str(i) for i in time_method.gmtime()[0:6]])
2327 self.ini_file=self.time_now + ".ini"
2328 home_base = home_dirs()
2329
2330
2331 cp.add_section("condor")
2332 cp.set("condor","datafind",self.which("ligo_data_find"))
2333 cp.set("condor","inspiral",self.which("lalapps_inspiral"))
2334 cp.set("condor","chia", self.which("lalapps_coherent_inspiral"))
2335 cp.set("condor","universe","standard")
2336
2337 cp.add_section("inspiral")
2338 cp.add_section("data")
2339
2340
2341 cp.add_section("datafind")
2342
2343
2344 cp.add_section("fu-condor")
2345 cp.set("fu-condor","plotsnrchisq",self.which("plotsnrchisq_pipe"))
2346 cp.set("fu-condor","lalapps_skymap",self.which("lalapps_skymap"))
2347 cp.set("fu-condor","pylal_skyPlotJob",self.which("pylal_plot_inspiral_skymap"))
2348 cp.set("fu-condor","datafind",self.which("ligo_data_find"))
2349 cp.set("fu-condor","convertcache",self.which("convertlalcache.pl"))
2350 cp.set("fu-condor","chia", self.which("lalapps_coherent_inspiral"))
2351 cp.set("fu-condor","plotchiatimeseries", self.which("plotchiatimeseries"))
2352 cp.set("fu-condor","effDRatio", self.which("followupRatioTest.py"))
2353 cp.set("fu-condor","vetoflags", self.which("followupQueryVeto.py"))
2354 cp.set("fu-condor","customfom", self.which("followupCustomFOM.py"))
2355 cp.set("fu-condor","dqflags", self.which("followupQueryDQ.py"))
2356 cp.set("fu-condor","mcmc", self.which("lalapps_followupMcmc"))
2357 cp.set("fu-condor","spinmcmc", self.which("lalapps_spinspiral"))
2358 cp.set("fu-condor","plotmcmc", self.which("plotmcmc.py"))
2359 cp.set("fu-condor","plotspinmcmc", self.which("plotspinmcmc.py"))
2360
2361
2362 self.set_qscan_executable()
2363 cp.set("fu-condor","analyseQscan", self.which("analyseQscan.py"))
2364 cp.set("fu-condor","makeCheckListWiki",self.which("makeCheckListWiki.py"))
2365 cp.set("fu-condor","lalapps_followup_page",self.which("lalapps_followup_page"))
2366
2367 cp.add_section("makeCheckListWiki")
2368 cp.set("makeCheckListWiki","universe","local")
2369 cp.set("makeCheckListWiki","location",os.getcwd())
2370
2371 cp.set("makeCheckListWiki","ini-file",os.path.abspath(self.ini_file))
2372
2373
2374 cp.add_section("fu-q-hoft-datafind")
2375 for ifo in ["H1","H2","L1","V1"]:
2376 cp.set("fu-q-hoft-datafind",ifo+"-search-time-range","128")
2377
2378
2379 cp.add_section("fu-q-rds-datafind")
2380 for ifo in ["H1","H2","L1"]:
2381 cp.set("fu-q-rds-datafind",ifo+"-search-time-range","1024")
2382 cp.set("fu-q-rds-datafind","V1-search-time-range","2048")
2383
2384
2385 cp.add_section("fu-fg-ht-qscan")
2386 for config in ["H1config","H2config","L1config","V1config"]:
2387 cp.set("fu-fg-ht-qscan",config,self.__find_config("s5_foreground_" + self.__config_name(config[:2],'hoft') + ".txt","QSCAN CONFIG"))
2388
2389
2390 cp.add_section("fu-fg-rds-qscan")
2391 for config in ["H1config","H2config","L1config"]:
2392 cp.set("fu-fg-rds-qscan",config,self.__find_config("s5_foreground_" + self.__config_name(config[:2],'rds') + ".txt","QSCAN CONFIG"))
2393 cp.set("fu-fg-rds-qscan","V1config","/storage/gpfs_virgo3/virgo/omega/configurations/s6_foreground_V1-raw-cbc.txt")
2394
2395
2396 cp.add_section("fu-fg-seismic-qscan")
2397 for config in ["H1config","H2config","L1config"]:
2398 cp.set("fu-fg-seismic-qscan",config,self.__find_config("s5_foreground_" + self.__config_name(config[:2],'seismic') + ".txt","QSCAN CONFIG"))
2399 cp.set("fu-fg-seismic-qscan","V1config","/storage/gpfs_virgo3/virgo/omega/configurations/s6_foreground_V1-raw-seismic-cbc.txt")
2400
2401
2402 cp.add_section("fu-analyse-qscan")
2403 cp.set("fu-analyse-qscan","generate-qscan-xml","")
2404 cp.set("fu-analyse-qscan","z-threshold","0.0")
2405 cp.set("fu-analyse-qscan","z-min","0.0")
2406 cp.set("fu-analyse-qscan","z-max","30.0")
2407 cp.set("fu-analyse-qscan","z-bins","60")
2408 cp.set("fu-analyse-qscan","rds-dt-min","-0.6")
2409 cp.set("fu-analyse-qscan","rds-dt-max","0.6")
2410 cp.set("fu-analyse-qscan","ht-dt-min","-0.6")
2411 cp.set("fu-analyse-qscan","ht-dt-max","0.6")
2412 cp.set("fu-analyse-qscan","seis-rds-dt-min","-4.2")
2413 cp.set("fu-analyse-qscan","seis-rds-dt-max","4.2")
2414 cp.set("fu-analyse-qscan","dt-bins","120")
2415 cp.set("fu-analyse-qscan","plot-dt-distribution","")
2416 cp.set("fu-analyse-qscan","plot-z-scattered","")
2417 cp.set("fu-analyse-qscan","plot-z-distribution","")
2418
2419
2420 cp.add_section("fu-skymap")
2421 cp.set("fu-skymap","ra-res","1024")
2422 cp.set("fu-skymap","dec-res","512")
2423 cp.set("fu-skymap","sample-rate","4096")
2424
2425
2426 cp.add_section("fu-output")
2427 cp.set("fu-output","log-path",self.log_path())
2428 cp.set("fu-output","output-dir",self.web_dir())
2429 cp.set("fu-output","web-url", self.web_url())
2430
2431
2432 cp.add_section("chia")
2433 cp.set('chia','cohsnr-threshold', "1")
2434 cp.set('chia','ra-step', "1")
2435 cp.set('chia','dec-step', "1")
2436 cp.set('chia','numCohTrigs', "2000")
2437 cp.set('chia', 'sample-rate', "4096")
2438
2439
2440 cp.add_section("effDRatio")
2441 cp.set('effDRatio','snr-ratio-test',self.__find_config("ratioTest.pickle","RATIO TEST PICKLE"))
2442
2443
2444 cp.add_section("fu-mcmc")
2445 cp.set("fu-mcmc","chain_nb","6")
2446 cp.set("fu-mcmc","prior-coal-time-marg","0.050")
2447 cp.set("fu-mcmc","iterations","1000000")
2448 cp.set("fu-mcmc","tbefore","30")
2449 cp.set("fu-mcmc","tafter","1")
2450 cp.set("fu-mcmc","template","20SP")
2451
2452 cp.set("fu-mcmc","massmin","1.0")
2453 cp.set("fu-mcmc","massmax","15.0")
2454 cp.set("fu-mcmc","dist90","40.0")
2455 cp.set("fu-mcmc","dist10","80.0")
2456
2457
2458 cp.add_section("fu-plotmcmc")
2459 cp.set("fu-plotmcmc","plot_routine",self.__find_routine("mcmcsummary.R","R SCRIPT FOR MCMC PLOTS"))
2460 cp.set("fu-plotmcmc","executable","/usr/bin/R")
2461
2462
2463 cp.add_section("fu-spinmcmc")
2464 cp.set("fu-spinmcmc","iterations","1000000")
2465 cp.set("fu-spinmcmc","tbefore","30")
2466 cp.set("fu-spinmcmc","tafter","1")
2467
2468
2469 cp.add_section("fu-remote-jobs")
2470 remoteIfos,remoteJobs = self.get_remote_jobs()
2471 cp.set('fu-remote-jobs','remote-ifos',remoteIfos)
2472 cp.set('fu-remote-jobs','remote-jobs',remoteJobs)
2473
2474
2475 cp.add_section("condor-max-jobs")
2476 cp.set("condor-max-jobs","remoteScan_full_data_FG_RDS.sh_FG_RDS_full_data","20")
2477 cp.set("condor-max-jobs","remoteScan_full_data_FG_SEIS_RDS.sh_FG_SEIS_RDS_full_data","20")
2478 cp.set("condor-max-jobs","remoteScan_playground_FG_RDS.sh_FG_RDS_playground","20")
2479 cp.set("condor-max-jobs","remoteScan_playground_FG_SEIS_RDS.sh_FG_SEIS_RDS_playground","20")
2480 cp.set("condor-max-jobs","remoteScan_time_slides_FG_RDS.sh_FG_RDS_time_slides","20")
2481 cp.set("condor-max-jobs","remoteScan_time_slides_FG_SEIS_RDS.sh_FG_SEIS_RDS_time_slides","20")
2482 cp.set("condor-max-jobs","remoteDatafind_full_data_Q_RDS.sh_Q_RDS_full_data","10")
2483 cp.set("condor-max-jobs","remoteDatafind_full_data_Q_RDS.sh_Q_RDS_playground","10")
2484 cp.set("condor-max-jobs","remoteDatafind_full_data_Q_RDS.sh_Q_RDS_time_slides","10")
2485 cp.set("condor-max-jobs","ligo_data_find_HT_full_data","3")
2486 cp.set("condor-max-jobs","ligo_data_find_Q_HT_full_data","3")
2487 cp.set("condor-max-jobs","ligo_data_find_Q_RDS_full_data","3")
2488 cp.set("condor-max-jobs","ligo_data_find_HT_playground","3")
2489 cp.set("condor-max-jobs","ligo_data_find_Q_HT_playground","3")
2490 cp.set("condor-max-jobs","ligo_data_find_Q_RDS_playground","3")
2491 cp.set("condor-max-jobs","ligo_data_find_HT_time_slides","3")
2492 cp.set("condor-max-jobs","ligo_data_find_Q_HT_time_slides","3")
2493 cp.set("condor-max-jobs","ligo_data_find_Q_RDS_time_slides","3")
2494 cp.set("condor-max-jobs","lalapps_followupmcmc_sngl_full_data","20")
2495 cp.set("condor-max-jobs","lalapps_followupmcmc_sngl_playground","20")
2496 cp.set("condor-max-jobs","lalapps_followupmcmc_sngl_time_slides","20")
2497 cp.set("condor-max-jobs","lalapps_followupmcmc_coh_full_data","20")
2498 cp.set("condor-max-jobs","lalapps_followupmcmc_coh_playground","20")
2499 cp.set("condor-max-jobs","lalapps_followupmcmc_coh_time_slides","20")
2500 cp.set("condor-max-jobs","lalapps_spinspiral_coh_full_data","20")
2501 cp.set("condor-max-jobs","lalapps_spinspiral_coh_playground","20")
2502 cp.set("condor-max-jobs","lalapps_spinspiral_coh_time_slides","20")
2503
2504
2505
2506
2507
2508
2509
2510 if configfile:
2511 user_cp = ConfigParser.ConfigParser()
2512 user_cp.read(configfile)
2513 else:
2514
2515 try:
2516 user_cp = ConfigParser.ConfigParser()
2517 user_cp.read('followup_pipe.ini')
2518 except: pass
2519
2520 if user_cp: self.overwrite_config(user_cp,cp)
2521
2524
2527
2529 host = get_hostname()
2530 if 'phy.syr.edu' in host:
2531 self.cp.set("fu-condor","qscan",home_dirs()+"/rgouaty/opt/omega/omega_r3270_glnxa64_binary/bin/wpipeline")
2532 else:
2533 self.cp.set("fu-condor","qscan",home_dirs()+"/romain/opt/omega/omega_r3270_glnxa64_binary/bin/wpipeline")
2534
2536 fileMap={
2537 "L1":{"hoft":"L1_hoft_cbc","rds":"L0L1-RDS_R_L1-cbc","seismic":"L0L1-RDS_R_L1-seismic-cbc"},
2538 "H1":{"hoft":"H1_hoft_cbc","rds":"H0H1-RDS_R_L1-cbc","seismic":"H0H1-RDS_R_L1-seismic-cbc"},
2539 "H2":{"hoft":"H2_hoft_cbc","rds":"H0H2-RDS_R_L1-cbc","seismic":"H0H2-RDS_R_L1-seismic-cbc"},
2540 "V1":{"hoft":"V1_hoft_cbc","rds":"V1-raw-cbc","seismic":"V1-raw-seismic-cbc"}
2541 }
2542 return fileMap[ifo][type]
2543
2545
# Locate a config file shipped in lalapps' share directory, using the
# installed lalapps_inspiral executable as the anchor for the prefix.
path = self.which('lalapps_inspiral')
if path: path = os.path.split(path)[0]
else:
    print >>sys.stderr, "COULD NOT FIND " + description + " FILE %s IN %s, ABORTING" % (config, path)
    raise ValueError
    # NOTE(review): unreachable — the raise above already leaves this frame.
    sys.exit(1)
# Data files live under share/lalapps next to bin/ in the install prefix.
out = path.replace('bin','share/lalapps') + '/' + config
if not os.path.isfile(out):
    print >>sys.stderr, "COULD NOT FIND " + description + " FILE %s IN %s, ABORTING" % (config, out)
    raise ValueError
    # NOTE(review): unreachable — the raise above already leaves this frame.
    sys.exit(1)
return out
2558
2560 path = self.which('lalapps_inspiral')
2561 if path: path = os.path.split(path)[0]
2562 else:
2563 print >>sys.stderr, "COULD NOT FIND " + description + " FILE %s IN %s, ABORTING" % (script, path)
2564 raise ValueError
2565 sys.exit(1)
2566 out = path.replace('bin','share/lalapps') + '/' + script
2567 if not os.path.isfile(out):
2568 print >>sys.stderr, "COULD NOT FIND " + description + " FILE %s IN %s, ABORTING" % (script, out)
2569 raise ValueError
2570 sys.exit(1)
2571 return out
2572
host = get_hostname()
# Map known LSC cluster hostnames to the user's public web directory.
if 'caltech.edu' in host: return os.path.abspath(os.environ['HOME']) + '/public_html/followups/' + self.time_now
if 'phys.uwm.edu' in host: return os.path.abspath(os.environ['HOME']) + '/public_html/followups/' + self.time_now
if 'phy.syr.edu' in host: return os.path.abspath(os.environ['HOME']) + '/public_html/followups/' + self.time_now
if 'aei.uni-hannover.de' in host: return os.path.abspath(os.environ['HOME']) + '/WWW/LSC/followups/' + self.time_now
# BUG(review): missing ">>" — this prints the sys.stderr file object plus
# the message to *stdout* instead of writing the warning to stderr;
# should read "print >> sys.stderr, ...".
print sys.stderr, "WARNING: could not find web directory, returning empty string"
return ''
2582
host = get_hostname()
# Map known LSC cluster hostnames to the user's public follow-up URL.
if 'ligo.caltech.edu' in host: return "https://ldas-jobs.ligo.caltech.edu/~" +os.environ['USER'] + '/followups/' + self.time_now
if 'ligo-la.caltech.edu' in host: return "https://ldas-jobs.ligo-la.caltech.edu/~" +os.environ['USER'] + '/followups/' + self.time_now
if 'ligo-wa.caltech.edu' in host: return "https://ldas-jobs.ligo-wa.caltech.edu/~" +os.environ['USER'] + '/followups/' + self.time_now
if 'phys.uwm.edu' in host: return "https://ldas-jobs.phys.uwm.edu/~" + os.environ['USER'] + '/followups/' + self.time_now
if 'phy.syr.edu' in host: return "https://sugar-jobs.phy.syr.edu/~" + os.environ['USER'] + '/followups/' + self.time_now
if 'aei.uni-hannover.de' in host: return "https://atlas3.atlas.aei.uni-hannover.de/~" + os.environ['USER'] + '/LSC/followups/' + self.time_now
# BUG(review): missing ">>" — this prints the sys.stderr file object plus
# the message to *stdout* instead of writing the warning to stderr;
# should read "print >> sys.stderr, ...".
print sys.stderr, "WARNING: could not find web server, returning empty string"
return ''
2594
host = get_hostname()
# BUG(review): "'a' or 'b' or ... in host" is always true — each non-empty
# string literal is truthy, so only the final term is actually tested
# against host and the `or` chain short-circuits on the first literal.
# The intent is any(s in host for s in (...)); as written this branch is
# taken for every host.
if 'ligo.caltech.edu' or 'ligo-la.caltech.edu' or 'ligo-wa.caltech.edu' or 'phys.uwm.edu' or 'aei.uni-hannover.de' or 'phy.syr.edu' in host:
    # V1 scans are not available locally on LSC clusters, so run them
    # through the remote-job wrappers.
    remote_ifos = "V1"
    remote_jobs = "ligo_data_find_Q_RDS_full_data,wpipeline_FG_RDS_full_data,wpipeline_FG_SEIS_RDS_full_data,ligo_data_find_Q_RDS_playground,wpipeline_FG_RDS_playground,wpipeline_FG_SEIS_RDS_playground,ligo_data_find_Q_RDS_gps_only,wpipeline_FG_RDS_gps_only,wpipeline_FG_SEIS_RDS_gps_only,ligo_data_find_Q_RDS_time_slides,wpipeline_FG_RDS_time_slides,wpipeline_FG_SEIS_RDS_time_slides"
    return remote_ifos, remote_jobs
return '', ''
2603
2605 host = get_hostname()
2606
2607 if 'phy.syr.edu' in host: return '/usr1/' + os.environ['USER']
2608 if 'caltech.edu' in host: return '/usr1/' + os.environ['USER']
2609 if 'phys.uwm.edu' in host: return '/people/' + os.environ['USER']
2610 if 'aei.uni-hannover.de' in host: return '/local/user/' + os.environ['USER']
2611
2613 which = subprocess.Popen(['which',prog], stdout=subprocess.PIPE)
2614 out = which.stdout.read().strip()
2615 if not out: print >>sys.stderr, "WARNING: could not find %s in your path, unless you have an ini file to overide the path to %s the DAG will fail" % (prog,prog)
2616 return out
2617
2619 for section in config.sections():
2620 if not cp.has_section(section): cp.add_section(section)
2621 for option in config.options(section):
2622 cp.set(section,option,config.get(section,option))
2623
2624
2625
2626
2628 """
2629 Simple method returns a list of links to FOMs ordered by FOM #
2630 The list is 2D ie:
2631 [['ifo,shift',LINKtoImage,LinktoThumb],['ifo,shift',LinktoImage,LinkToThumb]...]
2632 images marked [Eve,Owl,Day] via [p3,p2,p1] in filenames
2633 this methd only for S6 and later
2634 IFO naming start dates:
2635 There were three naming conventions mixed, then p1,p2,p3 and lastly Day,Eve,Owl
2636 LHO: 20090724 :: 932428815
2637 LLO: 20090708 :: 931046415
2638 It appears that the filenames are labeled by local times not
2639 utc??? We need to confirm this for this method CVT Fri-Jan-29-2010:201001291523
2640 """
2641 urls={
2642 "DEFAULT":"http://www.ligo.caltech.edu/~pshawhan/scilinks.html",
2643 "V1":"http://wwwcascina.virgo.infn.it/DetectorOperations/index.htm",
2644 "L1":"https://llocds.ligo-la.caltech.edu/scirun/S6/robofom/%s/%s%s_FOM%i%s.gif",
2645 "H1":"http://lhocds.ligo-wa.caltech.edu/scirun/S6/robofom/%s/%s%s_FOM%i%s.gif",
2646 "H2":"http://lhocds.ligo-wa.caltech.edu/scirun/S6/robofom/%s/%s%s_FOM%i%s.gif"
2647 }
2648 ifoTag=ifo.upper()
2649 shiftDuration=8;
2650
2651 shiftStandardTime={'L1':{'day':14,'eve':22,'owl':6},
2652 'H1':{'day':16,'eve':0,'owl':8},
2653 'H2':{'day':16,'eve':0,'owl':8},
2654 'V1':{'day':6,'eve':14,'owl':22}}
2655 shiftOrder=['day','eve','owl']
2656 shiftLabel={'day':'p1','eve':'p3','owl':'p2'}
2657 outputURLs=list()
2658 if ((ifo==None) or (gpsTime==None)):
2659 sys.stdout.write("getFOMLinks called incorrectly \
2660 using default opts instead!\n")
2661 return [urls['DEFAULT']]
2662 outputURLs=[]
2663
2664 if ifo.__contains__("V1"):
2665 return([['V1',urls[ifoTag],'']])
2666
2667 if shiftStandardTime.keys().__contains__(ifoTag):
2668
2669 tOffset=3600*shiftDuration
2670 for thisTime in \
2671 [gpsTime-tOffset,gpsTime,gpsTime+tOffset]:
2672 Y,M,D,h,m,s,junk0,junk1,junk2=xlaldate.XLALGPSToUTC(LIGOTimeGPS(int(thisTime)))
2673
2674 shiftString=''
2675 humanShiftLabel=''
2676 for shift,start in shiftStandardTime[ifoTag].iteritems():
2677 hours=[x%24 for x in range(start,start+shiftDuration)]
2678 if hours.__contains__(int(h)):
2679 shiftString=shiftLabel[shift]
2680 humanShiftLabel=shift
2681
2682
2683
2684 if (0 in hours) and (hours[0]<h):
2685 D=D-1;
2686 if D<1:
2687 D=1
2688 M=M-1
2689 if M<1:
2690 M=1
2691 Y=Y-1
2692
2693 tString="%s%s%s"%(str(Y).zfill(4),str(M).zfill(2),str(D).zfill(2))
2694 if ('V1').__contains__(ifoTag):
2695 outputURLs.append(['V1',urls[ifoTag],''])
2696 else:
2697 sL=shiftString
2698 for fL in [1,2,3]:
2699 outputURLs.append(["%s,%s"%(ifoTag,humanShiftLabel),
2700 urls[ifoTag]%(tString,tString,sL,fL,""),
2701 urls[ifoTag]%(tString,tString,sL,fL,"Thumb")
2702 ])
2703 return outputURLs
2704 else:
2705 return [urls['DEFAULT']]
2706
2707
2708
2710 """
2711 Pass in int form of gps time.
2712 """
2713 lGTime=LIGOTimeGPS(int(gpsTime))
2714 Y,M,D,h,m,s,junk0,junk1,junk2=xlaldate.XLALGPSToUTC(lGTime)
2715 timeStamp=str("%s-%s-%s %s:%s:%s UTC"%(str(Y).zfill(4),
2716 str(M).zfill(2),
2717 str(D).zfill(2),
2718 str(h).zfill(2),
2719 str(m).zfill(2),
2720 str(s).zfill(2)))
2721 return timeStamp
2722
2723
2724
2726 """
2727 This method returns a URL string to point you to ilog day page for
2728 specified IFO and GPStime. Valid IFO labels are V1, L1, H1 or H2.
2729 """
2730 time=int(float(time))
2731 dateString="%s/%s/%s"
2732 urls={
2733 'default':"http://www.ligo.caltech.edu/~pshawhan/scilinks.html",
2734 'V1':"https://pub3.ego-gw.it/logbook/index.php?area=logbook&ref=search&datefrom=%s&dateto=%s",
2735 'L1':"http://ilog.ligo-la.caltech.edu/ilog/pub/ilog.cgi?task=view&date_to_view=%s&group=detector&keywords_to_highlight=&text_to_highlight=&anchor_to_scroll_to=",
2736 'H1':"http://ilog.ligo-wa.caltech.edu/ilog/pub/ilog.cgi?task=view&date_to_view=%s&group=detector&keywords_to_highlight=&text_to_highlight=&anchor_to_scroll_to=",
2737 'H2':"http://ilog.ligo-wa.caltech.edu/ilog/pub/ilog.cgi?task=view&date_to_view=%s&group=detector&keywords_to_highlight=&text_to_highlight=&anchor_to_scroll_to="
2738 }
2739 outputURL=urls['default']
2740 if ((ifo==None) or (time==None)):
2741 return urls['default']
2742 gpsTime=LIGOTimeGPS(time)
2743 Y,M,D,h,m,s,junk0,junk1,junk2=xlaldate.XLALGPSToUTC(gpsTime)
2744 gpsStamp=dateString%(str(M).zfill(2),str(D).zfill(2),str(Y).zfill(4))
2745 if ('H1','H2','L1').__contains__(ifo.upper()):
2746 outputURL=urls[ifo.upper()]%gpsStamp
2747 if ('V1').__contains__(ifo.upper()):
2748 gpsTimePO=LIGOTimeGPS(time+(24*3600))
2749 Y2,M2,D2,h2,m2,s2,junk0,junk1,junk2=xlaldate.XLALGPSToUTC(gpsTimePO)
2750 gpsStampPlusOne=dateString%(str(M2).zfill(2),str(D2).zfill(2),str(Y2).zfill(4))
2751 outputURL=urls[ifo.upper()]%(gpsStamp,gpsStampPlusOne)
2752 return outputURL
2753
2754
2755
2756
2758 """
2759 """
def __init__(self,publicationDirectory=None,publicationURL=None,verbose=False):
    """
    Build a mapping between a filesystem publication directory and the URL
    it is served under, by finding the longest shared trailing path and
    recording the differing leading parts (commonString -> commonURL).
    """
    # Placeholder so "http://"-style prefixes survive os.path.sep splits.
    protocolTag="@PROTO@/"
    self.verbose=verbose
    self.validProtocols=["http://","https://"]
    givenProtocol=""
    # NOTE(review): this only reports the error and then carries on; the
    # .lower() calls below will raise AttributeError on None anyway.
    # (Also "filenameToURLMappe" is a typo for filenameToURLMapper, but it
    # is runtime output and is left untouched here.)
    if publicationDirectory == None or\
       publicationURL == None:
        sys.stderr.write("Error: Initializing filenameToURLMappe instance \
with None types.\n")
    self.pDIR=publicationDirectory
    self.pURL=publicationURL
    # Detect swapped arguments: the directory must never start with a URL
    # protocol prefix.
    for protocolCheck in self.validProtocols:
        if publicationDirectory.lower().startswith(protocolCheck):
            self.pDIR=publicationDirectory
            self.pURL=publicationURL
            raise Warning,"object initialized with publication directory and publication URL reversed\n"
    # Temporarily swap the protocol for the tag so it is not split apart.
    for protocolCheck in self.validProtocols:
        if self.pURL.lower().startswith(protocolCheck):
            self.pURL="%s"%(self.pURL.replace(protocolCheck,protocolTag))
            givenProtocol=protocolCheck
    pd=self.pDIR.lstrip(os.path.sep).split(os.path.sep)
    pu=self.pURL.split(os.path.sep)
    self.pURL=publicationURL
    # Compare both paths from their tails: components are equal up to the
    # first mismatch, which marks the boundary of the shared suffix.
    pd.reverse()
    pu.reverse()
    cStringList=list()
    cURLList=list()
    # Index (from the end) of the first differing component.
    # NOTE(review): .index(False) raises ValueError when one path is
    # entirely a suffix of the other (no mismatch in the overlap).
    mIndex=[pd[i]==pu[i] for i in range(min(len(pd),len(pu)))].index(False)
    cURLList=pu[mIndex:]
    cStringList=pd[mIndex:]
    cStringList.reverse()
    cURLList.reverse()
    cURL=cString=""
    # Reassemble the differing URL prefix and restore the real protocol.
    for elem in cURLList:
        cURL=cURL+"%s%s"%(os.path.sep,elem)
    cURL=cURL+os.path.sep
    if not self.pURL.startswith(os.path.sep):
        cURL=cURL.lstrip(os.path.sep)
    self.commonURL=os.path.normpath(cURL).replace(protocolTag,givenProtocol)
    # Reassemble the differing directory prefix the same way.
    for elem in cStringList:
        cString=cString+"%s%s"%(os.path.sep,elem)
    cString=cString+os.path.sep
    if not self.pDIR.startswith(os.path.sep):
        cString=cString.lstrip(os.path.sep)
    self.commonString=os.path.normpath(cString)
2806
2809
2812
2814
2815 myURL=filename.replace(self.commonString,self.commonURL)
2816
2817 if filename.strip() == "":
2818 sys.stderr.write("Improper conversion for :%s\n"%filename)
2819 raise Error, "object:filenameToURLMapper given empty string to convert!\n"
2820
2821 if myURL == filename:
2822 sys.stderr.write("Improper conversion for :%s\n"%filename)
2823 sys.stderr.write("web-url : %s\n"%self.pURL)
2824 sys.stderr.write("publication dir: %s\n"%self.pDIR)
2825 sys.stderr.write("Common String : %s\n"%self.commonString)
2826 sys.stderr.write("Common URL : %s\n"%self.commonURL)
2827 raise Warning, "object:filenameToURLMapper improperly initialized or given bad args\n"
2828 if self.verbose:
2829 sys.stdout.write("My URL : %s\n"%myURL)
2830 sys.stdout.write("My file : %s\n"%filename)
2831 sys.stdout.write("web-url : %s\n"%self.pURL)
2832 sys.stdout.write("publication dir: %s\n"%self.pDIR)
2833 sys.stdout.write("Common String : %s\n"%self.commonString)
2834 sys.stdout.write("Common URL : %s\n"%self.commonURL)
2835 return myURL
2836