2 # An example configuration file to create a gstlal_inspiral workflow
# Tailored to BNS but can be adapted to other sources by changing parameters or programs
5 ## Template bank parameters
6 # Note that these can can change if you modify the template bank program.
9 # Minimum component mass for the template bank
# Maximum component mass for the template bank
# Minimum total mass for the template bank
# Maximum total mass for the template bank
# Low frequency cut off for the template bank placement
18 LOW_FREQUENCY_CUTOFF = 40.0
# High pass frequency to condition the data before measuring the psd for template placement
21 # Highest frequency at which to compute the metric
22 HIGH_FREQUENCY_CUTOFF = 2047.0
# The sample rate at which to compute the template bank
25 # The minimal match of the template bank; determines how much SNR is retained for signals "in between the bank points"
# The start time for reading the data for the bank
29 # The stop time for reading the data for the bank
33 ## Sub bank parameters
34 # The large template bank is split into sub banks before they are SVD'd
35 # Number of templates to put in each sub bank; this should probably always be ~100. For highly degenerate banks it might be possible to make this 200 or so.
36 NUM_SPLIT_TEMPLATES = 100
37 # The number of templates to overlap for each sub bank. This overlap just goes into the SVD, the overlapping templates are not reconstructed and the bank remains contiguous
40 ## Controls trigger creation
# The detectors to analyze; provide as a space separated list
44 # The start time to analyze
46 # The end time to analyze
48 # The tag to name the output web directory
50 # the output directory for the results
51 WEBDIR = ~/public_html/${START}-${STOP}-${TAG}
52 # The number of sub banks to analyze in parallel. This means that the total number of templates in parallel is NUMBANKS * NUM_SPLIT_TEMPLATES * num(IFOS); A typical number of parallel templates might be 4 * 100 * 3 = 1200. Setting this too high might cause memory to be exhausted
54 # If non-zero, this is the interval over which to apply the composite detection statistic to avoid reconstructing all of the SVD filters
56 # The length in samples of the auto correlation "chisquared" veto.
58 # additional options, e.g.,
59 #ADDITIONAL_DAG_OPTIONS = --blind-injections BNS-MDC1-WIDE.xml
62 # The seed is the string before the suffix _injections.xml
63 # Change as appropriate, whitespace is important
64 # NOTE This uses gstlal_injections_by_local_rate for this example, but you might want to use lalapps_inspinj since it is reviewed etc.
65 # specify the seeds and number of injections. The should be <seed>_injections.xml. The same rule will be applied to each target. For a more heterogeneous injection plan more work has to go into this Makefile but it is straight forward.
66 INJECTIONS := 1_injections.xml 2_injections.xml
67 # Maximum injection distance in Mpc
69 # Minimum component mass 1 for injections
# Maximum component mass 1 for injections
# Minimum component mass 2 for injections
# Maximum component mass 2 for injections
# Minimum total mass for injections
78 INJ_MIN_TOTAL_MASS = 2.0
# Maximum total mass for injections
80 INJ_MAX_TOTAL_MASS = 6.0
# Minimum frequency for injections. NOTE this should be lower than the intended filtering frequency
84 ## Segment and frame type info
# Frame types and segment names for the LIGO and Virgo data.
# These use recursive (=) assignment on purpose: $* is only meaningful when the
# value is expanded inside a pattern rule's recipe, where it becomes the IFO stem.
# NOTE(review): these three assignments had been split across lines (invalid make
# syntax); rejoined here without changing their values.
LIGO_FRAME_TYPE='$*_T1200307_V4_EARLY_GAUSSIAN'
LIGO_SEGMENTS="$*:CBC-MDC1_SCIENCE_EARLY_GAUSSIAN"
VIRGO_FRAME_TYPE='V1_T1300121_V1_EARLY_GAUSSIAN'
89 # Channel names to analyze. NOTE every IFO should get a channel name, but they can be different and depend on the data
90 CHANNEL_NAMES:=--channel-name=H1=GAUSSIAN --channel-name=L1=GAUSSIAN --channel-name=V1=GAUSSIAN
92 ## FIXME MISSING vetoes
93 # In principle a similar set of segment queries can be written to extract vetoes
94 # That is not done in this example
# If you do decide to do it then you have to pass the vetoes xml file to the DAG generator
97 ## Get some basic definitions. YOU HAVE TO INCLUDE THIS BEFORE THE ACTUAL RULES OR THEY WONT WORK
98 include Makefile.offline_analysis_rules
## Making the master template bank
# NOTE(review): the recipe's command line was missing from this chunk; restored as
# lalapps_tmpltbank, consistent with the bank-splitter comment below, which expects
# process parameters recorded for the program "tmpltbank" — TODO confirm.
# NOTE(review): a few option lines also appear to be missing (gaps in the original
# numbering), including anything after --spectrum-type; the dangling line
# continuation has been terminated so the rule parses. Verify against the original
# workflow before running.
H1-TMPLTBANK-966393725-2048.xml: H1_frame.cache
	lalapps_tmpltbank \
		--disable-compute-moments \
		--grid-spacing Hexagonal \
		--dynamic-range-exponent 69.0 \
		--enable-high-pass $(HIGH_PASS_FREQ) \
		--high-pass-order 8 \
		--strain-high-pass-order 8 \
		--minimum-mass $(MIN_MASS) \
		--maximum-mass $(MAX_MASS) \
		--min-total-mass $(MIN_TOTAL_MASS) \
		--max-total-mass $(MAX_TOTAL_MASS) \
		--gps-start-time $(BANKSTART) \
		--gps-end-time $(BANKSTOP) \
		--calibrated-data real_8 \
		--channel-name H1:GAUSSIAN \
		--number-of-segments 15 \
		--minimal-match $(MM) \
		--high-pass-attenuation 0.1 \
		--min-high-freq-cutoff ERD \
		--segment-length 1048576 \
		--low-frequency-cutoff $(LOW_FREQUENCY_CUTOFF) \
		--num-freq-cutoffs 1 \
		--sample-rate $(SAMPLE_RATE) \
		--high-frequency-cutoff $(HIGH_FREQUENCY_CUTOFF) \
		--resample-filter ldas \
		--strain-high-pass-atten 0.1 \
		--strain-high-pass-freq $(HIGH_PASS_FREQ) \
		--frame-cache H1_frame.cache \
		--max-high-freq-cutoff ERD \
		--approximant $(APPROXIMANT) \
		--spectrum-type median
## Directories that we'll need
# One rule creates any of the needed output directories.
# NOTE(review): the recipe was missing from this chunk; mkdir -p $@ is the
# conventional recipe for directory-creation targets — confirm against the original.
plots $(WEBDIR) %_split_bank :
	mkdir -p $@
## The program to split the master template bank into sub banks. Note: this program looks for process parameters recorded for the program "tmpltbank", if that is not correct then the --bank-program option must be added to override.
# $* = the IFO taken from the pattern stem, $< = the master bank file, $@ = the output cache.
%_split_bank.cache : H1-TMPLTBANK-966393725-2048.xml %_split_bank
	gstlal_bank_splitter --output-path $*_split_bank --output-cache $@ --approximant $(APPROXIMANT) --overlap $(OVERLAP) --instrument $* --n $(NUM_SPLIT_TEMPLATES) --sort-by mchirp --add-f-final --max-f-final $(HIGH_FREQUENCY_CUTOFF) $<
## Even though we don't use time slides for background estimation, we still need a time slide table. It is constructed of a zero lag vector and a single offset that we use for "closed box" results
# NOTE(review): the rule header was missing from this chunk; restored as tisi.xml,
# which the dag rule below lists as a prerequisite — TODO confirm. Also confirm the
# use of $* here: in an explicit (non-pattern) rule with an unrecognized suffix its
# value may expand empty.
# NOTE(review): the ligolw_add invocation had been split mid-option; rejoined.
tisi.xml :
	ligolw_tisi --instrument=H1=0:0:0 --instrument=H2=0:0:0 --instrument=L1=0:0:0 --instrument=V1=0:0:0 $*_part0.xml
	ligolw_tisi --instrument=H1=0:0:0 --instrument=H2=0:0:0 --instrument=L1=3.14159:3.14159:3.14159 --instrument=V1=7.892:7.892:7.892 $*_part1.xml
	ligolw_add --remove-input --output $@ $*_part0.xml $*_part1.xml
## The dag generator and the whole point of this makefile. The result is a file: trigger_pipe.dag that can be submitted to HTCondor
# Top-level target: build every input product, then generate trigger_pipe.dag.
# "dag" is a command, not a file this recipe creates, so mark it phony.
# NOTE(review): the gstlal_inspiral_pipe invocation had been split mid-option
# (--verbose); rejoined without changing any option or value.
.PHONY: dag
dag : segments.xml frame.cache tisi.xml plots $(WEBDIR) $(INJECTIONS) $(BANK_CACHE_FILES)
	gstlal_inspiral_pipe --verbose --data-source frames --gps-start-time $(START) --gps-end-time $(STOP) --frame-cache frame.cache --frame-segments-file segments.xml --frame-segments-name datasegments --control-peak-time $(PEAK) --num-banks $(NUMBANKS) --fir-stride 4 --web-dir $(WEBDIR) --time-slide-file tisi.xml $(INJECTION_LIST) --bank-cache $(BANK_CACHE_STRING) --tolerance 0.9999 --overlap $(OVERLAP) --flow $(LOW_FREQUENCY_CUTOFF) $(CHANNEL_NAMES) --autocorrelation-length $(AC_LENGTH) $(ADDITIONAL_DAG_OPTIONS)
## Execute a single segment query for a given IFO
164 # Note: Virgo might sometimes need a special rule
# Query the segment database for one IFO's science segments over [START, STOP);
# LIGO_SEGMENTS expands $* to the IFO stem of the requested target.
%_segmentspadded.xml:
	ligolw_segment_query --segment-url=$(SEG_SERVER) -q --gps-start-time $(START) --gps-end-time $(STOP) --include-segments=$(LIGO_SEGMENTS) --result-name=datasegments > $@
## Virgo datafind often requires a separate rule since the frame files have different types
# NOTE(review): the rule header was missing from this chunk; restored as
# V1_frame.cache to match the per-IFO "*_frame.cache" naming that frame.cache
# aggregates below — TODO confirm.
V1_frame.cache:
	ligo_data_find -o V -t $(VIRGO_FRAME_TYPE) -l -s $(START) -e $(STOP) --url-type file > $@
## Execute a datafind query for a given IFO
174 #FIXME horrible hack to get the observatory, not guaranteed to work
# NOTE(review): the rule header and the initial OBS assignment were missing from
# this chunk; restored so the digit-stripping substitutions have a value to work
# on. OBS is the observatory letter derived from the IFO stem (H1 -> H, L1 -> L)
# — confirm against the original workflow.
%_frame.cache:
	$(eval OBS:=$*)
	$(eval OBS:=$(subst 1,$(empty),$(OBS)))
	$(eval OBS:=$(subst 2,$(empty),$(OBS)))
	ligo_data_find -o $(OBS) -t $(LIGO_FRAME_TYPE) -l -s $(START) -e $(STOP) --url-type file > $@
## Combine the results of all the single IFO datafind queries into a single file
# Use automatic variables ($^ = all prerequisites, $@ = target) so the recipe
# cannot drift out of sync with the rule header.
frame.cache: $(FRAME_CACHE_FILES)
	cat $^ > $@
## Combine the results of the segment database queries for all ifos into a single file
# Intersect the database science segments with the segments actually covered by
# frame data, then trim the edges. The final product overwrites $@ in place.
# NOTE(review): the ligolw_cut invocation had been split across four lines
# mid-option; rejoined without changing any option.
segments.xml: $(SEGMENTS_FILES) frame.cache
	ligolw_add --output segdb.xml $(SEGMENTS_FILES)
	ligolw_cut --delete-column segment:segment_def_cdb --delete-column segment:creator_db --delete-column segment_definer:insertion_time segdb.xml
	gstlal_cache_to_segments frame.cache nogaps.xml
	gstlal_segments_operations --segment-file1 segdb.xml --segment-file2 nogaps.xml --intersection --output-file $@
	-rm -vf nogaps.xml segdb.xml
	gstlal_segments_trim --trim 8 --min-length 512 --output $@ $@
# A similar procedure to the above segment generation could/should be used to make veto files
## Generate injections, e.g., 1_injections.xml or 2_injections.xml for whatever seeds are requested
# NOTE(review): the rule header and, most likely, the --seed option were missing
# from this chunk; restored per the seed naming described near the INJECTIONS
# definition: each <seed>_injections.xml target is built by this pattern rule,
# seeded by its stem — TODO confirm. Other injection options (e.g. component-mass
# bounds) may also be missing; verify against the original workflow.
%_injections.xml:
	gstlal_injections_by_local_rate \
		--seed $* \
		--flower $(INJ_FLOW) \
		--gps-start-time $(START) \
		--gps-end-time $(STOP) \
		--bns-max-distance $(INJ_MAX_DIST) \
		--nsbh-local-rate 0 \
		--bns-local-rate 22000
## Cleanup by deleting all of the files created. WARNING: this will completely obliterate an analysis.
# NOTE(review): the rule header was missing from this chunk; restored as the
# conventional "clean" target. It is a command, not a file, so mark it phony.
# The leading "-" ignores rm failures so cleanup continues past missing files.
.PHONY: clean
clean :
	-rm -rvf *.sub *.dag* *.cache *.sh logs *.sqlite plots $(WEBDIR) *.html Images *.css *.js
	-rm -rvf *_split_bank/ lalapps_run_sqlite/ ligolw_* gstlal_*
	-rm -vf segments.xml tisi.xml H*.xml L*.xml V*.xml ?_injections.xml ????-*_split_bank-*.xml
	-rm -vf *marginalized*.xml.gz *-ALL_LLOID*.xml.gz