# Set up the executables that distribute coincident triggers into
# background bins, compute per-bin statistics, and combine the results.
bins_exe = PyCBCDistributeBackgroundBins(workflow.cp, 'distribute_background_bins',
                                         ifos=workflow.ifos, tags=tags,
                                         out_dir=out_dir)
statmap_exe = PyCBCStatMapExecutable(workflow.cp, 'statmap',
                                     ifos=workflow.ifos,
                                     tags=tags, out_dir=out_dir)
cstat_exe = PyCBCCombineStatmap(workflow.cp, 'combine_statmap',
                                ifos=workflow.ifos,
                                tags=tags, out_dir=out_dir)

# Parse the whitespace-separated background-bins option, dropping the
# empty strings that repeated spaces leave behind
background_bins = workflow.cp.get_opt_tags('workflow-coincidence',
                                           'background-bins', tags).split(' ')
background_bins = [x for x in background_bins if x != '']

bins_node = bins_exe.create_node(coinc_files, bank_file, background_bins)
workflow += bins_node

# One statmap job per background bin, each output carrying its bin name
statmap_files = FileList([])
for i, coinc_file in enumerate(bins_node.output_files):
    statnode = statmap_exe.create_node(FileList([coinc_file]),
                                       tags=['BIN_%s' % i])
    workflow += statnode
    statmap_files.append(statnode.output_files[0])
    statmap_files[i].bin_name = bins_node.names[i]

# Combine the per-bin statmap files into a single output
cstat_node = cstat_exe.create_node(statmap_files)
workflow += cstat_node

return cstat_node.output_files[0], statmap_files
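A minimal, self-contained sketch of the background-bins parsing above, using a hypothetical option value (the real bin-string format is whatever PyCBCDistributeBackgroundBins expects):

# Hypothetical example value for the 'background-bins' option
raw = 'bin1:spec1  bin2:spec2 bin3:spec3'
# split(' ') leaves empty strings wherever spaces repeat, hence the filter
background_bins = [x for x in raw.split(' ') if x != '']
assert background_bins == ['bin1:spec1', 'bin2:spec2', 'bin3:spec3']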
# (Executable setup truncated in the source; as in the block above it
# constructs bins_exe, statmap_exe and cstat_exe with arguments ending)
#     ifos=workflow.ifos, tags=tags,
#     out_dir=out_dir)

background_bins = workflow.cp.get_opt_tags('workflow-coincidence',
                                           'background-bins',
                                           tags).split(' ')
background_bins = [x for x in background_bins if x != '']

# Distribute each class of injection coincidences into the same bins
for inj_type in ['injinj', 'injfull', 'fullinj']:
    bins_node = bins_exe.create_node(FileList(coinc_files[inj_type]),
                                     bank_file, background_bins,
                                     tags=[inj_type])
    workflow += bins_node
    coinc_files[inj_type] = bins_node.output_files

# One statmap job per bin, fed the matching file from every injection
# class plus the corresponding background file
statmap_files = FileList([])
for i in range(len(background_bins)):
    statnode = statmap_exe.create_node(
        FileList([coinc_files['injinj'][i]]),
        FileList([background_file[i]]),
        FileList([coinc_files['injfull'][i]]),
        FileList([coinc_files['fullinj'][i]]),
        tags=['BIN_%s' % i])
    workflow += statnode
    statmap_files.append(statnode.output_files[0])

cstat_node = cstat_exe.create_node(statmap_files)
workflow += cstat_node

return cstat_node.output_files[0]
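The per-bin statmap loop above pairs the i-th file from each injection class with the i-th background file. A toy, runnable illustration of that bookkeeping, with plain strings standing in for workflow Files (all names here are hypothetical):

# Stand-in file lists, two bins per category
coinc = {'injinj':  ['injinj_bin0',  'injinj_bin1'],
         'injfull': ['injfull_bin0', 'injfull_bin1'],
         'fullinj': ['fullinj_bin0', 'fullinj_bin1']}
background = ['bg_bin0', 'bg_bin1']

# One job per bin, fed the matching file from every category
for i in range(len(background)):
    inputs = (coinc['injinj'][i], background[i],
              coinc['injfull'][i], coinc['fullinj'][i])
    print('BIN_%s' % i, inputs)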
Returns
-------
"""
if inj_tags is None:
    inj_tags = []
if tags is None:
    tags = []
cp = workflow.cp
full_segment = trig_files[0].segment
trig_name = cp.get("workflow", "trigger-name")
grb_string = "GRB" + trig_name
num_trials = int(cp.get("trig_combiner", "num-trials"))
pp_outs = FileList([])
pp_nodes = []

# Set up the needed executable classes
trig_combiner_exe = os.path.basename(cp.get("executables", "trig_combiner"))
trig_combiner_class = select_generic_executable(workflow, "trig_combiner")
trig_cluster_exe = os.path.basename(cp.get("executables", "trig_cluster"))
trig_cluster_class = select_generic_executable(workflow, "trig_cluster")
sbv_plotter_exe = os.path.basename(cp.get("executables", "sbv_plotter"))
sbv_plotter_class = select_generic_executable(workflow, "sbv_plotter")
efficiency_exe = os.path.basename(cp.get("executables", "efficiency"))
efficiency_class = select_generic_executable(workflow, "efficiency")
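The four lookups above all follow one pattern; a sketch of a small helper that factors out the repetition (the helper name is hypothetical; select_generic_executable is the function already used above):

import os

def lookup_executable(workflow, cp, name):
    # Hypothetical helper mirroring the inline pattern above: fetch the
    # basename of the [executables] entry and the matching job class
    exe = os.path.basename(cp.get('executables', name))
    cls = select_generic_executable(workflow, name)
    return exe, cls

# e.g. trig_cluster_exe, trig_cluster_class = \
#          lookup_executable(workflow, cp, 'trig_cluster')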
# Check whether this cache's extent touches its neighbours'. If the
# boundaries match, the caches are adjacent, and the time where they
# meet is used as a bound for accepting coincidences. If they're not
# adjacent, then there is no bound for accepting coincidences.
coincStart, coincEnd = None, None
if idx and (cafe_cache.extent[0] == cafe_caches[idx-1].extent[1]):
    coincStart = cafe_cache.extent[0]
if idx + 1 < len(cafe_caches) and \
        (cafe_cache.extent[1] == cafe_caches[idx+1].extent[0]):
    coincEnd = cafe_cache.extent[1]
coincSegment = (coincStart, coincEnd)
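A runnable sketch of the adjacency test above, with plain (start, end) tuples standing in for the cafe-cache extents:

# Time-ordered cache extents; caches 0 and 1 share a boundary at 100
extents = [(0, 100), (100, 200), (250, 300)]

for idx, ext in enumerate(extents):
    coinc_start, coinc_end = None, None
    # Adjacent to the previous cache: bound coincidences at the join
    if idx and extents[idx - 1][1] == ext[0]:
        coinc_start = ext[0]
    # Adjacent to the next cache: bound coincidences at the join
    if idx + 1 < len(extents) and extents[idx + 1][0] == ext[1]:
        coinc_end = ext[1]
    print(idx, (coinc_start, coinc_end))
# prints: 0 (None, 100)   1 (100, None)   2 (None, None)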
# Create a list of the File(s) contained in the cache. Assume that if
# we have partitioned input then if *one* job in the partitioned input
# is an input then *all* jobs will be.
if not parallelize_split_input:
    inputTrigFiles = FileList([])
    for cache_entry in cafe_cache.objects:
        inputTrigFiles.append(cache_entry.workflow_file)
    llw_files = inputTrigFiles + [dqSegFile] + [tisiOutFile]

    # Now we can create the nodes
    node = ligolwadd_job.create_node(cafe_cache.extent, llw_files)
    ligolwAddFile = node.output_files[0]
    ligolwAddOuts.append(ligolwAddFile)
    workflow.add_node(node)
    for category in veto_cats:
        node = ligolwthinca_job[category].create_node(
            cafe_cache.extent, coincSegment, ligolwAddFile)
        ligolwThincaOuts += \
            node.output_files.find_output_without_tag('DIST_STATS')
        ligolwThincaLikelihoodOuts += \
            node.output_files.find_output_with_tag('DIST_STATS')
def make_sngl_ifo(workflow, sngl_file, bank_file, trigger_id, out_dir, ifo,
                  tags=None):
    """Set up a job to create a single-detector (sngl-ifo) HTML summary
    snippet.
    """
    tags = [] if tags is None else tags
    makedir(out_dir)
    name = 'page_snglinfo'
    files = FileList([])
    node = PlotExecutable(workflow.cp, name, ifos=[ifo],
                          out_dir=out_dir, tags=tags).create_node()
    node.add_input_opt('--single-trigger-file', sngl_file)
    node.add_input_opt('--bank-file', bank_file)
    node.add_opt('--trigger-id', str(trigger_id))
    node.add_opt('--instrument', ifo)
    node.new_output_file_opt(workflow.analysis_time, '.html', '--output-file')
    workflow += node
    files += node.output_files
    return files
if len(workflow.ifos) > 2:
    raise ValueError('This coincidence method only supports two-ifo searches')

# Wall-time knob and memory knob: the number of parallel jobs each
# coincidence calculation is split over
factor = int(workflow.cp.get_opt_tags('workflow-coincidence',
                                      'parallelization-factor', tags))

# Index the full-data and injection trigger files by ifo
ffiles = {}
ifiles = {}
ifos, files = full_data_trig_files.categorize_by_attr('ifo')
for ifo, file in zip(ifos, files):
    ffiles[ifo] = file[0]
ifos, files = inj_trig_files.categorize_by_attr('ifo')
for ifo, file in zip(ifos, files):
    ifiles[ifo] = file[0]
ifo0, ifo1 = ifos[0], ifos[1]

# The three pairings of injection and full-data triggers
combo = [(FileList([ifiles[ifo0], ifiles[ifo1]]), "injinj"),
         (FileList([ifiles[ifo0], ffiles[ifo1]]), "injfull"),
         (FileList([ifiles[ifo1], ffiles[ifo0]]), "fullinj"),
         ]

bg_files = {'injinj': [], 'injfull': [], 'fullinj': []}
for trig_files, ctag in combo:
    findcoinc_exe = PyCBCFindCoincExecutable(workflow.cp, 'coinc',
                                             ifos=workflow.ifos,
                                             tags=tags + [ctag],
                                             out_dir=out_dir)
    # Split the calculation into `factor` parallel jobs
    for i in range(factor):
        group_str = '%s/%s' % (i, factor)
        coinc_node = findcoinc_exe.create_node(trig_files, hdfbank,
                                               veto_file, veto_name,
                                               group_str, tags=[str(i)])
        bg_files[ctag] += coinc_node.output_files
        workflow.add_node(coinc_node)
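A tiny sketch of the group-string convention used above to split one coincidence calculation into `factor` parallel pieces (how the executable interprets '<i>/<factor>' is assumed, not shown here):

factor = 4
group_strs = ['%s/%s' % (i, factor) for i in range(factor)]
assert group_strs == ['0/4', '1/4', '2/4', '3/4']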
def find_output_with_ifo(self, ifo):
    """
    Find all files whose ifo_list contains the given ifo.
    """
    # Enforce upper case
    ifo = ifo.upper()
    return FileList([i for i in self if ifo in i.ifo_list])
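A self-contained sketch of the same filtering pattern, with a minimal stand-in for the workflow File class:

class FakeFile(object):
    # Stand-in exposing just the ifo_list attribute the filter needs
    def __init__(self, ifo_list):
        self.ifo_list = ifo_list

files = [FakeFile(['H1']), FakeFile(['L1']), FakeFile(['H1', 'L1'])]
ifo = 'h1'.upper()          # enforce upper case, as above
matches = [f for f in files if ifo in f.ifo_list]
assert len(matches) == 2    # the H1-only file and the H1L1 file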
identify them. The FileList class contains functions to search
based on tags.
"""
if tags is None:
    tags = []
logging.info("Entering injection module.")
make_analysis_dir(output_dir)

# Get the full analysis segment for output file naming
full_segment = workflow.analysis_time
ifos = workflow.ifos

# Identify which injections to do by the presence of sub-sections in
# the configuration file
inj_tags = []
inj_files = FileList([])
for section in workflow.cp.get_subsections(inj_section_name):
    inj_tag = section.upper()
    curr_tags = tags + [inj_tag]

    # FIXME: Remove once fixed in pipedown
    # TEMPORARILY we require injection tags to end in "INJ"
    if not inj_tag.endswith("INJ"):
        err_msg = ("Currently the workflow requires injection names to "
                   "end with an INJ suffix, e.g. bnslininj or bbhinj; "
                   "%s is not valid." % (inj_tag.lower(),))
        raise ValueError(err_msg)

    # Parse for options in the ini file
    injection_method = workflow.cp.get_opt_tags("workflow-injections",
                                                "injections-method",
workflow : pycbc.workflow.core.Workflow
    An instanced class that manages the constructed workflow.
output_dir : path string
    The directory where data products will be placed.
tags : list of strings
    If given, these tags are used to uniquely name and identify output
    files that would be produced in multiple calls to this function.

Returns
-------
gate_files : pycbc.workflow.core.FileList
    The FileList holding the gating files.
'''
if tags is None:
    tags = []
gate_files = FileList([])
cp = workflow.cp
global_seg = workflow.analysis_time
user_tag = "PREGEN_GATE"

# Look for a pre-generated gating file for each ifo and register it
# with a file:// URL
for ifo in workflow.ifos:
    try:
        pre_gen_file = cp.get_opt_tags('workflow-gating',
                                       'gating-file-%s' % ifo.lower(),
                                       tags)
        pre_gen_file = resolve_url(pre_gen_file,
                                   os.path.join(os.getcwd(), output_dir))
        file_url = urlparse.urljoin('file:',
                                    urllib.pathname2url(pre_gen_file))
        curr_file = File(ifo, user_tag, global_seg, file_url,
                         tags=tags)
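A small, runnable sketch of the file-URL construction above; the snippet uses the Python 2 module layout (urlparse, urllib.pathname2url), shown here with the Python 3 equivalents as a fallback:

import os
try:
    from urlparse import urljoin          # Python 2, as in the snippet
    from urllib import pathname2url
except ImportError:
    from urllib.parse import urljoin      # Python 3 equivalents
    from urllib.request import pathname2url

pre_gen_file = os.path.join(os.getcwd(), 'H1-gating.txt')  # hypothetical path
file_url = urljoin('file:', pathname2url(pre_gen_file))
print(file_url)  # e.g. file:///current/working/dir/H1-gating.txt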
def make_inference_1d_posterior_plots(
        workflow, inference_file, output_dir, parameters=None,
        analysis_seg=None, tags=None):
    parameters = [] if parameters is None else parameters
    tags = [] if tags is None else tags
    files = FileList([])
    # One 1D posterior plot per parameter, each tagged with its index
    # so that the output files are uniquely named
    for (ii, parameter) in enumerate(parameters):
        files += make_inference_posterior_plot(
            workflow, inference_file, output_dir,
            parameters=[parameter], analysis_seg=analysis_seg,
            tags=tags + ['param{}'.format(ii)])
    return files
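A toy illustration of the per-parameter tag convention used above (the base tag is hypothetical):

tags = ['POSTERIOR']
parameters = ['mass1', 'mass2']
per_param_tags = [tags + ['param{}'.format(ii)]
                  for ii, _ in enumerate(parameters)]
assert per_param_tags == [['POSTERIOR', 'param0'],
                          ['POSTERIOR', 'param1']]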