本文整理汇总了Python中pycbc.workflow.core.make_analysis_dir函数的典型用法代码示例。如果您正苦于以下问题:Python make_analysis_dir函数的具体用法?Python make_analysis_dir怎么用?Python make_analysis_dir使用的例子?那么恭喜您, 这里精选的函数代码示例或许可以为您提供帮助。
在下文中一共展示了make_analysis_dir函数的15个代码示例,这些例子默认根据受欢迎程度排序。您可以为喜欢或者感觉有用的代码点赞,您的评价将有助于系统推荐出更棒的Python代码示例。
示例1: make_segments_plot
def make_segments_plot(workflow, seg_files, out_dir, tags=None):
    """Add a job to the workflow that plots the analysis segments.

    Parameters
    ----------
    workflow : pycbc.workflow.core.Workflow
        The workflow the plotting node is added to.
    seg_files : pycbc.workflow.core.FileList
        Segment files passed to the plotting executable.
    out_dir : path
        Directory in which the output HTML page is written.
    tags : list of strings, optional
        Tagging strings used in the output file name.
    """
    # BUGFIX: use None instead of a mutable [] default; a shared default
    # list would be visible (and mutable) across calls.
    tags = [] if tags is None else tags
    make_analysis_dir(out_dir)
    node = PlotExecutable(workflow.cp, 'plot_segments', ifos=workflow.ifos,
                          out_dir=out_dir, tags=tags).create_node()
    node.add_input_list_opt('--segment-files', seg_files)
    node.new_output_file_opt(workflow.analysis_time, '.html', '--output-file')
    workflow += node
示例2: setup_postprocessing_preparation
def setup_postprocessing_preparation(workflow, triggerFiles, output_dir,
                                     tags=None, **kwargs):
    """
    This function aims to be the gateway for preparing the output of the
    coincidence and/or matched-filtering stages of the workflow for
    calculation of the significance of triggers and any rate statements that
    are to be made. In practice this normally means combining output files,
    performing any clustering and performing mapping between triggers and
    simulations where needed.

    Parameters
    -----------
    workflow : pycbc.workflow.core.Workflow
        The Workflow instance that the coincidence jobs will be added to.
    triggerFiles : pycbc.workflow.core.FileList
        An FileList of the trigger files that are used as
        input at this stage.
    output_dir : path
        The directory in which output files will be stored.
    tags : list of strings (optional, default = None)
        A list of the tagging strings that will be used for all jobs created
        by this call to the workflow. An example might be ['POSTPROC1'] or
        ['DENTYSNEWPOSTPROC']. This will be used in output names.

    Returns
    --------
    postProcPreppedFiles : pycbc.workflow.core.FileList
        A list of files that can be used as input for the post-processing
        stage.
    """
    logging.info("Entering post-processing preparation module.")
    # BUGFIX: None instead of a mutable [] default argument.
    tags = [] if tags is None else tags
    make_analysis_dir(output_dir)

    # Parse for options in .ini file
    postProcPrepMethod = workflow.cp.get_opt_tags("workflow-postprocprep",
                                                  "postprocprep-method", tags)

    # Scope here for adding different options/methods here. For now we only
    # have the single_stage ihope method which consists of converting the
    # ligolw_thinca output xml into one file, clustering, performing injection
    # finding and putting everything into one SQL database.
    if postProcPrepMethod == "PIPEDOWN_WORKFLOW":
        # If you want the intermediate output files, call this directly
        postProcPreppedFiles, _, _, _ = setup_postprocprep_pipedown_workflow(
            workflow, triggerFiles, output_dir, tags=tags, **kwargs)
    elif postProcPrepMethod == "PIPEDOWN_REPOP":
        postProcPreppedFiles, _, _, _ = setup_postprocprep_pipedown_workflow(
            workflow, triggerFiles, output_dir, tags=tags, do_repop=True,
            **kwargs)
    elif postProcPrepMethod == "GSTLAL_POSTPROCPREP":
        postProcPreppedFiles = setup_postprocprep_gstlal_workflow(
            workflow, triggerFiles, output_dir, tags=tags, **kwargs)
    else:
        # BUGFIX: the message previously omitted PIPEDOWN_REPOP even though
        # the branch above accepts it.
        errMsg = "Post-processing preparation method not recognized. Must be "
        errMsg += "one of PIPEDOWN_WORKFLOW, PIPEDOWN_REPOP or "
        errMsg += "GSTLAL_POSTPROCPREP."
        raise ValueError(errMsg)

    logging.info("Leaving post-processing preparation module.")
    return postProcPreppedFiles
示例3: make_average_psd
def make_average_psd(workflow, psd_files, out_dir, tags=None,
                     gate_files=None,
                     output_fmt='.txt'):
    """Add a job that averages per-segment PSD files.

    One detector-averaged output file is produced via the standard output
    option, plus one time-averaged file per interferometer attached through
    the H1:path L1:path multi-detector option syntax. Returns the node's
    output FileList.
    """
    make_analysis_dir(out_dir)
    if tags is None:
        tags = []
    avg_exe = AvgPSDExecutable(workflow.cp, 'average_psd', ifos=workflow.ifos,
                               out_dir=out_dir, tags=tags)
    node = avg_exe.create_node()
    node.add_input_list_opt('--input-files', psd_files)
    node.new_output_file_opt(workflow.analysis_time, output_fmt,
                             '--detector-avg-file')

    # FIXME should Node have a public method for handling
    # multidetector output options of type --option H1:foo L1:bar?
    node.add_opt('--time-avg-file')
    for det in workflow.ifos:
        per_det_file = File(det, node.executable.name, workflow.analysis_time,
                            extension=output_fmt, directory=out_dir,
                            tags=tags)
        node.add_opt(det + ':' + per_det_file.name)
        # Private API used because the path is passed as IFO:name, not as a
        # plain output option (see FIXME above).
        node._add_output(per_det_file)

        if gate_files is not None:
            # Keep the last gate file whose ifo matches, as the original
            # loop did.
            det_gate = None
            for gfile in gate_files:
                if gfile.ifo == det:
                    det_gate = gfile
            if det_gate is not None:
                node.add_input_opt('--gating-file', det_gate)

    workflow += node
    return node.output_files
示例4: make_psd_file
def make_psd_file(workflow, frame_files, segment_file, segment_name, out_dir,
                  gate_files=None, tags=None):
    """Add a single calculate_psd job to the workflow.

    The job estimates a PSD over the segments named ``segment_name`` in
    ``segment_file``; frame files are passed explicitly only when the
    executable has no 'frame-type' option configured. Returns the output
    HDF File object.
    """
    make_analysis_dir(out_dir)
    tags = [] if not tags else tags
    exe = CalcPSDExecutable(workflow.cp, 'calculate_psd',
                            ifos=segment_file.ifo, out_dir=out_dir,
                            tags=tags)
    node = exe.create_node()
    node.add_input_opt('--analysis-segment-file', segment_file)
    node.add_opt('--segment-name', segment_name)

    if gate_files is not None:
        # Pick the last gate file matching this segment file's ifo, if any.
        matches = [g for g in gate_files if g.ifo == segment_file.ifo]
        if matches:
            node.add_input_opt('--gating-file', matches[-1])

    # Frames are only passed on the command line when no frame type is
    # configured for the executable.
    if not exe.has_opt('frame-type'):
        node.add_input_list_opt('--frame-files', frame_files)

    node.new_output_file_opt(workflow.analysis_time, '.hdf', '--output-file')
    workflow += node
    return node.output_files[0]
示例5: setup_psd_calculate
def setup_psd_calculate(workflow, frame_files, ifo, segments,
                        segment_name, out_dir,
                        gate_files=None, tags=None):
    """Plan PSD estimation for one ifo, optionally split into parallel jobs.

    When a [workflow-psd] parallelization-factor is configured, the segment
    list is chunked into that many parts, one calculate_psd job per part,
    and the resulting PSDs are merged into a single file; otherwise the one
    PSD file is returned directly.
    """
    make_analysis_dir(out_dir)
    tags = [] if not tags else tags
    if workflow.cp.has_option_tags('workflow-psd', 'parallelization-factor',
                                   tags=tags):
        num_parts = int(workflow.cp.get_opt_tags('workflow-psd',
                                                 'parallelization-factor',
                                                 tags=tags))
    else:
        num_parts = 1

    psd_files = FileList([])
    for part, segs in enumerate(list(chunks(segments, num_parts))):
        # Each chunk gets its own XML segment file to drive one PSD job.
        seg_file = segments_to_file(
            segmentlist(segs),
            out_dir + '/%s-INSPIRAL_DATA-%s.xml' % (ifo, part),
            'INSPIRAL_DATA', ifo=ifo)
        psd_files.append(make_psd_file(workflow, frame_files, seg_file,
                                       segment_name, out_dir,
                                       gate_files=gate_files,
                                       tags=tags + ['PART%s' % part]))

    if num_parts > 1:
        return merge_psds(workflow, psd_files, ifo, out_dir, tags=tags)
    return psd_files[0]
示例6: make_inj_table
def make_inj_table(workflow, inj_file, out_dir, tags=None):
    """Add a job that renders an HTML table of injections.

    Parameters
    ----------
    workflow : pycbc.workflow.core.Workflow
        The workflow the node is added to.
    inj_file : pycbc.workflow.core.File
        The injection file to tabulate; its segment sets the output's
        valid time.
    out_dir : path
        Directory in which the HTML page is written.
    tags : list of strings, optional
        Tagging strings used in the output file name.
    """
    # BUGFIX: None instead of a mutable [] default argument.
    tags = [] if tags is None else tags
    make_analysis_dir(out_dir)
    node = PlotExecutable(workflow.cp, 'page_injections', ifos=workflow.ifos,
                          out_dir=out_dir, tags=tags).create_node()
    node.add_input_opt('--injection-file', inj_file)
    node.new_output_file_opt(inj_file.segment, '.html', '--output-file')
    workflow += node
示例7: make_snrifar_plot
def make_snrifar_plot(workflow, bg_file, out_dir, tags=None):
    """Add a job that plots SNR versus inverse false-alarm rate.

    Parameters
    ----------
    workflow : pycbc.workflow.core.Workflow
        The workflow the node is added to.
    bg_file : pycbc.workflow.core.File
        Trigger/background file to plot; its segment sets the output's
        valid time.
    out_dir : path
        Directory in which the PNG is written.
    tags : list of strings, optional
        Tagging strings used in the output file name.
    """
    # BUGFIX: None instead of a mutable [] default argument.
    tags = [] if tags is None else tags
    make_analysis_dir(out_dir)
    node = PlotExecutable(workflow.cp, 'plot_snrifar', ifos=workflow.ifos,
                          out_dir=out_dir, tags=tags).create_node()
    node.add_input_opt('--trigger-file', bg_file)
    node.new_output_file_opt(bg_file.segment, '.png', '--output-file')
    workflow += node
示例8: setup_psd_calculate
def setup_psd_calculate(workflow, frame_files, ifo, segments,
                        segment_name, out_dir, tags=None):
    """Plan PSD estimation for one ifo, optionally split into parallel jobs.

    When a [workflow-psd] parallelization-factor is configured, the
    de-duplicated segment list is chunked into that many parts, one
    calculate_psd job per part, and the per-part PSDs are merged; otherwise
    the single PSD file is returned directly.
    """
    make_analysis_dir(out_dir)
    tags = [] if not tags else tags
    if workflow.cp.has_option_tags('workflow-psd', 'parallelization-factor',
                                   tags=tags):
        num_parts = int(workflow.cp.get_opt_tags('workflow-psd',
                                                 'parallelization-factor',
                                                 tags=tags))
    else:
        num_parts = 1

    # get rid of duplicate segments which happen when splitting the bank
    segments = segmentlist(frozenset(segments))

    psd_files = FileList([])
    for part, segs in enumerate(list(chunks(segments, num_parts))):
        seg_file = SegFile.from_segment_list(
            '%s_%s' % (segment_name, part), segmentlist(segs), segment_name,
            ifo, valid_segment=workflow.analysis_time, extension='xml',
            directory=out_dir)
        psd_files.append(make_psd_file(workflow, frame_files, seg_file,
                                       segment_name, out_dir,
                                       tags=tags + ['PART%s' % part]))

    if num_parts > 1:
        return merge_psds(workflow, psd_files, ifo, out_dir, tags=tags)
    return psd_files[0]
示例9: make_sensitivity_plot
def make_sensitivity_plot(workflow, inj_file, out_dir, tags=None):
    """Add one sensitivity-plot job per [plot_sensitivity] subsection.

    Parameters
    ----------
    workflow : pycbc.workflow.core.Workflow
        The workflow the nodes are added to.
    inj_file : pycbc.workflow.core.File
        Injection file to plot; its segment sets each output's valid time.
    out_dir : path
        Directory in which the PNGs are written.
    tags : list of strings, optional
        Tagging strings appended to each subsection tag in output names.
    """
    # BUGFIX: None instead of a mutable [] default argument.
    tags = [] if tags is None else tags
    make_analysis_dir(out_dir)
    # One plotting node per configured plot_sensitivity variant.
    for tag in workflow.cp.get_subsections('plot_sensitivity'):
        node = PlotExecutable(workflow.cp, 'plot_sensitivity',
                              ifos=workflow.ifos,
                              out_dir=out_dir, tags=[tag] + tags).create_node()
        node.add_input_opt('--injection-file', inj_file)
        node.new_output_file_opt(inj_file.segment, '.png', '--output-file')
        workflow += node
示例10: merge_psds
def merge_psds(workflow, files, ifo, out_dir, tags=None):
    """Add a job that merges per-part PSD files into one HDF file.

    Returns the merged File object after adding the node to the workflow.
    """
    make_analysis_dir(out_dir)
    tags = [] if not tags else tags
    merge_exe = MergePSDFiles(workflow.cp, "merge_psds", ifos=ifo,
                              out_dir=out_dir, tags=tags)
    node = merge_exe.create_node()
    node.add_input_list_opt("--psd-files", files)
    node.new_output_file_opt(workflow.analysis_time, ".hdf", "--output-file")
    workflow += node
    return node.output_files[0]
示例11: setup_coh_PTF_post_processing
def setup_coh_PTF_post_processing(workflow, trigger_files, trigger_cache,
        output_dir, segment_dir, injection_trigger_files=None,
        injection_files=None, injection_trigger_caches=None,
        injection_caches=None, config_file=None, run_dir=None, ifos=None,
        web_dir=None, inj_tags=None, tags=None, **kwargs):
    """
    This function aims to be the gateway for running postprocessing in CBC
    offline workflows. Post-processing generally consists of calculating the
    significance of triggers and making any statements about trigger rates.
    Dedicated plotting jobs do not belong here.

    Parameters
    -----------
    workflow : pycbc.workflow.core.Workflow
        The Workflow instance that the coincidence jobs will be added to.
    trigger_files : pycbc.workflow.core.FileList
        An FileList of the trigger files that are used as
        input at this stage.
    output_dir : path
        The directory in which output files will be stored.
    inj_tags : list of strings (optional, default = None)
        Tagging strings identifying the injection sets.
    tags : list of strings (optional, default = None)
        A list of the tagging strings that will be used for all jobs created
        by this call to the workflow. An example might be ['POSTPROC1'] or
        ['DENTYSNEWPOSTPROC']. This will be used in output names.

    Returns
    --------
    post_proc_files : pycbc.workflow.core.FileList
        A list of the output from this stage.
    """
    logging.info("Entering post-processing stage.")
    # BUGFIX: None instead of mutable [] default arguments.
    inj_tags = [] if inj_tags is None else inj_tags
    tags = [] if tags is None else tags
    make_analysis_dir(output_dir)

    # Parse for options in .ini file
    post_proc_method = workflow.cp.get_opt_tags("workflow-postproc",
                                                "postproc-method", tags)

    # Scope here for adding different options/methods here. For now we only
    # have the single_stage ihope method which consists of converting the
    # ligolw_thinca output xml into one file, clustering, performing injection
    # finding and putting everything into one SQL database.
    if post_proc_method == "COH_PTF_WORKFLOW":
        post_proc_files = setup_postproc_coh_PTF_workflow(workflow,
                trigger_files, trigger_cache, injection_trigger_files,
                injection_files, injection_trigger_caches, injection_caches,
                config_file, output_dir, web_dir, segment_dir, ifos=ifos,
                inj_tags=inj_tags, tags=tags, **kwargs)
    else:
        errMsg = "Post-processing method not recognized. Must be "
        errMsg += "COH_PTF_WORKFLOW."
        raise ValueError(errMsg)

    logging.info("Leaving post-processing module.")
    return post_proc_files
示例12: setup_interval_coinc_inj
def setup_interval_coinc_inj(workflow, hdfbank, full_data_trig_files,
                             inj_trig_files, background_file, veto_file,
                             veto_name, out_dir, tags=None):
    """
    This function sets up exact match coincidence and background estimation
    using a folded interval technique.

    Coincidence jobs are planned for the injection/injection,
    injection/full-data and full-data/injection trigger combinations, each
    split over a configured parallelization factor, and their outputs are
    combined by a single statmap_inj job whose first output file is
    returned. Only a single pregenerated template bank and at most two ifos
    are supported.
    """
    # BUGFIX: None instead of a mutable [] default argument.
    tags = [] if tags is None else tags
    make_analysis_dir(out_dir)
    logging.info('Setting up coincidence for injection')

    if len(hdfbank) > 1:
        raise ValueError('This coincidence method only supports a '
                         'pregenerated template bank')
    hdfbank = hdfbank[0]

    if len(workflow.ifos) > 2:
        raise ValueError('This coincidence method only supports two '
                         'ifo searches')

    combinecoinc_exe = PyCBCStatMapInjExecutable(workflow.cp, 'statmap_inj',
                                                 ifos=workflow.ifos,
                                                 tags=tags, out_dir=out_dir)

    # Wall time knob and memory knob
    factor = int(workflow.cp.get_opt_tags('workflow-coincidence',
                                          'parallelization-factor', tags))

    ffiles = {}
    ifiles = {}
    # BUGFIX: loop variable renamed so it no longer shadows the builtin
    # `file` (Python 2) / reads as one.
    ifos, files = full_data_trig_files.categorize_by_attr('ifo')
    for ifo, file_list in zip(ifos, files):
        ffiles[ifo] = file_list[0]
    ifos, files = inj_trig_files.categorize_by_attr('ifo')
    for ifo, file_list in zip(ifos, files):
        ifiles[ifo] = file_list[0]
    ifo0, ifo1 = ifos[0], ifos[1]

    # Trigger-file pairings and the coincidence type tag for each.
    combo = [(FileList([ifiles[ifo0], ifiles[ifo1]]), "injinj"),
             (FileList([ifiles[ifo0], ffiles[ifo1]]), "injfull"),
             (FileList([ifiles[ifo1], ffiles[ifo0]]), "fullinj"),
             ]
    bg_files = {'injinj': [], 'injfull': [], 'fullinj': []}

    for trig_files, ctag in combo:
        findcoinc_exe = PyCBCFindCoincExecutable(workflow.cp, 'coinc',
                                                 ifos=workflow.ifos,
                                                 tags=tags + [ctag],
                                                 out_dir=out_dir)
        # Split the coincidence search into `factor` independent groups.
        for i in range(factor):
            group_str = '%s/%s' % (i, factor)
            coinc_node = findcoinc_exe.create_node(trig_files, hdfbank,
                                                   veto_file, veto_name,
                                                   group_str,
                                                   tags=([str(i)]))
            bg_files[ctag] += coinc_node.output_files
            workflow.add_node(coinc_node)

    combine_node = combinecoinc_exe.create_node(
        FileList(bg_files['injinj']), background_file,
        FileList(bg_files['injfull']), FileList(bg_files['fullinj']))
    workflow.add_node(combine_node)

    logging.info('...leaving coincidence ')
    return combine_node.output_files[0]
示例13: find_injections_in_hdf_coinc
def find_injections_in_hdf_coinc(workflow, inj_coinc_file, inj_xml_file,
                                 veto_file, veto_name, out_dir, tags=None):
    """Add a job matching HDF coincident triggers against injections.

    Parameters
    ----------
    workflow : pycbc.workflow.core.Workflow
        The workflow the node is added to.
    inj_coinc_file : pycbc.workflow.core.File
        Coincident-trigger file from the injection run.
    inj_xml_file : pycbc.workflow.core.File
        Injection definition XML file.
    veto_file : pycbc.workflow.core.File
        Veto segment file applied during matching.
    veto_name : str
        Name of the veto segment list to use.
    out_dir : path
        Directory in which the output is written.
    tags : list of strings, optional
        Tagging strings used in the output name.

    Returns
    -------
    pycbc.workflow.core.File
        The injection-finding output file.
    """
    # BUGFIX: None instead of a mutable [] default argument.
    tags = [] if tags is None else tags
    make_analysis_dir(out_dir)
    exe = PyCBCHDFInjFindExecutable(workflow.cp, 'hdfinjfind',
                                    ifos=workflow.ifos,
                                    out_dir=out_dir, tags=tags)
    node = exe.create_node(inj_coinc_file, inj_xml_file, veto_file,
                           veto_name, tags)
    workflow += node
    return node.output_files[0]
示例14: make_foreground_table
def make_foreground_table(workflow, trig_file, bank_file, ftag, out_dir,
                          tags=None):
    """Add a job that renders an HTML table of foreground triggers.

    Parameters
    ----------
    workflow : pycbc.workflow.core.Workflow
        The workflow the node is added to.
    trig_file : pycbc.workflow.core.File
        Trigger file containing the foreground events.
    bank_file : pycbc.workflow.core.File
        Template bank file; its segment sets the output's valid time.
    ftag : str
        Foreground tag passed to the executable.
    out_dir : path
        Directory in which the HTML page is written.
    tags : list of strings, optional
        Tagging strings used in the output name.
    """
    # BUGFIX: None instead of a mutable [] default argument.
    tags = [] if tags is None else tags
    make_analysis_dir(out_dir)
    node = PlotExecutable(workflow.cp, 'page_foreground', ifos=workflow.ifos,
                          out_dir=out_dir, tags=tags).create_node()
    node.add_input_opt('--bank-file', bank_file)
    node.add_opt('--foreground-tag', ftag)
    node.add_input_opt('--trigger-file', trig_file)
    node.new_output_file_opt(bank_file.segment, '.html', '--output-file')
    workflow += node
示例15: make_coinc_snrchi_plot
def make_coinc_snrchi_plot(workflow, inj_file, inj_trig, stat_file,
                           trig_file, out_dir, tags=None):
    """Add one SNR/chi-squared plot job per [plot_coinc_snrchi] subsection.

    Parameters
    ----------
    workflow : pycbc.workflow.core.Workflow
        The workflow the nodes are added to.
    inj_file : pycbc.workflow.core.File
        Found-injection file; its segment sets each output's valid time.
    inj_trig : pycbc.workflow.core.File
        Single-detector injection trigger file; its ifo is used for the
        executable.
    stat_file : pycbc.workflow.core.File
        Coincident statistic file.
    trig_file : pycbc.workflow.core.File
        Single-detector trigger file.
    out_dir : path
        Directory in which the PNGs are written.
    tags : list of strings, optional
        Tagging strings appended to each subsection tag in output names.
    """
    # BUGFIX: None instead of a mutable [] default argument.
    tags = [] if tags is None else tags
    make_analysis_dir(out_dir)
    # One plotting node per configured plot_coinc_snrchi variant.
    for tag in workflow.cp.get_subsections('plot_coinc_snrchi'):
        node = PlotExecutable(workflow.cp, 'plot_coinc_snrchi',
                              ifos=inj_trig.ifo,
                              out_dir=out_dir, tags=[tag] + tags).create_node()
        node.add_input_opt('--found-injection-file', inj_file)
        node.add_input_opt('--single-injection-file', inj_trig)
        node.add_input_opt('--coinc-statistic-file', stat_file)
        node.add_input_opt('--single-trigger-file', trig_file)
        node.new_output_file_opt(inj_file.segment, '.png', '--output-file')
        workflow += node