本文整理汇总了Python中pycbc.workflow.core.Workflow.set_job_properties方法的典型用法代码示例。如果您正苦于以下问题:Python Workflow.set_job_properties方法的具体用法?Python Workflow.set_job_properties怎么用?Python Workflow.set_job_properties使用的例子?那么恭喜您, 这里精选的方法代码示例或许可以为您提供帮助。您也可以进一步了解该方法所在类pycbc.workflow.core.Workflow
的用法示例。
在下文中一共展示了Workflow.set_job_properties方法的4个代码示例,这些例子默认根据受欢迎程度排序。您可以为喜欢或者感觉有用的代码点赞,您的评价将有助于系统推荐出更棒的Python代码示例。
示例1: setup_foreground_minifollowups
# 需要导入模块: from pycbc.workflow.core import Workflow [as 别名]
# 或者: from pycbc.workflow.core.Workflow import set_job_properties [as 别名]
def setup_foreground_minifollowups(workflow, coinc_file, single_triggers,
                                   tmpltbank_file, insp_segs, insp_data_name,
                                   insp_anal_name, dax_output, out_dir, tags=None):
    """ Create plots that followup the Nth loudest coincident triggers
    from a statmap produced HDF file.

    Parameters
    ----------
    workflow: pycbc.workflow.Workflow
        The core workflow instance we are populating
    coinc_file: pycbc.workflow.File
        The file associated with the coincident (statmap) triggers.
    single_triggers: list of pycbc.workflow.File
        A list containing the file objects associated with the merged
        single detector trigger files for each ifo.
    tmpltbank_file: pycbc.workflow.File
        The file object pointing to the HDF format template bank
    insp_segs: SegFile
        The segment file containing the data read and analyzed by each
        inspiral job.
    insp_data_name: str
        The name of the segmentlist storing data read.
    insp_anal_name: str
        The name of the segmentlist storing data analyzed.
    dax_output: str
        The directory that will contain the sub-workflow dax file.
    out_dir: path
        The directory to store minifollowups result plots and files
    tags: {None, optional}
        Tags to add to the minifollowups executables

    Returns
    -------
    layout: list
        A list of tuples which specify the displayed file layout for the
        minifollowups plots.
    """
    logging.info('Entering minifollowups module')

    # Nothing to do if the workflow configuration does not request minifollowups
    if not workflow.cp.has_section('workflow-minifollowups'):
        logging.info('There is no [workflow-minifollowups] section in configuration file')
        logging.info('Leaving minifollowups')
        return

    tags = [] if tags is None else tags
    makedir(dax_output)

    # turn the config file into a File class; write via a context manager so
    # the file handle is closed promptly (the original passed an open handle
    # to cp.write() that was never closed)
    config_path = os.path.abspath(dax_output + '/' + '_'.join(tags) + 'foreground_minifollowup.ini')
    with open(config_path, 'w') as config_fh:
        workflow.cp.write(config_fh)

    config_file = wdax.File(os.path.basename(config_path))
    config_file.PFN(config_path, 'local')

    exe = Executable(workflow.cp, 'foreground_minifollowup', ifos=workflow.ifos, out_dir=dax_output)

    node = exe.create_node()
    node.add_input_opt('--config-files', config_file)
    node.add_input_opt('--bank-file', tmpltbank_file)
    node.add_input_opt('--statmap-file', coinc_file)
    node.add_multiifo_input_list_opt('--single-detector-triggers', single_triggers)
    node.add_input_opt('--inspiral-segments', insp_segs)
    node.add_opt('--inspiral-data-read-name', insp_data_name)
    node.add_opt('--inspiral-data-analyzed-name', insp_anal_name)
    node.new_output_file_opt(workflow.analysis_time, '.dax', '--output-file', tags=tags)
    node.new_output_file_opt(workflow.analysis_time, '.dax.map', '--output-map', tags=tags)

    # the generated dax name doubles as the sub-workflow name; the map file
    # tells Pegasus where the sub-workflow's outputs live
    name = node.output_files[0].name
    map_loc = node.output_files[1].name
    node.add_opt('--workflow-name', name)
    node.add_opt('--output-dir', out_dir)

    workflow += node

    # execute this in a sub-workflow
    fil = node.output_files[0]

    job = dax.DAX(fil)
    job.addArguments('--basename %s' % os.path.splitext(os.path.basename(name))[0])
    Workflow.set_job_properties(job, map_loc)
    workflow._adag.addJob(job)
    # the sub-workflow dax must be generated before it can be planned/run
    dep = dax.Dependency(parent=node._dax_node, child=job)
    workflow._adag.addDependency(dep)
    logging.info('Leaving minifollowups module')
示例2: setup_foreground_inference
# 需要导入模块: from pycbc.workflow.core import Workflow [as 别名]
# 或者: from pycbc.workflow.core.Workflow import set_job_properties [as 别名]
def setup_foreground_inference(workflow, coinc_file, single_triggers,
                               tmpltbank_file, insp_segs, insp_data_name,
                               insp_anal_name, dax_output, out_dir, tags=None):
    """ Creates workflow node that will run the inference workflow.

    Parameters
    ----------
    workflow: pycbc.workflow.Workflow
        The core workflow instance we are populating
    coinc_file: pycbc.workflow.File
        The file associated with coincident triggers.
    single_triggers: list of pycbc.workflow.File
        A list containing the file objects associated with the merged
        single detector trigger files for each ifo.
    tmpltbank_file: pycbc.workflow.File
        The file object pointing to the HDF format template bank
    insp_segs: SegFile
        The segment file containing the data read and analyzed by each
        inspiral job.
    insp_data_name: str
        The name of the segmentlist storing data read.
    insp_anal_name: str
        The name of the segmentlist storing data analyzed.
    dax_output : str
        The name of the output DAX file.
    out_dir: path
        The directory to store minifollowups result plots and files
    tags: {None, optional}
        Tags to add to the minifollowups executables
    """
    logging.info("Entering inference module")

    # check if configuration file has inference section
    if not workflow.cp.has_section("workflow-inference"):
        logging.info("There is no [workflow-inference] section in configuration file")
        logging.info("Leaving inference module")
        return

    # default tags is a list
    tags = [] if tags is None else tags

    # make the directory that will contain the dax file
    makedir(dax_output)

    # turn the config file into a File class; use a context manager so the
    # handle is closed (the original left the opened file dangling)
    config_path = os.path.abspath(dax_output + "/" + "_".join(tags) \
                                  + "foreground_inference.ini")
    with open(config_path, "w") as config_fh:
        workflow.cp.write(config_fh)
    config_file = wdax.File(os.path.basename(config_path))
    config_file.PFN(config_path, "local")

    # create an Executable for the inference workflow generator
    exe = Executable(workflow.cp, "foreground_inference", ifos=workflow.ifos,
                     out_dir=dax_output)

    # create the node that will run in the workflow
    node = exe.create_node()
    node.add_input_opt("--config-files", config_file)
    node.add_input_opt("--bank-file", tmpltbank_file)
    node.add_input_opt("--statmap-file", coinc_file)
    node.add_multiifo_input_list_opt("--single-detector-triggers",
                                     single_triggers)
    node.new_output_file_opt(workflow.analysis_time, ".dax", "--output-file",
                             tags=tags)
    node.new_output_file_opt(workflow.analysis_time, ".dax.map",
                             "--output-map", tags=tags)

    # get dax name and use it for the workflow name
    name = node.output_files[0].name
    node.add_opt("--workflow-name", name)

    # get output map name and use it for the output dir name
    map_loc = node.output_files[1].name
    node.add_opt("--output-dir", out_dir)

    # add this node to the workflow
    workflow += node

    # create job for dax that will run a sub-workflow
    # and add it to the workflow
    fil = node.output_files[0]
    job = dax.DAX(fil)
    job.addArguments("--basename %s" % os.path.splitext(os.path.basename(name))[0])
    Workflow.set_job_properties(job, map_loc)
    workflow._adag.addJob(job)

    # make dax a child of the inference workflow generator node
    dep = dax.Dependency(parent=node._dax_node, child=job)
    workflow._adag.addDependency(dep)

    logging.info("Leaving inference module")
示例3: setup_single_det_minifollowups
# 需要导入模块: from pycbc.workflow.core import Workflow [as 别名]
# 或者: from pycbc.workflow.core.Workflow import set_job_properties [as 别名]
def setup_single_det_minifollowups(workflow, single_trig_file, tmpltbank_file,
                                   insp_segs, insp_data_name, insp_anal_name,
                                   dax_output, out_dir, veto_file=None,
                                   veto_segment_name=None, tags=None):
    """ Create plots that followup the Nth loudest clustered single detector
    triggers from a merged single detector trigger HDF file.

    Parameters
    ----------
    workflow: pycbc.workflow.Workflow
        The core workflow instance we are populating
    single_trig_file: pycbc.workflow.File
        The File class holding the single detector triggers.
    tmpltbank_file: pycbc.workflow.File
        The file object pointing to the HDF format template bank
    insp_segs: SegFile
        The segment file containing the data read by each inspiral job.
    insp_data_name: str
        The name of the segmentlist storing data read.
    insp_anal_name: str
        The name of the segmentlist storing data analyzed.
    dax_output: str
        The directory that will contain the sub-workflow dax file.
    out_dir: path
        The directory to store minifollowups result plots and files
    veto_file: pycbc.workflow.File, optional
        Veto segments to apply to the triggers; requires veto_segment_name.
    veto_segment_name: str, optional
        The name of the segmentlist in veto_file to apply.
    tags: {None, optional}
        Tags to add to the minifollowups executables

    Returns
    -------
    layout: list
        A list of tuples which specify the displayed file layout for the
        minifollowups plots.
    """
    logging.info('Entering minifollowups module')

    # Nothing to do if minifollowups are not requested in the configuration
    if not workflow.cp.has_section('workflow-sngl_minifollowups'):
        msg = 'There is no [workflow-sngl_minifollowups] section in '
        msg += 'configuration file'
        logging.info(msg)
        logging.info('Leaving minifollowups')
        return

    tags = [] if tags is None else tags
    makedir(dax_output)

    # turn the config file into a File class; write inside a context manager
    # so the handle is closed (the original never closed the opened file)
    curr_ifo = single_trig_file.ifo
    config_path = os.path.abspath(dax_output + '/' + curr_ifo + \
                                  '_'.join(tags) + 'singles_minifollowup.ini')
    with open(config_path, 'w') as config_fh:
        workflow.cp.write(config_fh)

    config_file = wdax.File(os.path.basename(config_path))
    config_file.PFN(config_path, 'local')

    exe = Executable(workflow.cp, 'singles_minifollowup',
                     ifos=curr_ifo, out_dir=dax_output, tags=tags)

    wikifile = curr_ifo + '_'.join(tags) + 'loudest_table.txt'

    node = exe.create_node()
    node.add_input_opt('--config-files', config_file)
    node.add_input_opt('--bank-file', tmpltbank_file)
    node.add_input_opt('--single-detector-file', single_trig_file)
    node.add_input_opt('--inspiral-segments', insp_segs)
    node.add_opt('--inspiral-data-read-name', insp_data_name)
    node.add_opt('--inspiral-data-analyzed-name', insp_anal_name)
    node.add_opt('--instrument', curr_ifo)
    node.add_opt('--wiki-file', wikifile)
    if veto_file is not None:
        # an explicit error rather than assert: assert is stripped under -O
        if veto_segment_name is None:
            raise ValueError('veto_segment_name must be given with veto_file')
        node.add_input_opt('--veto-file', veto_file)
        node.add_opt('--veto-segment-name', veto_segment_name)
    node.new_output_file_opt(workflow.analysis_time, '.dax', '--output-file', tags=tags)
    node.new_output_file_opt(workflow.analysis_time, '.dax.map', '--output-map', tags=tags)

    name = node.output_files[0].name
    map_file = node.output_files[1]
    node.add_opt('--workflow-name', name)
    node.add_opt('--output-dir', out_dir)

    workflow += node

    # execute this in a sub-workflow
    fil = node.output_files[0]

    job = dax.DAX(fil)
    job.addArguments('--basename %s' \
                     % os.path.splitext(os.path.basename(name))[0])
    Workflow.set_job_properties(job, map_file)
    workflow._adag.addJob(job)
    # the sub-workflow dax must exist before it can be planned/run
    dep = dax.Dependency(parent=node._dax_node, child=job)
    workflow._adag.addDependency(dep)
    logging.info('Leaving minifollowups module')
示例4: setup_injection_minifollowups
# 需要导入模块: from pycbc.workflow.core import Workflow [as 别名]
# 或者: from pycbc.workflow.core.Workflow import set_job_properties [as 别名]
def setup_injection_minifollowups(
    workflow,
    injection_file,
    inj_xml_file,
    single_triggers,
    tmpltbank_file,
    insp_segs,
    insp_data_name,
    insp_anal_name,
    dax_output,
    out_dir,
    tags=None,
):
    """ Create plots that followup the closest missed injections

    Parameters
    ----------
    workflow: pycbc.workflow.Workflow
        The core workflow instance we are populating
    injection_file: pycbc.workflow.File
        The HDF injection-results file to follow up.
    inj_xml_file: pycbc.workflow.File
        The XML file describing the injection set.
    single_triggers: list of pycbc.workflow.File
        A list containing the file objects associated with the merged
        single detector trigger files for each ifo.
    tmpltbank_file: pycbc.workflow.File
        The file object pointing to the HDF format template bank
    insp_segs: SegFile
        The segment file containing the data read by each inspiral job.
    insp_data_name: str
        The name of the segmentlist storing data read.
    insp_anal_name: str
        The name of the segmentlist storing data analyzed.
    dax_output: str
        The directory that will contain the sub-workflow dax file.
    out_dir: path
        The directory to store minifollowups result plots and files
    tags: {None, optional}
        Tags to add to the minifollowups executables

    Returns
    -------
    layout: list
        A list of tuples which specify the displayed file layout for the
        minifollowups plots.
    """
    logging.info("Entering injection minifollowups module")

    # Nothing to do if injection minifollowups are not configured
    if not workflow.cp.has_section("workflow-injection_minifollowups"):
        logging.info("There is no [workflow-injection_minifollowups] section in configuration file")
        logging.info("Leaving minifollowups")
        return

    tags = [] if tags is None else tags
    makedir(dax_output)

    # turn the config file into a File class; write through a context manager
    # so the handle is closed (the original never closed the opened file)
    config_path = os.path.abspath(dax_output + "/" + "_".join(tags) + "injection_minifollowup.ini")
    with open(config_path, "w") as config_fh:
        workflow.cp.write(config_fh)

    config_file = wdax.File(os.path.basename(config_path))
    config_file.PFN(config_path, "local")

    exe = Executable(workflow.cp, "injection_minifollowup", ifos=workflow.ifos, out_dir=dax_output)

    node = exe.create_node()
    node.add_input_opt("--config-files", config_file)
    node.add_input_opt("--bank-file", tmpltbank_file)
    node.add_input_opt("--injection-file", injection_file)
    node.add_input_opt("--injection-xml-file", inj_xml_file)
    node.add_multiifo_input_list_opt("--single-detector-triggers", single_triggers)
    node.add_input_opt("--inspiral-segments", insp_segs)
    node.add_opt("--inspiral-data-read-name", insp_data_name)
    node.add_opt("--inspiral-data-analyzed-name", insp_anal_name)
    node.new_output_file_opt(workflow.analysis_time, ".dax", "--output-file", tags=tags)
    node.new_output_file_opt(workflow.analysis_time, ".dax.map", "--output-map", tags=tags)

    # dax name doubles as the sub-workflow name; map file locates its outputs
    name = node.output_files[0].name
    map_loc = node.output_files[1].name
    node.add_opt("--workflow-name", name)
    node.add_opt("--output-dir", out_dir)

    workflow += node

    # execute this in a sub-workflow
    fil = node.output_files[0]

    job = dax.DAX(fil)
    job.addArguments("--basename %s" % os.path.splitext(os.path.basename(name))[0])
    Workflow.set_job_properties(job, map_loc)
    workflow._adag.addJob(job)
    # the sub-workflow dax must be generated before it can be planned/run
    dep = dax.Dependency(parent=node._dax_node, child=job)
    workflow._adag.addDependency(dep)
    logging.info("Leaving injection minifollowups module")