本文整理汇总了Python中pycbc.workflow.core.Node.add_opt方法的典型用法代码示例。如果您正苦于以下问题:Python Node.add_opt方法的具体用法?Python Node.add_opt怎么用?Python Node.add_opt使用的例子?那么恭喜您, 这里精选的方法代码示例或许可以为您提供帮助。您也可以进一步了解该方法所在类pycbc.workflow.core.Node
的用法示例。
在下文中一共展示了Node.add_opt方法的14个代码示例,这些例子默认根据受欢迎程度排序。您可以为喜欢或者感觉有用的代码点赞,您的评价将有助于系统推荐出更棒的Python代码示例。
示例1: create_node
# 需要导入模块: from pycbc.workflow.core import Node [as 别名]
# 或者: from pycbc.workflow.core.Node import add_opt [as 别名]
def create_node(self, parent, tags=None):
    """Create a trigger-clustering node for this executable.

    Parameters
    ----------
    parent : pycbc.workflow.core.File
        The trigger file to cluster; its storage path is passed via
        the --trig-file option.
    tags : list of str, optional
        Tagging strings for this job (default: empty list).

    Returns
    -------
    node : Node
        The configured workflow node.
    out_files : FileList
        A FileList holding the single clustered output file.
    """
    if tags is None:
        tags = []
    node = Node(self)
    # Input / output options: the trigger file is handed over by path
    # rather than registered as a workflow input.
    node.add_opt('--trig-file', '%s' % parent.storage_path)
    node.add_opt('--output-dir', self.out_dir)
    node.add_profile('condor', 'request_cpus', self.num_threads)
    # Declare the clustered result as a pycbc.workflow.core.File object.
    out_file = File(self.ifos, 'INSPIRAL', parent.segment,
                    directory=self.out_dir, extension='xml.gz',
                    tags=[parent.tag_str, 'CLUSTERED'],
                    store_file=self.retain_files)
    out_file.PFN(out_file.cache_entry.path, site="local")
    out_file.node = node
    return node, FileList([out_file])
示例2: create_node
# 需要导入模块: from pycbc.workflow.core import Node [as 别名]
# 或者: from pycbc.workflow.core.Node import add_opt [as 别名]
def create_node(self, inj_coinc_file, inj_xml_file, veto_file, veto_name,
                tags=None):
    """Create a node to associate injections with coincident triggers.

    Parameters
    ----------
    inj_coinc_file : FileList
        Coincident trigger files from the injection run.
    inj_xml_file : FileList
        Injection definition files; the first file's segment is used
        for the output filename.
    veto_file : File
        Veto segment file; only applied when veto_name is given.
    veto_name : str or None
        Name of the veto segment list; if None no veto is applied.
    tags : list of str, optional
        Tagging strings for the output file (default: empty list).

    Returns
    -------
    Node
        The configured workflow node.
    """
    # Avoid a shared mutable default argument.
    if tags is None:
        tags = []
    node = Node(self)
    node.add_input_list_opt('--trigger-file', inj_coinc_file)
    node.add_input_list_opt('--injection-file', inj_xml_file)
    # Only pass veto information when a segment name was supplied.
    if veto_name is not None:
        node.add_input_opt('--veto-file', veto_file)
        node.add_opt('--segment-name', veto_name)
    node.new_output_file_opt(inj_xml_file[0].segment, '.hdf',
                             '--output-file', tags=tags)
    return node
示例3: create_node
# 需要导入模块: from pycbc.workflow.core import Node [as 别名]
# 或者: from pycbc.workflow.core.Node import add_opt [as 别名]
def create_node(self, trig_file, bank_file, veto_file, veto_name):
    """Create a node computing single-detector trigger statistics.

    The node reads a trigger file, the template bank it was produced
    from and a veto file, and writes an HDF output covering the
    trigger file's segment.
    """
    node = Node(self)
    # Executable objects are initialized with ifo information.
    node.add_opt('--ifo', self.ifo_string)
    # Register the three input files in the original option order.
    input_options = (('--trigger-file', trig_file),
                     ('--template-file', bank_file),
                     ('--veto-file', veto_file))
    for option, input_file in input_options:
        node.add_input_opt(option, input_file)
    node.add_opt('--veto-segment-name', veto_name)
    node.new_output_file_opt(trig_file.segment, '.hdf', '--output')
    return node
示例4: create_node
# 需要导入模块: from pycbc.workflow.core import Node [as 别名]
# 或者: from pycbc.workflow.core.Node import add_opt [as 别名]
def create_node(self, coinc_files, ifos, tags=None):
    """Create a node combining coincident trigger files into one HDF file.

    The output file spans the full time range covered by the input
    coincidence files.
    """
    if tags is None:
        tags = []
    # Overall segment spanned by all input coincidence files.
    covered_times = coinc_files.get_times_covered_by_files()
    span = segments.segment(covered_times[0][0], covered_times[-1][1])
    node = Node(self)
    node.set_memory(5000)
    node.add_input_list_opt('--coinc-files', coinc_files)
    node.add_opt('--ifos', ifos)
    node.new_output_file_opt(span, '.hdf', '--output-file', tags=tags)
    return node
示例5: create_node
# 需要导入模块: from pycbc.workflow.core import Node [as 别名]
# 或者: from pycbc.workflow.core.Node import add_opt [as 别名]
def create_node(self, parent, inj_trigs, inj_string, max_inc, segment):
    """Create a node filtering injection triggers by maximum inclination.

    Parameters
    ----------
    parent : File
        Injection cache file; its storage path is passed via --inj-cache.
    inj_trigs : FileList
        The injection trigger files; one filtered output is declared
        per input trigger file.
    inj_string : str
        Injection-set identifier used for options and output tags.
    max_inc : str
        Maximum inclination value used for filtering and tagging.
    segment : segment
        Analysis segment (kept for interface compatibility; the output
        files use each trigger file's own segment).

    Returns
    -------
    node : Node
        The configured workflow node.
    out_files : FileList
        The declared filtered output files.
    """
    node = Node(self)
    # NOTE: removed an unused local read of the 'trigger-name' option
    # ("trig_name") that was never referenced.
    node.add_opt('--inj-string', inj_string)
    node.add_opt('--max-inclination', max_inc)
    node.add_opt('--inj-cache', '%s' % parent.storage_path)
    out_files = FileList([])
    for inj_trig in inj_trigs:
        # Tag with the injection string, a FILTERED marker, the maximum
        # inclination, and the trailing component of the trigger's tag.
        out_file_tag = [inj_string, "FILTERED", max_inc,
                        inj_trig.tag_str.rsplit('_', 1)[-1]]
        out_file = File(self.ifos, inj_trig.description, inj_trig.segment,
                        extension="xml", directory=self.out_dir,
                        tags=out_file_tag)
        out_file.PFN(out_file.cache_entry.path, site="local")
        out_files.append(out_file)
    node.add_opt('--output-dir', self.out_dir)
    return node, out_files
示例6: create_node
# 需要导入模块: from pycbc.workflow.core import Node [as 别名]
# 或者: from pycbc.workflow.core.Node import add_opt [as 别名]
def create_node(self, parent, inj_trigs, inj_string, max_inc, segment):
    """Create a node filtering injection triggers by maximum inclination.

    Parameters
    ----------
    parent : File
        Injection cache file; its storage path is passed via --inj-cache.
    inj_trigs : FileList
        The injection trigger files; one filtered output is declared
        per input trigger file.
    inj_string : str
        Injection-set identifier; the part before max_inc is used in
        the output tags.
    max_inc : str
        Maximum inclination value used for filtering and tagging.
    segment : segment
        Analysis segment (kept for interface compatibility; the output
        files use each trigger file's own segment).

    Returns
    -------
    node : Node
        The configured workflow node.
    out_files : FileList
        The declared filtered output files.
    """
    node = Node(self)
    # NOTE: removed an unused local read of the 'trigger-name' option
    # ("trig_name") that was never referenced.
    node.add_opt("--inj-string", inj_string)
    node.add_opt("--max-inclination", max_inc)
    node.add_opt("--inj-cache", "%s" % parent.storage_path)
    out_files = FileList([])
    # The tag prefix does not depend on the trigger file, so compute it
    # once outside the loop (was recomputed per iteration).
    out_string = inj_string.split(max_inc)[0]
    for inj_trig in inj_trigs:
        out_file_tag = [out_string, "FILTERED", max_inc,
                        inj_trig.tag_str.rsplit("_", 1)[-1]]
        out_file = File(self.ifos, inj_trig.description, inj_trig.segment,
                        extension="xml", directory=self.out_dir,
                        tags=out_file_tag)
        out_file.PFN(out_file.cache_entry.path, site="local")
        out_files.append(out_file)
    node.add_opt("--output-dir", self.out_dir)
    return node, out_files
示例7: create_node
# 需要导入模块: from pycbc.workflow.core import Node [as 别名]
# 或者: from pycbc.workflow.core.Node import add_opt [as 别名]
def create_node(self, data_seg, valid_seg, parent=None, inj_file=None,
                dfParents=None, bankVetoBank=None, ipn_file=None, tags=None):
    """Create an inspiral search node for one analysis segment.

    Parameters
    ----------
    data_seg : segment
        Full data segment read by the job (including padding).
    valid_seg : segment
        Segment within which triggers are considered valid.
    parent : File, optional
        The (non-spinning) template bank file.
    inj_file : File, optional
        Injection file; incompatible with short time slides.
    dfParents : FileList
        Frame-cache files, one per ifo. Required.
    bankVetoBank : File, optional
        Bank-veto template file; required if 'do-bank-veto' is set.
    ipn_file : File, optional
        Sky-positions file for IPN-localized searches.
    tags : list of str, optional
        Tagging strings for the output file (default: empty list).

    Returns
    -------
    Node
        The configured workflow node.

    Raises
    ------
    ValueError
        If frame files or required options are missing, or if time
        slides are requested in an unsupported configuration.
    """
    # Avoid a shared mutable default argument.
    if tags is None:
        tags = []
    node = Node(self)
    if not dfParents:
        raise ValueError("%s must be supplied with frame files"
                         % self.name)
    # If doing single IFO search, make sure slides are disabled.
    if len(self.ifo_list) < 2 and \
            ('--do-short-slides' in node._options or
             '--short-slide-offset' in node._options):
        raise ValueError("Cannot run with time slides in a single IFO "
                         "configuration! Please edit your configuration "
                         "file accordingly.")
    pad_data = self.get_opt('pad-data')
    if pad_data is None:
        raise ValueError("The option pad-data is a required option of "
                         "%s. Please check the ini file." % self.name)
    # Feed in bank_veto_bank.xml if bank vetoes are enabled.
    if self.cp.has_option('inspiral', 'do-bank-veto'):
        if not bankVetoBank:
            raise ValueError("%s must be given a bank veto file if the "
                             "argument 'do-bank-veto' is given"
                             % self.name)
        node.add_input_opt('--bank-veto-templates', bankVetoBank)
    # Set time options: GPS analysis times are padded, trigger times
    # are not.
    node.add_opt('--gps-start-time', data_seg[0] + int(pad_data))
    node.add_opt('--gps-end-time', data_seg[1] - int(pad_data))
    node.add_opt('--trig-start-time', valid_seg[0])
    node.add_opt('--trig-end-time', valid_seg[1])
    node.add_profile('condor', 'request_cpus', self.num_threads)
    # Set the input and output files.
    node.new_output_file_opt(data_seg, '.xml.gz', '--output-file',
                             tags=tags, store_file=self.retain_files)
    node.add_input_opt('--non-spin-bank', parent)
    for frameCache in dfParents:
        node.add_input_opt('--%s-frame-cache' % frameCache.ifo.lower(),
                           frameCache)
        node.add_arg('--%s-data' % frameCache.ifo.lower())
    if ipn_file is not None:
        node.add_input_opt('--sky-positions-file', ipn_file)
    if inj_file is not None:
        # Short slides and injections are mutually exclusive.
        if ('--do-short-slides' in node._options or
                '--short-slide-offset' in node._options):
            raise ValueError("Cannot run with short slides in an "
                             "injection job. Please edit your "
                             "configuration file accordingly.")
        node.add_input_opt('--injection-file', inj_file)
    return node
示例8: create_node
# 需要导入模块: from pycbc.workflow.core import Node [as 别名]
# 或者: from pycbc.workflow.core.Node import add_opt [as 别名]
def create_node(self, zerolag, full_data,
                injfull, fullinj, ifos, tags=None):
    """Create a node combining zero-lag, background and mixed
    injection coincidences into one HDF output spanning the times
    covered by the zero-lag files."""
    if tags is None:
        tags = []
    # Overall segment spanned by the zero-lag coincidence files.
    covered_times = zerolag.get_times_covered_by_files()
    span = segments.segment(covered_times[0][0], covered_times[-1][1])
    node = Node(self)
    node.set_memory(5000)
    # Register each input file list under its command-line option,
    # preserving the original option order.
    list_options = (('--zero-lag-coincs', zerolag),
                    ('--full-data-background', full_data),
                    ('--mixed-coincs-inj-full', injfull),
                    ('--mixed-coincs-full-inj', fullinj))
    for option, file_list in list_options:
        node.add_input_list_opt(option, file_list)
    node.add_opt('--ifos', ifos)
    node.new_output_file_opt(span, '.hdf', '--output-file', tags=tags)
    return node
示例9: create_node
# 需要导入模块: from pycbc.workflow.core import Node [as 别名]
# 或者: from pycbc.workflow.core.Node import add_opt [as 别名]
def create_node(self, data_seg, valid_seg, parent=None, inj_file=None,
                dfParents=None, bankVetoBank=None, tags=None):
    """Create an inspiral search node for one analysis segment.

    Parameters
    ----------
    data_seg : segment
        Full data segment read by the job (including padding).
    valid_seg : segment
        Segment within which triggers are considered valid.
    parent : File, optional
        The (non-spinning) template bank file.
    inj_file : File, optional
        Injection file to feed to the node, if any.
    dfParents : FileList
        Frame-cache files, one per ifo. Required.
    bankVetoBank : File, optional
        Bank-veto template file; required if 'do-bank-veto' is set.
    tags : list of str, optional
        Tagging strings for the output file (default: empty list).

    Returns
    -------
    Node
        The configured workflow node.

    Raises
    ------
    ValueError
        If frame files or required options are missing.
    """
    # Avoid a shared mutable default argument.
    if tags is None:
        tags = []
    node = Node(self)
    if not dfParents:
        raise ValueError("%s must be supplied with frame files"
                         % self.name)
    pad_data = self.get_opt('pad-data')
    if pad_data is None:
        raise ValueError("The option pad-data is a required option of "
                         "%s. Please check the ini file." % self.name)
    # Feed in bank_veto_bank.xml if bank vetoes are enabled.
    if self.cp.has_option('inspiral', 'do-bank-veto'):
        if not bankVetoBank:
            # Fixed a missing space in the concatenated message
            # ("...if theargument..." previously).
            raise ValueError("%s must be given a bank veto file if the "
                             "argument 'do-bank-veto' is given"
                             % self.name)
        node.add_input_opt('--bank-veto-templates', bankVetoBank)
    # GPS analysis times are padded; trigger times are not.
    node.add_opt('--gps-start-time', data_seg[0] + int(pad_data))
    node.add_opt('--gps-end-time', data_seg[1] - int(pad_data))
    node.add_opt('--trig-start-time', valid_seg[0])
    node.add_opt('--trig-end-time', valid_seg[1])
    node.add_profile('condor', 'request_cpus', self.num_threads)
    # Set the input and output files.
    node.new_output_file_opt(data_seg, '.xml.gz', '--output-file',
                             tags=tags, store_file=self.retain_files)
    node.add_input_opt('--non-spin-bank', parent)
    for frameCache in dfParents:
        ifo_low = frameCache.ifo.lower()
        node.add_input_opt('--%s-frame-cache' % ifo_low, frameCache)
        node.add_arg('--%s-data' % ifo_low)
        # Channel name comes from the [workflow] section of the ini file.
        node.add_opt('--%s-channel-name' % ifo_low,
                     self.cp.get('workflow',
                                 '%s-channel-name' % ifo_low))
    if inj_file is not None:
        node.add_input_opt('--injection-file', inj_file)
    return node
示例10: create_node
# 需要导入模块: from pycbc.workflow.core import Node [as 别名]
# 或者: from pycbc.workflow.core.Node import add_opt [as 别名]
def create_node(self, segment, coinc_file, tmpltbank_file, data_type,
                loudest_event_number):
    """Creates a node for the loudest_event_number-th loudest event in
    the coinc_file."""
    node = Node(self)
    # Input files: coincident triggers and the template bank used.
    for option, input_file in (('--coinc-file', coinc_file),
                               ('--tmpltbank-file', tmpltbank_file)):
        node.add_input_opt(option, input_file)
    # Plain options selecting which event to follow up.
    node.add_opt('--data-type', data_type)
    node.add_opt('--loudest-event-number', loudest_event_number)
    # Output: an HTML followup page covering the analysis segment.
    node.new_output_file_opt(segment, '.html', '--output-file',
                             store_file=self.retain_files)
    return node
示例11: setup_plotthinca
# 需要导入模块: from pycbc.workflow.core import Node [as 别名]
# 或者: from pycbc.workflow.core.Node import add_opt [as 别名]
def setup_plotthinca(workflow, input_files, cache_filename, coinc_cachepattern,
                     slide_cachepattern, output_dir, tags=None, **kwargs):
    """
    This function sets up the nodes that will generate summary plots from a
    list of thinca files.

    Parameters
    -----------
    workflow : ahope.Workflow
        The ahope workflow instance that the coincidence jobs will be added to.
    input_files : ahope.FileList
        A FileList of files that are used as input at this stage.
    cache_filename : str
        Filename of the ihope cache.
    coinc_cachepattern : str
        The pattern that will be used to find zero-lag coincidence filenames
        in the cache.
    slide_cachepattern : str
        The pattern that will be used to find time slide filenames in the
        cache.
    output_dir : path
        The directory in which output files will be stored.
    tags : list of strings (optional, default = None)
        A list of the tagging strings that will be used for all jobs created
        by this call to the workflow. An example might be ['full_data'].
        This will be used in output names and directories.

    Returns
    --------
    plot_files : ahope.FileList
        A list of the output files from this stage.
    """
    # Avoid a shared mutable default argument.
    if tags is None:
        tags = []
    plot_files = FileList([])
    # Create the executable shared by all nodes below.
    plotthinca_job = Executable(workflow.cp, 'plotthinca', 'vanilla',
                                workflow.ifos, output_dir, tags)
    # Get all ifo combinations of at least 2 coincident ifos.
    ifo_combos = []
    for n in xrange(len(plotthinca_job.ifo_list)+1):
        for ifo_list in itertools.combinations(plotthinca_job.ifo_list, n+2):
            ifo_combos.append(ifo_list)
    # One node per (tag, ifo combination).
    for tag in tags:
        for ifo_list in ifo_combos:
            ifo_string = ''.join(ifo_list)
            node = Node(plotthinca_job)
            node.add_opt('--gps-start-time', workflow.analysis_time[0])
            node.add_opt('--gps-end-time', workflow.analysis_time[1])
            node.add_opt('--cache-file', cache_filename)
            node.add_opt('--ifo-times', ifo_string)
            node.add_opt('--ifo-tag', 'SECOND_'+ifo_string)
            for ifo in ifo_list:
                node.add_opt('--%s-triggers'%ifo.lower(), '')
            node.add_opt('--user-tag', tag.upper()+'_SUMMARY_PLOTS')
            node.add_opt('--output-path', output_dir)
            node.add_opt('--coinc-pattern', coinc_cachepattern)
            node.add_opt('--slide-pattern', slide_cachepattern)
            node.add_opt('--enable-output')
            workflow.add_node(node)
    # NOTE(review): plot_files is returned empty; outputs are tracked only
    # through the workflow nodes — confirm downstream callers expect this.
    return plot_files
示例12: get_veto_segs
# 需要导入模块: from pycbc.workflow.core import Node [as 别名]
# 或者: from pycbc.workflow.core.Node import add_opt [as 别名]
def get_veto_segs(workflow, ifo, category, start_time, end_time, out_dir,
                  vetoGenJob, tag=None, execute_now=False):
    """
    Obtain veto segments for the selected ifo and veto category and add the
    job to generate this to the workflow.

    Parameters
    -----------
    workflow: pycbc.workflow.core.Workflow
        An instance of the Workflow class that manages the workflow.
    ifo : string
        The string describing the ifo to generate vetoes for.
    category : int
        The veto category to generate vetoes for.
    start_time : gps time (either int/LIGOTimeGPS)
        The time at which to begin searching for segments.
    end_time : gps time (either int/LIGOTimeGPS)
        The time at which to stop searching for segments.
    out_dir : path
        The directory in which output will be stored.
    vetoGenJob : Job
        The veto generation Job class that will be used to create the Node.
    tag : string, optional (default=None)
        Use this to specify a tag. This can be used if this module is being
        called more than once to give call specific configuration (by setting
        options in [workflow-datafind-${TAG}] rather than
        [workflow-datafind]). This is also used to tag the Files returned by
        the class to uniqueify the Files and uniqueify the actual filename.
        FIXME: Filenames may not be unique with current codes!
    execute_now : boolean, optional
        If true, jobs are executed immediately. If false, they are added to
        the workflow to be run later.

    Returns
    --------
    veto_def_file : pycbc.workflow.core.OutSegFile
        The workflow File object corresponding to this DQ veto file.
    """
    # Full validity span of the veto file being generated.
    segValidSeg = segments.segment([start_time,end_time])
    node = Node(vetoGenJob)
    node.add_opt('--veto-categories', str(category))
    node.add_opt('--ifo-list', ifo)
    node.add_opt('--gps-start-time', str(start_time))
    node.add_opt('--gps-end-time', str(end_time))
    # Filename encodes ifo, category, start time and duration.
    vetoXmlFileName = "%s-VETOTIME_CAT%d-%d-%d.xml" \
                      %(ifo, category, start_time, end_time-start_time)
    vetoXmlFilePath = os.path.abspath(os.path.join(out_dir, vetoXmlFileName))
    # Build a file:// URL for the local output path.
    currUrl = urlparse.urlunparse(['file', 'localhost',
                                   vetoXmlFilePath, None, None, None])
    # Tag the output file for uniqueness (see the FIXME in the docstring).
    if tag:
        currTags = [tag, 'VETO_CAT%d' %(category)]
    else:
        currTags = ['VETO_CAT%d' %(category)]
    vetoXmlFile = OutSegFile(ifo, 'SEGMENTS', segValidSeg, currUrl,
                             tags=currTags)
    # Register the output on the node directly (private API).
    node._add_output(vetoXmlFile)
    if execute_now:
        # Only run the job if the output file is missing or needs
        # regenerating; otherwise mark the node as already executed and
        # register the existing file's physical location.
        if file_needs_generating(vetoXmlFile.cache_entry.path):
            workflow.execute_node(node)
        else:
            node.executed = True
            for fil in node._outputs:
                fil.node = None
                fil.PFN(fil.storage_path, site='local')
    else:
        # Defer execution: add the node to the workflow DAG.
        workflow.add_node(node)
    return vetoXmlFile
示例13: setup_hardware_injection_page
# 需要导入模块: from pycbc.workflow.core import Node [as 别名]
# 或者: from pycbc.workflow.core.Node import add_opt [as 别名]
def setup_hardware_injection_page(workflow, input_files, cache_filename,
                                  inspiral_cachepattern, output_dir,
                                  tags=None, **kwargs):
    """
    This function sets up the nodes that will create the hardware injection
    page.

    Parameters
    -----------
    workflow : ahope.Workflow
        The ahope workflow instance that the coincidence jobs will be added to.
    input_files : ahope.FileList
        A FileList of files that are used as input at this stage.
    cache_filename : str
        Filename of the ihope cache.
    inspiral_cachepattern : str
        The pattern that will be used to find inspiral filenames in the cache.
    output_dir : path
        The directory in which output files will be stored.
    tags : list of strings (optional, default = None)
        A list of the tagging strings that will be used for all jobs created
        by this call to the workflow. An example might be ['full_data'].
        This will be used to search the cache.

    Returns
    --------
    out_files : ahope.FileList
        A list of the output files from this stage.
    """
    # Avoid a shared mutable default argument.
    if tags is None:
        tags = []
    logging.info("Entering hardware injection page setup.")
    out_files = FileList([])
    # If there is no hardware injection section, skip this stage entirely
    # and return an empty FileList.
    if not workflow.cp.has_section('workflow-hardware-injections'):
        msg = "There is no workflow-hardware-injections section. "
        msg += "The hardware injection page will not be added to the workflow."
        logging.info(msg)
        logging.info("Leaving hardware injection page setup.")
        return out_files
    # Make the output dir.
    if not os.path.exists(output_dir):
        os.makedirs(output_dir)
    # Create the executable.
    hwinjpage_job = Executable(workflow.cp, 'hardware_injection_page',
                               'vanilla', workflow.ifos, output_dir, tags)
    # Retrieve the hardware injection definer file and keep a local copy.
    hwinjDefUrl = workflow.cp.get_opt_tags('workflow-hardware-injections',
                                           'hwinj-definer-url', tags)
    hwinjDefBaseName = os.path.basename(hwinjDefUrl)
    hwinjDefNewPath = os.path.join(output_dir, hwinjDefBaseName)
    urllib.urlretrieve(hwinjDefUrl, hwinjDefNewPath)
    # Update the hwinj definer file location in the configuration so
    # downstream consumers see the local copy.
    workflow.cp.set("workflow-hardware-injections", "hwinj-definer-file",
                    hwinjDefNewPath)
    # Query for the hardware injection segments.
    get_hardware_injection_segment_files(workflow, output_dir, hwinjDefNewPath)
    # Create the node.
    node = Node(hwinjpage_job)
    node.add_opt('--gps-start-time', workflow.analysis_time[0])
    node.add_opt('--gps-end-time', workflow.analysis_time[1])
    node.add_opt('--source-xml', hwinjDefNewPath)
    node.add_opt('--segment-dir', output_dir)
    node.add_opt('--cache-file', cache_filename)
    node.add_opt('--cache-pattern', inspiral_cachepattern)
    node.add_opt('--analyze-injections', '')
    for ifo in workflow.ifos:
        node.add_opt('--%s-injections'%ifo.lower(), '')
    outfile = File(node.executable.ifo_string, 'HWINJ_SUMMARY',
                   workflow.analysis_time, extension='html',
                   directory=output_dir)
    node.add_opt('--outfile', outfile.storage_path)
    # Add the node to the workflow and collect its outputs.
    workflow.add_node(node)
    out_files += node.output_files
    logging.info("Leaving hardware injection page setup.")
    return out_files
示例14: setup_plotnumtemplates
# 需要导入模块: from pycbc.workflow.core import Node [as 别名]
# 或者: from pycbc.workflow.core.Node import add_opt [as 别名]
def setup_plotnumtemplates(workflow, input_files, cache_filename,
                           tmpltbank_cachepattern, output_dir, tags=None,
                           **kwargs):
    """
    This function sets up the nodes that will generate a plot of the number
    of templates against time.

    Parameters
    -----------
    workflow : ahope.Workflow
        The ahope workflow instance that the coincidence jobs will be added to.
    input_files : ahope.FileList
        A FileList of files that are used as input at this stage.
    cache_filename : str
        Filename of the ihope cache.
    tmpltbank_cachepattern : str
        The pattern that will be used to find template_bank filenames in the
        cache.
    output_dir : path
        The directory in which output files will be stored.
    tags : list of strings (optional, default = None)
        A list of the tagging strings that will be used for all jobs created
        by this call to the workflow. An example might be ['full_data'].
        This will be used in output names and directories.

    Returns
    --------
    plot_files : ahope.FileList
        A list of the output files from this stage.
    """
    # Avoid a shared mutable default argument.
    if tags is None:
        tags = []
    plot_files = FileList([])
    # Create the executable shared by all nodes below.
    plotnumtemplates_job = Executable(workflow.cp, 'plotnumtemplates',
                                      'vanilla', workflow.ifos, output_dir,
                                      tags)
    # One summary-plot node per analysis tag.
    for tag in tags:
        node = Node(plotnumtemplates_job)
        node.add_opt('--gps-start-time', workflow.analysis_time[0])
        node.add_opt('--gps-end-time', workflow.analysis_time[1])
        node.add_opt('--cache-file', cache_filename)
        node.add_opt('--ifo-times', node.executable.ifo_string)
        node.add_opt('--user-tag', tag.upper()+'_SUMMARY_PLOTS')
        node.add_opt('--output-path', output_dir)
        node.add_opt('--bank-pattern', tmpltbank_cachepattern)
        node.add_opt('--enable-output')
        workflow.add_node(node)
    # NOTE(review): plot_files is returned empty; outputs are tracked only
    # through the workflow nodes — confirm downstream callers expect this.
    return plot_files