This article collects typical usage examples of WorkflowSpec.parameters['WorkflowType'] in Python, i.e. the 'WorkflowType' entry of the parameters dictionary on the ProdCommon.MCPayloads.WorkflowSpec.WorkflowSpec class (a dictionary key rather than a method). If you are unsure what WorkflowSpec.parameters['WorkflowType'] is for, or how to use it, the code examples selected below may help. You can also read further about the class it belongs to, ProdCommon.MCPayloads.WorkflowSpec.WorkflowSpec.
The following shows 2 code examples that use WorkflowSpec.parameters['WorkflowType'], sorted by popularity by default. You can upvote the examples you like or find useful; your ratings help the system recommend better Python code examples.
Example 1: createCleanupWorkflowSpec
# Required import: from ProdCommon.MCPayloads.WorkflowSpec import WorkflowSpec [as alias]
# Or: from ProdCommon.MCPayloads.WorkflowSpec.WorkflowSpec import parameters['WorkflowType'] [as alias]
def createCleanupWorkflowSpec():
    """
    _createCleanupWorkflowSpec_
    Create a generic cleanup WorkflowSpec definition
    that can be used to generate a sandbox for cleanup jobs
    """
    # Also relies on the standard library 'time' module (import omitted in the original snippet).
    timestamp = str(time.asctime(time.localtime(time.time())))
    timestamp = timestamp.replace(" ", "-")
    timestamp = timestamp.replace(":", "_")

    workflow = WorkflowSpec()
    workflow.setWorkflowName("CleanUp-%s" % timestamp)
    workflow.setActivity("CleanUp")
    workflow.setRequestCategory("mc-cleanup")
    workflow.setRequestTimestamp(timestamp)
    workflow.parameters['WorkflowType'] = "CleanUp"

    cleanUp = workflow.payload
    cleanUp.name = "cleanUp1"
    cleanUp.type = "CleanUp"
    cleanUp.application["Project"] = ""
    cleanUp.application["Version"] = ""
    cleanUp.application["Architecture"] = ""
    cleanUp.application["Executable"] = "RuntimeCleanUp.py"  # binary name
    cleanUp.configuration = ""
    cleanUp.cfgInterface = None

    return workflow
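
As a quick illustration of how the returned spec could be consumed, here is a minimal usage sketch. It only relies on calls visible in the examples on this page (the parameters dictionary and workflowName()); the save() call and the output file name are assumptions about the WorkflowSpec API, not taken from the snippet above.

import time
from ProdCommon.MCPayloads.WorkflowSpec import WorkflowSpec

# Build the generic cleanup spec and inspect the parameter this page is about.
cleanupSpec = createCleanupWorkflowSpec()
print(cleanupSpec.parameters['WorkflowType'])   # -> "CleanUp"
print(cleanupSpec.workflowName())               # -> "CleanUp-<timestamp>"

# Persisting the spec is an assumption about the WorkflowSpec API;
# the file name is illustrative only.
cleanupSpec.save("CleanUp-Workflow.xml")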
Example 2: createMergeJobWorkflow
# Required import: from ProdCommon.MCPayloads.WorkflowSpec import WorkflowSpec [as alias]
# Or: from ProdCommon.MCPayloads.WorkflowSpec.WorkflowSpec import parameters['WorkflowType'] [as alias]
def createMergeJobWorkflow(procSpec, isFastMerge = True, doCleanUp = True, littleE = False):
    """
    _createMergeJobWorkflow_
    Given a Processing Workflow, generate a set of Merge Job
    workflows that can be used to generate actual merge jobs
    (as opposed to creating datasets like createMergeDatasetWorkflow)
    returns a dictionary of (input, i.e. MergeSensor watched) dataset name
    to workflow spec instances
    """
    # Note: this snippet also relies on createMergeDatasetWorkflow, DatasetConventions
    # and WorkflowTools helpers from the same package (imports omitted in the original snippet).
    mergeDatasetWF = createMergeDatasetWorkflow(procSpec, isFastMerge)
    mergeDatasets = mergeDatasetWF.outputDatasets()

    results = {}
    procSpecName = procSpec.workflowName()

    for dataset in mergeDatasets:
        inputDataset = dataset['ParentDataset']

        newWF = WorkflowSpec()
        newWF.parameters.update(procSpec.parameters)
        newWF.setWorkflowName(procSpecName)
        newWF.parameters['WorkflowType'] = "Merge"

        cmsRunNode = newWF.payload
        cmsRunNode.name = "cmsRun1"
        cmsRunNode.type = "CMSSW"
        cmsRunNode.application["Project"] = "CMSSW"
        cmsRunNode.application["Version"] = dataset['ApplicationVersion']
        cmsRunNode.application["Architecture"] = "slc3_ia32_gcc323"

        #  //
        # // Hack to forward UserSandbox to Merge Jobs
        #//
        userSandbox = dataset.get("UserSandbox", None)
        if userSandbox != None:
            cmsRunNode.userSandbox = userSandbox

        #if isFastMerge == True:
        #    if littleE:
        #        cmsRunNode.application["Executable"] = "edmFastMerge"
        #    else:
        #        cmsRunNode.application["Executable"] = _FastMergeBinary
        #    outputModuleName = "EdmFastMerge"
        #else:
        cmsRunNode.application["Executable"] = "cmsRun"
        outputModuleName = "Merged"

        #  //
        # // Input Dataset
        #//
        datasetBits = DatasetConventions.parseDatasetPath(inputDataset)
        inDataset = cmsRunNode.addInputDataset(datasetBits['Primary'],
                                               datasetBits['Processed'])
        inDataset["DataTier"] = datasetBits['DataTier']

        #  //
        # // Output Dataset
        #//
        outputDataset = cmsRunNode.addOutputDataset(
            dataset['PrimaryDataset'],
            dataset['ProcessedDataset'],
            outputModuleName)
        outputDataset["DataTier"] = dataset['DataTier']
        outputDataset["PSetHash"] = dataset['PSetHash']

        outputDataset["ApplicationName"] = cmsRunNode.application["Executable"]
        outputDataset["ApplicationProject"] = cmsRunNode.application["Project"]
        outputDataset["ApplicationVersion"] = cmsRunNode.application["Version"]
        outputDataset["ApplicationFamily"] = outputModuleName
        outputDataset["PhysicsGroup"] = procSpec.parameters.get('PhysicsGroup', None)
        outputDataset['ParentDataset'] = inputDataset

        #  //
        # // Add Stage Out node
        #//
        WorkflowTools.addStageOutNode(cmsRunNode, "stageOut1")
        if doCleanUp == True:
            WorkflowTools.addCleanUpNode(cmsRunNode, "cleanUp1")

        #  //
        # // Add log archive node
        #//
        WorkflowTools.addLogArchNode(cmsRunNode, "logArchive")
        WorkflowTools.generateFilenames(newWF)
#......... (rest of the code omitted here) .........
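
To show how the result might be consumed once the omitted tail of the function has filled and returned the dictionary described in the docstring, here is a hedged usage sketch. procSpec stands for an already loaded processing WorkflowSpec, and the assumption that the return value maps input dataset names to WorkflowSpec instances comes from the docstring, not from the code shown above.

# Hedged usage sketch: procSpec is assumed to be an existing processing
# WorkflowSpec; the return value is assumed (per the docstring) to map
# input dataset names to merge WorkflowSpec instances.
mergeWorkflows = createMergeJobWorkflow(procSpec, isFastMerge=False, doCleanUp=True)
for datasetName, mergeSpec in mergeWorkflows.items():
    # Every generated spec carries the parameter this page is about.
    print(datasetName, mergeSpec.parameters['WorkflowType'])   # -> "Merge"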