This article collects typical usage examples of the Python class ProdCommon.MCPayloads.WorkflowSpec.WorkflowSpec. If you have been wondering what the WorkflowSpec class does and how to use it, the curated examples below should help.
The following 15 code examples of the WorkflowSpec class are shown, ordered by popularity by default.
Example 1: createCleanupWorkflowSpec
def createCleanupWorkflowSpec():
"""
_createCleanupWorkflowSpec_
Create a generic cleanup WorkflowSpec definition
    that can be used to generate a sandbox for cleanup jobs
"""
timestamp = str(time.asctime(time.localtime(time.time())))
timestamp = timestamp.replace(" ", "-")
timestamp = timestamp.replace(":", "_")
workflow = WorkflowSpec()
workflow.setWorkflowName("CleanUp-%s" % timestamp)
workflow.setActivity("CleanUp")
workflow.setRequestCategory("mc-cleanup")
workflow.setRequestTimestamp(timestamp)
workflow.parameters['WorkflowType']="CleanUp"
cleanUp = workflow.payload
cleanUp.name = "cleanUp1"
cleanUp.type = "CleanUp"
cleanUp.application["Project"] = ""
cleanUp.application["Version"] = ""
cleanUp.application["Architecture"] = ""
cleanUp.application["Executable"] = "RuntimeCleanUp.py" # binary name
cleanUp.configuration = ""
cleanUp.cfgInterface = None
return workflow
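A minimal usage sketch (the output path is illustrative): the generated spec can be persisted with save(), the same method used in Example 12, and the resulting XML file then feeds sandbox creation for cleanup jobs.

spec = createCleanupWorkflowSpec()
spec.save("/tmp/CleanUp-workflow.xml")  # save() as used in Example 12; path is illustrative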
Example 2: load
def load(self, improvNode):
"""
_load_
Extract information for this object from the improv instance provided
"""
wfQuery = IMProvQuery("/RequestSpec/WorkflowSpec")
wfnode = wfQuery(improvNode)[0]
wfspec = WorkflowSpec()
wfspec.loadFromNode(wfnode)
self.workflow = wfspec
policyQuery = IMProvQuery("/RequestSpec/Policies/*")
detailQuery = IMProvQuery("/RequestSpec/RequestDetails/*")
preferredPAQuery = IMProvQuery("/RequestSpec/PreferredPA")
policies = policyQuery(improvNode)
details = detailQuery(improvNode)
preferredPAs = preferredPAQuery(improvNode)
for policy in policies:
self.policies[str(policy.name)] = str(policy.chardata)
        for detail in details:
            self.requestDetails[str(detail.name)] = str(detail.chardata)
for preferredPA in preferredPAs:
self.preferredPAs[str(preferredPA.attrs['id'])] = \
str(preferredPA.attrs['priority'])
return
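A hedged sketch of how this load() method might be driven. It assumes the IMProv loader shipped with ProdCommon (IMProv.IMProvLoader.loadIMProvFile) and a RequestSpec-like owner class for the method; both names are assumptions inferred from the queries above, not confirmed by this excerpt.

# Hedged sketch: parse a RequestSpec XML file into an improv node and hand it
# to load(). loadIMProvFile and the RequestSpec owner class are assumptions.
from IMProv.IMProvLoader import loadIMProvFile

improvNode = loadIMProvFile("RequestSpec.xml")   # illustrative path
request = RequestSpec()                          # hypothetical owner of this load() method
request.load(improvNode)
print(request.workflow.workflowName())           # workflowName() as used in Example 12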
Example 3: __init__
class JobSpecExpander:
def __init__(self, jobSpecFile):
self.jobSpec = JobSpec()
self.jobSpec.load(jobSpecFile)
self.taskState = TaskState(os.getcwd())
self.taskState.loadRunResDB()
self.workflowSpec = WorkflowSpec()
self.workflowSpec.load(os.environ["PRODAGENT_WORKFLOW_SPEC"])
self.config = self.taskState.configurationDict()
finder = NodeFinder(self.taskState.taskName())
self.jobSpec.payload.operate(finder)
self.jobSpecNode = finder.result
wffinder = NodeFinder(self.taskState.taskName())
self.workflowSpec.payload.operate(wffinder)
self.workflowNode = wffinder.result
if self.jobSpecNode.jobType != "Merge":
if self.config.has_key('Configuration'):
try:
self.createPSet()
except Exception, ex:
msg = "Unable to generate cmsRun Config from JobSpec:\n"
msg += str(ex)
print msg
badfile = open("exit.status", 'w')
badfile.write("10040")
badfile.close()
        else:
            # Merge job branch; the excerpt is truncated here, but Example 14
            # shows the complete version of this constructor.
            self.createMergePSet()
Example 4: getCMSSoft
def getCMSSoft(work,reverse=False):
"""
opens the workflowfile and gets the CMSSoft version
if reverse, returns a map between CMSSoft version and real workflowname
"""
new_work={}
workflowSpec = WorkflowSpec()
for fil in work:
try:
workflowSpec.load(fil)
cmssw=workflowSpec.payload.application['Version']
name=workflowSpec.parameters['WorkflowName']
if reverse:
if not new_work.has_key(cmssw):
new_work[cmssw]=[]
new_work[cmssw].append(name)
else:
new_work[name]=cmssw
        except Exception:
            # something went wrong while loading or parsing this spec file
            msg = "WorkflowConstraints getCMSSoft: something went wrong while handling file " + fil
            print(msg)
return new_work
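A short usage sketch (the file paths are illustrative): the forward call maps workflow names to CMSSW versions, while reverse=True groups workflow names per release.

files = ["/data/wf1-workflow.xml", "/data/wf2-workflow.xml"]  # illustrative paths
byName = getCMSSoft(files)             # {workflow name: CMSSW version}
byRelease = getCMSSoft(files, True)    # {CMSSW version: [workflow names]}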
Example 5: add
def add(self, workflowFile):
"""
_add_
Add a dataset to the list of watched datasets.
Arguments:
workflowFile -- the workflow specification file
Return:
the datasetId
"""
# read the WorkflowSpecFile
try:
wfile = WorkflowSpec()
wfile.load(workflowFile)
# wrong dataset file
except Exception, msg:
raise InvalidDataset, \
"Error loading workflow specifications from %s" % workflowFile
Example 6: makeJobs
def makeJobs(self, testInstance):
"""
_makeJobs_
Create Job Specs for the test instance provided
"""
logging.info("Creating Jobs for test %s at site %s" % (
testInstance['Name'],
testInstance['Site'])
)
testName = testInstance['WorkflowSpecId']
specInstance = WorkflowSpec()
specInstance.load(testInstance['WorkflowSpecFile'])
if testInstance['InputDataset'] == None:
initialRun = self.jobCounts.get(testInstance['Name'], 1)
factory = RequestJobFactory(
specInstance,
testInstance['WorkingDir'],
testInstance['TotalEvents'],
InitialRun = initialRun,
EventsPerJob = testInstance['EventsPerJob'],
Sites = [testInstance['Site']])
jobsList = factory()
self.jobCounts[testInstance['Name']] += len(jobsList)
else:
factory = DatasetJobFactory(
specInstance,
testInstance['WorkingDir'],
specInstance.parameters['DBSURL'],
)
jobsList = factory()
self.jobCounts[testInstance['Name']] += len(jobsList)
msg = "Created %s jobs:\n" % len(jobsList)
for job in jobsList:
jobSpecFile = job['JobSpecFile']
jobSpecId = job['JobSpecId']
msg += " %s\n" % jobSpecId
testInstance['JobSpecs'][jobSpecId] = jobSpecFile
logging.info(msg)
return
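A hedged sketch of the testInstance mapping that makeJobs() expects, inferred from the keys the method reads above; every value is illustrative.

testInstance = {
    'Name': 'ExampleTest',                 # illustrative values throughout
    'Site': 'T2_XX_Example',
    'WorkflowSpecId': 'ExampleTest-workflow',
    'WorkflowSpecFile': '/path/to/ExampleTest-workflow.xml',
    'InputDataset': None,                  # None selects the RequestJobFactory branch
    'WorkingDir': '/tmp/jobs',
    'TotalEvents': 1000,
    'EventsPerJob': 100,
    'JobSpecs': {},
}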
Example 7: GoodWorkflow
def GoodWorkflow(workflow):
"""
Check if workflow can be loaded
"""
    RequestDir, firstrun = getRequestInjectorConfig()
    workflowCache = "%s/WorkflowCache" % RequestDir
    workflowSpec = WorkflowSpec()
    try:
        workflowSpec.load(workflow)
    except Exception:
        return False
return True
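Usage sketch: filter a candidate list down to the specs that load cleanly (the list name is illustrative).

loadable = [wf for wf in candidateFiles if GoodWorkflow(wf)]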
Example 8: loadWorkflow
def loadWorkflow(self, specFile):
"""
_loadWorkflow_
Helper method, since every plugin will have to do
something with a workflow
"""
spec = WorkflowSpec()
try:
spec.load(specFile)
except Exception, ex:
msg = "Unable to read workflow spec file:\n%s\n" % specFile
msg += str(ex)
raise RuntimeError, msg
Example 9: __init__
class FactoryInterface:
"""
_FactoryInterface_
JobSpec Factory Interface defintion & common utils for
all job spec factory generators
"""
def __init__(self, workflowSpec):
# or use isinstance(WorkflowSpec) if need to include sub classes
if workflowSpec.__class__ is WorkflowSpec:
self.workflow = workflowSpec
else:
self.workflow = WorkflowSpec()
self.workflow.load(workflowSpec)
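The constructor accepts either an in-memory WorkflowSpec or a path to a spec file, so both call styles below end up with a populated self.workflow; the names are illustrative.

factoryA = FactoryInterface(existingSpec)          # WorkflowSpec instance, used directly
factoryB = FactoryInterface("/path/to/spec.xml")   # path, loaded via WorkflowSpec.load()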
Example 10: __init__
def __init__(self, config, msgSvcRef, **workflowDetails):
self.configuration = config
self.msgSvcRef = msgSvcRef
self.workflowDetails = workflowDetails
self.workflow = workflowDetails['id']
self.workflowFile = workflowDetails['workflow_spec_file']
self.workflowSpec = WorkflowSpec()
self.workflowSpec.load(self.workflowFile)
self.doMigration = self.configuration.get("MigrateToGlobal", True)
self.doInjection = self.configuration.get("InjectToPhEDEx", True)
Example 11: createLogCollectorWorkflowSpec
def createLogCollectorWorkflowSpec(wf):
"""
    _createLogCollectorWorkflowSpec_
Create a generic LogArchive WorkflowSpec definition
"""
timestamp = str(time.asctime(time.localtime(time.time())))
timestamp = timestamp.replace(" ", "-")
timestamp = timestamp.replace(":", "_")
workflow = WorkflowSpec()
workflow.setWorkflowName("LogCollect-%s" % timestamp)
workflow.setActivity("LogCollect")
workflow.setRequestCategory("logcollect")
workflow.setRequestTimestamp(timestamp)
workflow.parameters["WorkflowType"] = "LogCollect"
logArchive = workflow.payload
logArchive.name = "logCollect1"
logArchive.type = "LogCollect"
    # TODO: remove this?
    # logArchive.workflow = wf
logArchive.application["Project"] = ""
logArchive.application["Version"] = ""
logArchive.application["Architecture"] = ""
logArchive.application["Executable"] = "RuntimeLogCollector.py" # binary name
logArchive.configuration = ""
logArchive.cfgInterface = None
# set stageOut override
# cfg = IMProvNode("config")
# stageOut = IMProvNode("StageOutParameters")
# cfg.addNode()
# WorkflowTools.addStageOutNode(logArchive, "StageOut1")
# WorkflowTools.addStageOutOverride(logArchive, stageOutParams['command'],
# stageOutParams['option'],
# stageOutParams['se-name'],
# stageOutParams['lfnPrefix'])
return workflow
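Usage sketch: note that the wf argument is currently unused (it is only referenced in the commented-out TODO above), so the call reduces to building and saving the spec; the path is illustrative.

logSpec = createLogCollectorWorkflowSpec(None)   # wf is unused in this version
logSpec.save("/tmp/LogCollect-workflow.xml")     # save() as used in Example 12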
Example 12: createWorkflow
def createWorkflow(self, runNumber, primaryDataset,
processedDataset, dataTier):
"""
_createWorkflow_
Create a workflow for a given run and primary dataset. If the workflow
has been created previously, load it and use it.
"""
        jobCache = os.path.join(self.args["ComponentDir"], "T0ASTPlugin",
                                "Run%s" % runNumber)
if not os.path.exists(jobCache):
os.makedirs(jobCache)
workflowSpecFileName = "DQMHarvest-Run%s-%s-workflow.xml" % (runNumber, primaryDataset)
workflowSpecPath = os.path.join(jobCache, workflowSpecFileName)
if os.path.exists(workflowSpecPath):
msg = "Loading existing workflow for dataset: %s\n " % primaryDataset
msg += " => %s\n" % workflowSpecPath
logging.info(msg)
workflowSpec = WorkflowSpec()
workflowSpec.load(workflowSpecPath)
return (workflowSpec, workflowSpecPath)
msg = "No workflow found for dataset: %s\n " % primaryDataset
msg += "Looking up software version and generating workflow..."
recoConfig = self.t0astWrapper.listRecoConfig(runNumber, primaryDataset)
if not recoConfig["DO_RECO"]:
logging.info("RECO disabled for dataset %s" % primaryDataset)
return (None, None)
globalTag = self.args.get("OverrideGlobalTag", None)
if globalTag == None:
globalTag = recoConfig["GLOBAL_TAG"]
cmsswVersion = self.args.get("OverrideCMSSW", None)
if cmsswVersion == None:
cmsswVersion = recoConfig["CMSSW_VERSION"]
datasetPath = "/%s/%s/%s" % (primaryDataset, processedDataset, dataTier)
workflowSpec = createHarvestingWorkflow(datasetPath, self.site,
self.args["CmsPath"],
self.args["ScramArch"],
cmsswVersion, globalTag,
configFile=self.args["ConfigFile"],
DQMServer=self.args['DQMServer'],
proxyLocation=self.args['proxyLocation'],
DQMCopyToCERN=self.args['DQMCopyToCERN'],
doStageOut=self.args['DoStageOut'])
workflowSpec.save(workflowSpecPath)
msg = "Created Harvesting Workflow:\n %s" % workflowSpecPath
logging.info(msg)
self.publishWorkflow(workflowSpecPath, workflowSpec.workflowName())
return (workflowSpec, workflowSpecPath)
Example 13: makeWorkflow
def makeWorkflow(self):
"""
_makeWorkflow_
Generate a workflow. If the self.configFile parameter has been set
this will attempt to load the config from file, otherwise it will
create an empty process object which will get filled in by the runtime
script.
"""
self.timestamp = int(time.time())
self.workflow = WorkflowSpec()
self.workflowName = "AlcaSkim-Run%s-%s" % \
(self.run, self.primaryDataset)
self.workflow.setWorkflowName(self.workflowName)
self.workflow.setRequestCategory("data")
self.workflow.setRequestTimestamp(self.timestamp)
self.workflow.parameters["WorkflowType"] = "Processing"
self.workflow.parameters["ProdRequestID"] = self.run
self.workflow.parameters["RunNumber"] = self.run
self.workflow.parameters["CMSSWVersion"] = self.cmssw["CMSSWVersion"]
self.workflow.parameters["ScramArch"] = self.cmssw["ScramArch"]
self.workflow.parameters["CMSPath"] = self.cmssw["CMSPath"]
self.cmsRunNode = self.workflow.payload
self.cmsRunNode.name = "cmsRun1"
self.cmsRunNode.type = "CMSSW"
self.cmsRunNode.application["Version"] = self.cmssw["CMSSWVersion"]
self.cmsRunNode.application["Executable"] = "cmsRun"
self.cmsRunNode.application["Project"] = "CMSSW"
self.cmsRunNode.application["Architecture"] = self.cmssw["ScramArch"]
inputDataset = self.cmsRunNode.addInputDataset(self.primaryDataset,
self.parentProcessedDataset)
inputDataset["DataTier"] = "RECO"
if self.configFile == None:
self.loadProcessFromFramework()
else:
self.loadProcessFromFile()
self.setupOutputModules()
WorkflowTools.addStageOutNode(self.cmsRunNode, "stageOut1")
WorkflowTools.addLogArchNode(self.cmsRunNode, "logArchive")
WorkflowTools.generateFilenames(self.workflow)
return self.workflow
Example 14: __init__
def __init__(self, jobSpecFile):
self.jobSpec = JobSpec()
self.jobSpec.load(jobSpecFile)
self.taskState = TaskState(os.getcwd())
self.taskState.loadRunResDB()
self.workflowSpec = WorkflowSpec()
self.workflowSpec.load(os.environ["PRODAGENT_WORKFLOW_SPEC"])
self.config = self.taskState.configurationDict()
finder = NodeFinder(self.taskState.taskName())
self.jobSpec.payload.operate(finder)
self.jobSpecNode = finder.result
wffinder = NodeFinder(self.taskState.taskName())
self.workflowSpec.payload.operate(wffinder)
self.workflowNode = wffinder.result
tier0Merge = self.workflowSpec.parameters.get("Tier0Merge", "False")
if self.jobSpecNode.jobType != "Merge" or tier0Merge == "True":
if self.config.has_key('Configuration'):
#try:
self.createPSet()
#except Exception, ex:
# msg = "Unable to generate cmsRun Config from JobSpec:\n"
# msg += str(ex)
# print msg
# badfile = open("exit.status", 'w')
# badfile.write("10040")
# badfile.close()
else:
# //
# // Merge job
#//
self.createMergePSet()
# do after pset created to get correct input files
self.setJobDetails()
if self.config.has_key('UserSandbox'):
self.userSandbox()
Example 15: createProductionWorkflow
def createProductionWorkflow(prodName, cmsswVersion, cfgFile = None,
category = "mc", **args):
"""
_createProductionWorkflow_
Create a Production style workflow, ie generation of new events
"""
timestamp = int(time.time())
if args.get("PyCfg", None) == None:
if cfgFile == None:
msg = "Error: No Cfg File or python cfg file provided to createProductionWorkflow"
raise RuntimeError, msg
pycfgFile = createPythonConfig(cfgFile)
pycfgFileContent = file(pycfgFile).read()
else:
pycfgFileContent = args['PyCfg']
if args.get("PSetHash", None) == None:
realPSetHash = createPSetHash(cfgFile)
else:
realPSetHash = args['PSetHash']
# //
# // Create a new WorkflowSpec and set its name
#//
spec = WorkflowSpec()
    workflowname = "%s__%s-%s-%s-%s" % (
        prodName, cmsswVersion,
        args.get("processingLabel", "Test07"),
        args.get("physicsGroup", "NoPhysicsGroup"),
        timestamp)
spec.setWorkflowName(workflowname)
spec.setRequestCategory(category)
spec.setRequestTimestamp(timestamp)
cmsRun = spec.payload
    populateCMSRunNode(cmsRun, "cmsRun1", cmsswVersion, pycfgFileContent,
                       realPSetHash, timestamp, prodName,
                       physicsGroup = args.get("physicsGroup", "NoPhysicsGroup"),
                       processingLabel = args.get("processingLabel", "Test07"),
                       fakeHash = args.get("FakeHash", False))
addStageOutNode(cmsRun, "stageOut1")
generateFilenames(spec)
return spec
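A hedged usage sketch: the keyword names follow the args.get() calls above, the release string and cfg file path are illustrative, and either cfgFile or a PyCfg argument must be supplied or the function raises RuntimeError.

spec = createProductionWorkflow("ExampleProd", "CMSSW_3_8_4",   # illustrative values
                                cfgFile = "gen.cfg",
                                processingLabel = "Test07",
                                physicsGroup = "NoPhysicsGroup")
spec.save("ExampleProd-workflow.xml")   # save() as used in Example 12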