本文整理汇总了Python中WMCore.WMSpec.StdSpecs.TaskChain.TaskChainWorkloadFactory类的典型用法代码示例。如果您正苦于以下问题:Python TaskChainWorkloadFactory类的具体用法?Python TaskChainWorkloadFactory怎么用?Python TaskChainWorkloadFactory使用的例子?那么恭喜您, 这里精选的类代码示例或许可以为您提供帮助。
在下文中一共展示了TaskChainWorkloadFactory类的14个代码示例,这些例子默认根据受欢迎程度排序。您可以为喜欢或者感觉有用的代码点赞,您的评价将有助于系统推荐出更棒的Python代码示例。
示例1: testPileupFetcherOnMC
def testPileupFetcherOnMC(self):
    """
    Build a TaskChain workload with both MC and data pileup configured on
    Task1, run PileupFetcher over every task node, and verify the pileup
    configuration file written into each task sandbox.
    """
    pileupMcArgs = TaskChainWorkloadFactory.getTestArguments()
    pileupMcArgs['Task1']["MCPileup"] = "/Cosmics/ComissioningHI-PromptReco-v1/RECO"
    pileupMcArgs['Task1']["DataPileup"] = "/HighPileUp/Run2011A-v1/RAW"
    pileupMcArgs['Task1']["ConfigCacheID"] = self.injectGenerationConfig()
    pileupMcArgs["CouchDBName"] = "pileupfetcher_t"
    pileupMcArgs["CouchURL"] = os.environ["COUCHURL"]
    factory = TaskChainWorkloadFactory()
    testWorkload = factory.factoryWorkloadConstruction("TestWorkload", pileupMcArgs)
    # now that the workload was created and args validated, we can add this PileupConfig
    pileupMcArgs["PileupConfig"] = parsePileupConfig(pileupMcArgs['Task1']["MCPileup"],
                                                     pileupMcArgs['Task1']["DataPileup"])
    # Since this is test of the fetcher - The loading from WMBS isn't
    # really necessary because the fetching happens before the workflow
    # is inserted into WMBS: feed the workload instance directly into fetcher:
    fetcher = PileupFetcher()
    creator = SandboxCreator()
    pathBase = "%s/%s" % (self.testDir, testWorkload.name())
    for topLevelTask in testWorkload.taskIterator():
        for taskNode in topLevelTask.nodeIterator():
            # this is how the call to PileupFetcher is happening
            # from the SandboxCreator test
            task = WMTask.WMTaskHelper(taskNode)
            taskPath = "%s/WMSandbox/%s" % (pathBase, task.name())
            fetcher.setWorkingDirectory(taskPath)
            # create Sandbox for the fetcher ...
            creator._makePathonPackage(taskPath)
            fetcher(task)
            # validate the pileup JSON the fetcher just wrote for this task
            self._queryPileUpConfigFile(pileupMcArgs, task, taskPath)
示例2: testMultithreadedTaskChain
def testMultithreadedTaskChain(self):
    """
    Test for multithreaded task chains where all steps run with the same
    number of cores
    """
    args = self.buildMultithreadedTaskChain(self.differentNCores)
    # force an identical core count on every task in the chain
    for taskKey in ('Task1', 'Task2', 'Task3'):
        args[taskKey]['Multicore'] = 4
    factory = TaskChainWorkloadFactory()
    try:
        self.workload = factory.factoryWorkloadConstruction("MultiChain", args)
    except Exception as ex:
        self.fail("Error invoking TaskChainWorkloadFactory:\n%s" % str(ex))
    # each cmsRun step in the chain must report the shared core count
    chainPaths = (
        '/MultiChain/HLTD',
        '/MultiChain/HLTD/HLTDMergewriteRAWDIGI/RECODreHLT',
        '/MultiChain/HLTD/HLTDMergewriteRAWDIGI/RECODreHLT/RECODreHLTMergewriteALCA/MINIAODDreHLT',
    )
    for taskPath in chainPaths:
        stepHelper = self.workload.getTaskByPath(taskPath).getStepHelper("cmsRun1")
        self.assertEqual(stepHelper.getNumberOfCores(), 4)
    return
示例3: createMCWMSpec
def createMCWMSpec(self, name='MonteCarloWorkload'):
    """
    Create and return a Monte Carlo TaskChain workload spec called *name*,
    wired to the shared test couch database and configured to produce
    10000 events.
    """
    specArgs = TaskChainWorkloadFactory.getTestArguments()
    specArgs["CouchDBName"] = rerecoArgs["CouchDBName"]
    specArgs["Task1"]["ConfigCacheID"] = createConfig(specArgs["CouchDBName"])
    workload = taskChainWorkload(name, specArgs)
    workload.setSpecUrl("/path/to/workload")
    # top-level task is the generator: attach the production step to it
    getFirstTask(workload).addProduction(totalevents=10000)
    return workload
示例4: getProdArgs
def getProdArgs():
    """
    Return TaskChain test arguments suitable for production workflows.

    The couch connection settings are blanked out and any 'ConfigCacheDoc'
    entry is dropped from the defaults.

    :return: dict of TaskChain workload arguments
    """
    mcArgs = TaskChainWorkloadFactory.getTestArguments()
    mcArgs.update({
        "CouchURL": None,
        "CouchDBName": None,
    })
    # The original inserted 'ConfigCacheDoc': None via update() only to pop
    # it again immediately; popping with a default reaches the same end
    # state without the dead insert.
    mcArgs.pop('ConfigCacheDoc', None)
    return mcArgs
示例5: testMultithreadedTasksTaskChain
def testMultithreadedTasksTaskChain(self):
    """
    Test for multithreaded task chains where each step
    may run with a different number of cores
    """
    chainArgs = self.buildMultithreadedTaskChain(self.differentNCores)
    factory = TaskChainWorkloadFactory()
    try:
        self.workload = factory.factoryWorkloadConstruction("MultiChain2", chainArgs)
    except Exception as ex:
        self.fail("Error invoking TaskChainWorkloadFactory:\n%s" % str(ex))
    # (task path, expected cores, expected memory requirement)
    expectations = [
        ("/MultiChain2/HLTD", 4, 2400.0),
        ("/MultiChain2/HLTD/HLTDMergewriteRAWDIGI/RECODreHLT", 8, 3200.0),
        ("/MultiChain2/HLTD/HLTDMergewriteRAWDIGI/RECODreHLT/RECODreHLTMergewriteALCA/MINIAODDreHLT", 1, 2000.0),
    ]
    for taskPath, expectedCores, expectedMemory in expectations:
        taskObj = self.workload.getTaskByPath(taskPath)
        stepHelper = taskObj.getStepHelper("cmsRun1")
        self.assertEqual(stepHelper.getNumberOfCores(), expectedCores)
        memory = taskObj.jobSplittingParameters()["performance"]["memoryRequirement"]
        self.assertEqual(memory, expectedMemory)
    return
示例6: buildMultithreadedTaskChain
def buildMultithreadedTaskChain(self, filename):
    """
    Build a TaskChain request dictionary from several sources and customization.

    Starts from the factory's test arguments, overlays selected fields from
    the JSON request stored in *filename* ('createRequest' and
    'assignRequest' sections), then links each task to the ConfigCache
    documents created for this test.

    :param filename: path to a JSON request file
    :return: dict of TaskChain workload arguments
    """
    processorDocs = makeProcessingConfigs(self.configDatabase)
    # Construct args from the pieces starting with test args ...
    arguments = TaskChainWorkloadFactory.getTestArguments()
    # Read in the request; use a context manager instead of passing an
    # anonymous open() to json.load, which leaked the file handle.
    with open(filename) as requestFile:
        request = json.load(requestFile)
    # ... continuing with the request
    for key in [
        "CMSSWVersion",
        "ScramArch",
        "GlobalTag",
        "ProcessingVersion",
        "Multicore",
        "Memory",
        "TaskChain",
        "Task1",
        "Task2",
        "Task3",
    ]:
        arguments.update({key: request["createRequest"][key]})
    for key in ["SiteBlacklist"]:
        arguments.update({key: request["assignRequest"][key]})
    # ... then some local overrides
    del arguments["ConfigCacheID"]
    del arguments["ConfigCacheUrl"]
    arguments.update({"CouchURL": self.testInit.couchUrl, "CouchDBName": self.testInit.couchDbName})
    # ... now fill in the ConfigCache documents created and override the inputs to link them up
    arguments["Task1"]["ConfigCacheID"] = processorDocs["DigiHLT"]
    arguments["Task2"]["ConfigCacheID"] = processorDocs["Reco"]
    arguments["Task2"]["InputFromOutputModule"] = "writeRAWDIGI"
    arguments["Task3"]["ConfigCacheID"] = processorDocs["ALCAReco"]
    arguments["Task3"]["InputFromOutputModule"] = "writeALCA"
    return arguments
示例7: buildMultithreadedTaskChain
def buildMultithreadedTaskChain(self, filename):
    """
    Build a TaskChain request dictionary from several sources and customization.

    Starts from the factory's test arguments, overlays selected fields from
    the JSON request stored in *filename* ('createRequest' and
    'assignRequest' sections), then links each task to the ConfigCache
    documents created for this test.

    :param filename: path to a JSON request file
    :return: dict of TaskChain workload arguments
    """
    processorDocs = makeProcessingConfigs(self.configDatabase)
    # Construct args from the pieces starting with test args ...
    arguments = TaskChainWorkloadFactory.getTestArguments()
    # Read in the request; use a context manager instead of passing an
    # anonymous open() to json.load, which leaked the file handle.
    with open(filename) as requestFile:
        request = json.load(requestFile)
    # ... continuing with the request
    for key in ['CMSSWVersion', 'ScramArch', 'GlobalTag', 'ProcessingVersion',
                'Multicore', 'Memory',
                'TaskChain', 'Task1', 'Task2', 'Task3']:
        arguments.update({key : request['createRequest'][key]})
    for key in ['SiteBlacklist']:
        arguments.update({key : request['assignRequest'][key]})
    # ... then some local overrides
    del arguments['ConfigCacheID']
    del arguments['ConfigCacheUrl']
    arguments.update({
        "CouchURL": self.testInit.couchUrl,
        "CouchDBName": self.testInit.couchDbName,
    })
    # ... now fill in the ConfigCache documents created and override the inputs to link them up
    arguments['Task1']['ConfigCacheID'] = processorDocs['DigiHLT']
    arguments['Task2']['ConfigCacheID'] = processorDocs['Reco']
    arguments['Task2']['InputFromOutputModule'] = 'writeRAWDIGI'
    arguments['Task3']['ConfigCacheID'] = processorDocs['ALCAReco']
    arguments['Task3']['InputFromOutputModule'] = 'writeALCA'
    return arguments
示例8: testPileupTaskChain
def testPileupTaskChain(self):
    """
    Build a two-task chain where Task1 mixes in MC pileup with
    deterministic pileup enabled and Task2 uses data pileup, then verify
    that each cmsRun step only carries its own pileup dataset and that the
    deterministic-pileup splitting flag is set only on Task1.
    """
    processorDocs = makeProcessingConfigs(self.configDatabase)
    testArguments = TaskChainWorkloadFactory.getTestArguments()
    arguments = {
        "AcquisitionEra": "ReleaseValidation",
        "Requestor": "[email protected]",
        "CMSSWVersion": "CMSSW_3_5_8",
        "ScramArch": "slc5_ia32_gcc434",
        "ProcessingVersion": 1,
        "GlobalTag": "GR10_P_v4::All",
        "CouchURL": self.testInit.couchUrl,
        "CouchDBName": self.testInit.couchDbName,
        "SiteWhitelist" : ["T1_CH_CERN", "T1_US_FNAL"],
        "DashboardHost": "127.0.0.1",
        "DashboardPort": 8884,
        "TaskChain" : 2,
        # Task1: digitization over an input dataset, mixing in MC pileup
        "Task1" :{
            "InputDataset" : "/cosmics/whatever-input-v1/GEN-SIM",
            "TaskName" : "DIGI",
            "ConfigCacheID" : processorDocs['DigiHLT'],
            "SplittingAlgo" : "LumiBased",
            "LumisPerJob": 4,
            "MCPileup": "/some/cosmics-mc-v1/GEN-SIM",
            "DeterministicPileup": True,
            "CMSSWVersion" : "CMSSW_5_2_6",
            "GlobalTag" : "GR_39_P_V5:All",
            "PrimaryDataset" : "PURelValTTBar",
            "AcquisitionEra": "CMSSW_5_2_6",
            "ProcessingString": "ProcStr_Task1"
        },
        # Task2: reconstruction over Task1 output, mixing in data pileup
        "Task2" : {
            "TaskName" : "RECO",
            "InputTask" : "DIGI",
            "InputFromOutputModule" : "writeRAWDIGI",
            "ConfigCacheID" : processorDocs['Reco'],
            "DataPileup": "/some/minbias-data-v1/GEN-SIM",
            "SplittingAlgo" : "LumiBased",
            "LumisPerJob": 2,
            "GlobalTag": "GR_R_62_V3::All",
            "AcquisitionEra": "CMSSW_5_2_7",
            "ProcessingString": "ProcStr_Task2"
        },
    }
    testArguments.update(arguments)
    arguments = testArguments
    factory = TaskChainWorkloadFactory()
    self.workload = factory.factoryWorkloadConstruction("PullingTheChain", arguments)
    firstTask = self.workload.getTaskByPath("/PullingTheChain/DIGI")
    cmsRunStep = firstTask.getStep("cmsRun1").getTypeHelper()
    pileupData = cmsRunStep.getPileup()
    # Task1 must only see the MC pileup dataset, never a data one
    self.assertFalse(hasattr(pileupData, "data"))
    self.assertEqual(pileupData.mc.dataset, ["/some/cosmics-mc-v1/GEN-SIM"])
    splitting = firstTask.jobSplittingParameters()
    self.assertTrue(splitting["deterministicPileup"])
    secondTask = self.workload.getTaskByPath("/PullingTheChain/DIGI/DIGIMergewriteRAWDIGI/RECO")
    cmsRunStep = secondTask.getStep("cmsRun1").getTypeHelper()
    pileupData = cmsRunStep.getPileup()
    # Task2 must only see the data pileup dataset, never an MC one
    self.assertFalse(hasattr(pileupData, "mc"))
    self.assertEqual(pileupData.data.dataset, ["/some/minbias-data-v1/GEN-SIM"])
    splitting = secondTask.jobSplittingParameters()
    # deterministic pileup was only requested for Task1
    self.assertFalse(splitting["deterministicPileup"])
示例9: __call__
def __call__(self, workflowName, args):
    """
    Create the TaskChain workload via the parent factory and return it.

    :param workflowName: name given to the constructed workload
    :param args: dict of TaskChain request arguments
    :return: the constructed workload object
    """
    # Delegate straight to the parent factory. A commented-out tweak that
    # deleted 'configCacheUrl' from the first task's step configuration was
    # dead code and has been removed.
    return TaskChainWorkloadFactory.__call__(self, workflowName, args)
示例10: testMultipleGlobalTags
def testMultipleGlobalTags(self):
    """
    _testMultipleGlobalTags_
    Test creating a workload that starts in a processing task
    with an input dataset, and has different globalTags
    and CMSSW versions (with corresponding scramArch) in
    each task
    """
    processorDocs = makeProcessingConfigs(self.configDatabase)
    testArguments = TaskChainWorkloadFactory.getTestArguments()
    # global lumi mask plus a narrower per-task override used by Task2
    lumiDict = {"1":[[2,4], [8,50]], "2":[[100,200], [210,210]]}
    lumiDict2 = {"1":[[2,4], [8,40]], "2":[[100,150], [210,210]]}
    arguments = {
        "AcquisitionEra": "ReleaseValidation",
        "Requestor": "[email protected]",
        "CMSSWVersion": "CMSSW_3_5_8",
        "ScramArch": "slc5_ia32_gcc434",
        "ProcessingVersion": 1,
        "GlobalTag": "DefaultGlobalTag",
        "LumiList": lumiDict,
        "CouchURL": self.testInit.couchUrl,
        "CouchDBName": self.testInit.couchDbName,
        "SiteWhitelist" : ["T1_CH_CERN", "T1_US_FNAL"],
        "DashboardHost": "127.0.0.1",
        "DashboardPort": 8884,
        "TaskChain" : 4,
        # Task1 inherits the top-level GlobalTag/CMSSWVersion/ScramArch
        "Task1" :{
            "TaskName" : "DigiHLT",
            "ConfigCacheID" : processorDocs['DigiHLT'],
            "InputDataset" : "/MinimumBias/Commissioning10-v4/GEN-SIM",
            "SplittingAlgo" : "FileBased",
        },
        # Task2 overrides GlobalTag, CMSSW version, scramArch and lumi mask
        "Task2" : {
            "TaskName" : "Reco",
            "InputTask" : "DigiHLT",
            "InputFromOutputModule" : "writeRAWDIGI",
            "ConfigCacheID" : processorDocs['Reco'],
            "SplittingAlgo" : "FileBased",
            "GlobalTag" : "GlobalTagForReco",
            "CMSSWVersion" : "CMSSW_3_1_2",
            "ScramArch" : "CompatibleRECOArch",
            "PrimaryDataset" : "ZeroBias",
            "LumiList": lumiDict2,
        },
        # Task3 overrides GlobalTag, CMSSW version and scramArch only
        "Task3" : {
            "TaskName" : "ALCAReco",
            "InputTask" : "Reco",
            "InputFromOutputModule" : "writeALCA",
            "ConfigCacheID" : processorDocs['ALCAReco'],
            "SplittingAlgo" : "FileBased",
            "GlobalTag" : "GlobalTagForALCAReco",
            "CMSSWVersion" : "CMSSW_3_1_3",
            "ScramArch" : "CompatibleALCAArch",
        },
        # Task4 inherits everything from the top level
        "Task4" : {
            "TaskName" : "Skims",
            "InputTask" : "Reco",
            "InputFromOutputModule" : "writeRECO",
            "ConfigCacheID" : processorDocs['Skims'],
            "SplittingAlgo" : "FileBased",
        }
    }
    testArguments.update(arguments)
    arguments = testArguments
    factory = TaskChainWorkloadFactory()
    try:
        self.workload = factory.factoryWorkloadConstruction("YankingTheChain", arguments)
    except Exception as ex:
        msg = "Error invoking TaskChainWorkloadFactory:\n%s" % str(ex)
        self.fail(msg)
    # materialize the workload in WMBS so the per-task checks can run
    testWMBSHelper = WMBSHelper(self.workload, "DigiHLT", "SomeBlock", cachepath = self.testInit.testDir)
    testWMBSHelper.createTopLevelFileset()
    testWMBSHelper._createSubscriptionsInWMBS(testWMBSHelper.topLevelTask, testWMBSHelper.topLevelFileset)
    self._checkTask(self.workload.getTaskByPath("/YankingTheChain/DigiHLT"), arguments['Task1'], arguments)
    self._checkTask(self.workload.getTaskByPath("/YankingTheChain/DigiHLT/DigiHLTMergewriteRAWDIGI/Reco"), arguments['Task2'],
                    arguments)
    self._checkTask(self.workload.getTaskByPath("/YankingTheChain/DigiHLT/DigiHLTMergewriteRAWDIGI/Reco/RecoMergewriteALCA/ALCAReco"),
                    arguments['Task3'], arguments)
    self._checkTask(self.workload.getTaskByPath("/YankingTheChain/DigiHLT/DigiHLTMergewriteRAWDIGI/Reco/RecoMergewriteRECO/Skims"),
                    arguments['Task4'], arguments)
    # Task1 should carry the global lumi mask and top-level step settings
    digi = self.workload.getTaskByPath("/YankingTheChain/DigiHLT")
    self.assertEqual(lumiDict, digi.getLumiMask())
    digiStep = digi.getStepHelper("cmsRun1")
    self.assertEqual(digiStep.getGlobalTag(), arguments['GlobalTag'])
    self.assertEqual(digiStep.getCMSSWVersion(), arguments['CMSSWVersion'])
    self.assertEqual(digiStep.getScramArch(), arguments['ScramArch'])
    # Make sure this task has a different lumilist than the global one
    reco = self.workload.getTaskByPath("/YankingTheChain/DigiHLT/DigiHLTMergewriteRAWDIGI/Reco")
    self.assertEqual(lumiDict2, reco.getLumiMask())
    recoStep = reco.getStepHelper("cmsRun1")
    self.assertEqual(recoStep.getGlobalTag(), arguments['Task2']['GlobalTag'])
    self.assertEqual(recoStep.getCMSSWVersion(), arguments['Task2']['CMSSWVersion'])
    # ......... remainder of this method omitted in this excerpt .........
示例11: testGeneratorWorkflow
def testGeneratorWorkflow(self):
    """
    _testGeneratorWorkflow_
    Test creating a request with an initial generator task
    it mocks a request where there are 2 similar paths starting
    from the generator, each one with a different PrimaryDataset, CMSSW configuration
    and processed dataset. Dropping the RAW output as well.
    Also include an ignored output module to keep things interesting...
    """
    generatorDoc = makeGeneratorConfig(self.configDatabase)
    processorDocs = makeProcessingConfigs(self.configDatabase)
    testArguments = TaskChainWorkloadFactory.getTestArguments()
    arguments = {
        "AcquisitionEra": "ReleaseValidation",
        "Requestor": "[email protected]",
        "CMSSWVersion": "CMSSW_3_5_8",
        "ScramArch": "slc5_ia32_gcc434",
        "ProcessingVersion": 1,
        "GlobalTag": "GR10_P_v4::All",
        "CouchURL": self.testInit.couchUrl,
        "CouchDBName": self.testInit.couchDbName,
        "SiteWhitelist" : ["T1_CH_CERN", "T1_US_FNAL"],
        "DashboardHost": "127.0.0.1",
        "DashboardPort": 8884,
        "TaskChain" : 6,
        "IgnoredOutputModules" : ["writeSkim2", "writeRAWDEBUGDIGI"],
        # Task1: the initial generator task seeding both DigiHLT paths
        "Task1" :{
            "TaskName" : "GenSim",
            "ConfigCacheID" : generatorDoc,
            "SplittingAlgo" : "EventBased",
            "RequestNumEvents" : 10000,
            "Seeding" : "AutomaticSeeding",
            "PrimaryDataset" : "RelValTTBar",
        },
        # Task2: first DigiHLT path; its output is dropped (KeepOutput False)
        "Task2" : {
            "TaskName" : "DigiHLT_new",
            "InputTask" : "GenSim",
            "InputFromOutputModule" : "writeGENSIM",
            "ConfigCacheID" : processorDocs['DigiHLT'],
            "SplittingAlgo" : "LumiBased",
            "CMSSWVersion" : "CMSSW_5_2_6",
            "GlobalTag" : "GR_39_P_V5:All",
            "PrimaryDataset" : "PURelValTTBar",
            "KeepOutput" : False
        },
        # Task3: second DigiHLT path with its own era/version/string overrides
        "Task3" : {
            "TaskName" : "DigiHLT_ref",
            "InputTask" : "GenSim",
            "InputFromOutputModule" : "writeGENSIM",
            "ConfigCacheID" : processorDocs['DigiHLT'],
            "SplittingAlgo" : "EventBased",
            "CMSSWVersion" : "CMSSW_5_2_7",
            "GlobalTag" : "GR_40_P_V5:All",
            "AcquisitionEra" : "ReleaseValidationNewConditions",
            "ProcessingVersion" : 3,
            "ProcessingString" : "Test",
            "KeepOutput" : False
        },
        # Task4: reconstruction with a transient (unmerged-only) RECO output
        "Task4" : {
            "TaskName" : "Reco",
            "InputTask" : "DigiHLT_new",
            "InputFromOutputModule" : "writeRAWDIGI",
            "ConfigCacheID" : processorDocs['Reco'],
            "SplittingAlgo" : "FileBased",
            "TransientOutputModules" : ["writeRECO"]
        },
        "Task5" : {
            "TaskName" : "ALCAReco",
            "InputTask" : "DigiHLT_ref",
            "InputFromOutputModule" : "writeRAWDIGI",
            "ConfigCacheID" : processorDocs['ALCAReco'],
            "SplittingAlgo" : "LumiBased",
        },
        "Task6" : {
            "TaskName" : "Skims",
            "InputTask" : "Reco",
            "InputFromOutputModule" : "writeRECO",
            "ConfigCacheID" : processorDocs['Skims'],
            "SplittingAlgo" : "LumiBased",
        }
    }
    testArguments.update(arguments)
    arguments = testArguments
    factory = TaskChainWorkloadFactory()
    # Test a malformed task chain definition
    # (reading a transient output module from Task4 must fail validation)
    arguments['Task4']['TransientOutputModules'].append('writeAOD')
    self.assertRaises(WMSpecFactoryException, factory.validateSchema, arguments)
    arguments['Task4']['TransientOutputModules'].remove('writeAOD')
    try:
        self.workload = factory.factoryWorkloadConstruction("PullingTheChain", arguments)
    except Exception as ex:
        msg = "Error invoking TaskChainWorkloadFactory:\n%s" % str(ex)
        import traceback
        # ......... remainder of this method omitted in this excerpt .........
示例12: testMultipleGlobalTags
def testMultipleGlobalTags(self):
    """
    _testMultipleGlobalTags_
    Test creating a workload that starts in a processing task
    with an input dataset, and has different globalTags
    and CMSSW versions (with corresponding scramArch) in
    each task
    """
    processorDocs = makeProcessingConfigs(self.configDatabase)
    testArguments = TaskChainWorkloadFactory.getTestArguments()
    # global lumi mask plus a narrower per-task override used by Task2
    lumiDict = {"1":[[2,4], [8,50]], "2":[[100,200], [210,210]]}
    lumiDict2 = {"1":[[2,4], [8,40]], "2":[[100,150], [210,210]]}
    arguments = {
        "AcquisitionEra": "ReleaseValidation",
        "Requestor": "[email protected]",
        "CMSSWVersion": "CMSSW_3_5_8",
        "ScramArch": "slc5_ia32_gcc434",
        "ProcessingVersion": 1,
        "GlobalTag": "DefaultGlobalTag",
        "LumiList": lumiDict,
        "CouchURL": self.testInit.couchUrl,
        "CouchDBName": self.testInit.couchDbName,
        "SiteWhitelist" : ["T1_CH_CERN", "T1_US_FNAL"],
        "DashboardHost": "127.0.0.1",
        "DashboardPort": 8884,
        "TaskChain" : 4,
        # Task1 inherits the top-level GlobalTag/CMSSWVersion/ScramArch
        "Task1" :{
            "TaskName" : "DigiHLT",
            "ConfigCacheID" : processorDocs['DigiHLT'],
            "InputDataset" : "/MinimumBias/Commissioning10-v4/GEN-SIM",
            "SplittingAlgo" : "FileBased",
        },
        # Task2 overrides GlobalTag, CMSSW version, scramArch and lumi mask
        "Task2" : {
            "TaskName" : "Reco",
            "InputTask" : "DigiHLT",
            "InputFromOutputModule" : "writeRAWDIGI",
            "ConfigCacheID" : processorDocs['Reco'],
            "SplittingAlgo" : "FileBased",
            "GlobalTag" : "GlobalTagForReco",
            "CMSSWVersion" : "CMSSW_3_1_2",
            "ScramArch" : "CompatibleRECOArch",
            "PrimaryDataset" : "ZeroBias",
            "LumiList": lumiDict2,
        },
        # Task3 overrides GlobalTag, CMSSW version and scramArch only
        "Task3" : {
            "TaskName" : "ALCAReco",
            "InputTask" : "Reco",
            "InputFromOutputModule" : "writeALCA",
            "ConfigCacheID" : processorDocs['ALCAReco'],
            "SplittingAlgo" : "FileBased",
            "GlobalTag" : "GlobalTagForALCAReco",
            "CMSSWVersion" : "CMSSW_3_1_3",
            "ScramArch" : "CompatibleALCAArch",
        },
        # Task4 inherits everything from the top level
        "Task4" : {
            "TaskName" : "Skims",
            "InputTask" : "Reco",
            "InputFromOutputModule" : "writeRECO",
            "ConfigCacheID" : processorDocs['Skims'],
            "SplittingAlgo" : "FileBased",
        }
    }
    testArguments.update(arguments)
    arguments = testArguments
    factory = TaskChainWorkloadFactory()
    try:
        self.workload = factory.factoryWorkloadConstruction("YankingTheChain", arguments)
    # Fixed: original used Python 2 "except Exception, ex" syntax, which is
    # a SyntaxError under Python 3 and inconsistent with the rest of the file.
    except Exception as ex:
        msg = "Error invoking TaskChainWorkloadFactory:\n%s" % str(ex)
        self.fail(msg)
示例13: testGeneratorWorkflow
def testGeneratorWorkflow(self):
    """
    _testGeneratorWorkflow_
    Test creating a request with an initial generator task
    it mocks a request where there are 2 similar paths starting
    from the generator, each one with a different PrimaryDataset, CMSSW configuration
    and processed dataset. Dropping the RAW output as well.
    Also include an ignored output module to keep things interesting...
    """
    generatorDoc = makeGeneratorConfig(self.configDatabase)
    processorDocs = makeProcessingConfigs(self.configDatabase)
    # NOTE(review): this older variant uses the legacy
    # SplittingAlgorithm/SplittingArguments schema rather than SplittingAlgo
    arguments = {
        "AcquisitionEra": "ReleaseValidation",
        "Requestor": "[email protected]",
        "CMSSWVersion": "CMSSW_3_5_8",
        "ScramArch": "slc5_ia32_gcc434",
        "ProcessingVersion": 1,
        "GlobalTag": "GR10_P_v4::All",
        "CouchURL": self.testInit.couchUrl,
        "CouchDBName": self.testInit.couchDbName,
        "SiteWhitelist" : ["T1_CH_CERN", "T1_US_FNAL"],
        "DashboardHost": "127.0.0.1",
        "DashboardPort": 8884,
        "TaskChain" : 6,
        "IgnoredOutputModules" : ["writeSkim2", "writeRAWDEBUGDIGI"],
        # Task1: the initial generator task seeding both DigiHLT paths
        "Task1" :{
            "TaskName" : "GenSim",
            "ConfigCacheID" : generatorDoc,
            "SplittingAlgorithm" : "EventBased",
            "SplittingArguments" : {"events_per_job" : 250},
            "RequestNumEvents" : 10000,
            "Seeding" : "Automatic",
            "PrimaryDataset" : "RelValTTBar",
        },
        # Task2: first DigiHLT path; its output is dropped (KeepOutput False)
        "Task2" : {
            "TaskName" : "DigiHLT_new",
            "InputTask" : "GenSim",
            "InputFromOutputModule" : "writeGENSIM",
            "ConfigCacheID" : processorDocs['DigiHLT'],
            "SplittingAlgorithm" : "LumiBased",
            "SplittingArguments" : {"lumis_per_job" : 2 },
            "CMSSWVersion" : "CMSSW_5_2_6",
            "GlobalTag" : "GR_39_P_V5:All",
            "PrimaryDataset" : "PURelValTTBar",
            "KeepOutput" : False
        },
        # Task3: second DigiHLT path with its own era/version/string overrides
        "Task3" : {
            "TaskName" : "DigiHLT_ref",
            "InputTask" : "GenSim",
            "InputFromOutputModule" : "writeGENSIM",
            "ConfigCacheID" : processorDocs['DigiHLT'],
            "SplittingAlgorithm" : "EventBased",
            "SplittingArguments" : {"events_per_job" : 100 },
            "CMSSWVersion" : "CMSSW_5_2_7",
            "GlobalTag" : "GR_40_P_V5:All",
            "AcquisitionEra" : "ReleaseValidationNewConditions",
            "ProcessingVersion" : 3,
            "ProcessingString" : "Test",
            "KeepOutput" : False
        },
        # Task4: reconstruction with a transient (unmerged-only) RECO output
        "Task4" : {
            "TaskName" : "Reco",
            "InputTask" : "DigiHLT_new",
            "InputFromOutputModule" : "writeRAWDIGI",
            "ConfigCacheID" : processorDocs['Reco'],
            "SplittingAlgorithm" : "FileBased",
            "SplittingArguments" : {"files_per_job" : 1 },
            "TransientOutputModules" : ["writeRECO"]
        },
        "Task5" : {
            "TaskName" : "ALCAReco",
            "InputTask" : "DigiHLT_ref",
            "InputFromOutputModule" : "writeRAWDIGI",
            "ConfigCacheID" : processorDocs['ALCAReco'],
            "SplittingAlgorithm" : "LumiBased",
            "SplittingArguments" : {"lumis_per_job" : 8 },
        },
        "Task6" : {
            "TaskName" : "Skims",
            "InputTask" : "Reco",
            "InputFromOutputModule" : "writeRECO",
            "ConfigCacheID" : processorDocs['Skims'],
            "SplittingAlgorithm" : "LumiBased",
            "SplittingArguments" : {"lumis_per_job" : 10 },
        }
    }
    factory = TaskChainWorkloadFactory()
    # Test a malformed task chain definition
    # (reading a transient output module from Task4 must fail validation)
    arguments['Task4']['TransientOutputModules'].append('writeAOD')
    self.assertRaises(WMSpecFactoryException, factory.validateSchema, arguments)
    arguments['Task4']['TransientOutputModules'].remove('writeAOD')
    try:
        # ......... remainder of this method omitted in this excerpt .........
示例14: dicts
from __future__ import division, print_function
import unittest
from copy import deepcopy
from WMCore.ReqMgr.DataStructs.Request import initialize_clone
from WMCore.WMSpec.StdSpecs.ReReco import ReRecoWorkloadFactory
from WMCore.WMSpec.StdSpecs.StepChain import StepChainWorkloadFactory
from WMCore.WMSpec.StdSpecs.TaskChain import TaskChainWorkloadFactory
from WMCore.WMSpec.WMSpecErrors import WMSpecFactoryException
### Spec arguments definition with only key and its default value
# Workload-level creation arguments declared by each spec factory class.
RERECO_ARGS = ReRecoWorkloadFactory.getWorkloadCreateArgs()
STEPCHAIN_ARGS = StepChainWorkloadFactory.getWorkloadCreateArgs()
TASKCHAIN_ARGS = TaskChainWorkloadFactory.getWorkloadCreateArgs()
# inner Task/Step definition
STEP_ARGS = StepChainWorkloadFactory.getChainCreateArgs()
TASK_ARGS = TaskChainWorkloadFactory.getChainCreateArgs()
### Some original request dicts (ones to be cloned from)
# Minimal ReReco request used as the clone source in the tests below.
rerecoOriginalArgs = {'Memory': 234, 'SkimName1': 'skim_2017', 'SkimInput1': 'RECOoutput',
                      'Skim1ConfigCacheID': 'abcdef',
                      'TimePerEvent': 1.2, 'RequestType': 'ReReco', 'RequestName': 'test_rereco'}
# Minimal StepChain request with both top-level and per-step settings.
stepChainOriginalArgs = {'Memory': 1234, 'TimePerEvent': 1.2, 'RequestType': 'StepChain',
                         "ScramArch": ["slc6_amd64_gcc481"], "RequestName": "test_stepchain",
                         "Step1": {"ConfigCacheID": "blah", "GlobalTag": "PHYS18", "BlockWhitelist": ["A", "B"]},
                         "Step2": {"AcquisitionEra": "ACQERA", "ProcessingVersion": 3, "LumisPerJob": 10},
                         "StepChain": 2, "ConfigCacheID": None}
taskChainOriginalArgs = {'PrepID': None, 'Campaign': "MainTask", 'RequestType': 'TaskChain',
"ScramArch": ["slc6_amd64_gcc481", "slc7_amd64_gcc630"], "RequestName": "test_taskchain",