This article collects typical usage examples of the Python method ProdCommon.MCPayloads.WorkflowSpec.WorkflowSpec.pileupDatasets. If you have been wondering what WorkflowSpec.pileupDatasets does, how to call it, or where to find it in real code, the curated examples below may help. You can also read further about its containing class, ProdCommon.MCPayloads.WorkflowSpec.WorkflowSpec.
Two code examples of WorkflowSpec.pileupDatasets are shown below, sorted by popularity by default. You can upvote the examples you like or find useful; your ratings help the system recommend better Python code examples.
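Before the full examples, here is a minimal sketch of the typical call pattern: load a WorkflowSpec from its XML file, then call pileupDatasets() to see whether the workflow references any pileup input. The spec file path is hypothetical, and logging configuration is shown only for illustration.

import logging
from ProdCommon.MCPayloads.WorkflowSpec import WorkflowSpec

logging.basicConfig(level=logging.INFO)

spec = WorkflowSpec()
spec.load("/path/to/workflow-spec.xml")  # hypothetical spec file

# pileupDatasets() returns the pileup dataset entries declared in the spec
puDatasets = spec.pileupDatasets()
if len(puDatasets) > 0:
    logging.info("Workflow %s uses %s pileup dataset(s)",
                 spec.workflowName(), len(puDatasets))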
Example 1: __init__
# Required import: from ProdCommon.MCPayloads.WorkflowSpec import WorkflowSpec [as alias]
# Or: from ProdCommon.MCPayloads.WorkflowSpec.WorkflowSpec import pileupDatasets [as alias]
# (This excerpt also relies on os, logging, GeneratorMaker, createPileupDatasets
#  and getPileupSites from the surrounding module.)
class RequestIterator:
    """
    _RequestIterator_

    Working from a Generic Workflow template, generate
    concrete jobs from it, keeping in-memory history

    """
    def __init__(self, workflowSpecFile, workingDir):
        self.workflow = workflowSpecFile
        self.workingDir = workingDir
        self.count = 0
        self.runIncrement = 1
        self.currentJob = None
        self.sitePref = None
        self.pileupDatasets = {}
        self.ownedJobSpecs = {}

        # //
        # // Initially hard coded, should be extracted from Component Config
        #//
        self.eventsPerJob = 10

        self.workflowSpec = WorkflowSpec()
        try:
            self.workflowSpec.load(workflowSpecFile)
        except Exception:
            logging.error("ERROR Loading Workflow: %s" % workflowSpecFile)
            return

        if self.workflowSpec.parameters.get("RunIncrement", None) is not None:
            self.runIncrement = int(
                self.workflowSpec.parameters['RunIncrement']
            )

        self.generators = GeneratorMaker()
        self.workflowSpec.payload.operate(self.generators)

        # //
        # // Cache Area for JobSpecs
        #//
        self.specCache = os.path.join(
            self.workingDir,
            "%s-Cache" % self.workflowSpec.workflowName())
        if not os.path.exists(self.specCache):
            os.makedirs(self.specCache)
    def loadPileupDatasets(self):
        """
        _loadPileupDatasets_

        Are we dealing with pileup? If so pull in the file list

        """
        puDatasets = self.workflowSpec.pileupDatasets()
        if len(puDatasets) > 0:
            logging.info("Found %s Pileup Datasets for Workflow: %s" % (
                len(puDatasets), self.workflowSpec.workflowName(),
            ))
            self.pileupDatasets = createPileupDatasets(self.workflowSpec)
        return

    def loadPileupSites(self):
        """
        _loadPileupSites_

        Are we dealing with pileup? If so pull in the site list

        """
        sites = []
        puDatasets = self.workflowSpec.pileupDatasets()
        if len(puDatasets) > 0:
            logging.info("Found %s Pileup Datasets for Workflow: %s" % (
                len(puDatasets), self.workflowSpec.workflowName(),
            ))
            sites = getPileupSites(self.workflowSpec)
        return sites
    def __call__(self):
        """
        _operator()_

        When called generate a new concrete job payload from the
        generic workflow and return it.

        """
        newJobSpec = self.createJobSpec()
        self.count += self.runIncrement
        return newJobSpec

    def createJobSpec(self):
        #......... remainder of this method omitted .........
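For orientation, here is a minimal usage sketch of RequestIterator, under stated assumptions: the paths are hypothetical and logging is configured by the caller. Construction loads the spec; loadPileupDatasets() pulls in pileup file lists if the workflow declares any; each call to the iterator produces the next concrete job spec, advancing the run count by runIncrement.

iterator = RequestIterator("/path/to/workflow-spec.xml",  # hypothetical path
                           "/path/to/working/dir")        # hypothetical path
iterator.loadPileupDatasets()  # no-op unless the spec declares pileup
firstJob = iterator()          # concrete job spec for the current run count
secondJob = iterator()         # count has advanced by runIncrement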
Example 2: __init__
# Required import: from ProdCommon.MCPayloads.WorkflowSpec import WorkflowSpec [as alias]
# Or: from ProdCommon.MCPayloads.WorkflowSpec.WorkflowSpec import pileupDatasets [as alias]
# (This excerpt also relies on os, GeneratorMaker and getLocalDBSURL
#  from the surrounding module.)
class DatasetIterator:
    """
    _DatasetIterator_

    Working from a Generic Workflow template, generate
    concrete jobs from it, keeping in-memory history

    """
    def __init__(self, workflowSpecFile, workingDir):
        self.workflow = workflowSpecFile
        self.workingDir = workingDir
        self.currentJob = None
        self.workflowSpec = WorkflowSpec()
        self.workflowSpec.load(workflowSpecFile)
        self.currentJobDef = None
        self.count = 0
        self.onlyClosedBlocks = False
        if "OnlyClosedBlocks" in self.workflowSpec.parameters:
            onlyClosed = str(
                self.workflowSpec.parameters["OnlyClosedBlocks"]).lower()
            if onlyClosed == "true":
                self.onlyClosedBlocks = True
        self.ownedJobSpecs = {}
        self.allowedBlocks = []
        self.allowedSites = []
        self.dbsUrl = getLocalDBSURL()
        self.splitType = \
            self.workflowSpec.parameters.get("SplitType", "file").lower()
        self.splitSize = int(self.workflowSpec.parameters.get("SplitSize", 1))
        self.generators = GeneratorMaker()
        self.generators(self.workflowSpec.payload)
        self.pileupDatasets = {}

        # //
        # // Does the workflow contain a block restriction??
        #//
        blockRestriction = \
            self.workflowSpec.parameters.get("OnlyBlocks", None)
        if blockRestriction is not None:
            # //
            # // restriction on blocks present, populate allowedBlocks list
            #//
            msg = "Block restriction provided in Workflow Spec:\n"
            msg += "%s\n" % blockRestriction
            blockList = blockRestriction.split(",")
            for block in blockList:
                if len(block.strip()) > 0:
                    self.allowedBlocks.append(block.strip())

        # //
        # // Does the workflow contain a site restriction??
        #//
        siteRestriction = \
            self.workflowSpec.parameters.get("OnlySites", None)
        if siteRestriction is not None:
            # //
            # // restriction on sites present, populate allowedSites list
            #//
            msg = "Site restriction provided in Workflow Spec:\n"
            msg += "%s\n" % siteRestriction
            siteList = siteRestriction.split(",")
            for site in siteList:
                if len(site.strip()) > 0:
                    self.allowedSites.append(site.strip())

        # //
        # // Is the DBSURL contact information provided??
        #//
        value = self.workflowSpec.parameters.get("DBSURL", None)
        if value is not None:
            self.dbsUrl = value

        if self.dbsUrl is None:
            msg = "Error: No DBSURL available for dataset:\n"
            msg += "Can't get local DBSURL and one not provided with workflow"
            raise RuntimeError(msg)

        # //
        # // Cache Area for JobSpecs
        #//
        self.specCache = os.path.join(
            self.workingDir,
            "%s-Cache" % self.workflowSpec.workflowName())
        if not os.path.exists(self.specCache):
            os.makedirs(self.specCache)
    def __call__(self, jobDef):
        """
        _operator()_

        When called generate a new concrete job payload from the
        generic workflow and return it.

        The JobDef should be a JobDefinition with the input details
        including LFNs and event ranges etc.

        """
        newJobSpec = self.createJobSpec(jobDef)
        #......... remainder of this method omitted .........
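DatasetIterator follows the same calling pattern, except that each job is driven by a JobDefinition describing the input data (LFNs, event ranges, and so on), as the docstring above notes. A minimal sketch with hypothetical paths and a placeholder jobDefinitions sequence:

iterator = DatasetIterator("/path/to/workflow-spec.xml",  # hypothetical path
                           "/path/to/working/dir")        # hypothetical path
for jobDef in jobDefinitions:  # placeholder: JobDefinition objects from a splitter
    newJobSpec = iterator(jobDef)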