本文整理匯總了Python中ProdCommon.MCPayloads.WorkflowSpec.WorkflowSpec.pileupDatasets方法的典型用法代碼示例。如果您正苦於以下問題:Python WorkflowSpec.pileupDatasets方法的具體用法?Python WorkflowSpec.pileupDatasets怎麽用?Python WorkflowSpec.pileupDatasets使用的例子?那麽, 這裏精選的方法代碼示例或許可以為您提供幫助。您也可以進一步了解該方法所在類ProdCommon.MCPayloads.WorkflowSpec.WorkflowSpec
的用法示例。
在下文中一共展示了WorkflowSpec.pileupDatasets方法的2個代碼示例,這些例子默認根據受歡迎程度排序。您可以為喜歡或者感覺有用的代碼點讚,您的評價將有助於係統推薦出更棒的Python代碼示例。
示例1: __init__
# 需要導入模塊: from ProdCommon.MCPayloads.WorkflowSpec import WorkflowSpec [as 別名]
# 或者: from ProdCommon.MCPayloads.WorkflowSpec.WorkflowSpec import pileupDatasets [as 別名]
class RequestIterator:
"""
_RequestIterator_
Working from a Generic Workflow template, generate
concrete jobs from it, keeping in-memory history
"""
def __init__(self, workflowSpecFile, workingDir):
    """
    Initialise the iterator from a workflow spec file.

    workflowSpecFile -- path to the WorkflowSpec XML file to load
    workingDir       -- directory used to hold the JobSpec cache

    On a load failure the object is left partially initialised and
    __init__ returns early (pre-existing best-effort behaviour).
    """
    self.workflow = workflowSpecFile
    self.workingDir = workingDir
    self.count = 0
    self.runIncrement = 1
    self.currentJob = None
    self.sitePref = None
    self.pileupDatasets = {}
    self.ownedJobSpecs = {}
    # //
    # // Initially hard coded, should be extracted from Component Config
    #//
    self.eventsPerJob = 10
    self.workflowSpec = WorkflowSpec()
    try:
        self.workflowSpec.load(workflowSpecFile)
    except Exception:
        # Narrowed from a bare "except:" so KeyboardInterrupt/SystemExit
        # are no longer swallowed; logging.exception records the traceback
        # so the failure reason is visible.  Still returns (no raise) so
        # existing callers see the same behaviour as before.
        logging.exception("ERROR Loading Workflow: %s " % (workflowSpecFile))
        return
    # RunIncrement overrides the default step of 1 between generated runs
    if self.workflowSpec.parameters.get("RunIncrement", None) != None:
        self.runIncrement = int(
            self.workflowSpec.parameters['RunIncrement']
        )
    self.generators = GeneratorMaker()
    self.workflowSpec.payload.operate(self.generators)
    # //
    # // Cache Area for JobSpecs
    #//
    self.specCache = os.path.join(
        self.workingDir,
        "%s-Cache" % self.workflowSpec.workflowName())
    if not os.path.exists(self.specCache):
        os.makedirs(self.specCache)
def loadPileupDatasets(self):
"""
_loadPileupDatasets_
Are we dealing with pileup? If so pull in the file list
"""
puDatasets = self.workflowSpec.pileupDatasets()
if len(puDatasets) > 0:
logging.info("Found %s Pileup Datasets for Workflow: %s" % (
len(puDatasets), self.workflowSpec.workflowName(),
))
self.pileupDatasets = createPileupDatasets(self.workflowSpec)
return
def loadPileupSites(self):
"""
_loadPileupSites_
Are we dealing with pileup? If so pull in the site list
"""
sites = []
puDatasets = self.workflowSpec.pileupDatasets()
if len(puDatasets) > 0:
logging.info("Found %s Pileup Datasets for Workflow: %s" % (
len(puDatasets), self.workflowSpec.workflowName(),
))
sites = getPileupSites(self.workflowSpec)
return sites
def __call__(self):
"""
_operator()_
When called generate a new concrete job payload from the
generic workflow and return it.
"""
newJobSpec = self.createJobSpec()
self.count += self.runIncrement
return newJobSpec
def createJobSpec(self):
#.........這裏部分代碼省略.........
示例2: __init__
# 需要導入模塊: from ProdCommon.MCPayloads.WorkflowSpec import WorkflowSpec [as 別名]
# 或者: from ProdCommon.MCPayloads.WorkflowSpec.WorkflowSpec import pileupDatasets [as 別名]
class DatasetIterator:
"""
_DatasetIterator_
Working from a Generic Workflow template, generate
concrete jobs from it, keeping in-memory history
"""
def __init__(self, workflowSpecFile, workingDir):
    """
    Initialise the iterator from a workflow spec file.

    workflowSpecFile -- path to the WorkflowSpec XML file to load
    workingDir       -- directory used to hold the JobSpec cache

    Raises RuntimeError when no DBS URL is available either locally
    or from the workflow spec.
    """
    self.workflow = workflowSpecFile
    self.workingDir = workingDir
    self.currentJob = None
    self.workflowSpec = WorkflowSpec()
    self.workflowSpec.load(workflowSpecFile)
    self.currentJobDef = None
    self.count = 0
    self.onlyClosedBlocks = False
    # "in" replaces dict.has_key, which exists only in Python 2
    if "OnlyClosedBlocks" in self.workflowSpec.parameters:
        onlyClosed = str(
            self.workflowSpec.parameters["OnlyClosedBlocks"]).lower()
        if onlyClosed == "true":
            self.onlyClosedBlocks = True
    self.ownedJobSpecs = {}
    self.allowedBlocks = []
    self.allowedSites = []
    self.dbsUrl = getLocalDBSURL()
    self.splitType = \
        self.workflowSpec.parameters.get("SplitType", "file").lower()
    self.splitSize = int(self.workflowSpec.parameters.get("SplitSize", 1))
    self.generators = GeneratorMaker()
    self.generators(self.workflowSpec.payload)
    self.pileupDatasets = {}
    # //
    # // Does the workflow contain a block restriction??
    #//
    blockRestriction = \
        self.workflowSpec.parameters.get("OnlyBlocks", None)
    if blockRestriction is not None:
        # //
        # // restriction on blocks present, populate allowedBlocks list
        #//
        msg = "Block restriction provided in Workflow Spec:\n"
        msg += "%s\n" % blockRestriction
        # msg was previously built and then discarded; log it so the
        # restriction is visible in the component log
        logging.info(msg)
        blockList = blockRestriction.split(",")
        for block in blockList:
            if len(block.strip()) > 0:
                self.allowedBlocks.append(block.strip())
    # //
    # // Does the workflow contain a site restriction??
    #//
    siteRestriction = \
        self.workflowSpec.parameters.get("OnlySites", None)
    if siteRestriction is not None:
        # //
        # // restriction on sites present, populate allowedSites list
        #//
        msg = "Site restriction provided in Workflow Spec:\n"
        msg += "%s\n" % siteRestriction
        # msg was previously built and then discarded; log it so the
        # restriction is visible in the component log
        logging.info(msg)
        siteList = siteRestriction.split(",")
        for site in siteList:
            if len(site.strip()) > 0:
                self.allowedSites.append(site.strip())
    # //
    # // Is the DBSURL contact information provided??
    #//
    value = self.workflowSpec.parameters.get("DBSURL", None)
    if value is not None:
        self.dbsUrl = value
    if self.dbsUrl is None:
        msg = "Error: No DBSURL available for dataset:\n"
        msg += "Cant get local DBSURL and one not provided with workflow"
        # call form (not "raise E, msg") so the statement is valid in
        # both Python 2 and Python 3
        raise RuntimeError(msg)
    # //
    # // Cache Area for JobSpecs
    #//
    self.specCache = os.path.join(
        self.workingDir,
        "%s-Cache" % self.workflowSpec.workflowName())
    if not os.path.exists(self.specCache):
        os.makedirs(self.specCache)
def __call__(self, jobDef):
"""
_operator()_
When called generate a new concrete job payload from the
generic workflow and return it.
The JobDef should be a JobDefinition with the input details
including LFNs and event ranges etc.
"""
newJobSpec = self.createJobSpec(jobDef)
#.........這裏部分代碼省略.........