This page collects typical usage examples of the Python method DIRAC.RequestManagementSystem.Client.RequestContainer.RequestContainer.setSourceComponent. If you are wondering what RequestContainer.setSourceComponent does, how to call it, or simply want to see it used in real code, the examples gathered below should help. You can also look further into the containing class, DIRAC.RequestManagementSystem.Client.RequestContainer.RequestContainer, for additional usage examples.
The 13 code examples of RequestContainer.setSourceComponent shown below are ordered by popularity by default.
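Before the individual examples, here is a minimal sketch of the pattern most of them share: a RequestContainer is created, tagged with setSourceComponent so the originating agent or job can be identified later, populated with a sub-request, serialised to XML and handed to a RequestClient. It assumes the legacy DIRAC RequestManagementSystem API used throughout this page; the function name buildRemovalRequest, the source component name 'ExampleAgent' and the request name are illustrative only.

from DIRAC.RequestManagementSystem.Client.RequestContainer import RequestContainer
from DIRAC.RequestManagementSystem.Client.RequestClient import RequestClient

def buildRemovalRequest( lfn, ownerDN, ownerGroup ):
  """ Build and submit a single-file removal request (sketch only). """
  request = RequestContainer()
  request.setRequestAttributes( { 'OwnerDN' : ownerDN, 'OwnerGroup' : ownerGroup } )
  requestName = 'example_removal_request.xml'
  request.setRequestName( requestName )
  # Record which component created the request; agents can use this for bookkeeping.
  request.setSourceComponent( 'ExampleAgent' )
  result = request.addSubRequest( { 'Attributes' : { 'Operation' : 'removeFile',
                                                     'TargetSE' : '',
                                                     'ExecutionOrder' : 0 } },
                                  'removal' )
  if not result['OK']:
    return result
  index = result['Value']
  request.setSubRequestFiles( index, 'removal', [ { 'LFN' : lfn, 'PFN' : '', 'Status' : 'Waiting' } ] )
  result = request.toXML()
  if not result['OK']:
    return result
  return RequestClient().setRequest( requestName, result['Value'] )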
Example 1: __setRemovalRequest
# Required import: from DIRAC.RequestManagementSystem.Client.RequestContainer import RequestContainer [as alias]
# Or: from DIRAC.RequestManagementSystem.Client.RequestContainer.RequestContainer import setSourceComponent [as alias]
def __setRemovalRequest( self, lfn, ownerDN, ownerGroup ):
""" Set removal request with the given credentials
"""
request = RequestContainer()
request.setRequestAttributes( { 'OwnerDN':ownerDN, 'OwnerGroup':ownerGroup } )
requestName = os.path.basename( lfn ).strip()+'_removal_request.xml'
request.setRequestName( requestName )
request.setSourceComponent( 'JobCleaningAgent' )
removalDict = {'Attributes':{ 'Operation':'removeFile',
'TargetSE':'',
'ExecutionOrder':0
}
}
result = request.addSubRequest( removalDict, 'removal' )
if not result['OK']:
return result
index = result['Value']
fileDict = { 'LFN':lfn, 'PFN':'', 'Status':'Waiting' }
request.setSubRequestFiles( index, 'removal', [fileDict] )
client = RequestClient()
result = request.toXML()
if not result['OK']:
return result
xmlRequest = result['Value']
result = client.setRequest( requestName, xmlRequest )
return result
Example 2: getRequest
# Required import: from DIRAC.RequestManagementSystem.Client.RequestContainer import RequestContainer [as alias]
# Or: from DIRAC.RequestManagementSystem.Client.RequestContainer.RequestContainer import setSourceComponent [as alias]
def getRequest():
"""
helper fcn to build requestContainer
"""
requestContainer = RequestContainer( init = False )
## get request
requestContainer.setRequestName( "00009423_00000118" )
requestContainer.setJobID( 0 )
requestContainer.setOwnerDN( "" )
requestContainer.setOwnerGroup( "" )
requestContainer.setDIRACSetup( "" )
requestContainer.setSourceComponent( None )
requestContainer.setCreationTime( "0000-00-00 00:00:00" )
requestContainer.setLastUpdate( "2011-02-19 04:57:02" )
requestContainer.setStatus( "Waiting" )
## get subrequest
requestContainer.initiateSubRequest( "transfer" )
subRequestDict = { "Status" : "Waiting",
"SubRequestID" : 2259916,
"Operation" : "replicateAndRegister",
"Arguments" : None,
"ExecutionOrder" : 0,
"SourceSE" : None,
"TargetSE" : "GRIDKA_MC-DST,GRIDKA_MC_M-DST",
"Catalogue" : None,
"CreationTime" : "2011-02-19 04:57:02",
"SubmissionTime" : "2011-02-19 04:57:02",
"LastUpdate" : "2011-08-18 20:14:22" }
requestContainer.setSubRequestAttributes( 0, "transfer", subRequestDict )
## get subrequest files
files = [ { "FileID" : 1610538,
"LFN" : "/lhcb/MC/MC10/ALLSTREAMS.DST/00009422/0000/00009422_00000171_1.allstreams.dst",
"Size" : None,
"PFN" : None,
"GUID" : None,
"Md5" : None,
"Addler" : None,
"Attempt" : 1,
"Status" : "Scheduled" } ]
requestContainer.setSubRequestFiles( 0, "transfer", files )
return { "OK" : True,
"Value" : { "RequestName" : "00009423_00000118",
"RequestString" : requestContainer.toXML()["Value"],
"JobID" : 0,
"RequestContainer" : requestContainer } }
Example 3: getKwargsRemoveFile
# Required import: from DIRAC.RequestManagementSystem.Client.RequestContainer import RequestContainer [as alias]
# Or: from DIRAC.RequestManagementSystem.Client.RequestContainer.RequestContainer import setSourceComponent [as alias]
def getKwargsRemoveFile():
""" helper fcn to build request """
requestContainer = RequestContainer( init = False )
requestContainer.setJobID( 11111111 )
#requestContainer.setOwnerDN( "/DC=ch/DC=cern/OU=Organic Units/OU=Users/CN=cibak/CN=605919/CN=Krzysztof Ciba" )
requestContainer.setOwnerGroup( "lhcb_user" )
requestContainer.setDIRACSetup( "LHCb-Production" )
requestContainer.setSourceComponent( None )
requestContainer.setCreationTime( "0000-00-00 00:00:00" )
requestContainer.setLastUpdate( "2011-12-01 04:57:02" )
requestContainer.setStatus( "Waiting" )
requestContainer.initiateSubRequest( "removal" )
subRequestDict = { "Status" : "Waiting",
"SubRequestID" : 2222222,
"Operation" : "removeFile",
"Arguments" : None,
"ExecutionOrder" : 0,
"SourceSE" : None,
"TargetSE" : "RAL-USER",
"Catalogue" : "LcgFileCatalogCombined",
"CreationTime" : "2011-12-01 04:57:02",
"SubmissionTime" : "2011-12-01 04:57:02",
"LastUpdate" : "2011-12-01 20:14:22" }
requestContainer.setSubRequestAttributes( 0, "removal", subRequestDict )
files = [ { "FileID" : 3333333,
"LFN" : "/lhcb/user/c/cibak/11889/11889410/test.zzz",
"Size" : 44444444,
"PFN" : "srm://srm-lhcb.gridpp.rl.ac.uk/castor/ads.rl.ac.uk/prod/lhcb/user/c/cibak/11889/11889410/test.zzz",
"GUID" : "5P13RD4L-4J5L-3D21-U5P1-3RD4L4J5P13R",
"Md5" : None,
"Addler" : "92b85e26",
"Attempt" : 1,
"Status" : "Waiting" } ]
requestContainer.setSubRequestFiles( 0, "removal", files )
return { "requestName" : "00000001.xml",
"requestString" : requestContainer.toXML()["Value"],
"jobID" : 1,
"executionOrder" : 0,
"sourceServer" : "foobarserver",
"configPath" : "/Systems/DataManagement/Development/Agents/RemovalAgent" }
Example 4: getRequest
# Required import: from DIRAC.RequestManagementSystem.Client.RequestContainer import RequestContainer [as alias]
# Or: from DIRAC.RequestManagementSystem.Client.RequestContainer.RequestContainer import setSourceComponent [as alias]
#......... part of the code omitted here .........
break
# Haven't succeeded to get any request
if not requestID:
return S_OK()
dmRequest.setRequestID( requestID )
fields = ['FileID', 'LFN', 'Size', 'PFN', 'GUID', 'Md5', 'Addler', 'Attempt', 'Status' ]
for subRequestID, operation, arguments, executionOrder, sourceSE, targetSE, catalogue, \
creationTime, submissionTime, lastUpdate in reqDict[requestID]:
if not subRequestID in subIDList: continue
res = dmRequest.initiateSubRequest( requestType )
ind = res['Value']
subRequestDict = {
'Status' : 'Waiting',
'SubRequestID' : subRequestID,
'Operation' : operation,
'Arguments' : arguments,
'ExecutionOrder': int( executionOrder ),
'SourceSE' : sourceSE,
'TargetSE' : targetSE,
'Catalogue' : catalogue,
'CreationTime' : creationTime,
'SubmissionTime': submissionTime,
'LastUpdate' : lastUpdate
}
res = dmRequest.setSubRequestAttributes( ind, requestType, subRequestDict )
if not res['OK']:
err = 'RequestDB._getRequest: Failed to set subRequest attributes for RequestID %s' % requestID
self.__releaseSubRequests( requestID, subIDList )
return S_ERROR( '%s\n%s' % ( err, res['Message'] ) )
req = "SELECT %s FROM `Files` WHERE `SubRequestID`=%s ORDER BY `FileID`;" % ( ', '.join( fields ),
subRequestID )
res = self._query( req )
if not res['OK']:
err = 'RequestDB._getRequest: Failed to get File attributes for RequestID %s.%s' % ( requestID,
subRequestID )
self.__releaseSubRequests( requestID, subIDList )
return S_ERROR( '%s\n%s' % ( err, res['Message'] ) )
files = []
for fileID, lfn, size, pfn, guid, md5, addler, attempt, status in res['Value']:
fileDict = {'FileID':fileID, 'LFN':lfn, 'Size':size, 'PFN':pfn, 'GUID':guid,
'Md5':md5, 'Addler':addler, 'Attempt':attempt, 'Status':status}
files.append( fileDict )
res = dmRequest.setSubRequestFiles( ind, requestType, files )
if not res['OK']:
err = 'RequestDB._getRequest: Failed to set files into Request for RequestID %s.%s' % ( requestID,
subRequestID )
self.__releaseSubRequests( requestID, subIDList )
return S_ERROR( '%s\n%s' % ( err, res['Message'] ) )
req = "SELECT Dataset,Status FROM Datasets WHERE SubRequestID = %s;" % subRequestID
res = self._query( req )
if not res['OK']:
err = 'RequestDB._getRequest: Failed to get Datasets for RequestID %s.%s' % ( requestID, subRequestID )
self.__releaseSubRequests( requestID, subIDList )
return S_ERROR( '%s\n%s' % ( err, res['Message'] ) )
datasets = []
for dataset, status in res['Value']:
datasets.append( dataset )
res = dmRequest.setSubRequestDatasets( ind, requestType, datasets )
if not res['OK']:
err = 'RequestDB._getRequest: Failed to set datasets into Request for RequestID %s.%s' % ( requestID,
subRequestID )
self.__releaseSubRequests( requestID, subIDList )
return S_ERROR( '%s\n%s' % ( err, res['Message'] ) )
fields = ['RequestName', 'JobID', 'OwnerDN', 'OwnerGroup',
'DIRACSetup', 'SourceComponent', 'CreationTime',
'SubmissionTime', 'LastUpdate']
req = "SELECT %s FROM `Requests` WHERE `RequestID`=%s;" % ( ', '.join( fields ), requestID )
res = self._query( req )
if not res['OK']:
err = 'RequestDB._getRequest: Failed to retrieve max RequestID'
self.__releaseSubRequests( requestID, subIDList )
return S_ERROR( '%s\n%s' % ( err, res['Message'] ) )
requestName, jobID, ownerDN, ownerGroup, diracSetup, sourceComponent, \
creationTime, submissionTime, lastUpdate = res['Value'][0]
dmRequest.setRequestName( requestName )
dmRequest.setJobID( jobID )
dmRequest.setOwnerDN( ownerDN )
dmRequest.setOwnerGroup( ownerGroup )
dmRequest.setDIRACSetup( diracSetup )
dmRequest.setSourceComponent( sourceComponent )
dmRequest.setCreationTime( str( creationTime ) )
dmRequest.setLastUpdate( str( lastUpdate ) )
res = dmRequest.toXML()
if not res['OK']:
err = 'RequestDB._getRequest: Failed to create XML for RequestID %s' % ( requestID )
self.__releaseSubRequests( requestID, subIDList )
return S_ERROR( '%s\n%s' % ( err, res['Message'] ) )
requestString = res['Value']
#still have to manage the status of the dataset properly
resultDict = {}
resultDict['RequestName'] = requestName
resultDict['RequestString'] = requestString
resultDict['JobID'] = jobID
return S_OK( resultDict )
Example 5: S_ERROR
# Required import: from DIRAC.RequestManagementSystem.Client.RequestContainer import RequestContainer [as alias]
# Or: from DIRAC.RequestManagementSystem.Client.RequestContainer.RequestContainer import setSourceComponent [as alias]
except Exception, x:
gLogger.error("Could not copy because %s" % x)
return S_ERROR("Could not copy because %s" % x)
elif path.find("http://") > -1:
gLogger.error("path %s was not foreseen, location not known, upload to location yourself, and publish in CS manually" % path)
return S_ERROR()
else:
lfnpath = "%s%s" % (path, os.path.basename(appTar))
res = rm.putAndRegister(lfnpath, appTar, ops.getValue('Software/BaseStorageElement',"CERN-SRM"))
if not res['OK']:
return res
request = RequestContainer()
request.setCreationTime()
requestClient = RequestClient()
request.setRequestName('copy_%s' % os.path.basename(appTar).replace(".tgz","").replace(".tar.gz",""))
request.setSourceComponent('ReplicateILCSoft')
copies_at = ops.getValue('Software/CopiesAt',[])
index_copy = 0
for copies in copies_at:
res = request.addSubRequest({'Attributes':{'Operation' : 'replicateAndRegister',
'TargetSE' : copies,
'ExecutionOrder' : index_copy},
'Files':[{'LFN':lfnpath}]},
'transfer')
#res = rm.replicateAndRegister("%s%s"%(path,appTar),"IN2P3-SRM")
if not res['OK']:
return res
index_copy += 1
requestxml = request.toXML()['Value']
if copies_at:
res = requestClient.setRequest(request.getRequestName()['Value'], requestxml)
Example 6: __init__
# Required import: from DIRAC.RequestManagementSystem.Client.RequestContainer import RequestContainer [as alias]
# Or: from DIRAC.RequestManagementSystem.Client.RequestContainer.RequestContainer import setSourceComponent [as alias]
class FailoverTransfer:
#############################################################################
def __init__(self,requestObject=False):
""" Constructor function, can specify request object to instantiate
FailoverTransfer or a new request object is created.
"""
self.log = gLogger.getSubLogger( "FailoverTransfer" )
self.rm = ReplicaManager()
self.request = requestObject
if not self.request:
self.request = RequestContainer()
self.request.setRequestName('default_request.xml')
self.request.setSourceComponent('FailoverTransfer')
#############################################################################
def transferAndRegisterFile(self,fileName,localPath,lfn,destinationSEList,fileGUID=None,fileCatalog=None):
"""Performs the transfer and register operation with failover.
"""
errorList = []
for se in destinationSEList:
self.log.info('Attempting rm.putAndRegister("%s","%s","%s",guid="%s",catalog="%s")' %(lfn,localPath,se,fileGUID,fileCatalog))
result = self.rm.putAndRegister(lfn,localPath,se,guid=fileGUID,catalog=fileCatalog)
self.log.verbose(result)
if not result['OK']:
self.log.error('rm.putAndRegister failed with message',result['Message'])
errorList.append(result['Message'])
continue
if not result['Value']['Failed']:
self.log.info('rm.putAndRegister successfully uploaded %s to %s' %(fileName,se))
return S_OK({'uploadedSE':se,'lfn':lfn})
#Now we know something went wrong
errorDict = result['Value']['Failed'][lfn]
if not errorDict.has_key('register'):
self.log.error('rm.putAndRegister failed with unknown error',str(errorDict))
errorList.append('Unknown error while attempting upload to %s' %se)
continue
fileDict = errorDict['register']
#Therefore the registration failed but the upload was successful
if not fileCatalog:
fileCatalog=''
result = self.__setRegistrationRequest(fileDict['LFN'],se,fileCatalog,fileDict)
if not result['OK']:
self.log.error('Failed to set registration request for: SE %s and metadata: \n%s' %(se,fileDict))
errorList.append('Failed to set registration request for: SE %s and metadata: \n%s' %(se,fileDict))
continue
else:
self.log.info('Successfully set registration request for: SE %s and metadata: \n%s' %(se,fileDict))
metadata = {}
metadata['filedict']=fileDict
metadata['uploadedSE']=se
metadata['lfn']=lfn
metadata['registration']='request'
return S_OK(metadata)
self.log.error('Encountered %s errors during attempts to upload output data' %len(errorList))
return S_ERROR('Failed to upload output data file')
#############################################################################
def transferAndRegisterFileFailover(self,fileName,localPath,lfn,targetSE,failoverSEList,fileGUID=None,fileCatalog=None):
"""Performs the transfer and register operation to failover storage and sets the
necessary replication and removal requests to recover.
"""
failover = self.transferAndRegisterFile(fileName,localPath,lfn,failoverSEList,fileGUID,fileCatalog)
if not failover['OK']:
self.log.error('Could not upload file to failover SEs',failover['Message'])
return failover
#set removal requests and replication requests
result = self.__setFileReplicationRequest(lfn,targetSE)
if not result['OK']:
self.log.error('Could not set file replication request',result['Message'])
return result
lfn = failover['Value']['lfn']
failoverSE = failover['Value']['uploadedSE']
self.log.info('Attempting to set replica removal request for LFN %s at failover SE %s' %(lfn,failoverSE))
result = self.__setReplicaRemovalRequest(lfn,failoverSE)
if not result['OK']:
self.log.error('Could not set removal request',result['Message'])
return result
return S_OK('%s uploaded to a failover SE' %fileName)
#############################################################################
def getRequestObject(self):
"""Returns the potentially modified request object in order to propagate changes.
"""
return S_OK(self.request)
#############################################################################
def __setFileReplicationRequest(self,lfn,se):
""" Sets a registration request.
"""
self.log.info('Setting replication request for %s to %s' % (lfn,se))
#......... part of the code omitted here .........
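A minimal usage sketch for the FailoverTransfer class shown above, following the way the production modules in Examples 8 and 9 use it: an external RequestContainer is passed in so that any registration, replication or removal sub-requests created during failed transfers accumulate in the workflow's shared request. The import of FailoverTransfer is omitted because its module path is not shown here, and the file name, LFN, job ID and storage element names are illustrative only.

from DIRAC.RequestManagementSystem.Client.RequestContainer import RequestContainer

# FailoverTransfer is assumed to be importable from the workflow module defined above.
request = RequestContainer()
request.setRequestName( 'job_12345_request.xml' )
request.setJobID( 12345 )
request.setSourceComponent( 'Job_12345' )

failoverTransfer = FailoverTransfer( request )
result = failoverTransfer.transferAndRegisterFile( 'histos.root',
                                                   '/tmp/histos.root',
                                                   '/ilc/user/s/someone/histos.root',
                                                   [ 'CERN-SRM', 'RAL-USER' ] )
if result['OK']:
  # Retrieve the (possibly updated) request so that any failover sub-requests
  # added during the transfer are propagated back to the workflow.
  request = failoverTransfer.getRequestObject()['Value']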
Example 7: getRequest
# Required import: from DIRAC.RequestManagementSystem.Client.RequestContainer import RequestContainer [as alias]
# Or: from DIRAC.RequestManagementSystem.Client.RequestContainer.RequestContainer import setSourceComponent [as alias]
#......... part of the code omitted here .........
# RG: We need to check that all subRequest with smaller ExecutionOrder are "Done"
if subIDList:
# We managed to get some requests, can continue now
requestID = reqID
break
# Haven't succeeded to get any request
if not requestID:
return S_OK()
dmRequest.setRequestID( requestID )
# RG: We have this list in subIDList, can different queries get part of the subrequets of the same type?
subRequestIDs = []
for subRequestID, operation, arguments, executionOrder, sourceSE, targetSE, catalogue, creationTime, submissionTime, lastUpdate in res['Value']:
if not subRequestID in subIDList: continue
subRequestIDs.append( subRequestID )
# RG: res['Value'] is the range of the loop and it gets redefined here !!!!!!
res = dmRequest.initiateSubRequest( requestType )
ind = res['Value']
subRequestDict = {
'Status' : 'Waiting',
'SubRequestID' : subRequestID,
'Operation' : operation,
'Arguments' : arguments,
'ExecutionOrder': int( executionOrder ),
'SourceSE' : sourceSE,
'TargetSE' : targetSE,
'Catalogue' : catalogue,
'CreationTime' : creationTime,
'SubmissionTime': submissionTime,
'LastUpdate' : lastUpdate
}
res = dmRequest.setSubRequestAttributes( ind, requestType, subRequestDict )
if not res['OK']:
err = 'RequestDB._getRequest: Failed to set subRequest attributes for RequestID %s' % requestID
self.__releaseSubRequests( requestID, subRequestIDs )
return S_ERROR( '%s\n%s' % ( err, res['Message'] ) )
req = "SELECT FileID,LFN,Size,PFN,GUID,Md5,Addler,Attempt,Status \
from Files WHERE SubRequestID = %s ORDER BY FileID;" % subRequestID
res = self._query( req )
if not res['OK']:
err = 'RequestDB._getRequest: Failed to get File attributes for RequestID %s.%s' % ( requestID, subRequestID )
self.__releaseSubRequests( requestID, subRequestIDs )
return S_ERROR( '%s\n%s' % ( err, res['Message'] ) )
files = []
for fileID, lfn, size, pfn, guid, md5, addler, attempt, status in res['Value']:
fileDict = {'FileID':fileID, 'LFN':lfn, 'Size':size, 'PFN':pfn, 'GUID':guid, 'Md5':md5, 'Addler':addler, 'Attempt':attempt, 'Status':status}
files.append( fileDict )
res = dmRequest.setSubRequestFiles( ind, requestType, files )
if not res['OK']:
err = 'RequestDB._getRequest: Failed to set files into Request for RequestID %s.%s' % ( requestID, subRequestID )
self.__releaseSubRequests( requestID, subRequestIDs )
return S_ERROR( '%s\n%s' % ( err, res['Message'] ) )
req = "SELECT Dataset,Status FROM Datasets WHERE SubRequestID = %s;" % subRequestID
res = self._query( req )
if not res['OK']:
err = 'RequestDB._getRequest: Failed to get Datasets for RequestID %s.%s' % ( requestID, subRequestID )
self.__releaseSubRequests( requestID, subRequestIDs )
return S_ERROR( '%s\n%s' % ( err, res['Message'] ) )
datasets = []
for dataset, status in res['Value']:
datasets.append( dataset )
res = dmRequest.setSubRequestDatasets( ind, requestType, datasets )
if not res['OK']:
err = 'RequestDB._getRequest: Failed to set datasets into Request for RequestID %s.%s' % ( requestID, subRequestID )
self.__releaseSubRequests( requestID, subRequestIDs )
return S_ERROR( '%s\n%s' % ( err, res['Message'] ) )
req = "SELECT RequestName,JobID,OwnerDN,OwnerGroup,DIRACSetup,SourceComponent,CreationTime,SubmissionTime,LastUpdate from Requests WHERE RequestID = %s;" % requestID
res = self._query( req )
if not res['OK']:
err = 'RequestDB._getRequest: Failed to retrieve max RequestID'
self.__releaseSubRequests( requestID, subRequestIDs )
return S_ERROR( '%s\n%s' % ( err, res['Message'] ) )
requestName, jobID, ownerDN, ownerGroup, diracSetup, sourceComponent, creationTime, submissionTime, lastUpdate = res['Value'][0]
dmRequest.setRequestName( requestName )
dmRequest.setJobID( jobID )
dmRequest.setOwnerDN( ownerDN )
dmRequest.setOwnerGroup( ownerGroup )
dmRequest.setDIRACSetup( diracSetup )
dmRequest.setSourceComponent( sourceComponent )
dmRequest.setCreationTime( str( creationTime ) )
dmRequest.setLastUpdate( str( lastUpdate ) )
res = dmRequest.toXML()
if not res['OK']:
err = 'RequestDB._getRequest: Failed to create XML for RequestID %s' % ( requestID )
self.__releaseSubRequests( requestID, subRequestIDs )
return S_ERROR( '%s\n%s' % ( err, res['Message'] ) )
requestString = res['Value']
#still have to manage the status of the dataset properly
resultDict = {}
resultDict['RequestName'] = requestName
resultDict['RequestString'] = requestString
resultDict['JobID'] = jobID
return S_OK( resultDict )
Example 8: HandleProdOutputData
# Required import: from DIRAC.RequestManagementSystem.Client.RequestContainer import RequestContainer [as alias]
# Or: from DIRAC.RequestManagementSystem.Client.RequestContainer.RequestContainer import setSourceComponent [as alias]
class HandleProdOutputData(ModuleBase):
def __init__(self):
super(HandleProdOutputData, self).__init__()
self.result = S_ERROR()
self.fc = FileCatalogClient()
self.rm = ReplicaManager()
self.destination = ''
self.basepath = '/ilc/prod'
self.dataattributes = ''
self.attributesdict = {}
self.generatorfile = ''
self.mokkafile = ''
self.marlinfiles = ''
self.slicfile = ''
self.lcsimfiles = ''
self.request = None
self.failoverTest=False
self.log = gLogger.getSubLogger( "HandleOutputData" )
def applicationSpecificInputs(self):
""" Resolve all input variables for the module here.
@return: S_OK()
"""
if self.step_commons.has_key('DataAttributes'):
self.dataattributes = self.step_commons['DataAttributes']
else:
return S_ERROR('No data attributes found, cannot proceed with registration in Catalog, ABORT!')
for attribute in self.dataattributes.split(";"):
if self.step_commons.has_key(attribute):
self.attributesdict[attribute] = self.step_commons[attribute]
if self.step_commons.has_key("destination"):
self.destination = self.step_commons['destination']
if self.step_commons.has_key('GENFile'):
self.generatorfile = self.step_commons['GENFile']
if self.step_commons.has_key('MokkaFile'):
self.mokkafile = self.step_commons['MokkaFile']
if self.step_commons.has_key('MarlinFiles'):
self.marlinfiles = self.step_commons['MarlinFiles'].split(';')
if self.step_commons.has_key('SLICFile'):
self.slicfile = self.step_commons['SLICFile']
if self.step_commons.has_key('LCSIMFiles'):
self.lcsimfiles = self.step_commons['LCSIMFiles'].split(';')
if self.workflow_commons.has_key('Request'):
self.request = self.workflow_commons['Request']
else:
self.request = RequestContainer()
self.request.setRequestName('job_%s_request.xml' % self.jobID)
self.request.setJobID(self.jobID)
self.request.setSourceComponent("Job_%s" % self.jobID)
return S_OK('Parameters resolved')
def execute(self):
#Have to work out if the module is part of the last step i.e.
#user jobs can have any number of steps and we only want
#to run the finalization once.
currentStep = int(self.step_commons['STEP_NUMBER'])
totalSteps = int(self.workflow_commons['TotalSteps'])
if currentStep==totalSteps:
self.lastStep=True
else:
self.log.verbose('Current step = %s, total steps of workflow = %s, HandleProdOutputData will enable itself only at the last workflow step.' %(currentStep,totalSteps))
if not self.lastStep:
return S_OK()
self.result =self.resolveInputVariables()
if not self.result['OK']:
self.log.error(self.result['Message'])
return self.result
###Instantiate object that will ensure that the files are registered properly
failoverTransfer = FailoverTransfer(self.request)
datatohandle = {}
if self.generatorfile:
if not os.path.exists(self.generatorfile):
return S_ERROR("File %s does not exist, something went wrong before !"%(self.generatorfile))
self.attributesdict['DataType'] = 'gen'
lfnpath = string.join([self.basepath,self.attributesdict['Machine'],self.attributesdict['Energy'],
self.attributesdict['DataType'],self.attributesdict['EvtType'],self.attributesdict['ProdID'],
self.generatorfile],"/")
datatohandle[self.generatorfile]={'lfn':lfnpath,'type':'gen','workflowSE':self.destination}
if self.mokkafile or self.slicfile:
recofile = ''
if self.mokkafile and not os.path.exists(self.mokkafile):
return S_ERROR("File %s does not exist, something went wrong before !"%(self.mokkafile))
else:
recofile = self.mokkafile
if self.slicfile and not os.path.exists(self.slicfile):
return S_ERROR("File %s does not exist, something went wrong before !"%(self.slicfile))
else:
recofile = self.slicfile
self.attributesdict['DataType'] = 'SIM'
lfnpath = string.join([self.basepath,self.attributesdict['Machine'],self.attributesdict['Energy'],
self.attributesdict['DetectorModel'],self.attributesdict['DataType'],self.attributesdict['EvtType'],
self.attributesdict['ProdID'],recofile],"/")
#......... part of the code omitted here .........
Example 9: UserJobFinalization
# Required import: from DIRAC.RequestManagementSystem.Client.RequestContainer import RequestContainer [as alias]
# Or: from DIRAC.RequestManagementSystem.Client.RequestContainer.RequestContainer import setSourceComponent [as alias]
class UserJobFinalization(ModuleBase):
""" User Job finalization: takes care of uploading the output data to the specified storage elements
If it does not work, it will upload to a Failover SE, then register the request to replicate and remove.
"""
#############################################################################
def __init__(self):
"""Module initialization.
"""
super(UserJobFinalization, self).__init__()
self.version = __RCSID__
self.log = gLogger.getSubLogger( "UserJobFinalization" )
self.enable = True
self.failoverTest = False #flag to put file to failover SE by default
self.defaultOutputSE = gConfig.getValue( '/Resources/StorageElementGroups/Tier1-USER', [])
self.failoverSEs = gConfig.getValue('/Resources/StorageElementGroups/Tier1-Failover', [])
#List all parameters here
self.userFileCatalog = self.ops.getValue('/UserJobs/Catalogs', ['FileCatalog'] )
self.request = None
self.lastStep = False
#Always allow any files specified by users
self.outputDataFileMask = ''
self.userOutputData = ''
self.userOutputSE = ''
self.userOutputPath = ''
self.jobReport = None
#############################################################################
def applicationSpecificInputs(self):
""" By convention the module parameters are resolved here.
"""
self.log.verbose(self.workflow_commons)
self.log.verbose(self.step_commons)
#Earlier modules may have populated the report objects
if self.workflow_commons.has_key('JobReport'):
self.jobReport = self.workflow_commons['JobReport']
if self.step_commons.has_key('Enable'):
self.enable = self.step_commons['Enable']
if not type(self.enable) == type(True):
self.log.warn('Enable flag set to non-boolean value %s, setting to False' %self.enable)
self.enable = False
if self.step_commons.has_key('TestFailover'):
self.enable = self.step_commons['TestFailover']
if not type(self.failoverTest) == type(True):
self.log.warn('Test failover flag set to non-boolean value %s, setting to False' % self.failoverTest)
self.failoverTest = False
if os.environ.has_key('JOBID'):
self.jobID = os.environ['JOBID']
self.log.verbose('Found WMS JobID = %s' % self.jobID)
else:
self.log.info('No WMS JobID found, disabling module via control flag')
self.enable = False
if self.workflow_commons.has_key('Request'):
self.request = self.workflow_commons['Request']
else:
self.request = RequestContainer()
self.request.setRequestName('job_%s_request.xml' % self.jobID)
self.request.setJobID(self.jobID)
self.request.setSourceComponent("Job_%s" % self.jobID)
#Use LHCb utility for local running via dirac-jobexec
if self.workflow_commons.has_key('UserOutputData'):
self.userOutputData = self.workflow_commons['UserOutputData']
if not type(self.userOutputData) == type([]):
self.userOutputData = [i.strip() for i in self.userOutputData.split(';')]
if self.workflow_commons.has_key('UserOutputSE'):
specifiedSE = self.workflow_commons['UserOutputSE']
if not type(specifiedSE) == type([]):
self.userOutputSE = [i.strip() for i in specifiedSE.split(';')]
else:
self.log.verbose('No UserOutputSE specified, using default value: %s' % (string.join(self.defaultOutputSE,
', ')))
self.userOutputSE = self.defaultOutputSE
if self.workflow_commons.has_key('UserOutputPath'):
self.userOutputPath = self.workflow_commons['UserOutputPath']
return S_OK('Parameters resolved')
#############################################################################
def execute(self):
""" Main execution function.
"""
#Have to work out if the module is part of the last step i.e.
#user jobs can have any number of steps and we only want
#to run the finalization once.
currentStep = int(self.step_commons['STEP_NUMBER'])
totalSteps = int(self.workflow_commons['TotalSteps'])
if currentStep == totalSteps:
self.lastStep = True
else:
self.log.verbose('Current step = %s, total steps of workflow = %s, UserJobFinalization will enable itself only \
at the last workflow step.' % (currentStep, totalSteps))
if not self.lastStep:
#......... part of the code omitted here .........
Example 10: UploadLogFile
# Required import: from DIRAC.RequestManagementSystem.Client.RequestContainer import RequestContainer [as alias]
# Or: from DIRAC.RequestManagementSystem.Client.RequestContainer.RequestContainer import setSourceComponent [as alias]
class UploadLogFile(ModuleBase):
""" Handle log file uploads in the production jobs
"""
#############################################################################
def __init__(self):
"""Module initialization.
"""
super(UploadLogFile, self).__init__()
self.version = __RCSID__
self.log = gLogger.getSubLogger("UploadLogFile")
self.PRODUCTION_ID = None
self.JOB_ID = None
self.workflow_commons = None
self.request = None
self.logFilePath = ""
self.logLFNPath = ""
self.logdir = ""
self.logSE = self.ops.getValue("/LogStorage/LogSE", "LogSE")
self.root = gConfig.getValue("/LocalSite/Root", os.getcwd())
self.logSizeLimit = self.ops.getValue("/LogFiles/SizeLimit", 20 * 1024 * 1024)
self.logExtensions = []
self.failoverSEs = gConfig.getValue("/Resources/StorageElementGroups/Tier1-Failover", [])
self.diracLogo = self.ops.getValue(
"/SAM/LogoURL", "https://lhcbweb.pic.es/DIRAC/images/logos/DIRAC-logo-transp.png"
)
self.rm = ReplicaManager()
self.experiment = "CLIC"
self.enable = True
self.failoverTest = False # flag to put log files to failover by default
self.jobID = ""
######################################################################
def applicationSpecificInputs(self):
if self.step_commons.has_key("Enable"):
self.enable = self.step_commons["Enable"]
if not type(self.enable) == type(True):
self.log.warn("Enable flag set to non-boolean value %s, setting to False" % self.enable)
self.enable = False
if self.step_commons.has_key("TestFailover"):
self.enable = self.step_commons["TestFailover"]
if not type(self.failoverTest) == type(True):
self.log.warn("Test failover flag set to non-boolean value %s, setting to False" % self.failoverTest)
self.failoverTest = False
if os.environ.has_key("JOBID"):
self.jobID = os.environ["JOBID"]
self.log.verbose("Found WMS JobID = %s" % self.jobID)
else:
self.log.info("No WMS JobID found, disabling module via control flag")
self.enable = False
if self.workflow_commons.has_key("LogFilePath") and self.workflow_commons.has_key("LogTargetPath"):
self.logFilePath = self.workflow_commons["LogFilePath"]
self.logLFNPath = self.workflow_commons["LogTargetPath"]
else:
self.log.info("LogFilePath parameter not found, creating on the fly")
result = getLogPath(self.workflow_commons)
if not result["OK"]:
self.log.error("Could not create LogFilePath", result["Message"])
return result
self.logFilePath = result["Value"]["LogFilePath"][0]
self.logLFNPath = result["Value"]["LogTargetPath"][0]
if not type(self.logFilePath) == type(" "):
self.logFilePath = self.logFilePath[0]
if not type(self.logLFNPath) == type(" "):
self.logLFNPath = self.logLFNPath[0]
example_file = self.logFilePath
if "/ilc/prod/clic" in example_file:
self.experiment = "CLIC"
elif "/ilc/prod/ilc/sid" in example_file:
self.experiment = "ILC_SID"
elif "/ilc/prod/ilc/mc-dbd" in example_file:
self.experiment = "ILC_ILD"
else:
self.log.warn("Failed to determine experiment, reverting to default: %s" % self.experiment)
if self.workflow_commons.has_key("Request"):
self.request = self.workflow_commons["Request"]
else:
self.request = RequestContainer()
self.request.setRequestName("job_%s_request.xml" % self.jobID)
self.request.setJobID(self.jobID)
self.request.setSourceComponent("Job_%s" % self.jobID)
return S_OK("Parameters resolved")
######################################################################
def execute(self):
""" Main execution method
"""
self.log.info("Initializing %s" % self.version)
# Add global reporting tool
self.resolveInputVariables()
#......... part of the code omitted here .........
Example 11: UploadOutputData
# Required import: from DIRAC.RequestManagementSystem.Client.RequestContainer import RequestContainer [as alias]
# Or: from DIRAC.RequestManagementSystem.Client.RequestContainer.RequestContainer import setSourceComponent [as alias]
class UploadOutputData(ModuleBase):
""" As name suggest: upload output data. For Production only: See L{UserJobFinalization} for User job upload.
"""
#############################################################################
def __init__(self):
"""Module initialization.
"""
super(UploadOutputData, self).__init__()
self.version = __RCSID__
self.log = gLogger.getSubLogger( "UploadOutputData" )
self.commandTimeOut = 10*60
self.enable = True
self.failoverTest = False #flag to put file to failover SE by default
self.failoverSEs = gConfig.getValue('/Resources/StorageElementGroups/Tier1-Failover', [])
self.ops = Operations()
#List all parameters here
self.outputDataFileMask = ''
self.outputMode = 'Any' #or 'Local' for reco case
self.outputList = []
self.request = None
self.PRODUCTION_ID = ""
self.prodOutputLFNs = []
self.experiment = "CLIC"
#############################################################################
def applicationSpecificInputs(self):
""" By convention the module parameters are resolved here.
"""
self.log.verbose("Workflow commons:")
self.log.verbose(self.workflow_commons)
self.log.verbose("Step commons:")
self.log.verbose(self.step_commons)
if self.step_commons.has_key('Enable'):
self.enable = self.step_commons['Enable']
if not type(self.enable) == type(True):
self.log.warn('Enable flag set to non-boolean value %s, setting to False' % self.enable)
self.enable = False
if self.step_commons.has_key('TestFailover'):
self.enable = self.step_commons['TestFailover']
if not type(self.failoverTest) == type(True):
self.log.warn('Test failover flag set to non-boolean value %s, setting to False' % self.failoverTest)
self.failoverTest = False
if self.workflow_commons.has_key("PRODUCTION_ID"):
self.PRODUCTION_ID = self.workflow_commons["PRODUCTION_ID"]
if os.environ.has_key('JOBID'):
self.log.verbose('Found WMS JobID = %s' % self.jobID)
else:
self.log.info('No WMS JobID found, disabling module via control flag')
self.enable = False
if self.workflow_commons.has_key('Request'):
self.request = self.workflow_commons['Request']
else:
self.request = RequestContainer()
self.request.setRequestName('job_%s_request.xml' % self.jobID)
self.request.setJobID(self.jobID)
self.request.setSourceComponent("Job_%s" % self.jobID)
##This is the thing that is used to establish the list of outpufiles to treat:
## Make sure that all that is in the : "listoutput" and also in the ProductionData
## is treated properly. Needed as whatever is in listoutput does not contain any reference to the
## prodID and task ID. Also if for some reason a step failed, then the corresponding data will not be there
if self.workflow_commons.has_key('outputList'):
self.outputList = self.workflow_commons['outputList']
if self.workflow_commons.has_key('ProductionOutputData'):
proddata = self.workflow_commons['ProductionOutputData'].split(";")
self.log.verbose("prod data : %s" % proddata )
olist = {}
for obj in self.outputList:
fname_in_outputlist = obj['outputFile'].lower()
extension = ''
if fname_in_outputlist.count("_sim") or fname_in_outputlist.count("_rec") or fname_in_outputlist.count("_dst"):
extension = ".slcio"
elif fname_in_outputlist.count("_gen"):
extension = ".stdhep"
fname_in_outputlist = fname_in_outputlist.replace(extension,"")
for prodfile in proddata:
prodfile = os.path.basename(prodfile)
extension = ''
if prodfile.count("_sim") or prodfile.count("_rec") or prodfile.count("_dst"):
extension = ".slcio"
elif prodfile.count("_gen"):
extension = ".stdhep"
prodfile = prodfile.replace(extension,"")
if olist.has_key(prodfile):
## This has already been treated, no need to come back to it.
continue
appdict = {}
if (fname_in_outputlist.count("_gen")):# and prodfile.lower().count("_gen_")) :
genf = obj['outputFile'].split("_gen")[0]
genf += "_gen"
if (prodfile.count(genf)):
appdict.update(obj)
appdict['outputFile'] = prodfile+extension
olist[prodfile] = appdict
if (fname_in_outputlist.count("_sim")):
#......... part of the code omitted here .........
Example 12: FailoverRequest
# Required import: from DIRAC.RequestManagementSystem.Client.RequestContainer import RequestContainer [as alias]
# Or: from DIRAC.RequestManagementSystem.Client.RequestContainer.RequestContainer import setSourceComponent [as alias]
class FailoverRequest(ModuleBase):
""" Handle the failover requests issued by previous steps. Used in production.
"""
#############################################################################
def __init__(self):
"""Module initialization.
"""
super(FailoverRequest, self).__init__()
self.version = __RCSID__
self.log = gLogger.getSubLogger( "FailoverRequest" )
#Internal parameters
self.enable = True
self.jobID = ''
self.productionID = None
self.prodJobID = None
#Workflow parameters
self.jobReport = None
self.fileReport = None
self.request = None
#############################################################################
def applicationSpecificInputs(self):
""" By convention the module input parameters are resolved here.
"""
self.log.debug(self.workflow_commons)
self.log.debug(self.step_commons)
if os.environ.has_key('JOBID'):
self.jobID = os.environ['JOBID']
self.log.verbose('Found WMS JobID = %s' %self.jobID)
else:
self.log.info('No WMS JobID found, disabling module via control flag')
self.enable = False
if self.step_commons.has_key('Enable'):
self.enable = self.step_commons['Enable']
if not type(self.enable) == type(True):
self.log.warn('Enable flag set to non-boolean value %s, setting to False' % self.enable)
self.enable = False
#Earlier modules will have populated the report objects
if self.workflow_commons.has_key('JobReport'):
self.jobReport = self.workflow_commons['JobReport']
if self.workflow_commons.has_key('FileReport'):
self.fileReport = self.workflow_commons['FileReport']
if self.InputData:
if type(self.InputData) != type([]):
self.InputData = self.InputData.split(';')
self.InputData = [x.replace('LFN:','') for x in self.InputData]
if self.workflow_commons.has_key('Request'):
self.request = self.workflow_commons['Request']
if not self.request:
self.request = RequestContainer()
self.request.setRequestName('job_%s_request.xml' % self.jobID)
self.request.setJobID(self.jobID)
self.request.setSourceComponent("Job_%s" % self.jobID)
if self.workflow_commons.has_key('PRODUCTION_ID'):
self.productionID = self.workflow_commons['PRODUCTION_ID']
if self.workflow_commons.has_key('JOB_ID'):
self.prodJobID = self.workflow_commons['JOB_ID']
return S_OK('Parameters resolved')
#############################################################################
def execute(self):
""" Main execution function.
"""
self.log.info('Initializing %s' % self.version)
result = self.resolveInputVariables()
if not result['OK']:
self.log.error(result['Message'])
return result
if not self.fileReport:
self.fileReport = FileReport('Transformation/TransformationManager')
if self.InputData:
inputFiles = self.fileReport.getFiles()
for lfn in self.InputData:
if not lfn in inputFiles:
self.log.verbose('No status populated for input data %s, setting to "Unused"' % lfn)
result = self.fileReport.setFileStatus(int(self.productionID), lfn, 'Unused')
if not self.workflowStatus['OK'] or not self.stepStatus['OK']:
self.log.info('Workflow status = %s, step status = %s' %(self.workflowStatus['OK'], self.stepStatus['OK']))
inputFiles = self.fileReport.getFiles()
for lfn in inputFiles:
if inputFiles[lfn] != 'ApplicationCrash':
self.log.info('Forcing status to "Unused" due to workflow failure for: %s' % (lfn))
self.fileReport.setFileStatus(int(self.productionID), lfn, 'Unused')
else:
inputFiles = self.fileReport.getFiles()
if inputFiles:
self.log.info('Workflow status OK, setting input file status to Processed')
#......... part of the code omitted here .........
Example 13: getRequestForSubRequest
# Required import: from DIRAC.RequestManagementSystem.Client.RequestContainer import RequestContainer [as alias]
# Or: from DIRAC.RequestManagementSystem.Client.RequestContainer.RequestContainer import setSourceComponent [as alias]
def getRequestForSubRequest(self, itself, subRequestID ):
"""
Select Request given SubRequestID.
:param self: plugin reference
:param itself: patient reference for injection
:param int subRequestID: SubRequests.SubRequestID
:warn: function has to be injected to RequestDBMySQL instance
"""
## get RequestID
requestID = "SELECT RequestID FROM SubRequests WHERE SubRequestID = %s;" % str(subRequestID)
requestID = self._query( requestID )
if not requestID["OK"]:
return requestID
requestID = requestID["Value"][0]
## create RequestContainer
requestContainer = RequestContainer( init = False )
requestContainer.setRequestID( requestID )
## put some basic infos in
requestInfo = "SELECT RequestName, JobID, OwnerDN, OwnerGroup, DIRACSetup, SourceComponent, CreationTime, SubmissionTime, LastUpdate, Status "
requestInfo += "FROM Requests WHERE RequestID = %d;" % requestID
requestInfo = self._query( requestInfo )
if not requestInfo["OK"]:
return requestInfo
requestName, jobID, ownerDN, ownerGroup, diracSetup, sourceComponent, creationTime, submissionTime, lastUpdate, status = requestInfo['Value'][0]
requestContainer.setRequestName( requestName )
requestContainer.setJobID( jobID )
requestContainer.setOwnerDN( ownerDN )
requestContainer.setOwnerGroup( ownerGroup )
requestContainer.setDIRACSetup( diracSetup )
requestContainer.setSourceComponent( sourceComponent )
requestContainer.setCreationTime( str( creationTime ) )
requestContainer.setLastUpdate( str( lastUpdate ) )
requestContainer.setStatus( status )
## get sub-requests
subRequests = "SELECT SubRequestID, Status, RequestType, Operation, Arguments, ExecutionOrder, SourceSE, "
subRequests += "TargetSE, Catalogue, CreationTime, SubmissionTime, LastUpdate FROM SubRequests WHERE RequestID=%s;" % requestID
subRequests = self._query( subRequests )
if not subRequests["OK"]:
return subRequests
## loop over sub requests
for subRequestID, status, requestType, operation, arguments, executionOrder, sourceSE, targetSE, catalogue, creationTime, submissionTime, lastUpdate in subRequests["Value"]:
res = requestContainer.initiateSubRequest( requestType )
ind = res["Value"]
subRequestDict = { "Status" : status, "SubRequestID" : subRequestID, "Operation" : operation, "Arguments" : arguments,
"ExecutionOrder" : int( executionOrder ), "SourceSE" : sourceSE, "TargetSE" : targetSE,
"Catalogue" : catalogue, "CreationTime" : creationTime, "SubmissionTime" : submissionTime,
"LastUpdate" : lastUpdate }
res = requestContainer.setSubRequestAttributes( ind, requestType, subRequestDict )
if not res["OK"]:
return res
## get files for this subrequest
req = "SELECT FileID, LFN, Size, PFN, GUID, Md5, Addler, Attempt, Status FROM Files WHERE SubRequestID = %s ORDER BY FileID;" % str(subRequestID)
res = self._query( req )
if not res["OK"]:
return res
files = []
for fileID, lfn, size, pfn, guid, md5, addler, attempt, status in res["Value"]:
fileDict = { "FileID" : fileID, "LFN" : lfn, "Size" : size,
"PFN" : pfn, "GUID" : guid, "Md5" : md5,
"Addler" : addler, "Attempt" : attempt,
"Status" : status }
files.append( fileDict )
res = requestContainer.setSubRequestFiles( ind, requestType, files )
if not res["OK"]:
return res
## dump request to XML
res = requestContainer.toXML()
if not res["OK"]:
return res
requestString = res["Value"]
## return dictonary with all info in at least
return S_OK( {
"RequestName" : requestName,
"RequestString" : requestString,
"JobID" : jobID,
"RequestContainer" : requestContainer
} )
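Since the docstring above warns that getRequestForSubRequest has to be injected into a RequestDBMySQL instance, the following sketch shows one plausible way to wire that up with types.MethodType. This is only an assumption about the plugin mechanism: the RequestDBMySQL import path corresponds to the legacy DIRAC layout these examples come from, and the sub-request ID is illustrative.

import types
from DIRAC.RequestManagementSystem.DB.RequestDBMySQL import RequestDBMySQL

# `getRequestForSubRequest` is assumed here to be the plain function object
# defined above (e.g. taken from the plugin module that provides it).
requestDB = RequestDBMySQL()
# Bind the plugin function to the live DB object so that `self` inside
# getRequestForSubRequest resolves to requestDB; the DB object is passed again
# as the `itself` (patient) argument required by the signature.
requestDB.getRequestForSubRequest = types.MethodType( getRequestForSubRequest, requestDB )

result = requestDB.getRequestForSubRequest( requestDB, 2259916 )
if result['OK']:
  requestContainer = result['Value']['RequestContainer']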