This article collects typical usage examples of the Python method DIRAC.Resources.Catalog.FileCatalogClient.FileCatalogClient.findDirectoriesByMetadata. If you are wondering how FileCatalogClient.findDirectoriesByMetadata is used in practice, the curated examples below should help. You can also look further into the containing class, DIRAC.Resources.Catalog.FileCatalogClient.FileCatalogClient.
The following shows 9 code examples of the FileCatalogClient.findDirectoriesByMetadata method, ordered by popularity by default.
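Before the extracted examples, here is a minimal, self-contained sketch of a direct call to findDirectoriesByMetadata. It is only an illustration: the metadata keys and values used here ('TransformationID', 'Datatype', the numbers) are assumptions and must be replaced by directory metadata fields that actually exist in your catalog, and the calls assume an initialised DIRAC client environment with a valid proxy.

from DIRAC.Core.Base import Script
Script.parseCommandLine()  # initialise the DIRAC configuration before creating any client

from DIRAC import gLogger
from DIRAC.Resources.Catalog.FileCatalogClient import FileCatalogClient

fc = FileCatalogClient()
# Illustrative query: both keys must be registered as directory metadata fields.
res = fc.findDirectoriesByMetadata({'TransformationID': 1234, 'Datatype': 'SIM'})
if not res['OK']:
  gLogger.error("findDirectoriesByMetadata failed", res['Message'])
else:
  # Depending on the DIRAC version, res['Value'] is either a plain list of
  # directory paths or a dict mapping directory IDs to paths (both forms appear
  # in the examples below).
  dirs = res['Value'].values() if isinstance(res['Value'], dict) else res['Value']
  for path in dirs:
    gLogger.notice(path)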
Example 1: TransformationCleaningAgent
# Required import: from DIRAC.Resources.Catalog.FileCatalogClient import FileCatalogClient [as alias]
# Or: from DIRAC.Resources.Catalog.FileCatalogClient.FileCatalogClient import findDirectoriesByMetadata [as alias]
#......... part of the code omitted here .........
                                                timeStamp = 'LastUpdate' )
    if res['OK']:
      for transDict in res['Value']:
        res = self.archiveTransformation( transDict['TransformationID'] )
        if not res['OK']:
          self.log.error( "Problems archiving transformation %s: %s" % ( transDict['TransformationID'],
                                                                         res['Message'] ) )
    else:
      self.log.error( "Could not get the transformations" )
    return S_OK()

  #############################################################################
  #
  # Get the transformation directories for checking
  #

  def getTransformationDirectories( self, transID ):
    ''' get the directories for the supplied transformation from the transformation system
    :param self: self reference
    :param int transID: transformation ID
    '''
    directories = []
    if 'TransformationDB' in self.directoryLocations:
      res = self.transClient.getTransformationParameters( transID, ['OutputDirectories'] )
      if not res['OK']:
        self.log.error( "Failed to obtain transformation directories", res['Message'] )
        return res
      transDirectories = res['Value'].splitlines()
      directories = self._addDirs( transID, transDirectories, directories )
    if 'MetadataCatalog' in self.directoryLocations:
      res = self.metadataClient.findDirectoriesByMetadata( {self.transfidmeta:transID} )
      if not res['OK']:
        self.log.error( "Failed to obtain metadata catalog directories", res['Message'] )
        return res
      transDirectories = res['Value']
      directories = self._addDirs( transID, transDirectories, directories )
    if not directories:
      self.log.info( "No output directories found" )
    directories = sortList( directories )
    return S_OK( directories )

  @classmethod
  def _addDirs( self, transID, newDirs, existingDirs ):
    ''' append unique :newDirs: list to :existingDirs: list
    :param self: self reference
    :param int transID: transformationID
    :param list newDirs: src list of paths
    :param list existingDirs: dest list of paths
    '''
    for folder in newDirs:
      transStr = str( transID ).zfill( 8 )
      if re.search( transStr, str( folder ) ):
        if not folder in existingDirs:
          existingDirs.append( folder )
    return existingDirs

  #############################################################################
  #
  # These are the methods for performing the cleaning of catalogs and storage
  #
Example 2: ValidateOutputDataAgent
# Required import: from DIRAC.Resources.Catalog.FileCatalogClient import FileCatalogClient [as alias]
# Or: from DIRAC.Resources.Catalog.FileCatalogClient.FileCatalogClient import findDirectoriesByMetadata [as alias]
#......... part of the code omitted here .........
      transID = transDict['TransformationID']
      gLogger.info( "-" * 40 )
      res = self.integrityClient.getTransformationProblematics( int( transID ) )
      if not res['OK']:
        gLogger.error( "Failed to determine waiting problematics for transformation", res['Message'] )
      elif not res['Value']:
        res = self.transClient.setTransformationParameter( transID, 'Status', 'ValidatedOutput' )
        if not res['OK']:
          gLogger.error( "Failed to update status of transformation %s to ValidatedOutput" % ( transID ) )
        else:
          gLogger.info( "Updated status of transformation %s to ValidatedOutput" % ( transID ) )
      else:
        gLogger.info( "%d problematic files for transformation %s were found" % ( len( res['Value'] ), transID ) )
    return

  #############################################################################
  #
  # Get the transformation directories for checking
  #

  def getTransformationDirectories( self, transID ):
    """ Get the directories for the supplied transformation from the transformation system
    """
    directories = []
    if 'TransformationDB' in self.directoryLocations:
      res = self.transClient.getTransformationParameters( transID, ['OutputDirectories'] )
      if not res['OK']:
        gLogger.error( "Failed to obtain transformation directories", res['Message'] )
        return res
      transDirectories = res['Value'].splitlines()
      directories = self._addDirs( transID, transDirectories, directories )
    if 'MetadataCatalog' in self.directoryLocations:
      res = self.fileCatalogClient.findDirectoriesByMetadata( {self.transfidmeta:transID} )
      if not res['OK']:
        gLogger.error( "Failed to obtain metadata catalog directories", res['Message'] )
        return res
      transDirectories = res['Value']
      directories = self._addDirs( transID, transDirectories, directories )
    if not directories:
      gLogger.info( "No output directories found" )
    directories = sorted( directories )
    return S_OK( directories )

  @staticmethod
  def _addDirs( transID, newDirs, existingDirs ):
    for nDir in newDirs:
      transStr = str( transID ).zfill( 8 )
      if re.search( transStr, nDir ):
        if not nDir in existingDirs:
          existingDirs.append( nDir )
    return existingDirs

  #############################################################################

  def checkTransformationIntegrity( self, transID ):
    """ This method contains the real work
    """
    gLogger.info( "-" * 40 )
    gLogger.info( "Checking the integrity of transformation %s" % transID )
    gLogger.info( "-" * 40 )
    res = self.getTransformationDirectories( transID )
    if not res['OK']:
      return res
    directories = res['Value']
    if not directories:
Example 3: TransformationCleaningAgent
# Required import: from DIRAC.Resources.Catalog.FileCatalogClient import FileCatalogClient [as alias]
# Or: from DIRAC.Resources.Catalog.FileCatalogClient.FileCatalogClient import findDirectoriesByMetadata [as alias]
class TransformationCleaningAgent( AgentModule ):

  #############################################################################
  def initialize( self ):
    """Sets defaults """
    self.replicaManager = ReplicaManager()
    self.transClient = TransformationClient()
    self.wmsClient = WMSClient()
    self.requestClient = RequestClient()
    self.metadataClient = FileCatalogClient()
    self.storageUsageClient = StorageUsageClient()
    # This sets the Default Proxy to used as that defined under
    # /Operations/Shifter/DataManager
    # the shifterProxy option in the Configuration can be used to change this default.
    self.am_setOption( 'shifterProxy', 'DataManager' )
    self.transformationTypes = sortList( self.am_getOption( 'TransformationTypes', ['MCSimulation', 'DataReconstruction', 'DataStripping', 'MCStripping', 'Merge', 'Replication'] ) )
    gLogger.info( "Will consider the following transformation types: %s" % str( self.transformationTypes ) )
    self.directoryLocations = sortList( self.am_getOption( 'DirectoryLocations', ['TransformationDB', 'StorageUsage', 'MetadataCatalog'] ) )
    gLogger.info( "Will search for directories in the following locations: %s" % str( self.directoryLocations ) )
    self.transfidmeta = self.am_getOption( 'TransfIDMeta', "TransformationID" )
    gLogger.info( "Will use %s as metadata tag name for TransformationID" % self.transfidmeta )
    self.archiveAfter = self.am_getOption( 'ArchiveAfter', 7 ) # days
    gLogger.info( "Will archive Completed transformations after %d days" % self.archiveAfter )
    self.activeStorages = sortList( self.am_getOption( 'ActiveSEs', [] ) )
    gLogger.info( "Will check the following storage elements: %s" % str( self.activeStorages ) )
    self.logSE = self.am_getOption( 'TransformationLogSE', 'LogSE' )
    gLogger.info( "Will remove logs found on storage element: %s" % self.logSE )
    return S_OK()

  #############################################################################
  def execute( self ):
    """ The TransformationCleaningAgent execution method.
    """
    self.enableFlag = self.am_getOption( 'EnableFlag', 'True' )
    if not self.enableFlag == 'True':
      self.log.info( 'TransformationCleaningAgent is disabled by configuration option %s/EnableFlag' % ( self.section ) )
      return S_OK( 'Disabled via CS flag' )
    # Obtain the transformations in Cleaning status and remove any mention of the jobs/files
    res = self.transClient.getTransformations( {'Status':'Cleaning', 'Type':self.transformationTypes} )
    if res['OK']:
      for transDict in res['Value']:
        self.cleanTransformation( transDict['TransformationID'] )
    # Obtain the transformations in RemovingFiles status and (wait for it) removes the output files
    res = self.transClient.getTransformations( {'Status':'RemovingFiles', 'Type':self.transformationTypes} )
    if res['OK']:
      for transDict in res['Value']:
        self.removeTransformationOutput( transDict['TransformationID'] )
    # Obtain the transformations in Completed status and archive if inactive for X days
    olderThanTime = datetime.utcnow() - timedelta( days = self.archiveAfter )
    res = self.transClient.getTransformations( {'Status':'Completed', 'Type':self.transformationTypes}, older = olderThanTime )
    if res['OK']:
      for transDict in res['Value']:
        self.archiveTransformation( transDict['TransformationID'] )
    return S_OK()

  #############################################################################
  #
  # Get the transformation directories for checking
  #

  def getTransformationDirectories( self, transID ):
    """ Get the directories for the supplied transformation from the transformation system """
    directories = []
    if 'TransformationDB' in self.directoryLocations:
      res = self.transClient.getTransformationParameters( transID, ['OutputDirectories'] )
      if not res['OK']:
        gLogger.error( "Failed to obtain transformation directories", res['Message'] )
        return res
      transDirectories = res['Value'].splitlines()
      directories = self.__addDirs( transID, transDirectories, directories )
    if 'StorageUsage' in self.directoryLocations:
      res = self.storageUsageClient.getStorageDirectories( '', '', transID, [] )
      if not res['OK']:
        gLogger.error( "Failed to obtain storage usage directories", res['Message'] )
        return res
      transDirectories = res['Value']
      directories = self.__addDirs( transID, transDirectories, directories )
    if 'MetadataCatalog' in self.directoryLocations:
      res = self.metadataClient.findDirectoriesByMetadata( {self.transfidmeta:transID} )
      if not res['OK']:
        gLogger.error( "Failed to obtain metadata catalog directories", res['Message'] )
        return res
      transDirectories = res['Value']
      directories = self.__addDirs( transID, transDirectories, directories )
    if not directories:
      gLogger.info( "No output directories found" )
    directories = sortList( directories )
    return S_OK( directories )

  def __addDirs( self, transID, newDirs, existingDirs ):
    for dir in newDirs:
      transStr = str( transID ).zfill( 8 )
#......... part of the code omitted here .........
Example 4: TransformationCleaningAgent
# Required import: from DIRAC.Resources.Catalog.FileCatalogClient import FileCatalogClient [as alias]
# Or: from DIRAC.Resources.Catalog.FileCatalogClient.FileCatalogClient import findDirectoriesByMetadata [as alias]
#......... part of the code omitted here .........
  #############################################################################
  #
  # Get the transformation directories for checking
  #

  def getTransformationDirectories(self, transID):
    """ get the directories for the supplied transformation from the transformation system.
        These directories are used by removeTransformationOutput and cleanTransformation for removing output.

    :param self: self reference
    :param int transID: transformation ID
    """
    self.log.verbose("Cleaning Transformation directories of transformation %d" % transID)
    directories = []
    if 'TransformationDB' in self.directoryLocations:
      res = self.transClient.getTransformationParameters(transID, ['OutputDirectories'])
      if not res['OK']:
        self.log.error("Failed to obtain transformation directories", res['Message'])
        return res
      transDirectories = []
      if res['Value']:
        if not isinstance(res['Value'], list):
          try:
            transDirectories = ast.literal_eval(res['Value'])
          except BaseException:
            # It can happen if the res['Value'] is '/a/b/c' instead of '["/a/b/c"]'
            transDirectories.append(res['Value'])
        else:
          transDirectories = res['Value']
      directories = self._addDirs(transID, transDirectories, directories)

    if 'MetadataCatalog' in self.directoryLocations:
      res = self.metadataClient.findDirectoriesByMetadata({self.transfidmeta: transID})
      if not res['OK']:
        self.log.error("Failed to obtain metadata catalog directories", res['Message'])
        return res
      transDirectories = res['Value']
      directories = self._addDirs(transID, transDirectories, directories)

    if not directories:
      self.log.info("No output directories found")
    directories = sorted(directories)
    return S_OK(directories)

  @classmethod
  def _addDirs(cls, transID, newDirs, existingDirs):
    """ append unique :newDirs: list to :existingDirs: list

    :param self: self reference
    :param int transID: transformationID
    :param list newDirs: src list of paths
    :param list existingDirs: dest list of paths
    """
    for folder in newDirs:
      transStr = str(transID).zfill(8)
      if re.search(transStr, str(folder)):
        if folder not in existingDirs:
          existingDirs.append(os.path.normpath(folder))
    return existingDirs

  #############################################################################
  #
  # These are the methods for performing the cleaning of catalogs and storage
  #
Example 5: ProductionJob
# Required import: from DIRAC.Resources.Catalog.FileCatalogClient import FileCatalogClient [as alias]
# Or: from DIRAC.Resources.Catalog.FileCatalogClient.FileCatalogClient import findDirectoriesByMetadata [as alias]
#......... part of the code omitted here .........
self.finalMetaDict[path] = {"DetectorType" : application.detectortype}
path += '/'
elif self.detector:
path += self.detector
self.finalMetaDict[path] = {"DetectorType" : self.detector}
path += '/'
if not application.datatype and self.datatype:
application.datatype = self.datatype
path += application.datatype
self.finalMetaDict[path] = {'Datatype' : application.datatype}
LOG.info("Will store the files under", "%s" % path)
self.finalpaths.append(path)
extension = 'stdhep'
if application.datatype in ['SIM', 'REC']:
extension = 'slcio'
fname = self.basename + "_%s" % (application.datatype.lower()) + "." + extension
application.setOutputFile(fname, path)
self.basepath = path
res = self._updateProdParameters(application)
if not res['OK']:
return res
self.checked = True
return S_OK()
def _updateProdParameters(self, application):
""" Update the prod parameters stored in the production parameters visible from the web
"""
try:
self.prodparameters.update(application.prodparameters)
except Exception as x:
return S_ERROR("Exception: %r" % x )
if hasattr( application, 'extraCLIArguments' ) and application.extraCLIArguments:
self.prodparameters['extraCLIArguments'] = repr(application.extraCLIArguments)
return S_OK()
def _jobSpecificModules(self, application, step):
return application._prodjobmodules(step)
def getEnergyPath(self):
"""returns the energy path 250gev or 3tev or 1.4tev etc."""
energy = Decimal(str(self.energy))
tD = Decimal('1000.0')
unit = 'gev' if energy < tD else 'tev'
energy = energy if energy < tD else energy/tD
energyPath = ("%1.2f" % energy).rstrip('0').rstrip('.')
energyPath = energyPath+unit+'/'
LOG.info("Energy path is: ", energyPath)
return energyPath
def _checkMetaKeys( self, metakeys, extendFileMeta=False ):
""" check if metadata keys are allowed to be metadata
:param list metakeys: metadata keys for production metadata
:param bool extendFileMeta: also use FileMetaFields for checking meta keys
:returns: S_OK, S_ERROR
"""
res = self.fc.getMetadataFields()
if not res['OK']:
LOG.error("Could not contact File Catalog")
return S_ERROR("Could not contact File Catalog")
metaFCkeys = res['Value']['DirectoryMetaFields'].keys()
if extendFileMeta:
metaFCkeys.extend( res['Value']['FileMetaFields'].keys() )
for key in metakeys:
for meta in metaFCkeys:
if meta != key and meta.lower() == key.lower():
return self._reportError("Key syntax error %r, should be %r" % (key, meta), name = self.__class__.__name__)
if key not in metaFCkeys:
return self._reportError("Key %r not found in metadata keys, allowed are %r" % (key, metaFCkeys))
return S_OK()
def _checkFindDirectories( self, metadata ):
""" find directories by metadata and check that there are directories found
:param dict metadata: metadata dictionary
:returns: S_OK, S_ERROR
"""
res = self.fc.findDirectoriesByMetadata(metadata)
if not res['OK']:
return self._reportError("Error looking up the catalog for available directories")
elif len(res['Value']) < 1:
return self._reportError('Could not find any directories corresponding to the query issued')
return res
def setReconstructionBasePaths( self, recPath, dstPath ):
""" set the output Base paths for the reconstruction REC and DST files """
self._recBasePaths['REC'] = recPath
self._recBasePaths['DST'] = dstPath
Example 6: dexit
# Required import: from DIRAC.Resources.Catalog.FileCatalogClient import FileCatalogClient [as alias]
# Or: from DIRAC.Resources.Catalog.FileCatalogClient.FileCatalogClient import findDirectoriesByMetadata [as alias]
  info = []

  if clip.prodid:
    res = tc.getTransformation(clip.prodid)
    if not res['OK']:
      gLogger.error(res['Message'])
      dexit(1)
    trans = res['Value']
    res = tc.getTransformationInputDataQuery( clip.prodid )
    if res['OK']:
      trans['InputDataQuery'] = res['Value']
    res = tc.getAdditionalParameters ( clip.prodid )
    if res['OK']:
      trans['AddParams'] = res['Value']
    #do something with transf
    res = fc.findDirectoriesByMetadata({'ProdID':clip.prodid})
    if res['OK']:
      if len(res['Value'].values()):
        gLogger.verbose("Found some directory matching the metadata")
        for dirs in res['Value'].values():
          res = fc.getDirectoryMetadata(dirs)
          if res['OK']:
            fmeta.update(res['Value'])
          else:
            gLogger.warn("Failed to get dir metadata")
          res = fc.listDirectory(dirs)
          if not res['OK']:
            continue
          content = res['Value']['Successful'][dirs]
          if content["Files"]:
            for f_ex in content["Files"].keys():
Example 7: _getInfo
# Required import: from DIRAC.Resources.Catalog.FileCatalogClient import FileCatalogClient [as alias]
# Or: from DIRAC.Resources.Catalog.FileCatalogClient.FileCatalogClient import findDirectoriesByMetadata [as alias]
def _getInfo():
  """gets info about transformation"""
  clip = _Params()
  clip.registerSwitches()
  Script.parseCommandLine()

  if not clip.prodid and not clip.filename:
    Script.showHelp()
    dexit(1)

  from DIRAC import gLogger
  import os

  from DIRAC.TransformationSystem.Client.TransformationClient import TransformationClient
  tc = TransformationClient()
  from DIRAC.Resources.Catalog.FileCatalogClient import FileCatalogClient
  fc = FileCatalogClient()

  fmeta = {}
  trans = None
  info = []

  if clip.prodid:
    res = tc.getTransformation(clip.prodid)
    if not res['OK']:
      gLogger.error(res['Message'])
      dexit(1)
    trans = res['Value']
    res = tc.getTransformationInputDataQuery( clip.prodid )
    if res['OK']:
      trans['InputDataQuery'] = res['Value']
    res = tc.getAdditionalParameters ( clip.prodid )
    if res['OK']:
      trans['AddParams'] = res['Value']
    #do something with transf
    res1 = fc.findDirectoriesByMetadata({'ProdID':clip.prodid})
    if res1['OK'] and len(res1['Value'].values()):
      gLogger.verbose("Found %i directory matching the metadata" % len(res1['Value'].values()) )
      for dirs in res1['Value'].values():
        res = fc.getDirectoryUserMetadata(dirs)
        if res['OK']:
          fmeta.update(res['Value'])
        else:
          gLogger.error("Failed to get metadata for %s, SKIPPING" % dirs)
          continue
        res = fc.listDirectory(dirs)
        if not res['OK']:
          continue
        content = res['Value']['Successful'][dirs]
        if content["Files"]:
          for f_ex in content["Files"].keys():
            res = fc.getFileUserMetadata(f_ex)
            if res['OK']:
              fmeta.update(res['Value'])
              break

    #here we have trans and fmeta
    info.append("")
    info.append("Production %s has the following parameters:" % trans['TransformationID'])
    info.extend(_createTransfoInfo(trans))

    if fmeta:
      info.append('The files created by this production have the following metadata:')
      info.extend(_createFileInfo(fmeta))
      info.append("It's possible that some meta data was not brought back,")
      info.append("in particular file level metadata, so check some individual files")

  if clip.filename:
    pid = ""
    if clip.filename.count("/"):
      fpath = os.path.dirname(clip.filename)
      res = fc.getDirectoryUserMetadata(fpath)
      if not res['OK']:
        gLogger.error(res['Message'])
        dexit(0)
      fmeta.update(res['Value'])
      res = fc.getFileUserMetadata(clip.filename)
      if not res['OK']:
        gLogger.error(res['Message'])
        dexit(1)
      fmeta.update(res['Value'])
      if 'ProdID' in fmeta:
        pid = str(fmeta['ProdID'])
      res = fc.getFileAncestors([clip.filename], 1)
      if res["OK"]:
        for dummy_lfn,ancestorsDict in res['Value']['Successful'].items():
          if ancestorsDict.keys():
            fmeta["Ancestors"] = ancestorsDict.keys()
      res = fc.getFileDescendents([clip.filename], 1)
      if res["OK"]:
        for dummy_lfn,descendDict in res['Value']['Successful'].items():
          if descendDict.keys():
            fmeta['Descendants'] = descendDict.keys()
    else:
      ext = clip.filename.split(".")[-1]
      fitems = []
      for i in clip.filename.split('.')[:-1]:
        fitems.extend(i.split('_'))
      pid = ''
      if ext == 'stdhep':
#......... part of the code omitted here .........
Example 8: setInputDataQuery
# Required import: from DIRAC.Resources.Catalog.FileCatalogClient import FileCatalogClient [as alias]
# Or: from DIRAC.Resources.Catalog.FileCatalogClient.FileCatalogClient import findDirectoriesByMetadata [as alias]
  def setInputDataQuery(self, metadata):
    """ Define the input data query needed, also get from the data the meta info requested to build the path
    """
    metakeys = metadata.keys()
    client = FileCatalogClient()
    res = client.getMetadataFields()
    if not res['OK']:
      print "Could not contact File Catalog"
      return S_ERROR()
    metaFCkeys = res['Value']['DirectoryMetaFields'].keys()
    for key in metakeys:
      for meta in metaFCkeys:
        if meta != key:
          if meta.lower() == key.lower():
            return self._reportError("Key syntax error %s, should be %s" % (key, meta), name = 'SIDProduction')
      if not metaFCkeys.count(key):
        return self._reportError("Key %s not found in metadata keys, allowed are %s" % (key, metaFCkeys))

    #if not metadata.has_key("ProdID"):
    #  return self._reportError("Input metadata dictionary must contain at least a key 'ProdID' as reference")
    res = client.findDirectoriesByMetadata(metadata)
    if not res['OK']:
      return self._reportError("Error looking up the catalog for available directories")
    elif len(res['Value']) < 1:
      return self._reportError('Could not find any directory corresponding to the query issued')
    dirs = res['Value'].values()
    for mdir in dirs:
      res = self.fc.getDirectoryMetadata(mdir)
      if not res['OK']:
        return self._reportError("Error looking up the catalog for directory metadata")
      compatmeta = res['Value']
      compatmeta.update(metadata)

    if compatmeta.has_key('EvtType'):
      if type(compatmeta['EvtType']) in types.StringTypes:
        self.evttype = compatmeta['EvtType']
      if type(compatmeta['EvtType']) == type([]):
        self.evttype = compatmeta['EvtType'][0]
    else:
      return self._reportError("EvtType is not in the metadata, it has to be!")

    if compatmeta.has_key('NumberOfEvents'):
      if type(compatmeta['NumberOfEvents']) == type([]):
        self.nbevts = int(compatmeta['NumberOfEvents'][0])
      else:
        self.nbevts = int(compatmeta['NumberOfEvents'])

    if compatmeta.has_key("Energy"):
      if type(compatmeta["Energy"]) in types.StringTypes:
        self.energycat = compatmeta["Energy"]
      if type(compatmeta["Energy"]) == type([]):
        self.energycat = compatmeta["Energy"][0]

    if compatmeta.has_key("Polarisation"):
      if type(compatmeta["Polarisation"]) in types.StringTypes:
        self.polarization = compatmeta["Polarisation"]
      if type(compatmeta["Polarisation"]) == type([]):
        self.polarization = compatmeta["Polarisation"][0]

    if compatmeta.has_key("MachineParams"):
      if type(compatmeta["MachineParams"]) in types.StringTypes:
        self.machineTuning = compatmeta["MachineParams"]
      if type(compatmeta["MachineParams"]) == type([]):
        self.machineparams = compatmeta["MachineParams"][0]

    gendata = False
    if compatmeta.has_key('Datatype'):
      if type(compatmeta['Datatype']) in types.StringTypes:
        self.datatype = compatmeta['Datatype']
        if compatmeta['Datatype'] == 'GEN':
          gendata = True
      if type(compatmeta['Datatype']) == type([]):
        self.datatype = compatmeta['Datatype'][0]
        if compatmeta['Datatype'][0] == 'GEN':
          gendata = True

    if compatmeta.has_key("DetectorModel") and not gendata:
      if type(compatmeta["DetectorModel"]) in types.StringTypes:
        self.detector = compatmeta["DetectorModel"]
      if type(compatmeta["DetectorModel"]) == type([]):
        self.detector = compatmeta["DetectorModel"][0]

    self.basename = self.evttype+"_"+self.polarization

    self.energy = Decimal(self.energycat)

    self.inputBKSelection = metadata
    self.prodparameters['nbevts'] = self.nbevts
    self.prodparameters["FCInputQuery"] = self.inputBKSelection

    self.inputdataquery = True
    return S_OK()
Example 9: TransformationCleaningAgent
# Required import: from DIRAC.Resources.Catalog.FileCatalogClient import FileCatalogClient [as alias]
# Or: from DIRAC.Resources.Catalog.FileCatalogClient.FileCatalogClient import findDirectoriesByMetadata [as alias]
#......... part of the code omitted here .........
        res = self.archiveTransformation( transDict['TransformationID'] )
        if not res['OK']:
          self.log.error( "Problems archiving transformation %s: %s" % ( transDict['TransformationID'],
                                                                         res['Message'] ) )
    else:
      self.log.error( "Could not get the transformations" )
    return S_OK()

  #############################################################################
  #
  # Get the transformation directories for checking
  #

  def getTransformationDirectories( self, transID ):
    """ get the directories for the supplied transformation from the transformation system

    :param self: self reference
    :param int transID: transformation ID
    """
    directories = []
    if 'TransformationDB' in self.directoryLocations:
      res = self.transClient.getTransformationParameters( transID, ['OutputDirectories'] )
      if not res['OK']:
        self.log.error( "Failed to obtain transformation directories", res['Message'] )
        return res
      if type( res['Value'] ) != type( [] ):
        transDirectories = ast.literal_eval( res['Value'] )
      else:
        transDirectories = res['Value']
      directories = self._addDirs( transID, transDirectories, directories )

    if 'MetadataCatalog' in self.directoryLocations:
      res = self.metadataClient.findDirectoriesByMetadata( {self.transfidmeta:transID} )
      if not res['OK']:
        self.log.error( "Failed to obtain metadata catalog directories", res['Message'] )
        return res
      transDirectories = res['Value']
      directories = self._addDirs( transID, transDirectories, directories )
    if not directories:
      self.log.info( "No output directories found" )
    directories = sorted( directories )
    return S_OK( directories )

  # FIXME If a classmethod, should it not have cls instead of self?
  @classmethod
  def _addDirs( self, transID, newDirs, existingDirs ):
    """ append unique :newDirs: list to :existingDirs: list

    :param self: self reference
    :param int transID: transformationID
    :param list newDirs: src list of paths
    :param list existingDirs: dest list of paths
    """
    for folder in newDirs:
      transStr = str( transID ).zfill( 8 )
      if re.search( transStr, str( folder ) ):
        if not folder in existingDirs:
          existingDirs.append( folder )
    return existingDirs

  #############################################################################
  #
  # These are the methods for performing the cleaning of catalogs and storage
  #
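Most of the examples above follow the same pattern: query the catalog for directories matching a metadata dictionary, then inspect whatever comes back. The standalone sketch below condenses that pattern; the 'ProdID' key and its value are illustrative assumptions, and the same caveats as in the first sketch apply (initialised DIRAC environment, valid proxy, metadata fields that actually exist in your catalog).

from DIRAC.Core.Base import Script
Script.parseCommandLine()  # initialise DIRAC before using any client

from DIRAC import gLogger
from DIRAC.Resources.Catalog.FileCatalogClient import FileCatalogClient

fc = FileCatalogClient()
res = fc.findDirectoriesByMetadata({'ProdID': 1234})  # illustrative production ID
if not res['OK'] or not res['Value']:
  gLogger.error("No directories found for this query", res.get('Message', ''))
else:
  dirs = res['Value'].values() if isinstance(res['Value'], dict) else res['Value']
  for path in dirs:
    # Retrieve the user metadata attached to each matching directory,
    # mirroring what examples 6, 7 and 8 do above.
    metaRes = fc.getDirectoryUserMetadata(path)
    if metaRes['OK']:
      gLogger.notice("%s : %s" % (path, metaRes['Value']))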