This article collects typical usage examples of the Python class DIRAC.Resources.Catalog.FileCatalogClient.FileCatalogClient. If you are wondering what FileCatalogClient is for or how to use it, the selected examples below should help.
The following presents 15 code examples of the FileCatalogClient class, sorted by popularity by default.
Example 1: getFileInfo
def getFileInfo(lfn):
  """ Retrieve the file info
  """
  from DIRAC.Resources.Catalog.FileCatalogClient import FileCatalogClient
  from DIRAC.Core.Utilities import DEncode
  from DIRAC import gLogger
  fc = FileCatalogClient()
  lumi = 0
  nbevts = 0
  res = fc.getFileUserMetadata(lfn)
  if not res['OK']:
    gLogger.error("Failed to get metadata of %s" % lfn)
    return (0,0,{})
  if res['Value'].has_key('Luminosity'):
    lumi += float(res['Value']['Luminosity'])
  addinfo = {}
  if 'AdditionalInfo' in res['Value']:
    addinfo = res['Value']['AdditionalInfo']
    if addinfo.count("{"):
      addinfo = eval(addinfo)
    else:
      addinfo = DEncode.decode(addinfo)[0]
  if "NumberOfEvents" in res['Value'].keys():
    nbevts += int(res['Value']['NumberOfEvents'])
  return (float(lumi),int(nbevts),addinfo)
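A minimal way to exercise this helper could look like the sketch below; the LFN is purely illustrative, and a valid DIRAC proxy plus an initialized DIRAC environment are assumed.

if __name__ == "__main__":
  from DIRAC.Core.Base import Script
  Script.parseCommandLine()  # initialize DIRAC before talking to the catalog
  from DIRAC import gLogger
  test_lfn = "/ilc/prod/clic/test/file.slcio"  # hypothetical LFN, replace with a real catalog entry
  lumi, nbevts, addinfo = getFileInfo(test_lfn)
  gLogger.notice("Luminosity: %s, NumberOfEvents: %s, AdditionalInfo: %s" % (lumi, nbevts, addinfo))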
Example 2: _getProdInfoFromIDs
def _getProdInfoFromIDs(self):
  """get the processName, energy and eventsPerJob from the MetaData catalog

  :raises: AttributeError if some of the information cannot be found
  :returns: None
  """
  if not self.prodIDs:
    raise AttributeError("No prodIDs defined")
  self.eventsPerJobs = []
  self.processes = []
  self.energies = []
  from DIRAC.TransformationSystem.Client.TransformationClient import TransformationClient
  from DIRAC.Resources.Catalog.FileCatalogClient import FileCatalogClient
  trc = TransformationClient()
  fc = FileCatalogClient()
  for prodID in self.prodIDs:
    gLogger.notice("Getting information for %s" % prodID)
    tRes = trc.getTransformation(str(prodID))
    if not tRes['OK']:
      raise AttributeError("No prodInfo found for %s" % prodID)
    self.eventsPerJobs.append(int(tRes['Value']['EventsPerTask']))
    lfnRes = fc.findFilesByMetadata({'ProdID': prodID})
    if not lfnRes['OK'] or not lfnRes['Value']:
      raise AttributeError("Could not find files for %s: %s " % (prodID, lfnRes.get('Message', lfnRes.get('Value'))))
    path = os.path.dirname(lfnRes['Value'][0])
    fileRes = fc.getDirectoryUserMetadata(path)
    self.processes.append(fileRes['Value']['EvtType'])
    self.energies.append(fileRes['Value']['Energy'])
    gLogger.notice("Found (Evts,Type,Energy): %s %s %s " %
                   (self.eventsPerJobs[-1], self.processes[-1], self.energies[-1]))
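Outside of the class context, the same two catalog calls can be sketched standalone as follows; the ProdID value is hypothetical and this assumes metadata fields named ProdID, EvtType and Energy exist in your catalog.

import os
from DIRAC.Core.Base import Script
Script.parseCommandLine()
from DIRAC import gLogger
from DIRAC.Resources.Catalog.FileCatalogClient import FileCatalogClient

fc = FileCatalogClient()
res = fc.findFilesByMetadata({'ProdID': 1234})  # hypothetical production ID
if res['OK'] and res['Value']:
  dirName = os.path.dirname(res['Value'][0])       # directory of the first matching LFN
  meta = fc.getDirectoryUserMetadata(dirName)
  if meta['OK']:
    gLogger.notice("EvtType=%s Energy=%s" % (meta['Value'].get('EvtType'), meta['Value'].get('Energy')))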
Example 3: registerInputData
def registerInputData(filepath, size, prefix = '/cepc/lustre-ro'):
  infoDict = {}
  infoDict['PFN'] = ''
  infoDict['Size'] = size
  infoDict['SE'] = 'IHEP-STORM'
  infoDict['GUID'] = commands.getoutput('uuidgen')
  infoDict['Checksum'] = ''
  fileDict = {}
  lfn = prefix + filepath
  fileDict[lfn] = infoDict
  fcc = FileCatalogClient('DataManagement/FileCatalog')
  rm = ReplicaManager()
  result = {}
  result['lfn'] = lfn
  result['is_registered'] = False
  # query the catalog
  for repeatTimes in range(10):
    is_registered = fcc.isFile(lfn)
    if (is_registered['OK'] and is_registered['Value']['Successful'].has_key(lfn)):
      break
    # else:
    #   continue
  if not is_registered['OK']:  # query failed
    result['is_registered'] = 'query error, unknown'
    print 'Failed to query %s in DFC. Error message is %s' % (lfn, is_registered['Message'])
  if is_registered['Value']['Successful'][lfn]:  # already registered
    result['is_registered'] = True
    for repeatTimes in range(10):
      is_removed = rm.removeCatalogFile(lfn)  # remove the existing entry
      if (is_removed['OK'] and is_removed['Value']['Successful'][lfn]['FileCatalog']):
        result['is_removed'] = True
        break
      # else:
      #   continue
    if not is_removed['OK']:  # removal failed
      result['is_removed'] = 'remove error'
      print 'Failed to remove %s from DFC.' % lfn
  # add/register the file
  for repeatTimes in range(10):
    is_added = fcc.addFile(fileDict)  # add/register
    if (is_added['OK'] and is_added['Value']['Successful'][lfn]):
      result['OK'] = True
      return result
    # else:
    #   continue
  if not is_added['OK']:  # add failed
    result['OK'] = False
    result['Message'] = is_added['Message']
  elif is_added['Value']['Failed']:
    result['OK'] = False
    result['Message'] = 'Failed to add file ' + lfn
  return result
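A hypothetical call of this function might look as follows; the file path and size are illustrative, and a proxy with write permission on the DFC plus the commands/ReplicaManager imports used above are assumed.

result = registerInputData('/some/dataset/file_001.root', 123456789)  # illustrative arguments
print result['lfn'], result.get('OK'), result.get('Message', '')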
Example 4: __retrieveAllRemoteAttributes
def __retrieveAllRemoteAttributes(self, lfnList):
  fc = FileCatalogClient('DataManagement/FileCatalog')
  result = fc.getFileMetadata(lfnList)
  if not result['OK']:
    raise Exception('getFileMetadata failed: %s' % result['Message'])
  attributes = {}
  for lfn in lfnList:
    if lfn in result['Value']['Successful']:
      attributes[lfn] = self.__parseMetadata(result['Value']['Successful'][lfn])
  return attributes
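getFileMetadata also works directly on a plain list of LFNs; a small standalone sketch (the LFNs are illustrative):

from DIRAC.Core.Base import Script
Script.parseCommandLine()
from DIRAC.Resources.Catalog.FileCatalogClient import FileCatalogClient

fc = FileCatalogClient()
lfns = ['/some/vo/path/file1.root', '/some/vo/path/file2.root']  # hypothetical LFNs
res = fc.getFileMetadata(lfns)
if res['OK']:
  for lfn, meta in res['Value']['Successful'].items():
    print lfn, meta.get('Size'), meta.get('GUID')
  for lfn, reason in res['Value']['Failed'].items():
    print 'Failed:', lfn, reason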
Example 5: eventTypeCheck
def eventTypeCheck(eventType):
  entries = []
  client = FileCatalogClient()
  dir = '/BES3/EventTypeList'
  result = client.listDirectory(dir)
  if result['OK']:
    for i, v in enumerate(result['Value']['Successful'][dir]['SubDirs']):
      if eventType == v.split('/')[2]:
        return True
  return False
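The listDirectory return structure used above can be inspected with a standalone sketch like this (the directory is the one from the example; adjust it for your own catalog):

from DIRAC.Core.Base import Script
Script.parseCommandLine()
from DIRAC.Resources.Catalog.FileCatalogClient import FileCatalogClient

client = FileCatalogClient()
path = '/BES3/EventTypeList'
result = client.listDirectory(path)
if result['OK'] and path in result['Value']['Successful']:
  content = result['Value']['Successful'][path]
  print 'SubDirs:', list(content['SubDirs'])
  print 'Files  :', list(content['Files'])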
Example 6: getFiles
def getFiles():
  """ Get the lfns: This is not the point of this example, so keep it out of the main
  """
  fc = FileCatalogClient()
  meta = {}
  meta['ProdID'] = 1543
  meta["Datatype"] = "DST"
  result = fc.findFilesByMetadata(meta, "/ilc/prod/clic")
  if not result["OK"]:
    gLogger.error(result["Message"])
    dexit(1)
  return result['Value']
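Appended to the example above, a sketch of how the returned LFN list might be consumed (this assumes gLogger is imported in the surrounding script, as it is inside getFiles):

lfns = getFiles()
gLogger.notice("Found %d files" % len(lfns))
for lfn in lfns[:5]:  # show only the first few entries
  gLogger.notice("  %s" % lfn)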
Example 7: _findInFC
def _findInFC():
  """Find something in the FileCatalog"""
  from DIRAC import exit as dexit
  clip = _Params()
  clip.registerSwitches()
  Script.parseCommandLine()
  args = Script.getPositionalArgs()
  if len(args) < 2:
    Script.showHelp('ERROR: Not enough arguments')
    gLogger.error("Run %s --help" % SCRIPTNAME)
    dexit(1)
  path = args[0]
  if path == '.':
    path = '/'
  ## Check that the first argument is not a MetaQuery
  if any( op in path for op in OPLIST ):
    gLogger.error("ERROR: Path '%s' is not a valid path! The first argument must be a path" % path)
    gLogger.error("Run %s --help" % SCRIPTNAME)
    dexit(1)
  gLogger.verbose("Path:", path)
  metaQuery = args[1:]
  metaDataDict = _createQueryDict(metaQuery)
  gLogger.verbose("Query:", str(metaDataDict))
  if not metaDataDict:
    gLogger.info("No query")
    dexit(1)
  fc = FileCatalogClient()
  res = fc.findFilesByMetadata(metaDataDict, path)
  if not res['OK']:
    gLogger.error(res['Message'])
    dexit(1)
  if not res['Value']:
    gLogger.notice("No files found")
  listToPrint = None
  if clip.printOnlyDirectories:
    listToPrint = set( "/".join(fullpath.split("/")[:-1]) for fullpath in res['Value'] )
  else:
    listToPrint = res['Value']
  for entry in listToPrint:
    print entry
  dexit(0)
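As a hedged sketch of the kind of dictionary _createQueryDict might produce: findFilesByMetadata accepts plain equality values and, for indexed numeric fields, operator sub-dictionaries. The field names and values below are hypothetical.

fc = FileCatalogClient()
query = {'Datatype': 'DST',        # plain equality
         'Energy': {'>=': 350}}    # operator form, assuming a numeric, indexed Energy field
res = fc.findFilesByMetadata(query, '/ilc/prod/clic')
if res['OK']:
  for lfn in res['Value']:
    print lfn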
Example 8: initialize
def initialize( self ):
  """Sets defaults """
  self.replicaManager = ReplicaManager()
  self.transClient = TransformationClient()
  self.wmsClient = WMSClient()
  self.requestClient = RequestClient()
  self.metadataClient = FileCatalogClient()
  self.storageUsageClient = StorageUsageClient()
  # This sets the default proxy to be used, as defined under
  # /Operations/Shifter/DataManager;
  # the shifterProxy option in the Configuration can be used to change this default.
  self.am_setOption( 'shifterProxy', 'DataManager' )
  self.transformationTypes = sortList( self.am_getOption( 'TransformationTypes', ['MCSimulation', 'DataReconstruction', 'DataStripping', 'MCStripping', 'Merge', 'Replication'] ) )
  gLogger.info( "Will consider the following transformation types: %s" % str( self.transformationTypes ) )
  self.directoryLocations = sortList( self.am_getOption( 'DirectoryLocations', ['TransformationDB', 'StorageUsage', 'MetadataCatalog'] ) )
  gLogger.info( "Will search for directories in the following locations: %s" % str( self.directoryLocations ) )
  self.transfidmeta = self.am_getOption( 'TransfIDMeta', "TransformationID" )
  gLogger.info( "Will use %s as metadata tag name for TransformationID" % self.transfidmeta )
  self.archiveAfter = self.am_getOption( 'ArchiveAfter', 7 ) # days
  gLogger.info( "Will archive Completed transformations after %d days" % self.archiveAfter )
  self.activeStorages = sortList( self.am_getOption( 'ActiveSEs', [] ) )
  gLogger.info( "Will check the following storage elements: %s" % str( self.activeStorages ) )
  self.logSE = self.am_getOption( 'TransformationLogSE', 'LogSE' )
  gLogger.info( "Will remove logs found on storage element: %s" % self.logSE )
  return S_OK()
Example 9: __init__
def __init__(self):
  super(OverlayInput, self).__init__()
  self.enable = True
  self.STEP_NUMBER = ''
  self.log = gLogger.getSubLogger( "OverlayInput" )
  self.applicationName = 'OverlayInput'
  self.curdir = os.getcwd()
  self.applicationLog = ''
  self.printoutflag = ''
  self.prodid = 0
  self.detector = '' ##needed for backward compatibility
  self.detectormodel = ""
  self.energytouse = ''
  self.energy = 0
  self.nbofeventsperfile = 100
  self.lfns = []
  self.nbfilestoget = 0
  self.BkgEvtType = 'gghad'
  self.BXOverlay = 0
  self.ggtohadint = 3.2
  self.nbsigeventsperfile = 0
  self.nbinputsigfile = 1
  self.NbSigEvtsPerJob = 0
  self.rm = ReplicaManager()
  self.fc = FileCatalogClient()
  self.site = DIRAC.siteName()
  self.machine = 'clic_cdr'
Example 10: __init__
def __init__( self, *args, **kwargs ):
  ''' c'tor
  '''
  AgentModule.__init__( self, *args, **kwargs )
  # # replica manager
  self.replicaManager = ReplicaManager()
  # # transformation client
  self.transClient = TransformationClient()
  # # wms client
  self.wmsClient = WMSClient()
  # # request client
  self.requestClient = RequestClient()
  # # file catalog client
  self.metadataClient = FileCatalogClient()
  # # placeholders for CS options
  # # transformations types
  self.transformationTypes = None
  # # directory locations
  self.directoryLocations = None
  # # transformation metadata
  self.transfidmeta = None
  # # archive period in days
  self.archiveAfter = None
  # # active SEs
  self.activeStorages = None
  # # transformation log SEs
  self.logSE = None
  # # enable/disable execution
  self.enableFlag = None
Example 11: __init__
def __init__(self, script = None):
  super(ProductionJob, self).__init__( script )
  self.prodVersion = __RCSID__
  self.dryrun = False
  self.created = False
  self.checked = False
  self.call_finalization = False
  self.finalsdict = {}
  self.transfid = 0
  self.type = 'Production'
  self.csSection = '/Production/Defaults'
  self.ops = Operations()
  self.fc = FileCatalogClient()
  self.trc = TransformationClient()
  self.defaultProdID = '12345'
  self.defaultProdJobID = '12345'
  self.jobFileGroupSize = 1
  self.nbtasks = 1
  self.slicesize = 0
  self.basename = ''
  self.basepath = self.ops.getValue('/Production/CLIC/BasePath', '/ilc/prod/clic/')
  self.evttype = ''
  self.datatype = ''
  self.energycat = ''
  self.detector = ''
  self.currtrans = None
  self.description = ''
  self.finalpaths = []
  self.finalMetaDict = defaultdict( dict )
  self.prodMetaDict = {}
  self.finalMetaDictNonSearch = {}
  self.metadict_external = {}
  self.outputStorage = ''
  self.proxyinfo = getProxyInfo()
  self.inputdataquery = False
  self.inputBKSelection = {}
  self.plugin = 'Standard'
  self.prodGroup = ''
  self.prodTypes = ['MCGeneration', 'MCSimulation', 'Test', 'MCReconstruction',
                    'MCReconstruction_Overlay', 'Merge', 'Split',
                    'MCGeneration_ILD',
                    'MCSimulation_ILD',
                    'MCReconstruction_ILD',
                    'MCReconstruction_Overlay_ILD',
                    'Split_ILD'
                   ]
  self.prodparameters = {}
  self.prodparameters['NbInputFiles'] = 1
  self.prodparameters['nbevts'] = 0
  #self.prodparameters["SWPackages"] = ''
  self._addParameter(self.workflow, "IS_PROD", 'JDL', True, "This job is a production job")
  if not script:
    self.__setDefaults()
  self._recBasePaths = {}
  self.maxFCFoldersToCheck = 100000
Example 12: __init__
def __init__(self, fcClient=False):
  """Internal initialization of Badger API.
  """
  if not fcClient:
    _fcType = "DataManagement/FileCatalog"
    self.client = FileCatalogClient(_fcType)
  else:
    self.client = fcClient
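A sketch of how such a wrapper could be instantiated, either with the default catalog client or with one injected for testing; the class name Badger is taken from the docstring above and the rest is illustrative.

from DIRAC.Core.Base import Script
Script.parseCommandLine()
from DIRAC.Resources.Catalog.FileCatalogClient import FileCatalogClient

badger = Badger()                                 # uses DataManagement/FileCatalog by default
badger_with_client = Badger(FileCatalogClient())  # inject an existing client, e.g. in tests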
Example 13: __init__
def __init__(self):
  super(DBDGenRegisterOutputData, self).__init__()
  self.version = "DBDGenRegisterOutputData v1"
  self.commandTimeOut = 10 * 60
  self.enable = True
  self.fcc = FileCatalogClient()
  self.nbofevents = 0
  self.prodOutputLFNs = []
Example 14: runOne
def runOne():
  fc = FileCatalogClient()
  for mokkaJobNumber in range(39,45):
    for eventType in ['Z_uds','Photon','Kaon0L','Muon']:
      energies = []
      if eventType == 'Z_uds':
        energies = [91,200,360,500,750,1000,2000,3000]
      elif eventType == 'Photon':
        energies = [10]
      elif eventType == 'Muon':
        energies = [10]
      elif eventType == 'Kaon0L':
        energies = [1,2,3,4,5,6,7,8,9,10,15,20,25,30,35,40,45,50]
      for energy in energies:
        path = '/ilc/user/s/sgreen/0804/HCalStudiessg568/GJN' + str(mokkaJobNumber) + '/' + eventType + '/' + str(energy) + 'GeV'
        pathdict = {'path':path, 'meta':{'MokkaJobNumber':mokkaJobNumber, 'Type':eventType, 'Energy':energy}}
        res = fc.setMetadata(pathdict['path'], pathdict['meta'])
  return
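To verify that setMetadata took effect, the directory metadata could be read back afterwards. A standalone sketch reusing one of the paths built above (hedged: that path only exists in the original user's catalog):

from DIRAC.Core.Base import Script
Script.parseCommandLine()
from DIRAC.Resources.Catalog.FileCatalogClient import FileCatalogClient

fc = FileCatalogClient()
check = fc.getDirectoryUserMetadata('/ilc/user/s/sgreen/0804/HCalStudiessg568/GJN39/Z_uds/91GeV')
if check['OK']:
  print check['Value']  # expect MokkaJobNumber, Type and Energy among the keys
else:
  print 'Lookup failed:', check['Message']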
Example 15: compare
def compare(attributes, input):
  error_list = {}
  inDFC = {}
  #print "input keys:", input.keys()
  keys = sorted(input.keys())
  #print "keys after being sorted:", keys
  expNum = _get_exp_num(attributes["expNum"])
  eventType = _get_event_type(attributes["eventType"])
  dir = "/BES3/File/" + attributes["resonance"] + "/" + attributes["bossVer"]
  if attributes["streamId"] == "stream0":
    dir = dir + "/data" + "/" + eventType + "/" + expNum + "/" + attributes["LFN"]
  else:
    dir = dir + "/mc" + "/" + eventType + "/" + expNum + "/" + attributes["streamId"] + "/" + attributes["LFN"]
  client = FileCatalogClient()
  result = client.getFileMetadata(dir)
  file_exist = len(result['Value']['Successful'])
  if file_exist == 0:
    print "this file doesn't exist in DFC", attributes['LFN']
  else:
    result = client.getDirectoryMetadata(dir)
    if result["OK"]:
      inDFC["resonance"] = result["Value"]["resonance"]
      inDFC["streamId"] = result["Value"]["streamId"]
      inDFC["eventType"] = result["Value"]["eventType"]
      inDFC["bossVer"] = result["Value"]["bossVer"]
      inDFC["expNum"] = result["Value"]["expNum"]
    for key in keys:
      if input[key] != inDFC[key]:
        error_list[key] = inDFC[key]
    if error_list is not None:
      return error_list
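A hypothetical invocation of this checker; every attribute value is illustrative, and the _get_exp_num/_get_event_type helpers referenced above must be available in the same module:

attributes = {'expNum': 'exp1', 'eventType': 'all', 'resonance': 'jpsi',
              'bossVer': '6.6.4', 'streamId': 'stream0', 'LFN': 'run_0001_file_0001.dst'}
wanted = {'resonance': 'jpsi', 'bossVer': '6.6.4', 'streamId': 'stream0',
          'eventType': 'all', 'expNum': 'exp1'}
errors = compare(attributes, wanted)
if errors:
  print 'Mismatched keys in DFC:', errors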