This article collects typical usage examples of the Python class WMCore.Services.UserFileCache.UserFileCache.UserFileCache. If you are unsure what the UserFileCache class does or how to use it, the curated examples below may help.
The following presents 15 code examples of the UserFileCache class, sorted by popularity by default.
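Before the examples, here is a minimal sketch of the pattern most of them share: construct a UserFileCache from a dict with an endpoint (plus cert/key when a proxy is required), upload a file, and fetch it back by hashkey. The endpoint URL and proxy path below are placeholders for illustration, not values taken from the examples.

from WMCore.Services.UserFileCache.UserFileCache import UserFileCache

# Placeholder endpoint and proxy path, used only to illustrate the call pattern.
proxy = '/tmp/x509up_u12345'
ufc = UserFileCache({'endpoint': 'https://cmsweb.cern.ch/crabcache',
                     'cert': proxy, 'key': proxy})

res = ufc.upload('sandbox.tar.gz')   # the examples below check the returned dict for 'hashkey'
if 'hashkey' in res:
    ufc.download(res['hashkey'], output='sandbox_copy.tgz')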
Example 1: executeInternal
def executeInternal(self, *args, **kw):
    inputFiles = args[0][2]
    splitterResult = args[0][3][0]

    cwd = os.getcwd()
    try:
        os.chdir(kw['tempDir'])
        splittingSummary = SplittingSummary(kw['task']['tm_split_algo'])
        for jobgroup in splitterResult:
            jobs = jobgroup.getJobs()
            splittingSummary.addJobs(jobs)
        splittingSummary.dump('splitting-summary.json')
        inputFiles.append('splitting-summary.json')

        self.packSandbox(inputFiles)

        self.logger.info('Uploading dry run tarball to the user file cache')
        ufc = UserFileCache(mydict={'cert': kw['task']['user_proxy'], 'key': kw['task']['user_proxy'], 'endpoint': kw['task']['tm_cache_url']})
        result = ufc.uploadLog('dry-run-sandbox.tar.gz')
        os.remove('dry-run-sandbox.tar.gz')
        if 'hashkey' not in result:
            raise TaskWorkerException('Failed to upload dry-run-sandbox.tar.gz to the user file cache: ' + str(result))
        else:
            self.logger.info('Uploaded dry run tarball to the user file cache: ' + str(result))
            update = {'workflow': kw['task']['tm_taskname'], 'subresource': 'state', 'status': 'UPLOADED'}
            self.logger.debug('Updating task status: %s' % str(update))
            self.server.post(self.resturi, data=urllib.urlencode(update))
    finally:
        os.chdir(cwd)

    return Result(task=kw['task'], result=args[0])
Example 2: UserFileCacheTest
class UserFileCacheTest(unittest.TestCase):
    """
    Unit tests for UserFileCache Service
    """

    def testChecksum(self):
        """
        Tests checksum method
        """
        self.ufc = UserFileCache()
        checksum1 = self.ufc.checksum(fileName=path.join(getTestBase(), 'WMCore_t/Services_t/UserFileCache_t/ewv_crab_EwvAnalysis_31_111229_140959_publish.tgz'))
        checksum2 = self.ufc.checksum(fileName=path.join(getTestBase(), 'WMCore_t/Services_t/UserFileCache_t/ewv_crab_EwvAnalysis_31_resubmit_111229_144319_publish.tgz'))
        self.assertTrue(checksum1)
        self.assertTrue(checksum2)
        self.assertFalse(checksum1 == checksum2)
        self.assertRaises(IOError, self.ufc.checksum, **{'fileName': 'does_not_exist'})
        return

    def testUploadDownload(self):
        if 'UFCURL' in os.environ:
            currdir = getTestBase()
            upfile = path.join(currdir, 'WMCore_t/Services_t/UserFileCache_t/test_file.tgz') #file to upload
            ufc = UserFileCache({'endpoint':os.environ['UFCURL']})
            #named upload/download
            res = ufc.upload(upfile, 'name_publish.tgz')
            ufc.download(name=res['name'], output='name_publish.tgz')
            #hashkey upload/download
            res = ufc.upload(upfile)
            ufc.download(res['hashkey'], output='pippo_publish_down.tgz')
Example 3: executeAction
def executeAction(self, nextinput, work):
    """ Execute an action and deal with the error handling and upload of the tasklogfile to the crabcache
    """
    try:
        output = work.execute(nextinput, task=self._task, tempDir=self.tempDir)
    except TaskWorkerException as twe:
        self.logger.debug(str(traceback.format_exc())) #print the stacktrace only in debug mode
        raise WorkerHandlerException(str(twe), retry = twe.retry) #TaskWorker error, do not add traceback to the error propagated to the REST
    except Exception as exc:
        msg = "Problem handling %s because of %s failure, traceback follows\n" % (self._task['tm_taskname'], str(exc))
        msg += str(traceback.format_exc())
        self.logger.error(msg)
        raise WorkerHandlerException(msg) #Errors not foreseen. Print everything!
    finally:
        #TODO: we need to do that also in Worker.py otherwise some messages might only be in the TW file but not in the crabcache.
        logpath = 'logs/tasks/%s/%s.log' % (self._task['tm_username'], self._task['tm_taskname'])
        if os.path.isfile(logpath) and 'user_proxy' in self._task: #the user proxy might not be there if myproxy retrieval failed
            cacheurldict = {'endpoint':self._task['tm_cache_url'], 'cert':self._task['user_proxy'], 'key':self._task['user_proxy']}
            try:
                ufc = UserFileCache(cacheurldict)
                logfilename = self._task['tm_taskname'] + '_TaskWorker.log'
                ufc.uploadLog(logpath, logfilename)
            except HTTPException as hte:
                msg = "Failed to upload the logfile to %s for task %s. More details in the http headers and body:\n%s\n%s" % (self._task['tm_cache_url'], self._task['tm_taskname'], hte.headers, hte.result)
                self.logger.error(msg)
            except Exception: #pylint: disable=broad-except
                msg = "Unknown error while uploading the logfile for task %s" % self._task['tm_taskname']
                self.logger.exception(msg) #upload logfile of the task to the crabcache

    return output
Example 4: executeInternal
def executeInternal(self, *args, **kw):
    # FIXME: In PanDA, we provided the executable as a URL.
    # So, the filename becomes http:// -- and doesn't really work. Hardcoding the analysis wrapper.
    #transform_location = getLocation(kw['task']['tm_transformation'], 'CAFUtilities/src/python/transformation/CMSRunAnalysis/')
    transform_location = getLocation('CMSRunAnalysis.sh', 'CRABServer/scripts/')
    cmscp_location = getLocation('cmscp.py', 'CRABServer/scripts/')
    gwms_location = getLocation('gWMS-CMSRunAnalysis.sh', 'CRABServer/scripts/')
    dag_bootstrap_location = getLocation('dag_bootstrap_startup.sh', 'CRABServer/scripts/')
    bootstrap_location = getLocation("dag_bootstrap.sh", "CRABServer/scripts/")
    adjust_location = getLocation("AdjustSites.py", "CRABServer/scripts/")
    shutil.copy(transform_location, '.')
    shutil.copy(cmscp_location, '.')
    shutil.copy(gwms_location, '.')
    shutil.copy(dag_bootstrap_location, '.')
    shutil.copy(bootstrap_location, '.')
    shutil.copy(adjust_location, '.')

    # Bootstrap the ISB if we are using UFC
    if UserFileCache and kw['task']['tm_cache_url'].find('/crabcache')!=-1:
        ufc = UserFileCache(dict={'cert': kw['task']['user_proxy'], 'key': kw['task']['user_proxy'], 'endpoint' : kw['task']['tm_cache_url']})
        try:
            ufc.download(hashkey=kw['task']['tm_user_sandbox'].split(".")[0], output="sandbox.tar.gz")
        except Exception, ex:
            self.logger.exception(ex)
            raise TaskWorkerException("The CRAB3 server backend could not download the input sandbox with your code "+\
                                      "from the frontend (crabcache component).\nThis could be a temporary glitch; please try to submit a new task later "+\
                                      "(resubmit will not work) and contact the experts if the error persists.\nError reason: %s" % str(ex)) #TODO url!?
        kw['task']['tm_user_sandbox'] = 'sandbox.tar.gz'
Example 5: executeInternal
def executeInternal(self, *args, **kw):
    # FIXME: In PanDA, we provided the executable as a URL.
    # So, the filename becomes http:// -- and doesn't really work. Hardcoding the analysis wrapper.
    #transform_location = getLocation(kw['task']['tm_transformation'], 'CAFUtilities/src/python/transformation/CMSRunAnalysis/')
    transform_location = getLocation('CMSRunAnalysis.sh', 'CRABServer/scripts/')
    cmscp_location = getLocation('cmscp.py', 'CRABServer/scripts/')
    gwms_location = getLocation('gWMS-CMSRunAnalysis.sh', 'CRABServer/scripts/')
    dag_bootstrap_location = getLocation('dag_bootstrap_startup.sh', 'CRABServer/scripts/')
    bootstrap_location = getLocation("dag_bootstrap.sh", "CRABServer/scripts/")
    adjust_location = getLocation("AdjustSites.py", "CRABServer/scripts/")
    shutil.copy(transform_location, '.')
    shutil.copy(cmscp_location, '.')
    shutil.copy(gwms_location, '.')
    shutil.copy(dag_bootstrap_location, '.')
    shutil.copy(bootstrap_location, '.')
    shutil.copy(adjust_location, '.')

    # Bootstrap the ISB if we are using UFC
    if UserFileCache and kw['task']['tm_cache_url'].find('/crabcache')!=-1:
        ufc = UserFileCache(mydict={'cert': kw['task']['user_proxy'], 'key': kw['task']['user_proxy'], 'endpoint' : kw['task']['tm_cache_url']})
        try:
            ufc.download(hashkey=kw['task']['tm_user_sandbox'].split(".")[0], output="sandbox.tar.gz")
        except Exception as ex:
            self.logger.exception(ex)
            raise TaskWorkerException("The CRAB3 server backend could not download the input sandbox with your code "+\
                                      "from the frontend (crabcache component).\nThis could be a temporary glitch; please try to submit a new task later "+\
                                      "(resubmit will not work) and contact the experts if the error persists.\nError reason: %s" % str(ex)) #TODO url!?
        kw['task']['tm_user_sandbox'] = 'sandbox.tar.gz'

    # Bootstrap the runtime if it is available.
    job_runtime = getLocation('CMSRunAnalysis.tar.gz', 'CRABServer/')
    shutil.copy(job_runtime, '.')
    task_runtime = getLocation('TaskManagerRun.tar.gz', 'CRABServer/')
    shutil.copy(task_runtime, '.')

    kw['task']['resthost'] = self.server['host']
    kw['task']['resturinoapi'] = self.restURInoAPI
    self.task = kw['task']

    params = {}
    if kw['task']['tm_dry_run'] == 'F':
        params = self.sendDashboardTask()

    inputFiles = ['gWMS-CMSRunAnalysis.sh', 'CMSRunAnalysis.sh', 'cmscp.py', 'RunJobs.dag', 'Job.submit', 'dag_bootstrap.sh', \
                  'AdjustSites.py', 'site.ad', 'site.ad.json', 'run_and_lumis.tar.gz', 'input_files.tar.gz']

    self.extractMonitorFiles(inputFiles, **kw)

    if kw['task'].get('tm_user_sandbox') == 'sandbox.tar.gz':
        inputFiles.append('sandbox.tar.gz')
    if os.path.exists("CMSRunAnalysis.tar.gz"):
        inputFiles.append("CMSRunAnalysis.tar.gz")
    if os.path.exists("TaskManagerRun.tar.gz"):
        inputFiles.append("TaskManagerRun.tar.gz")

    info, splitterResult = self.createSubdag(*args, **kw)

    return info, params, inputFiles, splitterResult
Example 6: actionWork
def actionWork(self, *args, **kwargs):
    """Performing the set of actions"""
    nextinput = args

    #set the logger to save the tasklog
    formatter = logging.Formatter("%(asctime)s:%(levelname)s:%(module)s:%(message)s")
    taskdirname = "logs/tasks/%s/" % self._task['tm_username']
    if not os.path.isdir(taskdirname):
        os.mkdir(taskdirname)
    taskhandler = FileHandler(taskdirname + self._task['tm_taskname'] + '.log')
    taskhandler.setLevel(logging.DEBUG)
    self.logger.addHandler(taskhandler)

    for work in self.getWorks():
        self.logger.debug("Starting %s on %s" % (str(work), self._task['tm_taskname']))
        t0 = time.time()
        try:
            output = work.execute(nextinput, task=self._task)
        except StopHandler as sh:
            msg = "Controlled stop of handler for %s on %s " % (self._task, str(sh))
            self.logger.error(msg)
            nextinput = Result(task=self._task, result='StopHandler exception received, controlled stop')
            break #exit normally. Worker will not notice there was an error
        except TaskWorkerException as twe:
            self.logger.debug(str(traceback.format_exc())) #print the stacktrace only in debug mode
            self.removeTaskLogHandler(taskhandler)
            raise WorkerHandlerException(str(twe)) #TaskWorker error, do not add traceback to the error propagated to the REST
        except Exception as exc:
            msg = "Problem handling %s because of %s failure, traceback follows\n" % (self._task['tm_taskname'], str(exc))
            msg += str(traceback.format_exc())
            self.logger.error(msg)
            self.removeTaskLogHandler(taskhandler)
            raise WorkerHandlerException(msg) #Errors not foreseen. Print everything!
        finally:
            #upload logfile of the task to the crabcache
            logpath = 'logs/tasks/%s/%s.log' % (self._task['tm_username'], self._task['tm_taskname'])
            if os.path.isfile(logpath) and 'user_proxy' in self._task: #the user proxy might not be there if myproxy retrieval failed
                cacheurldict = {'endpoint': self._task['tm_cache_url'], 'cert' : self._task['user_proxy'], 'key' : self._task['user_proxy']}
                try:
                    ufc = UserFileCache(cacheurldict)
                    logfilename = self._task['tm_taskname'] + '_TaskWorker.log'
                    ufc.uploadLog(logpath, logfilename)
                except HTTPException as hte:
                    msg = ("Failed to upload the logfile to %s for task %s. More details in the http headers and body:\n%s\n%s" %
                           (self._task['tm_cache_url'], self._task['tm_taskname'], hte.headers, hte.result))
                    self.logger.error(msg)
                except Exception as e:
                    msg = "Unknown error while uploading the logfile for task %s" % self._task['tm_taskname']
                    self.logger.exception(msg)
        t1 = time.time()
        self.logger.info("Finished %s on %s in %d seconds" % (str(work), self._task['tm_taskname'], t1-t0))
        try:
            nextinput = output.result
        except AttributeError:
            nextinput = output

    self.removeTaskLogHandler(taskhandler)

    return nextinput
Example 7: actionWork
def actionWork(self, *args, **kwargs):
    """Performing the set of actions"""
    nextinput = args

    taskhandler = self.addTaskLogHandler()

    # I know it looks like a duplicated printout from the process logs (proc.N.log) perspective.
    # In fact we have a similar printout in the processWorker function of the Worker module, but
    # it does not go to the task logfile and it is useful imho.
    self.logger.debug("Process %s is starting %s on task %s" % (self.procnum, self.workFunction, self._task['tm_taskname']))

    for work in self.getWorks():
        #Loop that iterates over the actions to be performed
        self.logger.debug("Starting %s on %s" % (str(work), self._task['tm_taskname']))
        t0 = time.time()
        try:
            output = work.execute(nextinput, task=self._task)
        except StopHandler as sh:
            msg = "Controlled stop of handler for %s on %s " % (self._task, str(sh))
            self.logger.error(msg)
            nextinput = Result(task=self._task, result='StopHandler exception received, controlled stop')
            break #exit normally. Worker will not notice there was an error
        except TaskWorkerException as twe:
            self.logger.debug(str(traceback.format_exc())) #print the stacktrace only in debug mode
            self.removeTaskLogHandler(taskhandler)
            raise WorkerHandlerException(str(twe)) #TaskWorker error, do not add traceback to the error propagated to the REST
        except Exception as exc:
            msg = "Problem handling %s because of %s failure, traceback follows\n" % (self._task['tm_taskname'], str(exc))
            msg += str(traceback.format_exc())
            self.logger.error(msg)
            self.removeTaskLogHandler(taskhandler)
            raise WorkerHandlerException(msg) #Errors not foreseen. Print everything!
        finally:
            #upload logfile of the task to the crabcache
            logpath = 'logs/tasks/%s/%s.log' % (self._task['tm_username'], self._task['tm_taskname'])
            if os.path.isfile(logpath) and 'user_proxy' in self._task: #the user proxy might not be there if myproxy retrieval failed
                cacheurldict = {'endpoint': self._task['tm_cache_url'], 'cert' : self._task['user_proxy'], 'key' : self._task['user_proxy']}
                try:
                    ufc = UserFileCache(cacheurldict)
                    logfilename = self._task['tm_taskname'] + '_TaskWorker.log'
                    ufc.uploadLog(logpath, logfilename)
                except HTTPException as hte:
                    msg = ("Failed to upload the logfile to %s for task %s. More details in the http headers and body:\n%s\n%s" %
                           (self._task['tm_cache_url'], self._task['tm_taskname'], hte.headers, hte.result))
                    self.logger.error(msg)
                except Exception:
                    msg = "Unknown error while uploading the logfile for task %s" % self._task['tm_taskname']
                    self.logger.exception(msg)
        t1 = time.time()
        self.logger.info("Finished %s on %s in %d seconds" % (str(work), self._task['tm_taskname'], t1 - t0))
        try:
            nextinput = output.result
        except AttributeError:
            nextinput = output

    self.removeTaskLogHandler(taskhandler)

    return nextinput
Example 8: __call__
def __call__(self):
    self.logger.info('Getting the tarball hash key')

    tarballdir = glob.glob(self.requestarea+'/inputs/*.tgz')
    if len(tarballdir) != 1:
        self.logger.info('%sError%s: Could not find tarball or there is more than one tarball'% (colors.RED, colors.NORMAL))
        raise ConfigurationException
    tarballdir = tarballdir[0]

    #checking task status
    self.logger.info('Checking task status')
    serverFactory = CRABClient.Emulator.getEmulator('rest')
    server = serverFactory(self.serverurl, self.proxyfilename, self.proxyfilename, version=__version__)
    dictresult, status, reason = server.get(self.uri, data = {'workflow': self.cachedinfo['RequestName'], 'verbose': 0})

    dictresult = dictresult['result'][0] #take just the significant part

    if status != 200:
        msg = "Problem retrieving task status:\ninput: %s\noutput: %s\nreason: %s" % (str(self.cachedinfo['RequestName']), str(dictresult), str(reason))
        raise RESTCommunicationException(msg)

    self.logger.info('Task status: %s' % dictresult['status'])
    accepstate = ['KILLED','FINISHED','FAILED','KILLFAILED', 'COMPLETED']
    if dictresult['status'] not in accepstate:
        msg = ('%sError%s: Only tasks with these status can be purged: {0}'.format(accepstate) % (colors.RED, colors.NORMAL))
        raise ConfigurationException(msg)

    #getting the cache url
    cacheresult = {}
    scheddresult = {}
    gsisshdict = {}
    if not self.options.scheddonly:
        baseurl = getUrl(self.instance, resource='info')
        cacheurl = server_info('backendurls', self.serverurl, self.proxyfilename, baseurl)
        cacheurl = cacheurl['cacheSSL']
        cacheurldict = {'endpoint': cacheurl, 'pycurl': True}

        ufc = UserFileCache(cacheurldict)
        hashkey = ufc.checksum(tarballdir)
        self.logger.info('Tarball hashkey: %s' % hashkey)
        self.logger.info('Attempting to remove task file from crab server cache')

        try:
            ufcresult = ufc.removeFile(hashkey)
        except HTTPException, re:
            if re.headers.has_key('X-Error-Info') and 'Not such file' in re.headers['X-Error-Info']:
                self.logger.info('%sError%s: Failed to find task file in crab server cache; the file might have been already purged' % (colors.RED, colors.NORMAL))
            raise HTTPException, re

        if ufcresult == '':
            self.logger.info('%sSuccess%s: Successfully removed task files from crab server cache' % (colors.GREEN, colors.NORMAL))
            cacheresult = 'SUCCESS'
        else:
            self.logger.info('%sError%s: Failed to remove task files from crab server cache' % (colors.RED, colors.NORMAL))
            cacheresult = 'FAILED'
Example 9: uploadPublishWorkflow
def uploadPublishWorkflow(config, workflow, ufcEndpoint, workDir):
    """
    Write out and upload to the UFC a JSON file
    with all the info needed to publish this dataset later
    """
    retok, proxyfile = getProxy(config, workflow.dn, workflow.vogroup, workflow.vorole)
    if not retok:
        logging.info("Cannot get the user's proxy")
        return False

    ufc = UserFileCache({'endpoint': ufcEndpoint, 'cert': proxyfile, 'key': proxyfile})

    # Skip tasks ending in LogCollect, they have nothing interesting.
    taskNameParts = workflow.task.split('/')
    if taskNameParts.pop() in ['LogCollect']:
        logging.info('Skipping LogCollect task')
        return False
    logging.info('Generating JSON for publication of %s of type %s' % (workflow.name, workflow.wfType))

    myThread = threading.currentThread()

    dbsDaoFactory = DAOFactory(package = "WMComponent.DBS3Buffer",
                               logger = myThread.logger, dbinterface = myThread.dbi)
    findFiles = dbsDaoFactory(classname = "LoadFilesByWorkflow")

    # Fetch and filter the files to the ones we actually need
    uploadDatasets = {}
    uploadFiles = findFiles.execute(workflowName = workflow.name)
    for file in uploadFiles:
        datasetName = file['datasetPath']
        if not uploadDatasets.has_key(datasetName):
            uploadDatasets[datasetName] = []
        uploadDatasets[datasetName].append(file)

    if not uploadDatasets:
        logging.info('No datasets found to upload.')
        return False

    # Write JSON file and then create tarball with it
    baseName = '%s_publish.tgz' % workflow.name
    jsonName = os.path.join(workDir, '%s_publish.json' % workflow.name)
    tgzName = os.path.join(workDir, baseName)
    with open(jsonName, 'w') as jsonFile:
        json.dump(uploadDatasets, fp=jsonFile, cls=FileEncoder, indent=2)

    # Only in 2.7 does tarfile become usable as context manager
    tgzFile = tarfile.open(name=tgzName, mode='w:gz')
    tgzFile.add(jsonName)
    tgzFile.close()

    result = ufc.upload(fileName=tgzName, name=baseName)
    logging.debug('Upload result %s' % result)
    # If this doesn't work, exception will propagate up and block archiving the task
    logging.info('Uploaded with name %s and hashkey %s' % (result['name'], result['hashkey']))
    return
Example 10: upload
def upload(self):
    """
    Upload the tarball to the File Cache
    """
    self.close()
    archiveName = self.tarfile.name
    serverUrl = ""
    self.logger.debug(" uploading archive to cache %s " % archiveName)
    ufc = UserFileCache({'endpoint' : self.config.JobType.filecacheurl})
    result = ufc.upload(archiveName)
    if 'hashkey' not in result:
        self.logger.error("Failed to upload source files: %s" % str(result))
        raise CachefileNotFoundException
    return self.config.JobType.filecacheurl, str(result['hashkey']) + '.tar.gz', self.checksum
Example 11: testUploadDownload
def testUploadDownload(self):
    if "UFCURL" in os.environ:
        currdir = getTestBase()
        upfile = path.join(currdir, "WMCore_t/Services_t/UserFileCache_t/test_file.tgz")  # file to upload
        upfileLog = path.join(currdir, "WMCore_t/Services_t/UserFileCache_t/uplog.txt")  # file to upload
        ufc = UserFileCache({"endpoint": os.environ["UFCURL"], "pycurl": True})
        # hashkey upload/download
        res = ufc.upload(upfile)
        ufc.download(res["hashkey"], output="pippo_publish_down.tgz")
        # hashkey deletion
        ufc.removeFile(res["hashkey"])
        # log upload/download
        res = ufc.uploadLog(upfileLog)
        ufc.downloadLog(upfileLog, upfileLog + ".downloaded")
        self.assertTrue(filecmp.cmp(upfileLog, upfileLog + ".downloaded"))
Example 12: testUploadDownload
def testUploadDownload(self):
    if 'UFCURL' in os.environ:
        currdir = getTestBase()
        upfile = path.join(currdir, 'WMCore_t/Services_t/UserFileCache_t/test_file.tgz') #file to upload
        upfileLog = path.join(currdir, 'WMCore_t/Services_t/UserFileCache_t/uplog.txt') #file to upload
        ufc = UserFileCache({'endpoint':os.environ['UFCURL'], 'pycurl': True})
        #hashkey upload/download
        res = ufc.upload(upfile)
        ufc.download(res['hashkey'], output='pippo_publish_down.tgz')
        #hashkey deletion
        ufc.removeFile(res['hashkey'])
        #log upload/download
        res = ufc.uploadLog(upfileLog)
        ufc.downloadLog(upfileLog, upfileLog+'.downloaded')
        self.assertTrue(filecmp.cmp(upfileLog, upfileLog+'.downloaded'))
Example 13: testChecksum
def testChecksum(self):
    """
    Tests checksum method
    """
    self.ufc = UserFileCache()
    checksum1 = self.ufc.checksum(fileName=path.join(getTestBase(), 'WMCore_t/Services_t/UserFileCache_t/ewv_crab_EwvAnalysis_31_111229_140959_publish.tgz'))
    checksum2 = self.ufc.checksum(fileName=path.join(getTestBase(), 'WMCore_t/Services_t/UserFileCache_t/ewv_crab_EwvAnalysis_31_resubmit_111229_144319_publish.tgz'))
    self.assertTrue(checksum1)
    self.assertTrue(checksum2)
    self.assertFalse(checksum1 == checksum2)
    self.assertRaises(IOError, self.ufc.checksum, **{'fileName': 'does_not_exist'})
    return
Example 14: testUploadDownload
def testUploadDownload(self):
    if "UFCURL" in os.environ:
        currdir = getTestBase()
        upfile = path.join(currdir, "WMCore_t/Services_t/UserFileCache_t/test_file.tgz")  # file to upload
        ufc = UserFileCache({"endpoint": os.environ["UFCURL"]})
        # named upload/download
        res = ufc.upload(upfile, "name_publish.tgz")
        ufc.download(name=res["name"], output="name_publish.tgz")
        # hashkey upload/download
        res = ufc.upload(upfile)
        ufc.download(res["hashkey"], output="pippo_publish_down.tgz")
Example 15: testUploadDownload
def testUploadDownload(self):
    if 'UFCURL' in os.environ:
        currdir = getTestBase()
        upfile = path.join(currdir, 'WMCore_t/Services_t/UserFileCache_t/test_file.tgz') #file to upload
        ufc = UserFileCache({'endpoint':os.environ['UFCURL']})
        #named upload/download
        res = ufc.upload(upfile, 'name_publish.tgz')
        ufc.download(name=res['name'], output='name_publish.tgz')
        #hashkey upload/download
        res = ufc.upload(upfile)
        ufc.download(res['hashkey'], output='pippo_publish_down.tgz')