This article collects typical usage examples of the pandatools.Client.putFile method in Python. If you are wondering what Client.putFile does, how to call it, or what real-world usage looks like, the curated code examples here may help. You can also explore further usage examples of the containing class, pandatools.Client.
The section below shows 1 code example of the Client.putFile method, sorted by popularity by default. You can upvote examples you like or find useful; your feedback helps the system recommend better Python code examples.
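Before the full example, a minimal sketch of the call itself may help. It is based purely on how the example below uses the method: it assumes Client.putFile takes the name of a file reachable from the current working directory and returns a (status, output) pair in which output is the string 'True' on success. The directory and archive names are hypothetical placeholders.

# Minimal sketch, assuming the (status, output) return convention seen in
# the example below; workdir and archive are hypothetical placeholders.
import os
from pandatools import Client

workdir = '/tmp/my_job_workspace'    # hypothetical directory containing the tarball
archive = 'sources.example.tar.gz'   # hypothetical source archive to upload

cwd = os.getcwd()
try:
    os.chdir(workdir)                # the example below also calls putFile with a bare file name
    status, output = Client.putFile(archive)
    if output != 'True':
        raise RuntimeError('putFile failed: status=%s, output=%s' % (status, output))
finally:
    os.chdir(cwd)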
Example 1: master_prepare
# Required import: from pandatools import Client [as alias]
# Or: from pandatools.Client import putFile [as alias]
def master_prepare(self,app,appmasterconfig):

    # PandaTools
    from pandatools import Client
    from pandatools import AthenaUtils
    from taskbuffer.JobSpec import JobSpec
    from taskbuffer.FileSpec import FileSpec

    job = app._getParent()
    logger.debug('AthenaMCPandaRTHandler master_prepare called for %s', job.getFQID('.'))

    usertag = configDQ2['usertag']
    #usertag='user09'
    nickname = getNickname(allowMissingNickname=True)

    self.libDataset = '%s.%s.ganga.%s_%d.lib._%06d' % (usertag,nickname,commands.getoutput('hostname').split('.')[0],int(time.time()),job.id)
    # self.userprefix='%s.%s.ganga' % (usertag,gridProxy.identity())
    sources = 'sources.%s.tar.gz' % commands.getoutput('uuidgen 2> /dev/null')
    self.library = '%s.lib.tgz' % self.libDataset

    # check DBRelease
    # if job.backend.dbRelease != '' and job.backend.dbRelease.find(':') == -1:
    #     raise ApplicationConfigurationError(None,"ERROR : invalid argument for backend.dbRelease. Must be 'DatasetName:FileName'")

    # unpack library
    logger.debug('Creating source tarball ...')
    tmpdir = '/tmp/%s' % commands.getoutput('uuidgen 2> /dev/null')
    os.mkdir(tmpdir)

    inputbox=[]
    if os.path.exists(app.transform_archive):
        # must add a condition on size.
        inputbox += [ File(app.transform_archive) ]
    if app.evgen_job_option:
        self.evgen_job_option=app.evgen_job_option
        if os.path.exists(app.evgen_job_option):
            # locally modified job option file to add to the input sand box
            inputbox += [ File(app.evgen_job_option) ]
            self.evgen_job_option=app.evgen_job_option.split("/")[-1]

    # add input sandbox files
    if (job.inputsandbox):
        for file in job.inputsandbox:
            inputbox += [ file ]
    # add option files
    for extFile in job.backend.extOutFile:
        try:
            shutil.copy(extFile,tmpdir)
        except IOError:
            os.makedirs(tmpdir)
            shutil.copy(extFile,tmpdir)
    # fill the archive
    for opt_file in inputbox:
        try:
            shutil.copy(opt_file.name,tmpdir)
        except IOError:
            os.makedirs(tmpdir)
            shutil.copy(opt_file.name,tmpdir)
    # now tar it up again
    inpw = job.getInputWorkspace()
    rc, output = commands.getstatusoutput('tar czf %s -C %s .' % (inpw.getPath(sources),tmpdir))
    if rc:
        logger.error('Packing sources failed with status %d',rc)
        logger.error(output)
        raise ApplicationConfigurationError(None,'Packing sources failed.')
    shutil.rmtree(tmpdir)

    # upload sources
    logger.debug('Uploading source tarball ...')
    try:
        cwd = os.getcwd()
        os.chdir(inpw.getPath())
        rc, output = Client.putFile(sources)
        if output != 'True':
            logger.error('Uploading sources %s failed. Status = %d', sources, rc)
            logger.error(output)
            raise ApplicationConfigurationError(None,'Uploading archive failed')
    finally:
        os.chdir(cwd)

    # Use Panda's brokerage
##    if job.inputdata and len(app.sites)>0:
##        # update cloud, use inputdata's
##        from dq2.info.TiersOfATLAS import whichCloud,ToACache
##        inclouds=[]
##        for site in app.sites:
##            cloudSite=whichCloud(app.sites[0])
##            if cloudSite not in inclouds:
##                inclouds.append(cloudSite)
##        # now converting inclouds content into proper brokering stuff.
##        outclouds=[]
##        for cloudSite in inclouds:
##            for cloudID, eachCloud in ToACache.dbcloud.iteritems():
##                if cloudSite==eachCloud:
##                    cloud=cloudID
##                    outclouds.append(cloud)
##                    break
#......... remainder of the code omitted .........
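A note on the pattern above: the handler stages everything it wants to ship into a temporary directory, tars that directory into the job's input workspace, and only then uploads the tarball. Because Client.putFile is given just the file name (sources), the code changes into the workspace directory first and restores the original working directory in a finally block; success is detected by comparing the returned output against the string 'True', and anything else is logged and raised as an ApplicationConfigurationError.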