本文整理汇总了Python中Ganga.Lib.LCG.Grid.__get_lfc_host__方法的典型用法代码示例。如果您正苦于以下问题:Python Grid.__get_lfc_host__方法的具体用法?Python Grid.__get_lfc_host__怎么用?Python Grid.__get_lfc_host__使用的例子?那么恭喜您, 这里精选的方法代码示例或许可以为您提供帮助。您也可以进一步了解该方法所在类Ganga.Lib.LCG.Grid
的用法示例。
在下文中一共展示了Grid.__get_lfc_host__方法的3个代码示例,这些例子默认根据受欢迎程度排序。您可以为喜欢或者感觉有用的代码点赞,您的评价将有助于系统推荐出更棒的Python代码示例。
示例1: __setup_sandboxcache__
# 需要导入模块: from Ganga.Lib.LCG import Grid [as 别名]
# 或者: from Ganga.Lib.LCG.Grid import __get_lfc_host__ [as 别名]
def __setup_sandboxcache__(self, job):
    """Adopt the LCG backend runtime configuration into the sandbox cache object.

    Copies the transfer timeout from the backend config and, for an
    LCGSandboxCache, resolves the LFC host, the default storage element
    (optionally carrying a ``token:<token>:<se>`` prefix) and the SRM
    space token.  Always returns True.
    """
    # Pattern for a DefaultSE of the form "token:<srm_token>:<se_host>".
    token_pattern = re.compile('^token:(.*):(.*)$')

    self.sandboxcache.timeout = config['SandboxTransferTimeout']

    if self.sandboxcache._name == 'LCGSandboxCache':
        # Fall back to the grid-environment LFC host when none was set.
        if not self.sandboxcache.lfc_host:
            self.sandboxcache.lfc_host = Grid.__get_lfc_host__()

        if not self.sandboxcache.se:
            srm_token = ''
            se_host = config['DefaultSE']
            matched = token_pattern.match(se_host)
            if matched:
                # Split the token prefix off the SE specification.
                srm_token, se_host = matched.group(1), matched.group(2)
            self.sandboxcache.se = se_host
            if srm_token:
                self.sandboxcache.srm_token = srm_token

        # srmv2 endpoints require a space token; supply the configured default.
        if (self.sandboxcache.se_type in ['srmv2']) and (not self.sandboxcache.srm_token):
            self.sandboxcache.srm_token = config['DefaultSRMToken']

    return True
示例2: __setup_sandboxcache__
# 需要导入模块: from Ganga.Lib.LCG import Grid [as 别名]
# 或者: from Ganga.Lib.LCG.Grid import __get_lfc_host__ [as 别名]
def __setup_sandboxcache__(self, job):
    """Adopt the LCG backend runtime configuration into the sandbox cache object.

    Configures the cache's VO and transfer timeout, then applies
    cache-type-specific setup:

    * ``LCGSandboxCache``    -- resolve LFC host, default SE (with optional
      ``token:<token>:<se>`` prefix) and SRM space token.
    * ``DQ2SandboxCache``    -- generate a dataset name if missing and
      propagate it to all subjobs.
    * ``GridftpSandboxCache`` -- derive the gsiftp base URI from config or
      from the CREAM CE host.

    Returns True on success, False when no base URI can be determined for
    a GridftpSandboxCache.
    """
    # Pattern for a DefaultSE of the form "token:<srm_token>:<se_host>".
    token_pattern = re.compile('^token:(.*):(.*)$')

    self.sandboxcache.vo = config['VirtualOrganisation']
    self.sandboxcache.timeout = config['SandboxTransferTimeout']

    if self.sandboxcache._name == 'LCGSandboxCache':
        # Fall back to the grid-environment LFC host when none was set.
        if not self.sandboxcache.lfc_host:
            self.sandboxcache.lfc_host = Grid.__get_lfc_host__()

        if not self.sandboxcache.se:
            srm_token = ''
            se_host = config['DefaultSE']
            matched = token_pattern.match(se_host)
            if matched:
                # Split the token prefix off the SE specification.
                srm_token, se_host = matched.group(1), matched.group(2)
            self.sandboxcache.se = se_host
            if srm_token:
                self.sandboxcache.srm_token = srm_token

        # srmv2 endpoints require a space token; supply the configured default.
        if (self.sandboxcache.se_type in ['srmv2']) and (not self.sandboxcache.srm_token):
            self.sandboxcache.srm_token = config['DefaultSRMToken']

    elif self.sandboxcache._name == 'DQ2SandboxCache':
        # Generate a new dataset name when none was given.
        if not self.sandboxcache.dataset_name:
            from GangaAtlas.Lib.ATLASDataset.DQ2Dataset import dq2outputdatasetname
            self.sandboxcache.dataset_name, _unused = dq2outputdatasetname(
                "%s.input" % get_uuid(), 0, False, '')
        # Subjobs inherit the dataset name from the master job.
        for subjob in job.subjobs:
            subjob.backend.sandboxcache.dataset_name = self.sandboxcache.dataset_name

    elif self.sandboxcache._name == 'GridftpSandboxCache':
        if config['CreamInputSandboxBaseURI']:
            self.sandboxcache.baseURI = config['CreamInputSandboxBaseURI']
        elif self.CE:
            # Strip the ":port" suffix from the CE endpoint to get the host.
            ce_host = re.sub(r'\:[0-9]+', '', self.CE.split('/cream')[0])
            self.sandboxcache.baseURI = 'gsiftp://%s/opt/glite/var/cream_sandbox/%s' % (
                ce_host, self.sandboxcache.vo)
        else:
            logger.error('baseURI not available for GridftpSandboxCache')
            return False

    return True
示例3: __check_and_prestage_inputfile__
# 需要导入模块: from Ganga.Lib.LCG import Grid [as 别名]
# 或者: from Ganga.Lib.LCG.Grid import __get_lfc_host__ [as 别名]
def __check_and_prestage_inputfile__(self, file):
    '''Checks the given input file size and if its size is
    over "BoundSandboxLimit", prestages it to a grid SE.

    The argument is a path of the local file.

    It returns a dictionary containing information to refer to the file:

        idx = {'lfc_host': lfc_host,
               'local': [the local file paths],
               'remote': {'fname1': 'remote index1', 'fname2': 'remote index2', ... }
              }

    If prestaging failed, None object is returned.

    If the file has been previously uploaded (according to md5sum),
    the prestaging is ignored and index to the previously uploaded file
    is returned.
    '''
    idx = {'lfc_host': '', 'local': [], 'remote': {}}

    job = self.getJobObject()

    # Collect files already uploaded, starting with the master job's cache.
    known_uploads = []
    if job.master:
        known_uploads += job.master.backend.sandboxcache.get_cached_files()

    # Set and get the $LFC_HOST for uploading oversized sandbox.
    self.__setup_sandboxcache__(job)
    known_uploads += self.sandboxcache.get_cached_files()

    # For LCGSandboxCache, take the one specified in the sandboxcache object.
    # The value is exactly the same as the one from the local grid shell env.
    # if it is not specified exclusively.
    lfc_host = None
    if self.sandboxcache._name == 'LCGSandboxCache':
        lfc_host = self.sandboxcache.lfc_host

    # Or in general, query it from the Grid object.
    if not lfc_host:
        lfc_host = Grid.__get_lfc_host__()

    idx['lfc_host'] = lfc_host

    local_path = os.path.abspath(file)

    if os.path.getsize(local_path) > config['BoundSandboxLimit']:
        checksum = get_md5sum(local_path, ignoreGzipTimestamp=True)

        # Reuse a previous upload of the identical file when one exists.
        matched = None
        for cached in known_uploads:
            if cached.md5sum == checksum:
                matched = cached
                break

        if matched is not None:
            # The same file has been uploaded to the iocache already.
            idx['remote'][os.path.basename(file)] = matched.id
        else:
            logger.warning(
                'The size of %s is larger than the sandbox limit (%d byte). Please wait while pre-staging ...' % (file, config['BoundSandboxLimit']))
            if self.sandboxcache.upload([local_path]):
                remote_sandbox = self.sandboxcache.get_cached_files()[-1]
                idx['remote'][remote_sandbox.name] = remote_sandbox.id
            else:
                logger.error(
                    'Oversized sandbox not successfully pre-staged')
                return None
    else:
        # Small enough to travel in the ordinary input sandbox.
        idx['local'].append(local_path)

    return idx