本文整理汇总了Python中Ganga.GPIDev.Lib.Job.Job类的典型用法代码示例。如果您正苦于以下问题:Python Job类的具体用法?Python Job怎么用?Python Job使用的例子?那么, 这里精选的类代码示例或许可以为您提供帮助。
在下文中一共展示了Job类的15个代码示例,这些例子默认根据受欢迎程度排序。您可以为喜欢或者感觉有用的代码点赞,您的评价将有助于系统推荐出更棒的Python代码示例。
示例1: test__common_submit
def test__common_submit(tmpdir, db):
    """Check DiracBase._common_submit behaviour.

    Covers three result shapes from the patched ``execute``:
    a failure (empty dict) raises BackendError and clears id/actualCE/status,
    a scalar 'Value' is stored as the DIRAC job id, and a list 'Value'
    triggers bulk-subjob setup.
    """
    from Ganga.Core import BackendError
    j = Job()
    j.id = 0
    j.backend = db
    db._parent = j

    # Write a submit script with the parametric-inputdata placeholder filled in.
    name = str(tmpdir.join('submit_script'))
    with open(name, 'w') as fd:
        fd.write(script_template.replace('###PARAMETRIC_INPUTDATA###', str([['a'], ['b']])))

    # Failure path: the attributes set below must be reset to None afterwards.
    with patch('GangaDirac.Lib.Backends.DiracBase.execute', return_value={}):
        db.id = 1234
        db.actualCE = 'test'
        db.status = 'test'
        with pytest.raises(BackendError):
            db._common_submit(name)
        assert db.id is None, 'id not None'
        assert db.actualCE is None, 'actualCE not None'
        assert db.status is None, 'status not None'

    # Success path with a single job id.
    with patch('GangaDirac.Lib.Backends.DiracBase.execute', return_value={'OK': True, 'Value': 12345}) as execute:
        assert db._common_submit(name)
        execute.assert_called_once_with("execfile('%s')" % name)
        assert db.id == 12345, 'id not set'

    # A list of ids means a parametric submission: bulk-subjob setup is invoked.
    with patch('GangaDirac.Lib.Backends.DiracBase.execute', return_value={'OK': True, 'Value': [123, 456]}):
        with patch.object(db, '_setup_bulk_subjobs') as _setup_bulk_subjobs:
            db._common_submit(name)
            _setup_bulk_subjobs.assert_called_once_with([123, 456], name)
示例2: test__common_submit
def test__common_submit(tmpdir, db, mocker):
    """Check DiracBase._common_submit with the credential-aware execute API.

    Same three scenarios as the legacy variant, but ``execute`` raises
    GangaDiracError on failure and returns bare values (not result dicts),
    and each call carries a ``cred_req`` keyword argument.
    """
    mocker.patch('Ganga.GPIDev.Credentials.credential_store')
    from Ganga.Core import BackendError
    j = Job()
    j.id = 0
    j.backend = db
    db._parent = j

    # Write a submit script with the parametric-inputdata placeholder filled in.
    name = str(tmpdir.join('submit_script'))
    with open(name, 'w') as fd:
        fd.write(script_template.replace('###PARAMETRIC_INPUTDATA###', str([['a'], ['b']])))

    # Failure path: the attributes set below must be reset to None afterwards.
    with patch('GangaDirac.Lib.Backends.DiracBase.execute', side_effect=GangaDiracError('test Exception')):
        db.id = 1234
        db.actualCE = 'test'
        db.status = 'test'
        with pytest.raises(BackendError):
            db._common_submit(name)
        assert db.id is None, 'id not None'
        assert db.actualCE is None, 'actualCE not None'
        assert db.status is None, 'status not None'

    # Success path with a single job id.
    with patch('GangaDirac.Lib.Backends.DiracBase.execute', return_value=12345) as execute:
        assert db._common_submit(name)
        execute.assert_called_once_with("execfile('%s')" % name, cred_req=mocker.ANY)
        assert db.id == 12345, 'id not set'

    # A list of ids means a parametric submission: bulk-subjob setup is invoked.
    with patch('GangaDirac.Lib.Backends.DiracBase.execute', return_value=[123, 456]):
        with patch.object(db, '_setup_bulk_subjobs') as _setup_bulk_subjobs:
            db._common_submit(name)
            _setup_bulk_subjobs.assert_called_once_with([123, 456], name)
示例3: test_reset
def test_reset(db):
    """Check DiracBase.reset.

    Jobs in disallowed states keep their status; a resettable job goes back
    to 'submitted'; subjobs are only reset when doSubjobs=True, and completed
    subjobs are never reset.
    """
    j = Job()
    j.id = 0
    j.backend = db
    db._parent = j
    db.getJobObject().subjobs = [Job(), Job()]
    for subjob in db.getJobObject().subjobs:
        subjob.backend = db
    for j in db.getJobObject().subjobs:
        j.status = 'completing'

    # Jobs in these states must be left untouched by reset().
    disallowed_status = ['submitting', 'killed']
    for status in disallowed_status:
        db.getJobObject().status = status
        db.reset()
        assert db.getJobObject().status == status, "status shouldn't have changed"

    # A 'completing' job is resettable; subjobs stay as they are by default.
    db.getJobObject().status = 'completing'
    db.reset()
    assert db.getJobObject().status == 'submitted', "didn't reset job"
    assert [j.status for j in db.getJobObject().subjobs] != ['submitted', 'submitted'], 'subjobs not reset properly'

    db.reset(doSubjobs=True)
    assert [j.status for j in db.getJobObject().subjobs] == ['submitted', 'submitted'], 'subjobs not reset properly'

    # Completed subjobs must not be reset, even with doSubjobs=True.
    for j in db.getJobObject().subjobs:
        j.status = 'completed'
    db.reset(doSubjobs=True)
    assert [j.status for j in db.getJobObject().subjobs] != ['submitted', 'submitted'], 'subjobs not supposed to reset'
示例4: test_getOutputDataLFNs
def test_getOutputDataLFNs(db):
    """Check DiracBase.getOutputDataLFNs.

    The patched outputfiles_iterator yields files with and without LFNs;
    only non-empty LFNs must be returned, for the master job alone and then
    aggregated across three subjobs.
    """
    j = Job()
    j.id = 0
    j.backend = db
    db._parent = j

    class TestFile(object):
        # Minimal stand-in for a DiracFile: only the .lfn attribute is read.
        def __init__(self, lfn):
            self.lfn = lfn

    def fake_outputfiles_iterator(job, file_type):
        # `subjob` is a closure variable toggled below to assert the
        # master/subjob relationship of whichever job is being iterated.
        assert isinstance(job, Job)
        if subjob:
            assert job.master is not None
        else:
            assert job.master is None
        assert file_type == DiracFile
        return [TestFile('a'), TestFile(''),
                TestFile('b'), TestFile(''),
                TestFile('c'), TestFile('')]

    with patch('GangaDirac.Lib.Backends.DiracBase.outputfiles_iterator', fake_outputfiles_iterator):
        subjob = False
        assert db.getOutputDataLFNs() == ['a', 'b', 'c']

        # With three subjobs attached, the LFN lists are concatenated.
        j.subjobs = [Job(), Job(), Job()]
        for sj in j.subjobs:
            sj._setParent(j)
        subjob = True
        assert db.getOutputDataLFNs() == ['a', 'b', 'c'] * 3
示例5: createSubjob
def createSubjob(self, job):
    """Create a new subjob by copying the master job and setting all fields correctly.

    The copy's splitter is cleared so the subjob is not split again, and its
    input sandbox is emptied.
    """
    from Ganga.GPIDev.Lib.Job import Job
    j = Job()
    j.copyFrom(job)
    j.splitter = None
    # FIXME:
    j.inputsandbox = []
    return j
示例6: _create_subjob
def _create_subjob(self, job, dataset):
    """Create one subjob of *job* holding *dataset* as its input data.

    *dataset* may be an LHCbDataset (DiracFile entries only), a list/GangaList
    of file names or IGangaFile objects, or a single LFN string. Any other
    type raises GangaException. The subjob is a copy of the master with
    splitter/subjobs/inputdata/inputsandbox/inputfiles excluded, and its
    inputdata set to an LHCbDataset built from the collected files.
    """
    logger.debug("_create_subjob")
    datatmp = []
    logger.debug("dataset size: %s" % str(len(dataset)))
    from GangaLHCb.Lib.LHCbDataset.LHCbDataset import LHCbDataset
    if isinstance(dataset, LHCbDataset):
        # Only DiracFile entries are accepted from an LHCbDataset.
        for i in dataset:
            if isType(i, DiracFile):
                datatmp.append(i)
            else:
                logger.error("Unkown file-type %s, cannot perform split with file %s" % (type(i), str(i)))
                from Ganga.Core.exceptions import GangaException
                raise GangaException("Unkown file-type %s, cannot perform split with file %s" % (type(i), str(i)))
    elif type(dataset) == type([]) or isType(dataset, GangaList()):
        for this_file in dataset:
            if type(this_file) is str:
                # Plain strings are converted via the registered file filter.
                datatmp.append(allComponentFilters['gangafiles'](this_file, None))
            elif isType(this_file, IGangaFile):
                datatmp.append(this_file)
            else:
                logger.error("Unexpected type: %s" % str(type(this_file)))
                # Fixed: the original format string had two %s placeholders
                # but only one argument, which itself raised a TypeError.
                logger.error("Wanted object to inherit from type: %s" % str(IGangaFile))
                from Ganga.Core.exceptions import GangaException
                x = GangaException("Unknown(unexpected) file object: %s" % this_file)
                raise x
    elif type(dataset) is str:
        # A bare string is treated as a single LFN.
        datatmp.append(DiracFile(lfn=dataset))
    else:
        logger.error("Unkown dataset type, cannot perform split here")
        from Ganga.Core.exceptions import GangaException
        logger.error("Dataset found: " + str(dataset))
        raise GangaException("Unkown dataset type, cannot perform split here")

    logger.debug("Creating new Job in Splitter")
    j = Job()
    logger.debug("Copying From Job")
    j.copyFrom(stripProxy(job), ['splitter', 'subjobs', 'inputdata', 'inputsandbox', 'inputfiles'])
    logger.debug("Unsetting Splitter")
    j.splitter = None
    logger.debug("Setting InputData")
    j.inputdata = LHCbDataset(files=datatmp[:],
                              persistency=self.persistency,
                              depth=self.depth)
    logger.debug("Returning new subjob")
    return j
示例7: split
def split(self, job):
    """Return one subjob copy of *job* per backend in self.backs.

    If self.fail is 'exception', deliberately raise instead of splitting
    (used to exercise failure handling).
    """
    from Ganga.GPIDev.Lib.Job import Job
    subjobs = []
    if self.fail == 'exception':
        x = 'triggered failure during splitting'
        raise Exception(x)
    for b in self.backs:
        j = Job()
        j.copyFrom(job)
        j.backend = b
        subjobs.append(j)
    return subjobs
示例8: __make_subjob__
def __make_subjob__(self, mj, guids, names, sjob_evnts=-1, sites=None):
    """Private method to create a subjob object.

    Copies name, inputdata, outputdata, application, backend and sandboxes
    from master job *mj*. For ''/'DQ2' inputdata types the given *guids* and
    *names* are assigned; *sjob_evnts* (if not -1) caps the application's
    max_events; *sites* is applied only for LCG backends with
    AtlasLCGRequirements.
    """
    logger.debug('generating subjob to run %d events in-total on files: %s' % (sjob_evnts, repr(guids)))
    j = Job()
    j.name = mj.name
    j.inputdata = mj.inputdata
    if j.inputdata.type in ['', 'DQ2']:
        j.inputdata.guids = guids
        j.inputdata.names = names
    j.outputdata = mj.outputdata
    j.application = mj.application
    if sjob_evnts != -1:
        j.application.max_events = sjob_evnts
    j.backend = mj.backend
    # An explicit site list is only meaningful for LCG + ATLAS requirements.
    if j.backend._name in ['LCG'] and j.backend.requirements._name == 'AtlasLCGRequirements':
        if sites:
            j.backend.requirements.sites = sites
    j.inputsandbox = mj.inputsandbox
    j.outputsandbox = mj.outputsandbox
    return j
示例9: _create_subjob
def _create_subjob(self, job, inputdata):
    """Return a copy of *job* configured as a subjob holding *inputdata*.

    The splitter and merger are cleared and the input sandbox/files emptied
    (the master's sandbox is added automatically).
    """
    j = Job()
    j.copyFrom(job)
    j.splitter = None
    j.merger = None
    j.inputsandbox = []  # master added automatically
    j.inputfiles = []
    j.inputdata = inputdata
    return j
示例10: split
def split(self, job):
    """Split *job* into one subjob per partition number in self.subjobs.

    Each subjob gets the transform's partition data, the master's output
    data/application/backend/sandboxes, an OUTPUT_FILE_NUMBER environment
    entry, an optional partition site, and a fresh application id from the
    transform. The master's tasks_id is normalised with a '00:' prefix.
    """
    from Ganga.GPIDev.Lib.Job import Job
    logger.debug("AnaTaskSplitterJob split called")
    sjl = []
    transform = stripProxy(job.application.getTransform())
    transform.setAppStatus(job.application, "removed")
    # Do the splitting
    for sj in self.subjobs:
        j = Job()
        j.inputdata = transform.partitions_data[sj - 1]
        j.outputdata = job.outputdata
        j.application = job.application
        j.application.atlas_environment.append("OUTPUT_FILE_NUMBER=%i" % sj)
        j.backend = job.backend
        if transform.partitions_sites:
            # Requirements-based backends take a site list; others a single site.
            if hasattr(j.backend.requirements, 'sites'):
                j.backend.requirements.sites = transform.partitions_sites[sj - 1]
            else:
                j.backend.site = transform.partitions_sites[sj - 1]
        j.inputsandbox = job.inputsandbox
        j.outputsandbox = job.outputsandbox
        sjl.append(j)
        # Task handling
        j.application.tasks_id = job.application.tasks_id
        j.application.id = transform.getNewAppID(sj)
        #transform.setAppStatus(j.application, "submitting")
    if not job.application.tasks_id.startswith("00"):
        job.application.tasks_id = "00:%s" % job.application.tasks_id
    return sjl
示例11: test_getOutputSandbox
def test_getOutputSandbox(db):
    """Check DiracBase.getOutputSandbox.

    With a default destination it uses the job's output workspace path, an
    explicit directory overrides it, and a failing execute returns falsy.
    """
    j = Job()
    j.id = 0
    j.backend = db
    db._parent = j
    db.id = 1234

    # Default destination: the job's output workspace.
    temp_dir = j.getOutputWorkspace().getPath()
    with patch('GangaDirac.Lib.Backends.DiracBase.execute', return_value={'OK': True}) as execute:
        assert db.getOutputSandbox(), "didn't run"
        execute.assert_called_once_with("getOutputSandbox(1234,'%s')" % temp_dir)

    # Explicit destination directory.
    test_dir = 'test_dir'
    with patch('GangaDirac.Lib.Backends.DiracBase.execute', return_value={'OK': True}) as execute:
        assert db.getOutputSandbox(test_dir), "didn't run with modified dir"
        execute.assert_called_once_with("getOutputSandbox(1234,'%s')" % test_dir)

    # No result from execute: the call must fail gracefully.
    with patch('GangaDirac.Lib.Backends.DiracBase.execute') as execute:
        assert not db.getOutputSandbox(test_dir), "didn't fail gracefully"
        execute.assert_called_once()
示例12: test_removeOutputData
def test_removeOutputData(db):
    """Check DiracBase.removeOutputData.

    The patched outputfiles_foreach asserts it receives the right job,
    the DiracFile type, and a function that calls remove() on each file —
    first for the master job alone, then with subjobs attached.
    """
    from GangaDirac.Lib.Files.DiracFile import DiracFile
    j = Job()
    j.id = 0
    j.backend = db
    db._parent = j

    class TestFile(object):
        # Minimal stand-in whose remove() returns a sentinel value.
        def __init__(self):
            pass

        def remove(self):
            return 27

    def fake_outputfiles_foreach(job, file_type, func):
        import types
        # `subjob` is a closure variable toggled below to assert the
        # master/subjob relationship of whichever job is being visited.
        assert isinstance(job, Job)
        if subjob:
            assert job.master is not None
        else:
            assert job.master is None
        assert file_type == DiracFile
        assert isinstance(func, types.FunctionType)
        assert func(TestFile()) == 27, "Didn't call remove function"

    with patch('GangaDirac.Lib.Backends.DiracBase.outputfiles_foreach', fake_outputfiles_foreach):
        subjob = False
        assert db.removeOutputData() is None

        j.subjobs = [Job(), Job(), Job()]
        for sj in j.subjobs:
            sj._setParent(j)
        subjob = True
        assert db.removeOutputData() is None
示例13: test_getOutputSandbox
def test_getOutputSandbox(db, mocker):
    """Check DiracBase.getOutputSandbox with the credential-aware execute API.

    Same scenarios as the legacy variant, but execute returns bare booleans,
    carries a cred_req keyword, and raises GangaDiracError on failure.
    """
    mocker.patch('Ganga.GPIDev.Credentials.credential_store')
    j = Job()
    j.id = 0
    j.backend = db
    db._parent = j
    db.id = 1234

    # Default destination: the job's output workspace.
    temp_dir = j.getOutputWorkspace().getPath()
    with patch('GangaDirac.Lib.Backends.DiracBase.execute', return_value=True) as execute:
        assert db.getOutputSandbox(), "didn't run"
        execute.assert_called_once_with("getOutputSandbox(1234,'%s')" % temp_dir, cred_req=mocker.ANY)

    # Explicit destination directory.
    test_dir = 'test_dir'
    with patch('GangaDirac.Lib.Backends.DiracBase.execute', return_value=True) as execute:
        assert db.getOutputSandbox(test_dir), "didn't run with modified dir"
        execute.assert_called_once_with("getOutputSandbox(1234,'%s')" % test_dir, cred_req=mocker.ANY)

    # A DIRAC error must be handled gracefully, returning falsy.
    with patch('GangaDirac.Lib.Backends.DiracBase.execute', side_effect=GangaDiracError('test Exception')) as execute:
        assert not db.getOutputSandbox(test_dir), "didn't fail gracefully"
        execute.assert_called_once()
示例14: _create_subjob
def _create_subjob(self, job, dataset):
    """Return a copy of *job* configured as a subjob whose inputdata is a
    GaudiDataset built from *dataset*.

    The splitter and merger are cleared and the input sandbox emptied
    (the master's sandbox is added automatically).
    """
    logger.debug("_create_subjob")
    j = Job()
    j.copyFrom(job)
    j.splitter = None
    j.merger = None
    j.inputsandbox = []  # master added automatically
    j.inputdata = GaudiDataset(files=dataset)
    return j
示例15: create_gaudi_subjob
def create_gaudi_subjob(job, inputdata):
    """Build a subjob of *job* carrying *inputdata*.

    The application is copied via copy_app; the backend is shared (no
    deepcopy). If the application has an 'extra' attribute its inputdata is
    kept in sync — set to the subjob's dataset, or to an empty BesDataset
    when no inputdata is given.
    """
    j = Job()
    j.name = job.name
    j.application = copy_app(job.application)
    j.backend = job.backend  # no need to deepcopy
    if inputdata:
        j.inputdata = inputdata
        if hasattr(j.application, 'extra'):
            j.application.extra.inputdata = j.inputdata
    else:
        j.inputdata = None
        if hasattr(j.application, 'extra'):
            j.application.extra.inputdata = BesDataset()
    j.outputsandbox = job.outputsandbox[:]
    j.outputdata = job.outputdata
    return j