This article collects typical usage examples of the Python method DIRAC.Interfaces.API.Job.Job.setOutputData. If you are wondering how Job.setOutputData is used in practice, the curated examples below may help; they also illustrate the enclosing class, DIRAC.Interfaces.API.Job.Job.
The following shows 9 code examples of Job.setOutputData, sorted by popularity by default.
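Before the examples, here is a minimal, self-contained sketch of a typical setOutputData call, assuming a configured DIRAC client and a valid grid proxy. The storage element name, output path, and file name are placeholders for illustration, not values taken from the examples below.
from DIRAC.Core.Base import Script
Script.parseCommandLine()

from DIRAC.Interfaces.API.Dirac import Dirac
from DIRAC.Interfaces.API.Job import Job

j = Job()
j.setName('setOutputData_demo')
# The job must actually create the file it declares as output data;
# here a trivial copy produces my_output.txt in the working directory.
j.setExecutable('/bin/cp', arguments='/etc/hostname my_output.txt')
# Files listed here are uploaded to the storage element and registered
# in the file catalogue under outputPath when the job completes.
j.setOutputData(['my_output.txt'],
                outputSE='MY-USER-SE',      # placeholder SE name
                outputPath='demo/run1/')    # placeholder LFN sub-path
j.setOutputSandbox(['StdOut', 'StdErr'])
print(Dirac().submit(j))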
Example 1: submitJob
# Required import: from DIRAC.Interfaces.API.Job import Job [as alias]
# Or: from DIRAC.Interfaces.API.Job.Job import setOutputData [as alias]
# (this snippet additionally assumes: from DIRAC.Interfaces.API.Dirac import Dirac)
def submitJob(jobPara):
    dirac = Dirac()
    j = Job()
    j.setName(jobPara['jobName'])
    j.setJobGroup(jobPara['jobGroup'])
    j.setExecutable(jobPara['jobScript'], logFile = jobPara['jobScriptLog'])
    j.setInputSandbox(jobPara['inputSandbox'])
    j.setOutputSandbox(jobPara['outputSandbox'])
    j.setOutputData(jobPara['outputData'], jobPara['SE'])
    j.setDestination(jobPara['sites'])
    j.setCPUTime(jobPara['CPUTime'])
    result = dirac.submit(j)
    if result['OK']:
        print 'Job %s submitted successfully. ID = %d' % (jobPara['jobName'], result['Value'])
    else:
        print 'Job %s submission failed' % jobPara['jobName']
    return result
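A call to the function above might look like the following. Every value in the dictionary is an illustrative placeholder (the keys are exactly the ones submitJob reads); the storage element and site names in particular are assumptions, not values from the original project.
jobPara = {
    'jobName': 'demo_job',
    'jobGroup': 'demo_group',
    'jobScript': 'run_analysis.sh',            # shipped via the input sandbox
    'jobScriptLog': 'run_analysis.log',
    'inputSandbox': ['run_analysis.sh', 'analysis_config.cfg'],
    'outputSandbox': ['StdOut', 'StdErr'],
    'outputData': ['result.root'],             # must be produced by the job
    'SE': 'MY-USER-SE',                        # placeholder storage element
    'sites': ['LCG.CERN.ch'],                  # placeholder destination site(s)
    'CPUTime': 86400,
}
result = submitJob(jobPara)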
Example 2: submit
# Required import: from DIRAC.Interfaces.API.Job import Job [as alias]
# Or: from DIRAC.Interfaces.API.Job.Job import setOutputData [as alias]
def submit(self, param):
    j = Job()
    j.setName(param['jobName'])
    j.setExecutable(param['jobScript'], logFile = param['jobScriptLog'])
    if self.site:
        j.setDestination(self.site)
    if self.jobGroup:
        j.setJobGroup(self.jobGroup)
    j.setInputSandbox(param['inputSandbox'])
    j.setOutputSandbox(param['outputSandbox'])
    j.setOutputData(param['outputData'], outputSE = self.outputSE, outputPath = self.outputPath)
    dirac = GridDirac()
    result = dirac.submit(j)
    status = {}
    status['submit'] = result['OK']
    if status['submit']:
        status['job_id'] = result['Value']
    return status
Example 3: basicTest
# Required import: from DIRAC.Interfaces.API.Job import Job [as alias]
# Or: from DIRAC.Interfaces.API.Job.Job import setOutputData [as alias]
def basicTest(self):
    j = Job()
    j.setCPUTime(50000)
    j.setExecutable('/Users/stuart/dirac/workspace/DIRAC3/DIRAC/Interfaces/API/test/myPythonScript.py')
    # j.setExecutable('/bin/echo hello')
    j.setOwner('paterson')
    j.setType('test')
    j.setName('MyJobName')
    #j.setAncestorDepth(1)
    j.setInputSandbox(['/Users/stuart/dirac/workspace/DIRAC3/DIRAC/Interfaces/API/test/DV.opts','/Users/stuart/dirac/workspace/DIRAC3/DIRAC/Interfaces/API/test/DV2.opts'])
    j.setOutputSandbox(['firstfile.txt','anotherfile.root'])
    j.setInputData(['/lhcb/production/DC04/v2/DST/00000742_00003493_11.dst',
                    '/lhcb/production/DC04/v2/DST/00000742_00003493_10.dst'])
    j.setOutputData(['my.dst','myfile.log'])
    j.setDestination('LCG.CERN.ch')
    j.setPlatform('LCG')
    j.setSystemConfig('x86_64-slc5-gcc43-opt')
    j.setSoftwareTags(['VO-lhcb-Brunel-v30r17','VO-lhcb-Boole-v12r10'])
    #print j._toJDL()
    #print j.printObj()
    xml = j._toXML()
    testFile = 'jobDescription.xml'
    if os.path.exists(testFile):
        os.remove(testFile)
    xmlfile = open(testFile,'w')
    xmlfile.write(xml)
    xmlfile.close()
    print '>>>>>>>>>>>>>>>>>>>>>>>>>>>>>>>>>>>>Creating code for the workflow'
    print j.createCode()
    print '>>>>>>>>>>>>>>>>>>>>>>>>>>>>>>>>>>>>Executing the workflow'
    j.execute()
    print '>>>>>>>>>>>>>>>>>>>>>>>>>>>>>>>>>>>>Trying to run the same workflow from generated XML file'
    workflow = fromXMLFile(testFile)
    code = workflow.createCode()
    print code
    workflow.execute()
Example 4: Dirac
# Required import: from DIRAC.Interfaces.API.Job import Job [as alias]
# Or: from DIRAC.Interfaces.API.Job.Job import setOutputData [as alias]
from DIRAC.Core.Base import Script
Script.parseCommandLine()
from DIRAC.Interfaces.API.Dirac import Dirac
from DIRAC.Interfaces.API.Job import Job
dirac = Dirac()
j = Job()
j.setName("compile_mrfilter")
j.setCPUTime(80)
j.setInputSandbox(["dirac_compile_mrfilter_pilot.sh"])
j.setExecutable("dirac_compile_mrfilter_pilot.sh", "")
j.setOutputData(["mr_filter"], outputSE=None,
                outputPath="cta/bin/mr_filter/v3_1/")
Dirac().submit(j)
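As a follow-up to this example, the uploaded file can later be retrieved through the Dirac API once the job has finished. This sketch is not part of the original snippet; the getJobOutputData call and its destinationDir argument are based on the DIRAC user interface and should be checked against the release in use.
from DIRAC.Core.Base import Script
Script.parseCommandLine()
from DIRAC.Interfaces.API.Dirac import Dirac

dirac = Dirac()
jobID = 12345678  # hypothetical ID returned by the submission above
# Download the job's registered output data into the current directory.
result = dirac.getJobOutputData(jobID, destinationDir='.')
if not result['OK']:
    print('Could not retrieve output data: {}'.format(result['Message']))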
Example 5: Dirac
# Required import: from DIRAC.Interfaces.API.Job import Job [as alias]
# Or: from DIRAC.Interfaces.API.Job.Job import setOutputData [as alias]
# As we're just copying the input sandbox to the storage element
# via OutputData, we'll just list the files as a check for the
# output written to StdOut.
j.setExecutable('/bin/ls -l')
# Here we add the names of the temporary copies of the frame data
# files in the dataset to the input sandbox. These will be uploaded
# to the grid with the job...
j.setInputSandbox(file_dict.keys())
#...and added to the desired storage element with the corresponding
# LFN via the job's OutputData. You may wish to change:
# * The Storage Element - by changing the outputSE parameter;
# * The LFN base name - by changing the outputPath parameter.
j.setOutputData(file_dict.keys(), \
                outputSE='%s' % (se), \
                outputPath='/%s/' % (gridoutdir)\
                )
# These are the files retrieved with the local job output.
j.setOutputSandbox(['StdOut', 'StdErr'])
# You can set your preferred site here.
j.setDestination(sitename)
## The DIRAC instance.
dirac = Dirac()
# # Submit the job and print the result.
# result = dirac.submit(j)
# print 'Submission result: ', result
Example 6: print
# Required import: from DIRAC.Interfaces.API.Job import Job [as alias]
# Or: from DIRAC.Interfaces.API.Job.Job import setOutputData [as alias]
# simple `ls` for good measure
j.setExecutable('ls', '-lh')
# if there is more than one file per job, merge the output tables
if window_sizes[i] > 1:
    for in_name, out_name in [('classified_events_wave', output_filename_wave),
                              ('classified_events_tail', output_filename_tail)]:
        j.setExecutable('./pilot.sh',
                        pilot_args_append.format(
                            in_name=in_name,
                            out_name=out_name))
print
print("OutputData: {}{}".format(output_path, output_filename_wave))
print("OutputData: {}{}".format(output_path, output_filename_tail))
j.setOutputData([output_filename_wave, output_filename_tail],
                outputSE=None, outputPath=output_path)
# check if we should somehow stop doing what we are doing
if "dry" in sys.argv:
    print("\nrunning dry -- not submitting")
    exit()
# this sends the job to the GRID and uploads all the
# files into the input sandbox in the process
print("\nsubmitting job")
print('Submission Result: {}\n'.format(dirac.submit(j)['Value']))
# break if this is only a test submission
if "test" in sys.argv:
    print("test run -- only submitting one job")
    exit()
Example 7: open
# Required import: from DIRAC.Interfaces.API.Job import Job [as alias]
# Or: from DIRAC.Interfaces.API.Job.Job import setOutputData [as alias]
j.setCPUTime(345600) ## 4 days
## download the script for preparing corsika input file for usage with cvmfs
j.setExecutable( 'curl', arguments = ' -fsSLkO http://issaf.spacescience.ro/adrian/AUGER/make_run4cvmfs',logFile='cmd_logs.log')
j.setExecutable( 'chmod', arguments = ' +x make_run4cvmfs',logFile='cmd_logs.log')
## create the simulation script configured for use with cvmfs
## set the make_run4cvmfs arguments to include the corsika_version and corsika_bin
make_run4cvmfs_arg = input_file_base + " " + corsika_version + " " + corsika_bin
j.setExecutable( './make_run4cvmfs', arguments = make_run4cvmfs_arg, logFile='cmd_logs.log')
## run simulation
j.setExecutable( './execsim',logFile='cmd_logs.log')
j.setOutputSandbox(output_files)
j.setOutputData(output_files, outputSE=se, outputPath=outdir)
##j.runLocal() ## test local
jobID = dirac.submit(j)
print 'Submission Result: ',jobID
## record the numeric job ID (jobID is the S_OK/S_ERROR dict returned by dirac.submit)
job_id = str(jobID['Value']) + "\n"
with open('jobids.list', 'a') as f_id_log:
    f_id_log.write(job_id)
Example 8: job
# Required import: from DIRAC.Interfaces.API.Job import Job [as alias]
# Or: from DIRAC.Interfaces.API.Job.Job import setOutputData [as alias]
# Set the name of the job (viewable in the web portal).
j.setName(jobname)
#
j.setExecutable('/bin/sh', arguments='%s %s %s' % ('run.sh', '/cvmfs/cernatschool.gridpp.ac.uk/grid-klustering-001-00-07/', 'process-frames.py'))
#
j.setInputSandbox(inputfiles)
#...and added to the desired storage element with the corresponding
# LFN via the job's OutputData. You may wish to change:
# * The Storage Element - by changing the outputSE parameter;
# * The LFN base name - by changing the outputPath parameter.
j.setOutputData(kluster_file_names, \
                outputSE='%s' % (se), \
                outputPath='/%s/' % (gridoutdir)\
                )
# These are the files retrieved with the local job output.
j.setOutputSandbox(['StdOut', 'StdErr', 'klusters.json', 'log_process_frames.log'])
# You can set your preferred site here.
j.setDestination(sitename)
## The DIRAC instance.
dirac = Dirac()
# # Submit the job and print the result.
# result = dirac.submit(j)
# print 'Submission result: ', result
Example 9: str
# Required import: from DIRAC.Interfaces.API.Job import Job [as alias]
# Or: from DIRAC.Interfaces.API.Job.Job import setOutputData [as alias]
## prepare the output location in GRID storage; the input path will be the used also for GRID storage
# outdir = grid_basedir_output + PROD_NAME + "/" + str(e_min) + "_" + str(e_max) + "/" + str(theta_min) + "_" + str(theta_max) + "/" + str(prmpar) + "/" + str(runnr)
# outdir = "/" + PROD_NAME + "/" + str(e_min) + "_" + str(e_max) + "/" + str(theta_min) + "_" + str(theta_max) + "/" + str(prmpar) + "/" + str(runnr)
outdir = "/" + PROD_NAME + "/" + str(e_min) + "/" + str(theta_min) + "/" + str(prmpar) + "/" + str(runnr)
print 'SE = ',se
lfns_list = []
if ( group == "auger_prod" ):
    base_output_path = prod_path
    ## add base directory to each file to have a list of lfns
    for f in output_files:
        lfn = "LFN:" + base_output_path + outdir + "/" + f
        lfns_list.append(lfn)
    j.setOutputData(lfns_list, outputSE=se)
    print 'Output - list of lfns :'
    pp.pprint (lfns_list)
else:
    ## base_output_path = PWD
    j.setOutputData(output_files, outputSE=se, outputPath=outdir)
    print 'Output files = ', output_files
    print 'outputPath = ', outdir
#####################
## PREPARE JOB ##
#####################
if (DO_NOT_SUBMIT):
    sys.exit(os.EX_USAGE)
### ALWAYS, INFO, VERBOSE, WARN, DEBUG