This page collects typical usage examples of the Python method radical.entk.Task.cores. If you are unsure what Task.cores does or how to use it, the curated example code below may help. You can also read further about its containing class, radical.entk.Task.
Eleven code examples of Task.cores are shown below, sorted by popularity by default. You can upvote the examples you like or find useful; your feedback helps the system recommend better Python code samples.
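Before the examples, here is a minimal sketch of the pattern they all follow, assuming the older radical.entk API in which a Task exposes cores and mpi attributes (newer releases use a cpu_reqs dictionary instead). The executable path, arguments and core count below are placeholders, not taken from any of the examples.

# Minimal sketch (not from the examples below): one pipeline, one stage,
# one MPI task that requests 16 cores via Task.cores.
from radical.entk import Pipeline, Stage, Task

p = Pipeline()
s = Stage()

t = Task()
t.executable = ['/path/to/md_engine']   # placeholder executable
t.arguments  = ['-i', 'mdin', '-p', 'prmtop', '-c', 'inpcrd']
t.cores      = 16                       # cores requested for this task
t.mpi        = True                     # launch as an MPI job (older EnTK API)

s.add_tasks(t)
p.add_stages(s)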
Example 1: init_cycle
# Required import: from radical.entk import Task [as alias]
# Or: from radical.entk.Task import cores [as alias]
def init_cycle(self, replicas, replica_cores, python_path, md_executable, exchange_method, min_temp, max_temp, timesteps, basename, pre_exec):  # "cycle" = 1 MD stage plus the subsequent exchange computation
    """
    Initial cycle consists of:
    1) Create tarball of MD input data
    2) Transfer the tarball to pilot sandbox
    3) Untar the tarball
    4) Run first cycle
    """

    # Initialize Pipeline
    self._prof.prof('InitTar', uid=self._uid)
    p = Pipeline()
    p.name = 'initpipeline'

    md_dict = dict()   # bookkeeping
    tar_dict = dict()  # bookkeeping

    # Write the input files
    self._prof.prof('InitWriteInputs', uid=self._uid)
    writeInputs.writeInputs(
        max_temp=max_temp,
        min_temp=min_temp,
        replicas=replicas,
        timesteps=timesteps,
        basename=basename)
    self._prof.prof('EndWriteInputs', uid=self._uid)
    self._prof.prof('InitTar', uid=self._uid)

    # Create tarball of input data
    tar = tarfile.open("input_files.tar", "w")
    for name in [
            basename + ".prmtop", basename + ".inpcrd", basename + ".mdin"
    ]:
        tar.add(name)
    for r in range(replicas):
        tar.add('mdin_{0}'.format(r))
    tar.close()

    # Delete all input files outside the tarball
    for r in range(replicas):
        os.remove('mdin_{0}'.format(r))
    self._prof.prof('EndTar', uid=self._uid)

    # Create untar stage
    repo = git.Repo('.', search_parent_directories=True)
    aux_function_path = repo.working_tree_dir

    untar_stg = Stage()
    untar_stg.name = 'untarStg'

    # Untar task
    untar_tsk = Task()
    untar_tsk.name = 'untartsk'
    untar_tsk.executable = ['python']
    untar_tsk.upload_input_data = [
        str(aux_function_path) + '/repex/untar_input_files.py', 'input_files.tar'
    ]
    untar_tsk.arguments = ['untar_input_files.py', 'input_files.tar']
    untar_tsk.cpu_reqs = 1
    #untar_tsk.post_exec = ['']
    untar_stg.add_tasks(untar_tsk)
    p.add_stages(untar_stg)

    tar_dict[0] = '$Pipeline_%s_Stage_%s_Task_%s' % (
        p.name, untar_stg.name, untar_tsk.name)

    # First MD stage: needs to be defined separately since workflow is not built from a predetermined order, also equilibration needs to happen first.
    md_stg = Stage()
    md_stg.name = 'mdstg0'
    self._prof.prof('InitMD_0', uid=self._uid)

    # MD tasks
    for r in range(replicas):
        md_tsk = AMBERTask(cores=replica_cores, md_executable=md_executable, pre_exec=pre_exec)
        md_tsk.name = 'mdtsk-{replica}-{cycle}'.format(replica=r, cycle=0)
        md_tsk.link_input_data += [
            '%s/inpcrd' % tar_dict[0],
            '%s/prmtop' % tar_dict[0],
            '%s/mdin_{0}'.format(r) % tar_dict[0]  # Use for full temperature exchange
        ]
        md_tsk.arguments = [
            '-O',
            '-p',
            'prmtop',
            '-i',
            'mdin_{0}'.format(r),
# ... (the rest of this example's code is omitted) ...
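Example 1 (and Examples 4, 5 and 7) relies on an AMBERTask helper that is not shown on this page. It is presumably a small factory that pre-populates a Task with the MD executable, core count and pre_exec commands; a hypothetical reconstruction, assuming the older Task.cores/Task.mpi attributes, could look like the following (the real helper in the source repository may differ, and Examples 5 and 7 spell the keyword MD_Executable).

# Hypothetical reconstruction of the AMBERTask factory used above.
from radical.entk import Task

def AMBERTask(cores, md_executable, pre_exec=None):
    """Return a Task pre-configured for an AMBER MD run."""
    t = Task()
    t.executable = [md_executable]
    t.pre_exec   = pre_exec or []
    t.cores      = cores          # per-replica core count
    t.mpi        = cores > 1      # assumption: run MPI only for multi-core replicas
    t.link_input_data = []        # the examples append inputs with '+='
    return t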
Example 2: list
# Required import: from radical.entk import Task [as alias]
# Or: from radical.entk.Task import cores [as alias]
# Bookkeeping
stage_uids = list()
task_uids = dict()

Stages = 3
Replicas = 4

for N_Stg in range(Stages):
    stg = Stage()  ## initialization
    task_uids['Stage_%s' % N_Stg] = list()

    if N_Stg == 0:
        for n0 in range(Replicas):
            t = Task()
            t.executable = ['/usr/local/packages/gromacs/5.1.4/INTEL-140-MVAPICH2-2.0/bin/gmx_mpi_d']  # MD Engine
            t.upload_input_data = ['in.gro', 'in.top', 'FNF.itp', 'martini_v2.2.itp', 'in.mdp']
            t.pre_exec = ['module load gromacs',
                          '/usr/local/packages/gromacs/5.1.4/INTEL-140-MVAPICH2-2.0/bin/gmx_mpi_d grompp -f in.mdp -c in.gro -o in.tpr -p in.top']
            t.arguments = ['mdrun', '-s', 'in.tpr', '-deffnm', 'out']
            t.cores = 32
            stg.add_tasks(t)
            task_uids['Stage_%s' % N_Stg].append(t.uid)
        p.add_stages(stg)
        stage_uids.append(stg.uid)

    else:
        for n0 in range(Replicas):
            t = Task()
            t.executable = ['/usr/local/packages/gromacs/5.1.4/INTEL-140-MVAPICH2-2.0/bin/gmx_mpi_d']  # MD Engine
            t.copy_input_data = [
                '$Pipeline_%s_Stage_%s_Task_%s/out.gro > in.gro' % (p.uid, stage_uids[N_Stg - 1], task_uids['Stage_%s' % (N_Stg - 1)][n0]),
                '$Pipeline_%s_Stage_%s_Task_%s/in.top' % (p.uid, stage_uids[N_Stg - 1], task_uids['Stage_%s' % (N_Stg - 1)][n0]),
                '$Pipeline_%s_Stage_%s_Task_%s/FNF.itp' % (p.uid, stage_uids[N_Stg - 1], task_uids['Stage_%s' % (N_Stg - 1)][n0]),
                '$Pipeline_%s_Stage_%s_Task_%s/martini_v2.2.itp' % (p.uid, stage_uids[N_Stg - 1], task_uids['Stage_%s' % (N_Stg - 1)][n0]),
                '$Pipeline_%s_Stage_%s_Task_%s/in.mdp' % (p.uid, stage_uids[N_Stg - 1], task_uids['Stage_%s' % (N_Stg - 1)][n0])
            ]
            t.pre_exec = ['module load gromacs',
                          '/usr/local/packages/gromacs/5.1.4/INTEL-140-MVAPICH2-2.0/bin/gmx_mpi_d grompp -f in.mdp -c in.gro -o in.tpr -p in.top']
            t.arguments = ['mdrun', '-s', 'in.tpr', '-deffnm', 'out']
Example 3: Cycle
# Required import: from radical.entk import Task [as alias]
# Or: from radical.entk.Task import cores [as alias]
def Cycle(Replicas, Replica_Cores, Cycles, MD_Executable, ExchangeMethod):
    """
    All cycles after the initial cycle
    """

    with open("exchangePairs.dat", "r") as f:  # Read exchangePairs.dat
        ExchangeArray = []
        for line in f:
            ExchangeArray.append(int(line.split()[1]))
            #ExchangeArray.append(line)
        #print ExchangeArray

    q = Pipeline()

    # Bookkeeping
    stage_uids = list()
    task_uids = list()  ## = dict()
    md_dict = dict()

    # Create initial MD stage
    md_stg = Stage()
    for r in range(Replicas):
        md_tsk = Task()
        md_tsk.executable = [MD_Executable]  # MD Engine, Blue Waters
        md_tsk.link_input_data = ['%s/restrt > inpcrd' % (Book[Cycle - 1][ExchangeArray[r]]),
                                  '%s/prmtop' % (Book[Cycle - 1][r]),
                                  #'%s/mdin_{0}'.format(r)%(Book[k-1][r])]
                                  '%s/mdin' % (Book[Cycle - 1][r])]
        md_tsk.pre_exec = ['export AMBERHOME=$HOME/amber/amber14/']  # Should be abstracted from user?
        #md_tsk.pre_exec = ['module load amber']
        #md_tsk.arguments = ['-O', '-i', 'mdin_{0}'.format(n0), '-p', 'prmtop', '-c', 'inpcrd', '-o', 'out_{0}'.format(n0), '-inf', 'mdinfo_{0}'.format(n0)]
        md_tsk.arguments = ['-O', '-i', 'mdin', '-p', 'prmtop', '-c', 'inpcrd', '-o', 'out_{0}'.format(r), '-inf', 'mdinfo_{0}'.format(r)]
        md_tsk.cores = Replica_Cores
        md_tsk.mpi = True
        md_dict[r] = '$Pipeline_%s_Stage_%s_Task_%s' % (q.uid, md_stg.uid, md_tsk.uid)
        md_stg.add_tasks(md_tsk)
        #task_uids.append(md_tsk.uid)

    q.add_stages(md_stg)

    ex_stg = Stage()

    # Create exchange task
    ex_tsk = Task()
    ex_tsk.executable = ['python']
    ex_tsk.upload_input_data = ['exchangeMethods/TempEx.py']
    for n1 in range(Replicas):
        #print md_dict[n1]
        ex_tsk.link_input_data += ['%s/mdinfo_%s' % (md_dict[n1], n1)]
    ex_tsk.arguments = ['TempEx.py', '{0}'.format(Replicas)]
    ex_tsk.cores = 1
    ex_tsk.mpi = False
    ex_tsk.download_output_data = ['exchangePairs.dat']
    ex_stg.add_tasks(ex_tsk)
    #task_uids.append(ex_tsk.uid)
    q.add_stages(ex_stg)
    #stage_uids.append(ex_stg.uid)

    Book.append(md_dict)
    #print md_dict
    #print Book
    return q
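A side note on the API used here: md_tsk.cores and md_tsk.mpi belong to older radical.entk releases; newer releases express the same request through a Task.cpu_reqs dictionary. A hedged sketch of the equivalent description in the newer API follows; the dictionary key names have changed across EnTK versions, so check the release you have installed.

# Rough equivalent of "t.cores = 16; t.mpi = True" in newer radical.entk
# releases; the exact cpu_reqs key names depend on the EnTK version.
from radical.entk import Task

t = Task()
t.executable = '/path/to/md_engine'       # newer releases accept a plain string
t.cpu_reqs = {
    'cpu_processes'   : 16,               # MPI ranks (older spelling: 'processes')
    'cpu_process_type': 'MPI',            # (older spelling: 'process_type')
    'cpu_threads'     : 1,                # threads per rank ('threads_per_process')
    'cpu_thread_type' : None,             # ('thread_type')
}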
Example 4: general_cycle
# Required import: from radical.entk import Task [as alias]
# Or: from radical.entk.Task import cores [as alias]
def general_cycle(self, replicas, replica_cores, cycle, python_path, md_executable, exchange_method, pre_exec):
    """
    All cycles after the initial cycle
    Pulls up exchange pairs file and generates the new workflow
    """

    self._prof.prof('InitcreateMDwokflow_{0}'.format(cycle), uid=self._uid)
    with open('exchangePairs_{0}.dat'.format(cycle), 'r') as f:  # Read exchangePairs.dat
        exchange_array = []
        for line in f:
            exchange_array.append(int(line.split()[1]))
            #exchange_array.append(line)
        #print exchange_array

    q = Pipeline()
    q.name = 'genpipeline{0}'.format(cycle)

    # Bookkeeping
    stage_uids = list()
    task_uids = list()  ## = dict()
    md_dict = dict()

    # Create MD stage
    md_stg = Stage()
    md_stg.name = 'mdstage{0}'.format(cycle)

    self._prof.prof('InitMD_{0}'.format(cycle), uid=self._uid)
    for r in range(replicas):
        md_tsk = AMBERTask(cores=replica_cores, md_executable=md_executable, pre_exec=pre_exec)
        md_tsk.name = 'mdtsk-{replica}-{cycle}'.format(replica=r, cycle=cycle)
        md_tsk.link_input_data = [
            '%s/restrt > inpcrd' % (self.book[cycle - 1][exchange_array[r]]),
            '%s/prmtop' % (self.book[0][r]),
            '%s/mdin_{0}'.format(r) % (self.book[0][r])
        ]

        ### The following softlinking scheme is to be used ONLY if node local file system is to be used: not fully supported yet.
        #md_tsk.link_input_data = ['$NODE_LFS_PATH/rstrt-{replica}-{cycle}'.format(replica=exchange_array[r], cycle=cycle-1) > '$NODE_LFS_PATH/inpcrd',
        #                          #'%s/restrt > inpcrd'%(self.book[cycle-1][exchange_array[r]]),
        #                          '%s/prmtop'%(self.book[0][r]),
        #                          '%s/mdin_{0}'.format(r)%(self.Book[0][r])]

        md_tsk.arguments = [
            '-O',
            '-i',
            'mdin_{0}'.format(r),
            '-p',
            'prmtop',
            '-c',
            'inpcrd',
            #'-c', 'rstrt-{replica}-{cycle}'.format(replica=r, cycle=cycle-1),
            '-o',
            'out-{replica}-{cycle}'.format(replica=r, cycle=cycle),
            '-r',
            'restrt',
            #'-r', 'rstrt-{replica}-{cycle}'.format(replica=r, cycle=cycle),
            '-x',
            'mdcrd-{replica}-{cycle}'.format(replica=r, cycle=cycle),
            '-inf',
            'mdinfo_{0}'.format(r)
        ]
        #md_tsk.tag = 'mdtsk-{replica}-{cycle}'.format(replica=r, cycle=0)
        md_dict[r] = '$Pipeline_%s_Stage_%s_Task_%s' % (
            q.name, md_stg.name, md_tsk.name)
        self.md_task_list.append(md_tsk)
        md_stg.add_tasks(md_tsk)

    q.add_stages(md_stg)

    ex_stg = Stage()
    ex_stg.name = 'exstg{0}'.format(cycle + 1)

    # Create exchange task
    ex_tsk = Task()
    ex_tsk.name = 'extsk{0}'.format(cycle + 1)
    ex_tsk.executable = [python_path]  #['/usr/bin/python'] #['/opt/python/bin/python']
    ex_tsk.upload_input_data = [exchange_method]
    for r in range(replicas):
        ex_tsk.link_input_data += ['%s/mdinfo_%s' % (md_dict[r], r)]
    ex_tsk.pre_exec = ['mv *.py exchange_method.py']
    ex_tsk.arguments = [
        'exchange_method.py', '{0}'.format(replicas), '{0}'.format(cycle + 1)
    ]
    ex_tsk.cores = 1
    ex_tsk.mpi = False
    ex_tsk.download_output_data = [
        'exchangePairs_{0}.dat'.format(cycle + 1)
    ]  # Finds exchange partners, also generates exchange history trace

    ex_stg.add_tasks(ex_tsk)
    #task_uids.append(ex_tsk.uid)
    self.ex_task_list.append(ex_tsk)

    q.add_stages(ex_stg)
# ... (the rest of this example's code is omitted) ...
Example 5: InitCycle
# Required import: from radical.entk import Task [as alias]
# Or: from radical.entk.Task import cores [as alias]
def InitCycle(self, Replicas, Replica_Cores, md_executable, ExchangeMethod, timesteps):  # "Cycle" = 1 MD stage plus the subsequent exchange computation
    """
    Initial cycle consists of:
    1) Create tarball of MD input data
    2) Transfer the tarball to pilot sandbox
    3) Untar the tarball
    4) Run first Cycle
    """

    # Initialize Pipeline
    #self._prof.prof('InitTar', uid=self._uid)
    p = Pipeline()
    p.name = 'initpipeline'

    md_dict = dict()   # Bookkeeping
    tar_dict = dict()  # Bookkeeping

    ## Write the input files
    self._prof.prof('InitWriteInputs', uid=self._uid)
    writeInputs.writeInputs(max_temp=350, min_temp=250, replicas=Replicas, timesteps=timesteps)
    self._prof.prof('EndWriteInputs', uid=self._uid)
    self._prof.prof('InitTar', uid=self._uid)

    # Create tarball of input data
    tar = tarfile.open("Input_Files.tar", "w")
    for name in ["prmtop", "inpcrd", "mdin"]:
        tar.add(name)
    for r in range(Replicas):
        tar.add('mdin_{0}'.format(r))
    tar.close()

    # Delete all input files outside the tarball
    for r in range(Replicas):
        os.remove('mdin_{0}'.format(r))
    self._prof.prof('EndTar', uid=self._uid)

    # Create untar stage
    untar_stg = Stage()
    untar_stg.name = 'untarStg'

    # Untar task
    untar_tsk = Task()
    untar_tsk.name = 'untartsk'
    untar_tsk.executable = ['python']
    untar_tsk.upload_input_data = ['untar_input_files.py', 'Input_Files.tar']
    untar_tsk.arguments = ['untar_input_files.py', 'Input_Files.tar']
    untar_tsk.cores = 1

    untar_stg.add_tasks(untar_tsk)
    p.add_stages(untar_stg)

    tar_dict[0] = '$Pipeline_%s_Stage_%s_Task_%s' % (p.name,
                                                     untar_stg.name,
                                                     untar_tsk.name)

    # First MD stage: needs to be defined separately since workflow is not built from a predetermined order
    md_stg = Stage()
    md_stg.name = 'mdstg0'
    self._prof.prof('InitMD_0', uid=self._uid)

    # MD tasks
    for r in range(Replicas):
        md_tsk = AMBERTask(cores=Replica_Cores, MD_Executable=md_executable)
        md_tsk.name = 'mdtsk-{replica}-{cycle}'.format(replica=r, cycle=0)
        md_tsk.link_input_data += [
            '%s/inpcrd' % tar_dict[0],
            '%s/prmtop' % tar_dict[0],
            '%s/mdin_{0}'.format(r) % tar_dict[0]  # Use for full temperature exchange
            #'%s/mdin'%tar_dict[0]  # Testing only
        ]
        md_tsk.arguments = ['-O', '-p', 'prmtop', '-i', 'mdin_{0}'.format(r),  # Use this for full Temperature Exchange
                            '-c', 'inpcrd', '-o', 'out_{0}'.format(r),
                            '-inf', 'mdinfo_{0}'.format(r)]
        md_dict[r] = '$Pipeline_%s_Stage_%s_Task_%s' % (p.name, md_stg.name, md_tsk.name)
        md_stg.add_tasks(md_tsk)
        self.md_task_list.append(md_tsk)
        #print md_tsk.uid

    p.add_stages(md_stg)
# ... (the rest of this example's code is omitted) ...
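writeInputs.writeInputs, called in Examples 1 and 5, is a project-specific helper rather than part of radical.entk: it writes one AMBER mdin_<r> input file per replica, spanning a temperature ladder between min_temp and max_temp. A hypothetical sketch of what such a helper might do is shown below; the mdin template, the geometric ladder spacing, and the default basename are assumptions, not the project's actual code.

# Hypothetical sketch of a writeInputs-style helper: one mdin file per replica,
# temperatures spaced geometrically between min_temp and max_temp.
def writeInputs(max_temp, min_temp, replicas, timesteps, basename='ala2'):
    for r in range(replicas):
        temp = min_temp * (max_temp / float(min_temp)) ** (r / float(max(replicas - 1, 1)))
        with open('mdin_{0}'.format(r), 'w') as f:
            f.write('\n'.join([
                'REMD replica {0}'.format(r),
                ' &cntrl',
                '  nstlim = {0},'.format(timesteps),   # MD steps per cycle
                '  temp0  = {0:.2f},'.format(temp),    # target temperature for this replica
                '  ntt    = 3, dt = 0.002,',
                ' /',
            ]) + '\n')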
Example 6: InitCycle
# Required import: from radical.entk import Task [as alias]
# Or: from radical.entk.Task import cores [as alias]
def InitCycle(Replicas, Replica_Cores, MD_Executable, ExchangeMethod):  # "Cycle" = 1 MD stage plus the subsequent exchange computation

    # Initialize Pipeline
    p = Pipeline()

    md_dict = dict()   # Bookkeeping
    tar_dict = dict()  # Bookkeeping

    # Create tarball of input data
    # Create untar stage
    untar_stg = Stage()

    # Untar task
    untar_tsk = Task()
    untar_tsk.executable = ['python']
    untar_tsk.upload_input_data = ['untar_input_files.py', '../../Input_Files.tar']
    untar_tsk.arguments = ['untar_input_files.py', 'Input_Files.tar']
    untar_tsk.cores = 1

    untar_stg.add_tasks(untar_tsk)
    p.add_stages(untar_stg)

    tar_dict[0] = '$Pipeline_%s_Stage_%s_Task_%s' % (p.uid,
                                                     untar_stg.uid,
                                                     untar_tsk.uid)
    print(tar_dict[0])

    # First MD stage: needs to be defined separately since workflow is not built from a predetermined order
    md_stg = Stage()

    # MD tasks
    for r in range(Replicas):
        md_tsk = Task()
        md_tsk.executable = [MD_Executable]
        md_tsk.link_input_data += ['%s/inpcrd' % tar_dict[0],
                                   '%s/prmtop' % tar_dict[0],
                                   #'%s/mdin_{0}'.format(r)%tar_dict[0]
                                   '%s/mdin' % tar_dict[0]
                                   ]
        md_tsk.pre_exec = ['export AMBERHOME=$HOME/amber/amber14/']  # Should be abstracted from the user?
        md_tsk.arguments = ['-O', '-p', 'prmtop', '-i', 'mdin',  #'mdin_{0}'.format(r),  # Use this for full Temperature Exchange
                            '-c', 'inpcrd', '-o', 'out_{0}'.format(r),
                            '-inf', 'mdinfo_{0}'.format(r)]
        md_tsk.cores = Replica_Cores
        md_tsk.mpi = True
        md_dict[r] = '$Pipeline_%s_Stage_%s_Task_%s' % (p.uid, md_stg.uid, md_tsk.uid)
        md_stg.add_tasks(md_tsk)
        #task_uids.append(md_tsk.uid)

    p.add_stages(md_stg)
    #stage_uids.append(md_stg.uid)

    # First exchange stage
    ex_stg = Stage()

    # Create exchange task. The exchange task evaluates a Metropolis-Hastings thermodynamic balance condition
    # and writes the exchangePairs.dat file, which contains a sorted list of ordered pairs.
    # Said pairs then exchange configurations by linking output configuration files appropriately.
    ex_tsk = Task()
    ex_tsk.executable = ['python']
    #ex_tsk.upload_input_data = ['exchangeMethods/TempEx.py']
    ex_tsk.upload_input_data = [ExchangeMethod]
    for r in range(Replicas):
        ex_tsk.link_input_data += ['%s/mdinfo_%s' % (md_dict[r], r)]
    ex_tsk.arguments = ['TempEx.py', '{0}'.format(Replicas)]
    ex_tsk.cores = 1
    ex_tsk.mpi = False
    ex_tsk.download_output_data = ['exchangePairs.dat']
    ex_stg.add_tasks(ex_tsk)
    #task_uids.append(ex_tsk.uid)
    p.add_stages(ex_stg)
    #stage_uids.append(ex_stg.uid)

    Book.append(md_dict)
    #print Book
    return p
Example 7: GeneralCycle
# Required import: from radical.entk import Task [as alias]
# Or: from radical.entk.Task import cores [as alias]
def GeneralCycle(self, Replicas, Replica_Cores, Cycle, MD_Executable, ExchangeMethod):
    """
    All cycles after the initial cycle
    Pulls up exchange pairs file and generates the new workflow
    """

    self._prof.prof('InitcreateMDwokflow_{0}'.format(Cycle), uid=self._uid)
    with open('exchangePairs_{0}.dat'.format(Cycle), 'r') as f:  # Read exchangePairs.dat
        ExchangeArray = []
        for line in f:
            ExchangeArray.append(int(line.split()[1]))
            #ExchangeArray.append(line)
        #print ExchangeArray

    q = Pipeline()
    q.name = 'genpipeline{0}'.format(Cycle)

    # Bookkeeping
    stage_uids = list()
    task_uids = list()  ## = dict()
    md_dict = dict()

    # Create initial MD stage
    md_stg = Stage()
    md_stg.name = 'mdstage{0}'.format(Cycle)

    self._prof.prof('InitMD_{0}'.format(Cycle), uid=self._uid)
    for r in range(Replicas):
        md_tsk = AMBERTask(cores=Replica_Cores, MD_Executable=MD_Executable)
        md_tsk.name = 'mdtsk-{replica}-{cycle}'.format(replica=r, cycle=Cycle)
        md_tsk.link_input_data = ['%s/restrt > inpcrd' % (self.Book[Cycle - 1][ExchangeArray[r]]),
                                  '%s/prmtop' % (self.Book[0][r]),
                                  #'%s/prmtop'%(self.Tarball_path[0]),
                                  '%s/mdin_{0}'.format(r) % (self.Book[0][r])]
                                  #'%s/mdin'%(self.Book[0][r])]
                                  #'%s/mdin'%(self.Tarball_path[0])]
        md_tsk.arguments = ['-O', '-i', 'mdin_{0}'.format(r), '-p', 'prmtop', '-c', 'inpcrd', '-o', 'out_{0}'.format(r), '-inf', 'mdinfo_{0}'.format(r)]
        #md_tsk.arguments = ['-O', '-i', 'mdin', '-p', 'prmtop', '-c', 'inpcrd', '-o', 'out_{0}'.format(r), '-inf', 'mdinfo_{0}'.format(r)]
        md_dict[r] = '$Pipeline_%s_Stage_%s_Task_%s' % (q.name, md_stg.name, md_tsk.name)
        self.md_task_list.append(md_tsk)
        md_stg.add_tasks(md_tsk)

    q.add_stages(md_stg)

    ex_stg = Stage()
    ex_stg.name = 'exstg{0}'.format(Cycle + 1)

    # Create exchange task
    ex_tsk = Task()
    ex_tsk.name = 'extsk{0}'.format(Cycle + 1)
    ex_tsk.executable = ['python']
    ex_tsk.upload_input_data = [ExchangeMethod]
    for r in range(Replicas):
        ex_tsk.link_input_data += ['%s/mdinfo_%s' % (md_dict[r], r)]
    ex_tsk.arguments = ['TempEx.py', '{0}'.format(Replicas), '{0}'.format(Cycle + 1)]
    ex_tsk.cores = 1
    ex_tsk.mpi = False
    ex_tsk.download_output_data = ['exchangePairs_{0}.dat'.format(Cycle + 1)]  # Finds exchange partners, also generates exchange history trace
    ex_stg.add_tasks(ex_tsk)
    #task_uids.append(ex_tsk.uid)
    self.ex_task_list.append(ex_tsk)

    q.add_stages(ex_stg)
    #stage_uids.append(ex_stg.uid)

    self.Book.append(md_dict)
    #self._prof.prof('EndEx_{0}'.format(Cycle), uid=self._uid)
    #print d
    #print self.Book
    return q
Example 8: range
# Required import: from radical.entk import Task [as alias]
# Or: from radical.entk.Task import cores [as alias]
for N_Stg in range(Stages):
    stg = Stage()  ## initialization
    task_uids['Stage_%s' % N_Stg] = list()

    ##### Initial MD stage
    if N_Stg == 0:
        for n0 in range(Replicas):
            t = Task()
            t.executable = ['/u/sciteam/mushnoor/amber/amber14/bin/sander.MPI']  # MD Engine
            t.upload_input_data = ['inpcrd', 'prmtop', 'mdin_{0}'.format(n0)]
            t.pre_exec = ['export AMBERHOME=$HOME/amber/amber14/']
            t.arguments = ['-O', '-i', 'mdin_{0}'.format(n0), '-p', 'prmtop', '-c', 'inpcrd', '-o', 'out']
            t.cores = Replica_Cores
            stg.add_tasks(t)
            task_uids['Stage_%s' % N_Stg].append(t.uid)
        p.add_stages(stg)
        stage_uids.append(stg.uid)

    ##### Exchange stages
    elif N_Stg != 0 and N_Stg % 2 == 1:
        t = Task()
        t.executable = ['python']
        t.upload_input_data = ['exchangeMethods/RandEx.py']
        #t.link_input_data = ['']
        t.arguments = ['RandEx.py', Replicas]
        t.cores = 1
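The exchange tasks in these examples all run a small single-core Python script (TempEx.py or RandEx.py) that pairs replicas and writes exchangePairs.dat, whose second column is read back at the start of the next cycle. A hypothetical, simplified RandEx-style script is sketched below; the real scripts live in the repository's exchangeMethods directory, and the temperature-exchange variant additionally evaluates a Metropolis criterion from the mdinfo files.

# Hypothetical stand-in for exchangeMethods/RandEx.py: randomly pair replicas
# and write 'replica partner' lines to exchangePairs.dat.
import random
import sys

def main():
    replicas = int(sys.argv[1])
    order = list(range(replicas))
    random.shuffle(order)

    # Pair consecutive entries of the shuffled order; an odd replica keeps itself.
    partner = {}
    for i in range(0, replicas - 1, 2):
        a, b = order[i], order[i + 1]
        partner[a], partner[b] = b, a
    if replicas % 2:
        partner[order[-1]] = order[-1]

    with open('exchangePairs.dat', 'w') as f:
        for r in range(replicas):
            f.write('%d %d\n' % (r, partner[r]))   # column 2 is what the examples parse

if __name__ == '__main__':
    main()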
Example 9: init_cycle
# Required import: from radical.entk import Task [as alias]
# Or: from radical.entk.Task import cores [as alias]
def init_cycle():

    # Create Pipeline object
    p = Pipeline()

    # Bookkeeping
    stage_uids = list()
    task_uids = list()  ## = dict()
    d = dict()
    dict_tarball = dict()

    # Create tarball stage
    tar_stg = Stage()

    # Create tar/untar task
    tar_tsk = Task()
    tar_tsk.executable = ['python']
    tar_tsk.upload_input_data = ['Input_Files.tar', 'untar_input_files.py']
    tar_tsk.arguments = ['untar_input_files.py', 'Input_Files.tar']
    tar_tsk.cores = 1

    tar_stg.add_tasks(tar_tsk)
    #task_uids.append(tar_tsk.uid)
    p.add_stages(tar_stg)
    #stage_uids.append(tar_stg.uid)

    dict_tarball[0] = '$Pipeline_%s_Stage_%s_Task_%s' % (p.uid, tar_stg.uid, tar_tsk.uid)

    # Create initial MD stage
    md_stg = Stage()

    # Create MD tasks
    for n0 in range(Replicas):
        md_tsk = Task()
        md_tsk.executable = ['/u/sciteam/mushnoor/amber/amber14/bin/sander.MPI']  # MD Engine, BW
        #md_tsk.executable = ['/usr/local/packages/amber/16/INTEL-140-MVAPICH2-2.0/bin/pmemd.MPI']  # MD Engine, SuperMIC
        #md_tsk.executable = ['/opt/amber/bin/pmemd.MPI']
        #md_tsk.upload_input_data = ['inpcrd', 'prmtop', 'mdin_{0}'.format(n0)]
        #md_tsk.upload_input_data = ['inpcrd', 'prmtop', 'mdin']
        md_tsk.link_input_data += ['%s/inpcrd' % dict_tarball[0],
                                   '%s/prmtop' % dict_tarball[0],
                                   '%s/mdin' % dict_tarball[0]]
        md_tsk.pre_exec = ['export AMBERHOME=$HOME/amber/amber14/']
        #md_tsk.pre_exec = ['module load amber']
        #md_tsk.arguments = ['-O', '-i', 'mdin_{0}'.format(n0), '-p', 'prmtop', '-c', 'inpcrd', '-o', 'out_{0}'.format(n0), '-inf', 'mdinfo_{0}'.format(n0)]
        md_tsk.arguments = ['-O', '-i', 'mdin', '-p', 'prmtop', '-c', 'inpcrd', '-o', 'out_{0}'.format(n0), '-inf', 'mdinfo_{0}'.format(n0)]
        md_tsk.cores = Replica_Cores
        md_tsk.mpi = True
        d[n0] = '$Pipeline_%s_Stage_%s_Task_%s' % (p.uid, md_stg.uid, md_tsk.uid)
        md_stg.add_tasks(md_tsk)
        task_uids.append(md_tsk.uid)

    p.add_stages(md_stg)
    stage_uids.append(md_stg.uid)
    #print d

    # Create exchange stage
    ex_stg = Stage()

    # Create exchange task
    ex_tsk = Task()
    ex_tsk.executable = ['python']
    ex_tsk.upload_input_data = ['exchangeMethods/TempEx.py']
    for n1 in range(Replicas):
        ex_tsk.link_input_data += ['%s/mdinfo_%s' % (d[n1], n1)]
    ex_tsk.arguments = ['TempEx.py', '{0}'.format(Replicas)]
    ex_tsk.cores = 1
    ex_tsk.mpi = False
    ex_tsk.download_output_data = ['exchangePairs.dat']
    ex_stg.add_tasks(ex_tsk)
    task_uids.append(ex_tsk.uid)
    p.add_stages(ex_stg)
    stage_uids.append(ex_stg.uid)

    Book.append(d)
    #print Book
    return p
Example 10: cycle
# Required import: from radical.entk import Task [as alias]
# Or: from radical.entk.Task import cores [as alias]
def cycle(k):

    # Read exchangePairs.dat
    with open("exchangePairs.dat", "r") as f:
        ExchangeArray = []
        for line in f:
            ExchangeArray.append(int(line.split()[1]))
            #ExchangeArray.append(line)
        #print ExchangeArray

    p = Pipeline()

    # Bookkeeping
    stage_uids = list()
    task_uids = list()  ## = dict()
    d = dict()

    # Create initial MD stage
    md_stg = Stage()

    # Create MD tasks
    for n0 in range(Replicas):
        md_tsk = Task()
        md_tsk.executable = ['/u/sciteam/mushnoor/amber/amber14/bin/sander.MPI']  # MD Engine, Blue Waters
        #md_tsk.executable = ['/usr/local/packages/amber/16/INTEL-140-MVAPICH2-2.0/bin/pmemd.MPI']  # MD Engine, SuperMIC
        #md_tsk.executable = ['/opt/amber/bin/pmemd.MPI']
        md_tsk.link_input_data = ['%s/restrt > inpcrd' % (Book[k - 1][ExchangeArray[n0]]),
                                  '%s/prmtop' % (Book[k - 1][n0]),
                                  #'%s/mdin_{0}'.format(n0)%(Book[k-1][n0])]
                                  '%s/mdin' % (Book[k - 1][n0])]
        ## Above: copy from previous pipeline, make sure bookkeeping is correct
        md_tsk.pre_exec = ['export AMBERHOME=$HOME/amber/amber14/']  # Pre-exec, Blue Waters
        #md_tsk.pre_exec = ['module load amber']
        #md_tsk.arguments = ['-O', '-i', 'mdin_{0}'.format(n0), '-p', 'prmtop', '-c', 'inpcrd', '-o', 'out_{0}'.format(n0), '-inf', 'mdinfo_{0}'.format(n0)]
        md_tsk.arguments = ['-O', '-i', 'mdin', '-p', 'prmtop', '-c', 'inpcrd', '-o', 'out_{0}'.format(n0), '-inf', 'mdinfo_{0}'.format(n0)]
        md_tsk.cores = Replica_Cores
        md_tsk.mpi = True
        d[n0] = '$Pipeline_%s_Stage_%s_Task_%s' % (p.uid, md_stg.uid, md_tsk.uid)
        #print d
        md_stg.add_tasks(md_tsk)
        task_uids.append(md_tsk.uid)

    p.add_stages(md_stg)
    stage_uids.append(md_stg.uid)

    # Create exchange stage
    ex_stg = Stage()

    # Create exchange task
    ex_tsk = Task()
    ex_tsk.executable = ['python']
    ex_tsk.upload_input_data = ['exchangeMethods/TempEx.py']
    for n1 in range(Replicas):
        #print d[n1]
        ex_tsk.link_input_data += ['%s/mdinfo_%s' % (d[n1], n1)]
    ex_tsk.arguments = ['TempEx.py', '{0}'.format(Replicas)]
    ex_tsk.cores = 1
    ex_tsk.mpi = False
    ex_tsk.download_output_data = ['exchangePairs.dat']
    ex_stg.add_tasks(ex_tsk)
    task_uids.append(ex_tsk.uid)
    p.add_stages(ex_stg)
    stage_uids.append(ex_stg.uid)

    Book.append(d)
    #print d
    #print Book
    return p
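The init_cycle()/cycle(k) functions above only build Pipeline objects; to execute them, the pipelines still have to be handed to EnTK's AppManager. A hedged usage sketch follows: the resource label, queue, project, replica counts and cycle count are placeholders, and the resource-description attribute and AppManager constructor arguments differ between EnTK releases.

# Sketch of driving init_cycle()/cycle(k) from Examples 9 and 10 with AppManager.
from radical.entk import AppManager

Replicas, Replica_Cores, Cycles = 4, 32, 3   # placeholder run parameters
Book = []                                    # global bookkeeping list the examples append to

amgr = AppManager()                          # some releases also need RabbitMQ hostname/port
amgr.resource_desc = {
    'resource': 'ncsa.bw_aprun',             # placeholder resource label
    'walltime': 60,
    'cores'   : Replicas * Replica_Cores,
    'queue'   : 'normal',
    'project' : 'ABC123',
}

amgr.workflow = set([init_cycle()])          # first cycle: untar inputs, MD, exchange
amgr.run()

for k in range(1, Cycles):                   # later cycles reuse exchangePairs.dat
    amgr.workflow = set([cycle(k)])
    amgr.run()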
Example 11: Stage
# Required import: from radical.entk import Task [as alias]
# Or: from radical.entk.Task import cores [as alias]
# Create stage.
s1 = Stage()

s1_task_uids = []
s2_task_uids = []

for cnt in range(4):
    # Create a Task object
    t1 = Task()  ## GROMPP
    t1.executable = ['/usr/local/packages/gromacs/5.1.4/INTEL-140-MVAPICH2-2.0/bin/gmx_mpi_d']  # MD Engine
    t1.upload_input_data = ['in.gro', 'in.top', 'FNF.itp', 'martini_v2.2.itp', 'in.mdp']
    t1.pre_exec = ['module load gromacs',
                   '/usr/local/packages/gromacs/5.1.4/INTEL-140-MVAPICH2-2.0/bin/gmx_mpi_d grompp -f in.mdp -c in.gro -o in.tpr -p in.top']
    t1.arguments = ['mdrun', '-s', 'in.tpr', '-deffnm', 'out']
    t1.cores = 5

    # Add the Task to the Stage
    s1.add_tasks(t1)
    s1_task_uids.append(t1.uid)

# Add Stage to the Pipeline
p.add_stages(s1)

# Create another Stage object to hold checksum tasks
s2 = Stage()  # HARD-CODED EXCHANGE FOLLOWED BY MD

# Create a Task object