This page collects typical usage examples of the Python method nipype.Node.clone. If you are unsure what Node.clone does, how to call it, or what it looks like in real code, the curated examples below should help; you can also read further about the enclosing class, nipype.Node.
Eight code examples of Node.clone are shown below, sorted by popularity.
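Before the examples, a minimal, self-contained sketch of what Node.clone does: it returns a copy of a configured node, registered under the new name passed as argument, which can then be used independently of the original. The add_two function and all node names below are illustrative, not taken from the examples.

from nipype import Node, Workflow
from nipype.interfaces.utility import Function

def add_two(x):
    return x + 2

add_first = Node(Function(input_names=['x'], output_names=['out'],
                          function=add_two),
                 name='add_first')
add_first.inputs.x = 1

# clone() returns a copy of the node, interface and configured inputs
# included, under the new, unique name passed as argument.
add_second = add_first.clone('add_second')
add_second.inputs.x = 10   # overriding the clone leaves add_first.inputs.x == 1

wf = Workflow(name='clone_demo')
wf.add_nodes([add_first, add_second])
# wf.run()  # executes both nodes independently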
Example 1: anisotropy
# Required import: from nipype import Node [as alias]
# Or: from nipype.Node import clone [as alias]
# Fractional anisotropy (FA) map
tensor2faNode = Node(mrtrix.Tensor2FractionalAnisotropy(), name = 'tensor_2_FA')

# Remove noisy background by multiplying the FA image with the binary brainmask
mrmultNode = Node(Function(input_names = ['in1', 'in2', 'out_file'],
                           output_names = ['out_file'],
                           function = multiplyMRTrix),
                  name = 'mrmult')

# Eigenvector (EV) map
tensor2vectorNode = Node(mrtrix.Tensor2Vector(), name = 'tensor_2_vector')

# Scale the EV map by the FA image
scaleEvNode = mrmultNode.clone('scale_ev')

# Mask of single-fibre voxels
erodeNode = Node(mrtrix.Erode(), name = 'erode_wmmask')
erodeNode.inputs.number_of_passes = number_of_passes

cleanFaNode = mrmultNode.clone('multiplyFA_Mask')

thresholdFANode = Node(mrtrix.Threshold(), name = 'threshold_FA')
thresholdFANode.inputs.absolute_threshold_value = absolute_threshold_value

# Response function coefficient
estResponseNode = Node(mrtrix.EstimateResponseForSH(), name = 'estimate_deconv_response')

# CSD computation
csdNode = Node(mrtrix.ConstrainedSphericalDeconvolution(), name = 'compute_CSD')
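The excerpt defines nodes only; its connect calls are omitted. A hedged sketch of how the two clones might be wired follows: the workflow name and the MRTrix output-field names ('FA', 'vector', 'out_file') are assumptions here, not taken from the example.

# Hypothetical wiring for the nodes above; connections and output-field
# names are assumptions, since the excerpt omits its connect calls.
wf = Workflow(name='dti_maps')
wf.connect(tensor2faNode, 'FA', mrmultNode, 'in1')           # FA * brainmask
wf.connect(tensor2vectorNode, 'vector', scaleEvNode, 'in1')  # EV map
wf.connect(mrmultNode, 'out_file', scaleEvNode, 'in2')       # scaled by masked FA
wf.connect(mrmultNode, 'out_file', cleanFaNode, 'in1')       # masked FA
wf.connect(erodeNode, 'out_file', cleanFaNode, 'in2')        # times eroded WM mask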
Example 2: group_multregress_openfmri
# Required import: from nipype import Node [as alias]
# Or: from nipype.Node import clone [as alias]
#......... some code omitted here .........
                                                'cope_id', '.gz']]
    dg.inputs.template_args['varcopes'] = [['model_id', 'task_id', subj_list, 'var', '',
                                            'var', 'cope_id', '.gz']]
    dg.iterables = ('cope_id', cope_ids)
    dg.inputs.sort_filelist = False

    wk.connect(info, 'model_id', dg, 'model_id')
    wk.connect(info, 'task_id', dg, 'task_id')

    model = Node(MultipleRegressDesign(), name='l2model')
    model.inputs.groups = groups
    model.inputs.contrasts = contrasts[idx]
    model.inputs.regressors = regressors_needed[idx]

    mergecopes = Node(Merge(dimension='t'), name='merge_copes')
    wk.connect(dg, 'copes', mergecopes, 'in_files')

    if flamemodel != 'ols':
        mergevarcopes = Node(Merge(dimension='t'), name='merge_varcopes')
        wk.connect(dg, 'varcopes', mergevarcopes, 'in_files')

    mask_file = fsl.Info.standard_image('MNI152_T1_2mm_brain_mask.nii.gz')
    flame = Node(FLAMEO(), name='flameo')
    flame.inputs.mask_file = mask_file
    flame.inputs.run_mode = flamemodel
    #flame.inputs.infer_outliers = True

    wk.connect(model, 'design_mat', flame, 'design_file')
    wk.connect(model, 'design_con', flame, 't_con_file')
    wk.connect(mergecopes, 'merged_file', flame, 'cope_file')
    if flamemodel != 'ols':
        wk.connect(mergevarcopes, 'merged_file', flame, 'var_cope_file')
    wk.connect(model, 'design_grp', flame, 'cov_split_file')

    if nonparametric:
        palm = Node(Function(input_names=['cope_file', 'design_file', 'contrast_file',
                                          'group_file', 'mask_file', 'cluster_threshold'],
                             output_names=['palm_outputs'],
                             function=run_palm),
                    name='palm')
        palm.inputs.cluster_threshold = 3.09
        palm.inputs.mask_file = mask_file
        palm.plugin_args = {'sbatch_args': '-p om_all_nodes -N1 -c2 --mem=10G',
                            'overwrite': True}
        wk.connect(model, 'design_mat', palm, 'design_file')
        wk.connect(model, 'design_con', palm, 'contrast_file')
        wk.connect(mergecopes, 'merged_file', palm, 'cope_file')
        wk.connect(model, 'design_grp', palm, 'group_file')

    smoothest = Node(SmoothEstimate(), name='smooth_estimate')
    wk.connect(flame, 'zstats', smoothest, 'zstat_file')
    smoothest.inputs.mask_file = mask_file

    cluster = Node(Cluster(), name='cluster')
    wk.connect(smoothest, 'dlh', cluster, 'dlh')
    wk.connect(smoothest, 'volume', cluster, 'volume')
    cluster.inputs.connectivity = 26
    cluster.inputs.threshold = 2.3
    cluster.inputs.pthreshold = 0.05
    cluster.inputs.out_threshold_file = True
    cluster.inputs.out_index_file = True
    cluster.inputs.out_localmax_txt_file = True
    wk.connect(flame, 'zstats', cluster, 'in_file')

    ztopval = Node(ImageMaths(op_string='-ztop', suffix='_pval'),
                   name='z2pval')
    wk.connect(flame, 'zstats', ztopval, 'in_file')

    sinker = Node(DataSink(), name='sinker')
    sinker.inputs.base_directory = os.path.join(out_dir, 'task%03d' % task, contrast[0][0])
    sinker.inputs.substitutions = [('_cope_id', 'contrast'),
                                   ('_maths_', '_reversed_')]

    wk.connect(flame, 'zstats', sinker, 'stats')
    wk.connect(cluster, 'threshold_file', sinker, 'stats.@thr')
    wk.connect(cluster, 'index_file', sinker, 'stats.@index')
    wk.connect(cluster, 'localmax_txt_file', sinker, 'stats.@localmax')
    if nonparametric:
        wk.connect(palm, 'palm_outputs', sinker, 'stats.palm')

    if not no_reversal:
        zstats_reverse = Node(BinaryMaths(), name='zstats_reverse')
        zstats_reverse.inputs.operation = 'mul'
        zstats_reverse.inputs.operand_value = -1
        wk.connect(flame, 'zstats', zstats_reverse, 'in_file')

        cluster2 = cluster.clone(name='cluster2')
        wk.connect(smoothest, 'dlh', cluster2, 'dlh')
        wk.connect(smoothest, 'volume', cluster2, 'volume')
        wk.connect(zstats_reverse, 'out_file', cluster2, 'in_file')

        ztopval2 = ztopval.clone(name='ztopval2')
        wk.connect(zstats_reverse, 'out_file', ztopval2, 'in_file')

        wk.connect(zstats_reverse, 'out_file', sinker, 'stats.@neg')
        wk.connect(cluster2, 'threshold_file', sinker, 'stats.@neg_thr')
        wk.connect(cluster2, 'index_file', sinker, 'stats.@neg_index')
        wk.connect(cluster2, 'localmax_txt_file', sinker, 'stats.@neg_localmax')

    meta_workflow.add_nodes([wk])
    return meta_workflow
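The reason for cluster.clone(name='cluster2') rather than a fresh Node(Cluster(), ...) is that clone copies every input already configured on the node, so the negated z-stats are clustered with exactly the same settings as the positive pass. An illustrative sanity check, not part of the example:

# clone() copies the configured inputs, so a clone inherits the exact
# clustering settings set on `cluster` above (illustrative check only;
# cluster_copy is a hypothetical extra clone).
cluster_copy = cluster.clone(name='cluster_copy')
assert cluster_copy.inputs.connectivity == 26
assert cluster_copy.inputs.threshold == 2.3
assert cluster_copy.inputs.pthreshold == 0.05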
Example 3: group_onesample_openfmri
# Required import: from nipype import Node [as alias]
# Or: from nipype.Node import clone [as alias]
def group_onesample_openfmri(dataset_dir, model_id=None, task_id=None,
                             l1output_dir=None, out_dir=None, no_reversal=False):
    wk = Workflow(name='one_sample')
    wk.base_dir = os.path.abspath(work_dir)  # work_dir: assumed defined at module level

    info = Node(util.IdentityInterface(fields=['model_id', 'task_id', 'dataset_dir']),
                name='infosource')
    info.inputs.model_id = model_id
    info.inputs.task_id = task_id
    info.inputs.dataset_dir = dataset_dir

    num_copes = contrasts_num(model_id, task_id, dataset_dir)

    dg = Node(DataGrabber(infields=['model_id', 'task_id', 'cope_id'],
                          outfields=['copes', 'varcopes']),
              name='grabber')
    dg.inputs.template = os.path.join(l1output_dir,
                                      'model%03d/task%03d/*/%scopes/mni/%scope%02d.nii.gz')
    dg.inputs.template_args['copes'] = [['model_id', 'task_id', '', '', 'cope_id']]
    dg.inputs.template_args['varcopes'] = [['model_id', 'task_id', 'var', 'var', 'cope_id']]
    dg.iterables = ('cope_id', num_copes)
    dg.inputs.sort_filelist = True

    wk.connect(info, 'model_id', dg, 'model_id')
    wk.connect(info, 'task_id', dg, 'task_id')

    model = Node(L2Model(), name='l2model')
    wk.connect(dg, ('copes', get_len), model, 'num_copes')

    mergecopes = Node(Merge(dimension='t'), name='merge_copes')
    wk.connect(dg, 'copes', mergecopes, 'in_files')
    mergevarcopes = Node(Merge(dimension='t'), name='merge_varcopes')
    wk.connect(dg, 'varcopes', mergevarcopes, 'in_files')

    mask_file = fsl.Info.standard_image('MNI152_T1_2mm_brain_mask.nii.gz')
    flame = Node(FLAMEO(), name='flameo')
    flame.inputs.mask_file = mask_file
    flame.inputs.run_mode = 'flame1'

    wk.connect(model, 'design_mat', flame, 'design_file')
    wk.connect(model, 'design_con', flame, 't_con_file')
    wk.connect(mergecopes, 'merged_file', flame, 'cope_file')
    wk.connect(mergevarcopes, 'merged_file', flame, 'var_cope_file')
    wk.connect(model, 'design_grp', flame, 'cov_split_file')

    smoothest = Node(SmoothEstimate(), name='smooth_estimate')
    wk.connect(flame, 'zstats', smoothest, 'zstat_file')
    smoothest.inputs.mask_file = mask_file

    cluster = Node(Cluster(), name='cluster')
    wk.connect(smoothest, 'dlh', cluster, 'dlh')
    wk.connect(smoothest, 'volume', cluster, 'volume')
    cluster.inputs.connectivity = 26
    cluster.inputs.threshold = 2.3
    cluster.inputs.pthreshold = 0.05
    cluster.inputs.out_threshold_file = True
    cluster.inputs.out_index_file = True
    cluster.inputs.out_localmax_txt_file = True
    wk.connect(flame, 'zstats', cluster, 'in_file')

    ztopval = Node(ImageMaths(op_string='-ztop', suffix='_pval'),
                   name='z2pval')
    wk.connect(flame, 'zstats', ztopval, 'in_file')

    sinker = Node(DataSink(), name='sinker')
    sinker.inputs.base_directory = os.path.abspath(out_dir)
    sinker.inputs.substitutions = [('_cope_id', 'contrast'),
                                   ('_maths__', '_reversed_')]

    wk.connect(flame, 'zstats', sinker, 'stats')
    wk.connect(cluster, 'threshold_file', sinker, 'stats.@thr')
    wk.connect(cluster, 'index_file', sinker, 'stats.@index')
    wk.connect(cluster, 'localmax_txt_file', sinker, 'stats.@localmax')

    if not no_reversal:
        zstats_reverse = Node(BinaryMaths(), name='zstats_reverse')
        zstats_reverse.inputs.operation = 'mul'
        zstats_reverse.inputs.operand_value = -1
        wk.connect(flame, 'zstats', zstats_reverse, 'in_file')

        cluster2 = cluster.clone(name='cluster2')
        wk.connect(smoothest, 'dlh', cluster2, 'dlh')
        wk.connect(smoothest, 'volume', cluster2, 'volume')
        wk.connect(zstats_reverse, 'out_file', cluster2, 'in_file')

        ztopval2 = ztopval.clone(name='ztopval2')
        wk.connect(zstats_reverse, 'out_file', ztopval2, 'in_file')

        wk.connect(zstats_reverse, 'out_file', sinker, 'stats.@neg')
        wk.connect(cluster2, 'threshold_file', sinker, 'stats.@neg_thr')
        wk.connect(cluster2, 'index_file', sinker, 'stats.@neg_index')
        wk.connect(cluster2, 'localmax_txt_file', sinker, 'stats.@neg_localmax')

    return wk
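Since the function returns the assembled workflow, running it is a one-liner. A hypothetical invocation follows; the dataset paths and IDs are placeholders, and work_dir is assumed to be defined in the function's module.

# Hypothetical invocation; paths and IDs are placeholders for illustration.
wf = group_onesample_openfmri('/data/ds000114',
                              model_id=1,
                              task_id=1,
                              l1output_dir='/output/level1',
                              out_dir='/output/group')
wf.run(plugin='MultiProc')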
Example 4: create_workflow
# Required import: from nipype import Node [as alias]
# Or: from nipype.Node import clone [as alias]
def create_workflow(files,
                    subject_id,
                    n_vol=0,
                    despike=True,
                    TR=None,
                    slice_times=None,
                    slice_thickness=None,
                    fieldmap_images=[],
                    norm_threshold=1,
                    num_components=6,
                    vol_fwhm=None,
                    surf_fwhm=None,
                    lowpass_freq=-1,
                    highpass_freq=-1,
                    sink_directory=os.getcwd(),
                    FM_TEdiff=2.46,
                    FM_sigma=2,
                    FM_echo_spacing=.7,
                    target_subject=['fsaverage3', 'fsaverage4'],
                    name='resting'):
    wf = Workflow(name=name)

    # Skip starting volumes
    remove_vol = MapNode(fsl.ExtractROI(t_min=n_vol, t_size=-1),
                         iterfield=['in_file'],
                         name="remove_volumes")
    remove_vol.inputs.in_file = files

    # Run AFNI's despike. This is always run; however, whether this is fed to
    # realign depends on the input configuration
    despiker = MapNode(afni.Despike(outputtype='NIFTI_GZ'),
                       iterfield=['in_file'],
                       name='despike')
    #despiker.plugin_args = {'qsub_args': '-l nodes=1:ppn='}
    wf.connect(remove_vol, 'roi_file', despiker, 'in_file')

    # Run Nipy joint slice timing and realignment algorithm
    realign = Node(nipy.SpaceTimeRealigner(), name='realign')
    realign.inputs.tr = TR
    realign.inputs.slice_times = slice_times
    realign.inputs.slice_info = 2
    if despike:
        wf.connect(despiker, 'out_file', realign, 'in_file')
    else:
        wf.connect(remove_vol, 'roi_file', realign, 'in_file')

    # Compute TSNR on realigned data, regressing polynomials up to order 2
    tsnr = MapNode(TSNR(regress_poly=2), iterfield=['in_file'], name='tsnr')
    wf.connect(realign, 'out_file', tsnr, 'in_file')

    # Compute the median image across runs
    calc_median = Node(Function(input_names=['in_files'],
                                output_names=['median_file'],
                                function=median,
                                imports=imports),
                       name='median')
    wf.connect(tsnr, 'detrended_file', calc_median, 'in_files')

    # Coregister the median to the surface
    register = Node(freesurfer.BBRegister(),
                    name='bbregister')
    register.inputs.subject_id = subject_id
    register.inputs.init = 'fsl'
    register.inputs.contrast_type = 't2'
    register.inputs.out_fsl_file = True
    register.inputs.epi_mask = True

    # Compute fieldmaps and unwarp using them
    if fieldmap_images:
        fieldmap = Node(interface=EPIDeWarp(), name='fieldmap_unwarp')
        fieldmap.inputs.tediff = FM_TEdiff
        fieldmap.inputs.esp = FM_echo_spacing
        fieldmap.inputs.sigma = FM_sigma
        fieldmap.inputs.mag_file = fieldmap_images[0]
        fieldmap.inputs.dph_file = fieldmap_images[1]
        wf.connect(calc_median, 'median_file', fieldmap, 'exf_file')

        dewarper = MapNode(interface=fsl.FUGUE(), iterfield=['in_file'],
                           name='dewarper')
        wf.connect(tsnr, 'detrended_file', dewarper, 'in_file')
        wf.connect(fieldmap, 'exf_mask', dewarper, 'mask_file')
        wf.connect(fieldmap, 'vsm_file', dewarper, 'shift_in_file')
        wf.connect(fieldmap, 'exfdw', register, 'source_file')
    else:
        wf.connect(calc_median, 'median_file', register, 'source_file')

    # Get the subject's FreeSurfer source directory
    fssource = Node(FreeSurferSource(),
                    name='fssource')
    fssource.inputs.subject_id = subject_id
    fssource.inputs.subjects_dir = os.environ['SUBJECTS_DIR']

    # Extract wm+csf and brain masks by eroding FreeSurfer labels and then
    # transform the masks into the space of the median
    wmcsf = Node(freesurfer.Binarize(), name='wmcsfmask')
    mask = wmcsf.clone('anatmask')
    wmcsftransform = Node(freesurfer.ApplyVolTransform(inverse=True,
#......... some code omitted here .........
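Example 4 shows the full signature, so a hypothetical call illustrates what the function expects; every path and parameter value below is a placeholder, and a valid FreeSurfer SUBJECTS_DIR environment variable is assumed.

# Hypothetical invocation of create_workflow above; all values are
# placeholders, not taken from the original script.
wf = create_workflow(files=['/data/sub01/rest_run1.nii.gz'],
                     subject_id='sub01',
                     n_vol=4,                   # drop the first 4 volumes
                     TR=2.0,
                     slice_times=[0.0, 1.0, 0.05, 1.05],
                     slice_thickness=3.0,
                     vol_fwhm=6.0,
                     lowpass_freq=0.1,
                     highpass_freq=0.01,
                     sink_directory='/output/resting',
                     name='resting_sub01')
wf.base_dir = '/scratch/nipype_work'
wf.run(plugin='MultiProc', plugin_args={'n_procs': 4})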
Example 5: create_workflow
# Required import: from nipype import Node [as alias]
# Or: from nipype.Node import clone [as alias]
#......... some code omitted here .........
    # Filter the motion and art confounds and detrend
    filter1 = MapNode(fsl.GLM(out_f_name='F_mcart.nii',
                              out_pf_name='pF_mcart.nii',
                              demean=True),
                      iterfield=['in_file', 'design', 'out_res_name'],
                      name='filtermotion')
    wf.connect(normalize_func, 'normalized_files', filter1, 'in_file')
    wf.connect(normalize_func, ('normalized_files', rename, '_filtermotart'),
               filter1, 'out_res_name')
    wf.connect(createfilter1, 'out_files', filter1, 'design')
    #wf.connect(masktransform, 'transformed_file', filter1, 'mask')

    # Create a filter to remove noise components based on white matter and CSF
    createfilter2 = MapNode(Function(input_names=['realigned_file', 'mask_file',
                                                  'num_components',
                                                  'extra_regressors'],
                                     output_names=['out_files'],
                                     function=extract_noise_components,
                                     imports=imports),
                            iterfield=['realigned_file', 'extra_regressors'],
                            name='makecompcorrfilter')
    createfilter2.inputs.num_components = num_components
    wf.connect(createfilter1, 'out_files', createfilter2, 'extra_regressors')
    wf.connect(filter1, 'out_res', createfilter2, 'realigned_file')
    wf.connect(bin_and_erode, ('out_file', selectN, 2), createfilter2, 'mask_file')

    # Filter noise components from unsmoothed data
    filter2 = MapNode(fsl.GLM(out_f_name='F.nii',
                              out_pf_name='pF.nii',
                              demean=True),
                      iterfield=['in_file', 'design', 'out_res_name'],
                      name='filter_noise_nosmooth')
    wf.connect(normalize_func, 'normalized_files', filter2, 'in_file')
    wf.connect(normalize_func, ('normalized_files', rename, '_unsmooth_cleaned'),
               filter2, 'out_res_name')
    wf.connect(createfilter2, 'out_files', filter2, 'design')
    wf.connect(mask, 'mask_file', filter2, 'mask')

    # Filter noise components from smoothed data
    filter3 = MapNode(fsl.GLM(out_f_name='F.nii',
                              out_pf_name='pF.nii',
                              demean=True),
                      iterfield=['in_file', 'design', 'out_res_name'],
                      name='filter_noise_smooth')
    wf.connect(smooth, ('smoothed_files', rename, '_cleaned'),
               filter3, 'out_res_name')
    wf.connect(smooth, 'smoothed_files', filter3, 'in_file')
    wf.connect(createfilter2, 'out_files', filter3, 'design')
    wf.connect(mask, 'mask_file', filter3, 'mask')

    # Bandpass filter the data
    bandpass1 = Node(Function(input_names=['files', 'lowpass_freq',
                                           'highpass_freq', 'fs'],
                              output_names=['out_files'],
                              function=bandpass_filter,
                              imports=imports),
                     name='bandpass_unsmooth')
    bandpass1.inputs.fs = 1. / TR
    bandpass1.inputs.highpass_freq = highpass_freq
    bandpass1.inputs.lowpass_freq = lowpass_freq
    wf.connect(filter2, 'out_res', bandpass1, 'files')

    bandpass2 = bandpass1.clone(name='bandpass_smooth')
    wf.connect(filter3, 'out_res', bandpass2, 'files')

    bandpass = Node(Function(input_names=['in1', 'in2'],
                             output_names=['out_file'],
                             function=merge_files,
                             imports=imports),
                    name='bandpass_merge')
    wf.connect(bandpass1, 'out_files', bandpass, 'in1')
    wf.connect(bandpass2, 'out_files', bandpass, 'in2')

    # Save the relevant data into an output directory
    datasink = Node(interface=DataSink(), name="datasink")
    datasink.inputs.base_directory = sink_directory
    datasink.inputs.container = subject_id
    #datasink.inputs.substitutions = [('_target_subject_', '')]
    #datasink.inputs.regexp_substitutions = (r'(/_.*(\d+/))', r'/run\2')
    wf.connect(realign, 'realignment_parameters', datasink, 'resting.qa.motion')
    wf.connect(art, 'norm_files', datasink, 'resting.qa.art.@norm')
    wf.connect(art, 'intensity_files', datasink, 'resting.qa.art.@intensity')
    wf.connect(art, 'outlier_files', datasink, 'resting.qa.art.@outlier_files')
    wf.connect(smooth, 'smoothed_files', datasink, 'resting.timeseries.fullpass')
    wf.connect(bin_and_erode, 'out_file', datasink, 'resting.mask_files')
    wf.connect(mask, 'mask_file', datasink, 'resting.mask_files.@brainmask')
    wf.connect(filter1, 'out_f', datasink, 'resting.qa.compmaps.@mc_F')
    wf.connect(filter1, 'out_pf', datasink, 'resting.qa.compmaps.@mc_pF')
    wf.connect(filter2, 'out_f', datasink, 'resting.qa.compmaps')
    wf.connect(filter2, 'out_pf', datasink, 'resting.qa.compmaps.@p')
    wf.connect(filter3, 'out_f', datasink, 'resting.qa.compmaps.@sF')
    wf.connect(filter3, 'out_pf', datasink, 'resting.qa.compmaps.@sp')
    wf.connect(bandpass, 'out_file', datasink, 'resting.timeseries.bandpassed')
    wf.connect(createfilter1, 'out_files',
               datasink, 'resting.regress.@regressors')
    wf.connect(createfilter2, 'out_files',
               datasink, 'resting.regress.@compcorr')
    return wf
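Here clone plays the same role as in Examples 2 and 3, but for a Function node: bandpass2 inherits the fs, highpass_freq, and lowpass_freq values already set on bandpass1, so the smoothed and unsmoothed streams are filtered with identical parameters while remaining separately connectable. The check below is illustrative only, not part of the original workflow.

# Illustrative only: the clone inherits bandpass1's configured inputs,
# yet is an independent node that can be reconfigured separately.
assert bandpass2.inputs.fs == bandpass1.inputs.fs
assert bandpass2.inputs.lowpass_freq == bandpass1.inputs.lowpass_freq
assert bandpass2.name == 'bandpass_smooth'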
Example 6: Node
# Required import: from nipype import Node [as alias]
# Or: from nipype.Node import clone [as alias]
bbregNode.inputs.out_fsl_file = True
bbregNode.inputs.args = "--tol1d 1e-3"
#bbregNode.inputs.subject_id = reconallFolderName
# ### Surface2Vol
# Transform Left Hemisphere
surf2volNode_lh = Node(freesurfer.utils.Surface2VolTransform(), name = 'surf2vol_lh')
surf2volNode_lh.inputs.hemi = 'lh'
surf2volNode_lh.inputs.mkmask = True
#surf2volNode_lh.inputs.subject_id = reconallFolderName
surf2volNode_lh.inputs.vertexvol_file = 'test'
# Transform right hemisphere
surf2volNode_rh = surf2volNode_lh.clone('surf2vol_rh')
surf2volNode_rh.inputs.hemi = 'rh'
# Merge the hemispheres
mergeHemisNode = Node(fsl.BinaryMaths(), name = 'mergeHemis')
mergeHemisNode.inputs.operation = 'add'
mergeHemisNode.inputs.output_type = 'NIFTI_GZ'
# ### Registration
# Rotate high-res (1mm) WM-border to match dwi data w/o resampling
applyReg_anat2diff_1mm = Node(freesurfer.ApplyVolTransform(), name = 'wmoutline2diff_1mm')
applyReg_anat2diff_1mm.inputs.inverse = True
applyReg_anat2diff_1mm.inputs.interp = 'nearest'
applyReg_anat2diff_1mm.inputs.no_resample = True
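Example 6 shows the clone-then-override idiom: surf2volNode_rh starts as an exact copy of the configured left-hemisphere node and only hemi is changed. The same idiom could extend the registration step, sketched below with a hypothetical node name and an illustrative override.

# Hypothetical second transform derived from the configured 1mm node above;
# the clone keeps the inverse/no_resample settings and overrides only interp.
applyReg_anat2diff = applyReg_anat2diff_1mm.clone('wmoutline2diff')
applyReg_anat2diff.inputs.interp = 'trilin'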
Example 7: Node
# Required import: from nipype import Node [as alias]
# Or: from nipype.Node import clone [as alias]
bbregNode.inputs.subject_id = reconallFolderName
# ### Surface2Vol
# Transform Left Hemisphere
lhWhiteFilename = "lh_white.nii.gz"
surf2volNode_lh = Node(freesurfer.utils.Surface2VolTransform(), name="surf2vol_lh")
surf2volNode_lh.inputs.hemi = "lh"
surf2volNode_lh.inputs.mkmask = True
surf2volNode_lh.inputs.subject_id = reconallFolderName
# Transform right hemisphere
surf2volNode_rh = surf2volNode_lh.clone("surf2vol_rh")
surf2volNode_rh.inputs.hemi = "rh"
# Merge the hemispheres
mergeHemisNode = Node(fsl.BinaryMaths(), name="mergeHemis")
mergeHemisNode.inputs.operation = "add"
mergeHemisNode.inputs.output_type = "NIFTI_GZ"
# ### Registration
# Rotate high-res (1mm) WM-border to match dwi data w/o resampling
applyReg_anat2diff_1mm = Node(freesurfer.ApplyVolTransform(), name="wmoutline2diff_1mm")
applyReg_anat2diff_1mm.inputs.inverse = True
Example 8: Node
# Required import: from nipype import Node [as alias]
# Or: from nipype.Node import clone [as alias]
# reconallNode.plugin_args = {'overwrite': True, 'oarsub_args': '-l nodes=1,walltime=16:00:00'}
# Convert the T1 mgz image to nifti format for later usage
# mriConverter = Node(freesurfer.preprocess.MRIConvert(), name = 'convertAparcAseg')
# mriConverter.inputs.out_type = 'niigz'
# mriConverter.inputs.out_orientation = 'RAS'
mriConverter = Node(Function(input_names = ['in_file', 'out_file'],
                             output_names = ['out_file'],
                             function = mri_convert_bm),
                    name = 'convertAparcAseg')
# Convert the Brainmask file
# brainmaskConv = Node(freesurfer.preprocess.MRIConvert(), name = 'convertBrainmask')
# brainmaskConv.inputs.out_type = 'niigz'
# brainmaskConv.inputs.out_orientation = 'RAS'
brainmaskConv = mriConverter.clone('convertBrainmask')
# ### Diffusion Data (dwMRI) preprocessing
# First extract the diffusion vectors and the pulse intensity (bvec and bval)
# Use dcm2nii for this task
dcm2niiNode = Node(Dcm2nii(), name = 'dcm2niiAndBvecs')
dcm2niiNode.inputs.gzip_output = True
dcm2niiNode.inputs.date_in_filename = False
dcm2niiNode.inputs.events_in_filename = False
# Extract the first image of the DTI series i.e. the b0 image
extrctB0Node = Node(Function(input_names = ['dwMriFile'], output_names = ['b0'],
                             function = extractB0),
                    name = 'Extract_b0')
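Example 8 clones a Function node, so brainmaskConv reuses the same wrapped helper under a new name. The mri_convert_bm helper itself is not shown in the excerpt; a plausible stand-in that shells out to FreeSurfer's mri_convert could look like this (an assumption, not the original implementation):

# Plausible stand-in for the mri_convert_bm helper used above; an
# assumption, since the excerpt does not show the original implementation.
def mri_convert_bm(in_file, out_file):
    # Function-node bodies must be self-contained, hence the local import.
    import subprocess
    subprocess.check_call(['mri_convert', '--out_orientation', 'RAS',
                           in_file, out_file])
    return out_file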