本文整理汇总了Python中nipype.pipeline.engine.Node.clone方法的典型用法代码示例。如果您正苦于以下问题:Python Node.clone方法的具体用法?Python Node.clone怎么用?Python Node.clone使用的例子?那么恭喜您, 这里精选的方法代码示例或许可以为您提供帮助。您也可以进一步了解该方法所在类nipype.pipeline.engine.Node
的用法示例。
在下文中一共展示了Node.clone方法的9个代码示例,这些例子默认根据受欢迎程度排序。您可以为喜欢或者感觉有用的代码点赞,您的评价将有助于系统推荐出更棒的Python代码示例。
示例1: anatomical_preprocessing
# 需要导入模块: from nipype.pipeline.engine import Node [as 别名]
# 或者: from nipype.pipeline.engine.Node import clone [as 别名]
def anatomical_preprocessing():
    """Build the anatomical preprocessing workflow.

    Inputs:
        MP2RAGE skull-stripped image (Spectre-2010) plus its GM/WM/CSF and
        FIRST segmentations.
    Workflow:
        1. reorient every anatomical image to RPI
        2. erode the reoriented brain and binarize it into a brain mask
    Returns:
        nipype Workflow exposing ``brain``, ``brain_gm``, ``brain_wm``,
        ``brain_csf``, ``brain_first`` and ``brain_mask`` on its outputnode.
    """
    wf = Workflow('anat_preprocess')

    in_fields = ['anat', 'anat_gm', 'anat_wm', 'anat_csf', 'anat_first']
    out_fields = ['brain', 'brain_gm', 'brain_wm', 'brain_csf', 'brain_first',
                  'brain_mask', ]
    inputnode = Node(util.IdentityInterface(fields=in_fields), name='inputnode')
    outputnode = Node(util.IdentityInterface(fields=out_fields), name='outputnode')

    # One reorient node per image; the tissue/FIRST nodes are clones of the
    # fully configured template so they share orientation and output type.
    reorient = Node(interface=preprocess.Resample(), name='anat_reorient')
    reorient.inputs.orientation = 'RPI'
    reorient.inputs.outputtype = 'NIFTI'
    reorient_gm = reorient.clone('anat_preproc_gm')
    reorient_wm = reorient.clone('anat_preproc_wm')
    reorient_csf = reorient.clone('anat_preproc_csf')
    reorient_first = reorient.clone('anat_preproc_first')

    # Erode the reoriented brain, then binarize the eroded image into a mask.
    erode = Node(interface=fsl.ErodeImage(), name='anat_preproc')
    make_mask = Node(interface=fsl.UnaryMaths(), name='anat_preproc_mask')
    make_mask.inputs.operation = 'bin'

    wf.connect([
        (inputnode, reorient, [('anat', 'in_file')]),
        (inputnode, reorient_gm, [('anat_gm', 'in_file')]),
        (inputnode, reorient_wm, [('anat_wm', 'in_file')]),
        (inputnode, reorient_csf, [('anat_csf', 'in_file')]),
        (inputnode, reorient_first, [('anat_first', 'in_file')]),
        (reorient, erode, [('out_file', 'in_file')]),
        (erode, make_mask, [('out_file', 'in_file')]),
        (erode, outputnode, [('out_file', 'brain')]),
        (make_mask, outputnode, [('out_file', 'brain_mask')]),
        (reorient_gm, outputnode, [('out_file', 'brain_gm')]),
        (reorient_wm, outputnode, [('out_file', 'brain_wm')]),
        (reorient_csf, outputnode, [('out_file', 'brain_csf')]),
        (reorient_first, outputnode, [('out_file', 'brain_first')]),
    ])
    return wf
示例2: learning_predict_data_2samp_wf
# 需要导入模块: from nipype.pipeline.engine import Node [as 别名]
# 或者: from nipype.pipeline.engine.Node import clone [as 别名]
#.........这里部分代码省略.........
# NOTE(review): truncated fragment of learning_predict_data_2samp_wf -- the
# function header and the opening of this util.Function node (its name and
# first input_names entries) are omitted above this excerpt.
'X_file_nki',
'df_file_nki',
'reverse_split',
'random_state_nki',
'run_learning_curve',
'life_test_size'],
output_names=['scatter_file',
'brain_age_scatter_file',
'df_life_out_file',
'df_nki_out_file',
'df_big_out_file',
'model_out_file',
'df_res_out_file',
'tuning_curve_file',
'scatter_file_cv',
'learning_curve_plot_file',
'learning_curve_df_file'],
function=run_prediction_split_fct),
name='prediction_split')
# Node that maps trained-model weights back into the source spaces, one
# output per entry in multimodal_backprojection_info.
backproject_and_split_weights = Node(util.Function(input_names=['trained_model_file',
'multimodal_backprojection_info',
'data_str',
'target_name'],
output_names=['out_file_list',
'out_file_render_list'],
function=backproject_and_split_weights_fct),
name='backproject_and_split_weights')
# One clone of the prediction_split template per confound-regression setting;
# the clone name encodes the setting so datasink folders stay distinct.
# NOTE(review): `i` is never incremented in the visible portion of this loop,
# so every iteration would overwrite prediction_node_dict[0] -- presumably an
# `i += 1` exists in the omitted code; confirm against the full source.
i = 0
for reg in confound_regression:
the_out_node_str = 'single_source_model_reg_%s_' % (reg)
prediction_node_dict[i] = prediction_split.clone(the_out_node_str)
the_in_node = prediction_node_dict[i]
the_in_node.inputs.regress_confounds = reg
the_in_node.inputs.run_cv = run_cv
the_in_node.inputs.n_jobs_cv = n_jobs_cv
the_in_node.inputs.run_tuning = run_tuning
the_in_node.inputs.reverse_split = reverse_split
the_in_node.inputs.random_state_nki = random_state_nki
the_in_node.inputs.run_learning_curve = run_learning_curve
the_in_node.inputs.life_test_size = life_test_size
wf.connect(select_multimodal_X, 'X_multimodal_selected_file', the_in_node, 'X_file')
wf.connect(target_infosource, 'target_name', the_in_node, 'target_name')
wf.connect(subject_selection_infosource, 'selection_criterium', the_in_node, 'selection_criterium')
wf.connect(select_subjects, 'df_use_pickle_file', the_in_node, 'df_file')
wf.connect(aggregate_multimodal_metrics, 'multimodal_name', the_in_node, 'data_str')
wf.connect(the_in_node, 'model_out_file', ds, the_out_node_str + 'trained_model')
wf.connect(the_in_node, 'scatter_file', ds_pdf, the_out_node_str + 'scatter')
wf.connect(the_in_node, 'brain_age_scatter_file', ds_pdf, the_out_node_str + 'brain_age_scatter')
wf.connect(the_in_node, 'df_life_out_file', ds_pdf, the_out_node_str + 'predicted_life')
wf.connect(the_in_node, 'df_nki_out_file', ds_pdf, the_out_node_str + 'predicted_nki')
wf.connect(the_in_node, 'df_big_out_file', ds_pdf, the_out_node_str + 'predicted')
wf.connect(the_in_node, 'df_res_out_file', ds_pdf, the_out_node_str + 'results_error')
wf.connect(the_in_node, 'tuning_curve_file', ds_pdf, the_out_node_str + 'tuning_curve')
wf.connect(the_in_node, 'scatter_file_cv', ds_pdf, the_out_node_str + 'scatter_cv')
# NOTE(review): the two '[email protected]' strings below look like scraper email
# obfuscation of the original datasink sub-folder names (likely something
# like 'learning_curve.@...') -- recover them from the original source.
wf.connect(the_in_node, 'learning_curve_plot_file', ds_pdf, the_out_node_str + '[email protected]')
wf.connect(the_in_node, 'learning_curve_df_file', ds_pdf, the_out_node_str + '[email protected]')
# NKI
if run_2sample_training:
wf.connect(select_multimodal_X_nki, 'X_multimodal_selected_file', the_in_node, 'X_file_nki')
示例3: make_func_subcortical_masks
# 需要导入模块: from nipype.pipeline.engine import Node [as 别名]
# 或者: from nipype.pipeline.engine.Node import clone [as 别名]
#.........这里部分代码省略.........
# NOTE(review): truncated fragment of make_func_subcortical_masks -- the
# function header, inputnode/outputnode and the left-hemisphere ROI nodes
# are omitted above this excerpt.
right_nacc.inputs.roi_file = 'right_nacc.nii.gz'
# One ExtractROI node per subcortical structure: each picks a single volume
# (t_min, t_size=1) out of the 4D FIRST label image.
# NOTE(review): 'hipoocampus' is a typo for 'hippocampus', but it is baked
# into node names and output filenames -- renaming would change outputs.
right_amygdala = Node(interface=fsl.ExtractROI(), name = 'right_amygdala')
right_amygdala.inputs.t_min = 8
right_amygdala.inputs.t_size = 1
right_amygdala.inputs.roi_file = 'right_amygdala.nii.gz'
right_caudate = Node(interface=fsl.ExtractROI(), name = 'right_caudate')
right_caudate.inputs.t_min = 9
right_caudate.inputs.t_size = 1
right_caudate.inputs.roi_file = 'right_caudate.nii.gz'
right_hipoocampus = Node(interface=fsl.ExtractROI(), name = 'right_hipoocampus')
right_hipoocampus.inputs.t_min = 10
right_hipoocampus.inputs.t_size = 1
right_hipoocampus.inputs.roi_file = 'right_hipoocampus.nii.gz'
right_pallidum = Node(interface=fsl.ExtractROI(), name = 'right_pallidum')
right_pallidum.inputs.t_min = 11
right_pallidum.inputs.t_size = 1
right_pallidum.inputs.roi_file = 'right_pallidum.nii.gz'
right_putamen = Node(interface=fsl.ExtractROI(), name = 'right_putamen')
right_putamen.inputs.t_min = 12
right_putamen.inputs.t_size = 1
right_putamen.inputs.roi_file = 'right_putamen.nii.gz'
right_thalamus = Node(interface=fsl.ExtractROI(), name = 'right_thalamus')
right_thalamus.inputs.t_min = 13
right_thalamus.inputs.t_size = 1
right_thalamus.inputs.roi_file = 'right_thalamus.nii.gz'
midbrain = Node(interface=fsl.ExtractROI(), name = 'midbrain')
midbrain.inputs.t_min = 14
midbrain.inputs.t_size = 1
midbrain.inputs.roi_file = 'midbrain.nii.gz'
# Fan the 4D FIRST image out to every ROI-extraction node...
flow.connect( inputnode , 'func_first' , left_nacc, 'in_file' )
flow.connect( inputnode , 'func_first' , left_amygdala, 'in_file' )
flow.connect( inputnode , 'func_first' , left_caudate, 'in_file' )
flow.connect( inputnode , 'func_first' , left_hipoocampus,'in_file' )
flow.connect( inputnode , 'func_first' , left_pallidum, 'in_file' )
flow.connect( inputnode , 'func_first' , left_putamen, 'in_file' )
flow.connect( inputnode , 'func_first' , left_thalamus, 'in_file' )
flow.connect( inputnode , 'func_first' , right_nacc, 'in_file' )
flow.connect( inputnode , 'func_first' , right_amygdala, 'in_file' )
flow.connect( inputnode , 'func_first' , right_caudate, 'in_file' )
flow.connect( inputnode , 'func_first' , right_hipoocampus,'in_file' )
flow.connect( inputnode , 'func_first' , right_pallidum, 'in_file' )
flow.connect( inputnode , 'func_first' , right_putamen, 'in_file' )
flow.connect( inputnode , 'func_first' , right_thalamus, 'in_file' )
flow.connect( inputnode , 'func_first' , midbrain, 'in_file' )
# ...and expose every extracted mask on the outputnode.
flow.connect( left_nacc , 'roi_file' ,outputnode , 'left_nacc' )
flow.connect( left_amygdala , 'roi_file' ,outputnode , 'left_amygdala' )
flow.connect( left_caudate , 'roi_file' ,outputnode , 'left_caudate' )
flow.connect( left_hipoocampus , 'roi_file' ,outputnode , 'left_hipoocampus')
flow.connect( left_pallidum , 'roi_file' ,outputnode , 'left_pallidum')
flow.connect( left_putamen , 'roi_file' ,outputnode , 'left_putamen' )
flow.connect( left_thalamus , 'roi_file' ,outputnode , 'left_thalamus' )
flow.connect( right_nacc , 'roi_file' ,outputnode , 'right_nacc' )
flow.connect( right_amygdala , 'roi_file' ,outputnode , 'right_amygdala' )
flow.connect( right_caudate , 'roi_file' ,outputnode , 'right_caudate' )
flow.connect( right_hipoocampus, 'roi_file' ,outputnode , 'right_hipoocampus')
flow.connect( right_pallidum , 'roi_file' ,outputnode , 'right_pallidum')
flow.connect( right_putamen , 'roi_file' ,outputnode , 'right_putamen' )
flow.connect( right_thalamus , 'roi_file' ,outputnode , 'right_thalamus' )
flow.connect( midbrain , 'roi_file' ,outputnode , 'midbrain' )
# add images together
# Striatum = caudate + pallidum + putamen, combined with fslmaths.
right_striatum = Node(interface=fsl.MultiImageMaths(), name = 'right_striatum')
right_striatum.inputs.op_string = '-add %s -add %s -bin'
# NOTE(review): this sets an attribute on the Node object, not on its inputs;
# it should almost certainly be `right_striatum.inputs.out_file = ...`, or
# the filename is silently ignored by the interface.
right_striatum.out_file = 'right_striatum.nii.gz'
list_R_str = Node(util.Function(input_names = ['file_1', 'file_2'],
output_names= ['list'],
function = return_list),
name = 'list_str_r')
flow.connect( right_pallidum , 'roi_file' ,list_R_str , 'file_1' )
flow.connect( right_putamen , 'roi_file' ,list_R_str , 'file_2' )
flow.connect( right_caudate , 'roi_file' ,right_striatum , 'in_file' )
flow.connect( list_R_str , 'list' ,right_striatum , 'operand_files' )
flow.connect( right_striatum , 'out_file' ,outputnode , 'right_striatum' )
left_striatum = Node(interface=fsl.MultiImageMaths(), name = 'left_striatum')
# NOTE(review): unlike the right striatum, this op_string omits the trailing
# '-bin', so the left striatum is not binarized -- confirm this asymmetry is
# intentional.
left_striatum.inputs.op_string = '-add %s -add %s'
# NOTE(review): same `.inputs.` omission as right_striatum.out_file above.
left_striatum.out_file = 'left_striatum.nii.gz'
list_L_str = list_R_str.clone('list_str_l')
flow.connect( left_pallidum , 'roi_file' ,list_L_str , 'file_1' )
flow.connect( left_putamen , 'roi_file' ,list_L_str , 'file_2' )
flow.connect( left_caudate , 'roi_file' ,left_striatum , 'in_file' )
flow.connect( list_L_str , 'list' ,left_striatum , 'operand_files' )
flow.connect( left_striatum , 'out_file' ,outputnode , 'left_striatum' )
return flow
示例4: Node
# 需要导入模块: from nipype.pipeline.engine import Node [as 别名]
# 或者: from nipype.pipeline.engine.Node import clone [as 别名]
# NOTE(review): free-standing fragment -- `medwall`, `selectfiles`,
# `addmasks`, `fs` (nipype FreeSurfer interfaces) and `fsl` are defined
# outside this excerpt.
# Morphologically close the combined mask: binarize at 0.5, then dilate and
# erode by 10 voxels each.
morph_closing = Node(fs.Binarize(min=0.5,
dilate=10,
erode=10),
name='morph_close')
medwall.connect([(addmasks, morph_closing, [('out_file', 'in_file')])])
'''alternative with thickness'''
# Alternative masks from the per-hemisphere thickness maps: keep voxels with
# thickness <= 0.2 (presumably marking the medial wall -- confirm).
wallmask_rh = Node(fs.Binarize(max=0.2,
out_type = 'nii.gz'),
name='wallmask_rh')
wallmask_lh = wallmask_rh.clone('wallmask_lh')
medwall.connect([(selectfiles, wallmask_rh, [('thickness_rh', 'in_file')]),
(selectfiles, wallmask_lh, [('thickness_lh', 'in_file')])
])
# Merge the two hemisphere masks into one volume.
addmasks2= Node(fsl.BinaryMaths(operation='add'),
name='addmasks2')
medwall.connect([(wallmask_rh, addmasks2, [('binary_file', 'in_file')]),
(wallmask_lh, addmasks2, [('binary_file', 'operand_file')])])
'''
followed by
3dclust -savemask $out 0 20 $in
'''
示例5: run_tbss_wf
# 需要导入模块: from nipype.pipeline.engine import Node [as 别名]
# 或者: from nipype.pipeline.engine.Node import clone [as 别名]
#.........这里部分代码省略.........
# NOTE(review): truncated fragment of run_tbss_wf -- `wf`, `selectfiles`,
# `ds`, `bvals_with_nodiff_0`, `b0_4d_init_0`, `hmc_pipeline`,
# `ecc_pipeline` and `apply_hmc_and_ecc` are defined outside this excerpt.
# Extract the b0 volumes from the raw dMRI series using a bval file whose
# b=0 entries are tagged 'nodiff'.
wf.connect(selectfiles, 'dMRI_data', b0_4d_init_0, 'in_dwi')
#wf.connect(selectfiles, 'bval_file', b0_4d_init_0, 'in_bval')
wf.connect(bvals_with_nodiff_0, 'bval_file_zero', b0_4d_init_0, 'in_bval')
b0_4d_init_0.inputs.b = 'nodiff'
# Rigid-body (6 dof) align all b0s to the first b0, average them, and BET
# the mean to obtain an initial brain mask.
first_b0 = Node(fsl.ExtractROI(t_min=0, t_size=1), name='first_b0')
wf.connect(b0_4d_init_0, 'out_file', first_b0, 'in_file')
flirt = Node(fsl.FLIRT(dof=6, out_file='b0_moco.nii.gz'), name='flirt')
wf.connect(b0_4d_init_0, 'out_file', flirt, 'in_file')
wf.connect(first_b0, 'roi_file', flirt, 'reference')
mean_b0_moco_init_0 = Node(fsl.MeanImage(), name='mean_b0_moco_init_0')
wf.connect(flirt, 'out_file', mean_b0_moco_init_0, 'in_file')
b0_mask_init_0 = Node(fsl.BET(frac=0.3, mask=True, robust=True), name='b0_mask_init_0')
wf.connect(mean_b0_moco_init_0, 'out_file', b0_mask_init_0, 'in_file')
# HEAD MOTION CORRECTION PIPELINE
hmc = hmc_pipeline()
wf.connect(selectfiles, 'dMRI_data', hmc, 'inputnode.in_file')
#wf.connect(selectfiles, 'bval_file', hmc, 'inputnode.in_bval')
wf.connect(bvals_with_nodiff_0, 'bval_file_zero', hmc, 'inputnode.in_bval')
wf.connect(selectfiles, 'bvec_file', hmc, 'inputnode.in_bvec')
wf.connect(b0_mask_init_0, 'mask_file', hmc, 'inputnode.in_mask')
hmc.inputs.inputnode.ref_num = 0
wf.connect(hmc, 'outputnode.out_file', ds, 'moco')
# GET UPDATED MEAN B0 AND MASK
# Clones of the initial b0 nodes, re-run on the motion-corrected data.
b0_4d_init_1 = b0_4d_init_0.clone('b0_4d_init_1')
wf.connect(hmc, 'outputnode.out_file', b0_4d_init_1, 'in_dwi')
#wf.connect(selectfiles, 'bval_file', b0_4d_init_1, 'in_bval')
wf.connect(bvals_with_nodiff_0, 'bval_file_zero', b0_4d_init_1, 'in_bval')
mean_b0_moco_init_1 = mean_b0_moco_init_0.clone('mean_b0_moco_init_1')
wf.connect(b0_4d_init_1, 'out_file', mean_b0_moco_init_1, 'in_file')
b0_mask_init_1 = b0_mask_init_0.clone('b0_mask_init_1')
wf.connect(mean_b0_moco_init_1, 'out_file', b0_mask_init_1, 'in_file')
# EDDY
# Eddy-current correction on the raw data using the updated mask and the
# head-motion transforms.
ecc = ecc_pipeline()
wf.connect(selectfiles, 'dMRI_data', ecc, 'inputnode.in_file')
#wf.connect(selectfiles, 'bval_file', ecc, 'inputnode.in_bval')
wf.connect(bvals_with_nodiff_0, 'bval_file_zero', ecc, 'inputnode.in_bval')
wf.connect(b0_mask_init_1, 'mask_file', ecc, 'inputnode.in_mask')
wf.connect(hmc, 'outputnode.out_xfms', ecc, 'inputnode.in_xfms')
wf.connect(ecc, 'outputnode.out_file', ds, 'ecc')
# Apply hmc and ecc transforms to the raw data in a single resampling step.
combine_corrections = apply_hmc_and_ecc(name='combine_corrections')
wf.connect(hmc, 'outputnode.out_xfms', combine_corrections, 'inputnode.in_hmc')
wf.connect(ecc, 'outputnode.out_xfms', combine_corrections, 'inputnode.in_ecc')
wf.connect(selectfiles, 'dMRI_data', combine_corrections, 'inputnode.in_dwi')
wf.connect(combine_corrections, 'outputnode.out_file', ds, 'preprocessed')
# GET UPDATED MEAN B0 AND MASK
b0_4d = b0_4d_init_0.clone('b0_4d')
wf.connect(combine_corrections, 'outputnode.out_file', b0_4d, 'in_dwi')
示例6: makelist
# 需要导入模块: from nipype.pipeline.engine import Node [as 别名]
# 或者: from nipype.pipeline.engine.Node import clone [as 别名]
# robust min max of fields
def makelist(file1, file2):
    """Pack the two file paths into a list (helper for a nipype Function node)."""
    return [file1, file2]
# NOTE(review): fragment -- `simulated`, `simulation`, `corr_fields_txt` and
# the surrounding function are defined outside this excerpt.
# Wrap makelist as a Function node so two field images can be fed to a
# MapNode together.
make_list=Node(util.Function(input_names=['file1', 'file2'],
output_names=['filelist'],
function=makelist),
name='make_list')
# fslstats -r: robust min/max intensity per input image.
min_max = MapNode(fsl.ImageStats(op_string='-r'),
iterfield=['in_file'],
name='min_max')
# Reuse the text-writer node (defined in omitted code) for the min/max stats.
min_max_txt = corr_fields_txt.clone(name='min_max_txt')
min_max_txt.inputs.filename='min_max_fields.txt'
simulated.connect([(simulation, make_list, [('outputnode.nonlin_field_masked', 'file1'),
('outputnode.fmap_field_masked', 'file2')]),
(make_list, min_max, [('filelist', 'in_file')]),
(min_max, min_max_txt, [('out_stat', 'stats')])])
# correlation of different corrections to groundtruth
def makelist2(file1, file2, file3):
    """Pack the three file paths into a list (helper for a nipype Function node)."""
    return [file1, file2, file3]
# NOTE(review): this Node definition is cut off here -- the rest of the
# statement is omitted from the excerpt.
make_list2=Node(util.Function(input_names=['file1', 'file2', 'file3'],
示例7: learning_predict_data_wf
# 需要导入模块: from nipype.pipeline.engine import Node [as 别名]
# 或者: from nipype.pipeline.engine.Node import clone [as 别名]
#.........这里部分代码省略.........
# NOTE(review): truncated fragment of learning_predict_data_wf -- `wf`,
# `subject_selection_infosource`, `aggregate_multimodal_metrics` and the
# *_fct helper functions are defined outside this excerpt.
###############################################################################################################
# GET INDEXER FOR SUBJECTS OF INTEREST (as defined by selection criterium)
select_subjects = Node(util.Function(input_names=['df_all_subjects_pickle_file',
'subjects_selection_crit_dict',
'selection_criterium'],
output_names=['df_use_file',
'df_use_pickle_file',
'subjects_selection_index'],
function=select_subjects_fct),
name='select_subjects')
select_subjects.inputs.df_all_subjects_pickle_file = df_all_subjects_pickle_file
select_subjects.inputs.subjects_selection_crit_dict = subjects_selection_crit_dict
wf.connect(subject_selection_infosource, 'selection_criterium', select_subjects, 'selection_criterium')
###############################################################################################################
# SELECT MULTIMODAL X
# select subjects (rows) from multimodal X according to the indexer
select_multimodal_X = Node(util.Function(input_names=['X_multimodal_file', 'subjects_selection_index',
'selection_criterium'],
output_names=['X_multimodal_selected_file'],
function=select_multimodal_X_fct),
name='select_multimodal_X')
wf.connect(aggregate_multimodal_metrics, 'X_multimodal_file', select_multimodal_X, 'X_multimodal_file')
wf.connect(select_subjects, 'subjects_selection_index', select_multimodal_X, 'subjects_selection_index')
###############################################################################################################
# RUN PREDICTION
#
# Template node: predicts from an already-trained model (no fitting here).
prediction_node_dict = {}
select_trained_model_node_dict = {}
prediction = Node(util.Function(input_names=['trained_model_file',
'X_file',
'target_name',
'selection_criterium',
'df_file',
'data_str',
'regress_confounds'],
output_names=['scatter_file',
'brain_age_scatter_file',
'df_use_file',
'df_res_out_file'],
function=run_prediction_from_trained_model_fct),
name='prediction')
def rep(s):
    """Turn double-underscore separators back into dots (e.g. 'a__b' -> 'a.b')."""
    return '.'.join(s.split('__'))
# SelectFiles template locating previously trained models on disk; cloned
# below once per confound-regression setting.
select_trained_model = Node(nio.SelectFiles(trained_model_template), 'select_trained_model')
i = 0
for reg in confound_regression:
the_out_node_str = 'single_source_model_reg_%s_' % reg
select_trained_model_node_dict[i] = select_trained_model.clone(
the_out_node_str + 'select_trained_model')
select_trained_model_node_dict[i].inputs.base_directory = trained_model_dir
select_trained_model_node_dict[i].inputs.ana_stream = the_out_node_str
wf.connect(target_infosource, 'target_name', select_trained_model_node_dict[i], 'target_name')
# `rep` converts the '__'-joined multimodal name back to its dotted form
# before it is used in the SelectFiles template.
wf.connect(aggregate_multimodal_metrics, ('multimodal_name', rep),
select_trained_model_node_dict[i],
'multimodal_in_data_name')
# One prediction clone per setting, fed by its matching trained model.
prediction_node_dict[i] = prediction.clone(the_out_node_str)
the_in_node = prediction_node_dict[i]
the_in_node.inputs.regress_confounds = reg
wf.connect(select_trained_model_node_dict[i], 'trained_model', the_in_node, 'trained_model_file')
wf.connect(select_multimodal_X, 'X_multimodal_selected_file', the_in_node, 'X_file')
wf.connect(target_infosource, 'target_name', the_in_node, 'target_name')
wf.connect(subject_selection_infosource, 'selection_criterium', the_in_node, 'selection_criterium')
wf.connect(select_subjects, 'df_use_pickle_file', the_in_node, 'df_file')
wf.connect(aggregate_multimodal_metrics, 'multimodal_name', the_in_node, 'data_str')
wf.connect(the_in_node, 'scatter_file', ds_pdf, the_out_node_str + 'scatter')
wf.connect(the_in_node, 'brain_age_scatter_file', ds_pdf, the_out_node_str + 'brain_age_scatter')
wf.connect(the_in_node, 'df_use_file', ds_pdf, the_out_node_str + 'predicted')
wf.connect(the_in_node, 'df_res_out_file', ds_pdf, the_out_node_str + 'results_error')
i += 1
###############################################################################################################
# RUN WF
wf.write_graph(dotfilename=wf.name, graph2use='colored', format='pdf') # 'hierarchical')
wf.write_graph(dotfilename=wf.name, graph2use='orig', format='pdf')
wf.write_graph(dotfilename=wf.name, graph2use='flat', format='pdf')
if plugin_name == 'CondorDAGMan':
wf.run(plugin=plugin_name)
if plugin_name == 'MultiProc':
wf.run(plugin=plugin_name, plugin_args={'n_procs': use_n_procs})
示例8: create_similarity_pipeline
# 需要导入模块: from nipype.pipeline.engine import Node [as 别名]
# 或者: from nipype.pipeline.engine.Node import clone [as 别名]
def create_similarity_pipeline(name):
"""Build a workflow that scores four distortion-correction strategies
(linear, nonlinear, fieldmap, topup) against the reference anatomy using
five similarity metrics (mi, nmi, cc, cr, crl1) and writes them to a
per-subject text file.
"""
similarity=Workflow(name=name)
# inputnode
inputnode=Node(util.IdentityInterface(fields=['anat_brain',
'mask',
'lin_mean',
'nonlin_mean',
'fmap_mean',
'topup_mean',
'filename'
]),
name='inputnode')
# outputnode
outputnode=Node(util.IdentityInterface(fields=['textfile']),
name='outputnode')
# resample all means to make sure they have the same resolution as reference anatomy
resamp_mask = Node(afni.Resample(outputtype='NIFTI_GZ'), name='resample_mask')
resamp_lin = resamp_mask.clone(name = 'resample_lin')
resamp_nonlin = resamp_mask.clone(name='resample_nonlin')
resamp_fmap = resamp_mask.clone(name='resample_fmap')
resamp_topup = resamp_mask.clone(name='resample_topup')
similarity.connect([(inputnode, resamp_mask, [('mask', 'in_file'),
('anat_brain', 'master')]),
(inputnode, resamp_lin, [('lin_mean', 'in_file'),
('anat_brain', 'master')]),
(inputnode, resamp_nonlin, [('nonlin_mean', 'in_file'),
('anat_brain', 'master')]),
(inputnode, resamp_fmap, [('fmap_mean', 'in_file'),
('anat_brain', 'master')]),
(inputnode, resamp_topup, [('topup_mean', 'in_file'),
('anat_brain', 'master')]),
])
# calculate similarity (all possible metrics) for each method to the anatomy;
# the MapNode iterates over the metric list, one Similarity run per metric.
lin_sim = MapNode(interface = nutil.Similarity(),
name = 'similarity_lin',
iterfield=['metric'])
lin_sim.inputs.metric = ['mi','nmi','cc','cr','crl1']
# NOTE(review): clones presumably inherit the metric input from lin_sim, so
# the explicit re-assignments below look redundant (but harmless) -- confirm.
nonlin_sim = lin_sim.clone(name='similarity_nonlin')
nonlin_sim.inputs.metric = ['mi','nmi','cc','cr','crl1']
fmap_sim = lin_sim.clone(name='similarity_fmap')
fmap_sim.inputs.metric = ['mi','nmi','cc','cr','crl1']
topup_sim = lin_sim.clone(name='similarity_topup')
topup_sim.inputs.metric = ['mi','nmi','cc','cr','crl1']
similarity.connect([(inputnode, lin_sim, [('anat_brain', 'volume1')]),
(resamp_lin, lin_sim, [('out_file', 'volume2')]),
(resamp_mask, lin_sim, [('out_file', 'mask1'),
('out_file', 'mask2')]),
(inputnode, nonlin_sim, [('anat_brain', 'volume1')]),
(resamp_nonlin, nonlin_sim, [('out_file', 'volume2')]),
(resamp_mask, nonlin_sim, [('out_file', 'mask1'),
('out_file', 'mask2')]),
(inputnode, fmap_sim, [('anat_brain', 'volume1')]),
(resamp_fmap, fmap_sim, [('out_file', 'volume2')]),
(resamp_mask, fmap_sim, [('out_file', 'mask1'),
('out_file', 'mask2')]),
(inputnode, topup_sim, [('anat_brain', 'volume1')]),
(resamp_topup, topup_sim, [('out_file', 'volume2')]),
(resamp_mask, topup_sim, [('out_file', 'mask1'),
('out_file', 'mask2')])
])
# write values to one text file per subject
def write_text(lin_metrics, nonlin_metrics, fmap_metrics, topup_metrics, filename):
    """Stack the four similarity-metric vectors as columns of a
    whitespace-delimited text file and return the file's absolute path.
    """
    import numpy as np
    import os
    # Reshape each metric list into an (n, 1) column so they can be
    # concatenated side by side.
    columns = []
    for metrics in (lin_metrics, nonlin_metrics, fmap_metrics, topup_metrics):
        arr = np.array(metrics)
        columns.append(arr.reshape(np.size(arr), 1))
    table = np.concatenate(columns, axis=1)
    np.savetxt(filename, table, delimiter=' ', fmt='%f')
    return os.path.abspath(filename)
# Wrap write_text as a Function node; each *_sim MapNode delivers its list
# of metric values.
# NOTE(review): the node's output is named 'txtfile' while the outputnode
# declares 'textfile' -- the connection between them is in the omitted tail
# of this function; confirm the names line up there.
write_txt = Node(interface=Function(input_names=['lin_metrics', 'nonlin_metrics', 'fmap_metrics', 'topup_metrics', 'filename'],
output_names=['txtfile'],
function=write_text),
name='write_file')
# NOTE(review): this connect list is cut off here -- the closing bracket and
# any remaining connections are omitted from the excerpt.
similarity.connect([(inputnode, write_txt, [('filename', 'filename')]),
(lin_sim, write_txt, [('similarity', 'lin_metrics')]),
(nonlin_sim, write_txt, [('similarity', 'nonlin_metrics')]),
(fmap_sim, write_txt, [('similarity', 'fmap_metrics')]),
(topup_sim, write_txt, [('similarity', 'topup_metrics')]),
#.........这里部分代码省略.........
示例9: func2anat_linear
# 需要导入模块: from nipype.pipeline.engine import Node [as 别名]
# 或者: from nipype.pipeline.engine.Node import clone [as 别名]
def func2anat_linear():
"""Linear func-to-anat registration: mutual-information FLIRT initializes a
BBR FLIRT (white-matter boundary), then the inverted transform brings the
downsampled anatomy and the tissue masks into functional space.

NOTE(review): this function is truncated in this excerpt -- the CSF/FIRST
branches and the return statement are omitted below.
"""
import nipype.interfaces.fsl as fsl
# NOTE(review): 'shedule' is a typo for 'schedule' (local name only), and
# the FSL path is hard-coded to a 5.0 install -- consider deriving it from
# the FSLDIR environment variable.
bbr_shedule = '/usr/share/fsl/5.0/etc/flirtsch/bbr.sch'
#define workflow
linear = Workflow('func2anat_linear')
inputnode = Node(util.IdentityInterface(fields=['func_image',
'func_mask',
'reference_image',
'anat_wm',
'anat_csf',
'anat_gm',
'anat_first',]),
name = 'inputnode')
outputnode = Node(util.IdentityInterface(fields=['func2anat',
'func2anat_xfm',
'anat_downsample',
'anat2func_xfm',
'anat2func',
'func_gm',
'func_wm',
'func_csf',
'func_first']),
name= 'outputnode')
# Resample the anatomy to 2.3 mm isotropic (registered to itself, so only
# the voxel grid changes).
anatdownsample = Node(interface= fsl.FLIRT(), name = 'downsample_anat')
anatdownsample.inputs.apply_isoxfm = 2.3
anatdownsample.inputs.datatype = 'float'
# run flirt with mutual info
mutual_info = Node(interface= fsl.FLIRT(), name = 'func2anat_flirt0_mutualinfo')
mutual_info.inputs.cost = 'mutualinfo'
mutual_info.inputs.dof = 6
mutual_info.inputs.no_resample = True
# run flirt boundary based registration on a func_moco_disco using
# (a) white matter segment as a boundary and (b) the mutualinfo xfm for initialization
bbr = Node(interface= fsl.FLIRT(), name = 'func2anat_flirt1_bbr')
bbr.inputs.cost = 'bbr'
bbr.inputs.dof = 6
bbr.inputs.schedule = bbr_shedule
bbr.inputs.no_resample = True
# Invert the func->anat matrix to obtain anat->func.
convert_xfm = Node(interface= fsl.ConvertXFM(), name ='anat2func_xfm')
convert_xfm.inputs.invert_xfm = True
#connect nodes
linear.connect(inputnode , 'reference_image' , anatdownsample , 'in_file' )
linear.connect(inputnode , 'reference_image' , anatdownsample , 'reference' )
linear.connect(inputnode , 'func_image' , mutual_info , 'in_file' )
linear.connect(anatdownsample , 'out_file' , mutual_info , 'reference' )
linear.connect(inputnode , 'func_image' , bbr , 'in_file' )
linear.connect(anatdownsample , 'out_file' , bbr , 'reference' )
linear.connect(inputnode , 'anat_wm' , bbr , 'wm_seg' )
linear.connect(mutual_info , 'out_matrix_file' , bbr , 'in_matrix_file' )
linear.connect(bbr , 'out_matrix_file' , convert_xfm , 'in_file' )
linear.connect(bbr , 'out_file' , outputnode , 'func2anat' )
linear.connect(bbr , 'out_matrix_file' , outputnode , 'func2anat_xfm' )
linear.connect(convert_xfm , 'out_file' , outputnode , 'anat2func_xfm' )
linear.connect(anatdownsample , 'out_file' , outputnode , 'anat_downsample' )
# Apply the inverse transform to resample the anatomy into func space.
anat_invxfm = Node(interface= fsl.ApplyXfm(), name ='apply_invxfm_anat')
anat_invxfm.inputs.apply_xfm = True
linear.connect(anatdownsample , 'out_file' , anat_invxfm, 'in_file')
linear.connect(inputnode , 'func_image' , anat_invxfm, 'reference')
linear.connect(convert_xfm , 'out_file' , anat_invxfm, 'in_matrix_file')
linear.connect(anat_invxfm , 'out_file' , outputnode, 'anat2func')
# flirt tissue masks back to func space
# GM chain: transform -> threshold at 0.5 and binarize -> intersect with the
# functional brain mask (multiplication).
gm_invxfm = Node(interface= fsl.ApplyXfm(), name ='apply_invxfm_gm')
gm_invxfm.inputs.apply_xfm = True
bin_gm = Node(interface= fsl.Threshold(), name ='apply_invxfm_gm_bin')
bin_gm.inputs.thresh = 0.5
bin_gm.inputs.args = '-bin'
mask_gm = Node(interface=fsl.BinaryMaths(), name='func_gm')
mask_gm.inputs.operation = 'mul'
linear.connect(inputnode , 'anat_gm' , gm_invxfm, 'in_file')
linear.connect(inputnode , 'func_image' , gm_invxfm, 'reference')
linear.connect(convert_xfm , 'out_file' , gm_invxfm, 'in_matrix_file')
linear.connect(gm_invxfm , 'out_file' , bin_gm, 'in_file')
linear.connect(bin_gm , 'out_file' , mask_gm, 'in_file')
linear.connect(inputnode , 'func_mask' , mask_gm, 'operand_file')
linear.connect(mask_gm , 'out_file' , outputnode,'func_gm')
# Same chain for WM, built by cloning the GM nodes.
wm_invxfm = gm_invxfm.clone('apply_invxfm_wm')
bin_wm = bin_gm.clone('apply_invxfm_wm_bin')
mask_wm = mask_gm.clone('func_wm')
linear.connect(inputnode , 'anat_wm' , wm_invxfm, 'in_file')
linear.connect(inputnode , 'func_image' , wm_invxfm, 'reference')
linear.connect(convert_xfm , 'out_file' , wm_invxfm, 'in_matrix_file')
linear.connect(wm_invxfm , 'out_file' , bin_wm, 'in_file')
linear.connect(bin_wm , 'out_file' , mask_wm, 'in_file')
linear.connect(inputnode , 'func_mask' , mask_wm, 'operand_file')
linear.connect(mask_wm , 'out_file' , outputnode,'func_wm')
#.........这里部分代码省略.........