

Python Workflow.run Method Code Examples

This article collects typical usage examples of the Workflow.run method from nipype.pipeline.engine in Python. If you are wondering what Workflow.run does, how to call it, or what real code that uses it looks like, the curated examples below should help. You can also explore further usage examples of the containing class, nipype.pipeline.engine.Workflow.


The following presents 15 code examples of the Workflow.run method, sorted by popularity by default.
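Before the individual examples, here is a minimal, self-contained sketch of the pattern they all share: build Node objects, wire them into a Workflow, and call run. The node names and base_dir below are illustrative, not taken from any of the projects listed.

from nipype.pipeline.engine import Node, Workflow
import nipype.interfaces.utility as util

def add_one(x):
    return x + 1

# Two trivial Function nodes chained into a workflow.
node_a = Node(util.Function(input_names=['x'], output_names=['out'],
                            function=add_one),
              name='node_a')
node_a.inputs.x = 1

node_b = Node(util.Function(input_names=['x'], output_names=['out'],
                            function=add_one),
              name='node_b')

wf = Workflow(name='wf_minimal')
wf.base_dir = '/tmp/wf_example'  # illustrative scratch directory
wf.connect(node_a, 'out', node_b, 'x')

wf.run()  # default serial execution; plugin variants appear in the examples below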

Example 1: create_conversion

# Required import: from nipype.pipeline.engine import Workflow
# Method used: Workflow.run
def create_conversion(
    name, subject, scans, working_dir, out_dir, folder, xnat_server, xnat_user, xnat_pass, project_id, exp_id
):

    convert = Workflow(name=name)
    convert.base_dir = working_dir
    convert.config["execution"]["crashdump_dir"] = convert.base_dir + "/crash_files"

    # infosource to iterate over scans
    scan_infosource = Node(util.IdentityInterface(fields=["scan_key", "scan_val"]), name="scan_infosource")
    scan_infosource.iterables = [("scan_key", list(scans.keys())), ("scan_val", list(scans.values()))]  # lists for Python 3
    scan_infosource.synchronize = True

    # xnat source
    xnatsource = Node(
        nio.XNATSource(
            infields=["project_id", "subject_id", "exp_id", "scan_id"],
            outfields=["dicom"],
            server=xnat_server,
            user=xnat_user,
            pwd=xnat_pass,
            cache_dir=working_dir,
        ),
        name="xnatsource",
    )

    xnatsource.inputs.query_template = (
        "/projects/%s/subjects/%s/experiments/%s/scans/%d/resources/DICOM/files"
    )
    xnatsource.inputs.query_template_args["dicom"] = [["project_id", "subject_id", "exp_id", "scan_id"]]
    xnatsource.inputs.project_id = project_id
    xnatsource.inputs.subject_id = subject
    xnatsource.inputs.exp_id = exp_id
    convert.connect([(scan_infosource, xnatsource, [("scan_val", "scan_id")])])

    # workflow to convert dicoms
    dcmconvert = create_dcmconvert_pipeline()
    convert.connect(
        [
            (scan_infosource, dcmconvert, [("scan_key", "inputnode.filename")]),
            (xnatsource, dcmconvert, [("dicom", "inputnode.dicoms")]),
        ]
    )

    # xnat sink
    sink = Node(nio.DataSink(base_directory=out_dir, parameterization=False), name="sink")

    convert.connect([(dcmconvert, sink, [("outputnode.nifti", folder)])])

    convert.run()
Author: JanisReinelt | Project: pipelines | Lines: 52 | Source: convert.py
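A hypothetical call to create_conversion might look as follows; every value is a placeholder, not taken from the original project. Note that the scan ids must be integers, since the query template formats them with %d.

# Hypothetical invocation; all values below are placeholders.
scans = {'t1': 3, 'rest': 7}  # scan label -> XNAT scan id (int, formatted via %d)
create_conversion(name='xnat_convert',
                  subject='sub001',
                  scans=scans,
                  working_dir='/scr/project/working_dir',
                  out_dir='/scr/project/out',
                  folder='nifti',
                  xnat_server='https://xnat.example.org',
                  xnat_user='me',
                  xnat_pass='secret',
                  project_id='PROJ01',
                  exp_id='sub001_exp01')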

Example 2: create_structural

# Required import: from nipype.pipeline.engine import Workflow
# Method used: Workflow.run
def create_structural(subject, working_dir, data_dir, freesurfer_dir, out_dir, standard_brain):
    # main workflow
    struct_preproc = Workflow(name='anat_preproc')
    struct_preproc.base_dir = working_dir
    struct_preproc.config['execution']['crashdump_dir'] = struct_preproc.base_dir + "/crash_files"


    # workflow to get brain, head and wmseg from freesurfer and convert to nifti
    mgzconvert = create_mgzconvert_pipeline()
    mgzconvert.inputs.inputnode.fs_subjects_dir = freesurfer_dir
    mgzconvert.inputs.inputnode.fs_subject_id = subject

    normalize = create_normalize_pipeline()
    normalize.inputs.inputnode.standard = standard_brain

    # sink to store files
    sink = Node(nio.DataSink(base_directory=out_dir,
                             parameterization=False,
                             substitutions=[
                                 ('transform_Warped', 'T1_brain2mni')]),
                name='sink')

    # connections
    struct_preproc.connect(
        [(mgzconvert, normalize, [('outputnode.anat_brain', 'inputnode.anat')]),
         (mgzconvert, sink, [('outputnode.anat_head', '@head')]),
         (mgzconvert, sink, [('outputnode.anat_brain', '@brain')]),
         (mgzconvert, sink, [('outputnode.anat_brain_mask', '@mask')]),
         (normalize, sink, [('outputnode.anat2std', '@anat2std'),
                            ('outputnode.anat2std_transforms', '@anat2std_transforms'),
                            ('outputnode.std2anat_transforms', '@std2anat_transforms')])
         ])

    struct_preproc.write_graph(dotfilename='struct_preproc.dot', graph2use='colored', format='pdf', simple_form=True)
    # struct_preproc.run()
    struct_preproc.run(plugin='CondorDAGMan', plugin_args={'initial_specs': 'request_memory = 1500'})
Author: fBeyer89 | Project: LIFE_Lemon_mod_mod | Lines: 38 | Source: structural.py
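Example 2 runs the workflow through the CondorDAGMan plugin rather than the default serial executor. The run calls seen across this collection boil down to three variants; the plugin_args values here are illustrative.

# Execution back-ends used throughout these examples (argument values illustrative).
wf.run()                                                # serial Linear plugin (default)
wf.run(plugin='MultiProc', plugin_args={'n_procs': 8})  # local multiprocessing
wf.run(plugin='CondorDAGMan',
       plugin_args={'initial_specs': 'request_memory = 1500'})  # HTCondor cluster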

Example 3: Node

# Required import: from nipype.pipeline.engine import Workflow
# Method used: Workflow.run
selectfiles = Node(nio.SelectFiles(templates,
                                   base_directory=data_dir),
                   name="selectfiles")

# sink to store files
# sink = Node(nio.DataSink(base_directory=out_dir,
#                           parameterization=False), 
#              name='sink')

# connect to core workflow
mp2rage.connect([#(infosource, selectfiles, [('subject_id', 'subject_id')]),
                 #(infosource, sink, [('subject_id', 'container')]),
                 (selectfiles, inputnode, [('inv2', 'inv2'),
                                           ('uni', 'uni'),
                                           ('t1map', 't1map')]),
#                 (outputnode, sink, [('uni_masked','@uni_masked'),
#                                     ('t1map_masked','@t1map_masked'),
#                                     ('background_mask','@background_mask'),
#                                     ('uni_stripped','@uni_stripped'),
#                                     ('skullstrip_mask', '@skullstrip_mask'),
#                                     ('uni_reoriented','@uni_reoriented')])
                      ])


#### run #########################################################################################

mp2rage.run()
# mp2rage.run(plugin='MultiProc')
# mp2rage.run(plugin='CondorDAGMan')
Author: juhuntenburg | Project: nonlinear_coreg | Lines: 31 | Source: mp2rage.py

Example 4: Node

# Required import: from nipype.pipeline.engine import Workflow
# Method used: Workflow.run
selectfiles = Node(nio.SelectFiles(templates,
                                   base_directory=data_dir),
                   name="selectfiles")

#sink to store files
sink = Node(nio.DataSink(base_directory=out_dir,
                          parameterization=False), 
             name='sink')

# connect to core workflow

fmapepi.connect([(infosource, selectfiles, [('subject_id', 'subject_id')]),
                 (infosource, inputnode, [('subject_id', 'subject_id'),
                                          ('fs_subjects_dir', 'fs_subjects_dir')]),
              (infosource, sink, [('subject_id', 'container')]),
              (selectfiles, inputnode, [('epi_mean', 'epi_mean'),
                                        ('phase', 'phase'),
                                        ('mag', 'mag'),
                                        ('anat_head','anat_head'),
                                        ('anat_brain', 'anat_brain'),
                                        ('wmseg', 'wmseg')]),
            (outputnode, sink, [('fmap', '@fmap'),
                                ('fmap_mean_coreg', '@fmap_mean_coreg'),
                                ('unwarpfield2fmap', '@unwarpfield2fmap'),
                                ('fmap_fullwarp', '@fmap_fullwarp'),
                                ('epi2anat_mat', '@epi2anat_mat'),
                                ('epi2anat_dat', '@epi2anat_dat')
                                ])
                ])
                                               
fmapepi.run(plugin='CondorDAGMan')
    
Author: juhuntenburg | Project: nonlinear_coreg | Lines: 32 | Source: fieldmap_epi2fmap.py

Example 5:

# Required import: from nipype.pipeline.engine import Workflow
# Method used: Workflow.run
                         container=output_dir),
                name="datasink")

# Use the following DataSink output substitutions
substitutions = [('_subject_id_', ''),
                 ('_apply2con', 'apply2con'),
                 ('_warpall', 'warpall')]
datasink.inputs.substitutions = substitutions

# Connect SelectFiles and DataSink to the workflow
normflow.connect([(infosource, selectfiles, [('subject_id', 'subject_id')]),
                  (selectfiles, apply2con, [('func', 'input_image')]),
                  (selectfiles, apply2mean, [('mean', 'input_image')]),
                  (selectfiles, antsreg, [('anat', 'moving_image')]),
                  (antsreg, datasink, [('warped_image',
                                        'antsreg.@warped_image'),
                                       ('inverse_warped_image',
                                        'antsreg.@inverse_warped_image'),
                                       ('composite_transform',
                                        'antsreg.@transform'),
                                       ('inverse_composite_transform',
                                        'antsreg.@inverse_transform')]),
                  (apply2con, datasink, [('output_image',
                                          'warp_complete.@apply2con')]),
                  (apply2mean, datasink, [('output_image',
                                           'warp_complete.@apply2mean')]),
                  ])

normflow.write_graph(graph2use='colored')
normflow.run('MultiProc', plugin_args={'n_procs': 8})
Author: dalejn | Project: ants_scripts | Lines: 32 | Source: ANTS_TEST.py

Example 6:

# Required import: from nipype.pipeline.engine import Workflow
# Method used: Workflow.run
                                        'preproc.@norm'),
                                       ('art.outlier_files',
                                        'preproc.@outliers'),
                                       ('art.plot_files',
                                        'preproc.@plot'),
                                       ('binarize.binary_file',
                                        'preproc.@brainmask'),
                                       ('bbregister.out_reg_file',
                                        'bbregister.@out_reg_file'),
                                       ('bbregister.out_fsl_file',
                                        'bbregister.@out_fsl_file'),
                                       ('bbregister.registered_file',
                                        'bbregister.@registered_file'),
                                       ]),
                  (l1analysis, datasink, [('mriconvert.out_file',
                                           'contrasts.@contrast'),
                                          ('conestimate.spm_mat_file',
                                           'contrasts.@spm_mat'),
                                          ('conestimate.spmT_images',
                                           'contrasts.@T'),
                                          ('conestimate.con_images',
                                           'contrasts.@con'),
                                          ]),
                  ])
                  ])


###
# Run Workflow
metaflow.write_graph(graph2use='colored')
metaflow.run('MultiProc', plugin_args={'n_procs': 8})
Author: miykael | Project: nipype-beginner-s-guide | Lines: 32 | Source: example_fMRI_1_first_level.py

Example 7: Node

# Required import: from nipype.pipeline.engine import Workflow
# Method used: Workflow.run
cor_method_infosource = Node(util.IdentityInterface(fields=["cor_method"]), name="cor_method_infosource")
cor_method_infosource.iterables = ("cor_method", ["lin_ts"])  # full set: ['lin_ts', 'nonlin_ts', 'fmap_ts', 'topup_ts']


# select files
templates = {
    "timeseries": "kansas1/huntenburg/*_timeseries/_subject_id_{subject_id}/*apply*/{cor_method}.nii.gz",
    #'par_file':'jessica2/Schaare/LEMON/preprocessed/{subject_id}/motion_correction/rest_moco.nii.gz.par'
}

selectfiles = Node(nio.SelectFiles(templates, base_directory=data_dir), name="selectfiles")


# store data
# sink = Node(nio.DataSink(base_directory=out_dir,
#                             parameterization=False),
#                name='sink')

preproc.connect(
    [
        (subject_infosource, selectfiles, [("subject_id", "subject_id")]),
        (cor_method_infosource, selectfiles, [("cor_method", "cor_method")]),
        # (infosource, sink, [('subject_id', 'container')]),
        (selectfiles, tsnr, [("timeseries", "in_file")]),
        (selectfiles, compcor, [("timeseries", "realigned_file")]),
    ]
)

preproc.run()  # (plugin='CondorDAGMan')
# preproc.write_graph(graph2use='flat', simple_form=False)
Author: juhuntenburg | Project: nonlinear_coreg | Lines: 32 | Source: preproc_conn.py

Example 8: split_filename

# Required import: from nipype.pipeline.engine import Workflow
# Method used: Workflow.run
    z_nii = nb.Nifti1Image(nii.get_data()[:,:,:,2], nii.get_affine(), nii.get_header())
    x_nii.set_data_dtype(np.float32)
    y_nii.set_data_dtype(np.float32)
    z_nii.set_data_dtype(np.float32)
    _, base, _ = split_filename(in_file)
    nb.save(x_nii, base + "_x.nii.gz")
    nb.save(y_nii, base + "_y.nii.gz")
    nb.save(z_nii, base + "_z.nii.gz")
    return [os.path.abspath(base + "_x.nii.gz"), 
            os.path.abspath(base + "_y.nii.gz"),
            os.path.abspath(base + "_z.nii.gz")]

# split mapping in x,y,z components for sampling to surface
split = Node(util.Function(input_names=['in_file'],
                            output_names=['out_files'],
                            function=split_xyz),
                  name='split')

mapping2struct.connect([(func2struct, split, [('output_image', 'in_file')])])
  
# sink relevant files
final_sink = Node(nio.DataSink(parameterization=False,
                               base_directory=final_dir),
             name='final_sink')

mapping2struct.connect([(session_infosource, final_sink, [('session', 'container')]),
                   (split, final_sink, [('out_files', '@func')])
                   ])
    
mapping2struct.run(plugin='MultiProc', plugin_args={'n_procs' : 16})
Author: juhuntenburg | Project: myelinconnect | Lines: 32 | Source: project_mapping2struct.py

Example 9: Node

# Required import: from nipype.pipeline.engine import Workflow
# Method used: Workflow.run
    np.savetxt(filename, mask_size_array, delimiter=' ', fmt='%f')
    return os.path.abspath(filename)


write_txt = Node(interface=util.Function(input_names=['masksizes', 'filename'],
                                    output_names=['txtfile'],
                                    function=write_text),
              name='write_txt')
write_txt.inputs.filename='min_groupmask_extents.txt'
group.connect([(masksize, write_txt, [('out_stat', 'masksizes')])])


'''connections
=======================
'''

group.connect([(mean_masked, sink, [('out_file', '@mean_masked')]),
               (sdv_masked, sink, [('out_file', '@sdv_masked')]),
               (diffmap_meaner, sink, [('out_file', '@mean_diffmap')]),
               #(field_meaner, sink, [('out_file', '@mean_field')]),
               (mask_meaner, sink, [('out_file', '@mean_mask')]),
               (mask_sdv, sink, [('out_file', '@sdv_mask')]),
               (groupmask, sink, [('out_file', '@groupmask')]),
               (write_txt, sink, [('txtfile', '@mask_extents')])
               ])

# run
group.run(plugin='CondorDAGMan')
    
    
Author: juhuntenburg | Project: nonlinear_coreg | Lines: 30 | Source: eval_group.py

Example 10: Node

# Required import: from nipype.pipeline.engine import Workflow
# Method used: Workflow.run
selectfiles = Node(nio.SelectFiles(templates,
                                   base_directory=data_dir),
                   name="selectfiles")

# sink to store data
# sink = Node(nio.DataSink(base_directory=out_dir,
#                           parameterization=False), 
#              name='sink')

# connect to core workflow 
apply_ts.connect([(infosource, selectfiles, [('subject_id', 'subject_id')]),
                  #(infosource, sink, [('subject_id', 'container')]),
                  (selectfiles, inputnode, [('moco_ts', 'moco_ts'),
                                            ('moco_mean','moco_mean'),
                                            ('anat_head', 'anat_head'),
                                            #('lin_epi2anat_itk', 'lin_epi2anat_itk'),
                                            #('nonlin_anat2epi_itk', 'nonlin_anat2epi_itk'),
                                            #('nonlin_epi2anat_warp', 'nonlin_epi2anat_warp'),
                                            ('fmap_fullwarp','fmap_fullwarp'),
                                            ('topup_fullwarp','topup_fullwarp')
                                            ]),
#                 (outputnode, sink, [('lin_ts', '@lin_ts'),
#                                     ('nonlin_ts', '@nonlin_ts'),
#                                    ('fmap_ts', '@fmap_ts'),
#                                    ('topup_ts', '@topup_ts')
#                                    ])
                ])

apply_ts.run(plugin='CondorDAGMan')
Author: juhuntenburg | Project: nonlinear_coreg | Lines: 31 | Source: apply_timeseries_fsl.py

Example 11: calc_local_metrics

# Required import: from nipype.pipeline.engine import Workflow
# Method used: Workflow.run

#......... part of this function's code is omitted here .........
    )
    config.update_config(nipype_cfg)
    wf.config["execution"]["crashdump_dir"] = os.path.join(working_dir, "crash")

    ds = Node(nio.DataSink(base_directory=ds_dir), name="ds")
    ds.inputs.regexp_substitutions = [
        ("MNI_resampled_brain_mask_calc.nii.gz", "falff.nii.gz"),
        ("residual_filtered_3dT.nii.gz", "alff.nii.gz"),
        ("_parcellation_", ""),
        ("_bp_freqs_", "bp_"),
    ]

    #####################
    # ITERATORS
    #####################
    # PARCELLATION ITERATOR
    parcellation_infosource = Node(util.IdentityInterface(fields=["parcellation"]), name="parcellation_infosource")
    parcellation_infosource.iterables = ("parcellation", list(parcellations_dict.keys()))

    bp_filter_infosource = Node(util.IdentityInterface(fields=["bp_freqs"]), name="bp_filter_infosource")
    bp_filter_infosource.iterables = ("bp_freqs", bp_freq_list)

    selectfiles = Node(
        nio.SelectFiles(
            {
                "parcellation_time_series": "{subject_id}/con_mat/parcellated_time_series/bp_{bp_freqs}/{parcellation}/parcellation_time_series.npy"
            },
            base_directory=preprocessed_data_dir,
        ),
        name="selectfiles",
    )
    selectfiles.inputs.subject_id = subject_id
    wf.connect(parcellation_infosource, "parcellation", selectfiles, "parcellation")
    wf.connect(bp_filter_infosource, "bp_freqs", selectfiles, "bp_freqs")

    fd_file = Node(
        nio.SelectFiles({"fd_p": "{subject_id}/QC/FD_P_ts"}, base_directory=preprocessed_data_dir), name="fd_file"
    )
    fd_file.inputs.subject_id = subject_id

    ##############
    ## CON MATS
    ##############
    ##############
    ## extract ts
    ##############

    get_good_trs = Node(
        util.Function(
            input_names=["fd_file", "fd_thresh"],
            output_names=["good_trs", "fd_scrubbed_file"],
            function=calc_metrics_utils.get_good_trs,
        ),
        name="get_good_trs",
    )
    wf.connect(fd_file, "fd_p", get_good_trs, "fd_file")
    get_good_trs.inputs.fd_thresh = fd_thresh

    parcellated_ts_scrubbed = Node(
        util.Function(
            input_names=["parcellation_time_series_file", "good_trs"],
            output_names=["parcellation_time_series_scrubbed"],
            function=calc_metrics_utils.parcellation_time_series_scrubbing,
        ),
        name="parcellated_ts_scrubbed",
    )

    wf.connect(selectfiles, "parcellation_time_series", parcellated_ts_scrubbed, "parcellation_time_series_file")
    wf.connect(get_good_trs, "good_trs", parcellated_ts_scrubbed, "good_trs")

    ##############
    ## get conmat
    ##############
    con_mat = Node(
        util.Function(
            input_names=["in_data", "extraction_method"],
            output_names=["matrix", "matrix_file"],
            function=calc_metrics_utils.calculate_connectivity_matrix,
        ),
        name="con_mat",
    )
    con_mat.inputs.extraction_method = "correlation"
    wf.connect(parcellated_ts_scrubbed, "parcellation_time_series_scrubbed", con_mat, "in_data")

    ##############
    ## ds
    ##############

    wf.connect(get_good_trs, "fd_scrubbed_file", ds, "QC.@fd_scrubbed_file")
    fd_str = ("%.1f" % fd_thresh).replace(".", "_")
    wf.connect(con_mat, "matrix_file", ds, "con_mat.matrix_scrubbed_%s.@matrix" % fd_str)

    # wf.write_graph(dotfilename=wf.name, graph2use='colored', format='pdf')  # 'hierarchical')
    # wf.write_graph(dotfilename=wf.name, graph2use='orig', format='pdf')
    # wf.write_graph(dotfilename=wf.name, graph2use='flat', format='pdf')

    if plugin_name == "CondorDAGMan":
        wf.run(plugin=plugin_name, plugin_args={"initial_specs": "request_memory = 1500"})
    if plugin_name == "MultiProc":
        wf.run(plugin=plugin_name, plugin_args={"n_procs": use_n_procs})
Author: fliem | Project: LeiCA_LIFE | Lines: 104 | Source: calc_metrics_scrubbing.py

Example 12: Node

# Required import: from nipype.pipeline.engine import Workflow
# Method used: Workflow.run
# SUBJECTS ITERATOR

baseDir = "/afs/cbs.mpg.de/projects/mar005_lsd-lemon-surf/probands"
subjects_list = [sub for sub in os.listdir(baseDir) if os.path.isdir(os.path.join(baseDir, sub))]

subjects_infosource = Node(util.IdentityInterface(fields=['subject_id']), name='subjects_infosource')
subjects_infosource.iterables = ('subject_id', subjects_list)

run_mean_dist = Node(util.Function(input_names=['sub'],
                                   output_names=[], #'filelist'
                                   function=meanDist), name='run_mean_dist')
wf.connect(subjects_infosource, 'subject_id', run_mean_dist, 'sub')
#wf.connect(run_mean_dist, 'filelist', ds, '@filelist')



######################
# RUN
######################
wf.write_graph(dotfilename='mean_dist', graph2use='exec', format='pdf')

#
#import warnings

#with warnings.catch_warnings():
    #warnings.simplefilter("ignore")

wf.run(plugin='MultiProc', plugin_args={'n_procs': 5})
#wf.run(plugin='CondorDAGMan')

Author: Lauckner | Project: distconnect | Lines: 31 | Source: 5_meanDist_workflow.py

Example 13:

# Required import: from nipype.pipeline.engine import Workflow
# Method used: Workflow.run
               (infosource, inputnode, [('subject_id', 'subject_id'),
                                        ('fs_subjects_dir', 'fs_subjects_dir')]),
               (selectfiles, inputnode, [('epi_mean', 'epi_mean'),
                                         ('se1', 'se1'),
                                         ('se_inv1','se_inv1'),
                                         ('se2', 'se2'),
                                         ('se_inv2', 'se_inv2'),
                                         ('anat_head','anat_head'),
                                         ('anat_brain', 'anat_brain'),
                                         ('wmseg', 'wmseg')
                                         #('mag', 'mag')
                                         ]),
               (infosource, sink, [('subject_id', 'container')]),
                (outputnode, sink, [('topup_field', '@topup_field'),
                                    ('applytopup', '@applytopup'),
                                    #('topup_fmap','@topup_fmap'),
                                    #('shiftmap','@shiftmap'),
                                    #('topup_fullwarp','@topup_fullwarp'),
                                    ('epi2anat_mat','@epi2anat_mat'),
                                    ('epi2anat_dat','@epi2anat_dat'),
                                    ('epi2anat_itk','@epi2anat_itk'),
                                    #('epireg','@epireg'),
                                    ('topup_mean_coreg','@topup_mean_coreg'),
                                    ])
               ])

topup.run(plugin='CondorDAGMan')



Author: juhuntenburg | Project: nonlinear_coreg | Lines: 29 | Source: topup_applytopup.py

Example 14: Node

# Required import: from nipype.pipeline.engine import Workflow
# Method used: Workflow.run
                            out_type = 'nii.gz'), 
               name='wallmask_rh')
  
wallmask_lh = wallmask_rh.clone('wallmask_lh')
 
medwall.connect([(selectfiles, wallmask_rh, [('thickness_rh', 'in_file')]),
                 (selectfiles, wallmask_lh, [('thickness_lh', 'in_file')])
                 ])

addmasks2 = Node(fsl.BinaryMaths(operation='add'),
                 name='addmasks2')

medwall.connect([(wallmask_rh, addmasks2, [('binary_file', 'in_file')]),
                 (wallmask_lh, addmasks2, [('binary_file', 'operand_file')])])

'''
followed by
3dclust -savemask $out 0 20 $in
'''
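# The note above refers to an AFNI 3dclust call that was applied outside this
# workflow. A hedged sketch of scripting that step with nipype's generic
# CommandLine interface (the filenames are illustrative, not from the original):
from nipype.interfaces.base import CommandLine

clust = CommandLine(command='3dclust',
                    args='-savemask fullmask_clustered.nii.gz 0 20 fullmask.nii.gz')
clust.run()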
  
# sink relevant files
sink = Node(nio.DataSink(parameterization=False,
                               base_directory=out_dir),
             name='sink')

medwall.connect([(morph_closing, sink, [('binary_file', '@fullmask')]),
                 (addmasks2, sink, [('out_file', '@alternativemask')])
                 ])

medwall.run(plugin='MultiProc', plugin_args={'n_procs' : 9})
Author: fliem | Project: myelinconnect | Lines: 32 | Source: medial_wall_masks.py

Example 15: Node

# Required import: from nipype.pipeline.engine import Workflow
# Method used: Workflow.run
                              output_names='fname', 
                              function=make_name),
                name='makename')

mni.connect([(subject_infosource, makename, [('subject_id', 'sub')])])

# apply all transforms
applytransform = Node(ants.ApplyTransforms(input_image_type = 3,
                                           #output_image='rest_preprocessed2mni.nii.gz',
                                           interpolation = 'NearestNeighbor',
                                           invert_transform_flags=[False, False]),
                      name='applytransform')
   
applytransform.inputs.reference_image = template
mni.connect([(selectfiles_1, applytransform, [('mask', 'input_image')]),
             (translist, applytransform, [('out', 'transforms')]),
             (makename, applytransform, [('fname', 'output_image')])
             ])


# sink
sink = Node(nio.DataSink(base_directory=out_dir,
                         parameterization=False),
                name='sink')

mni.connect([#(subject_infosource, sink, [(('subject_id', makebase, out_dir), 'base_directory')]),
             (applytransform, sink, [('output_image', '@tsnr2mni')])
             ])

mni.run(plugin='MultiProc', plugin_args={'n_procs' : 10})
Author: NeuroanatomyAndConnectivity | Project: pipelines | Lines: 32 | Source: mask_2mni.py


Note: the nipype.pipeline.engine.Workflow.run examples in this article were collected from open-source projects hosted on platforms such as GitHub. Copyright of the source code remains with the original authors; consult each project's license before redistributing or reusing these snippets.