

Python Workflow.config['execution']['crashdump_dir'] Code Examples

This article collects typical usage examples of Workflow.config['execution']['crashdump_dir'] from nipype.pipeline.engine in Python — strictly speaking a configuration entry on the Workflow object rather than a method. If you are unsure what this setting does, how to use it, or what working code looks like, the curated examples below should help. You can also explore further usage examples of nipype.pipeline.engine.Workflow.


The following presents 15 code examples of Workflow.config['execution']['crashdump_dir'], sorted by popularity by default. You can upvote the examples you find useful; your feedback helps the system recommend better Python code examples.
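All of the examples below follow the same basic pattern: build a Workflow, point its base_dir at a working directory, and set config['execution']['crashdump_dir'] so that crash dumps of failed nodes end up in a dedicated folder rather than the current working directory. A minimal sketch of that pattern is shown first; the directory path and workflow name are placeholders, not values taken from any of the projects below.

import os
from nipype.pipeline.engine import Workflow

working_dir = '/tmp/my_working_dir'  # placeholder path
wf = Workflow(name='example_wf')
wf.base_dir = working_dir

# crash dumps (pickled tracebacks of failed nodes) are written here instead of the current directory
wf.config['execution']['crashdump_dir'] = os.path.join(working_dir, 'crash')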

Example 1: ants_ct_wf

# Required import: from nipype.pipeline.engine import Workflow [as alias]
# Or: from nipype.pipeline.engine.Workflow import config['execution']['crashdump_dir'] [as alias]
def ants_ct_wf(subjects_id,
            preprocessed_data_dir,
            working_dir,
            ds_dir,
            template_dir,
            plugin_name):
    import os
    from nipype import config
    from nipype.pipeline.engine import Node, Workflow, MapNode
    import nipype.interfaces.utility as util
    import nipype.interfaces.io as nio
    from nipype.interfaces.freesurfer.utils import ImageInfo



    #####################################
    # GENERAL SETTINGS
    #####################################
    wf = Workflow(name='ants_ct')
    wf.base_dir = os.path.join(working_dir)

    nipype_cfg = dict(logging=dict(workflow_level='DEBUG'), execution={'stop_on_first_crash': True,
                                                                       'remove_unnecessary_outputs': True,
                                                                       'job_finished_timeout': 120})
    config.update_config(nipype_cfg)
    wf.config['execution']['crashdump_dir'] = os.path.join(working_dir, 'crash')

    ds = Node(nio.DataSink(base_directory=ds_dir), name='ds')



    #####################################
    # GET DATA
    #####################################
    # GET SUBJECT SPECIFIC STRUCTURAL DATA
    in_data_templates = {
        't1w': '{subject_id}/raw_niftis/sMRI/t1w_reoriented.nii.gz',
    }

    in_data = Node(nio.SelectFiles(in_data_templates,
                                       base_directory=preprocessed_data_dir),
                       name="in_data")
    in_data.inputs.subject_id = subjects_id


    # GET NKI ANTs templates
    ants_templates_templates = {
        'brain_template': 'NKI/T_template.nii.gz',
        'brain_probability_mask': 'NKI/T_templateProbabilityMask.nii.gz',
        'segmentation_priors': 'NKI/Priors/*.nii.gz',
        't1_registration_template': 'NKI/T_template_BrainCerebellum.nii.gz'

    }

    ants_templates = Node(nio.SelectFiles(ants_templates_templates,
                                       base_directory=template_dir),
                       name="ants_templates")
Author: fliem, Project: LeiCA, Lines of code: 59, Source file: ants_ct.py

Example 2: create_structural

# Required import: from nipype.pipeline.engine import Workflow [as alias]
# Or: from nipype.pipeline.engine.Workflow import config['execution']['crashdump_dir'] [as alias]
def create_structural(subject, working_dir, data_dir, freesurfer_dir, out_dir, standard_brain):
    # main workflow
    struct_preproc = Workflow(name='anat_preproc')
    struct_preproc.base_dir = working_dir
    struct_preproc.config['execution']['crashdump_dir'] = struct_preproc.base_dir + "/crash_files"


    # workflow to get brain, head and wmseg from freesurfer and convert to nifti
    mgzconvert = create_mgzconvert_pipeline()
    mgzconvert.inputs.inputnode.fs_subjects_dir = freesurfer_dir
    mgzconvert.inputs.inputnode.fs_subject_id = subject

    normalize = create_normalize_pipeline()
    normalize.inputs.inputnode.standard = standard_brain

    # sink to store files
    sink = Node(nio.DataSink(base_directory=out_dir,
                             parameterization=False,
                             substitutions=[
                                 ('transform_Warped', 'T1_brain2mni')]),
                name='sink')

    # connections
    struct_preproc.connect(
        [(mgzconvert, normalize, [('outputnode.anat_brain', 'inputnode.anat')]),
         (mgzconvert, sink, [('outputnode.anat_head', '@head')]),
         (mgzconvert, sink, [('outputnode.anat_brain', '@brain')]),
         (mgzconvert, sink, [('outputnode.anat_brain_mask', '@mask')]),
         (normalize, sink, [('outputnode.anat2std', '@anat2std'),
                            ('outputnode.anat2std_transforms', '[email protected]_transforms'),
                            ('outputnode.std2anat_transforms', '[email protected]_transforms')])
         ])

    struct_preproc.write_graph(dotfilename='struct_preproc.dot', graph2use='colored', format='pdf', simple_form=True)
    # struct_preproc.run()
    struct_preproc.run(plugin='CondorDAGMan', plugin_args = {'initial_specs': 'request_memory = 1500'})  #
Author: fBeyer89, Project: LIFE_Lemon_mod_mod, Lines of code: 38, Source file: structural.py

Example 3: open

# Required import: from nipype.pipeline.engine import Workflow [as alias]
# Or: from nipype.pipeline.engine.Workflow import config['execution']['crashdump_dir'] [as alias]
   
apply_ts.connect([(inputnode, apply_topup, [('moco_ts', 'in_file'),
                                            ('topup_fullwarp', 'field_file')]),
                 (resamp_anat, apply_topup, [('out_file', 'ref_file')]),
                 (apply_topup, outputnode, [('out_file', 'topup_ts')])
                 ])
apply_topup.plugin_args={'initial_specs': 'request_memory = 8000'}



# set up workflow, in- and output
apply_ts.base_dir='/scr/kansas1/huntenburg/'
data_dir='/scr/jessica2/Schaare/LEMON/'
#out_dir = '/scr/kansas1/huntenburg/timeseries/'
#applywarp_linear.config['execution']={'remove_unnecessary_outputs': 'False'}
apply_ts.config['execution']['crashdump_dir'] = apply_ts.base_dir + "/crash_files"

# reading subjects from file
#subjects=['LEMON003']
subjects=[]
f = open('/scr/jessica2/Schaare/LEMON/done_freesurfer.txt','r')
for line in f:
    subjects.append(line.strip())
subjects.remove('LEMON007')
subjects.remove('LEMON027')


# create infosource to iterate over subjects
infosource = Node(util.IdentityInterface(fields=['subject_id']), 
                  name='infosource')
infosource.iterables=('subject_id', subjects)
Author: juhuntenburg, Project: nonlinear_coreg, Lines of code: 33, Source file: apply_timeseries_fsl.py

Example 4: Workflow

# Required import: from nipype.pipeline.engine import Workflow [as alias]
# Or: from nipype.pipeline.engine.Workflow import config['execution']['crashdump_dir'] [as alias]
# directories
working_dir = '/scr/ilz3/myelinconnect/working_dir/' 
data_dir= '/scr/ilz3/myelinconnect/'
out_dir = '/scr/ilz3/myelinconnect/resting/preprocessed/'
freesurfer_dir = '/scr/ilz3/myelinconnect/freesurfer/' # freesurfer reconstruction of lowres is assumed

# set fsl output type to nii.gz
fsl.FSLCommand.set_default_output_type('NIFTI_GZ')

# volumes to remove from each timeseries
vol_to_remove = 5

# main workflow
preproc = Workflow(name='func_preproc')
preproc.base_dir = working_dir
preproc.config['execution']['crashdump_dir'] = preproc.base_dir + "/crash_files"

# iterate over subjects
subject_infosource = Node(util.IdentityInterface(fields=['subject']), 
                  name='subject_infosource')
subject_infosource.iterables=[('subject', subjects_db)]

# iterate over sessions
session_infosource = Node(util.IdentityInterface(fields=['session']), 
                  name='session_infosource')
session_infosource.iterables=[('session', sessions)]

# select files
templates={'rest' : 'resting/raw/{subject}_{session}.nii.gz',
           'dicom':'resting/raw/example_dicoms/{subject}*/{session}/*',
           'uni_highres' : 'struct/uni/{subject}*UNI_Images_merged.nii.gz',
Author: juhuntenburg, Project: myelinconnect, Lines of code: 33, Source file: full_pipeline.py

Example 5: Workflow

# Required import: from nipype.pipeline.engine import Workflow [as alias]
# Or: from nipype.pipeline.engine.Workflow import config['execution']['crashdump_dir'] [as alias]
# read in subjects and file names
df=pd.read_csv('/scr/ilz3/myelinconnect/subjects.csv', header=0)
subjects_db=list(df['DB'])

# directories
working_dir = '/scr/ilz3/myelinconnect/working_dir/' 
data_dir= '/scr/ilz3/myelinconnect/struct/'
out_dir = '/scr/ilz3/myelinconnect/struct/medial_wall'
# set fsl output type to nii.gz
fsl.FSLCommand.set_default_output_type('NIFTI_GZ')

# main workflow
medwall = Workflow(name='medwall')
medwall.base_dir = working_dir
medwall.config['execution']['crashdump_dir'] = medwall.base_dir + "/crash_files"

# iterate over subjects
subject_infosource = Node(util.IdentityInterface(fields=['subject']), 
                  name='subject_infosource')
subject_infosource.iterables=[('subject', subjects_db)]

# select files
templates={'sub_gm': 'medial_wall/input/{subject}*_sub_gm.nii.gz',
           'sub_csf': 'medial_wall/input/{subject}*_sub_csf.nii.gz',
           'thickness_rh' : 'thickness/{subject}*right_cortex_thick.nii.gz',
           'thickness_lh' : 'thickness/{subject}*left_cortex_thick.nii.gz',}

selectfiles = Node(nio.SelectFiles(templates, base_directory=data_dir),
                   name="selectfiles")
Author: fliem, Project: myelinconnect, Lines of code: 31, Source file: medial_wall_masks.py

Example 6: Workflow

# Required import: from nipype.pipeline.engine import Workflow [as alias]
# Or: from nipype.pipeline.engine.Workflow import config['execution']['crashdump_dir'] [as alias]
subjects.remove('KSMT')

sessions = ['rest1_1', 'rest1_2', 'rest2_1', 'rest2_2']

# directories
working_dir = '/scr/ilz3/myelinconnect/working_dir/' 
data_dir= '/scr/ilz3/myelinconnect/'
final_dir = '/scr/ilz3/myelinconnect/mappings/rest2highres/'

# set fsl output type to nii.gz
fsl.FSLCommand.set_default_output_type('NIFTI_GZ')

# main workflow
mapping2struct = Workflow(name='mapping2struct')
mapping2struct.base_dir = working_dir
mapping2struct.config['execution']['crashdump_dir'] = mapping2struct.base_dir + "/crash_files"

# iterate over subjects
subject_infosource = Node(util.IdentityInterface(fields=['subject']), 
                  name='subject_infosource')
subject_infosource.iterables=[('subject', subjects)]

# iterate over sessions
session_infosource = Node(util.IdentityInterface(fields=['session']), 
                  name='session_infosource')
session_infosource.iterables=[('session', sessions)]

# select files
templates={'mapping': 'mappings/rest/fixed_hdr/corr_{subject}_{session}_roi_detrended_median_corrected_mapping_fixed.nii.gz',
           'epi2highres_lin_itk' : 'resting/preprocessed/{subject}/{session}/registration/epi2highres_lin.txt',
           'epi2highres_warp':'resting/preprocessed/{subject}/{session}/registration/transform0Warp.nii.gz',
Author: juhuntenburg, Project: myelinconnect, Lines of code: 33, Source file: project_mapping2struct.py

Example 7: create_structural

# Required import: from nipype.pipeline.engine import Workflow [as alias]
# Or: from nipype.pipeline.engine.Workflow import config['execution']['crashdump_dir'] [as alias]
def create_structural(subject, working_dir, data_dir, freesurfer_dir, out_dir,
                standard_brain):
    
    # main workflow
    struct_preproc = Workflow(name='mp2rage_preproc')
    struct_preproc.base_dir = working_dir
    struct_preproc.config['execution']['crashdump_dir'] = struct_preproc.base_dir + "/crash_files"
    
    # select files
    templates={'inv2': 'nifti/mp2rage/inv2.nii.gz',
               't1map': 'nifti/mp2rage/t1map.nii.gz',
               'uni': 'nifti/mp2rage/uni.nii.gz'}
    selectfiles = Node(nio.SelectFiles(templates,
                                       base_directory=data_dir),
                       name="selectfiles")
    
    # workflow for mp2rage background masking
    mp2rage=create_mp2rage_pipeline()
    
    # workflow to run freesurfer reconall
    reconall=create_reconall_pipeline()
    reconall.inputs.inputnode.fs_subjects_dir=freesurfer_dir
    reconall.inputs.inputnode.fs_subject_id=subject
    
    # workflow to get brain, head and wmseg from freesurfer and convert to nifti
    mgzconvert=create_mgzconvert_pipeline()
    
    # workflow to normalize anatomy to standard space
    normalize=create_normalize_pipeline()
    normalize.inputs.inputnode.standard = standard_brain
    
    #sink to store files
    sink = Node(nio.DataSink(base_directory=out_dir,
                             parameterization=False,
                             substitutions=[('outStripped', 'uni_stripped'),
                                            ('outMasked2', 'uni_masked'),
                                            ('outSignal2', 'background_mask'),
                                            ('outOriginal', 'uni_reoriented'),
                                            ('outMask', 'skullstrip_mask'),
                                            ('transform_Warped', 'T1_brain2mni')]),
                 name='sink')
    
    
    # connections
    struct_preproc.connect([(selectfiles, mp2rage, [('inv2', 'inputnode.inv2'),
                                                    ('t1map', 'inputnode.t1map'),
                                                    ('uni', 'inputnode.uni')]),
                            (mp2rage, reconall, [('outputnode.uni_masked', 'inputnode.anat')]),
                            (reconall, mgzconvert, [('outputnode.fs_subject_id', 'inputnode.fs_subject_id'),
                                                    ('outputnode.fs_subjects_dir', 'inputnode.fs_subjects_dir')]),
                            (mgzconvert, normalize, [('outputnode.anat_brain', 'inputnode.anat')]),
                            #(mp2rage, sink, [('outputnode.uni_masked', '[email protected]_masked'),
                            #                 ('outputnode.background_mask', '[email protected]_mask')
                            #                 ]),
                            (mgzconvert, sink, [('outputnode.anat_head', '[email protected]'),
                                                ('outputnode.anat_brain', '[email protected]'),
                                                ('outputnode.func_mask', '[email protected]_mask'),
                                                ('outputnode.wmedge', '[email protected]'),
                                                #('outputnode.wmseg', '[email protected]')
                                                ]),
                            (normalize, sink, [('outputnode.anat2std', '[email protected]'),
                                               ('outputnode.anat2std_transforms', '[email protected]_transforms'),
                                               ('outputnode.std2anat_transforms', '[email protected]_transforms')])
                            ])
    #struct_preproc.write_graph(dotfilename='struct_preproc.dot', graph2use='colored', format='pdf', simple_form=True)
    struct_preproc.run()
Author: NeuroanatomyAndConnectivity, Project: pipelines, Lines of code: 68, Source file: structural.py

Example 8: calc_centrality_metrics

# Required import: from nipype.pipeline.engine import Workflow [as alias]
# Or: from nipype.pipeline.engine.Workflow import config['execution']['crashdump_dir'] [as alias]
def calc_centrality_metrics(cfg):
    import os
    from nipype import config
    from nipype.pipeline.engine import Node, Workflow
    import nipype.interfaces.utility as util
    import nipype.interfaces.io as nio
    import nipype.interfaces.fsl as fsl
    import nipype.interfaces.freesurfer as freesurfer

    import CPAC.network_centrality.resting_state_centrality as cpac_centrality
    import CPAC.network_centrality.z_score as cpac_centrality_z_score


    # INPUT PARAMETERS
    dicom_dir = cfg['dicom_dir']
    preprocessed_data_dir = cfg['preprocessed_data_dir']

    working_dir = cfg['working_dir']
    freesurfer_dir = cfg['freesurfer_dir']
    template_dir = cfg['template_dir']
    script_dir = cfg['script_dir']
    ds_dir = cfg['ds_dir']

    subjects_list = cfg['subjects_list']
    TR_list = cfg['TR_list']

    use_n_procs = cfg['use_n_procs']
    plugin_name = cfg['plugin_name']



    #####################################
    # GENERAL SETTINGS
    #####################################
    fsl.FSLCommand.set_default_output_type('NIFTI_GZ')
    freesurfer.FSCommand.set_default_subjects_dir(freesurfer_dir)

    wf = Workflow(name='LeiCA_metrics')
    wf.base_dir = os.path.join(working_dir)

    nipype_cfg = dict(logging=dict(workflow_level='DEBUG'), execution={'stop_on_first_crash': False,
                                                                       'remove_unnecessary_outputs': True,
                                                                       'job_finished_timeout': 120})
    config.update_config(nipype_cfg)
    wf.config['execution']['crashdump_dir'] = os.path.join(working_dir, 'crash')

    ds = Node(nio.DataSink(), name='ds')
    ds.inputs.substitutions = [('_TR_id_', 'TR_')]
    ds.inputs.regexp_substitutions = [('_subject_id_[A0-9]*/', ''), (
    '_z_score[0-9]*/', '')]  # , #('dc/_TR_id_[0-9]*/', ''), ('evc/_TR_id_[0-9]*/','')]

    #####################################
    # SET ITERATORS
    #####################################
    # GET SCAN TR_ID ITERATOR
    scan_infosource = Node(util.IdentityInterface(fields=['TR_id']), name='scan_infosource')
    scan_infosource.iterables = ('TR_id', TR_list)

    subjects_infosource = Node(util.IdentityInterface(fields=['subject_id']), name='subjects_infosource')
    subjects_infosource.iterables = ('subject_id', subjects_list)

    def add_subject_id_to_ds_dir_fct(subject_id, ds_path):
        import os
        out_path = os.path.join(ds_path, subject_id)
        return out_path

    add_subject_id_to_ds_dir = Node(util.Function(input_names=['subject_id', 'ds_path'],
                                                  output_names=['out_path'],
                                                  function=add_subject_id_to_ds_dir_fct),
                                    name='add_subject_id_to_ds_dir')
    wf.connect(subjects_infosource, 'subject_id', add_subject_id_to_ds_dir, 'subject_id')
    add_subject_id_to_ds_dir.inputs.ds_path = ds_dir

    wf.connect(add_subject_id_to_ds_dir, 'out_path', ds, 'base_directory')


    # get atlas data
    templates_atlases = {'GM_mask_MNI_2mm': 'SPM_GM/SPM_GM_mask_2mm.nii.gz',
                         'GM_mask_MNI_3mm': 'SPM_GM/SPM_GM_mask_3mm.nii.gz',
                         'FSL_MNI_3mm_template': 'MNI152_T1_3mm_brain.nii.gz',
                         'vmhc_symm_brain': 'cpac_image_resources/symmetric/MNI152_T1_2mm_brain_symmetric.nii.gz',
                         'vmhc_symm_brain_3mm': 'cpac_image_resources/symmetric/MNI152_T1_3mm_brain_symmetric.nii.gz',
                         'vmhc_symm_skull': 'cpac_image_resources/symmetric/MNI152_T1_2mm_symmetric.nii.gz',
                         'vmhc_symm_brain_mask_dil': 'cpac_image_resources/symmetric/MNI152_T1_2mm_brain_mask_symmetric_dil.nii.gz',
                         'vmhc_config_file_2mm': 'cpac_image_resources/symmetric/T1_2_MNI152_2mm_symmetric.cnf'
                         }

    selectfiles_anat_templates = Node(nio.SelectFiles(templates_atlases,
                                                      base_directory=template_dir),
                                      name="selectfiles_anat_templates")


    # GET SUBJECT SPECIFIC FUNCTIONAL AND STRUCTURAL DATA
    selectfiles_templates = {
        'epi_2_MNI_warp': '{subject_id}/rsfMRI_preprocessing/registration/epi_2_MNI_warp/TR_{TR_id}/*.nii.gz',
        'epi_mask': '{subject_id}/rsfMRI_preprocessing/masks/brain_mask_epiSpace/TR_{TR_id}/*.nii.gz',
        'preproc_epi_full_spectrum': '{subject_id}/rsfMRI_preprocessing/epis/01_denoised/TR_{TR_id}/*.nii.gz',
        'preproc_epi_bp': '{subject_id}/rsfMRI_preprocessing/epis/02_denoised_BP/TR_{TR_id}/*.nii.gz',
        'preproc_epi_bp_tNorm': '{subject_id}/rsfMRI_preprocessing/epis/03_denoised_BP_tNorm/TR_{TR_id}/*.nii.gz',
        'epi_2_struct_mat': '{subject_id}/rsfMRI_preprocessing/registration/epi_2_struct_mat/TR_{TR_id}/*.mat',
#......... remaining code omitted .........
Author: Yaqiongxiao, Project: LeiCA, Lines of code: 103, Source file: calc_metrics.py

Example 9: learning_predict_data_2samp_wf

# Required import: from nipype.pipeline.engine import Workflow [as alias]
# Or: from nipype.pipeline.engine.Workflow import config['execution']['crashdump_dir'] [as alias]
def learning_predict_data_2samp_wf(working_dir,
                                   ds_dir,
                                   in_data_name_list,
                                   subjects_selection_crit_dict,
                                   subjects_selection_crit_names_list,
                                   aggregated_subjects_dir,
                                   target_list,
                                   use_n_procs,
                                   plugin_name,
                                   confound_regression=[False, True],
                                   run_cv=False,
                                   n_jobs_cv=1,
                                   run_tuning=False,
                                   run_2sample_training=False,
                                   aggregated_subjects_dir_nki=None,
                                   subjects_selection_crit_dict_nki=None,
                                   subjects_selection_crit_name_nki=None,
                                   reverse_split=False,
                                   random_state_nki=666,
                                   run_learning_curve=False,
                                   life_test_size=0.5):
    import os
    from nipype import config
    from nipype.pipeline.engine import Node, Workflow
    import nipype.interfaces.utility as util
    import nipype.interfaces.io as nio
    from itertools import chain
    from learning_utils import aggregate_multimodal_metrics_fct, run_prediction_split_fct, \
        backproject_and_split_weights_fct, select_subjects_fct, select_multimodal_X_fct, learning_curve_plot
    import pandas as pd



    ###############################################################################################################
    # GENERAL SETTINGS

    wf = Workflow(name='learning_predict_data_2samp_wf')
    wf.base_dir = os.path.join(working_dir)

    nipype_cfg = dict(logging=dict(workflow_level='DEBUG'),
                      execution={'stop_on_first_crash': False,
                                 'remove_unnecessary_outputs': False,
                                 'job_finished_timeout': 120})
    config.update_config(nipype_cfg)
    wf.config['execution']['crashdump_dir'] = os.path.join(working_dir, 'crash')

    ds = Node(nio.DataSink(), name='ds')
    ds.inputs.base_directory = os.path.join(ds_dir, 'group_learning_prepare_data')

    ds.inputs.regexp_substitutions = [
        # ('subject_id_', ''),
        ('_parcellation_', ''),
        ('_bp_freqs_', 'bp_'),
        ('_extraction_method_', ''),
        ('_subject_id_[A0-9]*/', '')
    ]
    ds_pdf = Node(nio.DataSink(), name='ds_pdf')
    ds_pdf.inputs.base_directory = os.path.join(ds_dir, 'pdfs')
    ds_pdf.inputs.parameterization = False



    ###############################################################################################################
    # ensure in_data_name_list is list of lists
    in_data_name_list = [i if type(i) == list else [i] for i in in_data_name_list]
    in_data_name_list_unique = list(set(chain.from_iterable(in_data_name_list)))



    ###############################################################################################################
    # SET ITERATORS

    in_data_name_infosource = Node(util.IdentityInterface(fields=['in_data_name']), name='in_data_name_infosource')
    in_data_name_infosource.iterables = ('in_data_name', in_data_name_list_unique)

    multimodal_in_data_name_infosource = Node(util.IdentityInterface(fields=['multimodal_in_data_name']),
                                              name='multimodal_in_data_name_infosource')
    multimodal_in_data_name_infosource.iterables = ('multimodal_in_data_name', in_data_name_list)

    subject_selection_infosource = Node(util.IdentityInterface(fields=['selection_criterium']),
                                        name='subject_selection_infosource')
    subject_selection_infosource.iterables = ('selection_criterium', subjects_selection_crit_names_list)

    target_infosource = Node(util.IdentityInterface(fields=['target_name']), name='target_infosource')
    target_infosource.iterables = ('target_name', target_list)



    ###############################################################################################################
    # COMPILE LIFE DATA
    ###############################################################################################################

    ###############################################################################################################
    # GET INFO AND SELECT FILES
    df_all_subjects_pickle_file = os.path.join(aggregated_subjects_dir, 'df_all_subjects_pickle_file/df_all.pkl')
    df = pd.read_pickle(df_all_subjects_pickle_file)

    # build lookup dict for unimodal data
    X_file_template = 'X_file/_in_data_name_{in_data_name}/vectorized_aggregated_data.npy'
    info_file_template = 'unimodal_backprojection_info_file/_in_data_name_{in_data_name}/unimodal_backprojection_info.pkl'
#......... remaining code omitted .........
Author: fliem, Project: LeiCA_LIFE, Lines of code: 103, Source file: learning_predict_data_wf.py

Example 10: extract_values_mask

# Required import: from nipype.pipeline.engine import Workflow [as alias]
# Or: from nipype.pipeline.engine.Workflow import config['execution']['crashdump_dir'] [as alias]
def extract_values_mask(working_dir, data_dir, out_dir, preproc_dir,
                            subject_id, mask_list, scan_group_list):
                        
                              
    afni.base.AFNICommand.set_default_output_type('NIFTI_GZ')
    fsl.FSLCommand.set_default_output_type('NIFTI_GZ')

    ########################
    ### SPECIFY WORKFLOW ###
    ########################

    # Create a local_metrics workflow
    extract_vals_wf = Workflow(name='extract_vals_wf')
    extract_vals_wf.base_dir =  working_dir
    extract_vals_wf.config['execution']['crashdump_dir'] = os.path.join(working_dir, 'crash')
    
    
    #####################
    ### SPECIFY NODES ###
    #####################
    
    ### INPUT ###
    
    # Infosource - a function free node to iterate over the list scans 
    scan_group_infosource = Node(util.IdentityInterface(fields=['scan_group']), 
                               name='scan_group_infosource')
    scan_group_infosource.iterables= [('scan_group', scan_group_list)]             
                                    
                          
    # select Z maps, 2 versions of func (bpf: band pass filtered; no_bpf:no temporal filtering)
    templates={'z_map_le_amy': 'MRI/' + subject_id + '/analysis/sca/nilearn/{scan_group}/left_amygdala/fwhm_6/corr_map_le_amy_z.nii.gz',
               'z_map_ri_amy': 'MRI/' + subject_id + '/analysis/sca/nilearn/{scan_group}/right_amygdala/fwhm_6/corr_map_ri_amy_z.nii.gz',
               }
    select_zmaps = Node(nio.SelectFiles(templates,
                                       base_directory = data_dir),
                       name="select_zmaps")
    extract_vals_wf.connect(scan_group_infosource, 'scan_group', select_zmaps, 'scan_group')
        
    
#    roi_infosource = Node(util.IdentityInterface(fields=['epi_mask']), 
#                               name='roi_infosource')
#    roi_infosource.iterables= [('epi_mask',roi_list)]                                  
    
    
#    # select files, 2 versions of func (bpf: band pass filtered; no_bpf:no temporal filtering)
#    templates={'epi_mask' : 'templates/harvard_oxford/{epi_mask}'
#               }
#    selectmasks = Node(nio.SelectFiles(templates,
#                                       base_directory = project_dir),
#                       name="selectmasks")
                     

    
    ### ANALYSIS NODES ###
    
    def get_vals(le_amy_nii):

        from nipype.interfaces.afni import ROIStats
        import numpy as np
        import os
        #from nipype.interfaces.fsl import ImageMeants
        #from nilearn.input_data import NiftiMasker, NiftiMapsMasker

        # run AFNI 3dROIstats within the left-amygdala interaction mask
        roistats = ROIStats(mask='/scr/nil3/reinelt/NECOS/templates/interaction_masks/mask_intact_bl2vs1_str_vs_con_le_amy.nii.gz',
                            quiet=True)
        roistats.inputs.in_file = le_amy_nii
        # output trait name 'stats' assumed here; newer nipype releases call it 'out_file'
        mean_le_amy = np.atleast_1d(np.genfromtxt(roistats.run().outputs.stats))

        # write the extracted value to a text file, because nipype connections expect file paths
        mean_le_amy_file = os.path.abspath('mean_le_amy.txt')
        np.savetxt(mean_le_amy_file, mean_le_amy)

        return mean_le_amy_file

 

    vals_inside_mask = Node(util.Function(input_names = ['le_amy_nii'],
                                           output_names = ['mean_le_amy_file'],      
                                           function = get_vals),
                    name='sca_prob_seeds')                      
    extract_vals_wf.connect(select_zmaps, 'z_map_le_amy', vals_inside_mask, 'le_amy_nii')     
                        
    
      
#    vals_inside_mask = Node(afni.ROIStats(),
#                            name = 'vals_inside_mask')
#    vals_inside_mask.inputs.mask = '/scr/nil3/reinelt/NECOS/templates/interaction_masks/mask_intact_bl2vs1_str_vs_con_le_amy.nii.gz'
#                                 
#    extract_vals_wf.connect(select_zmaps, 'z_map_le_amy', vals_inside_mask, 'in_file')     
  
    ### OUTPUT ###
        
    # Node to sink data (function DataSink from nipype.interfaces.io)
    datasink = Node(nio.DataSink(base_directory = out_dir,
                                 substitutions=[('_scan_id_', ''),
                                                ('_epi_mask_',''),
                                                ('rest_preprocessed2mni_2mm_maths.nii.gz','temporal_std_2mni_2mm.nii.gz')    
                                                 ]),
#......... remaining code omitted .........
Author: JanisReinelt, Project: NECOS, Lines of code: 103, Source file: necos_extract_vals.py

Example 11: Workflow

# Required import: from nipype.pipeline.engine import Workflow [as alias]
# Or: from nipype.pipeline.engine.Workflow import config['execution']['crashdump_dir'] [as alias]
version='version_5'

'''beware that if additional inputs to epi_t1_nonlinear.py are necessary this needs to
be specified in the two if-statements below '''




# inputs and directories
nonreg = Workflow(name='nonreg')
nonreg.base_dir='/scr/kansas1/huntenburg/nonlinear/'+version+'/working_dir/'
data_dir='/scr/jessica2/Schaare/LEMON/'
fs_subjects_dir='/scr/jessica2/Schaare/LEMON/freesurfer/'
out_dir = '/scr/jessica2/Schaare/LEMON/preprocessed/'
#nonreg.config['execution']={'remove_unnecessary_outputs': 'False'}
nonreg.config['execution']['crashdump_dir'] = nonreg.base_dir + "/crash_files"

if version == 'version_3' or version == 'version_4' or version == 'version_6' or version == 'version_7' or version == 'version_13' or version == 'version_14' or version == 'version_15' or version == 'version_18' or version == 'version_19' :
    fmap_mask='/scr/kansas1/huntenburg/nonlinear/fmap_mask/fmap_mask_1922_fillh_blur4_bin01_masked.nii.gz'


# reading subjects from file
subjects=[]
f = open('/scr/jessica2/Schaare/LEMON/done_freesurfer.txt','r')
for line in f:
    subjects.append(line.strip())
    
subjects=['LEMON001']


# create infosource to iterate over subjects
Author: juhuntenburg, Project: nonlinear_coreg, Lines of code: 33, Source file: nonlinear.py

Example 12: create_lemon_resting

# Required import: from nipype.pipeline.engine import Workflow [as alias]
# Or: from nipype.pipeline.engine.Workflow import config['execution']['crashdump_dir'] [as alias]
def create_lemon_resting(subject, working_dir, data_dir, freesurfer_dir, out_dir, 
                      vol_to_remove, TR, epi_resolution, highpass, lowpass,
                      echo_space, te_diff, pe_dir):

    # set fsl output type to nii.gz
    fsl.FSLCommand.set_default_output_type('NIFTI_GZ')
    
    # main workflow
    func_preproc = Workflow(name='lemon_resting')
    func_preproc.base_dir = working_dir
    func_preproc.config['execution']['crashdump_dir'] = func_preproc.base_dir + "/crash_files"
    
    # select files
    templates={'func': 'nifti/lemon_resting/rest.nii.gz',
               'fmap_phase' : 'nifti/lemon_resting/fmap_phase.nii.gz',
               'fmap_mag' : 'nifti/lemon_resting/fmap_mag.nii.gz',
               'anat_head' : 'preprocessed/anat/T1.nii.gz',
               'anat_brain' : 'preprocessed/anat/T1_brain.nii.gz',
               'func_mask' : 'preprocessed/anat/func_mask.nii.gz'
               }
    selectfiles = Node(nio.SelectFiles(templates,
                                       base_directory=data_dir),
                       name="selectfiles")
    
    # node to remove first volumes
    remove_vol = Node(util.Function(input_names=['in_file','t_min'],
                                    output_names=["out_file"],
                                    function=strip_rois_func),
                      name='remove_vol')
    remove_vol.inputs.t_min = vol_to_remove
    
    # workflow for motion correction
    moco=create_moco_pipeline()
    
    # workflow for fieldmap correction and coregistration
    fmap_coreg=create_fmap_coreg_pipeline()
    fmap_coreg.inputs.inputnode.fs_subjects_dir=freesurfer_dir
    fmap_coreg.inputs.inputnode.fs_subject_id=subject
    fmap_coreg.inputs.inputnode.echo_space=echo_space
    fmap_coreg.inputs.inputnode.te_diff=te_diff
    fmap_coreg.inputs.inputnode.pe_dir=pe_dir
    
    # workflow for applying transformations to timeseries
    transform_ts = create_transform_pipeline()
    transform_ts.inputs.inputnode.resolution=epi_resolution
    
    # workflow to denoise timeseries
    denoise = create_denoise_pipeline()
    denoise.inputs.inputnode.highpass_sigma= 1./(2*TR*highpass)
    denoise.inputs.inputnode.lowpass_sigma= 1./(2*TR*lowpass)
    # https://www.jiscmail.ac.uk/cgi-bin/webadmin?A2=ind1205&L=FSL&P=R57592&1=FSL&9=A&I=-3&J=on&d=No+Match%3BMatch%3BMatches&z=4
    denoise.inputs.inputnode.tr = TR
    
    #sink to store files
    sink = Node(nio.DataSink(parameterization=False,
                             base_directory=out_dir,
                             substitutions=[('fmap_phase_fslprepared', 'fieldmap'),
                                            ('fieldmap_fslprepared_fieldmap_unmasked_vsm', 'shiftmap'),
                                            ('plot.rest_coregistered', 'outlier_plot'),
                                            ('filter_motion_comp_norm_compcor_art_dmotion', 'nuissance_matrix'),
                                            ('rest_realigned.nii.gz_abs.rms', 'rest_realigned_abs.rms'),
                                            ('rest_realigned.nii.gz.par','rest_realigned.par'),
                                            ('rest_realigned.nii.gz_rel.rms', 'rest_realigned_rel.rms'),
                                            ('rest_realigned.nii.gz_abs_disp', 'abs_displacement_plot'),
                                            ('rest_realigned.nii.gz_rel_disp', 'rel_displacment_plot'),
                                            ('art.rest_coregistered_outliers', 'outliers'),
                                            ('global_intensity.rest_coregistered', 'global_intensity'),
                                            ('norm.rest_coregistered', 'composite_norm'),
                                            ('stats.rest_coregistered', 'stats'),
                                            ('rest_denoised_bandpassed_norm.nii.gz', 'rest_preprocessed.nii.gz')]),
                 name='sink')
    
    
    # connections
    func_preproc.connect([(selectfiles, remove_vol, [('func', 'in_file')]),
                          (remove_vol, moco, [('out_file', 'inputnode.epi')]),
                          (selectfiles, fmap_coreg, [('fmap_phase', 'inputnode.phase'),
                                                     ('fmap_mag', 'inputnode.mag'),
                                                     ('anat_head', 'inputnode.anat_head'),
                                                     ('anat_brain', 'inputnode.anat_brain')
                                                     ]),
                          (moco, fmap_coreg, [('outputnode.epi_mean', 'inputnode.epi_mean')]),
                          (remove_vol, transform_ts, [('out_file', 'inputnode.orig_ts')]),
                          (selectfiles, transform_ts, [('anat_head', 'inputnode.anat_head')]),
                          (moco, transform_ts, [('outputnode.mat_moco', 'inputnode.mat_moco')]),
                          (fmap_coreg, transform_ts, [('outputnode.fmap_fullwarp', 'inputnode.fullwarp')]),
                          (selectfiles, denoise, [('func_mask', 'inputnode.brain_mask'),
                                                  ('anat_brain', 'inputnode.anat_brain')]),
                          (moco, denoise, [('outputnode.par_moco', 'inputnode.moco_par')]),
                          (fmap_coreg, denoise, [('outputnode.epi2anat_dat', 'inputnode.epi2anat_dat'),
                                                 ('outputnode.unwarped_mean_epi2fmap', 'inputnode.unwarped_mean')]),
                          (transform_ts, denoise, [('outputnode.trans_ts','inputnode.epi_coreg')]),
                          (moco, sink, [#('outputnode.epi_moco', '[email protected]_ts'),
                                        ('outputnode.par_moco', '[email protected]'),
                                        ('outputnode.rms_moco', '[email protected]'),
                                        ('outputnode.mat_moco', '[email protected]'),
                                        ('outputnode.epi_mean', '[email protected]'),
                                        ('outputnode.rotplot', '[email protected]'),
                                        ('outputnode.transplot', '[email protected]'),
                                        ('outputnode.dispplots', '[email protected]'),
#......... remaining code omitted .........
Author: NeuroanatomyAndConnectivity, Project: pipelines, Lines of code: 103, Source file: lemon_resting.py

Example 13: create_lemon_resting

# Required import: from nipype.pipeline.engine import Workflow [as alias]
# Or: from nipype.pipeline.engine.Workflow import config['execution']['crashdump_dir'] [as alias]
def create_lemon_resting(subject, working_dir, data_dir, freesurfer_dir, out_dir,
                         vol_to_remove, TR, epi_resolution, highpass, lowpass,
                         echo_space, te_diff, pe_dir, standard_brain, standard_brain_resampled, standard_brain_mask,
                         standard_brain_mask_resampled, fwhm_smoothing):
    # set fsl output type to nii.gz
    fsl.FSLCommand.set_default_output_type('NIFTI_GZ')
    # main workflow
    func_preproc = Workflow(name='lemon_resting')
    func_preproc.base_dir = working_dir
    func_preproc.config['execution']['crashdump_dir'] = func_preproc.base_dir + "/crash_files"
    # select files
    templates = {'func': 'raw_data/{subject}/func/EPI_t2.nii',
                 'fmap_phase': 'raw_data/{subject}/unwarp/B0_ph.nii',
                 'fmap_mag': 'raw_data/{subject}/unwarp/B0_mag.nii',
                 'anat_head': 'preprocessed/{subject}/structural/T1.nii.gz',  # either with mod or without
                 'anat_brain': 'preprocessed/{subject}/structural/brain.nii.gz',
                 # new version with brain_extraction from freesurfer  #T1_brain_brain.nii.gz',
                 'brain_mask': 'preprocessed/{subject}/structural/T1_brain_mask.nii.gz',  # T1_brain_brain_mask.nii.gz',
                 'ants_affine': 'preprocessed/{subject}/structural/transforms2mni/transform0GenericAffine.mat',
                 'ants_warp': 'preprocessed/{subject}/structural/transforms2mni/transform1Warp.nii.gz'
                 }

    selectfiles = Node(nio.SelectFiles(templates,
                                       base_directory=data_dir),
                       name="selectfiles")
    selectfiles.inputs.subject = subject


    # node to remove first volumes
    remove_vol = Node(util.Function(input_names=['in_file', 't_min'],
                                    output_names=["out_file"],
                                    function=strip_rois_func),
                      name='remove_vol')
    remove_vol.inputs.t_min = vol_to_remove
    # workflow for motion correction
    moco = create_moco_pipeline()

    # workflow for fieldmap correction and coregistration
    fmap_coreg = create_fmap_coreg_pipeline()
    fmap_coreg.inputs.inputnode.fs_subjects_dir = freesurfer_dir
    fmap_coreg.inputs.inputnode.fs_subject_id = subject
    fmap_coreg.inputs.inputnode.echo_space = echo_space
    fmap_coreg.inputs.inputnode.te_diff = te_diff
    fmap_coreg.inputs.inputnode.pe_dir = pe_dir

    # workflow for applying transformations to timeseries
    transform_ts = create_transform_pipeline()
    transform_ts.inputs.inputnode.resolution = epi_resolution


    # workflow to denoise timeseries
    denoise = create_denoise_pipeline()
    denoise.inputs.inputnode.highpass_sigma = 1. / (2 * TR * highpass)
    denoise.inputs.inputnode.lowpass_sigma = 1. / (2 * TR * lowpass)
    # https://www.jiscmail.ac.uk/cgi-bin/webadmin?A2=ind1205&L=FSL&P=R57592&1=FSL&9=A&I=-3&J=on&d=No+Match%3BMatch%3BMatches&z=4
    denoise.inputs.inputnode.tr = TR

    # workflow to transform timeseries to MNI
    ants_registration = create_ants_registration_pipeline()
    ants_registration.inputs.inputnode.ref = standard_brain_resampled
    ants_registration.inputs.inputnode.tr_sec = TR

    # FL added fullspectrum
    # workflow to transform fullspectrum timeseries to MNI
    ants_registration_full = create_ants_registration_pipeline('ants_registration_full')
    ants_registration_full.inputs.inputnode.ref = standard_brain_resampled
    ants_registration_full.inputs.inputnode.tr_sec = TR

    # workflow to smooth
    smoothing = create_smoothing_pipeline()
    smoothing.inputs.inputnode.fwhm = fwhm_smoothing

    # visualize registration results
    visualize = create_visualize_pipeline()
    visualize.inputs.inputnode.mni_template = standard_brain_resampled



    # sink to store files
    sink = Node(nio.DataSink(parameterization=False,
                             base_directory=out_dir,
                             substitutions=[('fmap_phase_fslprepared', 'fieldmap'),
                                            ('fieldmap_fslprepared_fieldmap_unmasked_vsm', 'shiftmap'),
                                            ('plot.rest_coregistered', 'outlier_plot'),
                                            ('filter_motion_comp_norm_compcor_art_dmotion', 'nuissance_matrix'),
                                            ('rest_realigned.nii.gz_abs.rms', 'rest_realigned_abs.rms'),
                                            ('rest_realigned.nii.gz.par', 'rest_realigned.par'),
                                            ('rest_realigned.nii.gz_rel.rms', 'rest_realigned_rel.rms'),
                                            ('rest_realigned.nii.gz_abs_disp', 'abs_displacement_plot'),
                                            ('rest_realigned.nii.gz_rel_disp', 'rel_displacment_plot'),
                                            ('art.rest_coregistered_outliers', 'outliers'),
                                            ('global_intensity.rest_coregistered', 'global_intensity'),
                                            ('norm.rest_coregistered', 'composite_norm'),
                                            ('stats.rest_coregistered', 'stats'),
                                            ('rest_denoised_bandpassed_norm.nii.gz',
                                             'rest_preprocessed_nativespace.nii.gz'),
                                            ('rest_denoised_bandpassed_norm_trans.nii.gz',
                                             'rest_mni_unsmoothed.nii.gz'),
                                            ('rest_denoised_bandpassed_norm_trans_smooth.nii',
                                             'rest_mni_smoothed.nii'),
#......... remaining code omitted .........
Author: fBeyer89, Project: LIFE_Lemon_mod_mod, Lines of code: 103, Source file: lemon_resting.py

Example 14: learning_prepare_data_wf

# Required import: from nipype.pipeline.engine import Workflow [as alias]
# Or: from nipype.pipeline.engine.Workflow import config['execution']['crashdump_dir'] [as alias]
def learning_prepare_data_wf(working_dir,
                             ds_dir,
                             template_dir,
                             df_file,
                             in_data_name_list,
                             data_lookup_dict,
                             use_n_procs,
                             plugin_name):
    import os
    from nipype import config
    from nipype.pipeline.engine import Node, Workflow, MapNode, JoinNode
    import nipype.interfaces.utility as util
    import nipype.interfaces.io as nio
    from nipype.interfaces.freesurfer.utils import ImageInfo
    from utils import aggregate_data, vectorize_data
    from itertools import chain




    # ensure in_data_name_list is list of lists
    in_data_name_list = [i if type(i) == list else [i] for i in in_data_name_list]
    in_data_name_list_unique = list(set(chain.from_iterable(in_data_name_list)))

    #####################################
    # GENERAL SETTINGS
    #####################################
    wf = Workflow(name='learning_prepare_data_wf')
    wf.base_dir = os.path.join(working_dir)

    nipype_cfg = dict(logging=dict(workflow_level='DEBUG'), execution={'stop_on_first_crash': True,
                                                                       'remove_unnecessary_outputs': False,
                                                                       'job_finished_timeout': 120})
    config.update_config(nipype_cfg)
    wf.config['execution']['crashdump_dir'] = os.path.join(working_dir, 'crash')

    ds = Node(nio.DataSink(), name='ds')
    ds.inputs.base_directory = os.path.join(ds_dir, 'group_learning_prepare_data')

    ds.inputs.regexp_substitutions = [
        # ('subject_id_', ''),
        ('_parcellation_', ''),
        ('_bp_freqs_', 'bp_'),
        ('_extraction_method_', ''),
        ('_subject_id_[A0-9]*/', '')
    ]
    ds_pdf = Node(nio.DataSink(), name='ds_pdf')
    ds_pdf.inputs.base_directory = os.path.join(ds_dir, 'pdfs')
    ds_pdf.inputs.parameterization = False

    # get atlas data
    templates_atlases = {'GM_mask_MNI_2mm': 'SPM_GM/SPM_GM_mask_2mm.nii.gz',
                         'GM_mask_MNI_3mm': 'SPM_GM/SPM_GM_mask_3mm.nii.gz',
                         'brain_mask_MNI_3mm': 'cpac_image_resources/MNI_3mm/MNI152_T1_3mm_brain_mask.nii.gz',
                         'brain_template_MNI_3mm': 'cpac_image_resources/MNI_3mm/MNI152_T1_3mm.nii.gz'
                         }

    selectfiles_anat_templates = Node(nio.SelectFiles(templates_atlases,
                                                      base_directory=template_dir),
                                      name="selectfiles_anat_templates")

    #####################################
    # SET ITERATORS
    #####################################
    # SUBJECTS ITERATOR
    in_data_name_infosource = Node(util.IdentityInterface(fields=['in_data_name']), name='in_data_name_infosource')
    in_data_name_infosource.iterables = ('in_data_name', in_data_name_list_unique)

    mulitmodal_in_data_name_infosource = Node(util.IdentityInterface(fields=['multimodal_in_data_name']),
                                              name='mulitmodal_in_data_name_infosource')
    mulitmodal_in_data_name_infosource.iterables = ('multimodal_in_data_name', in_data_name_list)

    subjects_selection_crit_dict = {}
    subjects_selection_crit_dict['adult_healthy_F'] = ["df[df.sex == \'F\']", 'df[df.no_axis_1]', 'df[df.age >= 18]']
    subjects_selection_crit_dict['adult_F'] = ["df[df.sex == \'F\']", 'df[df.age >= 18]']
    subjects_selection_crit_dict['F'] = ["df[df.sex == \'F\']"]

    subjects_selection_crit_dict['adult_healthy_M'] = ["df[df.sex == \'M\']", 'df[df.no_axis_1]', 'df[df.age >= 18]']
    subjects_selection_crit_dict['adult_M'] = ["df[df.sex == \'M\']", 'df[df.age >= 18]']
    subjects_selection_crit_dict['adult'] = ['df[df.age >= 18]']
    # subjects_selection_crit_names_list = subjects_selection_crit_dict.keys()
    subjects_selection_crit_names_list = ['adult_F']

    subject_selection_infosource = Node(util.IdentityInterface(fields=['selection_criterium']),
                                        name='subject_selection_infosource')
    subject_selection_infosource.iterables = ('selection_criterium', subjects_selection_crit_names_list)

    def out_name_str_fct(selection_criterium, in_data_name):
        return selection_criterium + '_' + in_data_name

    out_name_str = Node(util.Function(input_names=['selection_criterium', 'in_data_name'],
                                      output_names=['out_name_str'],
                                      function=out_name_str_fct),
                        name='out_name_str')
    wf.connect(in_data_name_infosource, 'in_data_name', out_name_str, 'in_data_name')
    wf.connect(subject_selection_infosource, 'selection_criterium', out_name_str, 'selection_criterium')

    def get_subjects_info_fct(df_file, subjects_selection_crit_dict, selection_criterium):
        import pandas as pd
        import os
#......... remaining code omitted .........
Author: fliem, Project: LeiCA, Lines of code: 103, Source file: learning_prepare_data_wf.py

Example 15: run_tbss_wf

# Required import: from nipype.pipeline.engine import Workflow [as alias]
# Or: from nipype.pipeline.engine.Workflow import config['execution']['crashdump_dir'] [as alias]
def run_tbss_wf(subject_id,
                working_dir,
                ds_dir,
                use_n_procs,
                plugin_name,
                dMRI_templates,
                in_path):
    import os
    from nipype import config
    from nipype.pipeline.engine import Node, Workflow, MapNode, JoinNode
    import nipype.interfaces.utility as util
    import nipype.interfaces.io as nio
    from nipype.interfaces import fsl, dipy

    from LeiCA.dMRI.nipype_11_workflows_dmri_fsl_snapshot.artifacts import hmc_pipeline, ecc_pipeline
    from LeiCA.dMRI.nipype_11_workflows_dmri_fsl_snapshot.utils import b0_average, extract_bval
    from LeiCA.dMRI.nipype_11_workflows_dmri_fsl_snapshot.tbss import create_tbss_all
    from nipype.workflows.dmri.dipy.denoise import nlmeans_pipeline
    from diffusion_utils import apply_hmc_and_ecc

    #####################################
    # GENERAL SETTINGS
    #####################################
    wf = Workflow(name='tbss_wf')
    wf.base_dir = os.path.join(working_dir)

    nipype_cfg = dict(logging=dict(workflow_level='DEBUG'), execution={'stop_on_first_crash': True,
                                                                       'remove_unnecessary_outputs': False,
                                                                       'job_finished_timeout': 15})
    config.update_config(nipype_cfg)
    wf.config['execution']['crashdump_dir'] = os.path.join(working_dir, 'crash')

    ds = Node(nio.DataSink(base_directory=ds_dir), name='ds')

    ds.inputs.regexp_substitutions = [
        ('_subject_id_[A0-9]*/', ''),
    ]


    # GET SUBJECT SPECIFIC FUNCTIONAL DATA
    selectfiles = Node(nio.SelectFiles(dMRI_templates, base_directory=in_path), name="selectfiles")
    selectfiles.inputs.subject_id = subject_id

    #####################################
    # WF
    #####################################


    def _bvals_with_nodiff_0_fct(in_bval, lowbval):
        ''' returns bval file with 0 in place of lowbvals
        '''
        import os
        import numpy as np
        bvals = np.loadtxt(in_bval)
        bvals[bvals <= lowbval] = 0
        bval_file_zero = os.path.abspath('bval_0.bval')
        np.savetxt(bval_file_zero, bvals)
        return bval_file_zero

    # CREATE BVALS FILES WITH 0 IN LOWBVAL SLICES. FOR ECC ONLY
    bvals_with_nodiff_0 = Node(util.Function(input_names=['in_bval', 'lowbval'],
                                             output_names=['bval_file_zero'],
                                             function=_bvals_with_nodiff_0_fct), name='bvals_with_nodiff_0')
    wf.connect(selectfiles, 'bval_file', bvals_with_nodiff_0, 'in_bval')
    bvals_with_nodiff_0.inputs.lowbval = 5

    ##
    # GET B0 MASK
    b0_4d_init_0 = Node(util.Function(input_names=['in_dwi', 'in_bval', 'b'], output_names=['out_file'],
                                      function=extract_bval), name='b0_4d_init_0')
    wf.connect(selectfiles, 'dMRI_data', b0_4d_init_0, 'in_dwi')
    #wf.connect(selectfiles, 'bval_file', b0_4d_init_0, 'in_bval')
    wf.connect(bvals_with_nodiff_0, 'bval_file_zero', b0_4d_init_0, 'in_bval')
    b0_4d_init_0.inputs.b = 'nodiff'

    first_b0 = Node(fsl.ExtractROI(t_min=0, t_size=1), name='first_b0')
    wf.connect(b0_4d_init_0, 'out_file', first_b0, 'in_file')

    flirt = Node(fsl.FLIRT(dof=6, out_file='b0_moco.nii.gz'), name='flirt')
    wf.connect(b0_4d_init_0, 'out_file', flirt, 'in_file')
    wf.connect(first_b0, 'roi_file', flirt, 'reference')

    mean_b0_moco_init_0 = Node(fsl.MeanImage(), name='mean_b0_moco_init_0')
    wf.connect(flirt, 'out_file', mean_b0_moco_init_0, 'in_file')

    b0_mask_init_0 = Node(fsl.BET(frac=0.3, mask=True, robust=True), name='b0_mask_init_0')
    wf.connect(mean_b0_moco_init_0, 'out_file', b0_mask_init_0, 'in_file')



    # HEAD MOTION CORRECTION PIPELINE
    hmc = hmc_pipeline()
    wf.connect(selectfiles, 'dMRI_data', hmc, 'inputnode.in_file')
    #wf.connect(selectfiles, 'bval_file', hmc, 'inputnode.in_bval')
    wf.connect(bvals_with_nodiff_0, 'bval_file_zero', hmc, 'inputnode.in_bval')
    wf.connect(selectfiles, 'bvec_file', hmc, 'inputnode.in_bvec')
    wf.connect(b0_mask_init_0, 'mask_file', hmc, 'inputnode.in_mask')
    hmc.inputs.inputnode.ref_num = 0

    wf.connect(hmc, 'outputnode.out_file', ds, 'moco')
#......... remaining code omitted .........
Author: Yaqiongxiao, Project: LeiCA, Lines of code: 103, Source file: tbss.py


Note: The nipype.pipeline.engine.Workflow.config['execution']['crashdump_dir'] examples in this article were compiled by 纯净天空 from open-source code and documentation platforms such as GitHub and MSDocs. The snippets are taken from open-source projects contributed by various developers, and copyright of the source code remains with the original authors; please consult the corresponding project's license before distributing or using the code, and do not republish without permission.