This article collects typical code examples of the Python method nipype.pipeline.engine.Node.plugin_args. If you have been wondering what Node.plugin_args does, how to call it, or what real usage looks like, the curated examples below may help. You can also read more about the containing class, nipype.pipeline.engine.Node.
Fifteen code examples of Node.plugin_args are shown below, sorted by popularity by default. You can upvote the examples you like or find useful; your ratings help the system recommend better Python examples.
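Before the examples, a minimal sketch of the pattern they all share may help: plugin_args is a plain dict attached to a Node, and it is only interpreted by the execution plugin that the workflow is eventually run with (the examples on this page use the HTCondor-style key 'submit_specs'; other plugins read other keys). The snippet below is illustrative rather than taken from any example: the workflow, node names and file path are made up, and the import aliases (util, fsl, ...) are the conventional ones the snippets on this page appear to assume.

# Common imports assumed by the snippets on this page (aliases are a guess
# based on conventional nipype usage; adjust to your own code base).
from nipype.pipeline.engine import Workflow, Node
import nipype.interfaces.utility as util
import nipype.interfaces.fsl as fsl

# Hypothetical toy workflow, just to show where plugin_args fits in.
wf = Workflow(name='demo_wf')

smooth = Node(fsl.IsotropicSmooth(fwhm=6.0), name='smooth')
smooth.inputs.in_file = '/path/to/epi.nii.gz'  # placeholder input

# Per-node resource request; this exact key/value style is what the examples
# below use with the HTCondor-based plugins. Other plugins expect other keys,
# e.g. {'qsub_args': '-l h_vmem=4G'} for SGE or {'sbatch_args': '--mem=4000'}
# for SLURM.
smooth.plugin_args = {'submit_specs': 'request_memory = 4000'}

wf.add_nodes([smooth])

if __name__ == '__main__':
    # plugin_args set on a node only matter for distributed plugins such as
    # 'CondorDAGMan', 'Condor', 'SGE' or 'SLURM'; the 'Linear' and 'MultiProc'
    # plugins simply ignore Condor submit specs.
    wf.run(plugin='CondorDAGMan')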
Example 1: create_normalize_pipeline
# Required import: from nipype.pipeline.engine import Node [as alias]
# Or: from nipype.pipeline.engine.Node import plugin_args [as alias]
def create_normalize_pipeline(name='normalize'):
    # workflow
    normalize = Workflow(name='normalize')
    # Define nodes
    inputnode = Node(interface=util.IdentityInterface(fields=['epi_coreg',
                                                              'tr']),
                     name='inputnode')
    outputnode = Node(interface=util.IdentityInterface(fields=[
                          'normalized_file']),
                      name='outputnode')
    # time-normalize scans
    normalize_time = Node(util.Function(input_names=['in_file', 'tr'],
                                        output_names=['out_file'],
                                        function=time_normalizer),
                          name='normalize_time')
    normalize_time.plugin_args = {'submit_specs': 'request_memory = 17000'}
    normalize.connect([(inputnode, normalize_time, [('tr', 'tr')]),
                       (inputnode, normalize_time, [('epi_coreg', 'in_file')]),
                       (normalize_time, outputnode, [('out_file', 'normalized_file')])
                       ])
    return normalize
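For context, a factory function like the one above only makes its plugin_args matter once the returned workflow is executed with a distributed plugin; a rough usage sketch follows. The base_dir, input paths and the Condor plugin choice are assumptions, and time_normalizer must be importable where the Function node runs.

# Hypothetical driver for the pipeline above (paths are placeholders).
normalize = create_normalize_pipeline(name='normalize')
normalize.base_dir = '/scr/working_dir'
normalize.inputs.inputnode.epi_coreg = '/data/sub01/epi_coreg.nii.gz'
normalize.inputs.inputnode.tr = 2.3

# Only now does the node-level plugin_args take effect: the Condor DAGMan
# plugin copies 'submit_specs' into the per-job submit file it generates.
normalize.run(plugin='CondorDAGMan')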
Example 2: create_converter_structural_pipeline
# Required import: from nipype.pipeline.engine import Node [as alias]
# Or: from nipype.pipeline.engine.Node import plugin_args [as alias]
def create_converter_structural_pipeline(working_dir, ds_dir, name="converter_struct"):
    # initiate workflow
    converter_wf = Workflow(name=name)
    converter_wf.base_dir = os.path.join(working_dir, "LeiCA_resting")
    # set fsl output
    fsl.FSLCommand.set_default_output_type("NIFTI_GZ")
    # inputnode
    inputnode = Node(util.IdentityInterface(fields=["t1w_dicom"]), name="inputnode")
    outputnode = Node(util.IdentityInterface(fields=["t1w"]), name="outputnode")
    niftisink = Node(nio.DataSink(), name="niftisink")
    niftisink.inputs.base_directory = os.path.join(ds_dir, "raw_niftis")
    # convert to nifti
    # todo: check if geometry bugs attack; use dcm2nii?
    converter_t1w = Node(DcmStack(embed_meta=True), name="converter_t1w")
    converter_t1w.plugin_args = {"submit_specs": "request_memory = 2000"}
    converter_t1w.inputs.out_format = "t1w"
    converter_wf.connect(inputnode, "t1w_dicom", converter_t1w, "dicom_files")
    # reorient to standard orientation
    reor_2_std = Node(fsl.Reorient2Std(), name="reor_2_std")
    converter_wf.connect(converter_t1w, "out_file", reor_2_std, "in_file")
    converter_wf.connect(reor_2_std, "out_file", outputnode, "t1w")
    # save original niftis
    converter_wf.connect(reor_2_std, "out_file", niftisink, "sMRI")
    converter_wf.write_graph(dotfilename="converter_struct", graph2use="flat", format="pdf")
    return converter_wf
Example 3: create_reconall_pipeline
# Required import: from nipype.pipeline.engine import Node [as alias]
# Or: from nipype.pipeline.engine.Node import plugin_args [as alias]
def create_reconall_pipeline(name='reconall'):
    reconall = Workflow(name='reconall')
    # inputnode
    inputnode = Node(util.IdentityInterface(fields=['anat',
                                                    'fs_subjects_dir',
                                                    'fs_subject_id'
                                                    ]),
                     name='inputnode')
    outputnode = Node(util.IdentityInterface(fields=['fs_subjects_dir',
                                                     'fs_subject_id']),
                      name='outputnode')
    # run reconall
    recon_all = Node(fs.ReconAll(args='-autorecon2 -nuiterations 7 -no-isrunning -hippo-subfields'),
                     name="recon_all")
    # recon_all.inputs.directive = 'autorecon2-wm'  # -autorecon3
    recon_all.plugin_args = {'submit_specs': 'request_memory = 9000'}

    # function to replace / in subject id string with a _
    def sub_id(sub_id):
        return sub_id.replace('/', '_')

    reconall.connect([(inputnode, recon_all, [('fs_subjects_dir', 'subjects_dir'),
                                              ('anat', 'T1_files'),
                                              (('fs_subject_id', sub_id), 'subject_id')]),
                      (recon_all, outputnode, [('subject_id', 'fs_subject_id'),
                                               ('subjects_dir', 'fs_subjects_dir')])
                      ])
    return reconall
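The 'request_memory' string above is HTCondor submit-file syntax; on other clusters the same per-node intent is expressed through plugin-specific keys. A hedged sketch, assuming the standard nipype SGE/PBS and SLURM plugins are used (scheduler flag values are illustrative):

# HTCondor-based plugins (as in the example above):
recon_all.plugin_args = {'submit_specs': 'request_memory = 9000'}

# SGE / PBS: extra qsub flags; 'overwrite': True replaces any qsub_args
# passed to workflow.run() instead of appending to them.
recon_all.plugin_args = {'qsub_args': '-l h_vmem=9G', 'overwrite': True}

# SLURM: extra sbatch flags.
recon_all.plugin_args = {'sbatch_args': '--mem=9000'}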
Example 4: create_converter_diffusion_pipeline
# Required import: from nipype.pipeline.engine import Node [as alias]
# Or: from nipype.pipeline.engine.Node import plugin_args [as alias]
def create_converter_diffusion_pipeline(working_dir, ds_dir, name="converter_diffusion"):
    # initiate workflow
    converter_wf = Workflow(name=name)
    converter_wf.base_dir = os.path.join(working_dir, "LeiCA_resting")
    # set fsl output
    fsl.FSLCommand.set_default_output_type("NIFTI_GZ")
    # inputnode
    inputnode = Node(util.IdentityInterface(fields=["dMRI_dicom"]), name="inputnode")
    outputnode = Node(util.IdentityInterface(fields=["dMRI"]), name="outputnode")
    niftisink = Node(nio.DataSink(), name="niftisink")
    niftisink.inputs.base_directory = os.path.join(ds_dir, "raw_niftis")
    #######
    converter_dMRI = Node(Dcm2nii(), name="converter_dMRI")
    converter_dMRI.inputs.gzip_output = True
    converter_dMRI.inputs.nii_output = True
    converter_dMRI.inputs.anonymize = False
    converter_dMRI.plugin_args = {"submit_specs": "request_memory = 2000"}
    converter_wf.connect(inputnode, "dMRI_dicom", converter_dMRI, "source_names")
    dMRI_rename = Node(util.Rename(format_string="DTI_mx_137.nii.gz"), name="dMRI_rename")
    converter_wf.connect(converter_dMRI, "converted_files", dMRI_rename, "in_file")
    bvecs_rename = Node(util.Rename(format_string="DTI_mx_137.bvecs"), name="bvecs_rename")
    converter_wf.connect(converter_dMRI, "bvecs", bvecs_rename, "in_file")
    bvals_rename = Node(util.Rename(format_string="DTI_mx_137.bvals"), name="bvals_rename")
    converter_wf.connect(converter_dMRI, "bvals", bvals_rename, "in_file")
    # reorient to standard orientation
    reor_2_std = Node(fsl.Reorient2Std(), name="reor_2_std")
    converter_wf.connect(dMRI_rename, "out_file", reor_2_std, "in_file")
    converter_wf.connect(reor_2_std, "out_file", outputnode, "dMRI")
    # save original niftis
    converter_wf.connect(reor_2_std, "out_file", niftisink, "[email protected]")
    converter_wf.connect(bvals_rename, "out_file", niftisink, "[email protected]")
    converter_wf.connect(bvecs_rename, "out_file", niftisink, "[email protected]")
    converter_wf.write_graph(dotfilename="converter_struct", graph2use="flat", format="pdf")
    return converter_wf
Example 5: create_registration_pipeline
# Required import: from nipype.pipeline.engine import Node [as alias]
# Or: from nipype.pipeline.engine.Node import plugin_args [as alias]
def create_registration_pipeline(working_dir, freesurfer_dir, ds_dir, name='registration'):
    """
    find transformations between struct, funct, and MNI
    """
    # initiate workflow
    reg_wf = Workflow(name=name)
    reg_wf.base_dir = os.path.join(working_dir, 'LeiCA_resting', 'rsfMRI_preprocessing')
    # set fsl output
    fsl.FSLCommand.set_default_output_type('NIFTI_GZ')
    freesurfer.FSCommand.set_default_subjects_dir(freesurfer_dir)
    # inputnode
    inputnode = Node(util.IdentityInterface(fields=['initial_mean_epi_moco',
                                                    't1w',
                                                    't1w_brain',
                                                    'subject_id',
                                                    'wm_mask_4_bbr',
                                                    'struct_brain_mask']),
                     name='inputnode')
    outputnode = Node(util.IdentityInterface(fields=['struct_2_MNI_warp',
                                                     'epi_2_struct_mat',
                                                     'struct_2_epi_mat',
                                                     'epi_2_MNI_warp',
                                                     'MNI_2_epi_warp',
                                                     'fs_2_struct_mat',
                                                     'mean_epi_structSpace',
                                                     'mean_epi_MNIspace',
                                                     'struct_MNIspace']),
                      name='outputnode')
    ds = Node(nio.DataSink(base_directory=ds_dir), name='ds')
    ds.inputs.substitutions = [('_TR_id_', 'TR_')]
    ##########################################
    # TOC REGISTRATION MATS AND WARPS
    ##########################################
    # I. STRUCT -> MNI
    ## 1. STRUCT -> MNI with FLIRT
    ## 2. CALC. WARP STRUCT -> MNI with FNIRT
    # II. EPI -> STRUCT
    ## 3. calc EPI -> STRUCT initial registration
    ## 4. run EPI -> STRUCT via bbr
    ## 5. INVERT to get: STRUCT -> EPI
    # III. COMBINE I. & II.: EPI -> MNI
    ## 6. COMBINE MATS: EPI -> MNI
    ## 7. MNI -> EPI
    ##########################################
    # CREATE REGISTRATION MATS AND WARPS
    ##########################################
    # I. STRUCT -> MNI
    ##########################################
    # 1. REGISTER STRUCT -> MNI with FLIRT
    struct_2_MNI_mat = Node(fsl.FLIRT(dof=12), name='struct_2_MNI_mat')
    struct_2_MNI_mat.inputs.reference = fsl.Info.standard_image('MNI152_T1_2mm_brain.nii.gz')
    reg_wf.connect(inputnode, 't1w_brain', struct_2_MNI_mat, 'in_file')
    reg_wf.connect(struct_2_MNI_mat, 'out_matrix_file', outputnode, 'struct_2_MNI_mat_flirt')
    # 2. CALC. WARP STRUCT -> MNI with FNIRT
    # cf. wrt. 2mm:
    # https://www.jiscmail.ac.uk/cgi-bin/webadmin?A2=ind1311&L=FSL&P=R86108&1=FSL&9=A&J=on&d=No+Match%3BMatch%3BMatches&z=4
    struct_2_MNI_warp = Node(fsl.FNIRT(), name='struct_2_MNI_warp')
    struct_2_MNI_warp.inputs.config_file = 'T1_2_MNI152_2mm'
    struct_2_MNI_warp.inputs.ref_file = fsl.Info.standard_image('MNI152_T1_2mm.nii.gz')
    struct_2_MNI_warp.inputs.field_file = 'struct_2_MNI_warp.nii.gz'
    struct_2_MNI_warp.plugin_args = {'submit_specs': 'request_memory = 4000'}
    reg_wf.connect(inputnode, 't1w', struct_2_MNI_warp, 'in_file')
    reg_wf.connect(struct_2_MNI_mat, 'out_matrix_file', struct_2_MNI_warp, 'affine_file')
    reg_wf.connect(struct_2_MNI_warp, 'field_file', ds, 'registration.struct_2_MNI_warp')
    reg_wf.connect(struct_2_MNI_warp, 'field_file', outputnode, 'struct_2_MNI_warp')
    reg_wf.connect(struct_2_MNI_warp, 'warped_file', outputnode, 'struct_MNIspace')
    reg_wf.connect(struct_2_MNI_warp, 'warped_file', ds, 'registration.struct_MNIspace')
    # II. EPI -> STRUCT (via bbr)
    ##########################################
    # 3. calc EPI -> STRUCT initial registration with flirt dof=6 and corratio
    epi_2_struct_flirt6_mat = Node(fsl.FLIRT(dof=6, cost='corratio'), name='epi_2_struct_flirt6_mat')
    epi_2_struct_flirt6_mat.inputs.out_file = 'epi_structSpace_flirt6.nii.gz'
    reg_wf.connect(inputnode, 't1w_brain', epi_2_struct_flirt6_mat, 'reference')
    reg_wf.connect(inputnode, 'initial_mean_epi_moco', epi_2_struct_flirt6_mat, 'in_file')
    # 4. run EPI -> STRUCT via bbr
    bbr_shedule = os.path.join(os.getenv('FSLDIR'), 'etc/flirtsch/bbr.sch')
    # ... (the rest of this example is omitted) ...
Example 6: Node
# Required import: from nipype.pipeline.engine import Node [as alias]
# Or: from nipype.pipeline.engine.Node import plugin_args [as alias]
# apply fmap fullwarp
apply_fmap = Node(fsl.ApplyWarp(interp='spline',
relwarp=True,
out_file='fmap_ts.nii.gz',
datatype='float'),
name='apply_fmap')
apply_ts.connect([(inputnode, apply_fmap, [('moco_ts', 'in_file'),
('fmap_fullwarp', 'field_file')]),
(resamp_anat, apply_fmap, [('out_file', 'ref_file')]),
(apply_fmap, outputnode, [('out_file', 'fmap_ts')])
])
apply_fmap.plugin_args={'initial_specs': 'request_memory = 8000'}
# apply topup fullwarp
apply_topup = Node(fsl.ApplyWarp(interp='spline',
relwarp=True,
out_file='topup_ts.nii.gz',
datatype='float'),
name='apply_topup')
apply_ts.connect([(inputnode, apply_topup, [('moco_ts', 'in_file'),
('topup_fullwarp', 'field_file')]),
(resamp_anat, apply_topup, [('out_file', 'ref_file')]),
(apply_topup, outputnode, [('out_file', 'topup_ts')])
])
apply_topup.plugin_args={'initial_specs': 'request_memory = 8000'}
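Note that this fragment uses 'initial_specs' where most of the other examples use 'submit_specs' (and Example 9 below uses 'override_specs'). As far as I can tell these are separate slots of the submit file that nipype's HTCondor-based plugins generate; a small sketch of the keys side by side, with illustrative HTCondor directives (check the plugin docstrings for the exact assembly order):

# Different spec slots understood by the Condor plugins; all are optional.
apply_fmap.plugin_args = {
    'initial_specs': 'request_memory = 8000',         # early submit-file lines
    'submit_specs': 'requirements = Memory > 8000',   # regular per-job specs
    'override_specs': 'nice_user = true',             # added last, wins on conflict
}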
Example 7: create_nonlinear_pipeline
# Required import: from nipype.pipeline.engine import Node [as alias]
# Or: from nipype.pipeline.engine.Node import plugin_args [as alias]
def create_nonlinear_pipeline(name='nonlinear'):
    # workflow
    nonlinear = Workflow(name='nonlinear')
    # inputnode
    inputnode = Node(util.IdentityInterface(fields=['t1_highres',
                                                    'epi2highres_lin',
                                                    'epi2highres_lin_itk',
                                                    'fov_mask',
                                                    'brain_mask',
                                                    # 'highres2lowres_itk'
                                                    ]),
                     name='inputnode')
    # outputnode
    outputnode = Node(util.IdentityInterface(fields=['epi2highres_warp',
                                                     'epi2highres_invwarp',
                                                     'epi2highres_nonlin',
                                                     ]),
                      name='outputnode')
    # brainmask = Node(ants.ApplyTransforms(dimension=3,
    #                                        invert_transform_flags=[True],
    #                                        interpolation='NearestNeighbor'),
    #                  name='brainmask')
    dil_brainmask = Node(fs.Binarize(min=0.5,
                                     out_type='nii.gz',
                                     dilate=15),
                         name='dil_brainmask')
    mask_epi = Node(fsl.ApplyMask(out_file='epi2highres_lin_masked.nii.gz'),
                    name='mask_epi')
    nonlinear.connect([  # (inputnode, brainmask, [('brain_mask', 'input_image'),
                         #                         ('t1_highres', 'reference_image'),
                         #                         ('highres2lowres_itk', 'transforms')]),
                         # (brainmask, dil_brainmask, [('output_image', 'in_file')]),
                       (inputnode, dil_brainmask, [('brain_mask', 'in_file')]),
                       (dil_brainmask, mask_epi, [('binary_file', 'mask_file')]),
                       (inputnode, mask_epi, [('epi2highres_lin', 'in_file')])
                       ])
    # transform fov mask and apply to t1
    transform_fov = Node(ants.ApplyTransforms(dimension=3,
                                              # invert_transform_flags=[True, False],
                                              output_image='fov_mask_highres.nii.gz',
                                              interpolation='NearestNeighbor'),
                         'transform_fov')
    dilate_fov = Node(fs.Binarize(min=0.5,
                                  dilate=5,
                                  binary_file='fov_mask_highres_dil.nii.gz'),
                      name='dilate_fov')
    mask_t1 = Node(fsl.ApplyMask(out_file='t1_fov_masked.nii.gz'),
                   name='mask_t1')
    nonlinear.connect([(inputnode, transform_fov, [('fov_mask', 'input_image'),
                                                   ('t1_highres', 'reference_image'),
                                                   ('epi2highres_lin_itk', 'transforms')]),
                       (transform_fov, dilate_fov, [('output_image', 'in_file')]),
                       (dilate_fov, mask_t1, [('binary_file', 'mask_file')]),
                       (inputnode, mask_t1, [('t1_highres', 'in_file')]),
                       ])
    # normalization with ants
    antsreg = Node(interface=ants.registration.Registration(dimension=3,
                                                            metric=['CC'],
                                                            metric_weight=[1.0],
                                                            radius_or_number_of_bins=[4],
                                                            sampling_strategy=['None'],
                                                            transforms=['SyN'],
                                                            args='-g 0.1x1x0.1',
                                                            transform_parameters=[(0.10, 3, 0)],
                                                            number_of_iterations=[[50, 20, 10]],
                                                            convergence_threshold=[1e-06],
                                                            convergence_window_size=[10],
                                                            shrink_factors=[[4, 2, 1]],
                                                            smoothing_sigmas=[[2, 1, 0]],
                                                            sigma_units=['vox'],
                                                            use_estimate_learning_rate_once=[True],
                                                            use_histogram_matching=[True],
                                                            collapse_output_transforms=True,
                                                            output_inverse_warped_image=True,
                                                            output_warped_image=True,
                                                            interpolation='BSpline'),
                   name='antsreg')
    antsreg.plugin_args = {'submit_specs': 'request_memory = 20000'}
    nonlinear.connect([(mask_epi, antsreg, [('out_file', 'moving_image')]),
                       (mask_t1, antsreg, [('out_file', 'fixed_image')]),
                       (antsreg, outputnode, [('reverse_transforms', 'epi2highres_invwarp'),
                                              ('forward_transforms', 'epi2highres_warp'),
                                              ('warped_image', 'epi2highres_nonlin')])
    # ... (the rest of this example is omitted) ...
Example 8: Node
# Required import: from nipype.pipeline.engine import Node [as alias]
# Or: from nipype.pipeline.engine.Node import plugin_args [as alias]
# make filelist
translist = Node(util.Merge(2),
name='translist')
mni.connect([(selectfiles, translist, [('affine', 'in2'),
('warp', 'in1')])])
# apply all transforms
applytransform = Node(ants.ApplyTransforms(input_image_type = 3,
#output_image='rest_preprocessed2mni.nii.gz',
interpolation = 'BSpline',
invert_transform_flags=[False, False]),
name='applytransform')
applytransform.inputs.reference_image=template
applytransform.plugin_args={'submit_specs': 'request_memory = 30000'}
mni.connect([(selectfiles, applytransform, [('rest', 'input_image')]),
(translist, applytransform, [('out', 'transforms')])
])
# tune down image to float
changedt = Node(fsl.ChangeDataType(output_datatype='float',
out_file='rest_preprocessed2mni.nii.gz'),
name='changedt')
changedt.plugin_args={'submit_specs': 'request_memory = 30000'}
mni.connect([(applytransform, changedt, [('output_image', 'in_file')])])
# make base directory
def makebase(subject_id, out_dir):
    return out_dir % subject_id
Example 9: Node
# Required import: from nipype.pipeline.engine import Node [as alias]
# Or: from nipype.pipeline.engine.Node import plugin_args [as alias]
report = Node(Function(input_names=['subject_id',
'tsnr_file',
'realignment_parameters_file',
'mean_epi_file',
'mask_file',
'reg_file',
'fssubjects_dir',
'similarity_distribution',
'mean_FD_distribution',
'tsnr_distributions',
'output_file'],
output_names=['out'],
function = create_report), name="report_%s"%(subject_id).replace(".", "_"))
report.inputs.subject_id = subject_id
report.inputs.tsnr_file = tsnr_file
report.inputs.realignment_parameters_file = realignment_parameters_file
report.inputs.mean_epi_file = mean_epi_file
report.inputs.mask_file = mask_file
report.inputs.reg_file = reg_file
report.inputs.fssubjects_dir = fssubjects_dir
report.inputs.similarity_distribution = similarity_distribution
report.inputs.mean_FD_distribution = mean_FD_distribution
report.inputs.tsnr_distributions = tsnr_distributions
report.inputs.output_file = output_file
report.plugin_args={'override_specs': 'request_memory = 4000'}
wf.add_nodes([report])
wf.run(plugin="CondorDAGMan")
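Example 9 passes only the plugin name to wf.run(); plugin_args can also be supplied there as workflow-wide defaults, with node-level plugin_args (like the 'override_specs' set on the report node above) refining individual jobs. A hedged sketch of that combination, with illustrative HTCondor submit-spec strings:

# Workflow-wide defaults go to run(); per-node plugin_args are applied on
# top of these for the corresponding job.
wf.run(plugin='CondorDAGMan',
       plugin_args={'initial_specs': 'universe = vanilla',
                    'submit_specs': 'request_memory = 1000'})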
Example 10: create_structural
# Required import: from nipype.pipeline.engine import Node [as alias]
# Or: from nipype.pipeline.engine.Node import plugin_args [as alias]
def create_structural(subject, working_dir, data_dir, freesurfer_dir, out_dir):
    '''
    Workflow to run background masking and then freesurfer recon-all
    on "lowres" MP2RAGE data
    '''
    # main workflow
    struct_preproc = Workflow(name='mp2rage_preproc')
    struct_preproc.base_dir = working_dir
    struct_preproc.config['execution']['crashdump_dir'] = struct_preproc.base_dir + "/crash_files"
    # select files
    templates = {'inv2': 'raw/mp2rage/inv2.nii.gz',
                 't1map': 'raw/mp2rage/t1map.nii.gz',
                 'uni': 'raw/mp2rage/uni.nii.gz'}
    selectfiles = Node(nio.SelectFiles(templates,
                                       base_directory=data_dir),
                       name="selectfiles")
    # mp2rage background masking
    background = Node(JistIntensityMp2rageMasking(outMasked=True,
                                                  outMasked2=True,
                                                  outSignal2=True),
                      name='background')
    # workflow to run freesurfer reconall
    # function to replace / in subject id string with a _
    def sub_id(sub_id):
        return sub_id.replace('/', '_')

    recon_all = Node(fs.ReconAll(args='-nuiterations 7 -no-isrunning'),
                     name="recon_all")
    recon_all.plugin_args = {'submit_specs': 'request_memory = 9000'}
    recon_all.inputs.subjects_dir = freesurfer_dir
    recon_all.inputs.subject_id = sub_id(subject)
    # sink to store files
    sink = Node(nio.DataSink(base_directory=out_dir,
                             parameterization=False,
                             substitutions=[('outStripped', 'uni_stripped'),
                                            ('outMasked2', 'uni_masked'),
                                            ('outSignal2', 'background_mask'),
                                            ('outOriginal', 'uni_reoriented'),
                                            ('outMask', 'skullstrip_mask'),
                                            ('transform_Warped', 'T1_brain2std')]),
                name='sink')
    # connections
    struct_preproc.connect([(selectfiles, background, [('inv2', 'inSecond'),
                                                       ('t1map', 'inQuantitative'),
                                                       ('uni', 'inT1weighted')]),
                            (background, recon_all, [('outMasked2', 'T1files')]),
                            (background, sink, [('outMasked2', '[email protected]_masked'),
                                                ('outSignal2', '[email protected]_mask')]),
                            ])
    # struct_preproc.write_graph(dotfilename='struct_preproc.dot', graph2use='colored', format='pdf', simple_form=True)
    return struct_preproc
Example 11: create_converter_functional_pipeline
# Required import: from nipype.pipeline.engine import Node [as alias]
# Or: from nipype.pipeline.engine.Node import plugin_args [as alias]
def create_converter_functional_pipeline(working_dir, ds_dir, name="converter_funct"):
    # initiate workflow
    converter_wf = Workflow(name=name)
    converter_wf.base_dir = os.path.join(working_dir, "LeiCA_resting")
    # set fsl output
    fsl.FSLCommand.set_default_output_type("NIFTI_GZ")
    # I/O NODE
    inputnode = Node(util.IdentityInterface(fields=["epi_dicom", "out_format"]), name="inputnode")
    outputnode = Node(util.IdentityInterface(fields=["epi", "TR_ms"]), name="outputnode")
    niftisink = Node(nio.DataSink(), name="niftisink")
    niftisink.inputs.base_directory = os.path.join(ds_dir, "raw_niftis")
    niftisink.inputs.substitutions = [("_TR_id_", "TR_")]
    # convert to nifti
    # todo: check if geometry bugs attack; use dcm2nii?
    converter_epi = Node(DcmStack(embed_meta=True), name="converter_epi")
    converter_epi.plugin_args = {"submit_specs": "request_memory = 2000"}

    def reformat_filename_fct(TR_str):
        return "rsfMRI_" + TR_str

    reformat_filename = Node(
        util.Function(input_names=["TR_str"], output_names=["filename"], function=reformat_filename_fct),
        name="reformat_filename",
    )
    converter_wf.connect(inputnode, "out_format", reformat_filename, "TR_str")
    converter_wf.connect(inputnode, "epi_dicom", converter_epi, "dicom_files")
    converter_wf.connect(reformat_filename, "filename", converter_epi, "out_format")
    # reorient to standard orientation
    reor_2_std = Node(fsl.Reorient2Std(), name="reor_2_std")
    converter_wf.connect(converter_epi, "out_file", reor_2_std, "in_file")
    converter_wf.connect(reor_2_std, "out_file", outputnode, "epi")
    # save original niftis
    converter_wf.connect(reor_2_std, "out_file", niftisink, "rsfMRI")

    # GET TR FROM .nii
    def check_TR_fct(TR):
        print(" ")
        print("check_TR_fct checks validity of TR")
        print("imported TR is %s" % TR)
        print(" ")
        try:
            float(TR)
        except ValueError:
            isvalid_TR = 0
            raise Exception("ERROR: TR COULD NOT AUTOMATICALLY BE EXTRACTED FROM EPI.\nEXECUTION STOPPED")
        else:
            isvalid_TR = 1
            print("TR is valid")
        if isvalid_TR:
            if float(TR) <= 0:
                raise Exception("ERROR: TR NOT VALID (<=0).\nEXECUTION STOPPED")
        return float(TR)

    get_TR = Node(ImageInfo(), name="get_TR")
    converter_wf.connect(reor_2_std, "out_file", get_TR, "in_file")
    check_TR = Node(util.Function(input_names=["TR"], output_names=["TR_ms"], function=check_TR_fct), name="check_TR")
    converter_wf.connect(get_TR, "TR", check_TR, "TR")
    converter_wf.connect(check_TR, "TR_ms", outputnode, "TR_ms")
    converter_wf.write_graph(dotfilename=converter_wf.name, graph2use="flat", format="pdf")
    return converter_wf
Example 12: create_sca_pipeline
# Required import: from nipype.pipeline.engine import Node [as alias]
# Or: from nipype.pipeline.engine.Node import plugin_args [as alias]
def create_sca_pipeline(working_dir, rois_list, ds_dir, name='sca'):
    afni.base.AFNICommand.set_default_output_type('NIFTI_GZ')
    fsl.FSLCommand.set_default_output_type('NIFTI_GZ')
    sca_wf = Workflow(name=name)
    sca_wf.base_dir = os.path.join(working_dir, 'LeiCA_resting')
    # inputnode
    inputnode = Node(util.IdentityInterface(fields=['rs_preprocessed',
                                                    'epi_2_MNI_warp']),
                     name='inputnode')
    # outputnode
    outputnode = Node(util.IdentityInterface(fields=['functional_mask',
                                                     'seed_based_z']),
                      name='outputnode')
    ds = Node(nio.DataSink(base_directory=ds_dir), name='ds')
    ds.inputs.substitutions = [('_TR_id_', 'TR_')]
    epi_MNIspace = Node(fsl.ApplyWarp(), name='epi_MNIspace')
    epi_MNIspace.inputs.ref_file = fsl.Info.standard_image('MNI152_T1_2mm_brain.nii.gz')
    epi_MNIspace.plugin_args = {'submit_specs': 'request_memory = 4000'}
    sca_wf.connect(inputnode, 'rs_preprocessed', epi_MNIspace, 'in_file')
    sca_wf.connect(inputnode, 'epi_2_MNI_warp', epi_MNIspace, 'field_file')
    epi_mask = Node(interface=afni.Automask(), name='epi_mask')
    sca_wf.connect(epi_MNIspace, 'out_file', epi_mask, 'in_file')
    sca_wf.connect(epi_mask, 'out_file', outputnode, 'functional_mask')
    roi_infosource = Node(util.IdentityInterface(fields=['roi']), name='roi_infosource')
    roi_infosource.iterables = ('roi', rois_list)
    point = Node(afni.Calc(), name='point')
    point.inputs.in_file_a = fsl.Info.standard_image('MNI152_T1_2mm_brain.nii.gz')
    point.inputs.outputtype = 'NIFTI_GZ'
    point.inputs.out_file = 'roi_point.nii.gz'

    def roi2exp(coord):
        return 'step(4-(x%+d)*(x%+d)-(y%+d)*(y%+d)-(z%+d)*(z%+d))' % (coord[0], coord[0], coord[1], coord[1], -coord[2], -coord[2])

    sca_wf.connect(roi_infosource, ('roi', roi2exp), point, 'expr')

    def format_filename(roi_str):
        import string
        valid_chars = '-_.%s%s' % (string.ascii_letters, string.digits)
        return 'roi_' + ''.join(c for c in str(roi_str).replace(',', '_') if c in valid_chars) + '_roi.nii.gz'

    sphere = Node(fsl.ImageMaths(), name='sphere')
    sphere.inputs.out_data_type = 'float'
    sphere.inputs.op_string = '-kernel sphere 8 -fmean -bin'
    sca_wf.connect(point, 'out_file', sphere, 'in_file')
    sca_wf.connect(roi_infosource, ('roi', format_filename), sphere, 'out_file')
    # fixme
    # smoothing = Node(fsl.maths.IsotropicSmooth(), name='smoothing')
    # smoothing.iterables = ('fwhm', [1, 6])
    # sca_wf.connect(epi_MNIspace, 'out_file', smoothing, 'in_file')
    extract_timeseries = Node(afni.Maskave(), name='extract_timeseries')
    extract_timeseries.inputs.quiet = True
    sca_wf.connect(sphere, 'out_file', extract_timeseries, 'mask')
    # fixme
    sca_wf.connect(epi_MNIspace, 'out_file', extract_timeseries, 'in_file')
    # sca_wf.connect(smoothing, 'out_file', extract_timeseries, 'in_file')
    correlation_map = Node(afni.Fim(), name='correlation_map')
    correlation_map.inputs.out = 'Correlation'
    correlation_map.inputs.outputtype = 'NIFTI_GZ'
    correlation_map.inputs.out_file = 'corr_map.nii.gz'
    sca_wf.connect(extract_timeseries, 'out_file', correlation_map, 'ideal_file')
    sca_wf.connect(epi_MNIspace, 'out_file', correlation_map, 'in_file')
    z_trans = Node(interface=afni.Calc(), name='z_trans')
    z_trans.inputs.expr = 'log((1+a)/(1-a))/2'
    z_trans.inputs.outputtype = 'NIFTI_GZ'
    sca_wf.connect(correlation_map, 'out_file', z_trans, 'in_file_a')
    sca_wf.connect(z_trans, 'out_file', outputnode, 'seed_based_z')
    sca_wf.connect(z_trans, 'out_file', ds, 'sca.seed_based_z')
    # # plot rs corr on surf
    # plot_rs = Node(interface=util.Function(input_names=['in_file', 'thr_list', 'roi_coords'],
    #                                        output_names=['out_file_list'],
    #                                        function=plot_rs_surf),
    #                name='plot_rs')
    # plot_rs.inputs.thr_list = [(.2, 1)]
    # sca_wf.connect(correlation_map, 'out_file', plot_rs, 'in_file')
    # sca_wf.connect(roi_infosource, 'roi', plot_rs, 'roi_coords')
    sca_wf.write_graph(dotfilename='sca', graph2use='flat', format='pdf')
    return sca_wf
Example 13: Node
# Required import: from nipype.pipeline.engine import Node [as alias]
# Or: from nipype.pipeline.engine.Node import plugin_args [as alias]
transformlist = Node(interface=Function(input_names=['string1', 'string2'],
output_names=['transformlist'],
function=makelist),
name='transformlist')
apply_ts.connect([(inputnode, transformlist, [('nonlin_epi2anat_warp', 'string2'),
('nonlin_anat2epi_itk', 'string1')
])
])
nonlin_apply = Node(ants.ApplyTransforms(input_image_type=3,
output_image='nonlin_ts.nii.gz',
invert_transform_flags=[True,False],
interpolation = 'BSpline'),
'nonlin_apply')
nonlin_apply.plugin_args={'initial_specs': 'request_memory = 22000'}
apply_ts.connect([(inputnode, nonlin_apply, [('moco_ts', 'input_image'),
#('anat_head', 'reference_image')
]),
(resamp_anat, nonlin_apply, [('out_file', 'reference_image')]),
(transformlist, nonlin_apply, [('transformlist', 'transforms')]),
(nonlin_apply, outputnode, [('output_image', 'nonlin_ts')])
])
# set up workflow, in- and output
apply_ts.base_dir='/scr/kansas1/huntenburg/'
data_dir='/scr/jessica2/Schaare/LEMON/'
#out_dir = '/scr/kansas1/huntenburg/timeseries/'
#applywarp_linear.config['execution']={'remove_unnecessary_outputs': 'False'}
apply_ts.config['execution']['crashdump_dir'] = apply_ts.base_dir + "/crash_files"
Example 14: makelist
# Required import: from nipype.pipeline.engine import Node [as alias]
# Or: from nipype.pipeline.engine.Node import plugin_args [as alias]
name="selectfiles")
# make filelist
def makelist(in1, in2, in3, in4):
    return [in1, in2, in3, in4]
make_list = Node(util.Function(input_names=['in1', 'in2', 'in3', 'in4'],
output_names=['file_list'],
function=makelist),
name='make_list')
# concatenate scans
concatenate=Node(fsl.Merge(dimension='t',
merged_file='rest_concatenated.nii.gz'),
name='concatenate')
concatenate.plugin_args={'submit_specs': 'request_memory = 20000'}
# sink
sink = Node(nio.DataSink(base_directory=out_dir,
parameterization=False),
name='sink')
concat.connect([(selectfiles, make_list, [('rest1a', 'in1'),
('rest1b', 'in2'),
('rest2a', 'in3'),
('rest2b', 'in4')]),
(make_list, concatenate, [('file_list', 'in_files')]),
(concatenate, sink, [('merged_file', '@rest_concat')])
])
concat.run()
Example 15: calc_local_metrics
# Required import: from nipype.pipeline.engine import Node [as alias]
# Or: from nipype.pipeline.engine.Node import plugin_args [as alias]
# ... (the beginning of this example is omitted) ...
# create MNI 2 epi warp
MNI_2_epi_warp = Node(fsl.InvWarp(), name='MNI_2_epi_warp')
MNI_2_epi_warp.inputs.reference = fsl.Info.standard_image('MNI152_T1_2mm.nii.gz')
wf.connect(selectfiles, 'epi_mask', MNI_2_epi_warp, 'reference')
wf.connect(selectfiles, 'epi_2_MNI_warp', MNI_2_epi_warp, 'warp')
# # CREATE GM MASK IN EPI SPACE
# GM_mask_epiSpace = Node(fsl.ApplyWarp(), name='GM_mask_epiSpace')
# GM_mask_epiSpace.inputs.out_file = 'GM_mask_epiSpace.nii.gz'
#
# wf.connect(selectfiles_anat_templates, 'GM_mask_MNI_2mm', GM_mask_epiSpace, 'in_file')
# wf.connect(selectfiles, 'epi_mask', GM_mask_epiSpace, 'ref_file')
# wf.connect(MNI_2_epi_warp, 'inverse_warp', GM_mask_epiSpace, 'field_file')
# wf.connect(GM_mask_epiSpace, 'out_file', ds, 'GM_mask_epiSpace')
# fixme
# # CREATE TS IN MNI SPACE
# # is it ok to apply the 2mm warpfield to the 3mm template?
# # seems ok: https://www.jiscmail.ac.uk/cgi-bin/webadmin?A2=ind0904&L=FSL&P=R14011&1=FSL&9=A&J=on&d=No+Match%3BMatch%3BMatches&z=4
# epi_bp_MNIspace_3mm = Node(fsl.ApplyWarp(), name='epi_bp_MNIspace_3mm')
# epi_bp_MNIspace_3mm.inputs.interp = 'spline'
# epi_bp_MNIspace_3mm.plugin_args = {'submit_specs': 'request_memory = 4000'}
# wf.connect(selectfiles_anat_templates, 'FSL_MNI_3mm_template', epi_bp_MNIspace_3mm, 'ref_file')
# wf.connect(selectfiles, 'preproc_epi_bp', epi_bp_MNIspace_3mm, 'in_file')
# wf.connect(selectfiles, 'epi_2_MNI_warp', epi_bp_MNIspace_3mm, 'field_file')
# CREATE EPI MASK IN MNI SPACE
epi_mask_MNIspace_3mm = Node(fsl.ApplyWarp(), name='epi_mask_MNIspace_3mm')
epi_mask_MNIspace_3mm.inputs.interp = 'nn'
epi_mask_MNIspace_3mm.plugin_args = {'submit_specs': 'request_memory = 4000'}
wf.connect(selectfiles_anat_templates, 'FSL_MNI_3mm_template', epi_mask_MNIspace_3mm, 'ref_file')
wf.connect(selectfiles, 'epi_mask', epi_mask_MNIspace_3mm, 'in_file')
wf.connect(selectfiles, 'epi_2_MNI_warp', epi_mask_MNIspace_3mm, 'field_file')
wf.connect(epi_mask_MNIspace_3mm, 'out_file', ds, 'epi_mask_MNIspace_3mm')
#####################
# CALCULATE METRICS
#####################
# f/ALFF
alff = cpac_alff.create_alff('alff')
alff.inputs.hp_input.hp = 0.01
alff.inputs.lp_input.lp = 0.1
wf.connect(selectfiles, 'preproc_epi_full_spectrum', alff, 'inputspec.rest_res')
# wf.connect(GM_mask_epiSpace, 'out_file', alff, 'inputspec.rest_mask')
wf.connect(selectfiles, 'epi_mask', alff, 'inputspec.rest_mask')
wf.connect(alff, 'outputspec.alff_img', ds, 'alff.alff')
wf.connect(alff, 'outputspec.falff_img', ds, 'alff.falff')
# f/ALFF 2 MNI
# fixme spline or default?
alff_MNIspace_3mm = Node(fsl.ApplyWarp(), name='alff_MNIspace_3mm')
alff_MNIspace_3mm.inputs.interp = 'spline'
alff_MNIspace_3mm.plugin_args = {'submit_specs': 'request_memory = 4000'}
wf.connect(selectfiles_anat_templates, 'FSL_MNI_3mm_template', alff_MNIspace_3mm, 'ref_file')
wf.connect(alff, 'outputspec.alff_img', alff_MNIspace_3mm, 'in_file')
wf.connect(selectfiles, 'epi_2_MNI_warp', alff_MNIspace_3mm, 'field_file')
wf.connect(alff_MNIspace_3mm, 'out_file', ds, 'alff.alff_MNI_3mm')