

Python nipype.Workflow class code examples

This article collects typical usage examples of the nipype.Workflow class in Python. If you are wondering what the Workflow class is for, how to use it, or what working example code looks like, the curated class examples below may help.


The following presents 15 code examples of the Workflow class, sorted by popularity by default.
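
Most of the excerpts below are shown without their module-level imports. As a rough sketch of what these examples assume (project-specific helpers such as ExtractConfounds, CoregReport, UnwarpReport, or ScaleTimeseries come from the lyman/fitz projects themselves and are not part of nipype), the shared imports would look something like this:

import os

# Core nipype building blocks used throughout the examples below
from nipype.pipeline.engine import Node, MapNode, Workflow
from nipype.interfaces.utility import IdentityInterface, Function

# Interface packages for FSL, Freesurfer, and file selection
from nipype.interfaces import fsl, io, freesurfer
from nipype.interfaces import freesurfer as fs  # several examples use this short alias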

Example 1: create_confound_extraction_workflow

def create_confound_extraction_workflow(name="confounds", wm_components=6):
    """Extract nuisance variables from anatomical sources."""
    inputnode = Node(IdentityInterface(["timeseries", "brain_mask", "reg_file", "subject_id"]), "inputs")

    # Find the subject's Freesurfer segmentation (aseg.mgz),
    # which provides the anatomical source for the white matter mask below
    getaseg = Node(
        io.SelectFiles({"aseg": "{subject_id}/mri/aseg.mgz"}, base_directory=os.environ["SUBJECTS_DIR"]), "getaseg"
    )

    # Select and erode the white matter to get deep voxels
    selectwm = Node(fs.Binarize(erode=3, wm=True), "selectwm")

    # Transform the mask into functional space
    transform = MapNode(fs.ApplyVolTransform(inverse=True, interp="nearest"), ["reg_file", "source_file"], "transform")

    # Extract eigenvariates of the timeseries from WM and whole brain
    extract = MapNode(ExtractConfounds(n_components=wm_components), ["timeseries", "brain_mask", "wm_mask"], "extract")

    outputnode = Node(IdentityInterface(["confound_file"]), "outputs")

    confounds = Workflow(name)
    confounds.connect(
        [
            (inputnode, getaseg, [("subject_id", "subject_id")]),
            (getaseg, selectwm, [("aseg", "in_file")]),
            (selectwm, transform, [("binary_file", "target_file")]),
            (inputnode, transform, [("reg_file", "reg_file"), ("timeseries", "source_file")]),
            (transform, extract, [("transformed_file", "wm_mask")]),
            (inputnode, extract, [("timeseries", "timeseries"), ("brain_mask", "brain_mask")]),
            (extract, outputnode, [("out_file", "confound_file")]),
        ]
    )

    return confounds
Developer: kellyhennigan, Project: lyman, Lines: 35, Source: preproc.py

Example 2: create_reg_workflow

def create_reg_workflow(name="reg", space="mni", regtype="model"):
    """Flexibly register files into one of several common spaces."""
    if regtype == "model":
        fields = ["copes", "varcopes", "ss_files"]
    elif regtype == "timeseries":
        fields = ["timeseries"]
    fields.extend(["masks", "affines"])

    if space == "mni":
        fields.append("warpfield")

    inputnode = Node(IdentityInterface(fields), "inputnode")

    func = globals()["%s_%s_transform" % (space, regtype)]

    transform = Node(Function(fields, ["out_files"],
                              func, imports),
                     "transform")

    regflow = Workflow(name=name)

    outputnode = Node(IdentityInterface(["out_files"]), "outputnode")
    for field in fields:
        regflow.connect(inputnode, field, transform, field)
    regflow.connect(transform, "out_files", outputnode, "out_files")

    return regflow, inputnode, outputnode
Developer: toddt, Project: lyman, Lines: 27, Source: registration.py
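
Because create_reg_workflow returns the sub-workflow together with its input and output nodes, a caller typically nests it inside a larger workflow. A minimal sketch of that pattern follows; the datasource node and its field values are hypothetical stand-ins, and it assumes the function above is importable from its module:

from nipype.pipeline.engine import Node, Workflow
from nipype.interfaces.utility import IdentityInterface

# Hypothetical upstream node standing in for whatever selects the real files
datasource = Node(IdentityInterface(["timeseries", "masks",
                                     "affines", "warpfield"]),
                  "datasource")

regflow, reg_inputs, reg_outputs = create_reg_workflow(space="mni",
                                                       regtype="timeseries")

main = Workflow(name="main")

# Nested workflows are addressed with the "nodename.field" dotted syntax
main.connect([
    (datasource, regflow,
        [("timeseries", "inputnode.timeseries"),
         ("masks", "inputnode.masks"),
         ("affines", "inputnode.affines"),
         ("warpfield", "inputnode.warpfield")]),
])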

Example 3: create_bbregister_workflow

def create_bbregister_workflow(name="bbregister",
                               contrast_type="t2",
                               partial_brain=False):
    """Find a linear transformation to align the EPI file with the anatomy."""
    in_fields = ["subject_id", "source_file"]
    if partial_brain:
        in_fields.append("whole_brain_template")
    inputnode = Node(IdentityInterface(in_fields), "inputs")

    # Estimate the registration to Freesurfer conformed space
    func2anat = MapNode(fs.BBRegister(contrast_type=contrast_type,
                                      init="fsl",
                                      epi_mask=True,
                                      registered_file=True,
                                      out_reg_file="func2anat_tkreg.dat",
                                      out_fsl_file="func2anat_flirt.mat"),
                        "source_file",
                        "func2anat")

    # Make an image for quality control on the registration
    report = MapNode(Function(["subject_id", "in_file"],
                              ["out_file"],
                              write_coreg_plot,
                              imports),
                     "in_file",
                     "coreg_report")

    # Define the workflow outputs
    outputnode = Node(IdentityInterface(["tkreg_mat", "flirt_mat", "report"]),
                      "outputs")

    bbregister = Workflow(name=name)

    # Connect the registration
    bbregister.connect([
        (inputnode, func2anat,
            [("subject_id", "subject_id"),
             ("source_file", "source_file")]),
        (inputnode, report,
            [("subject_id", "subject_id")]),
        (func2anat, report,
            [("registered_file", "in_file")]),
        (func2anat, outputnode,
            [("out_reg_file", "tkreg_mat")]),
        (func2anat, outputnode,
            [("out_fsl_file", "flirt_mat")]),
        (report, outputnode,
            [("out_file", "report")]),
        ])

    # Possibly connect the full_fov image
    if partial_brain:
        bbregister.connect([
            (inputnode, func2anat,
                [("whole_brain_template", "intermediate_file")]),
                ])

    return bbregister
Developer: toddt, Project: lyman, Lines: 58, Source: preproc.py

Example 4: create_surface_projection_workflow

def create_surface_projection_workflow(name="surfproj", exp_info=None):
    """Project the group mask and thresholded zstat file onto the surface."""
    if exp_info is None:
        exp_info = lyman.default_experiment_parameters()

    inputnode = Node(IdentityInterface(["zstat_file", "mask_file"]), "inputs")

    # Sample the zstat image to the surface
    hemisource = Node(IdentityInterface(["mni_hemi"]), "hemisource")
    hemisource.iterables = ("mni_hemi", ["lh", "rh"])

    zstatproj = Node(freesurfer.SampleToSurface(
        sampling_method=exp_info["sampling_method"],
        sampling_range=exp_info["sampling_range"],
        sampling_units=exp_info["sampling_units"],
        smooth_surf=exp_info["surf_smooth"],
        subject_id="fsaverage",
        mni152reg=True,
        target_subject="fsaverage"),
        "zstatproj")

    # Sample the mask to the surface
    maskproj = Node(freesurfer.SampleToSurface(
        sampling_range=exp_info["sampling_range"],
        sampling_units=exp_info["sampling_units"],
        subject_id="fsaverage",
        mni152reg=True,
        target_subject="fsaverage"),
        "maskproj")
    if exp_info["sampling_method"] == "point":
        maskproj.inputs.sampling_method = "point"
    else:
        maskproj.inputs.sampling_method = "max"

    outputnode = Node(IdentityInterface(["surf_zstat",
                                         "surf_mask"]), "outputs")

    # Define and connect the workflow
    proj = Workflow(name)
    proj.connect([
        (inputnode, zstatproj,
            [("zstat_file", "source_file")]),
        (inputnode, maskproj,
            [("mask_file", "source_file")]),
        (hemisource, zstatproj,
            [("mni_hemi", "hemi")]),
        (hemisource, maskproj,
            [("mni_hemi", "hemi")]),
        (zstatproj, outputnode,
            [("out_file", "surf_zstat")]),
        (maskproj, outputnode,
            [("out_file", "surf_mask")]),
        ])

    return proj
Developer: boydmeredith, Project: lyman, Lines: 55, Source: mixedfx.py

Example 5: test_serial_input

def test_serial_input(tmpdir):
    tmpdir.chdir()
    wd = os.getcwd()
    from nipype import MapNode, Function, Workflow

    def func1(in1):
        return in1
    n1 = MapNode(Function(input_names=['in1'],
                          output_names=['out'],
                          function=func1),
                 iterfield=['in1'],
                 name='n1')
    n1.inputs.in1 = [1, 2, 3]

    w1 = Workflow(name='test')
    w1.base_dir = wd
    w1.add_nodes([n1])
    # set local check
    w1.config['execution'] = {'stop_on_first_crash': 'true',
                              'local_hash_check': 'true',
                              'crashdump_dir': wd,
                              'poll_sleep_duration': 2}

    # test output of num_subnodes method when serial is default (False)
    assert n1.num_subnodes() == len(n1.inputs.in1)

    # test running the workflow on default conditions
    w1.run(plugin='MultiProc')

    # test output of num_subnodes method when serial is True
    n1._serial = True
    assert n1.num_subnodes() == 1

    # test running the workflow on serial conditions
    w1.run(plugin='MultiProc')
Developer: mick-d, Project: nipype, Lines: 35, Source: test_engine.py
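
The test above drives the workflow through the MultiProc plugin with its defaults. In everyday use the same run() call also accepts explicit plugin arguments, for example to cap the number of worker processes; a brief sketch building on the w1 object defined above:

# Run in-process with the default 'Linear' plugin ...
w1.run()

# ... or in parallel, limiting MultiProc to two worker processes
w1.run(plugin='MultiProc', plugin_args={'n_procs': 2})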

Example 6: create_bbregister_workflow

def create_bbregister_workflow(name="bbregister", contrast_type="t2", partial_brain=False, init_with="fsl"):
    """Find a linear transformation to align the EPI file with the anatomy."""
    in_fields = ["subject_id", "timeseries"]
    if partial_brain:
        in_fields.append("whole_brain_template")
    inputnode = Node(IdentityInterface(in_fields), "inputs")

    # Take the mean over time to get a target volume
    meanvol = MapNode(fsl.MeanImage(), "in_file", "meanvol")

    # Do a rough skullstrip using BET
    skullstrip = MapNode(fsl.BET(), "in_file", "bet")

    # Estimate the registration to Freesurfer conformed space
    func2anat = MapNode(
        fs.BBRegister(
            contrast_type=contrast_type,
            init=init_with,
            epi_mask=True,
            registered_file=True,
            out_reg_file="func2anat_tkreg.dat",
            out_fsl_file="func2anat_flirt.mat",
        ),
        "source_file",
        "func2anat",
    )

    # Make an image for quality control on the registration
    report = MapNode(CoregReport(), "in_file", "coreg_report")

    # Define the workflow outputs
    outputnode = Node(IdentityInterface(["tkreg_mat", "flirt_mat", "report"]), "outputs")

    bbregister = Workflow(name=name)

    # Connect the registration
    bbregister.connect(
        [
            (inputnode, func2anat, [("subject_id", "subject_id")]),
            (inputnode, report, [("subject_id", "subject_id")]),
            (inputnode, meanvol, [("timeseries", "in_file")]),
            (meanvol, skullstrip, [("out_file", "in_file")]),
            (skullstrip, func2anat, [("out_file", "source_file")]),
            (func2anat, report, [("registered_file", "in_file")]),
            (func2anat, outputnode, [("out_reg_file", "tkreg_mat")]),
            (func2anat, outputnode, [("out_fsl_file", "flirt_mat")]),
            (report, outputnode, [("out_file", "report")]),
        ]
    )

    # Possibly connect the full_fov image
    if partial_brain:
        bbregister.connect([(inputnode, func2anat, [("whole_brain_template", "intermediate_file")])])

    return bbregister
Developer: kellyhennigan, Project: lyman, Lines: 55, Source: preproc.py

Example 7: create_filtering_workflow

def create_filtering_workflow(name="filter",
                              hpf_cutoff=128,
                              TR=2,
                              output_name="timeseries"):
    """Scale and high-pass filter the timeseries."""
    inputnode = Node(IdentityInterface(["timeseries", "mask_file"]),
                     "inputs")

    # Grand-median scale within the brain mask
    scale = MapNode(ScaleTimeseries(statistic="median", target=10000),
                    ["in_file", "mask_file"],
                    "scale")

    # Gaussian running-line filter
    hpf_sigma = (hpf_cutoff / 2.0) / TR
    filter = MapNode(fsl.TemporalFilter(highpass_sigma=hpf_sigma),
                     "in_file",
                     "filter")

    # Possibly replace the mean
    # (In later versions of FSL, the highpass filter removes the
    # mean component. Put it back, but be flexible so this isn't
    # broken on older versions of FSL).
    replacemean = MapNode(ReplaceMean(output_name=output_name),
                          ["orig_file", "filtered_file"],
                          "replacemean")

    # Compute a final mean functional volume
    meanfunc = MapNode(fsl.MeanImage(out_file="mean_func.nii.gz"),
                       "in_file", "meanfunc")

    outputnode = Node(IdentityInterface(["timeseries",
                                         "mean_file"]), "outputs")

    filtering = Workflow(name)
    filtering.connect([
        (inputnode, scale,
            [("timeseries", "in_file"),
             ("mask_file", "mask_file")]),
        (scale, filter,
            [("out_file", "in_file")]),
        (scale, replacemean,
            [("out_file", "orig_file")]),
        (filter, replacemean,
            [("out_file", "filtered_file")]),
        (replacemean, meanfunc,
            [("out_file", "in_file")]),
        (replacemean, outputnode,
            [("out_file", "timeseries")]),
        (meanfunc, outputnode,
            [("out_file", "mean_file")]),
        ])

    return filtering
Developer: boydmeredith, Project: lyman, Lines: 54, Source: preproc.py
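
For reference, with the default hpf_cutoff=128 (seconds) and TR=2, the sigma passed to fsl.TemporalFilter works out to (128 / 2.0) / 2 = 32, i.e. a highpass Gaussian width of 32 volumes.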

Example 8: workflow_spec

def workflow_spec(name="{workflow_name}", exp_info=None):
    """Return a Nipype workflow for MR processing.

    Parameters
    ----------
    name : string
        workflow object name
    exp_info : dict
        dictionary with experimental information
    """
    workflow = Workflow(name)

    if exp_info is None:
        exp_info = fitz.default_experiment_parameters()

    # Define the inputs for the preprocessing workflow
    in_fields = [""]  # "timeseries"]

    inputnode = Node(IdentityInterface(in_fields), "inputs")

    """
    # Define Actual Nipype Nodes, Workflows, etc.
    # e.g. The start of an example SPM preproc workflow
    # --------------------------------------------------

    slicetiming = pe.Node(interface=spm.SliceTiming(), name="slicetiming")
    slicetiming.inputs.ref_slice = 1
    realign = pe.Node(interface=spm.Realign(), name="realign")
    realign.inputs.register_to_mean = True
    """
    workflow.connect([
        """
        (inputnode, slicetiming,
            [('timeseries', 'in_files')]),
        (slicetiming, realign,
            [('timecorrected_files', 'in_files')]),
        """
    ])

    output_fields = [""]  # realigned_files", "realignment_parameters"]

    outputnode = Node(IdentityInterface(output_fields), "outputs")

    workflow.connect([
        """
        (realign, outputnode,
            [("realigned_files", "realigned_files"),
             ("realignment_parameters", "realignment_parameters")]),
        """
    ])

    # Return the workflow itself and input and output nodes.
    return workflow, inputnode, outputnode
Developer: kastman, Project: fitz, Lines: 53, Source: workflow_template.py
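
For comparison, a filled-in version of this fitz template, using the SPM nodes from the commented example, might look roughly like the following. This is only a sketch: the function name is hypothetical, it relies on nipype's SPM interfaces, and the mandatory slice-timing parameters would still need to be supplied before running.

from nipype.pipeline.engine import Node, Workflow
from nipype.interfaces.utility import IdentityInterface
from nipype.interfaces import spm


def spm_preproc_spec(name="spm_preproc", exp_info=None):
    """Hypothetical filled-in version of the workflow template above."""
    workflow = Workflow(name)

    inputnode = Node(IdentityInterface(["timeseries"]), "inputs")

    # Slice-timing correction followed by motion correction, as in the
    # commented-out SPM example inside the template
    # (num_slices, time_repetition, etc. still need to be set before running)
    slicetiming = Node(spm.SliceTiming(ref_slice=1), "slicetiming")
    realign = Node(spm.Realign(register_to_mean=True), "realign")

    outputnode = Node(IdentityInterface(["realigned_files",
                                         "realignment_parameters"]),
                      "outputs")

    workflow.connect([
        (inputnode, slicetiming, [("timeseries", "in_files")]),
        (slicetiming, realign, [("timecorrected_files", "in_files")]),
        (realign, outputnode,
            [("realigned_files", "realigned_files"),
             ("realignment_parameters", "realignment_parameters")]),
    ])

    return workflow, inputnode, outputnode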

Example 9: create_reg_workflow

def create_reg_workflow(name="reg", space="mni",
                        regtype="model", method="fsl",
                        residual=False, cross_exp=False):
    """Flexibly register files into one of several common spaces."""

    # Define the input fields flexibly
    if regtype == "model":
        fields = ["copes", "varcopes", "sumsquares"]
    elif regtype == "timeseries":
        fields = ["timeseries"]

    if cross_exp:
        fields.extend(["first_rigid"])

    fields.extend(["means", "masks", "rigids"])

    if space == "mni":
        fields.extend(["affine", "warpfield"])
    else:
        fields.extend(["tkreg_rigid"])

    inputnode = Node(IdentityInterface(fields), "inputnode")

    # Grab the correct interface class dynamically
    interface_name = "{}{}Registration".format(space.upper(),
                                               regtype.capitalize())
    reg_interface = globals()[interface_name]
    transform = Node(reg_interface(method=method), "transform")

    # Sanity check on inputs
    if regtype == "model" and residual:
        raise ValueError("residual and regtype=model does not make sense")

    # Set the kind of timeseries
    if residual:
        transform.inputs.residual = True

    outputnode = Node(IdentityInterface(["out_files"]), "outputnode")

    # Define the workflow
    regflow = Workflow(name=name)

    # Connect the inputs programmatically
    for field in fields:
        regflow.connect(inputnode, field, transform, field)

    # The transform node only ever has one output
    regflow.connect(transform, "out_files", outputnode, "out_files")

    return regflow, inputnode, outputnode
Developer: kellyhennigan, Project: lyman, Lines: 50, Source: registration.py

Example 10: test_execute

    def test_execute(self, lyman_dir, execdir):

        info = frontend.info(lyman_dir=lyman_dir)

        def f(x):
            return x ** 2
        assert f(2) == 4

        n1 = Node(Function("x", "y", f), "n1")
        n2 = Node(Function("x", "y", f), "n2")

        wf = Workflow("test", base_dir=info.cache_dir)
        wf.connect(n1, "y", n2, "x")
        wf.inputs.n1.x = 2

        cache_dir = execdir.join("cache").join("test")

        class args(object):
            graph = False
            n_procs = 1
            debug = False
            clear_cache = True
            execute = True

        frontend.execute(wf, args, info)
        assert not cache_dir.exists()

        args.debug = True
        frontend.execute(wf, args, info)
        assert cache_dir.exists()

        args.debug = False
        info.remove_cache = False
        frontend.execute(wf, args, info)
        assert cache_dir.exists()

        args.execute = False
        res = frontend.execute(wf, args, info)
        assert res is None

        args.execute = True
        fname = str(execdir.join("graph").join("workflow.dot"))
        args.graph = fname
        res = frontend.execute(wf, args, info)
        assert res == fname[:-4] + ".svg"

        args.graph = True
        args.stage = "preproc"
        res = frontend.execute(wf, args, info)
        assert res == cache_dir.join("preproc.svg")
Developer: mwaskom, Project: lyman, Lines: 50, Source: test_frontend.py

Example 11: create_unwarp_workflow

def create_unwarp_workflow(name="unwarp", fieldmap_pe=("y", "y-")):
    """Unwarp functional timeseries using reverse phase-blipped images."""
    inputnode = Node(IdentityInterface(["timeseries", "fieldmap"]), "inputs")

    # Calculate the shift field
    # Note that setting readout_times to 1 will give a fine
    # map of the field, but the units will be off
    # Since we don't write out the map of the field itself, it does
    # not seem worth it to add another parameter for the readout times.
    # (It does require that they are the same, but when wouldn't they be?)
    topup = MapNode(
        fsl.TOPUP(encoding_direction=fieldmap_pe, readout_times=[1] * len(fieldmap_pe)), ["in_file"], "topup"
    )

    # Unwarp the timeseries
    applytopup = MapNode(
        fsl.ApplyTOPUP(method="jac", in_index=[1]),
        ["in_files", "in_topup_fieldcoef", "in_topup_movpar", "encoding_file"],
        "applytopup",
    )

    # Make a figure summarize the unwarping
    report = MapNode(UnwarpReport(), ["orig_file", "corrected_file"], "unwarp_report")

    # Define the outputs
    outputnode = Node(IdentityInterface(["timeseries", "report"]), "outputs")

    # Define and connect the workflow
    unwarp = Workflow(name)
    unwarp.connect(
        [
            (inputnode, topup, [("fieldmap", "in_file")]),
            (inputnode, applytopup, [("timeseries", "in_files")]),
            (
                topup,
                applytopup,
                [
                    ("out_fieldcoef", "in_topup_fieldcoef"),
                    ("out_movpar", "in_topup_movpar"),
                    ("out_enc_file", "encoding_file"),
                ],
            ),
            (inputnode, report, [("fieldmap", "orig_file")]),
            (topup, report, [("out_corrected", "corrected_file")]),
            (applytopup, outputnode, [("out_corrected", "timeseries")]),
            (report, outputnode, [("out_file", "report")]),
        ]
    )

    return unwarp
Developer: kellyhennigan, Project: lyman, Lines: 50, Source: preproc.py

Example 12: make_simple_workflow

def make_simple_workflow():

    wf = Workflow(name="test")

    node1 = Node(IdentityInterface(fields=["foo"]), name="node1")
    node2 = MapNode(IdentityInterface(fields=["foo"]),
                    name="node2", iterfield=["foo"])
    node3 = Node(IdentityInterface(fields=["foo"]), name="node3")

    wf.connect([
        (node1, node2, [("foo", "foo")]),
        (node2, node3, [("foo", "foo")]),
        ])

    return wf, node1, node2, node3
Developer: boydmeredith, Project: lyman, Lines: 15, Source: test_graphutils.py
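
As a quick illustration of how this fixture could be exercised, the first identity node can be fed a list so that the MapNode in the middle fans out over its elements. A hedged sketch (the base_dir path is a hypothetical scratch location, not part of the original test module):

wf, node1, node2, node3 = make_simple_workflow()

# Feeding a list into node1 makes the MapNode (node2) expand per element
node1.inputs.foo = ["a", "b", "c"]

wf.base_dir = "/tmp/simple_wf"  # hypothetical scratch directory
wf.run()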

Example 13: test_serial_input

def test_serial_input():
    cwd = os.getcwd()
    wd = mkdtemp()
    os.chdir(wd)
    from nipype import MapNode, Function, Workflow

    def func1(in1):
        return in1
    n1 = MapNode(Function(input_names=['in1'],
                          output_names=['out'],
                          function=func1),
                 iterfield=['in1'],
                 name='n1')
    n1.inputs.in1 = [1, 2, 3]

    w1 = Workflow(name='test')
    w1.base_dir = wd
    w1.add_nodes([n1])
    # set local check
    w1.config['execution'] = {'stop_on_first_crash': 'true',
                              'local_hash_check': 'true',
                              'crashdump_dir': wd,
                              'poll_sleep_duration': 2}

    # test output of num_subnodes method when serial is default (False)
    yield assert_equal, n1.num_subnodes(), len(n1.inputs.in1)

    # test running the workflow on default conditions
    error_raised = False
    try:
        w1.run(plugin='MultiProc')
    except Exception as e:
        from nipype.pipeline.engine.base import logger
        logger.info('Exception: %s' % str(e))
        error_raised = True
    yield assert_false, error_raised

    # test output of num_subnodes method when serial is True
    n1._serial = True
    yield assert_equal, n1.num_subnodes(), 1

    # test running the workflow on serial conditions
    error_raised = False
    try:
        w1.run(plugin='MultiProc')
    except Exception as e:
        from nipype.pipeline.engine.base import logger
        logger.info('Exception: %s' % str(e))
        error_raised = True
    yield assert_false, error_raised

    os.chdir(cwd)
    rmtree(wd)
Developer: jvarada, Project: nipype, Lines: 53, Source: test_engine.py

Example 14: create_realignment_workflow

def create_realignment_workflow(name="realignment", temporal_interp=True, TR=2, slice_order="up", interleaved=True):
    """Motion and slice-time correct the timeseries and summarize."""
    inputnode = Node(IdentityInterface(["timeseries"]), "inputs")

    # Get the middle volume of each run for motion correction
    extractref = MapNode(ExtractRealignmentTarget(), "in_file", "extractref")

    # Motion correct to middle volume of each run
    mcflirt = MapNode(
        fsl.MCFLIRT(cost="normcorr", interpolation="spline", save_mats=True, save_rms=True, save_plots=True),
        ["in_file", "ref_file"],
        "mcflirt",
    )

    # Optionally temporally interpolate to correct for slice time differences
    if temporal_interp:
        slicetime = MapNode(fsl.SliceTimer(time_repetition=TR), "in_file", "slicetime")

        if slice_order == "down":
            slicetime.inputs.index_dir = True
        elif slice_order != "up":
            raise ValueError("slice_order must be 'up' or 'down'")

        if interleaved:
            slicetime.inputs.interleaved = True

    # Generate a report on the motion correction
    mcreport = MapNode(RealignmentReport(), ["target_file", "realign_params", "displace_params"], "mcreport")

    # Define the outputs
    outputnode = Node(IdentityInterface(["timeseries", "example_func", "report", "motion_file"]), "outputs")

    # Define and connect the sub workflow
    realignment = Workflow(name)

    realignment.connect(
        [
            (inputnode, extractref, [("timeseries", "in_file")]),
            (inputnode, mcflirt, [("timeseries", "in_file")]),
            (extractref, mcflirt, [("out_file", "ref_file")]),
            (extractref, mcreport, [("out_file", "target_file")]),
            (mcflirt, mcreport, [("par_file", "realign_params"), ("rms_files", "displace_params")]),
            (extractref, outputnode, [("out_file", "example_func")]),
            (mcreport, outputnode, [("realign_report", "report"), ("motion_file", "motion_file")]),
        ]
    )

    if temporal_interp:
        realignment.connect(
            [
                (mcflirt, slicetime, [("out_file", "in_file")]),
                (slicetime, outputnode, [("slice_time_corrected_file", "timeseries")]),
            ]
        )
    else:
        realignment.connect([(mcflirt, outputnode, [("out_file", "timeseries")])])

    return realignment
Developer: kellyhennigan, Project: lyman, Lines: 58, Source: preproc.py

Example 15: create_filtering_workflow

def create_filtering_workflow(name="filter",
                              hpf_cutoff=128,
                              TR=2,
                              output_name="timeseries"):
    """Scale and high-pass filter the timeseries."""
    inputnode = Node(IdentityInterface(["timeseries", "mask_file"]),
                     "inputs")

    # Grand-median scale within the brain mask
    scale = MapNode(Function(["in_file",
                              "mask_file"],
                             ["out_file"],
                             scale_timeseries,
                             imports),
                    ["in_file", "mask_file"],
                    "scale")

    # Gaussian running-line filter
    hpf_sigma = (hpf_cutoff / 2.0) / TR
    filter = MapNode(fsl.TemporalFilter(highpass_sigma=hpf_sigma,
                                        out_file=output_name + ".nii.gz"),
                     "in_file",
                     "filter")

    outputnode = Node(IdentityInterface(["timeseries"]), "outputs")

    filtering = Workflow(name)
    filtering.connect([
        (inputnode, scale,
            [("timeseries", "in_file"),
             ("mask_file", "mask_file")]),
        (scale, filter,
            [("out_file", "in_file")]),
        (filter, outputnode,
            [("out_file", "timeseries")]),
        ])

    return filtering
Developer: toddt, Project: lyman, Lines: 38, Source: preproc.py


Note: The nipype.Workflow class examples in this article were compiled by 纯净天空 from open-source code and documentation platforms such as Github/MSDocs. The snippets were selected from open-source projects contributed by their respective developers; copyright remains with the original authors, and redistribution or use should follow the license of the corresponding project. Do not reproduce without permission.