

Python vtks.read_vtk Function Code Examples

This article collects typical usage examples of the Python function mindboggle.mio.vtks.read_vtk. If you are wondering what read_vtk does, how to use it, or are looking for examples of its use, the curated code examples below may help.


Fifteen code examples of the read_vtk function are shown below, sorted by popularity by default. You can upvote the examples you like or find useful; your feedback helps the system recommend better Python code examples.
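
All fifteen examples share the same basic call pattern. As a minimal, hypothetical sketch (the file name is a placeholder; the eight-element return tuple follows the unpacking used in most of the examples below):

from mindboggle.mio.vtks import read_vtk

# 'surface.vtk' is a placeholder path; read_vtk returns an 8-element tuple,
# which most of the examples on this page unpack in this order:
points, indices, lines, faces, scalars, scalar_names, npoints, input_vtk = \
    read_vtk('surface.vtk')
print('{0} points, {1} faces, scalar names: {2}'.format(
    npoints, len(faces), scalar_names))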

Example 1: main

def main():
    zernike_fn = zernike

    parser = argparse.ArgumentParser()
    parser.add_argument('--debug', nargs='?', default=None, const='debug', choices=['debug', 'info', 'warning', 'error', 'critical']) 
    parser.add_argument('vtk_file', nargs='?', default=None)
    parser.add_argument('-o', '--order', type=int, default=3)
    parser.add_argument('-p', '--profile', nargs='?', default=None, const='stdout')
    parser.add_argument('-t', '--timecall', default=False, action='store_true')
    parser.add_argument('-v', '--validate', default=False, action='store_true')
    ns = parser.parse_args()

    if ns.debug is not None:
        logging.basicConfig(level=getattr(logging, ns.debug.upper()))

#    if ns.profile is not None:
#        filename = ns.profile
#        if ns.profile == 'stdout':
#            filename = None
#        zernike_fn = profilehooks.profile(zernike_fn, immediate=False, filename=filename)

#    if ns.timecall:
#        zernike_fn = profilehooks.timecall(zernike_fn)

    if ns.vtk_file is not None:
        points, indices, lines, faces, depths, scalar_names, npoints, \
            input_vtk = read_vtk(ns.vtk_file)
        print('{0} {1}'.format(len(faces), len(points)))
        X = zernike_fn(points, faces, order=ns.order, scale_input=True)
        if ns.validate:
            Y = zernike_fn(points, faces, order=ns.order, scale_input=True, pl_cls=MultiprocPipeline)
            assert np.allclose(X, Y)
    else:
        example1()
Contributor: akeshavan | Project: mindboggle | Lines of code: 34 | Source file: __main__.py

Example 2: propagate_fundus_lines

def propagate_fundus_lines(surf_file, fundus_lines_file, thickness_file):
    """Propagate fundus lines to tile the surface.

    Parameters
    ----------
    surf_file: file containing the surface geometry in vtk format
    fundus_lines_file: file containing scalars representing fundus lines
    thickness_file: file containing cortical thickness scalar data
    (for masking out the medial wall only)

    Returns
    -------
    scalars indicating whether each vertex is part of the closed
    fundus lines or not
    """
    from mindboggle.mio.vtks import read_vtk, read_scalars

    points, indices, lines, faces, fundus_lines, scalar_names, num_points, \
        input_vtk = read_vtk(surf_file, return_first=True, return_array=True)

    fundus_lines, _ = read_scalars(fundus_lines_file)
    fundus_line_indices = [i for i, x in enumerate(fundus_lines) if x > 0.5]

    thickness, _ = read_scalars(thickness_file,
                             return_first=True, return_array=True)

    return propagate_fundus_lines(
        points, faces, fundus_line_indices, thickness)
Contributor: akeshavan | Project: mindboggle | Lines of code: 28 | Source file: propagate_fundus_lines.py
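
A hypothetical usage sketch for this example (the file names are placeholders; rewrite_scalars is called with the same argument pattern as in Examples 7 and 13 below):

from mindboggle.mio.vtks import rewrite_scalars

# Placeholder file names; the returned scalars mark vertices on the closed
# fundus lines and can be written back next to the surface geometry:
closed = propagate_fundus_lines('lh.pial.vtk', 'lh.fundus_lines.vtk',
                                'lh.thickness.vtk')
rewrite_scalars('lh.pial.vtk', 'closed_fundus_lines.vtk', closed,
                'closed_fundus_lines', closed)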

Example 3: spectrum_from_file

def spectrum_from_file(vtk_file, spectrum_size=10, exclude_labels=[-1],
                       normalization=None, area_file='', verbose=False):
    """
    Compute Laplace-Beltrami spectrum of a 3D shape in a VTK file.

    Parameters
    ----------
    vtk_file : string
        the input vtk file
    spectrum_size : integer
        number of eigenvalues to be computed (the length of the spectrum)
    exclude_labels : list of integers
        labels to be excluded
    normalization : string
        the method used to normalize eigenvalues ('area' or None)
        if "area", use area of the 2D structure as in Reuter et al. 2006
    area_file :  string
        name of VTK file with surface area scalar values
    verbose : bool
        print statements?

    Returns
    -------
    spectrum : list of floats
        first spectrum_size of Laplace-Beltrami spectrum

    Examples
    --------
    >>> # Spectrum for entire left hemisphere of Twins-2-1:
    >>> import numpy as np
    >>> from mindboggle.shapes.laplace_beltrami import spectrum_from_file
    >>> from mindboggle.shapes.laplace_beltrami import spectrum_per_label
    >>> from mindboggle.mio.fetch_data import prep_tests
    >>> urls, fetch_data = prep_tests()
    >>> vtk_file = fetch_data(urls['left_freesurfer_labels'], '', '.vtk')
    >>> spectrum = spectrum_from_file(vtk_file, spectrum_size=6)
    >>> print(np.array_str(np.array(spectrum[1::]),
    ...                    precision=5, suppress_small=True))
    [ 0.00013  0.00027  0.00032  0.00047  0.00058]

    """
    from mindboggle.mio.vtks import read_vtk, read_scalars
    from mindboggle.shapes.laplace_beltrami import spectrum_of_largest

    points, indices, lines, faces, scalars, scalar_names, npoints, \
            input_vtk = read_vtk(vtk_file)

    # Area file:
    if area_file:
        areas, u1 = read_scalars(area_file)
    else:
        areas = None

    spectrum = spectrum_of_largest(points, faces, spectrum_size,
                                   exclude_labels, normalization, areas,
                                   verbose)

    return spectrum
Contributor: liob | Project: mindboggle | Lines of code: 58 | Source file: laplace_beltrami.py

Example 4: decimate_file

def decimate_file(input_vtk, reduction=0.5, smooth_steps=100,
                  save_vtk=True, output_vtk=''):
    """
    Decimate vtk triangular mesh file with vtk.vtkDecimatePro.

    Parameters
    ----------
    input_vtk : string
        input vtk file with triangular surface mesh
    reduction : float
        fraction of mesh faces to remove
    smooth_steps : integer
        number of smoothing steps after decimation
    save_vtk : Boolean
        output decimated vtk file?
    output_vtk : string
        output decimated vtk file name

    Returns
    -------
    output_vtk : string
        output decimated vtk file

    Examples
    --------
    >>> import os
    >>> from mindboggle.guts.mesh import decimate_file
    >>> from mindboggle.mio.plots import plot_surfaces
    >>> path = os.environ['MINDBOGGLE_DATA']
    >>> input_vtk = os.path.join(path, 'arno', 'labels', 'label22.vtk')
    >>> #input_vtk='/drop/MB/data/arno/labels/lh.labels.DKT31.manual.vtk'
    >>> save_vtk = True
    >>> output_vtk = ''
    >>> reduction = 0.5
    >>> smooth_steps = 0
    >>> decimate_file(input_vtk, reduction, smooth_steps, save_vtk, output_vtk)
    >>> # View:
    >>> plot_surfaces('decimated.vtk') # doctest: +SKIP

    """
    from mindboggle.mio.vtks import read_vtk
    from mindboggle.guts.mesh import decimate

    if not save_vtk:
        raise NotImplementedError()

    # Read VTK surface mesh file:
    points, indices, lines, faces, scalars, scalar_names, npoints, \
            input_vtk = read_vtk(input_vtk)

    # Decimate vtk triangular mesh with vtk.vtkDecimatePro
    points, faces, scalars, output_vtk = decimate(points, faces, reduction,
                                                  smooth_steps, scalars,
                                                  save_vtk, output_vtk)
    return output_vtk
Contributor: nicholsn | Project: mindboggle | Lines of code: 55 | Source file: mesh.py

Example 5: downsample_vtk

def downsample_vtk(vtk_file, sample_rate):
    """Sample rate: number between 0 and 1."""
    from mindboggle.mio.vtks import read_vtk, write_vtk
    from mindboggle.guts.mesh import decimate_file

    if (sample_rate < 0 or sample_rate > 1):
        raise ValueError('0 <= sample_rate <= 1; you input %f' % sample_rate)

    # Downsample
    decimate_file(vtk_file, reduction=1 - sample_rate, output_vtk=vtk_file, save_vtk=True, smooth_steps=0)

    # Hack: re-read the file and re-save it with mindboggle's write_vtk
    vtk_data = read_vtk(vtk_file)
    write_vtk(vtk_file, *vtk_data[:-2])
Contributor: gitter-badger | Project: roygbiv | Lines of code: 14 | Source file: __init__.py
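
A hypothetical usage sketch (the file name is a placeholder):

# Keep roughly 30% of the mesh; the file is overwritten in place, because
# downsample_vtk passes vtk_file as both input and output to decimate_file
# and then re-reads and re-writes it with write_vtk.
downsample_vtk('lh.pial.vtk', sample_rate=0.3)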

Example 6: extract_folds

def extract_folds(depth_file, min_vertices=10000, min_fold_size=50, 
                  do_fill_holes=False, min_hole_depth=0.001, 
                  save_file=False):
    """
    Use depth to extract folds from a triangular surface mesh.

    Steps ::
        1. Compute histogram of depth measures.
        2. Define a depth threshold and find the deepest vertices.
        3. Segment deep vertices as an initial set of folds.
        4. Remove small folds.
        5. Find and fill holes in the folds (optional).
        6. Renumber folds.

    Step 2 ::
        To extract an initial set of deep vertices from the surface mesh,
        we anticipate that there will be a rapidly decreasing distribution
        of low depth values (on the outer surface) with a long tail
        of higher depth values (in the folds), so we smooth the histogram's
        bin values, convolve to compute slopes, and find the depth value
        for the first bin with slope = 0. This is our threshold.

    Step 5 ::
        The folds could have holes in areas shallower than the depth threshold.
        Calling fill_holes() could accidentally include very shallow areas
        (in an annulus-shaped fold, for example), so we include the argument
        exclude_range to check for any values from zero to min_hole_depth;
        holes are not filled if they contain values within this range.

    Parameters
    ----------
    depth_file : string
        surface mesh file in VTK format with faces and depth scalar values
    min_vertices : integer
        minimum number of vertices
    min_fold_size : integer
        minimum fold size (number of vertices)
    do_fill_holes : Boolean
        fill holes in the folds?
    min_hole_depth : float
        largest non-zero depth value that will stop a hole from being filled
    save_file : Boolean
        save output VTK file?

    Returns
    -------
    folds : list of integers
        fold numbers for all vertices (-1 for non-fold vertices)
    n_folds :  int
        number of folds
    depth_threshold :  float
        threshold defining the minimum depth for vertices to be in a fold
    bins :  list of integers
        histogram bins: each is the number of vertices within a range of depth values
    bin_edges :  list of floats
        histogram bin edge values defining the bin ranges of depth values
    folds_file : string (if save_file)
        name of output VTK file with fold IDs (-1 for non-fold vertices)

    Examples
    --------
    >>> import os
    >>> import numpy as np
    >>> import pylab
    >>> from scipy.ndimage.filters import gaussian_filter1d
    >>> from mindboggle.mio.vtks import read_scalars
    >>> from mindboggle.guts.mesh import find_neighbors_from_file
    >>> from mindboggle.mio.plots import plot_surfaces
    >>> from mindboggle.features.folds import extract_folds
    >>> path = os.environ['MINDBOGGLE_DATA']
    >>> depth_file = 'travel_depth.vtk' #os.path.join(path, 'arno', 'shapes', 'lh.pial.travel_depth.vtk')
    >>> neighbor_lists = find_neighbors_from_file(depth_file)
    >>> min_vertices = 10000
    >>> min_fold_size = 50
    >>> do_fill_holes = False #True
    >>> min_hole_depth = 0.001
    >>> save_file = True
    >>> #
    >>> folds, n_folds, thr, bins, bin_edges, folds_file = extract_folds(depth_file,
    >>>     min_vertices, min_fold_size, do_fill_holes, min_hole_depth, save_file)
    >>> #
    >>> # View folds:
    >>> plot_surfaces('folds.vtk')
    >>> # Plot histogram and depth threshold:
    >>> depths, name = read_scalars(depth_file)
    >>> nbins = np.round(len(depths) / 100.0)
    >>> a,b,c = pylab.hist(depths, bins=nbins)
    >>> pylab.plot(thr*np.ones((100,1)), np.linspace(0, max(bins), 100), 'r.')
    >>> pylab.show()
    >>> # Plot smoothed histogram:
    >>> bins_smooth = gaussian_filter1d(bins.tolist(), 5)
    >>> pylab.plot(range(len(bins)), bins, '.', range(len(bins)), bins_smooth,'-')
    >>> pylab.show()

    """
    import os
    import sys
    import numpy as np
    from time import time
    from scipy.ndimage.filters import gaussian_filter1d
    from mindboggle.mio.vtks import rewrite_scalars, read_vtk
    from mindboggle.guts.mesh import find_neighbors
#......... part of the code omitted here .........
Contributor: thomasyu888 | Project: mindboggle | Lines of code: 101 | Source file: folds.py

Example 7: extract_sulci


#......... part of the code omitted here .........

    >>> folds_or_file, name = read_scalars(folds_file, True, True)
    >>> output_file = 'extract_sulci_fold7_2sulci.vtk'
    >>> # Limit number of folds to speed up the test:
    >>> limit_folds = True
    >>> if limit_folds:
    ...     fold_numbers = [7] #[4, 6]
    ...     i0 = [i for i,x in enumerate(folds_or_file) if x not in fold_numbers]
    ...     folds_or_file[i0] = background_value
    >>> sulci, n_sulci, sulci_file = extract_sulci(labels_file, folds_or_file,
    ...     hemi, min_boundary, sulcus_names, save_file, output_file,
    ...     background_value, verbose)
    >>> n_sulci  # 23 # (if not limit_folds)
    2
    >>> lens = [len([x for x in sulci if x==y])
    ...         for y in np.unique(sulci) if y != -1]
    >>> lens[0:10]  # [6358, 3288, 7612, 5205, 4414, 6251, 3493, 2566, 4436, 739] # (if not limit_folds)
    [369, 93]

    View result without background (skip test):

    >>> from mindboggle.mio.plots import plot_surfaces # doctest: +SKIP
    >>> from mindboggle.mio.vtks import rewrite_scalars # doctest: +SKIP
    >>> output = 'extract_sulci_fold7_2sulci_no_background.vtk'
    >>> rewrite_scalars(sulci_file, output, sulci,
    ...                 'sulci', sulci) # doctest: +SKIP
    >>> plot_surfaces(output) # doctest: +SKIP

    """
    import os
    from time import time
    import numpy as np

    from mindboggle.mio.vtks import read_scalars, read_vtk, rewrite_scalars
    from mindboggle.guts.mesh import find_neighbors
    from mindboggle.guts.segment import extract_borders, propagate, segment_regions
    from mindboggle.mio.labels import DKTprotocol

    # Load fold numbers if folds_or_file is a string:
    if isinstance(folds_or_file, str):
        folds, name = read_scalars(folds_or_file)
    elif isinstance(folds_or_file, list):
        folds = folds_or_file
    elif isinstance(folds_or_file, np.ndarray):
        folds = folds_or_file.tolist()

    dkt = DKTprotocol()

    if hemi == "lh":
        pair_lists = dkt.left_sulcus_label_pair_lists
    elif hemi == "rh":
        pair_lists = dkt.right_sulcus_label_pair_lists
    else:
        raise IOError("Warning: hemisphere not properly specified ('lh' or 'rh').")

    # Load points, faces, and neighbors:
    points, indices, lines, faces, labels, scalar_names, npoints, input_vtk = read_vtk(labels_file)
    neighbor_lists = find_neighbors(faces, npoints)

    # Array of sulcus IDs for fold vertices, initialized as -1.
    # Since we do not touch gyral vertices and vertices whose labels
    # are not in the label list, or vertices having only one label,
    # their sulcus IDs will remain -1:
    sulci = background_value * np.ones(npoints)

    # ------------------------------------------------------------------------
Contributor: nipy | Project: mindboggle | Lines of code: 67 | Source file: sulci.py

Example 8: spectrum_per_label

def spectrum_per_label(vtk_file, spectrum_size=10, exclude_labels=[-1],
                       normalization='area', area_file='',
                       largest_segment=True):
    """
    Compute Laplace-Beltrami spectrum per labeled region in a file.

    Parameters
    ----------
    vtk_file : string
        name of VTK surface mesh file containing index scalars (labels)
    spectrum_size : integer
        number of eigenvalues to be computed (the length of the spectrum)
    exclude_labels : list of integers
        labels to be excluded
    normalization : string
        the method used to normalize eigenvalues ('area' or None)
        if "area", use area of the 2D structure as in Reuter et al. 2006
    area_file :  string (optional)
        name of VTK file with surface area scalar values
    largest_segment :  Boolean
        compute spectrum only for largest segment with a given label?

    Returns
    -------
    spectrum_lists : list of lists
        first eigenvalues for each label's Laplace-Beltrami spectrum
    label_list : list of integers
        list of unique labels for which spectra are obtained

    Examples
    --------
    >>> # Uncomment "if label==22:" below to run example:
    >>> # Spectrum for Twins-2-1 left postcentral (22) pial surface:
    >>> import os
    >>> from mindboggle.shapes.laplace_beltrami import spectrum_per_label
    >>> path = os.environ['MINDBOGGLE_DATA']
    >>> vtk_file = os.path.join(path, 'arno', 'labels', 'lh.labels.DKT31.manual.vtk')
    >>> area_file = os.path.join(path, 'arno', 'shapes', 'lh.pial.area.vtk')
    >>> spectrum_size = 6
    >>> exclude_labels = [0]  #[-1]
    >>> largest_segment = True
    >>> spectrum_per_label(vtk_file, spectrum_size, exclude_labels, None,
    >>>                    area_file, largest_segment)
    ([[6.3469513010430304e-18,
       0.0005178862383467463,
       0.0017434911095630772,
       0.003667561767487686,
       0.005429017880363784,
       0.006309346984678924]],
     [22])

    """
    from mindboggle.mio.vtks import read_vtk, read_scalars
    from mindboggle.guts.mesh import remove_faces, reindex_faces_points
    from mindboggle.shapes.laplace_beltrami import fem_laplacian,\
        spectrum_of_largest

    # Read VTK surface mesh file:
    faces, u1, u2, points, u4, labels, u5, u6 = read_vtk(vtk_file)

    # Area file:
    if area_file:
        areas, u1 = read_scalars(area_file)
    else:
        areas = None

    # Loop through labeled regions:
    ulabels = []
    [ulabels.append(int(x)) for x in labels if x not in ulabels
     if x not in exclude_labels]
    label_list = []
    spectrum_lists = []
    for label in ulabels:
      #if label == 22:
      #  print("DEBUG: COMPUTE FOR ONLY ONE LABEL")

        # Determine the indices per label:
        Ilabel = [i for i,x in enumerate(labels) if x == label]
        print('{0} vertices for label {1}'.format(len(Ilabel), label))

        # Remove background faces:
        pick_faces = remove_faces(faces, Ilabel)
        pick_faces, pick_points, o1 = reindex_faces_points(pick_faces, points)

        # Compute Laplace-Beltrami spectrum for the label:
        if largest_segment:
            exclude_labels_inner = [-1]
            spectrum = spectrum_of_largest(pick_points, pick_faces,
                                           spectrum_size,
                                           exclude_labels_inner,
                                           normalization, areas)
        else:
            spectrum = fem_laplacian(pick_points, pick_faces,
                                     spectrum_size, normalization)

        # Append to a list of lists of spectra:
        spectrum_lists.append(spectrum)
        label_list.append(label)

    return spectrum_lists, label_list
Contributor: thomasyu888 | Project: mindboggle | Lines of code: 100 | Source file: laplace_beltrami.py

Example 9: zernike_moments_per_label

def zernike_moments_per_label(vtk_file, order=10, exclude_labels=[-1],
                              scale_input=True,
                              decimate_fraction=0, decimate_smooth=25):
    """
    Compute the Zernike moments per labeled region in a file.

    Optionally decimate the input mesh.

    Parameters
    ----------
    vtk_file : string
        name of VTK surface mesh file containing index scalars (labels)
    order : integer
        number of moments to compute
    exclude_labels : list of integers
        labels to be excluded
    scale_input : Boolean
        translate and scale each object so it is bounded by a unit sphere?
        (this is the expected input to zernike_moments())
    decimate_fraction : float
        fraction of mesh faces to remove for decimation (1 for no decimation)
    decimate_smooth : integer
        number of smoothing steps for decimation

    Returns
    -------
    descriptors_lists : list of lists of floats
        Zernike descriptors per label
    label_list : list of integers
        list of unique labels for which moments are computed

    Examples
    --------
    >>> # Uncomment "if label==22:" below to run example:
    >>> # Twins-2-1 left postcentral (22) pial surface:
    >>> import os
    >>> from mindboggle.shapes.zernike.zernike import zernike_moments_per_label
    >>> path = os.path.join(os.environ['HOME'], 'mindboggled', 'OASIS-TRT-20-1')
    >>> vtk_file = os.path.join(path, 'labels', 'left_surface', 'relabeled_classifier.vtk')
    >>> order = 3
    >>> exclude_labels = [-1, 0]
    >>> scale_input = True
    >>> zernike_moments_per_label(vtk_file, order, exclude_labels, scale_input)
    ([[0.00528486237819844,
       0.009571754617699853,
       0.0033489494903015944,
       0.00875603468268444,
       0.0015879536633349918,
       0.0008080165707033097]],
     [22])


    ([[0.0018758013185778298,
       0.001757973693050823,
       0.002352403177686726,
       0.0032281044369938286,
       0.002215900343702539,
       0.0019646380916703856]],
     [14.0])
    Arthur Mikhno's result:
    1.0e+07 *
    0.0000
    0.0179
    0.0008
    4.2547
    0.0534
    4.4043



    """
    import numpy as np
    from mindboggle.mio.vtks import read_vtk
    from mindboggle.guts.mesh import remove_faces
    from mindboggle.shapes.zernike.zernike import zernike_moments

    min_points_faces = 4

    #-------------------------------------------------------------------------
    # Read VTK surface mesh file:
    #-------------------------------------------------------------------------
    faces, u1,u2, points, u3, labels, u4,u5 = read_vtk(vtk_file)

    #-------------------------------------------------------------------------
    # Loop through labeled regions:
    #-------------------------------------------------------------------------
    ulabels = [x for x in np.unique(labels) if x not in exclude_labels]
    label_list = []
    descriptors_lists = []
    for label in ulabels:
      #if label == 1022:  # 22:
      #    print("DEBUG: COMPUTE FOR ONLY ONE LABEL")

        #---------------------------------------------------------------------
        # Determine the indices per label:
        #---------------------------------------------------------------------
        Ilabel = [i for i,x in enumerate(labels) if x == label]
        print('  {0} vertices for label {1}'.format(len(Ilabel), label))
        if len(Ilabel) > min_points_faces:

#......... part of the code omitted here .........
Contributor: thomasyu888 | Project: mindboggle | Lines of code: 101 | Source file: zernike.py

Example 10: write_vertex_measures

def write_vertex_measures(output_table, labels_or_file, sulci=[], fundi=[],
        affine_transform_files=[], inverse_booleans=[],
        transform_format='itk',
        area_file='', mean_curvature_file='', travel_depth_file='',
        geodesic_depth_file='', freesurfer_thickness_file='',
        freesurfer_curvature_file='', freesurfer_sulc_file=''):
    """
    Make a table of shape values per vertex.

    Note ::
        This function is tailored for Mindboggle outputs.

    Parameters
    ----------
    output_table : string
        output file (full path)
    labels_or_file : list or string
        label number for each vertex or name of VTK file with index scalars
    sulci :  list of integers
        indices to sulci, one per vertex, with -1 indicating no sulcus
    fundi :  list of integers
        indices to fundi, one per vertex, with -1 indicating no fundus
    affine_transform_files : list of strings
        affine transform files to standard space
    inverse_booleans : list of of zeros and ones
        for each transform, 1 to take the inverse, else 0
    transform_format : string
        format for transform file
        Ex: 'txt' for text, 'itk' for ITK, and 'mat' for Matlab format
    area_file :  string
        name of VTK file with surface area scalar values
    mean_curvature_file :  string
        name of VTK file with mean curvature scalar values
    travel_depth_file :  string
        name of VTK file with travel depth scalar values
    geodesic_depth_file :  string
        name of VTK file with geodesic depth scalar values
    freesurfer_thickness_file :  string
        name of VTK file with FreeSurfer thickness scalar values
    freesurfer_curvature_file :  string
        name of VTK file with FreeSurfer curvature (curv) scalar values
    freesurfer_sulc_file :  string
        name of VTK file with FreeSurfer convexity (sulc) scalar values

    Returns
    -------
    output_table : table file name for vertex shape values

    Examples
    --------
    >>> import os
    >>> from mindboggle.mio.vtks import read_scalars
    >>> from mindboggle.mio.tables import write_vertex_measures
    >>> #
    >>> output_table = ''#vertex_shapes.csv'
    >>> path = os.environ['MINDBOGGLE_DATA']
    >>> labels_or_file = os.path.join(path, 'arno', 'labels', 'lh.labels.DKT25.manual.vtk')
    >>> sulci_file = os.path.join(path, 'arno', 'features', 'sulci.vtk')
    >>> fundi_file = os.path.join(path, 'arno', 'features', 'fundi.vtk')
    >>> sulci, name = read_scalars(sulci_file)
    >>> fundi, name = read_scalars(fundi_file)
    >>> affine_transform_files = [os.path.join(path, 'arno', 'mri',
    >>>     't1weighted_brain.MNI152Affine.txt')]
    >>> inverse_booleans = [1]
    >>> transform_format = 'itk'
    >>> swap_xy = True
    >>> area_file = os.path.join(path, 'arno', 'shapes', 'lh.pial.area.vtk')
    >>> mean_curvature_file = os.path.join(path, 'arno', 'shapes', 'lh.pial.mean_curvature.vtk')
    >>> travel_depth_file = os.path.join(path, 'arno', 'shapes', 'lh.pial.travel_depth.vtk')
    >>> geodesic_depth_file = os.path.join(path, 'arno', 'shapes', 'lh.pial.geodesic_depth.vtk')
    >>> freesurfer_thickness_file = ''
    >>> freesurfer_curvature_file = ''
    >>> freesurfer_sulc_file = ''
    >>> #
    >>> write_vertex_measures(output_table, labels_or_file, sulci, fundi,
    >>>     affine_transform_files, inverse_booleans, transform_format, area_file,
    >>>     mean_curvature_file, travel_depth_file, geodesic_depth_file,
    >>>     freesurfer_thickness_file, freesurfer_curvature_file, freesurfer_sulc_file)

    """
    import os
    import numpy as np
    import pandas as pd

    from mindboggle.mio.vtks import read_scalars, read_vtk, \
        apply_affine_transforms

    # Make sure inputs are lists:
    if isinstance(labels_or_file, np.ndarray):
        labels = [int(x) for x in labels_or_file]
    elif isinstance(labels_or_file, list):
        labels = labels_or_file
    elif isinstance(labels_or_file, str):
        labels, name = read_scalars(labels_or_file)
    if isinstance(sulci, np.ndarray):
        sulci = [int(x) for x in sulci]
    if isinstance(fundi, np.ndarray):
        fundi = [int(x) for x in fundi]

    if not labels and not sulci and not fundi:
#......... part of the code omitted here .........
Contributor: nicholsn | Project: mindboggle | Lines of code: 101 | Source file: tables.py

Example 11: evaluate_deep_features

def evaluate_deep_features(features_file, labels_file, sulci_file='', hemi='',
                           excludeIDs=[-1], output_vtk_name='', verbose=True):
    """
    Evaluate deep surface features by computing the minimum distance from each
    label border vertex to all of the feature vertices in the same sulcus,
    and from each feature vertex to all of the label border vertices in the
    same sulcus.  The label borders run along the deepest parts of sulci
    and correspond to fundi in the DKT cortical labeling protocol.

    Parameters
    ----------
    features_file : string
        VTK surface file with feature numbers for vertex scalars
    labels_file : string
        VTK surface file with label numbers for vertex scalars
    sulci_file : string
        VTK surface file with sulcus numbers for vertex scalars
    excludeIDs : list of integers
        feature/sulcus/label IDs to exclude (background set to -1)
    output_vtk_name : string
        if not empty, output a VTK file beginning with output_vtk_name that
        contains a surface with mean distances as scalars
    verbose : Boolean
        print mean distances to standard output?

    Returns
    -------
    feature_to_border_mean_distances : numpy array [number of features x 1]
        mean distance from each feature to sulcus label border
    feature_to_border_sd_distances : numpy array [number of features x 1]
        standard deviations of feature-to-border distances
    feature_to_border_distances_vtk : string
        VTK surface file containing feature-to-border distances
    border_to_feature_mean_distances : numpy array [number of features x 1]
        mean distances from each sulcus label border to feature
    border_to_feature_sd_distances : numpy array [number of features x 1]
        standard deviations of border-to-feature distances
    border_to_feature_distances_vtk : string
        VTK surface file containing border-to-feature distances

    """
    import os
    import sys
    import numpy as np
    from mindboggle.mio.vtks import read_vtk, read_scalars, write_vtk
    from mindboggle.guts.mesh import find_neighbors, remove_faces
    from mindboggle.guts.segment import extract_borders
    from mindboggle.guts.compute import source_to_target_distances
    from mindboggle.mio.labels import DKTprotocol

    dkt = DKTprotocol()
    #-------------------------------------------------------------------------
    # Load labels, features, and sulci:
    #-------------------------------------------------------------------------
    faces, lines, indices, points, npoints, labels, scalar_names, \
        input_vtk = read_vtk(labels_file, True, True)
    features, name = read_scalars(features_file, True, True)
    if sulci_file:
        sulci, name = read_scalars(sulci_file, True, True)
        # List of indices to sulcus vertices:
        sulcus_indices = [i for i,x in enumerate(sulci) if x != -1]
        segmentIDs = sulci
        sulcus_faces = remove_faces(faces, sulcus_indices)
    else:
        sulcus_indices = range(len(labels))
        segmentIDs = []
        sulcus_faces = faces

    #-------------------------------------------------------------------------
    # Prepare neighbors, label pairs, border IDs, and outputs:
    #-------------------------------------------------------------------------
    # Calculate neighbor lists for all points:
    print('Find neighbors for all vertices...')
    neighbor_lists = find_neighbors(faces, npoints)

    # Find label border points in any of the sulci:
    print('Find label border points in any of the sulci...')
    border_indices, border_label_tuples, unique_border_label_tuples = \
        extract_borders(sulcus_indices, labels, neighbor_lists,
                        ignore_values=[], return_label_pairs=True)
    if not len(border_indices):
        sys.exit('There are no label border points!')

    # Initialize an array of label border IDs
    # (label border vertices that define sulci in the labeling protocol):
    print('Build an array of label border IDs...')
    label_borders = -1 * np.ones(npoints)

    if hemi == 'lh':
        nsulcus_lists = len(dkt.left_sulcus_label_pair_lists)
    else:
        nsulcus_lists = len(dkt.right_sulcus_label_pair_lists)
    feature_to_border_mean_distances = -1 * np.ones(nsulcus_lists)
    feature_to_border_sd_distances = -1 * np.ones(nsulcus_lists)
    border_to_feature_mean_distances = -1 * np.ones(nsulcus_lists)
    border_to_feature_sd_distances = -1 * np.ones(nsulcus_lists)
    feature_to_border_distances_vtk = ''
    border_to_feature_distances_vtk = ''

    #-------------------------------------------------------------------------
#......... part of the code omitted here .........
Contributor: thomasyu888 | Project: mindboggle | Lines of code: 101 | Source file: evaluate_features.py

Example 12: find_depth_threshold

def find_depth_threshold(depth_file, min_vertices=10000, verbose=False):
    """
    Find depth threshold to extract folds from a triangular surface mesh.

    Steps ::
        1. Compute histogram of depth measures.
        2. Define a depth threshold and find the deepest vertices.
           To extract an initial set of deep vertices from the surface mesh,
           we anticipate that there will be a rapidly decreasing distribution
           of low depth values (on the outer surface) with a long tail
           of higher depth values (in the folds), so we smooth the histogram's
           bin values, convolve to compute slopes, and find the depth value
           for the first bin with slope = 0. This is our threshold.

    Parameters
    ----------
    depth_file : string
        surface mesh file in VTK format with faces and depth scalar values
    min_vertices : integer
        minimum number of vertices
    verbose : bool
        print statements?

    Returns
    -------
    depth_threshold :  float
        threshold defining the minimum depth for vertices to be in a fold
    bins :  list of integers
        histogram bins: each is the number of vertices within a range of depth values
    bin_edges :  list of floats
        histogram bin edge values defining the bin ranges of depth values

    Examples
    --------
    >>> import numpy as np
    >>> from mindboggle.features.folds import find_depth_threshold
    >>> from mindboggle.mio.fetch_data import prep_tests
    >>> urls, fetch_data = prep_tests()
    >>> depth_file = fetch_data(urls['left_travel_depth'], '', '.vtk')
    >>> min_vertices = 10000
    >>> verbose = False
    >>> depth_threshold, bins, bin_edges = find_depth_threshold(depth_file,
    ...     min_vertices, verbose)
    >>> np.float("{0:.{1}f}".format(depth_threshold, 5))
    2.36089

    View threshold histogram plots (skip test):

    >>> def vis():
    ...     import numpy as np
    ...     import pylab
    ...     from scipy.ndimage.filters import gaussian_filter1d
    ...     from mindboggle.mio.vtks import read_scalars
    ...     # Plot histogram and depth threshold:
    ...     depths, name = read_scalars(depth_file)
    ...     nbins = np.round(len(depths) / 100.0)
    ...     a,b,c = pylab.hist(depths, bins=nbins)
    ...     pylab.plot(depth_threshold * np.ones((100,1)),
    ...                np.linspace(0, max(bins), 100), 'r.')
    ...     pylab.title('Histogram of depth values with threshold')
    ...     pylab.xlabel('Depth')
    ...     pylab.ylabel('Number of vertices')
    ...     pylab.show()
    ...     # Plot smoothed histogram:
    ...     bins_smooth = gaussian_filter1d(bins.tolist(), 5)
    ...     pylab.plot(list(range(len(bins))), bins, '.',
    ...                list(range(len(bins))), bins_smooth,'-')
    ...     pylab.title('Smoothed histogram of depth values')
    ...     pylab.show()
    >>> vis() # doctest: +SKIP

    """
    import numpy as np
    from scipy.ndimage.filters import gaussian_filter1d

    from mindboggle.mio.vtks import read_vtk

    # ------------------------------------------------------------------------
    # Load depth values for all vertices:
    # ------------------------------------------------------------------------
    points, indices, lines, faces, depths, scalar_names, npoints, \
        input_vtk = read_vtk(depth_file, return_first=True, return_array=True)

    # ------------------------------------------------------------------------
    # Compute histogram of depth measures:
    # ------------------------------------------------------------------------
    if npoints > min_vertices:
        nbins = np.int(np.round(npoints / 100.0))
    else:
        raise IOError("  Expecting at least {0} vertices to create "
                      "depth histogram".format(min_vertices))
    bins, bin_edges = np.histogram(depths, bins=nbins)

    # ------------------------------------------------------------------------
    # Anticipating that there will be a rapidly decreasing distribution
    # of low depth values (on the outer surface) with a long tail of higher
    # depth values (in the folds), smooth the bin values (Gaussian), convolve
    # to compute slopes, and find the depth for the first bin with slope = 0.
    # ------------------------------------------------------------------------
    bins_smooth = gaussian_filter1d(bins.tolist(), 5)
#......... part of the code omitted here .........
Contributor: akeshavan | Project: mindboggle | Lines of code: 101 | Source file: folds.py
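
The excerpt stops right after smoothing the histogram bins. A minimal sketch of the remaining step described in the docstring (compute slopes and take the depth of the first bin whose slope reaches zero) could look like the following; it uses np.diff rather than a convolution for brevity, so it approximates the idea rather than reproducing Mindboggle's exact code:

import numpy as np
from scipy.ndimage.filters import gaussian_filter1d

def depth_threshold_from_histogram(depths, nbins):
    """Sketch of the threshold search described above (not the original code)."""
    bins, bin_edges = np.histogram(depths, bins=nbins)
    bins_smooth = gaussian_filter1d(bins.tolist(), 5)
    # Forward differences approximate the slope of the smoothed histogram:
    slopes = np.diff(bins_smooth)
    # Depth value of the first bin where the histogram stops decreasing:
    flat = np.where(slopes >= 0)[0]
    return bin_edges[flat[0]] if flat.size else bin_edges[-1]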

Example 13: extract_fundi


#......... part of the code omitted here .........
    ...     i0 = [i for i,x in enumerate(folds) if x not in fold_numbers]
    ...     folds[i0] = -1
    >>> min_separation = 10
    >>> erode_ratio = 0.10
    >>> erode_min_size = 10
    >>> save_file = True
    >>> output_file = 'extract_fundi_fold4.vtk'
    >>> background_value = -1
    >>> verbose = False
    >>> o1, o2, fundus_per_fold_file = extract_fundi(folds, curv_file,
    ...     depth_file, min_separation, erode_ratio, erode_min_size,
    ...     save_file, output_file, background_value, verbose)
    >>> lens = [len([x for x in o1 if x == y])
    ...         for y in np.unique(o1) if y != background_value]
    >>> lens[0:10] # [66, 2914, 100, 363, 73, 331, 59, 30, 1, 14] # (if not limit_folds)
    [73]

    View result without background (skip test):

    >>> from mindboggle.mio.plots import plot_surfaces # doctest: +SKIP
    >>> from mindboggle.mio.vtks import rewrite_scalars # doctest: +SKIP
    >>> rewrite_scalars(fundus_per_fold_file,
    ...                 'extract_fundi_fold4_no_background.vtk', o1,
    ...                 'fundus_per_fold', folds) # doctest: +SKIP
    >>> plot_surfaces('extract_fundi_fold4_no_background.vtk') # doctest: +SKIP

    """

    # Extract a skeleton to connect endpoints in a fold:
    import os
    import numpy as np
    from time import time

    from mindboggle.mio.vtks import read_scalars, read_vtk, rewrite_scalars
    from mindboggle.guts.compute import median_abs_dev
    from mindboggle.guts.paths import find_max_values
    from mindboggle.guts.mesh import find_neighbors_from_file
    from mindboggle.guts.mesh import find_complete_faces
    from mindboggle.guts.paths import find_outer_endpoints
    from mindboggle.guts.paths import connect_points_erosion

    if isinstance(folds, list):
        folds = np.array(folds)

    # Load values, inner anchor threshold, and neighbors:
    if os.path.isfile(curv_file):
        points, indices, lines, faces, curvs, scalar_names, npoints, \
            input_vtk = read_vtk(curv_file, True, True)
    else:
        raise IOError("{0} doesn't exist!".format(curv_file))
    if os.path.isfile(depth_file):
        depths, name = read_scalars(depth_file, True, True)
    else:
        raise IOError("{0} doesn't exist!".format(depth_file))
    values = curvs * depths
    values0 = [x for x in values if x > 0]
    thr = np.median(values0) + 2 * median_abs_dev(values0)
    neighbor_lists = find_neighbors_from_file(curv_file)

    # ------------------------------------------------------------------------
    # Loop through folds:
    # ------------------------------------------------------------------------
    t1 = time()
    skeletons = []
    unique_fold_IDs = [x for x in np.unique(folds) if x != background_value]
Contributor: liob | Project: mindboggle | Lines of code: 66 | Source file: fundi.py

Example 14: zernike_moments_per_label

def zernike_moments_per_label(vtk_file, order=10, exclude_labels=[-1],
                              scale_input=True, decimate_fraction=0,
                              decimate_smooth=25, verbose=False):
    """
    Compute the Zernike moments per labeled region in a file.

    Optionally decimate the input mesh.

    Parameters
    ----------
    vtk_file : string
        name of VTK surface mesh file containing index scalars (labels)
    order : integer
        number of moments to compute
    exclude_labels : list of integers
        labels to be excluded
    scale_input : bool
        translate and scale each object so it is bounded by a unit sphere?
        (this is the expected input to zernike_moments())
    decimate_fraction : float
        fraction of mesh faces to remove for decimation (1 for no decimation)
    decimate_smooth : integer
        number of smoothing steps for decimation
    verbose : bool
        print statements?

    Returns
    -------
    descriptors_lists : list of lists of floats
        Zernike descriptors per label
    label_list : list of integers
        list of unique labels for which moments are computed

    Examples
    --------
    >>> # Zernike moments per label of a FreeSurfer-labeled left cortex.
    >>> # Uncomment "if label==22:" below to run example
    >>> # for left postcentral (22) pial surface:
    >>> import numpy as np
    >>> from mindboggle.shapes.zernike.zernike import zernike_moments_per_label
    >>> from mindboggle.mio.fetch_data import prep_tests
    >>> urls, fetch_data = prep_tests()
    >>> vtk_file = fetch_data(urls['left_freesurfer_labels'])
    >>> order = 3
    >>> exclude_labels = [-1]
    >>> scale_input = True
    >>> verbose = False
    >>> descriptors_lists, label_list = zernike_moments_per_label(vtk_file,
    ...     order, exclude_labels, scale_input, verbose)
    >>> label_list[0:10]
    [999, 1001, 1002, 1003, 1005, 1006, 1007, 1008, 1009, 1010]
    >>> print(np.array_str(np.array(descriptors_lists[0]),
    ...                    precision=5, suppress_small=True))
    [ 0.00587  0.01143  0.0031   0.00881  0.00107  0.00041]
    >>> print(np.array_str(np.array(descriptors_lists[1]),
    ...                    precision=5, suppress_small=True))
    [ 0.00004  0.00009  0.00003  0.00009  0.00002  0.00001]
    >>> print(np.array_str(np.array(descriptors_lists[2]),
    ...                    precision=5, suppress_small=True))
    [ 0.00144  0.00232  0.00128  0.00304  0.00084  0.00051]
    >>> print(np.array_str(np.array(descriptors_lists[3]),
    ...                    precision=5, suppress_small=True))
    [ 0.00393  0.006    0.00371  0.00852  0.00251  0.00153]
    >>> print(np.array_str(np.array(descriptors_lists[4]),
    ...                    precision=5, suppress_small=True))
    [ 0.00043  0.0003   0.00095  0.00051  0.00115  0.00116]

    """
    import numpy as np
    from mindboggle.mio.vtks import read_vtk
    from mindboggle.guts.mesh import keep_faces
    from mindboggle.shapes.zernike.zernike import zernike_moments

    min_points_faces = 4

    #-------------------------------------------------------------------------
    # Read VTK surface mesh file:
    #-------------------------------------------------------------------------
    points, indices, lines, faces, labels, scalar_names, npoints, \
            input_vtk = read_vtk(vtk_file)

    #-------------------------------------------------------------------------
    # Loop through labeled regions:
    #-------------------------------------------------------------------------
    ulabels = [x for x in np.unique(labels) if x not in exclude_labels]
    label_list = []
    descriptors_lists = []
    for label in ulabels:
      #if label == 1022:  # 22:
      #    print("DEBUG: COMPUTE FOR ONLY ONE LABEL")

        #---------------------------------------------------------------------
        # Determine the indices per label:
        #---------------------------------------------------------------------
        Ilabel = [i for i,x in enumerate(labels) if x == label]
        if verbose:
          print('  {0} vertices for label {1}'.format(len(Ilabel), label))

        if len(Ilabel) > min_points_faces:

#......... part of the code omitted here .........
Contributor: rmehta1987 | Project: mindboggle | Lines of code: 101 | Source file: zernike.py
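
Example 14's loop is truncated before the per-label computation. A minimal sketch of what happens for a single label, reusing helpers that appear elsewhere on this page, might look like the following; the argument order for zernike_moments is an assumption based on the call in Example 1 (points, faces, order, scale_input):

def zernike_for_label(points, faces, label_indices, order=3, scale_input=True):
    """Sketch of the truncated per-label step (not the original code)."""
    from mindboggle.guts.mesh import keep_faces, reindex_faces_points
    from mindboggle.shapes.zernike.zernike import zernike_moments

    # Keep the faces associated with this label's vertices, then reindex
    # the remaining points and faces (same helper calls as in Examples 8 and 15):
    pick_faces = keep_faces(faces, label_indices)
    pick_faces, pick_points, original_indices = reindex_faces_points(pick_faces,
                                                                     points)
    # Hypothetical call; decimation arguments are omitted in this sketch:
    return zernike_moments(pick_points, pick_faces, order,
                           scale_input=scale_input)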

Example 15: write_average_face_values_per_label

def write_average_face_values_per_label(input_indices_vtk,
        input_values_vtk='', area_file='', output_stem='',
        exclude_values=[-1], background_value=-1, verbose=False):
    """
    Write out a separate VTK file for each integer
    in (the first) scalar list of an input VTK file.
    Optionally write the values drawn from a second VTK file.

    Parameters
    ----------
    input_indices_vtk : string
        path of the input VTK file that contains indices as scalars
    input_values_vtk : string
        path of the input VTK file that contains values as scalars
    output_stem : string
        path and stem of the output VTK file
    exclude_values : list or array
        values to exclude
    background_value : integer or float
        background value in output VTK files
    area_file : string
        name of VTK file with surface area scalar values
    verbose : bool
        print statements?

    Examples
    --------
    >>> import os
    >>> from mindboggle.mio.tables import write_average_face_values_per_label
    >>> from mindboggle.mio.fetch_data import prep_tests
    >>> urls, fetch_data = prep_tests()
    >>> input_indices_vtk = fetch_data(urls['left_freesurfer_labels'])
    >>> input_values_vtk = fetch_data(urls['left_mean_curvature'])
    >>> area_file = fetch_data(urls['left_area'])
    >>> output_stem = 'labels_thickness'
    >>> exclude_values = [-1]
    >>> background_value = -1
    >>> verbose = False
    >>> write_average_face_values_per_label(input_indices_vtk,
    ...     input_values_vtk, area_file, output_stem, exclude_values,
    ...     background_value, verbose)

    View vtk file (skip test):

    >>> from mindboggle.mio.plots import plot_surfaces
    >>> example_vtk = os.path.join(os.getcwd(), output_stem + '0.vtk')
    >>> plot_surfaces(example_vtk) # doctest: +SKIP

    """
    import os
    import numpy as np
    import pandas as pd

    from mindboggle.mio.vtks import read_scalars, read_vtk, write_vtk
    from mindboggle.guts.mesh import keep_faces

    # Load VTK file:
    points, indices, lines, faces, scalars, scalar_names, npoints, \
        input_vtk = read_vtk(input_indices_vtk, True, True)
    if area_file:
        area_scalars, name = read_scalars(area_file, True, True)
    if verbose:
        print("Explode the scalar list in {0}".
            format(os.path.basename(input_indices_vtk)))
    if input_values_vtk != input_indices_vtk:
        if verbose:
            print("Explode the scalar list of values in {0} "
                  "with the scalar list of indices in {1}".
                format(os.path.basename(input_values_vtk),
                       os.path.basename(input_indices_vtk)))

    # Loop through unique (non-excluded) scalar values:
    unique_scalars = [int(x) for x in np.unique(scalars)
                      if x not in exclude_values]
    for scalar in unique_scalars:

        keep_indices = [x for sublst in faces for x in sublst]
        new_faces = keep_faces(faces, keep_indices)

        # Create array and indices for scalar value:
        select_scalars = np.copy(scalars)
        select_scalars[scalars != scalar] = background_value
        scalar_indices = [i for i,x in enumerate(select_scalars) if x==scalar]
        if verbose:
            print("  Scalar {0}: {1} vertices".format(scalar,
                                                      len(scalar_indices)))

        #---------------------------------------------------------------------
        # For each face, average vertex values:
        #---------------------------------------------------------------------
        output_table = os.path.join(os.getcwd(),
                                    output_stem+str(scalar)+'.csv')
        columns = []
        for face in new_faces:
            values = []
            for index in face:
                if area_file:
                    values.append(scalars[index] / area_scalars[index])
                else:
                    values.append(scalars[index])
#......... part of the code omitted here .........
Contributor: rmehta1987 | Project: mindboggle | Lines of code: 101 | Source file: tables.py


Note: The mindboggle.mio.vtks.read_vtk examples in this article were compiled by 纯净天空 from open-source code and documentation platforms such as Github and MSDocs. The code snippets were selected from open-source projects contributed by their developers, and the source code copyright belongs to the original authors. Please refer to each project's License before distributing or using the code; do not republish without permission.