

Python NiftiMasker.transform Method Code Examples

This article collects typical usage examples of the Python method nilearn.input_data.NiftiMasker.transform. If you are wondering exactly how to use NiftiMasker.transform, how to call it, or what real-world usage looks like, the hand-picked code examples below should help. You can also explore further usage examples of the class it belongs to, nilearn.input_data.NiftiMasker.


Below are 15 code examples of the NiftiMasker.transform method, sorted by popularity by default. You can upvote the examples you like or find useful; your feedback helps the system recommend better Python code examples.
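
Before diving into the examples, here is a minimal, hypothetical sketch of the typical NiftiMasker round trip (fit, transform, inverse_transform); the file names mask.nii.gz and func.nii.gz are placeholders, not files taken from any example below. Note that newer nilearn releases expose the class as nilearn.maskers.NiftiMasker, while the examples on this page use the older nilearn.input_data import path.

# Minimal sketch of the NiftiMasker fit/transform round trip.
# 'mask.nii.gz' and 'func.nii.gz' are placeholder file names.
from nilearn.input_data import NiftiMasker

masker = NiftiMasker(mask_img='mask.nii.gz', standardize=True)
masker.fit()                                   # load (or compute) the binary mask
data_2d = masker.transform('func.nii.gz')      # returns an (n_timepoints, n_voxels) array
img_back = masker.inverse_transform(data_2d)   # embed the 2D data back into a 4D image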

Example 1: preprocess

# Required import: from nilearn.input_data import NiftiMasker [as alias]
# Or: from nilearn.input_data.NiftiMasker import transform [as alias]
def preprocess(num, subj, subj_dir, subj_warp_dir, force_warp=False):
    bold_path = 'BOLD/task001_run00%i/bold_dico_bold7Tp1_to_subjbold7Tp1.nii.gz' % (num+1)
    bold_path = os.path.join(DATA_DIR, subj, bold_path)
    template_path = os.path.join(DATA_DIR, 'templates', 'grpbold7Tp1', 'brain.nii.gz')
    warp_path = os.path.join(DATA_DIR, subj, 'templates', 'bold7Tp1', 'in_grpbold7Tp1', 'subj2tmpl_warp.nii.gz')

    output_path = os.path.join(subj_warp_dir, 'run00%i.nii.gz' % num)

    if force_warp or not os.path.exists(output_path):
        print('Warping image #%i...' % num)
        subprocess.call(['fsl5.0-applywarp', '-i', bold_path, '-o', output_path, '-r', template_path, '-w', warp_path, '-d', 'float'])
    else:
        print('Reusing cached warp image #%i' % num)

    print('Loading image #%i...' % num)
    bold = load(output_path)

    masker = NiftiMasker(load(MASK_FILE))
    # masker = NiftiMasker(load(MASK_FILE), detrend=True, smoothing_fwhm=4.0,
    #                      high_pass=0.01, t_r=2.0, standardize=True)
    masker.fit()
    print('Removing confounds from image #%i...' % num)
    data = masker.transform(bold, confounds(num, subj))
    print('Detrending image #%i...' % num)
    filtered = np.float32(savgol_filter(data, 61, 5, axis=0))
    img = masker.inverse_transform(data - filtered)
    print('Smoothing image #%i...' % num)
    img = image.smooth_img(img, 4.0)
    print('Saving image #%i...' % num)
    save(img, os.path.join(subj_dir, 'run00%i.nii.gz' % num))
    print('Finished with image #%i' % num)
Developer: kshmelkov, Project: forrestgump, Lines: 33, Source: preprocessing.py

Example 2: load_data

# Required import: from nilearn.input_data import NiftiMasker [as alias]
# Or: from nilearn.input_data.NiftiMasker import transform [as alias]
def load_data():
    with open(expanduser('~/data/HCP_unmasked/data.json'), 'r') as f:
        data = json.load(f)
        for this_data in data:
            this_data['array'] += '.npy'
        mask_img = expanduser('~/data/HCP_mask/mask_img.nii.gz')
    masker = NiftiMasker(mask_img=mask_img, smoothing_fwhm=4,
                         standardize=True)
    masker.fit()
    smith2009 = fetch_atlas_smith_2009()
    init = smith2009.rsn70
    dict_init = masker.transform(init)
    return masker, dict_init, sorted(data, key=lambda t: t['filename'])
Developer: BigR-Lab, Project: modl, Lines: 15, Source: hcp_analysis.py

Example 3: SmoothResampleMasker

# Required import: from nilearn.input_data import NiftiMasker [as alias]
# Or: from nilearn.input_data.NiftiMasker import transform [as alias]
class SmoothResampleMasker(BaseMasker):

    def __init__(self, mask_img=None, smoothing_fwhm=None, resampling=None, searchlight=False):

        self.mask_img = mask_img
        self.smoothing_fwhm = smoothing_fwhm
        self.resampling = resampling
        self.searchlight = searchlight

        self.masker = None

    def fit(self):

        if self.resampling is not None:
            self.mask_img = resample_img(self.mask_img, target_affine=np.diag(self.resampling * np.ones(3)))
        self.masker = NiftiMasker(mask_img=self.mask_img)
        self.masker.fit()

        return self

    def transform(self, imgs, confounds=None):

        smooth_prefix = '' if self.smoothing_fwhm is None else 's%g' % self.smoothing_fwhm
        resample_prefix = '' if self.resampling is None else 'r%g' % self.resampling

        if not isinstance(imgs, list):
            imgs = [imgs]

        path_first = imgs[0] if isinstance(imgs[0], str) else imgs[0].get_filename()

        path_first_resampled = os.path.join(os.path.dirname(path_first), resample_prefix + os.path.basename(path_first))
        path_first_smoothed = os.path.join(os.path.dirname(path_first), smooth_prefix + resample_prefix + os.path.basename(path_first))

        if self.resampling is not None and self.smoothing_fwhm is not None:
            if self.resampling is not None:
                if not os.path.exists(path_first_resampled) and not os.path.exists(path_first_smoothed):
                    imgs = resample_img(imgs, target_affine=np.diag(self.resampling * np.ones(3)))
                else:
                    imgs = []
            if self.smoothing_fwhm is not None:
                if not os.path.exists(path_first_smoothed):
                    imgs = smooth_img(imgs, self.smoothing_fwhm)
                else:
                    imgs = []
        else:
            imgs = [check_niimg_3d(img) for img in imgs]

        return self.masker.transform(imgs)
Developer: m-guggenmos, Project: decog, Lines: 50, Source: masker.py

Example 4: test_multi_pca_score

# Required import: from nilearn.input_data import NiftiMasker [as alias]
# Or: from nilearn.input_data.NiftiMasker import transform [as alias]
def test_multi_pca_score():
    shape = (6, 8, 10, 5)
    affine = np.eye(4)
    rng = np.random.RandomState(0)

    # Create a "multi-subject" dataset
    imgs = []
    for i in range(8):
        this_img = rng.normal(size=shape)
        imgs.append(nibabel.Nifti1Image(this_img, affine))

    mask_img = nibabel.Nifti1Image(np.ones(shape[:3], dtype=np.int8), affine)

    # Assert that score is between zero and one
    multi_pca = MultiPCA(mask=mask_img, random_state=0, memory_level=0,
                         n_components=3)
    multi_pca.fit(imgs)
    s = multi_pca.score(imgs)
    assert_true(np.all(s <= 1))
    assert_true(np.all(0 <= s))

    # Assert that score does not fail with single subject data
    multi_pca = MultiPCA(mask=mask_img, random_state=0, memory_level=0,
                         n_components=3)
    multi_pca.fit(imgs[0])
    s = multi_pca.score(imgs[0])
    assert_true(isinstance(s, float))
    assert(0. <= s <= 1.)

    # Assert that score is one for n_components == n_sample
    # in single subject configuration
    multi_pca = MultiPCA(mask=mask_img, random_state=0, memory_level=0,
                         n_components=5)
    multi_pca.fit(imgs[0])
    s = multi_pca.score(imgs[0])
    assert_almost_equal(s, 1., 1)

    # Per component score
    multi_pca = MultiPCA(mask=mask_img, random_state=0, memory_level=0,
                         n_components=5)
    multi_pca.fit(imgs[0])
    masker = NiftiMasker(mask_img).fit()
    s = multi_pca._raw_score(masker.transform(imgs[0]), per_component=True)
    assert_equal(s.shape, (5,))
    assert_true(np.all(s <= 1))
    assert_true(np.all(0 <= s))
Developer: AlexandreAbraham, Project: nilearn, Lines: 48, Source: test_multi_pca.py

Example 5: get_init_objective

# Required import: from nilearn.input_data import NiftiMasker [as alias]
# Or: from nilearn.input_data.NiftiMasker import transform [as alias]
def get_init_objective(output_dir):
    mask, func_filenames = get_hcp_data(raw=True)

    masker = NiftiMasker(mask_img=mask, smoothing_fwhm=None,
                         standardize=False)
    masker.fit()

    rsn70 = fetch_atlas_smith_2009().rsn70
    components = masker.transform(rsn70)
    print(components.shape)
    enet_scale(components.T, inplace=True)
    print(np.sum(np.abs(components), axis=1))
    test_data = func_filenames[(-n_test_records * 2)::2]

    n_samples, n_voxels = np.load(test_data[-1], mmap_mode='r').shape
    X = np.empty((n_test_records * n_samples, n_voxels))

    for i, this_data in enumerate(test_data):
        X[i * n_samples:(i + 1) * n_samples] = np.load(this_data,
                                                       mmap_mode='r')
    exp_var = {}
    for alpha in [1e-2, 1e-3, 1e-4]:
        exp_var[alpha] = objective_function(X, components, alpha)
    json.dump(exp_var, open(join(output_dir, 'init_objective.json'), 'w'))
Developer: BigR-Lab, Project: modl, Lines: 26, Source: hcp_analysis.py

Example 6: map_threshold

# Required import: from nilearn.input_data import NiftiMasker [as alias]
# Or: from nilearn.input_data.NiftiMasker import transform [as alias]
def map_threshold(stat_img, mask_img=None, threshold=.001,
                  height_control='fpr', cluster_threshold=0):
    """ Threshold the provided map

    Parameters
    ----------
    stat_img : Niimg-like object,
       statistical image (presumably in z scale)

    mask_img : Niimg-like object, optional,
        mask image

    threshold: float, optional
        cluster forming threshold (either a p-value or z-scale value)

    height_control: string, optional
        false positive control meaning of cluster forming
        threshold: 'fpr'|'fdr'|'bonferroni'|'none'

    cluster_threshold : float, optional
        cluster size threshold

    Returns
    -------
    thresholded_map : Nifti1Image,
        the stat_map thresholded at the prescribed voxel- and cluster-level

    threshold: float,
        the voxel-level threshold actually used
    """
    # Masking
    if mask_img is None:
        masker = NiftiMasker(mask_strategy='background').fit(stat_img)
    else:
        masker = NiftiMasker(mask_img=mask_img).fit()
    stats = np.ravel(masker.transform(stat_img))
    n_voxels = np.size(stats)

    # Thresholding
    if height_control == 'fpr':
        z_th = norm.isf(threshold)
    elif height_control == 'fdr':
        z_th = fdr_threshold(stats, threshold)
    elif height_control == 'bonferroni':
        z_th = norm.isf(threshold / n_voxels)
    else:  # Brute-force thresholding
        z_th = threshold
    stats *= (stats > z_th)

    # embed it back to 3D grid
    stat_map = masker.inverse_transform(stats).get_data()

    # Extract connected components above threshold
    label_map, n_labels = label(stat_map > z_th)
    labels = label_map[masker.mask_img_.get_data() > 0]

    for label_ in range(1, n_labels + 1):
        if np.sum(labels == label_) < cluster_threshold:
            stats[labels == label_] = 0

    return masker.inverse_transform(stats), z_th
Developer: coolspiderghy, Project: nistats, Lines: 63, Source: thresholding.py

Example 7: NiftiMasker

# Required import: from nilearn.input_data import NiftiMasker [as alias]
# Or: from nilearn.input_data.NiftiMasker import transform [as alias]
target_header = masknii.get_header()
target_shape = masknii.shape
nifti_masker = NiftiMasker(mask_img=masknii, smoothing_fwhm=False,
                           standardize=False)
nifti_masker.fit()
nifti_masker.mask_img_.to_filename("debug_mask.nii")

for compr_name in ['PCA', 'FactorAnalysis',
                   'FastICA', 'SparsePCA']:
    for sample in ['AT', 'HT']:

        FS_nii, labels, subs = joblib.load('preload_2nd_' + sample)

        print('%s in %s' % (compr_name, sample))

        FS = nifti_masker.transform(FS_nii)

        compressor = joblib.load('preload_compr_HT%s40' % compr_name)

        FS_loadings = compressor.transform(FS)
        from scipy.stats import f_oneway

        for group, group_name in zip([labels, subs],
                                     ['tasks', 'subjects']):
            print(group_name)
            array_form = [FS_loadings[group_tag == group].ravel() for group_tag in np.unique(group)]

            fvalue, pvalue = f_oneway(*array_form)
            print('F-Value: %.4f' % fvalue)
            print('P-Value: %.16f' % pvalue)
Developer: banilo, Project: taskrest2016, Lines: 32, Source: compr2task_subjVStask.py

Example 8: enumerate

# Required import: from nilearn.input_data import NiftiMasker [as alias]
# Or: from nilearn.input_data.NiftiMasker import transform [as alias]
    'MATH-STORY', 'STORY-MATH',
    'T-AVG', 'F-H', 'H-F',
    'MATCH-REL', 'REL-MATCH',

    'BODY-AVG', 'FACE-AVG', 'PLACE-AVG', 'TOOL-AVG',
    '2BK-0BK'
]
mean_supp = np.zeros((18, mask_nvox))
from nilearn.image import resample_img
for itask, task in enumerate(HCP_contrasts):
    cur_nii = op.join(means_path, 'mean_%s.nii.gz' % (task))
    print(cur_nii)
    res_nii = resample_img(cur_nii,
        target_affine=nifti_masker.mask_img_.get_affine(),
        target_shape=nifti_masker.mask_img_.shape)
    task_mean = nifti_masker.transform(res_nii)
    mean_supp[itask, :] = task_mean
mean_supp_z = zscore(mean_supp, axis=1)
    
# get classification weights
lr_supp = np.load(op.join(LR_DIR, 'V0comps.npy'))
lr_supp_z = zscore(lr_supp, axis=1)

# get LR/AE weights
WRITE_DIR = 'nips3mm_recovery'
lambs = [0.25, 0.5, 0.75, 1]
import re
from scipy.stats import pearsonr
for n_comp in [5]:
    corr_means_lr = np.zeros((len(lambs), 18))
    corr_means_lr_ae = np.zeros((len(lambs), 18))
Developer: Veterun, Project: nips2015, Lines: 33, Source: nips3mm_recovery.py

Example 9: SecondLevelModel

# Required import: from nilearn.input_data import NiftiMasker [as alias]
# Or: from nilearn.input_data.NiftiMasker import transform [as alias]

#......... part of the code is omitted here .........
            In case a list of FirstLevelModel was provided as
            second_level_input, we have to provide a contrast to apply to
            the first level models to get the corresponding list of images
            desired, that would be tested at the second level. In case a
            pandas DataFrame was provided as second_level_input this is the
            map name to extract from the pandas dataframe map_name column.
            It has to be a 't' contrast.

        second_level_stat_type: {'t', 'F'}, optional
            Type of the second level contrast

        output_type: str, optional
            Type of the output map. Can be 'z_score', 'stat', 'p_value',
            'effect_size' or 'effect_variance'

        Returns
        -------
        output_image: Nifti1Image
            The desired output image

        """
        if self.second_level_input_ is None:
            raise ValueError('The model has not been fit yet')

        # first_level_contrast check
        if isinstance(self.second_level_input_[0], FirstLevelModel):
            if first_level_contrast is None:
                raise ValueError('If second_level_input was a list of '
                                 'FirstLevelModel, then first_level_contrast '
                                 'is mandatory. It corresponds to the '
                                 'second_level_contrast argument of the '
                                 'compute_contrast method of FirstLevelModel')

        # check contrast definition
        if second_level_contrast is None:
            if self.design_matrix_.shape[1] == 1:
                second_level_contrast = np.ones([1])
            else:
                raise ValueError('No second-level contrast is specified.')
        if isinstance(second_level_contrast, np.ndarray):
            con_val = second_level_contrast
            if np.all(con_val == 0):
                raise ValueError('Contrast is null')
        else:
            design_info = DesignInfo(self.design_matrix_.columns.tolist())
            constraint = design_info.linear_constraint(second_level_contrast)
            con_val = constraint.coefs
        # check output type
        if isinstance(output_type, _basestring):
            if output_type not in ['z_score', 'stat', 'p_value', 'effect_size',
                                   'effect_variance']:
                raise ValueError(
                    'output_type must be one of "z_score", "stat"'
                    ', "p_value", "effect_size" or "effect_variance"')
        else:
            raise ValueError('output_type must be one of "z_score", "stat",'
                             ' "p_value", "effect_size" or "effect_variance"')

        # Get effect_maps appropriate for chosen contrast
        effect_maps = _infer_effect_maps(self.second_level_input_,
                                         first_level_contrast)
        # Check design matrix X and effect maps Y agree on number of rows
        if len(effect_maps) != self.design_matrix_.shape[0]:
            raise ValueError(
                'design_matrix does not match the number of maps considered. '
                '%i rows in design matrix do not match with %i maps' %
                (self.design_matrix_.shape[0], len(effect_maps)))

        # Fit an Ordinary Least Squares regression for parametric statistics
        Y = self.masker_.transform(effect_maps)
        if self.memory:
            mem_glm = self.memory.cache(run_glm, ignore=['n_jobs'])
        else:
            mem_glm = run_glm
        labels, results = mem_glm(Y, self.design_matrix_.values,
                                  n_jobs=self.n_jobs, noise_model='ols')
        # We save memory if inspecting model details is not necessary
        if self.minimize_memory:
            for key in results:
                results[key] = SimpleRegressionResults(results[key])
        self.labels_ = labels
        self.results_ = results

        # We compute contrast object
        if self.memory:
            mem_contrast = self.memory.cache(compute_contrast)
        else:
            mem_contrast = compute_contrast
        contrast = mem_contrast(self.labels_, self.results_, con_val,
                                second_level_stat_type)

        # We get desired output from contrast object
        estimate_ = getattr(contrast, output_type)()

        # Prepare the returned images
        output = self.masker_.inverse_transform(estimate_)
        contrast_name = str(con_val)
        output.header['descrip'] = (
            '%s of contrast %s' % (output_type, contrast_name))
        return output
Developer: alpinho, Project: nistats, Lines: 104, Source: second_level_model.py

Example 10: FastICA

# Required import: from nilearn.input_data import NiftiMasker [as alias]
# Or: from nilearn.input_data.NiftiMasker import transform [as alias]
### Visualize the mask ########################################################
import matplotlib.pyplot as plt
import numpy as np
import nibabel
plt.figure()
plt.axis('off')
plt.imshow(np.rot90(nibabel.load(dataset.func[0]).get_data()[..., 20, 0]),
          interpolation='nearest', cmap=plt.cm.gray)
ma = np.ma.masked_equal(mask, False)
plt.imshow(np.rot90(ma[..., 20]), interpolation='nearest', cmap=plt.cm.autumn,
          alpha=0.5)
plt.title("Mask")

### Preprocess data ###########################################################
nifti_masker.fit(dataset.func[0])
fmri_masked = nifti_masker.transform(dataset.func[0])

### Run an algorithm ##########################################################
from sklearn.decomposition import FastICA
n_components = 20
ica = FastICA(n_components=n_components, random_state=42)
components_masked = ica.fit_transform(fmri_masked.T).T

### Reverse masking ###########################################################
components = nifti_masker.inverse_transform(components_masked)

### Show results ##############################################################
components_data = np.ma.masked_equal(components.get_data(), 0)
plt.figure()
plt.axis('off')
plt.imshow(np.rot90(nibabel.load(dataset.func[0]).get_data()[..., 20, 0]),
Developer: VirgileFritsch, Project: nilearn, Lines: 33, Source: plot_nifti_simple.py

Example 11: map_threshold

# Required import: from nilearn.input_data import NiftiMasker [as alias]
# Or: from nilearn.input_data.NiftiMasker import transform [as alias]
def map_threshold(stat_img=None, mask_img=None, level=.001,
                  height_control='fpr', cluster_threshold=0):
    """ Compute the required threshold level and return the thresholded map

    Parameters
    ----------
    stat_img : Niimg-like object or None, optional
       statistical image (presumably in z scale)
       whenever height_control is 'fpr' or None,
       stat_img=None is acceptable.
       If it is 'fdr' or 'bonferroni', an error is raised if stat_img is None.

    mask_img : Niimg-like object, optional,
        mask image

    level: float, optional
        number controling the thresholding (either a p-value or z-scale value).
        Not to be confused with the z-scale threshold: level can be a p-values,
        e.g. "0.05" or another type of number depending on the
        height_control parameter. The z-scale threshold is actually returned by
        the function.

    height_control: string or None, optional
        false positive control meaning of cluster forming
        threshold: 'fpr'|'fdr'|'bonferroni'|None

    cluster_threshold : float, optional
        cluster size threshold. In the returned thresholded map,
        sets of connected voxels (`clusters`) with size smaller
        than this number will be removed.

    Returns
    -------
    thresholded_map : Nifti1Image,
        the stat_map thresholded at the prescribed voxel- and cluster-level

    threshold: float,
        the voxel-level threshold used actually

    Note
    ----
    If the input image is not z-scaled (i.e. some z-transformed statistic)
    the computed threshold is not rigorous and likely meaningless
    """
    # Check that height_control is correctly specified
    if height_control not in ['fpr', 'fdr', 'bonferroni', None]:
        raise ValueError(
            "height control should be one of ['fpr', 'fdr', 'bonferroni', None]")

    # if height_control is 'fpr' or None, we don't need to look at the data
    # to compute the threshold
    if height_control == 'fpr':
        threshold = norm.isf(level)
    elif height_control is None:
        threshold = level

    # In this case, if stat_img is None, we return
    if stat_img is None:
        if height_control in ['fpr', None]:
            return None, threshold
        else:
            raise ValueError(
                'map_threshold requires stat_img not to be None '
                'when the height_control procedure is bonferroni or fdr')

    # Masking
    if mask_img is None:
        masker = NiftiMasker(mask_strategy='background').fit(stat_img)
    else:
        masker = NiftiMasker(mask_img=mask_img).fit()
    stats = np.ravel(masker.transform(stat_img))
    n_voxels = np.size(stats)

    # Thresholding
    if height_control == 'fdr':
        threshold = fdr_threshold(stats, level)
    elif height_control == 'bonferroni':
        threshold = norm.isf(level / n_voxels)
    stats *= (stats > threshold)

    # embed it back to 3D grid
    stat_map = masker.inverse_transform(stats).get_data()

    # Extract connected components above threshold
    label_map, n_labels = label(stat_map > threshold)
    labels = label_map[masker.mask_img_.get_data() > 0]

    for label_ in range(1, n_labels + 1):
        if np.sum(labels == label_) < cluster_threshold:
            stats[labels == label_] = 0

    return masker.inverse_transform(stats), threshold
Developer: alpinho, Project: nistats, Lines: 94, Source: thresholding.py

Example 12: Masker

# Required import: from nilearn.input_data import NiftiMasker [as alias]
# Or: from nilearn.input_data.NiftiMasker import transform [as alias]
class Masker(object):
    """
    Class that takes a binary mask.nii file and allows us to use it
    within a volumizer in order to reduce the dimensionality of our data in
    realtime.

    If we have other ROI masks (e.g. wm, csf), we can use them to detrend the data
    by setting them as orthogonals.
    """

    def __init__(self, mask_img, center=None, radius=8):
        self.mask_img = mask_img
        self.masker = NiftiMasker(mask_img=mask_img)
        self.fit = False
        # set the mask center
        if center is None:
            self.center = self.find_center_of_mass(self.masker)

        else:
            self.center = center
        print("Center=", center)
        print("COM calc=", self.find_center_of_mass(self.masker))

        # the radius of the mask, used for determining what data to read.
        self.radius = radius
        self.orthogonals = []
        self.use_orthogonal = False
        self.ortho_fits = []

    def reduce_volume(self, volume, method='mean'):
        if not self.fit:
            self.masker.fit(volume)
        if method == 'mean':
            reduced = npm(self.masker.transform(volume['image']))
        return reduced

    def find_center_of_mass(self, niftimasker):
        """
        Find the center of mass of an image given a nifti masker object
        in the z plane. We can use this information to only select dicoms
        we need in a DicomFilter object.
        """

        com = measurements.center_of_mass(
            nibabel.load(niftimasker.mask_img).get_data())
        affine = nibabel.load(niftimasker.mask_img).affine
        offset = affine[0:3, 3]
        tcom = np.dot(affine[0:3, 0:3], com) + offset
        return tcom[2]

    def add_orthogonal(self, mask_img):
        # add another mask_img to our orthogonals with get_orthogonal
        self.use_orthogonal = True
        self.orthogonals.append(NiftiMasker(mask_img=mask_img))
        self.ortho_fits.append(False)

    def get_orthogonals(self, volume):
        """
        Return a list of ROI averages for a volume given a set of
        orthogonal masks
        """
        for i, fit in enumerate(self.ortho_fits):
            if not fit:
                self.orthogonals[i].fit(volume)

        return [npm(x.transform(volume['image'])) for x in self.orthogonals]
Developer: cni, Project: rtfmri, Lines: 68, Source: masker.py

Example 13: NiftiMasker

# Required import: from nilearn.input_data import NiftiMasker [as alias]
# Or: from nilearn.input_data.NiftiMasker import transform [as alias]
    masker = NiftiMasker(
        mask_img=mask_img, memory='nilearn_cache', memory_level=1)
    masker = masker.fit()

    # Images may fail to be transformed, and are of different shapes,
    # so we need to transform one-by-one and keep track of failures.
    X = []
    is_usable = np.ones((len(images),), dtype=bool)

    for index, image_path in enumerate(images):
        # load image and remove nan and inf values.
        # applying smooth_img to an image with fwhm=None simply cleans up
        # non-finite values but otherwise doesn't modify the image.
        image = smooth_img(image_path, fwhm=None)
        try:
            X.append(masker.transform(image))
        except Exception as e:
            meta = nv_data['images_meta'][index]
            print("Failed to mask/reshape image: id: {0}; "
                  "name: '{1}'; collection: {2}; error: {3}".format(
                      meta.get('id'), meta.get('name'),
                      meta.get('collection_id'), e))
            is_usable[index] = False

# Now reshape list into 2D matrix, and remove failed images from terms
X = np.vstack(X)
term_weights = term_weights[is_usable, :]


######################################################################
# Run ICA and map components to terms
Developer: TheChymera, Project: nilearn, Lines: 33, Source: plot_ica_neurovault.py

Example 14: FirstLevelGLM

# Required import: from nilearn.input_data import NiftiMasker [as alias]
# Or: from nilearn.input_data.NiftiMasker import transform [as alias]

#......... part of the code is omitted here .........
                setattr(self.masker_, param_name, our_param)

        # make design_matrices a list of arrays
        if isinstance(design_matrices, (_basestring, pd.DataFrame)):
            design_matrices_ = [design_matrices]
        else:
            design_matrices_ = [X for X in design_matrices]

        design_matrices = []
        for design_matrix in design_matrices_:
            if isinstance(design_matrix, _basestring):
                loaded = pd.read_csv(design_matrix, index_col=0)
                design_matrices.append(loaded.values)
            elif isinstance(design_matrix, pd.DataFrame):
                design_matrices.append(design_matrix.values)
            else:
                raise TypeError(
                    'Design matrix can only be a pandas DataFrame or a '
                    'string. A %s was provided' % type(design_matrix))

        # make imgs a list of Nifti1Images
        if isinstance(imgs, (Nifti1Image, _basestring)):
            imgs = [imgs]

        if len(imgs) != len(design_matrices):
            raise ValueError(
                'len(imgs) %d does not match len(design_matrices) %d'
                % (len(imgs), len(design_matrices)))

        # Loop on imgs and design matrices
        self.labels_, self.results_ = [], []
        self.masker_.fit(imgs)
        for X, img in zip(design_matrices, imgs):
            Y = self.masker_.transform(img)
            if self.percent_signal_change:
                Y, _ = percent_mean_scaling(Y)
            labels_, results_ = session_glm(
                Y, X, noise_model=self.noise_model, bins=100)
            self.labels_.append(labels_)
            self.results_.append(results_)
        return self

    def transform(self, con_vals, contrast_type=None, contrast_name='',
                  output_z=True, output_stat=False, output_effects=False,
                  output_variance=False):
        """Generate different outputs corresponding to
        the contrasts provided e.g. z_map, t_map, effects and variance.
        In multi-session case, outputs the fixed effects map.

        Parameters
        ----------
        con_vals : array or list of arrays of shape (n_col) or (n_dim, n_col)
            where ``n_col`` is the number of columns of the design matrix,
            numerical definition of the contrast (one array per run)

        contrast_type : {'t', 'F'}, optional
            type of the contrast

        contrast_name : str, optional
            name of the contrast

        output_z : bool, optional
            Return or not the corresponding z-stat image

        output_stat : bool, optional
            Return or not the base (t/F) stat image
Developer: AlexandreAbraham, Project: pypreprocess, Lines: 70, Source: glm.py

Example 15: StandardScaler

# Required import: from nilearn.input_data import NiftiMasker [as alias]
# Or: from nilearn.input_data.NiftiMasker import transform [as alias]
labels = np.int32(labels)

# contrasts are IN ORDER -> shuffle!
new_inds = np.arange(0, X_task.shape[0])
np.random.shuffle(new_inds)
X_task = X_task[new_inds]
labels = labels[new_inds]
# subs = subs[new_inds]

# rest
# X_rest = nifti_masker.transform('preload_HR20persub_10mm_ero2.nii')
# X_rest = nifti_masker.transform('dump_rs_spca_s12_tmp')
rs_spca_data = joblib.load('dump_rs_spca_s12_tmp')
rs_spca_niis = nib.Nifti1Image(rs_spca_data,
                               nifti_masker.mask_img_.get_affine())
X_rest = nifti_masker.transform(rs_spca_niis)
del rs_spca_niis
del rs_spca_data

X_task = StandardScaler().fit_transform(X_task)
X_rest = StandardScaler().fit_transform(X_rest)

# ARCHI task
AT_niis, AT_labels, AT_subs = joblib.load('preload_AT_3mm')
AT_X = nifti_masker.transform(AT_niis)
AT_X = StandardScaler().fit_transform(AT_X)
print('done :)')

##############################################################################
# define computation graph
##############################################################################
Developer: Veterun, Project: nips2015, Lines: 33, Source: nips3mm.py


Note: The nilearn.input_data.NiftiMasker.transform method examples in this article were compiled by 纯净天空 from open-source code and documentation platforms such as GitHub and MSDocs. The code snippets were selected from open-source projects contributed by various developers; copyright in the source code belongs to the original authors. Please refer to each project's license for distribution and use, and do not reproduce without permission.