This article collects typical usage examples of the Python method nilearn.input_data.NiftiMasker.fit_transform. If you have been wondering how NiftiMasker.fit_transform works in practice, or are looking for concrete examples of it, the curated code samples below may help. You can also explore further usage examples of the class this method belongs to, nilearn.input_data.NiftiMasker.
The following shows 15 code examples of the NiftiMasker.fit_transform method, sorted by popularity by default. You can upvote the examples you like or find useful; your ratings help recommend better Python code examples.
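Before diving into the examples, here is a minimal sketch of the basic fit_transform workflow, assuming a nilearn version in which NiftiMasker is still importable from nilearn.input_data (newer releases expose it under nilearn.maskers); the file names are placeholders:

# Minimal sketch of the NiftiMasker.fit_transform workflow; paths are placeholders.
from nilearn.input_data import NiftiMasker

masker = NiftiMasker(mask_img='mask.nii.gz', standardize=True)
data = masker.fit_transform('func.nii.gz')   # 2D array of shape (n_timepoints, n_voxels)
img = masker.inverse_transform(data)         # back to a 4D Nifti1Image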
Example 1: similarity
# Required module: from nilearn.input_data import NiftiMasker [as alias]
# Or: from nilearn.input_data.NiftiMasker import fit_transform [as alias]
def similarity(self, image, method='correlation'):
    """ Calculate similarity of Brain_Data() instance with a single Brain_Data or Nibabel image

    Args:
        self: Brain_Data instance of data to be applied
        image: Brain_Data or Nibabel instance of weight map

    Returns:
        pexp: Outputs a vector of pattern expression values
    """
    if not isinstance(image, Brain_Data):
        if isinstance(image, nib.Nifti1Image):
            image = Brain_Data(image)
        else:
            raise ValueError("Image is not a Brain_Data or nibabel instance")
    dim = image.shape()

    # Check that the masks are the same for each dataset and, if not, create an intersection mask
    # This might be handy code for a new Brain_Data method
    if np.sum(self.nifti_masker.mask_img.get_data() == 1) != np.sum(image.nifti_masker.mask_img.get_data() == 1):
        new_mask = intersect_masks([self.nifti_masker.mask_img, image.nifti_masker.mask_img],
                                   threshold=1, connected=False)
        new_nifti_masker = NiftiMasker(mask_img=new_mask)
        data2 = new_nifti_masker.fit_transform(self.to_nifti())
        image2 = new_nifti_masker.fit_transform(image.to_nifti())
    else:
        data2 = self.data
        image2 = image.data

    # Calculate pattern expression
    if method == 'dot_product':
        if len(image2.shape) > 1:
            if image2.shape[0] > 1:
                pexp = []
                for i in range(image2.shape[0]):
                    pexp.append(np.dot(data2, image2[i, :]))
                pexp = np.array(pexp)
            else:
                pexp = np.dot(data2, image2)
        else:
            pexp = np.dot(data2, image2)
    elif method == 'correlation':
        if len(image2.shape) > 1:
            if image2.shape[0] > 1:
                pexp = []
                for i in range(image2.shape[0]):
                    pexp.append(pearson(image2[i, :], data2))
                pexp = np.array(pexp)
            else:
                pexp = pearson(image2, data2)
        else:
            pexp = pearson(image2, data2)
    return pexp
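A minimal usage sketch for the method above, assuming it is defined on an nltools-style Brain_Data class (as in the nltools package); the file names are placeholders:

# Hypothetical usage; paths are placeholders and Brain_Data is assumed to come from nltools.
from nltools.data import Brain_Data

dat = Brain_Data('subject_betas.nii.gz')    # data to be scored
weights = Brain_Data('weight_map.nii.gz')   # pattern / weight map
pexp = dat.similarity(weights, method='correlation')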
Example 2: ts
# Required module: from nilearn.input_data import NiftiMasker [as alias]
# Or: from nilearn.input_data.NiftiMasker import fit_transform [as alias]
def ts(img_path,
       mask=False,
       substitution={},
       ):
    """
    Return the mean and median of a Region of Interest (ROI) time course.

    Parameters
    ----------
    img_path : str
        Path to NIfTI file from which the ROI is to be extracted.
    mask : nilearn.NiftiMasker or str, optional
        Nilearn NiftiMasker object to use for masking the desired ROI, or a string specifying the path of a mask file.
    substitution : dict, optional
        A dictionary with keys which include 'subject' and 'session'.
    """
    if substitution:
        img_path = img_path.format(**substitution)
    img_path = path.abspath(path.expanduser(img_path))
    img = nib.load(img_path)
    try:
        masked_data = mask.fit_transform(img).T
    except:
        mask = path.abspath(path.expanduser(mask))
        mask = NiftiMasker(mask_img=mask)
        masked_data = mask.fit_transform(img).T
    ts_means = np.mean(masked_data, axis=0)
    ts_medians = np.median(masked_data, axis=0)
    return ts_means, ts_medians
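A hedged call sketch for the function above; the paths and substitution keys are placeholders:

# Hypothetical usage of ts(); all paths are placeholders.
means, medians = ts(
    '~/data/{subject}/{session}/func.nii.gz',
    mask='~/data/rois/my_roi.nii.gz',
    substitution={'subject': 'sub-01', 'session': 'ses-01'},
)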
Example 3: apply_mask
# Required module: from nilearn.input_data import NiftiMasker [as alias]
# Or: from nilearn.input_data.NiftiMasker import fit_transform [as alias]
def apply_mask(self, mask):
    """ Mask Brain_Data instance

    Args:
        mask: mask (Brain_Data or nifti object)
    """
    if isinstance(mask, Brain_Data):
        mask = mask.to_nifti()  # convert to nibabel

    if not isinstance(mask, nib.Nifti1Image):
        if type(mask) is str:
            if os.path.isfile(mask):
                mask = nib.load(mask)
            # Check if the mask needs to be resampled into Brain_Data mask space
            if not ((self.mask.get_affine() == mask.get_affine()).all()) & (self.mask.shape[0:3] == mask.shape[0:3]):
                mask = resample_img(mask, target_affine=self.mask.get_affine(), target_shape=self.mask.shape)
        else:
            raise ValueError("Mask is not a nibabel instance, Brain_Data instance, or a valid file name.")

    masked = deepcopy(self)
    nifti_masker = NiftiMasker(mask_img=mask)
    masked.data = nifti_masker.fit_transform(self.to_nifti())
    if len(self.data.shape) > 2:
        masked.data = masked.data.squeeze()
    masked.nifti_masker = nifti_masker
    return masked
Example 4: MaskFlatten
# Required module: from nilearn.input_data import NiftiMasker [as alias]
# Or: from nilearn.input_data.NiftiMasker import fit_transform [as alias]
def MaskFlatten(concat_dict, mask, iter_n):
    '''Mask image data, convert to 2D feature matrix'''
    nifti_masker = NiftiMasker(mask_img=mask)
    masked_dict = {}
    for i in range(iter_n):
        masked_dict[i] = nifti_masker.fit_transform(concat_dict[i])
    return masked_dict
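A short usage sketch for MaskFlatten, assuming concat_dict maps iteration indices to 4D NIfTI images (paths or Nifti1Image objects); the file names are placeholders:

# Hypothetical usage of MaskFlatten(); file names are placeholders.
concat_dict = {0: 'iter-0_concat.nii.gz', 1: 'iter-1_concat.nii.gz'}
masked_dict = MaskFlatten(concat_dict, 'brain_mask.nii.gz', iter_n=2)
# each masked_dict[i] is a (n_volumes, n_voxels) feature matrix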
Example 5: nilearn_denoise
# Required module: from nilearn.input_data import NiftiMasker [as alias]
# Or: from nilearn.input_data.NiftiMasker import fit_transform [as alias]
def nilearn_denoise(in_file, brain_mask, wm_mask, csf_mask,
                    motreg_file, outlier_file,
                    bandpass, tr):
    """Clean time series using Nilearn high_variance_confounds to extract
    CompCor regressors and NiftiMasker for regression of all nuisance regressors,
    detrending, normalization and bandpass filtering.
    """
    import numpy as np
    import nibabel as nb
    import os
    from nilearn.image import high_variance_confounds
    from nilearn.input_data import NiftiMasker
    from nipype.utils.filemanip import split_filename

    # reload niftis to round affines so that nilearn doesn't complain
    wm_nii = nb.Nifti1Image(nb.load(wm_mask).get_data(), np.around(nb.load(wm_mask).get_affine(), 2), nb.load(wm_mask).get_header())
    csf_nii = nb.Nifti1Image(nb.load(csf_mask).get_data(), np.around(nb.load(csf_mask).get_affine(), 2), nb.load(csf_mask).get_header())
    time_nii = nb.Nifti1Image(nb.load(in_file).get_data(), np.around(nb.load(in_file).get_affine(), 2), nb.load(in_file).get_header())

    # infer shape of confound array
    # not ideal
    confound_len = nb.load(in_file).get_data().shape[3]

    # create outlier regressors
    outlier_regressor = np.empty((confound_len, 1))
    try:
        outlier_val = np.genfromtxt(outlier_file)
    except IOError:
        outlier_val = np.empty((0))
    for index in np.atleast_1d(outlier_val):
        outlier_vector = np.zeros((confound_len, 1))
        outlier_vector[index] = 1
        outlier_regressor = np.hstack((outlier_regressor, outlier_vector))
    outlier_regressor = outlier_regressor[:, 1::]

    # load motion regressors
    motion_regressor = np.genfromtxt(motreg_file)

    # extract high variance confounds in wm/csf masks from motion corrected data
    wm_regressor = high_variance_confounds(time_nii, mask_img=wm_nii, detrend=True)
    csf_regressor = high_variance_confounds(time_nii, mask_img=csf_nii, detrend=True)

    # create NiftiMasker for denoising
    denoiser = NiftiMasker(mask_img=brain_mask, standardize=True, detrend=True,
                           high_pass=bandpass[1], low_pass=bandpass[0], t_r=tr)

    # denoise and return denoised data to img
    confounds = np.hstack((outlier_regressor, wm_regressor, csf_regressor, motion_regressor))
    denoised_data = denoiser.fit_transform(in_file, confounds=confounds)
    denoised_img = denoiser.inverse_transform(denoised_data)

    # save
    _, base, _ = split_filename(in_file)
    img_fname = base + '_denoised.nii.gz'
    nb.save(denoised_img, img_fname)
    confound_fname = os.path.join(os.getcwd(), "all_confounds.txt")
    np.savetxt(confound_fname, confounds, fmt="%.10f")
    return os.path.abspath(img_fname), confound_fname
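A hedged call sketch for nilearn_denoise; all file names are placeholders. Given how the function maps the keywords, bandpass is expected as (low_pass cutoff, high_pass cutoff) in Hz:

# Hypothetical usage of nilearn_denoise(); all paths are placeholders.
denoised_file, confounds_file = nilearn_denoise(
    in_file='rest_mcf.nii.gz',
    brain_mask='brain_mask.nii.gz',
    wm_mask='wm_mask.nii.gz',
    csf_mask='csf_mask.nii.gz',
    motreg_file='motion_regressors.txt',
    outlier_file='outliers.txt',
    bandpass=(0.1, 0.01),   # (low_pass, high_pass) in Hz
    tr=2.0,
)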
Example 6: _run_interface
# Required module: from nilearn.input_data import NiftiMasker [as alias]
# Or: from nilearn.input_data.NiftiMasker import fit_transform [as alias]
def _run_interface(self, runtime):
    from nilearn.input_data import NiftiMasker, NiftiLabelsMasker
    from nipype.utils.filemanip import split_filename
    import nibabel as nib
    import os

    functional_filename = self.inputs.in_file
    atlas_filename = self.inputs.atlas_filename
    mask_filename = self.inputs.mask_filename

    # Extracting the ROI signals
    masker = NiftiLabelsMasker(labels_img=atlas_filename,
                               background_label=0,
                               standardize=True,
                               detrend=True,
                               verbose=1)
    time_series = masker.fit_transform(functional_filename)

    # Removing the ROI signal from the time series
    nifti_masker = NiftiMasker(mask_img=mask_filename)
    masked_data = nifti_masker.fit_transform(functional_filename, confounds=time_series[..., 0])
    masked_img = nifti_masker.inverse_transform(masked_data)

    # Saving the result to disk
    outputs = self._outputs().get()
    fname = self.inputs.in_file
    _, base, _ = split_filename(fname)
    nib.save(masked_img, os.path.abspath(base + '_regressed.nii.gz'))
    return runtime
Example 7: signal_extractor
# Required module: from nilearn.input_data import NiftiMasker [as alias]
# Or: from nilearn.input_data.NiftiMasker import fit_transform [as alias]
class signal_extractor():
    def __init__(self, dataset=None):
        self.dataset = dataset
        if 'mask' in dataset:
            self.masker = NiftiMasker(mask_img=self.dataset.mask,
                                      low_pass=.1,
                                      high_pass=.01,
                                      smoothing_fwhm=6.,
                                      t_r=1.05,
                                      detrend=True,
                                      standardize=False,
                                      memory_level=0,
                                      verbose=5)
        else:
            self.masker = NiftiMasker(low_pass=.1,
                                      high_pass=.01,
                                      smoothing_fwhm=6.,
                                      t_r=1.05,
                                      detrend=True,
                                      standardize=False,
                                      memory_level=0,
                                      verbose=5)

    def extract(self):
        for idx, func in enumerate([self.dataset.func1]):
            # add mask, smoothing, filter and detrending
            for i in range(len(self.dataset.subjects)):
                tic = time.clock()
                # extract signal to x
                x = self.masker.fit_transform(func[i])
                print("loading time : " + str(time.clock() - tic))
                yield x, self.masker
Example 8: extract_brain_rad
# Required module: from nilearn.input_data import NiftiMasker [as alias]
# Or: from nilearn.input_data.NiftiMasker import fit_transform [as alias]
def extract_brain_rad(db, rad_column, rad_dir, stat, include_chim=False):
    """Replace the radiation-presence flag with `stat` computed on a whole-brain ROI.

    Assumes the brain mask and the radiation NIfTI file are in rad_dir."""
    brain_mask_file = 'BrainMask_to_rd.nii.gz'
    extracted_rad_stat = {}  # Memoization of radiation statistic
    for idx, row in db.iterrows():
        if row[rad_column] == 1:
            sub_id = row['patient']
            if sub_id in extracted_rad_stat:
                db.loc[idx, rad_column] = extracted_rad_stat[sub_id]
            else:
                mask_path = os.path.join(rad_dir, sub_id, brain_mask_file)
                mask_check = os.path.isfile(mask_path)
                rad_path = os.path.join(rad_dir, sub_id, sub_id + '.nii')
                rad_check = os.path.isfile(rad_path)
                if mask_check and rad_check:
                    masker = NiftiMasker(mask_path)
                    rad_stat = stat(masker.fit_transform(rad_path))
                    extracted_rad_stat[sub_id] = rad_stat
                    db.loc[idx, rad_column] = rad_stat
                else:
                    db.loc[idx, rad_column] = None
        elif not include_chim:
            db.loc[idx, rad_column] = None

    db = db[db[rad_column].notnull()]
    return db
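A hedged usage sketch, assuming df is a pandas DataFrame with a 'patient' column and a binary 'radiation' column, and that rad_dir follows the layout described in the docstring:

# Hypothetical usage of extract_brain_rad(); df, the column name, and the paths are assumptions.
import numpy as np

df = extract_brain_rad(df, rad_column='radiation', rad_dir='/data/rad_maps',
                       stat=np.mean, include_chim=False)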
Example 9: transform
# Required module: from nilearn.input_data import NiftiMasker [as alias]
# Or: from nilearn.input_data.NiftiMasker import fit_transform [as alias]
def transform(self, imgs, confounds=None):
    """
    Parameters
    ----------
    imgs: list of Niimg-like objects
    """
    self._check_fitted()
    if self.smoothing_fwhm:
        imgs = smooth_img(imgs, self.smoothing_fwhm)
    imgs = [_utils.check_niimg_3d(img) for img in imgs]

    for i, roi in enumerate(self.mask_img_):
        masker = NiftiMasker(mask_img=roi)
        x = masker.fit_transform(imgs)
        if self.extract_funcs is not None:
            x = np.array([FDICT[f][0](x, **FDICT[f][1]) for f in self.extract_funcs])
        if i == 0:
            X = x
        else:
            X = np.concatenate((X, x), axis=0)

    return X.swapaxes(0, 1)
Example 10: multivariate_similarity
# Required module: from nilearn.input_data import NiftiMasker [as alias]
# Or: from nilearn.input_data.NiftiMasker import fit_transform [as alias]
def multivariate_similarity(self, images, method='ols'):
    """ Predict spatial distribution of Brain_Data() instance from a linear combination of other Brain_Data() instances or Nibabel images

    Args:
        self: Brain_Data instance of data to be applied
        images: Brain_Data instance of weight map

    Returns:
        out: dictionary of regression statistics in Brain_Data instances {'beta','t','p','df','residual'}
    """
    ## Notes: Should add ridge, lasso, and elastic net options

    if len(self.shape()) > 1:
        raise ValueError("This method can only decompose a single brain image.")

    if not isinstance(images, Brain_Data):
        raise ValueError("Images are not a Brain_Data instance")
    dim = images.shape()

    # Check that the masks are the same for each dataset and, if not, create an intersection mask
    # This might be handy code for a new Brain_Data method
    if np.sum(self.nifti_masker.mask_img.get_data() == 1) != np.sum(images.nifti_masker.mask_img.get_data() == 1):
        new_mask = intersect_masks([self.nifti_masker.mask_img, images.nifti_masker.mask_img],
                                   threshold=1, connected=False)
        new_nifti_masker = NiftiMasker(mask_img=new_mask)
        data2 = new_nifti_masker.fit_transform(self.to_nifti())
        image2 = new_nifti_masker.fit_transform(images.to_nifti())
    else:
        data2 = self.data
        image2 = images.data

    # Add intercept and transpose
    image2 = np.vstack((np.ones(image2.shape[1]), image2)).T

    # Calculate pattern expression
    if method == 'ols':
        b = np.dot(np.linalg.pinv(image2), data2)
        res = data2 - np.dot(image2, b)
        sigma = np.std(res, axis=0)
        stderr = np.dot(np.matrix(np.diagonal(np.linalg.inv(np.dot(image2.T, image2))) ** .5).T, np.matrix(sigma))
        t_out = b / stderr
        df = image2.shape[0] - image2.shape[1]
        p = 2 * (1 - t.cdf(np.abs(t_out), df))

    return {'beta': b, 't': t_out, 'p': p, 'df': df, 'sigma': sigma, 'residual': res}
Example 11: apply_mask
# Required module: from nilearn.input_data import NiftiMasker [as alias]
# Or: from nilearn.input_data.NiftiMasker import fit_transform [as alias]
def apply_mask(data=None, weight_map=None, mask=None, method='dot_product', save_output=False, output_dir='.'):
    """ Apply Nifti weight map to Nifti Images.

    Args:
        data: nibabel instance of data to be applied
        weight_map: nibabel instance of weight map
        mask: binary nibabel mask
        method: type of pattern expression (e.g., 'dot_product', 'correlation')
        save_output: Boolean indicating whether or not to save output to csv file.
        output_dir: Directory to use for writing all outputs
        **kwargs: Additional parameters to pass

    Returns:
        pexp: Outputs a vector of pattern expression values
    """
    if mask is not None:
        if type(mask) is not nib.nifti1.Nifti1Image:
            raise ValueError("Mask is not a nibabel instance")
    else:
        mask = nib.load(os.path.join(resource_dir, 'MNI152_T1_2mm_brain_mask_dil.nii.gz'))

    if type(data) is not nib.nifti1.Nifti1Image:
        raise ValueError("Data is not a nibabel instance")

    nifti_masker = NiftiMasker(mask_img=mask)
    data_masked = nifti_masker.fit_transform(data)

    if type(weight_map) is not nib.nifti1.Nifti1Image:
        raise ValueError("Weight_map is not a nibabel instance")
    weight_map_masked = nifti_masker.fit_transform(weight_map)

    # Calculate pattern expression
    if method == 'dot_product':
        pexp = np.dot(data_masked, np.transpose(weight_map_masked)).squeeze()
    elif method == 'correlation':
        pexp = pearson(data_masked, weight_map_masked)

    if save_output:
        np.savetxt(os.path.join(output_dir, "Pattern_Expression_" + method + ".csv"), pexp, delimiter=",")

    return pexp
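A minimal usage sketch for the standalone apply_mask above; the file names are placeholders:

# Hypothetical usage of apply_mask(); paths are placeholders.
import nibabel as nib

data_img = nib.load('beta_maps.nii.gz')
weight_img = nib.load('signature_weights.nii.gz')
pexp = apply_mask(data=data_img, weight_map=weight_img, method='correlation')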
Example 12: significant_signal
# Required module: from nilearn.input_data import NiftiMasker [as alias]
# Or: from nilearn.input_data.NiftiMasker import fit_transform [as alias]
def significant_signal(data_path,
                       substitution={},
                       mask_path='',
                       exclude_ones=False,
                       ):
    """Return the mean and median inverse logarithm of a p-value map.

    Parameters
    ----------
    data_path : str
        Path to a p-value map in NIfTI format.
    mask_path : str
        Path to a region of interest map in NIfTI format.
        THIS IS ALMOST ALWAYS REQUIRED, as NIfTI statistic images populate the whole 3D circumscribed space around your structure of interest,
        and commonly assign null values to the background.
        In an inverse logarithm computation, null corresponds to infinity, which can considerably bias the evaluation.
    substitution : dict
        Dictionary whose keys are format identifiers present in `data_path` and whose values are strings.

    Returns
    -------
    mean : float
    median : float
    """
    if substitution:
        data_path = data_path.format(**substitution)
    data_path = path.abspath(path.expanduser(data_path))
    try:
        img = nib.load(data_path)
    except FileNotFoundError:
        return float('NaN'), float('NaN')
    if mask_path:
        mask_path = path.abspath(path.expanduser(mask_path))
        masker = NiftiMasker(mask_img=mask_path)
        masked_data = masker.fit_transform(img).T
        data = masked_data[~np.isnan(masked_data)]
    else:
        data = img.get_data()
        data = data[~np.isnan(data)]
    # We interpret zero as the lowest p-value, and conservatively estimate it to be equal to just under half of the smallest value in the defined range
    nonzero = data[np.nonzero(data)]
    data_min = np.min(nonzero)
    data_min = data_min * 0.49
    data[data == 0] = data_min
    if exclude_ones:
        data = data[data != 1]
    data = -np.log10(data)
    # We use np.ma.median() because life is complicated:
    # https://github.com/numpy/numpy/issues/7330
    median = np.ma.median(data, axis=None)
    mean = np.mean(data)
    return mean, median
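A hedged call sketch for significant_signal; the paths and substitution key are placeholders:

# Hypothetical usage of significant_signal(); paths are placeholders.
mean, median = significant_signal(
    '~/stats/{subject}_pvalues.nii.gz',
    substitution={'subject': 'sub-01'},
    mask_path='~/rois/my_roi.nii.gz',
)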
Example 13: _vectorize_nii
# Required module: from nilearn.input_data import NiftiMasker [as alias]
# Or: from nilearn.input_data.NiftiMasker import fit_transform [as alias]
def _vectorize_nii(in_data_file, mask_file, parcellation_path, fwhm):
    from nilearn.input_data import NiftiMasker, NiftiLabelsMasker
    import nibabel as nib

    if parcellation_path is None:
        masker = NiftiMasker(mask_img=mask_file, smoothing_fwhm=fwhm)
    else:
        masker = NiftiLabelsMasker(labels_img=parcellation_path, smoothing_fwhm=fwhm)

    vectorized_data = masker.fit_transform(in_data_file)
    return vectorized_data, masker
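A short usage sketch for _vectorize_nii; the file names are placeholders:

# Hypothetical usage of _vectorize_nii(); file names are placeholders.
vec, masker = _vectorize_nii('func.nii.gz', 'brain_mask.nii.gz',
                             parcellation_path=None, fwhm=6)
# vec has shape (n_volumes, n_voxels); passing a parcellation path instead
# would yield one time series per label via NiftiLabelsMasker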
Example 14: map_threshold
# Required module: from nilearn.input_data import NiftiMasker [as alias]
# Or: from nilearn.input_data.NiftiMasker import fit_transform [as alias]
def map_threshold(stat_img, mask_img, threshold, height_control='fpr',
                  cluster_threshold=0):
    """ Threshold the provided map

    Parameters
    ----------
    stat_img : Niimg-like object,
        statistical image (presumably in z scale)
    mask_img : Niimg-like object,
        mask image
    threshold : float,
        cluster forming threshold (either a p-value or z-scale value)
    height_control : string
        false positive control meaning of cluster forming
        threshold: 'fpr'|'fdr'|'bonferroni'|'none'
    cluster_threshold : float, optional
        cluster size threshold

    Returns
    -------
    thresholded_map : Nifti1Image,
        the stat_map thresholded at the prescribed voxel- and cluster-level
    """
    # Masking
    masker = NiftiMasker(mask_img=mask_img)
    stats = np.ravel(masker.fit_transform(stat_img))
    n_voxels = np.size(stats)

    # Thresholding
    if height_control == 'fpr':
        z_th = norm.isf(threshold)
    elif height_control == 'fdr':
        z_th = fdr_threshold(stats, threshold)
    elif height_control == 'bonferroni':
        z_th = norm.isf(threshold / n_voxels)
    else:  # Brute-force thresholding
        z_th = threshold
    stats *= (stats > z_th)

    stat_map = masker.inverse_transform(stats).get_data()

    # Extract connected components above threshold
    label_map, n_labels = label(stat_map > z_th)
    labels = label_map[(masker.mask_img_.get_data() > 0)]

    for label_ in range(1, n_labels + 1):
        if np.sum(labels == label_) < cluster_threshold:
            stats[labels == label_] = 0

    return masker.inverse_transform(stats)
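A hedged usage sketch for map_threshold, assuming the helpers it relies on (norm, fdr_threshold, label) are already imported as in the source project; the images are placeholders:

# Hypothetical usage of map_threshold(); images are placeholders.
thresholded_img = map_threshold('zmap.nii.gz', 'brain_mask.nii.gz',
                                threshold=0.001, height_control='fpr',
                                cluster_threshold=10)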
Example 15: preprocess_varpar
# Required module: from nilearn.input_data import NiftiMasker [as alias]
# Or: from nilearn.input_data.NiftiMasker import fit_transform [as alias]
def preprocess_varpar(num, subj, subj_dir, **kwargs):
    from nistats.design_matrix import make_design_matrix
    from nistats.first_level_model import run_glm

    bold_path = 'BOLD/task001_run00%i/bold_dico_bold7Tp1_to_subjbold7Tp1.nii.gz' % (num + 1)
    bold_path = os.path.join(DATA_DIR, subj, bold_path)
    mask = os.path.join(DATA_DIR, subj, 'templates', 'bold7Tp1', 'brain_mask.nii.gz')
    bold = load(bold_path)

    masker = NiftiMasker(mask)
    data = masker.fit_transform(bold)
    dmat = make_design_matrix(np.arange(data.shape[0]) * TR, hrf_model='fir', drift_order=5,
                              **kwargs)
    labels, results = run_glm(data, dmat, noise_model='ols', verbose=1)
    img = masker.inverse_transform(StandardScaler().fit_transform(results[0.0].resid))
    # return StandardScaler().fit_transform(results[0.0].resid)
    save(img, os.path.join(subj_dir, 'run00%i.nii.gz' % num))