This article collects typical usage examples of the Python method nilearn.input_data.NiftiMasker.inverse_transform. If you have been wondering what NiftiMasker.inverse_transform does, how to call it, or what real code that uses it looks like, the curated examples below should help. You can also explore further usage examples of its containing class, nilearn.input_data.NiftiMasker.
The following presents 15 code examples of the NiftiMasker.inverse_transform method, sorted by popularity by default.
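Before diving into the examples, here is a minimal, self-contained sketch of the round trip that inverse_transform completes: fit_transform flattens the voxels inside a mask into a 2D (samples x voxels) array, and inverse_transform maps such an array back into a Nifti image with the original geometry. The synthetic image, mask, and shapes are illustrative assumptions, not taken from any example below; the import path nilearn.input_data matches these examples (newer nilearn releases expose the same class as nilearn.maskers.NiftiMasker).

import numpy as np
import nibabel as nib
from nilearn.input_data import NiftiMasker

rng = np.random.RandomState(0)
affine = np.eye(4)
func_img = nib.Nifti1Image(rng.normal(size=(8, 8, 8, 10)), affine)  # fake 4D fMRI
mask_img = nib.Nifti1Image(np.ones((8, 8, 8), dtype=np.int8), affine)

masker = NiftiMasker(mask_img=mask_img)
data_2d = masker.fit_transform(func_img)        # (10 timepoints, 512 voxels)
round_trip = masker.inverse_transform(data_2d)  # back to a 4D Nifti1Image
print(data_2d.shape, round_trip.shape)          # (10, 512) (8, 8, 8, 10)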
Example 1: map_threshold
# Required imports: from nilearn.input_data import NiftiMasker [as alias]
# Or alternatively: from nilearn.input_data.NiftiMasker import inverse_transform [as alias]
def map_threshold(stat_img, mask_img, threshold, height_control='fpr',
                  cluster_threshold=0):
    """Threshold the provided map.

    Parameters
    ----------
    stat_img : Niimg-like object
        Statistical image (presumably in z scale).
    mask_img : Niimg-like object
        Mask image.
    threshold : float
        Cluster-forming threshold (either a p-value or a z-scale value).
    height_control : string
        False-positive control meaning of the cluster-forming threshold:
        'fpr' | 'fdr' | 'bonferroni' | 'none'.
    cluster_threshold : float, optional
        Cluster size threshold.

    Returns
    -------
    thresholded_map : Nifti1Image
        The stat_map thresholded at the prescribed voxel and cluster levels.
    """
    # Masking
    masker = NiftiMasker(mask_img=mask_img)
    stats = np.ravel(masker.fit_transform(stat_img))
    n_voxels = np.size(stats)
    # Thresholding: convert the requested control level to a z-threshold
    if height_control == 'fpr':
        z_th = norm.isf(threshold)
    elif height_control == 'fdr':
        z_th = fdr_threshold(stats, threshold)
    elif height_control == 'bonferroni':
        z_th = norm.isf(threshold / n_voxels)
    else:  # Brute-force thresholding
        z_th = threshold
    stats *= (stats > z_th)
    stat_map = masker.inverse_transform(stats).get_data()
    # Extract connected components above threshold
    label_map, n_labels = label(stat_map > z_th)
    labels = label_map[masker.mask_img_.get_data() > 0]
    # Zero out clusters smaller than the cluster-size threshold
    for label_ in range(1, n_labels + 1):
        if np.sum(labels == label_) < cluster_threshold:
            stats[labels == label_] = 0
    return masker.inverse_transform(stats)
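A hypothetical call, assuming map_threshold above is in scope together with its implied imports (numpy as np, scipy.stats.norm, scipy.ndimage.label, and a nistats-style fdr_threshold); the random z-map and the chosen thresholds are purely illustrative:

import numpy as np
import nibabel as nib

rng = np.random.RandomState(42)
affine = np.eye(4)
z_map = nib.Nifti1Image(rng.normal(size=(10, 10, 10)), affine)        # fake z-map
mask = nib.Nifti1Image(np.ones((10, 10, 10), dtype=np.int8), affine)

# Control the voxel-level false-positive rate at p < 0.001 and keep
# only clusters of at least 10 voxels
thresholded = map_threshold(z_map, mask, threshold=0.001,
                            height_control='fpr', cluster_threshold=10)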
Example 2: make_ttest
# Required imports: from nilearn.input_data import NiftiMasker [as alias]
# Or alternatively: from nilearn.input_data.NiftiMasker import inverse_transform [as alias]
def make_ttest(reg1, reg2):
    masker = NiftiMasker(nib.load(MASK_FILE), standardize=False)
    masker.fit()
    subjects = [1, 2, 3, 5, 6, 7, 8, 9, 11, 12, 13, 14, 15, 16, 17, 18, 19, 20]
    # Fisher z-transform the per-subject maps before the paired t-test
    a = np.arctanh(join_all_subjects(reg1, subjects, masker))
    b = np.arctanh(join_all_subjects(reg2, subjects, masker))
    t, prob = ttest_rel(a, b)
    # Project the voxelwise t- and p-values back into brain space
    tt = masker.inverse_transform(t)
    pp = masker.inverse_transform(prob)
    return tt, pp
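The np.arctanh calls apply the Fisher z-transform, which makes correlation values approximately normal before the paired t-test. MASK_FILE and join_all_subjects are module-level names not shown in the scrape; the sketch below reproduces just the core pattern on synthetic data:

import numpy as np
import nibabel as nib
from scipy.stats import ttest_rel
from nilearn.input_data import NiftiMasker

rng = np.random.RandomState(0)
mask_img = nib.Nifti1Image(np.ones((6, 6, 6), dtype=np.int8), np.eye(4))
masker = NiftiMasker(mask_img=mask_img).fit()

a = np.arctanh(rng.uniform(-0.9, 0.9, size=(18, 216)))  # 18 subjects x 216 voxels
b = np.arctanh(rng.uniform(-0.9, 0.9, size=(18, 216)))
t, prob = ttest_rel(a, b)            # voxelwise paired t-test across subjects
t_img = masker.inverse_transform(t)  # t- and p-values back in brain space
p_img = masker.inverse_transform(prob)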
Example 3: _run_interface
# Required imports: from nilearn.input_data import NiftiMasker [as alias]
# Or alternatively: from nilearn.input_data.NiftiMasker import inverse_transform [as alias]
def _run_interface(self, runtime):
    from nilearn.input_data import NiftiMasker, NiftiLabelsMasker
    from nipype.utils.filemanip import split_filename
    import nibabel as nib
    import os

    functional_filename = self.inputs.in_file
    atlas_filename = self.inputs.atlas_filename
    mask_filename = self.inputs.mask_filename

    # Extract the ROI signals
    masker = NiftiLabelsMasker(labels_img=atlas_filename,
                               background_label=0,
                               standardize=True,
                               detrend=True,
                               verbose=1)
    time_series = masker.fit_transform(functional_filename)

    # Regress the first ROI's signal out of the voxelwise time series
    nifti_masker = NiftiMasker(mask_img=mask_filename)
    masked_data = nifti_masker.fit_transform(functional_filename,
                                             confounds=time_series[..., 0])
    masked_img = nifti_masker.inverse_transform(masked_data)

    # Save the result to disk
    outputs = self._outputs().get()
    fname = self.inputs.in_file
    _, base, _ = split_filename(fname)
    nib.save(masked_img, os.path.abspath(base + '_regressed.nii.gz'))
    return runtime
Example 4: preprocess
# Required imports: from nilearn.input_data import NiftiMasker [as alias]
# Or alternatively: from nilearn.input_data.NiftiMasker import inverse_transform [as alias]
def preprocess(num, subj, subj_dir, subj_warp_dir, force_warp=False):
    bold_path = 'BOLD/task001_run00%i/bold_dico_bold7Tp1_to_subjbold7Tp1.nii.gz' % (num + 1)
    bold_path = os.path.join(DATA_DIR, subj, bold_path)
    template_path = os.path.join(DATA_DIR, 'templates', 'grpbold7Tp1', 'brain.nii.gz')
    warp_path = os.path.join(DATA_DIR, subj, 'templates', 'bold7Tp1', 'in_grpbold7Tp1', 'subj2tmpl_warp.nii.gz')
    output_path = os.path.join(subj_warp_dir, 'run00%i.nii.gz' % num)

    if force_warp or not os.path.exists(output_path):
        print('Warping image #%i...' % num)
        subprocess.call(['fsl5.0-applywarp', '-i', bold_path, '-o', output_path,
                         '-r', template_path, '-w', warp_path, '-d', 'float'])
    else:
        print('Reusing cached warp image #%i' % num)

    print('Loading image #%i...' % num)
    bold = load(output_path)
    masker = NiftiMasker(load(MASK_FILE))
    # masker = NiftiMasker(load(MASK_FILE), detrend=True, smoothing_fwhm=4.0,
    #                      high_pass=0.01, t_r=2.0, standardize=True)
    masker.fit()

    print('Removing confounds from image #%i...' % num)
    data = masker.transform(bold, confounds(num, subj))

    print('Detrending image #%i...' % num)
    # Subtract a Savitzky-Golay smoothed version of the signal to remove drift
    filtered = np.float32(savgol_filter(data, 61, 5, axis=0))
    img = masker.inverse_transform(data - filtered)

    print('Smoothing image #%i...' % num)
    img = image.smooth_img(img, 4.0)

    print('Saving image #%i...' % num)
    save(img, os.path.join(subj_dir, 'run00%i.nii.gz' % num))
    print('Finished with image #%i' % num)
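The detrending step above subtracts a Savitzky-Golay polynomial fit from the data, so only fluctuations faster than the fitted trend survive and are passed to inverse_transform. A toy illustration of that idea, with the filter parameters copied from the example and the signal invented:

import numpy as np
from scipy.signal import savgol_filter

t = np.arange(200, dtype=float)
signal = np.sin(t / 7.0) + 0.01 * t    # oscillation plus a slow drift
trend = savgol_filter(signal, 61, 5)   # window of 61 samples, polyorder 5
detrended = signal - trend             # drift removed, oscillation kept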
Example 5: nilearn_denoise
# Required imports: from nilearn.input_data import NiftiMasker [as alias]
# Or alternatively: from nilearn.input_data.NiftiMasker import inverse_transform [as alias]
def nilearn_denoise(in_file, brain_mask, wm_mask, csf_mask,
                    motreg_file, outlier_file,
                    bandpass, tr):
    """Clean time series using Nilearn's high_variance_confounds to extract
    CompCor regressors, and NiftiMasker for regression of all nuisance
    regressors, detrending, normalization and bandpass filtering.
    """
    import numpy as np
    import nibabel as nb
    import os
    from nilearn.image import high_variance_confounds
    from nilearn.input_data import NiftiMasker
    from nipype.utils.filemanip import split_filename

    # Reload niftis with rounded affines so that nilearn doesn't complain
    wm_nii = nb.Nifti1Image(nb.load(wm_mask).get_data(),
                            np.around(nb.load(wm_mask).get_affine(), 2),
                            nb.load(wm_mask).get_header())
    csf_nii = nb.Nifti1Image(nb.load(csf_mask).get_data(),
                             np.around(nb.load(csf_mask).get_affine(), 2),
                             nb.load(csf_mask).get_header())
    time_nii = nb.Nifti1Image(nb.load(in_file).get_data(),
                              np.around(nb.load(in_file).get_affine(), 2),
                              nb.load(in_file).get_header())

    # Infer the length of the confound array (not ideal)
    confound_len = nb.load(in_file).get_data().shape[3]

    # Create one binary spike regressor per outlier volume
    outlier_regressor = np.empty((confound_len, 1))
    try:
        outlier_val = np.genfromtxt(outlier_file)
    except IOError:
        outlier_val = np.empty((0))
    for index in np.atleast_1d(outlier_val):
        outlier_vector = np.zeros((confound_len, 1))
        outlier_vector[int(index)] = 1  # indices are read in as floats
        outlier_regressor = np.hstack((outlier_regressor, outlier_vector))
    outlier_regressor = outlier_regressor[:, 1:]

    # Load motion regressors
    motion_regressor = np.genfromtxt(motreg_file)

    # Extract high-variance confounds in wm/csf masks from motion-corrected data
    wm_regressor = high_variance_confounds(time_nii, mask_img=wm_nii, detrend=True)
    csf_regressor = high_variance_confounds(time_nii, mask_img=csf_nii, detrend=True)

    # Create a NiftiMasker for denoising
    denoiser = NiftiMasker(mask_img=brain_mask, standardize=True, detrend=True,
                           high_pass=bandpass[1], low_pass=bandpass[0], t_r=tr)

    # Denoise and project the cleaned data back to an image
    confounds = np.hstack((outlier_regressor, wm_regressor, csf_regressor,
                           motion_regressor))
    denoised_data = denoiser.fit_transform(in_file, confounds=confounds)
    denoised_img = denoiser.inverse_transform(denoised_data)

    # Save
    _, base, _ = split_filename(in_file)
    img_fname = base + '_denoised.nii.gz'
    nb.save(denoised_img, img_fname)
    confound_fname = os.path.join(os.getcwd(), "all_confounds.txt")
    np.savetxt(confound_fname, confounds, fmt="%.10f")
    return os.path.abspath(img_fname), confound_fname
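A hypothetical invocation; every path is a placeholder. Note that the masker above reads bandpass[1] as high_pass and bandpass[0] as low_pass, so the tuple is ordered (low_pass, high_pass):

denoised_path, confounds_path = nilearn_denoise(
    in_file='rest_mc.nii.gz',        # motion-corrected 4D EPI
    brain_mask='brain_mask.nii.gz',
    wm_mask='wm_mask.nii.gz',
    csf_mask='csf_mask.nii.gz',
    motreg_file='motion.par',        # text file of motion regressors
    outlier_file='outliers.txt',     # one outlier volume index per line
    bandpass=(0.1, 0.01),            # (low_pass, high_pass) in Hz
    tr=2.0)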
Example 6: preprocess_varpar
# Required imports: from nilearn.input_data import NiftiMasker [as alias]
# Or alternatively: from nilearn.input_data.NiftiMasker import inverse_transform [as alias]
def preprocess_varpar(num, subj, subj_dir, **kwargs):
    from nistats.design_matrix import make_design_matrix
    from nistats.first_level_model import run_glm

    bold_path = 'BOLD/task001_run00%i/bold_dico_bold7Tp1_to_subjbold7Tp1.nii.gz' % (num + 1)
    bold_path = os.path.join(DATA_DIR, subj, bold_path)
    mask = os.path.join(DATA_DIR, subj, 'templates', 'bold7Tp1', 'brain_mask.nii.gz')
    bold = load(bold_path)

    masker = NiftiMasker(mask)
    data = masker.fit_transform(bold)

    # FIR design with polynomial drift terms
    dmat = make_design_matrix(np.arange(data.shape[0]) * TR, hrf_model='fir',
                              drift_order=5, **kwargs)
    labels, results = run_glm(data, dmat, noise_model='ols', verbose=1)

    # Standardize the GLM residuals and project them back into brain space
    img = masker.inverse_transform(StandardScaler().fit_transform(results[0.0].resid))
    # return StandardScaler().fit_transform(results[0.0].resid)
    save(img, os.path.join(subj_dir, 'run00%i.nii.gz' % num))
Example 7: residualize_imgs
# Required imports: from nilearn.input_data import NiftiMasker [as alias]
# Or alternatively: from nilearn.input_data.NiftiMasker import inverse_transform [as alias]
def residualize_imgs(in_file, mask_file, confounds_file):
    '''
    * takes a 4d file, mask file & confounds as np.array
    * regresses out confounds (only within mask)
    * writes the residualized nii
    '''
    from nilearn.input_data import NiftiMasker
    import os
    import numpy as np

    confounds = np.loadtxt(confounds_file)
    masker = NiftiMasker(mask_img=mask_file)
    brain_data_2d = masker.fit_transform(in_file, confounds=confounds)
    out_file = os.path.join(os.getcwd(), 'residualized_data.nii.gz')
    out_img = masker.inverse_transform(brain_data_2d)
    out_img.to_filename(out_file)
    return out_file
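A hypothetical usage that first builds throwaway inputs on disk; the filenames and shapes are assumptions:

import numpy as np
import nibabel as nib

rng = np.random.RandomState(0)
nib.save(nib.Nifti1Image(rng.normal(size=(5, 5, 5, 40)), np.eye(4)), 'func.nii.gz')
nib.save(nib.Nifti1Image(np.ones((5, 5, 5), dtype=np.int8), np.eye(4)), 'mask.nii.gz')
np.savetxt('confounds.txt', rng.normal(size=(40, 3)))  # 40 timepoints x 3 confounds

residualized = residualize_imgs('func.nii.gz', 'mask.nii.gz', 'confounds.txt')
print(residualized)  # .../residualized_data.nii.gz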
Example 8: test_dict_learning
# Required imports: from nilearn.input_data import NiftiMasker [as alias]
# Or alternatively: from nilearn.input_data.NiftiMasker import inverse_transform [as alias]
def test_dict_learning():
    data, mask_img, components, rng = _make_canica_test_data(n_subjects=8)
    mask = NiftiMasker(mask_img=mask_img).fit()
    dict_init = mask.inverse_transform(components)
    dict_learning = DictLearning(n_components=4, random_state=0,
                                 dict_init=dict_init,
                                 mask=mask_img,
                                 smoothing_fwhm=0., alpha=1)
    dict_learning_auto_init = DictLearning(n_components=4, random_state=0,
                                           mask=mask_img,
                                           smoothing_fwhm=0., n_epochs=10,
                                           alpha=1)
    maps = {}
    for estimator in [dict_learning, dict_learning_auto_init]:
        estimator.fit(data)
        maps[estimator] = estimator.masker_.\
            inverse_transform(estimator.components_).get_data()
        maps[estimator] = np.reshape(np.rollaxis(maps[estimator], 3, 0),
                                     (4, 400))
    for this_dict_learning in [dict_learning]:
        these_maps = maps[this_dict_learning]
        # Normalize the ground-truth components and the recovered maps to
        # unit norm, so that their dot product is a cosine similarity
        S = np.sqrt(np.sum(components ** 2, axis=1))
        S[S == 0] = 1
        components /= S[:, np.newaxis]
        S = np.sqrt(np.sum(these_maps ** 2, axis=1))
        S[S == 0] = 1
        these_maps /= S[:, np.newaxis]
        K = np.abs(components.dot(these_maps.T))
        recovered_maps = np.sum(K > 0.9)
        assert recovered_maps >= 2
    # Smoke test n_epochs > 1
    dict_learning = DictLearning(n_components=4, random_state=0,
                                 dict_init=dict_init,
                                 mask=mask_img,
                                 smoothing_fwhm=0., n_epochs=2, alpha=1)
    dict_learning.fit(data)
Example 9: preprocess
# Required imports: from nilearn.input_data import NiftiMasker [as alias]
# Or alternatively: from nilearn.input_data.NiftiMasker import inverse_transform [as alias]
def preprocess(num, subj, subj_dir, subj_warp_dir, force_warp=False, group_mode=False):
    bold_path = 'BOLD/task001_run00%i/bold_dico_bold7Tp1_to_subjbold7Tp1.nii.gz' % (num + 1)
    bold_path = os.path.join(DATA_DIR, subj, bold_path)
    template_path = os.path.join(DATA_DIR, 'templates', 'grpbold7Tp1', 'brain.nii.gz')
    warp_path = os.path.join(DATA_DIR, subj, 'templates', 'bold7Tp1', 'in_grpbold7Tp1', 'subj2tmpl_warp.nii.gz')
    output_path = os.path.join(subj_warp_dir, 'run00%i.nii.gz' % num)

    if group_mode:
        if force_warp or not os.path.exists(output_path):
            print('Warping image #%i...' % num)
            subprocess.call(['fsl5.0-applywarp', '-i', bold_path, '-o', output_path,
                             '-r', template_path, '-w', warp_path, '-d', 'float'])
        else:
            print('Reusing cached warp image #%i' % num)
        mask = None
        bold = output_path
    else:
        mask = os.path.join(DATA_DIR, subj, 'templates', 'bold7Tp1', 'brain_mask.nii.gz')
        bold = bold_path

    # Standardize and detrend within the mask, then project back to image space
    masker = NiftiMasker(mask, standardize=True, detrend=True)
    img = masker.inverse_transform(masker.fit_transform(bold))

    print('Saving image #%i...' % num)
    save(img, os.path.join(subj_dir, 'run00%i.nii.gz' % num))
    print('Finished with image #%i' % num)
Example 10: FastICA
# Required imports: from nilearn.input_data import NiftiMasker [as alias]
# Or alternatively: from nilearn.input_data.NiftiMasker import inverse_transform [as alias]
### Apply ICA #################################################################
from sklearn.decomposition import FastICA
n_components = 20
ica = FastICA(n_components=n_components, random_state=42)
components_masked = ica.fit_transform(data_masked.T).T
# Normalize estimated components, for thresholding to make sense
components_masked -= components_masked.mean(axis=0)
components_masked /= components_masked.std(axis=0)
# Threshold
components_masked[components_masked < .8] = 0
# Now invert the masking operation, going back to a full 3D
# representation
component_img = masker.inverse_transform(components_masked)
### Visualize the results #####################################################
# Show some interesting components
import matplotlib.pyplot as plt
from nilearn import image
from nilearn.plotting import plot_stat_map
# Use the mean as a background
mean_img = image.mean_img(func_filename)
plot_stat_map(image.index_img(component_img, 5), mean_img)
plot_stat_map(image.index_img(component_img, 12), mean_img)
plt.show()
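Rather than hand-picking components 5 and 12, all twenty can be inspected in a loop; a small optional extension, not part of the scraped example:

for i in range(n_components):
    plot_stat_map(image.index_img(component_img, i), mean_img,
                  title='IC %d' % i)
plt.show()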
Example 11: permuted_ols
# Required imports: from nilearn.input_data import NiftiMasker [as alias]
# Or alternatively: from nilearn.input_data.NiftiMasker import inverse_transform [as alias]
grouped_conditions_encoded[2 * s + 1] = conditions_encoded[
    session_face_mask][0]

##############################################################################
# Perform massively univariate analysis with permuted OLS
#
# We use a two-sided t-test to compute p-values, but we keep track of the
# effect sign to add it back at the end and thus observe the signed effect
from nilearn.mass_univariate import permuted_ols

neg_log_pvals, t_scores_original_data, _ = permuted_ols(
    grouped_conditions_encoded, grouped_fmri_masked,
    # + intercept as a covariate by default
    n_perm=10000, two_sided_test=True,
    n_jobs=1)  # can be changed to use more CPUs
signed_neg_log_pvals = neg_log_pvals * np.sign(t_scores_original_data)
signed_neg_log_pvals_unmasked = nifti_masker.inverse_transform(
    signed_neg_log_pvals)

##############################################################################
# scikit-learn F-scores for comparison
#
# An F-test does not allow observing the effect sign (it is a pure two-sided test)
from nilearn._utils.fixes import f_regression

_, pvals_bonferroni = f_regression(
    grouped_fmri_masked,
    grouped_conditions_encoded)  # f_regression implicitly adds an intercept
pvals_bonferroni *= fmri_masked.shape[1]  # Bonferroni correction
pvals_bonferroni[np.isnan(pvals_bonferroni)] = 1
pvals_bonferroni[pvals_bonferroni > 1] = 1
neg_log_pvals_bonferroni = -np.log10(pvals_bonferroni)
neg_log_pvals_bonferroni_unmasked = nifti_masker.inverse_transform(
    neg_log_pvals_bonferroni)
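One way to look at the resulting signed map; the threshold of 1.3 (that is, p < 0.05 on the -log10 scale) is an assumption for display, not from the source:

from nilearn.plotting import plot_stat_map
plot_stat_map(signed_neg_log_pvals_unmasked, threshold=1.3,
              title='Signed -log10(p), permuted OLS')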
Example 12: NiftiMasker
# Required imports: from nilearn.input_data import NiftiMasker [as alias]
# Or alternatively: from nilearn.input_data.NiftiMasker import inverse_transform [as alias]
n_subjects = 100  # more subjects requires more memory

### Load Oasis dataset ########################################################
dataset_files = datasets.fetch_oasis_vbm(n_subjects=n_subjects)
age = dataset_files.ext_vars['age'].astype(float)

### Preprocess data ###########################################################
nifti_masker = NiftiMasker(
    standardize=False,
    smoothing_fwhm=2,
    memory='nilearn_cache')  # cache options
# remove features with too low between-subject variance
gm_maps_masked = nifti_masker.fit_transform(dataset_files.gray_matter_maps)
gm_maps_masked[:, gm_maps_masked.var(0) < 0.01] = 0.
# final masking: project back to image space and re-mask
new_images = nifti_masker.inverse_transform(gm_maps_masked)
gm_maps_masked = nifti_masker.fit_transform(new_images)
n_samples, n_features = gm_maps_masked.shape
print(n_samples, "subjects,", n_features, "features")

### Prediction with SVR #######################################################
print("ANOVA + SVR")
### Define the prediction function to be used.
# Here we use Support Vector Regression with a linear kernel
from sklearn.svm import SVR
svr = SVR(kernel='linear')

### Dimension reduction
from sklearn.feature_selection import SelectKBest, f_regression
# Here we use a classical univariate feature selection based on F-test,
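The scrape is truncated mid-comment here. A conventional continuation, assumed rather than recovered from the source, pipes the F-test feature selection into the SVR:

from sklearn.pipeline import Pipeline
feature_selection = SelectKBest(f_regression, k=2000)  # k chosen for illustration
anova_svr = Pipeline([('anova', feature_selection), ('svr', svr)])
anova_svr.fit(gm_maps_masked, age)
age_pred = anova_svr.predict(gm_maps_masked)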
Example 13: WardAgglomeration
# Required imports: from nilearn.input_data import NiftiMasker [as alias]
# Or alternatively: from nilearn.input_data.NiftiMasker import inverse_transform [as alias]
mask = masker.mask_img_.get_data().astype(np.bool)
shape = mask.shape
connectivity = image.grid_to_graph(n_x=shape[0], n_y=shape[1],
                                   n_z=shape[2], mask=mask)

# Computing the ward for the first time, this is long...
start = time.time()
ward = WardAgglomeration(n_clusters=1000, connectivity=connectivity,
                         memory='nilearn_cache')
ward.fit(pet_masked[0])
print("Ward agglomeration 1000 clusters: %.2fs" % (time.time() - start))

labels = ward.labels_ + 1
labels_img = masker.inverse_transform(labels)
first_plot = plot_roi(labels_img, pet_img[0], title="Ward parcellation",
                      display_mode='xz')
# labels_img is a Nifti1Image object; it can be saved to file with:
labels_img.to_filename('parcellation.nii')

##################################################################
# Compute the ward with more clusters; this should be faster as we
# are reusing the caching mechanism
start = time.time()
ward = WardAgglomeration(n_clusters=2000, connectivity=connectivity,
                         memory='nilearn_cache')
ward.fit(fmri_masked)
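The scrape cuts off after the second fit; an assumed continuation mirroring the 1000-cluster block would time the run and bring the labels back to image space:

print("Ward agglomeration 2000 clusters: %.2fs" % (time.time() - start))
labels_img_2000 = masker.inverse_transform(ward.labels_ + 1)
labels_img_2000.to_filename('parcellation_2000.nii')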
Example 14: print
# Required imports: from nilearn.input_data import NiftiMasker [as alias]
# Or alternatively: from nilearn.input_data.NiftiMasker import inverse_transform [as alias]
print("Actual number of subjects after quality check: %d" % n_samples)

### Mask data #################################################################
nifti_masker = NiftiMasker(
    smoothing_fwhm=5,
    memory='nilearn_cache', memory_level=1)  # cache options
fmri_masked = nifti_masker.fit_transform(contrast_map_filenames)

### ANOVA (parametric F-scores) ###############################################
from nilearn._utils.fixes import f_regression
_, pvals_anova = f_regression(fmri_masked, tested_var, center=True)
pvals_anova *= fmri_masked.shape[1]  # Bonferroni correction
pvals_anova[np.isnan(pvals_anova)] = 1
pvals_anova[pvals_anova > 1] = 1
neg_log_pvals_anova = -np.log10(pvals_anova)
neg_log_pvals_anova_unmasked = nifti_masker.inverse_transform(
    neg_log_pvals_anova)

### Perform massively univariate analysis with permuted OLS ###################
neg_log_pvals_permuted_ols, _, _ = permuted_ols(
    tested_var, fmri_masked,
    model_intercept=True,
    n_perm=5000,  # 5,000 for the sake of time; ideally this should be 10,000
    n_jobs=1)  # can be changed to use more CPUs
neg_log_pvals_permuted_ols_unmasked = nifti_masker.inverse_transform(
    np.ravel(neg_log_pvals_permuted_ols))

### Visualization #############################################################
from nilearn.plotting import plot_stat_map

# Various plotting parameters
z_slice = 12  # plotted slice
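The snippet ends here in the scrape; an assumed plotting call using the parameters just defined might look like:

plot_stat_map(neg_log_pvals_permuted_ols_unmasked, threshold=1.0,
              display_mode='z', cut_coords=[z_slice],
              title='Permuted OLS: -log10(p)')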
Example 15: extract_atlas_rad
# Required imports: from nilearn.input_data import NiftiMasker [as alias]
# Or alternatively: from nilearn.input_data.NiftiMasker import inverse_transform [as alias]
def extract_atlas_rad(db, rad_column, rad_dir, stat, pca_explained_var=None,
                      include_chim=False):
    """Replaces radiation presence by stat on ROIs from atlas.

    Assumes the brain mask and radiation nifti file are in rad_dir."""
    atlas_mask_file = 'labels_to_rd.nii.gz'
    xml_path = os.path.join(rad_dir, 'lpba40.label.xml')
    labels, rois_name = read_roiLabel(xml_path)
    # labels = labels[:2]  # for debugging
    # rois_name = rois_name[:2]  # for debugging
    labels.append(1000)
    rois_name.append('rest_brain')
    brain_mask_file = 'BrainMask_to_rd.nii.gz'
    extracted_rad_stat = {}  # Memoization of radiation statistic

    # PCA estimation
    for idx, row in db.iterrows():
        if row[rad_column] == 1:
            sub_id = row['patient']
            if sub_id in extracted_rad_stat:
                continue
            else:
                atlas_mask_path = os.path.join(rad_dir, sub_id,
                                               atlas_mask_file)
                atlas_mask_check = os.path.isfile(atlas_mask_path)
                brain_mask_path = os.path.join(rad_dir, sub_id,
                                               brain_mask_file)
                brain_mask_check = os.path.isfile(brain_mask_path)
                rad_path = os.path.join(rad_dir, sub_id, sub_id + '.nii')
                rad_check = os.path.isfile(rad_path)
                if atlas_mask_check and brain_mask_check and rad_check:
                    extracted_rad = []
                    brain_masker = NiftiMasker(brain_mask_path)
                    atlas_brain = brain_masker.fit_transform(atlas_mask_path)
                    atlas_brain = atlas_brain.astype(np.int16)
                    # Voxels outside every atlas label get the 'rest_brain' label
                    atlas_brain[atlas_brain == 0] = 1000
                    atlas_brain = brain_masker.inverse_transform(atlas_brain)
                    for lidx, label in enumerate(labels):
                        print('processing ROI ' + str(label) + ' ' +
                              rois_name[lidx] + ' for subject ' + sub_id)
                        masker = NiftiMasker(get_roi_mask(atlas_brain, label))
                        rad_stat = stat(masker.fit_transform(rad_path))
                        extracted_rad.append(rad_stat)
                    extracted_rad_stat[sub_id] = extracted_rad

    rad_data = np.vstack([x for x in extracted_rad_stat.values()])
    scaler = StandardScaler()
    rad_data = scaler.fit_transform(rad_data)
    if pca_explained_var is not None:
        pca = PCA(pca_explained_var, whiten=True)
    else:
        pca = PCA(whiten=True)
    pca.fit(rad_data)
    components_name = ['component_{0:02d}'.format(x)
                       for x in range(1, pca.n_components_ + 1)]

    # Modify db to include the pca components
    for c in components_name:
        db[c] = ''
    for idx, row in db.iterrows():
        if row[rad_column] == 1:
            sub_id = row['patient']
            atlas_mask_path = os.path.join(rad_dir, sub_id, atlas_mask_file)
            atlas_mask_check = os.path.isfile(atlas_mask_path)
            brain_mask_path = os.path.join(rad_dir, sub_id, brain_mask_file)
            brain_mask_check = os.path.isfile(brain_mask_path)
            rad_path = os.path.join(rad_dir, sub_id, sub_id + '.nii')
            rad_check = os.path.isfile(rad_path)
            if atlas_mask_check and brain_mask_check and rad_check:
                rois_v = np.array(extracted_rad_stat[sub_id]).reshape(1, -1)
                components_value = pca.transform(scaler.transform(rois_v))
                components_value = np.ravel(components_value)
                for cidx, c in enumerate(components_name):
                    db.loc[idx, c] = components_value[cidx]
            else:
                db.loc[idx, rad_column] = None
        elif not include_chim:
            db.loc[idx, rad_column] = None
        else:
            for c in components_name:
                db.loc[idx, c] = 0

    db = db[db[rad_column].notnull()]
    db = db.drop(rad_column, axis=1)
    return db, rois_name, pca, components_name, rad_data
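A hypothetical call: db is assumed to be a pandas DataFrame with a 'patient' column and a binary radiation indicator, read_roiLabel and get_roi_mask are project helpers not shown in the scrape, and np.median is one sensible choice for the stat callable:

import numpy as np

db_out, rois, pca, comp_names, rad_data = extract_atlas_rad(
    db, rad_column='irradiated', rad_dir='/data/radiation',
    stat=np.median, pca_explained_var=0.95)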