This page collects typical usage examples of the Python function nipy.load_image. If you have been wondering how to use load_image, what it does, or what calling it looks like in practice, the curated code examples below may help.
The sections that follow show 15 code examples of load_image, sorted by popularity by default. You can upvote the examples you like or find useful; your feedback helps the system recommend better Python code examples.
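Before the examples, here is a minimal sketch of the pattern they all share: load an image from disk, inspect its shape, and pull the voxel data out as a NumPy array. The file path is a placeholder you would replace with an actual NIfTI file.

import nipy
import numpy as np

# Hypothetical path; replace with a real NIfTI file on disk.
img = nipy.load_image('/path/to/functional.nii.gz')

print(img.shape)       # e.g. (x, y, z, t) for a 4D functional run
print(img.coordmap)    # mapping from voxel indices to world coordinates

data = img.get_data()  # voxel data as a numpy array
print(np.asarray(data).mean())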
Example 1: read_niftis

def read_niftis(file_list):
    '''Reads niftis from a file list into a numpy array.

    Args:
        file_list (list): List of nifti file paths.

    Returns:
        numpy.array: Array of data from the nifti file list.
        list: New file list.
    '''
    data0 = load_image(file_list[0]).get_data()
    x, y, z, t = data0.shape
    print 'Found %d files with data shape %r' % (len(file_list), data0.shape)

    data = []
    new_file_list = []
    for i, f in enumerate(file_list):
        print '%d) Loading subject from file: %s' % (i, f)
        nifti = load_image(f)
        subject_data = nifti.get_data()
        if subject_data.shape != (x, y, z, t):
            raise ValueError('Shape mismatch')
        subject_data -= subject_data.mean()
        subject_data /= subject_data.std()
        data.append(subject_data)
        new_file_list.append(f)
    data = np.array(data).transpose(0, 4, 1, 2, 3).astype('float32')
    return data, new_file_list
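A hedged usage sketch for the function above; the file paths are hypothetical, and the function assumes every file is a 4D run with the same shape as the first one:

from glob import glob

# Hypothetical directory of 4D fMRI runs sharing one shape.
file_list = sorted(glob('/data/study/sub*_func.nii.gz'))
data, kept_files = read_niftis(file_list)
# data has shape (n_subjects, t, x, y, z), z-scored per subject.
print(data.shape, len(kept_files))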
Example 2: read_niftis

def read_niftis(file_list):
    '''Reads niftis from a file list into a numpy array.

    Args:
        file_list (list): List of file paths.

    Returns:
        numpy.array: Array of data from the nifti file list.
        list: New file list with bad files filtered.
    '''
    data0 = load_image(file_list[0]).get_data()
    x, y, z = data0.shape
    print 'Found %d files with data shape %r' % (len(file_list), data0.shape)

    n = len(file_list)
    data = []
    new_file_list = []
    for i, f in enumerate(file_list):
        print '%d) Loading subject from file: %s' % (i, f)
        nifti = load_image(f)
        subject_data = nifti.get_data()
        if subject_data.shape != (x, y, z):
            raise ValueError('Shape mismatch')
        data.append(subject_data)
        new_file_list.append(f)
    data = np.array(data).astype('float32')
    return data, new_file_list
Example 3: check_diag_results
def check_diag_results(results, img_shape,
                       time_axis, slice_axis, ncomps,
                       out_path, froot, ext='.nii.gz'):
    S = img_shape[slice_axis]
    T = img_shape[time_axis]
    pca_shape = list(img_shape)
    pca_shape[time_axis] = ncomps
    assert_equal(results['pca'].shape, tuple(pca_shape))
    assert_equal(results['pca_res']['basis_projections'].shape,
                 tuple(pca_shape))
    # Roll pca axis last to test shape of output image
    ax_order = list(range(4))
    ax_order.remove(time_axis)
    ax_order.append(time_axis)
    rolled_shape = tuple(pca_shape[i] for i in ax_order)
    pca_img = load_image(pjoin(out_path, 'pca_' + froot + ext))
    assert_equal(pca_img.shape, rolled_shape)
    for prefix in ('mean', 'min', 'max', 'std'):
        fname = pjoin(out_path, prefix + '_' + froot + ext)
        img = load_image(fname)
        assert_equal(img.shape, rolled_shape[:-1])
    vars = np.load(pjoin(out_path, 'vectors_components_' + froot + '.npz'))
    assert_equal(set(vars),
                 set(['basis_vectors', 'pcnt_var', 'volume_means',
                      'slice_mean_diff2']))
    assert_equal(vars['volume_means'].shape, (T,))
    assert_equal(vars['basis_vectors'].shape, (T, T - 1))
    assert_equal(vars['slice_mean_diff2'].shape, (T - 1, S))
Example 4: read_niftis
def read_niftis(file_lists):
    """
    Read niftis.

    Parameters
    ----------
    file_lists: list of list of paths.
        Each list of file paths is a unique class.

    Returns
    -------
    data, labels: tuple of array-like and list
        The data and corresponding labels
    """
    data0 = load_image(file_lists[0][0]).get_data()
    if len(data0.shape) == 3:
        x, y, z = data0.shape
        t = 1
    elif len(data0.shape) == 4:
        x, y, z, t = data0.shape
    else:
        raise ValueError("Cannot parse data with dimensions %r" % data0.shape)

    dt = (sum(len(fl) for fl in file_lists)) * t
    data = np.zeros((dt, x, y, z))

    labels = [[i] * (len(fl) * t) for i, fl in enumerate(file_lists)]
    labels = [item for sublist in labels for item in sublist]

    for i, fl in enumerate(file_lists):
        assert len([j for j in labels if j == i]) == len(fl) * t

    flattened_list = [item for sublist in file_lists for item in sublist]
    for i, f in enumerate(flattened_list):
        logger.info("Loading subject from file: %s%s" % (f, '' * 30))

        nifti = load_image(f)
        subject_data = nifti.get_data()

        if len(subject_data.shape) == 3:
            data[i] = subject_data
        elif len(subject_data.shape) == 4:
            data[i * t: (i + 1) * t] = subject_data.transpose((3, 0, 1, 2))
        else:
            raise ValueError("Cannot parse subject data with dimensions %r"
                             % subject_data.shape)

    logger.info("\rLoading subject from file: %s\n" % ('DONE' + " " * 30))

    if data.shape[0] != len(labels):
        raise ValueError("Data and labels have different number of samples.")

    base_file = flattened_list[0]
    # Use nibabel in case we need to convert from 4d to 3d
    base = nib.load(base_file)
    if len(base.shape) == 4:
        base = nib.four_to_three(base)[0]

    return data, labels, base
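A usage sketch for this variant, assuming two hypothetical groups of scans; each inner list becomes one class, and labels are assigned by list position:

# Hypothetical file paths for two classes of subjects.
patients = ['/data/patients/p01.nii', '/data/patients/p02.nii']
controls = ['/data/controls/c01.nii', '/data/controls/c02.nii']

data, labels, base = read_niftis([patients, controls])
# data: (n_volumes, x, y, z); labels: 0 for patients, 1 for controls
print(data.shape, labels[:4])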
Example 5: main
def main(nifti_file, anat_file, roi_file, out_file, thr=2):
    '''
    Main function for running as a script.
    '''
    iscale = 2
    nifti = load_image(nifti_file)
    anat = load_image(anat_file)
    roi_dict = pickle.load(open(roi_file, 'rb'))
    montage(nifti, anat, roi_dict, out_file=out_file)
Example 6: resample_image
def resample_image(source_file, target_file, outdir, w2wmap=None, order=3,
                   cval=0, verbose=0):
    """ Resample the source image to match the target image using Nipy.

    Parameters
    ----------
    source_file: str (mandatory)
        the image to resample.
    target_file: str (mandatory)
        the reference image.
    outdir: str (mandatory)
        the folder where the resampled image will be saved.
    w2wmap: array (4, 4) or callable
        physical to physical transformation.
    verbose: int (optional, default 0)
        the verbosity level.

    Returns
    -------
    resampled_file: str
        the resampled image.
    """
    # Get target image information
    target_image = nipy.load_image(target_file)
    onto_shape = target_image.shape[:3]
    onto_aff = xyz_affine(target_image.affine, xyz=[0, 1, 2], verbose=verbose)

    # Define index and physical coordinate systems
    arraycoo = "ijklmnopq"[:len(onto_shape)]
    spacecoo = "xyztrsuvw"[:len(onto_shape)]
    if verbose > 0:
        print("\narraycoo: ", arraycoo, "\nspacecoo: ", spacecoo,
              "\nonto_aff\n", onto_aff)
    dmaker = CoordSysMaker(arraycoo, 'generic-array')
    rmaker = CoordSysMaker(spacecoo, 'generic-scanner')
    cm_maker = cmap.CoordMapMaker(dmaker, rmaker)
    cmap_out = cm_maker.make_affine(onto_aff)
    if verbose > 0:
        print("cmap_out:\n", cmap_out)

    # Define the default physical to physical transformation
    if w2wmap is None:
        w2wmap = np.eye(onto_aff.shape[0])
    if verbose > 0:
        print("w2wmap:\n", w2wmap)

    # Resample
    source_image = nipy.load_image(source_file)
    resampled_image = resample(
        source_image, cmap_out, w2wmap, onto_shape, order=order, cval=cval)

    # Save the resampled image
    resampled_file = os.path.join(
        outdir, "resampled_{0}".format(os.path.basename(source_file)))
    nipy.save_image(resampled_image, resampled_file)

    return resampled_file
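A minimal usage sketch with hypothetical paths; leaving w2wmap as None uses an identity world-to-world transform, so the source is simply regridded onto the target's voxel lattice:

# Hypothetical paths: put a subject's T1 onto a template grid.
resampled = resample_image(source_file='/data/sub01_T1.nii.gz',
                           target_file='/templates/mni152_1mm.nii.gz',
                           outdir='/data/resampled',
                           w2wmap=None,  # identity physical-to-physical map
                           order=1)      # linear interpolation
print(resampled)  # /data/resampled/resampled_sub01_T1.nii.gz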
Example 7: test_nipy_3_4d
def test_nipy_3_4d():
    # Test nipy_3dto4d and nipy_4dto3d
    fimg = load_image(funcfile)
    N = fimg.shape[-1]
    out_4d = 'func4d.nii'
    with InTemporaryDirectory() as tmpdir:
        cmd = ['nipy_4dto3d', funcfile, '--out-path=' + tmpdir]
        run_command(cmd)
        imgs_3d = ['functional_%04d.nii' % i for i in range(N)]
        for iname in imgs_3d:
            assert_true(isfile(iname))
        cmd = ['nipy_3dto4d'] + imgs_3d + ['--out-4d=' + out_4d]
        run_command(cmd)
        fimg_back = load_image(out_4d)
        assert_almost_equal(fimg.get_data(), fimg_back.get_data())
        del fimg_back
Example 8: save_4d_data

def save_4d_data(Hammer_atlas, image_path, path_4d, image_names):
    '''Produce nparrays of (voxels in region) x (images in study),
    only if the number of images is fewer than 1000.
    '''
    region_codes = np.unique(Hammer_atlas._data)
    region_codes = region_codes[region_codes != 0]
    region_coordinates = {i: np.where(Hammer_atlas._data == i)
                          for i in region_codes}
    data_4d = {i: [] for i in region_codes}
    for im in image_names:
        print im
        try:
            images_data = nipy.load_image(os.path.join(image_path, im))._data
            for k in data_4d:
                data_4d[k].append(images_data[region_coordinates[k]])
        except:
            raise ValueError("Error during reading image {}".format(str(im)))
    for c in region_codes:
        c = int(c)
        np_4d = np.array(data_4d[c])
        print np_4d.shape
        np.save(os.path.join(path_4d, str(c) + "_" + str(1)), np_4d)
        convert_array_for_regression(path_4d, c)
        delete_arrays(path_4d, c)
Example 9: test_screen
def test_screen():
    img = ni.load_image(funcfile)
    res = screen(img)
    assert_equal(res['mean'].ndim, 3)
    assert_equal(res['pca'].ndim, 4)
    assert_equal(sorted(res.keys()),
                 ['max', 'mean', 'min',
                  'pca', 'pca_res',
                  'std', 'ts_res'])
    data = img.get_data()
    assert_array_equal(np.max(data, axis=-1), res['max'].get_data())
    assert_array_equal(np.mean(data, axis=-1), res['mean'].get_data())
    assert_array_equal(np.min(data, axis=-1), res['min'].get_data())
    assert_array_equal(np.std(data, axis=-1), res['std'].get_data())
    pca_res = pca(data, axis=-1, standardize=False, ncomp=10)
    # On windows, there seems to be some randomness in the PCA output vector
    # signs; this routine sets the basis vectors to have first value positive,
    # and therefore standardizes the signs
    pca_res = res2pos1(pca_res)
    screen_pca_res = res2pos1(res['pca_res'])
    for key in pca_res:
        assert_almost_equal(pca_res[key], screen_pca_res[key])
    ts_res = time_slice_diffs(data)
    for key in ts_res:
        assert_array_equal(ts_res[key], res['ts_res'][key])
Example 10: get_nifti
def get_nifti(self, topo_view, base_nifti=None, **kwargs):
    """
    Process the nifti.

    Parameters
    ----------
    topo_view: array-like
        Topological view to create nifti. 3D.

    Returns
    -------
    image: nipy image
        Nifti image from topological view.
    """
    if base_nifti is None:
        assert self.base_nifti is not None, ("`base.nii` not in dataset "
                                             "directory. You may need to "
                                             "reprocess.")
        base_nifti = self.base_nifti
        image = Image.from_image(base_nifti, data=topo_view)
    else:
        if isinstance(base_nifti, str):
            base_nifti = load_image(base_nifti)
        base2new_affine = np.linalg.inv(
            base_nifti.affine).dot(self.base_nifti.affine)
        cmap = AffineTransform("kji", "zxy", base2new_affine)
        image = Image.from_image(base_nifti, data=topo_view, coordmap=cmap)

    return image
Example 11: add_overlay
def add_overlay(self, overlay, thr, limit, cmap, alpha=0.8):
    overlay = load_image(overlay)
    data = overlay.get_data()
    ovl = normalize_dims(data, self.diag)

    if limit == 'max':
        vmin = thr
        vmax = np.max(ovl)
        print 'using image max of ' + str(vmax) + ' as threshold'
        ovl = np.ma.masked_less(ovl, thr)
    elif thr > limit:
        print "One or more overlays have inverse ranges,"
        print "beware of correct colormap!"
        ovl = np.ma.masked_greater(ovl, thr)
        vmax = thr
        vmin = limit
    else:
        ovl = np.ma.masked_less(ovl, thr)
        vmax = limit
        vmin = thr

    self.image_list.append((ovl, cmap, vmax, vmin, alpha))
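The thresholding above relies on NumPy masked arrays. A small self-contained illustration of the default branch, where everything below the threshold is masked and therefore not drawn by matplotlib:

import numpy as np

stat_map = np.array([-1.0, 0.5, 2.3, 4.1])
thr = 2.0
masked = np.ma.masked_less(stat_map, thr)
print(masked)       # [-- -- 2.3 4.1]; masked voxels are left transparent
print(masked.mask)  # [ True  True False False]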
Example 12: test_diagnose
def test_diagnose():
    args = Args()
    img = load_image(funcfile)
    with InTemporaryDirectory() as tmpdir:
        # Copy the functional file to a temporary writeable directory
        os.mkdir('mydata')
        tmp_funcfile = pjoin(tmpdir, 'mydata', 'myfunc.nii.gz')
        shutil.copy(funcfile, tmp_funcfile)
        args.filename = tmp_funcfile
        args.time_axis = None
        args.slice_axis = None
        args.out_path = None
        args.out_fname_label = None
        args.ncomponents = 10
        res = diagnose(args)
        check_diag_results(res, img.shape, 3, 2, 10, 'mydata', 'myfunc')
        args.slice_axis = 'j'
        res = diagnose(args)
        check_diag_results(res, img.shape, 3, 1, 10, 'mydata', 'myfunc')
        # Time axis is not going to work because we'd have to use up one of
        # the needed spatial axes
        args.time_axis = 'i'
        assert_raises(NiftiError, diagnose, args)
        args.time_axis = 't'
        # Check that output works
        os.mkdir('myresults')
        args.out_path = 'myresults'
        args.out_fname_label = 'myana'
        res = diagnose(args)
        check_diag_results(res, img.shape, 3, 1, 10, 'myresults', 'myana')
Example 13: space_time_realign

def space_time_realign(Images, TR=2, numslices=None, SliceTime='asc_alt_2',
                       RefScan=None):
    '''
    4D simultaneous slice timing and spatial realignment. Adapted from
    Alexis Roche's example script, and extended for use with multiplex
    imaging sequences.

    Inputs:

        Images: list of images, input as a list of strings.

        numslices: for a non-multiplex sequence, defaults to the number of
            slices in the image. For a multiplex sequence, enter a tuple
            whose first element is the number of planes acquired in
            parallel, and whose second element is the number of slices in
            each parallel plane/slab.

        SliceTime: a string specifying how the slices are ordered. Choices
            are the following:
                1) 'ascending': sequential ascending acquisition
                2) 'descending': sequential descending acquisition
                3) 'asc_alt_2': ascending interleaved, starting at first slice
                4) 'asc_alt_2_1': ascending interleaved, starting at the
                   second slice
                5) 'desc_alt_2': descending interleaved, starting at last slice
                6) 'asc_alt_siemens': ascending interleaved, starting at the
                   first slice if odd number of slices, or second slice if
                   even number of slices
                7) 'asc_alt_half': ascending interleaved by half the volume
                8) 'desc_alt_half': descending interleaved by half the volume

        RefScan: reference volume for spatial realignment movement estimation.
    '''
    # load images
    runs = [load_image(run) for run in Images]
    # parse data info
    if numslices is None:
        numslices = runs[0].shape[2]
        numplanes = 1
    elif isinstance(numslices, tuple):
        # first element: number of simultaneously acquired planes;
        # second element: number of slices per plane/slab
        numplanes, numslices = numslices[0], numslices[1]
    # parse slice timing according to the input
    slice_timing = getattr(timefuncs, SliceTime)(TR, numslices)
    # repeat the slice timing for a multiplex sequence
    slice_timing = np.tile(slice_timing, numplanes)
    # Spatio-temporal realigner using the requested slice timing
    R = SpaceTimeRealign(runs, tr=TR, slice_times=slice_timing, slice_info=2,
                         affine_class='Rigid')
    print('Slice times: %s' % slice_timing)
    # Estimate motion within- and between-sessions
    R.estimate(refscan=RefScan)
    # Resample data on a regular space+time lattice using 4d interpolation
    print('Saving results ...')
    for i in range(len(runs)):
        corr_run = R.resample(i)
        fname = os.path.join(os.path.split(Images[i])[0],
                             'ra' + os.path.split(Images[i])[1])
        save_image(corr_run, fname)
        print(fname)
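A hedged usage sketch; the file names are hypothetical, corrected runs are written next to the inputs with an 'ra' prefix, and the multiplex case passes numslices as a (planes, slices-per-plane) tuple as described in the docstring:

# Single-band acquisition: slice count read from the image itself.
space_time_realign(['/data/run1.nii', '/data/run2.nii'],
                   TR=2.0, SliceTime='asc_alt_2', RefScan=0)

# Hypothetical multiplex acquisition: 2 simultaneous planes,
# 30 slices per plane.
space_time_realign(['/data/mb_run1.nii'],
                   TR=0.8, numslices=(2, 30), SliceTime='ascending')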
Example 14: save_qa_img_dirnme
def save_qa_img_dirnme(in4d, outdir):
    pth, nme = os.path.split(in4d)
    img = nipy.load_image(in4d)
    diag.plot_tsdiffs(diag.time_slice_diffs(img))
    cleantime = time.asctime().replace(' ', '-').replace(':', '_')
    figfile = os.path.join(outdir, 'QA_%s_%s.png' % (nme, cleantime))
    pylab.savefig(figfile)
Example 15: sources_to_nifti
def sources_to_nifti(CHECKPOINT, MASKMAT, BASENIFTI, ONAME, savepath, voxels, win):
    bnifti = load_image(BASENIFTI)
    mask = loadmat(MASKMAT)['mask']
    model = np.load(CHECKPOINT)  # Numpy array of sources from Infomax ICA

    for i in range(len(model)):  # Goes component by component
        W = model[i, :].reshape([voxels, win])
        f = zeros(len(mask))
        idx = where(mask == 1)
        data = zeros((bnifti.shape[0], bnifti.shape[1], bnifti.shape[2],
                      W.shape[1]))

        f[idx[0].tolist()] = detrend(W) / std(W)

        for j in range(0, W.shape[1]):
            data[:, :, :, j] = reshape(f, (bnifti.shape[0], bnifti.shape[1],
                                           bnifti.shape[2]), order='F')

        img = Image.from_image(bnifti, data=data)
        os.chdir(savepath)

        fn = ONAME + "%s.nii" % (str(i))  # Where the result is saved and under what name
        save_image(img, fn)