This article collects typical usage examples of the Python method scipy.ndimage.filters.gaussian_filter. If you are wondering how to use filters.gaussian_filter, or are looking for concrete examples of it in real code, the curated examples below may help. You can also explore further usage examples from its containing module, scipy.ndimage.filters.
The following presents 15 code examples of filters.gaussian_filter, sorted by popularity by default.
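Before the repository examples, here is a minimal, hedged sketch of the basic call; the array shape and sigma values are illustrative assumptions, not taken from any example below:

import numpy as np
from scipy.ndimage import gaussian_filter  # scipy.ndimage.filters is the legacy namespace for the same function

img = np.random.rand(128, 128)              # hypothetical 2-D image
blurred = gaussian_filter(img, sigma=2)     # isotropic Gaussian blur
blurred_zero = gaussian_filter(img, sigma=2, mode="constant", cval=0)  # zero-padded borders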
Example 1: elastic_transform
# Required import: from scipy.ndimage import filters [as alias]
# Or: from scipy.ndimage.filters import gaussian_filter [as alias]
def elastic_transform(image, alpha=1000, sigma=30, spline_order=1, mode='nearest', random_state=np.random):
    """Elastic deformation of image as described in [Simard2003]_.
    .. [Simard2003] Simard, Steinkraus and Platt, "Best Practices for
       Convolutional Neural Networks applied to Visual Document Analysis", in
       Proc. of the International Conference on Document Analysis and
       Recognition, 2003.
    """
    assert image.ndim == 3
    shape = image.shape[:2]

    dx = gaussian_filter((random_state.rand(*shape) * 2 - 1),
                         sigma, mode="constant", cval=0) * alpha
    dy = gaussian_filter((random_state.rand(*shape) * 2 - 1),
                         sigma, mode="constant", cval=0) * alpha

    x, y = np.meshgrid(np.arange(shape[0]), np.arange(shape[1]), indexing='ij')
    indices = [np.reshape(x + dx, (-1, 1)), np.reshape(y + dy, (-1, 1))]
    result = np.empty_like(image)
    for i in range(image.shape[2]):
        result[:, :, i] = map_coordinates(
            image[:, :, i], indices, order=spline_order, mode=mode).reshape(shape)
    return result
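A minimal usage sketch for the function above, assuming it is defined in a session where gaussian_filter and map_coordinates have been imported as indicated; the image shape and parameter values are illustrative:

import numpy as np
from scipy.ndimage.filters import gaussian_filter
from scipy.ndimage import map_coordinates

rng = np.random.RandomState(42)
image = rng.rand(64, 64, 3)                                    # hypothetical H x W x C image
warped = elastic_transform(image, alpha=1000, sigma=30, random_state=rng)
assert warped.shape == image.shape                             # same shape, locally deformed content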
Example 2: smoothing
# Required import: from scipy.ndimage import filters [as alias]
# Or: from scipy.ndimage.filters import gaussian_filter [as alias]
def smoothing(im, mode=None):
    # utility function to smooth an image
    if mode is None:
        return im
    elif mode == 'L2':
        # L2 norm
        return im / (np.sqrt(np.mean(np.square(im))) + K.epsilon())
    elif mode == 'GaussianBlur':
        # Gaussian Blurring with width of 3
        return filters.gaussian_filter(im, 1/8)
    elif mode == 'Decay':
        # Decay regularization
        decay = 0.98
        return decay * im
    elif mode == 'Clip_weak':
        # Clip weak pixel regularization
        percentile = 1
        threshold = np.percentile(np.abs(im), percentile)
        im[np.where(np.abs(im) < threshold)] = 0
        return im
    else:
        # print error message
        print('Unknown smoothing parameter. No smoothing implemented.')
        return im
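A hedged usage sketch for smoothing above, assuming it is defined in a session with filters imported as shown; the 'L2' branch also needs a Keras backend bound to K, so only the Gaussian and clipping branches are exercised, and the input array is made up:

import numpy as np
from scipy.ndimage import filters

grad = np.random.randn(64, 64)                       # e.g. a gradient image to regularize
blurred = smoothing(grad, mode='GaussianBlur')       # light blur with sigma = 1/8
clipped = smoothing(grad.copy(), mode='Clip_weak')   # zeroes the weakest 1% of pixels in place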
Example 3: _log_density
# Required import: from scipy.ndimage import filters [as alias]
# Or: from scipy.ndimage.filters import gaussian_filter [as alias]
def _log_density(self, stimulus):
    shape = stimulus.shape[0], stimulus.shape[1]

    stimulus_id = get_image_hash(stimulus)
    stimulus_index = self.stimuli.stimulus_ids.index(stimulus_id)
    #fixations = self.fixations[self.fixations.n == stimulus_index]
    inds = self.fixations.n != stimulus_index

    ZZ = np.zeros(shape)
    _fixations = np.array([self.ys[inds]*shape[0], self.xs[inds]*shape[1]]).T
    fill_fixation_map(ZZ, _fixations)
    ZZ = gaussian_filter(ZZ, [self.bandwidth*shape[0], self.bandwidth*shape[1]])
    ZZ *= (1-self.eps)
    ZZ += self.eps * 1.0/(shape[0]*shape[1])
    ZZ = np.log(ZZ)

    ZZ -= logsumexp(ZZ)
    #ZZ -= np.log(np.exp(ZZ).sum())

    return ZZ
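The method above depends on its class and helpers (get_image_hash, fill_fixation_map, self.fixations), so here is a standalone sketch of the same idea: histogram the fixations, blur with a Gaussian as a kernel density estimate, mix with a uniform floor, and log-normalize. All names, shapes and values below are illustrative assumptions:

import numpy as np
from scipy.ndimage import gaussian_filter
from scipy.special import logsumexp

shape = (48, 64)                                      # hypothetical stimulus size (rows, cols)
rng = np.random.RandomState(0)
ys = rng.randint(0, shape[0], 200)                    # fixation rows
xs = rng.randint(0, shape[1], 200)                    # fixation columns

counts = np.zeros(shape)
np.add.at(counts, (ys, xs), 1)                        # fixation histogram
density = gaussian_filter(counts, sigma=5)            # Gaussian KDE over the image grid
eps = 1e-20
density = density * (1 - eps) + eps / density.size    # uniform floor avoids log(0)
log_density = np.log(density)
log_density -= logsumexp(log_density)                 # normalize so the density sums to 1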
Example 4: pick_peaks
# Required import: from scipy.ndimage import filters [as alias]
# Or: from scipy.ndimage.filters import gaussian_filter [as alias]
def pick_peaks(nc, L=16, offset_denom=0.1):
    """Obtain peaks from a novelty curve using an adaptive threshold."""
    offset = nc.mean() * float(offset_denom)
    th = filters.median_filter(nc, size=L) + offset
    #th = filters.gaussian_filter(nc, sigma=L/2., mode="nearest") + offset
    #import pylab as plt
    #plt.plot(nc)
    #plt.plot(th)
    #plt.show()
    # th = np.ones(nc.shape[0]) * nc.mean() - 0.08
    peaks = []
    for i in range(1, nc.shape[0] - 1):
        # is it a peak?
        if nc[i - 1] < nc[i] and nc[i] > nc[i + 1]:
            # is it above the threshold?
            if nc[i] > th[i]:
                peaks.append(i)
    return peaks
Example 5: pick_peaks
# Required import: from scipy.ndimage import filters [as alias]
# Or: from scipy.ndimage.filters import gaussian_filter [as alias]
def pick_peaks(nc, L=16):
    """Obtain peaks from a novelty curve using an adaptive threshold."""
    offset = nc.mean() / 20.

    nc = filters.gaussian_filter1d(nc, sigma=4)  # Smooth out nc

    th = filters.median_filter(nc, size=L) + offset
    #th = filters.gaussian_filter(nc, sigma=L/2., mode="nearest") + offset

    peaks = []
    for i in range(1, nc.shape[0] - 1):
        # is it a peak?
        if nc[i - 1] < nc[i] and nc[i] > nc[i + 1]:
            # is it above the threshold?
            if nc[i] > th[i]:
                peaks.append(i)

    #plt.plot(nc)
    #plt.plot(th)
    #for peak in peaks:
    #    plt.axvline(peak)
    #plt.show()

    return peaks
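A hedged usage sketch for the peak pickers above, run on a synthetic novelty curve; the signal is made up purely for illustration and assumes the function is defined with filters imported as shown:

import numpy as np
from scipy.ndimage import filters

t = np.linspace(0, 6 * np.pi, 500)
nc = np.abs(np.sin(t)) + 0.1 * np.random.rand(500)   # synthetic novelty curve
peaks = pick_peaks(nc, L=16)                          # indices of candidate boundaries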
Example 6: preprocess_img
# Required import: from scipy.ndimage import filters [as alias]
# Or: from scipy.ndimage.filters import gaussian_filter [as alias]
def preprocess_img(inputfile, output_preprocessed, zooms):
    img = nib.load(inputfile)
    data = img.get_data()
    affine = img.affine
    zoom = img.header.get_zooms()[:3]
    data, affine = reslice(data, affine, zoom, zooms, 1)
    data = np.squeeze(data)
    data = np.pad(data, [(0, 256 - len_) for len_ in data.shape], "constant")

    data_sub = data - gaussian_filter(data, sigma=1)
    img = sitk.GetImageFromArray(np.copy(data_sub))
    img = sitk.AdaptiveHistogramEqualization(img)
    data_clahe = sitk.GetArrayFromImage(img)[:, :, :, None]
    data = np.concatenate((data_clahe, data[:, :, :, None]), 3)
    data = (data - np.mean(data, (0, 1, 2))) / np.std(data, (0, 1, 2))
    assert data.ndim == 4, data.ndim
    assert np.allclose(np.mean(data, (0, 1, 2)), 0.), np.mean(data, (0, 1, 2))
    assert np.allclose(np.std(data, (0, 1, 2)), 1.), np.std(data, (0, 1, 2))
    data = np.float32(data)
    img = nib.Nifti1Image(data, affine)
    nib.save(img, output_preprocessed)
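The function above needs nibabel, SimpleITK and a reslice helper; the gaussian_filter-specific part is the smooth-background subtraction followed by per-channel z-scoring, sketched standalone below on a random volume with assumed shapes:

import numpy as np
from scipy.ndimage import gaussian_filter

vol = np.random.rand(64, 64, 64).astype(np.float32)                  # hypothetical 3-D volume
high_pass = vol - gaussian_filter(vol, sigma=1)                       # remove the smooth background
feat = np.stack([high_pass, vol], axis=-1)                            # two-channel feature volume
feat = (feat - feat.mean(axis=(0, 1, 2))) / feat.std(axis=(0, 1, 2))  # per-channel z-score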
Example 7: lucas_kanade
# Required import: from scipy.ndimage import filters [as alias]
# Or: from scipy.ndimage.filters import gaussian_filter [as alias]
def lucas_kanade(stem, pat: str, kernel: int = 5, Nfilter: int = 7):
    flist = getimgfiles(stem, pat)

    # %% priming read
    im1 = imageio.imread(flist[0], as_gray=True)

    # %% evaluate the first frame's POI
    X = im1.shape[1] // 16
    Y = im1.shape[0] // 16
    poi = getPOI(X, Y, kernel)

    # % get the weights
    W = gaussianWeight(kernel)

    # %% loop over all images in directory
    for i in range(1, len(flist)):
        im2 = imageio.imread(flist[i], as_gray=True)
        im2 = gaussian_filter(im2, Nfilter)

        V = LucasKanade(im1, im2, kernel, poi, W)

        compareGraphsLK(im1, im2, poi, V)

        im1 = im2.copy()
Example 8: get_diffraction_test_image
# Required import: from scipy.ndimage import filters [as alias]
# Or: from scipy.ndimage.filters import gaussian_filter [as alias]
def get_diffraction_test_image(self, dtype=np.float32):
    image_x, image_y = self.image_x, self.image_y
    cx, cy = image_x / 2, image_y / 2
    image = np.zeros((image_y, image_x), dtype=np.float32)
    iterator = zip(self._x_list, self._y_list, self._intensity_list)
    for x, y, i in iterator:
        if self.diff_intensity_reduction is not False:
            dr = np.hypot(x - cx, y - cy)
            i = self._get_diff_intensity_reduction(dr, i)
        image[y, x] = i
    disk = morphology.disk(self.disk_r, dtype=dtype)
    image = convolve2d(image, disk, mode="same")
    if self.rotation != 0:
        image = rotate(image, self.rotation, reshape=False)
    if self.blur != 0:
        image = gaussian_filter(image, self.blur)
    if self._background_lorentz_width is not False:
        image += self._get_background_lorentz()
    if self.intensity_noise is not False:
        noise = np.random.random((image_y, image_x)) * self.intensity_noise
        image += noise
    return image
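The method above belongs to a test-image class with many instance attributes; a standalone sketch of the same place-spots, convolve-with-disk, rotate, then blur pattern follows, with made-up spot positions and parameter values:

import numpy as np
from scipy.signal import convolve2d
from scipy.ndimage import gaussian_filter, rotate
from skimage import morphology

image = np.zeros((128, 128), dtype=np.float32)
for x, y, intensity in [(40, 40, 1.0), (88, 40, 0.8), (64, 90, 0.6)]:  # hypothetical spots
    image[y, x] = intensity
disk = morphology.disk(4).astype(np.float32)       # disk-shaped footprint for each spot
image = convolve2d(image, disk, mode="same")
image = rotate(image, 15, reshape=False)           # rotate the pattern by 15 degrees
image = gaussian_filter(image, 1.5)                # soften the disk edges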
Example 9: elastic_transform
# Required import: from scipy.ndimage import filters [as alias]
# Or: from scipy.ndimage.filters import gaussian_filter [as alias]
def elastic_transform(image, elastic_value_x, elastic_value_y):
    """Elastic deformation of images as described in [Simard2003]_ (with modifications JUST in Y-DIRECTION).
    .. [Simard2003] Simard, Steinkraus and Platt, "Best Practices for
       Convolutional Neural Networks applied to Visual Document Analysis", in
       Proc. of the International Conference on Document Analysis and
       Recognition, 2003.
    Based on https://gist.github.com/erniejunior/601cdf56d2b424757de5
    """
    shape = image.shape
    random_state = np.random.RandomState(None)
    nY = shape[0] // 25
    nX = shape[1] // 25
    sigma = min(shape[1], shape[0]) * 0.0025
    alpha_X = elastic_value_x * min(shape[0], shape[1])
    alpha_Y = elastic_value_y * min(shape[0], shape[1])
    dx = gaussian_filter((random_state.rand(nY, nX) * 2 - 1), sigma)
    dy = gaussian_filter((random_state.rand(nY, nX) * 2 - 1), sigma)
    x, y, z = np.meshgrid(np.arange(shape[1]), np.arange(shape[0]), np.arange(shape[2]))
    # NOTE: scipy.misc.imresize was removed in SciPy 1.3, so this snippet needs an older SciPy (with Pillow) to run.
    dx = misc.imresize(dx, [shape[0], shape[1]], interp='bicubic')
    dy = misc.imresize(dy, [shape[0], shape[1]], interp='bicubic')
    # plt.imshow(dx, cmap=plt.cm.gray)
    # plt.show()
    dxT = []
    dyT = []
    for dummy in range(shape[2]):
        dxT.append(dx)
        dyT.append(dy)
    dx = np.dstack(dxT)
    dy = np.dstack(dyT)
    dx = dx * alpha_X
    dy = dy * alpha_Y
    indices = np.reshape(y + dy, (-1, 1)), np.reshape(x + dx, (-1, 1)), np.reshape(z, (-1, 1))
    image = map_coordinates(image, indices, order=1).reshape(shape)
    return image
Example 10: smoothfill
# Required import: from scipy.ndimage import filters [as alias]
# Or: from scipy.ndimage.filters import gaussian_filter [as alias]
def smoothfill(vols, unassign, protect=None):
    """Fill using Gaussian smoothing.
    """
    vols = [v.astype(np.float) for v in vols]
    unassign = unassign.copy()
    if protect is None:
        protect = []
    try:
        next(iter(protect))
    except StopIteration:
        pass
    except TypeError:
        protect = [protect]
    noprotect = list(set(range(len(vols))) - set(protect))

    sum_of_unassigned = np.inf
    # for as long as the number of unassigned is changing
    while unassign.sum() < sum_of_unassigned:
        sum_of_unassigned = unassign.sum()
        for i in noprotect:
            cs = gaussian_filter(vols[i].astype(np.float), 1)
            cs[vols[i] == 1] = 1
            vols[i] = cs
        vols = binarize(vols, return_empty=True)
        unassign = vols.pop()

    return vols
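smoothfill relies on a project-specific binarize helper, but its core gaussian_filter trick, smoothing a binary mask while pinning already-assigned voxels back to 1, can be sketched on its own; the mask shape, sigma and threshold below are assumptions, and the threshold merely stands in for binarize():

import numpy as np
from scipy.ndimage import gaussian_filter

mask = np.zeros((32, 32))
mask[10:20, 10:20] = 1                          # hypothetical assigned region
smooth = gaussian_filter(mask.astype(float), 1)
smooth[mask == 1] = 1                           # keep originally assigned voxels fixed
grown = smooth > 0.5                            # re-binarize the smoothed mask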
Example 11: __call__
# Required import: from scipy.ndimage import filters [as alias]
# Or: from scipy.ndimage.filters import gaussian_filter [as alias]
def __call__(self, image):
    if isinstance(self.sigma, collections.Sequence):
        sigma = random_num_generator(
            self.sigma, random_state=self.random_state)
    else:
        sigma = self.sigma
    image = gaussian_filter(image, sigma=(sigma, sigma, 0))
    return image
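This __call__ belongs to a data-augmentation transform whose sigma may be fixed or drawn at random via random_num_generator; a standalone sketch of the per-sample random blur it applies to a channel-last image, with illustrative shapes and ranges:

import numpy as np
from scipy.ndimage import gaussian_filter

rng = np.random.RandomState(0)
img = rng.rand(64, 64, 3)                                  # hypothetical H x W x C image
sigma = rng.uniform(0.5, 1.5)                              # randomly drawn blur strength
blurred = gaussian_filter(img, sigma=(sigma, sigma, 0))    # blur H and W, leave channels untouched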
Example 12: process
# Required import: from scipy.ndimage import filters [as alias]
# Or: from scipy.ndimage.filters import gaussian_filter [as alias]
def process(model, clip, path_outdata, idx):
    ''' process one clip and save the predicted saliency map '''
    with torch.no_grad():
        smap = model(clip.cuda()).cpu().data[0]

    smap = (smap.numpy()*255.).astype(np.int)/255.
    smap = gaussian_filter(smap, sigma=7)
    cv2.imwrite(os.path.join(path_outdata, '%04d.png'%(idx+1)), (smap/np.max(smap)*255.).astype(np.uint8))
Example 13: normalize_image
# Required import: from scipy.ndimage import filters [as alias]
# Or: from scipy.ndimage.filters import gaussian_filter [as alias]
def normalize_image(img, sigma=7/6):
    mu = gaussian_filter(img, sigma, mode='nearest')
    mu_sq = mu * mu
    sigma = numpy.sqrt(numpy.abs(gaussian_filter(img * img, sigma, mode='nearest') - mu_sq))
    img_norm = (img - mu) / (sigma + 1)
    return img_norm
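A hedged usage sketch for normalize_image above, which performs a local mean and local contrast normalization; note that its module imports numpy under the name numpy rather than np, and the input image here is synthetic:

import numpy
from scipy.ndimage import gaussian_filter

img = numpy.random.rand(96, 96) * 255.0   # hypothetical grayscale image
img_norm = normalize_image(img)           # roughly zero-mean, contrast-normalized output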
Example 14: measure
# Required import: from scipy.ndimage import filters [as alias]
# Or: from scipy.ndimage.filters import gaussian_filter [as alias]
def measure(self, line):
    h, w = line.shape
    smoothed = filters.gaussian_filter(line, (h * 0.5, h * self.smoothness), mode='constant')
    smoothed += 0.001 * filters.uniform_filter(smoothed, (h * 0.5, w), mode='constant')
    a = np.argmax(smoothed, axis=0)
    a = filters.gaussian_filter(a, h * self.extra)
    center = np.array(a, 'i')
    deltas = abs(np.arange(h)[:, np.newaxis] - center[np.newaxis, :])
    mad = np.mean(deltas[line != 0])
    r = int(1 + self.range * mad)
    return center, r
Example 15: blur_im_list
# Required import: from scipy.ndimage import filters [as alias]
# Or: from scipy.ndimage.filters import gaussian_filter [as alias]
def blur_im_list(im_List, fwhm_x, fwhm_t):
    """Apply a gaussian filter to a list of images, with fwhm_x in radians and fwhm_t in frames. Currently only for Stokes I.

       Args:
           fwhm_x (float): circular beam size for spatial blurring in radians
           fwhm_t (float): temporal blurring in frames

       Returns:
           (Image): output image list
    """
    # Blur Stokes I
    sigma_x = fwhm_x / im_List[0].psize / (2. * np.sqrt(2. * np.log(2.)))
    sigma_t = fwhm_t / (2. * np.sqrt(2. * np.log(2.)))
    arr = np.array([im.imvec.reshape(im.ydim, im.xdim) for im in im_List])
    arr = filt.gaussian_filter(arr, (sigma_t, sigma_x, sigma_x))

    ret = []
    for j in range(len(im_List)):
        ret.append(image.Image(arr[j], im_List[0].psize, im_List[0].ra, im_List[0].dec,
                               rf=im_List[0].rf, source=im_List[0].source, mjd=im_List[j].mjd))
    return ret
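blur_im_list operates on an interferometry Image class (psize, ra, dec, rf, ...); the underlying gaussian_filter call, blurring a (time, y, x) stack with separate temporal and spatial widths derived from FWHM values, is sketched below with made-up numbers:

import numpy as np
from scipy.ndimage import filters as filt

frames = np.random.rand(10, 64, 64)                    # hypothetical (time, y, x) movie
fwhm_x_pix, fwhm_t_frames = 3.0, 2.0                   # illustrative blur widths
fwhm_to_sigma = 1.0 / (2. * np.sqrt(2. * np.log(2.)))  # FWHM -> standard deviation
sigma_x = fwhm_x_pix * fwhm_to_sigma
sigma_t = fwhm_t_frames * fwhm_to_sigma
blurred = filt.gaussian_filter(frames, (sigma_t, sigma_x, sigma_x))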