本文整理汇总了Python中keras.backend.std方法的典型用法代码示例。如果您正苦于以下问题:Python backend.std方法的具体用法?Python backend.std怎么用?Python backend.std使用的例子?那么恭喜您, 这里精选的方法代码示例或许可以为您提供帮助。您也可以进一步了解该方法所在类keras.backend
的用法示例。
在下文中一共展示了backend.std方法的14个代码示例,这些例子默认根据受欢迎程度排序。您可以为喜欢或者感觉有用的代码点赞,您的评价将有助于系统推荐出更棒的Python代码示例。
示例1: call
# 需要导入模块: from keras import backend [as 别名]
# 或者: from keras.backend import std [as 别名]
def call(self, x):
    """Layer-normalise ``x`` over its last (feature) axis.

    Supports rank-2 ``(batch, features)`` and rank-3
    ``(batch, time, features)`` inputs: per-sample mean and standard
    deviation are broadcast back to the input shape before normalising.

    Returns ``g * (x - mean) / (std + eps) + b``.
    """
    mean = K.mean(x, axis=-1)
    std = K.std(x, axis=-1)
    if len(x.shape) == 3:
        # (batch, time) stats -> repeat over the feature axis, then move
        # that axis back to the end to match ``x``.
        mean = K.permute_dimensions(
            K.repeat(mean, x.shape.as_list()[-1]),
            [0, 2, 1]
        )
        std = K.permute_dimensions(
            K.repeat(std, x.shape.as_list()[-1]),
            [0, 2, 1]
        )
    elif len(x.shape) == 2:
        mean = K.reshape(
            K.repeat_elements(mean, x.shape.as_list()[-1], 0),
            (-1, x.shape.as_list()[-1])
        )
        # BUG FIX: the original repeated ``mean`` here as well, so the
        # standard deviation was never broadcast in the 2-D branch and the
        # result divided by (broadcast mean + eps) instead of the std.
        std = K.reshape(
            K.repeat_elements(std, x.shape.as_list()[-1], 0),
            (-1, x.shape.as_list()[-1])
        )
    return self._g * (x - mean) / (std + self._epsilon) + self._b
示例2: call
# 需要导入模块: from keras import backend [as 别名]
# 或者: from keras.backend import std [as 别名]
def call(self, inputs, training=None):
    """Instance-normalise ``inputs``.

    Statistics are taken over every axis except the batch axis and
    ``self.axis`` (when set); optional learned scale (``gamma``) and
    shift (``beta``) are then broadcast back onto the normalised tensor.
    """
    shape = K.int_shape(inputs)
    stat_axes = list(range(len(shape)))
    if self.axis is not None:
        del stat_axes[self.axis]
    del stat_axes[0]  # never reduce over the batch axis

    mu = K.mean(inputs, stat_axes, keepdims=True)
    sigma = K.std(inputs, stat_axes, keepdims=True) + self.epsilon
    outputs = (inputs - mu) / sigma

    # Shape that broadcasts gamma/beta along the kept feature axis only.
    param_shape = [1] * len(shape)
    if self.axis is not None:
        param_shape[self.axis] = shape[self.axis]
    if self.scale:
        outputs = outputs * K.reshape(self.gamma, param_shape)
    if self.center:
        outputs = outputs + K.reshape(self.beta, param_shape)
    return outputs
示例3: call
# 需要导入模块: from keras import backend [as 别名]
# 或者: from keras.backend import std [as 别名]
def call(self, inputs, training=None):
    """Apply instance normalisation to ``inputs``.

    Mean/std are computed over all axes except batch (axis 0) and the
    configured feature axis; ``gamma``/``beta`` are applied when the
    layer was built with ``scale``/``center``.
    """
    input_shape = K.int_shape(inputs)
    reduce_over = [ax for ax in range(1, len(input_shape)) if ax != self.axis]
    if self.axis is None:
        reduce_over = list(range(1, len(input_shape)))

    center = K.mean(inputs, reduce_over, keepdims=True)
    spread = K.std(inputs, reduce_over, keepdims=True) + self.epsilon
    result = (inputs - center) / spread

    broadcast = [1] * len(input_shape)
    if self.axis is not None:
        broadcast[self.axis] = input_shape[self.axis]
    if self.scale:
        result = result * K.reshape(self.gamma, broadcast)
    if self.center:
        result = result + K.reshape(self.beta, broadcast)
    return result
示例4: render_naive
# 需要导入模块: from keras import backend [as 别名]
# 或者: from keras.backend import std [as 别名]
def render_naive(layer_name, filter_index, img0=img_noise, iter_n=20, step=1.0):
    """Visualise one filter by naive gradient ascent on the input image.

    Starting from ``img0`` (a noise image by default), repeatedly nudges
    the image in the direction that increases the mean activation of
    ``filter_index`` in ``layer_name``. Relies on the module-level
    ``layer_dict`` and ``input_tensor`` globals.
    """
    if layer_name not in layer_dict:
        print("ERROR: invalid layer name: %s" % layer_name)
        return
    layer = layer_dict[layer_name]
    print("{} < {}".format(filter_index, layer.output_shape[-1]))

    objective = K.mean(layer.output[:, :, :, filter_index])
    gradient = K.gradients(objective, input_tensor)[0]
    # Networks containing Dropout/BatchNorm need K.learning_phase() as input.
    step_fn = K.function([input_tensor, K.learning_phase()], [objective, gradient])

    img = img0.copy()
    for i in range(iter_n):
        # Pass 0 for the learning phase: we are not training.
        act_val, grad_val = step_fn([img, 0])
        grad_val /= K.std(grad_val) + 1e-8  # normalise the update magnitude
        img += grad_val * step
        print(i, act_val)
示例5: nss
# 需要导入模块: from keras import backend [as 别名]
# 或者: from keras.backend import std [as 别名]
def nss(y_true, y_pred):
    """Normalized Scanpath Saliency loss (negated, for minimisation).

    The prediction is peak-normalised, standardised with its per-sample
    mean/std, and averaged over the fixation locations in ``y_true``.
    Uses the module-level ``shape_r_out``/``shape_c_out`` map dimensions.
    """
    def tile_to_map(stat):
        # Broadcast a per-sample scalar statistic to a full saliency map.
        rows = K.repeat_elements(K.expand_dims(stat), shape_r_out, axis=-1)
        return K.repeat_elements(K.expand_dims(rows), shape_c_out, axis=-1)

    # Scale each prediction so its peak value is 1.
    peak = K.max(K.max(y_pred, axis=2), axis=2)
    y_pred = y_pred / tile_to_map(peak)

    # Standardise per sample: zero mean, unit std.
    flat = K.batch_flatten(y_pred)
    mu = tile_to_map(K.expand_dims(K.mean(flat, axis=-1)))
    sigma = tile_to_map(K.expand_dims(K.std(flat, axis=-1)))
    y_pred = (y_pred - mu) / (sigma + K.epsilon())

    # Mean standardised value at the fixated pixels, negated as a loss.
    return -(K.sum(K.sum(y_true * y_pred, axis=2), axis=2)
             / K.sum(K.sum(y_true, axis=2), axis=2))
# Gaussian priors initialization
示例6: call
# 需要导入模块: from keras import backend [as 别名]
# 或者: from keras.backend import std [as 别名]
def call(self, inputs):
    """Global standard-deviation pooling over the spatial axes.

    Parameters
    ----------
    inputs: tensor
        4-D image batch; spatial axes depend on ``self.data_format``.

    Returns
    -------
    tensor
        Per-channel standard deviation of each sample.
    """
    spatial_axes = [1, 2] if self.data_format == 'channels_last' else [2, 3]
    return K.std(inputs, axis=spatial_axes)
示例7: call
# 需要导入模块: from keras import backend [as 别名]
# 或者: from keras.backend import std [as 别名]
def call(self, inputs, training=None):
    """Adaptive instance normalisation.

    ``inputs`` is a list ``[content, beta, gamma]``: the content tensor is
    instance-normalised, then rescaled/shifted by the externally supplied
    ``gamma``/``beta`` tensors (no learned parameters in this layer).
    """
    content, beta, gamma = inputs[0], inputs[1], inputs[2]
    shape = K.int_shape(content)

    stat_axes = list(range(len(shape)))
    if self.axis is not None:
        del stat_axes[self.axis]
    del stat_axes[0]  # keep the batch axis

    mu = K.mean(content, stat_axes, keepdims=True)
    sigma = K.std(content, stat_axes, keepdims=True) + self.epsilon
    return ((content - mu) / sigma) * gamma + beta
示例8: ssim
# 需要导入模块: from keras import backend [as 别名]
# 或者: from keras.backend import std [as 别名]
def ssim(y_true, y_pred):
    """Structural similarity (SSIM) between two image tensors.

    Uses global (whole-tensor) statistics rather than the usual sliding
    window, so this is a single scalar similarity in [-1, 1]-ish range.

    NOTE(review): ``sig_xy`` is approximated as ``sqrt(sig_x * sig_y)``
    rather than the true cross-covariance E[(x-mu_x)(y-mu_y)] — kept
    as-is to avoid changing the metric's scale; confirm intent.
    """
    # K1, K2: small stabilising constants (much smaller than 1).
    K1 = 0.04
    K2 = 0.06
    # Means, standard deviations, and (approximate) correlation term.
    mu_x = K.mean(y_pred)
    mu_y = K.mean(y_true)
    sig_x = K.std(y_pred)
    sig_y = K.std(y_true)
    sig_xy = (sig_x * sig_y) ** 0.5
    # L: dynamic range of pixel values; C1/C2 stabilise the divisions.
    L = 33
    C1 = (K1 * L) ** 2
    C2 = (K2 * L) ** 2
    # BUG FIX: the SSIM definition's second numerator term is
    # ``2*sig_xy + C2``; the original multiplied by C2 instead of adding,
    # which rescales the result and breaks the C2 stabilisation.
    return ((2 * mu_x * mu_y + C1) * (2 * sig_xy + C2)
            / ((mu_x ** 2 + mu_y ** 2 + C1) * (sig_x ** 2 + sig_y ** 2 + C2)))
示例9: nrmse_b
# 需要导入模块: from keras import backend [as 别名]
# 或者: from keras.backend import std [as 别名]
def nrmse_b(y_true, y_pred):
    """RMSE normalised by the standard deviation of ``y_true``.

    If this value is larger than 1, a random time series with the same
    mean and standard deviation as ``y_true`` would score better than
    the model.
    """
    rmse = K.sqrt(K.mean(K.sum(K.square(y_true - y_pred))))
    return rmse / K.std(K.identity(y_true))
示例10: mvn
# 需要导入模块: from keras import backend [as 别名]
# 或者: from keras.backend import std [as 别名]
def mvn(tensor):
    """Per-row mean-variance normalisation.

    Subtracts each row's mean and divides by its standard deviation
    (plus a small epsilon for numerical safety).
    """
    eps = 1e-6
    row_mean = K.mean(tensor, axis=1, keepdims=True)
    row_std = K.std(tensor, axis=1, keepdims=True)
    return (tensor - row_mean) / (row_std + eps)
示例11: gmsd_loss
# 需要导入模块: from keras import backend [as 别名]
# 或者: from keras.backend import std [as 别名]
def gmsd_loss(y_true, y_pred):
    """Gradient Magnitude Similarity Deviation loss.

    An image-quality metric that improves on MS-SSIM while being cheaper
    to compute: the per-pixel gradient-magnitude similarity map is
    reduced to its standard deviation per sample.

    Parameters
    ----------
    y_true: tensor or variable
        The ground truth value
    y_pred: tensor or variable
        The predicted value

    Returns
    -------
    tensor
        The loss value

    References
    ----------
    http://www4.comp.polyu.edu.hk/~cslzhang/IQA/GMSD/GMSD.htm
    https://arxiv.org/ftp/arxiv/papers/1308/1308.3052.pdf
    """
    fudge = 0.0025  # stabilises the similarity ratio
    edges_true = scharr_edges(y_true, True)
    edges_pred = scharr_edges(y_pred, True)
    numerator = 2.0 * edges_true * edges_pred + fudge
    denominator = K.square(edges_true) + K.square(edges_pred) + fudge
    similarity_map = numerator / denominator
    deviation = K.std(similarity_map, axis=(1, 2, 3), keepdims=True)
    return K.squeeze(deviation, axis=-1)
# Gaussian Blur is here as it is only used for losses.
# It was previously kept in lib/model/masks but the import of keras backend
# breaks plaidml
示例12: call
# 需要导入模块: from keras import backend [as 别名]
# 或者: from keras.backend import std [as 别名]
def call(self, inputs, training=None):  # pylint:disable=arguments-differ,unused-argument
    """This is where the layer's logic lives.

    Instance-normalises ``inputs`` over every non-batch axis other than
    ``self.axis``, then optionally applies the learned ``gamma`` scale
    and ``beta`` shift.

    Parameters
    ----------
    inputs: tensor
        Input tensor, or list/tuple of input tensors

    Returns
    -------
    tensor
        A tensor or list/tuple of tensors
    """
    shape = K.int_shape(inputs)
    axes = list(range(len(shape)))
    if self.axis is not None:
        del axes[self.axis]
    del axes[0]  # statistics are per-sample, never across the batch

    mu = K.mean(inputs, axes, keepdims=True)
    sd = K.std(inputs, axes, keepdims=True) + self.epsilon
    out = (inputs - mu) / sd

    # Reshape learned parameters so they broadcast along the feature axis.
    param_shape = [1] * len(shape)
    if self.axis is not None:
        param_shape[self.axis] = shape[self.axis]
    if self.scale:
        out = out * K.reshape(self.gamma, param_shape)
    if self.center:
        out = out + K.reshape(self.beta, param_shape)
    return out
示例13: call
# 需要导入模块: from keras import backend [as 别名]
# 或者: from keras.backend import std [as 别名]
def call(self, x):
    """Layer-normalise ``x`` over its last axis with learned gain/bias."""
    mu = K.mean(x, axis=-1, keepdims=True)
    sigma = K.std(x, axis=-1, keepdims=True)
    normed = (x - mu) / (sigma + self.eps)
    return self.gamma * normed + self.beta
示例14: call
# 需要导入模块: from keras import backend [as 别名]
# 或者: from keras.backend import std [as 别名]
def call(self, x, **kwargs):
    """Layer normalisation over the final axis.

    Centres and scales ``x`` with its own last-axis statistics, then
    applies the learned ``gamma``/``beta`` parameters.
    """
    centred = x - K.mean(x, axis=-1, keepdims=True)
    scale = K.std(x, axis=-1, keepdims=True) + self.eps
    return self.gamma * centred / scale + self.beta