This article collects typical usage examples of the keras.backend.any method in Python. If you have been wondering what backend.any does, how to call it, and what real-world uses look like, the curated examples below may help. You can also explore other usages of its containing module, keras.backend.
The following presents 15 code examples of backend.any, sorted by popularity by default.
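Before the examples, here is a minimal self-contained sketch (not taken from any of the repositories below, and assuming Keras 2.x with the TensorFlow backend) of what keras.backend.any computes: a logical OR reduction over the given axes, which is why it shows up so often when collapsing padding masks.

from keras import backend as K

# A padding mask of shape (batch_size=2, timesteps=3): 1 marks a real token, 0 marks padding.
mask = K.constant([[1, 1, 0],
                   [0, 0, 0]])

# Reduce over the time axis: a sample is "non-empty" if any of its timesteps is unmasked.
print(K.eval(K.any(mask, axis=1)))                 # [ True False]
print(K.eval(K.any(mask, axis=1, keepdims=True)))  # [[ True]
                                                   #  [False]]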
Example 1: get_energy
# Required import: from keras import backend [as alias]
# Or: from keras.backend import any [as alias]
def get_energy(self, y_true, input_energy, mask):
    """Energy = a1' y1 + u1' y1 + y1' U y2 + u2' y2 + y2' U y3 + u3' y3 + an' y3
    """
    input_energy = K.sum(input_energy * y_true, 2)  # (B, T)
    # (B, T-1)
    chain_energy = K.sum(K.dot(y_true[:, :-1, :],
                               self.chain_kernel) * y_true[:, 1:, :], 2)
    if mask is not None:
        mask = K.cast(mask, K.floatx())
        # (B, T-1); mask[:, :-1] * mask[:, 1:] makes it work with any padding
        chain_mask = mask[:, :-1] * mask[:, 1:]
        input_energy = input_energy * mask
        chain_energy = chain_energy * chain_mask
    total_energy = K.sum(input_energy, -1) + K.sum(chain_energy, -1)  # (B, )
    return total_energy
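The chain term above pairs each tag with its successor through chain_kernel. Below is a small NumPy sketch of that same contraction with made-up values; it only illustrates the indexing and is not part of the CRF layer.

import numpy as np

y_true = np.array([[[1, 0], [0, 1], [0, 1]]], dtype='float32')  # (B=1, T=3, units=2): one-hot tags 0, 1, 1
chain_kernel = np.array([[0.5, 1.0],
                         [2.0, 0.1]], dtype='float32')          # transition energy U[i, j]

# dot(y_true[:, :-1, :], chain_kernel) picks the row of U for each left tag;
# multiplying by y_true[:, 1:, :] and summing picks the column for the right tag.
chain_energy = np.sum(np.dot(y_true[:, :-1, :], chain_kernel) * y_true[:, 1:, :], axis=2)
print(chain_energy)  # [[1.  0.1]] == [U[0, 1], U[1, 1]]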
Example 2: call
# Required import: from keras import backend [as alias]
# Or: from keras.backend import any [as alias]
def call(self, x, mask=None):
    # x: (batch_size, input_length, input_dim)
    if mask is None:
        return K.mean(x, axis=1)  # (batch_size, input_dim)
    else:
        # This is to remove padding from the computational graph.
        if K.ndim(mask) > K.ndim(x):
            # This is due to the bug in Bidirectional that passes the input mask
            # instead of computing the output mask.
            # TODO: Fix the implementation of Bidirectional.
            mask = K.any(mask, axis=(-2, -1))
        if K.ndim(mask) < K.ndim(x):
            mask = K.expand_dims(mask)
        # `switch` is a helper from this snippet's own project (an element-wise
        # select akin to K.switch / tf.where); it zeroes out the padded positions.
        masked_input = switch(mask, x, K.zeros_like(x))
        weights = K.cast(mask / (K.sum(mask) + K.epsilon()), 'float32')
        return K.sum(masked_input * weights, axis=1)  # (batch_size, input_dim)
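For comparison, here is a rough standalone sketch of the same masked-averaging idea using only stock backend ops (hypothetical shapes, and a per-sample normalization rather than the exact weighting used above).

from keras import backend as K

def masked_mean(x, mask):
    # x:    (batch_size, timesteps, input_dim)
    # mask: (batch_size, timesteps), 1 for real tokens and 0 for padding
    mask = K.cast(mask, K.floatx())
    mask = K.expand_dims(mask)                  # (batch_size, timesteps, 1)
    summed = K.sum(x * mask, axis=1)            # padded positions contribute zero
    counts = K.sum(mask, axis=1) + K.epsilon()  # avoid division by zero on all-padding rows
    return summed / counts                      # (batch_size, input_dim)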
Example 3: loss_function
# Required import: from keras import backend [as alias]
# Or: from keras.backend import any [as alias]
def loss_function(self):
    if self.learn_mode == 'join':
        def loss(y_true, y_pred):
            assert self._inbound_nodes, 'CRF has not connected to any layer.'
            assert not self._outbound_nodes, 'When learn_mode="join", CRF must be the last layer.'
            if self.sparse_target:
                y_true = K.one_hot(K.cast(y_true[:, :, 0], 'int32'), self.units)
            X = self._inbound_nodes[0].input_tensors[0]
            mask = self._inbound_nodes[0].input_masks[0]
            nloglik = self.get_negative_log_likelihood(y_true, X, mask)
            return nloglik
        return loss
    else:
        # The two crossentropy functions below are the standard Keras losses
        # (typically imported from keras.losses).
        if self.sparse_target:
            return sparse_categorical_crossentropy
        else:
            return categorical_crossentropy
Example 4: compute_mask
# Required import: from keras import backend [as alias]
# Or: from keras.backend import any [as alias]
def compute_mask(self, input, mask=None):
    if mask is not None:
        return K.any(mask, axis=1)
    return mask
Example 5: compute_mask
# Required import: from keras import backend [as alias]
# Or: from keras.backend import any [as alias]
def compute_mask(self, input, mask=None):
    if mask is not None and self.learn_mode == 'join':
        return K.any(mask, axis=1)
    return mask
Example 6: get_log_normalization_constant
# Required import: from keras import backend [as alias]
# Or: from keras.backend import any [as alias]
def get_log_normalization_constant(self, input_energy, mask, **kwargs):
    """Compute the logarithm of the normalization constant Z, where
    Z = sum exp(-E) -> logZ = log sum exp(-E) =: -nlogZ
    """
    # should have logZ[:, i] == logZ[:, j] for any i, j
    logZ = self.recursion(input_energy, mask, return_sequences=False, **kwargs)
    return logZ[:, 0]
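A tiny NumPy check of the identity stated in the docstring, with made-up energies (the layer itself computes logZ recursively over the sequence; this only illustrates the log-sum-exp form):

import numpy as np

E = np.array([1.0, 2.0, 3.0])                        # hypothetical energies of three label sequences
logZ = np.log(np.sum(np.exp(-E)))                    # logZ = log sum exp(-E)
shifted = -1.0 + np.log(np.sum(np.exp(-(E - 1.0))))  # same value, shifted by min(E) for numerical stability
print(np.allclose(logZ, shifted))                    # True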
Example 7: _cosine_distance
# Required import: from keras import backend [as alias]
# Or: from keras.backend import any [as alias]
def _cosine_distance(M, k):
    # this is equation (6), or as I like to call it: The NaN factory.
    # TODO: Find it in a library (keras cosine loss?)
    # normalizing first as it is better conditioned.
    nk = K.l2_normalize(k, axis=-1)
    nM = K.l2_normalize(M, axis=-1)
    cosine_distance = K.batch_dot(nM, nk)
    # TODO: Do successful error handling
    # cosine_distance_error_handling = tf.Print(cosine_distance, [cosine_distance],
    #                                           message="NaN occurred in _cosine_distance")
    # cosine_distance_error_handling = K.ones(cosine_distance_error_handling.shape)
    # cosine_distance = tf.case({K.any(tf.is_nan(cosine_distance)): (lambda: cosine_distance_error_handling)},
    #                           default=lambda: cosine_distance, strict=True)
    return cosine_distance
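The commented-out lines hint at guarding the result with keras.backend.any. One possible sketch of such a guard, assuming a TensorFlow backend recent enough to provide tf.math.is_nan; the all-ones fallback mirrors the commented code and is not an established recipe.

import tensorflow as tf
from keras import backend as K

def _cosine_distance_checked(M, k):
    nk = K.l2_normalize(k, axis=-1)
    nM = K.l2_normalize(M, axis=-1)
    sim = K.batch_dot(nM, nk)
    # If any entry is NaN (e.g. from an all-zero memory row), fall back to all ones.
    has_nan = K.any(tf.math.is_nan(sim))
    return K.switch(has_nan, K.ones_like(sim), sim)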
Example 8: viterbi_decoding
# Required import: from keras import backend [as alias]
# Or: from keras.backend import any [as alias]
def viterbi_decoding(self, X, mask=None):
    input_energy = self.activation(K.dot(X, self.kernel) + self.bias)
    if self.use_boundary:
        input_energy = self.add_boundary_energy(
            input_energy, mask, self.left_boundary, self.right_boundary)
    argmin_tables = self.recursion(input_energy, mask, return_logZ=False)
    argmin_tables = K.cast(argmin_tables, 'int32')

    # backward to find the best path; `initial_best_idx` can be anything,
    # as all elements in the last argmin_table are the same
    argmin_tables = K.reverse(argmin_tables, 1)
    # matrix instead of vector is required by tf `K.rnn`
    initial_best_idx = [K.expand_dims(argmin_tables[:, 0, 0])]
    if K.backend() == 'theano':
        initial_best_idx = [K.T.unbroadcast(initial_best_idx[0], 1)]

    def gather_each_row(params, indices):
        n = K.shape(indices)[0]
        if K.backend() == 'theano':
            return params[K.T.arange(n), indices]
        else:
            indices = K.transpose(K.stack([K.tf.range(n), indices]))
            return K.tf.gather_nd(params, indices)

    def find_path(argmin_table, best_idx):
        next_best_idx = gather_each_row(argmin_table, best_idx[0][:, 0])
        next_best_idx = K.expand_dims(next_best_idx)
        if K.backend() == 'theano':
            next_best_idx = K.T.unbroadcast(next_best_idx, 1)
        return next_best_idx, [next_best_idx]

    _, best_paths, _ = K.rnn(find_path, argmin_tables, initial_best_idx,
                             input_length=K.int_shape(X)[1], unroll=self.unroll)
    best_paths = K.reverse(best_paths, 1)
    best_paths = K.squeeze(best_paths, 2)
    return K.one_hot(best_paths, self.units)
Example 9: call
# Required import: from keras import backend [as alias]
# Or: from keras.backend import any [as alias]
def call(self, inputs):
    if not isinstance(inputs, list) or len(inputs) != 2:
        raise ValueError('Inputs to ExternalMasking should be a list of 2 tensors.')
    boolean_mask = K.any(K.not_equal(inputs[-1], self.mask_value),
                         axis=-1, keepdims=True)
    return inputs[0] * K.cast(boolean_mask, K.dtype(inputs[0]))
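A quick look at what that K.any(K.not_equal(...)) mask produces on concrete values (hypothetical data, with mask_value = 0):

from keras import backend as K

# Two sequences, 3 timesteps, 2 features; the second sequence is all-zero padding after step 1.
x = K.constant([[[1., 2.], [3., 4.], [5., 6.]],
                [[7., 8.], [0., 0.], [0., 0.]]])

# A timestep is kept if ANY of its features differs from the mask value.
boolean_mask = K.any(K.not_equal(x, 0.), axis=-1, keepdims=True)  # shape (2, 3, 1)
print(K.eval(K.cast(boolean_mask, 'float32'))[:, :, 0])
# [[1. 1. 1.]
#  [1. 0. 0.]]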
Example 10: compute_mask
# Required import: from keras import backend [as alias]
# Or: from keras.backend import any [as alias]
def compute_mask(self, input, mask):
    # Redefining compute_mask because the input ndim differs from the output
    # ndim, and this needs to be handled.
    if self.return_sequences and mask is not None:
        # Get rid of the syn and hyp dimensions.
        # input mask's shape: (batch_size, num_words, num_hyps, num_senses)
        # output mask's shape: (batch_size, num_words)
        return K.any(mask, axis=(-2, -1))
    else:
        return None
Example 11: compute_mask
# Required import: from keras import backend [as alias]
# Or: from keras.backend import any [as alias]
def compute_mask(x, mask_value=0):
    boolean_mask = K.any(K.not_equal(x, mask_value), axis=-1, keepdims=False)
    return K.cast(boolean_mask, K.floatx())
Example 12: get_energy
# Required import: from keras import backend [as alias]
# Or: from keras.backend import any [as alias]
def get_energy(self, y_true, input_energy, mask):
    """Energy = a1' y1 + u1' y1 + y1' U y2 + u2' y2 + y2' U y3 + u3' y3 + an' y3
    """
    input_energy = K.sum(input_energy * y_true, 2)  # (B, T)
    chain_energy = K.sum(K.dot(y_true[:, :-1, :], self.chain_kernel) * y_true[:, 1:, :], 2)  # (B, T-1)
    if mask is not None:
        mask = K.cast(mask, K.floatx())
        chain_mask = mask[:, :-1] * mask[:, 1:]  # (B, T-1); mask[:, :-1] * mask[:, 1:] makes it work with any padding
        input_energy = input_energy * mask
        chain_energy = chain_energy * chain_mask
    total_energy = K.sum(input_energy, -1) + K.sum(chain_energy, -1)  # (B, )
    return total_energy
Example 13: viterbi_decoding
# Required import: from keras import backend [as alias]
# Or: from keras.backend import any [as alias]
def viterbi_decoding(self, X, mask=None):
    input_energy = self.activation(K.dot(X, self.kernel) + self.bias)
    if self.use_boundary:
        input_energy = self.add_boundary_energy(input_energy, mask, self.left_boundary, self.right_boundary)
    argmin_tables = self.recursion(input_energy, mask, return_logZ=False)
    argmin_tables = K.cast(argmin_tables, 'int32')

    # backward to find the best path; `initial_best_idx` can be anything, as all elements in the last argmin_table are the same
    argmin_tables = K.reverse(argmin_tables, 1)
    initial_best_idx = [K.expand_dims(argmin_tables[:, 0, 0])]  # matrix instead of vector is required by tf `K.rnn`
    if K.backend() == 'theano':
        initial_best_idx = [K.T.unbroadcast(initial_best_idx[0], 1)]

    def gather_each_row(params, indices):
        n = K.shape(indices)[0]
        if K.backend() == 'theano':
            return params[K.T.arange(n), indices]
        else:
            indices = K.transpose(K.stack([K.tf.range(n), indices]))
            return K.tf.gather_nd(params, indices)

    def find_path(argmin_table, best_idx):
        next_best_idx = gather_each_row(argmin_table, best_idx[0][:, 0])
        next_best_idx = K.expand_dims(next_best_idx)
        if K.backend() == 'theano':
            next_best_idx = K.T.unbroadcast(next_best_idx, 1)
        return next_best_idx, [next_best_idx]

    _, best_paths, _ = K.rnn(find_path, argmin_tables, initial_best_idx, input_length=K.int_shape(X)[1], unroll=self.unroll)
    best_paths = K.reverse(best_paths, 1)
    best_paths = K.squeeze(best_paths, 2)
    return K.one_hot(best_paths, self.units)
Example 14: normalize_mask
# Required import: from keras import backend [as alias]
# Or: from keras.backend import any [as alias]
def normalize_mask(x, mask):
    '''Keep the mask aligned with the tensor x.

    Arguments: x is a data tensor; mask is a binary tensor.
    Rationale: keep mask at the same dimensionality as x, but only with a
    length-1 trailing dimension. This ensures broadcastability, which is
    important because inferring shapes is hard and shapes are easy to get wrong.
    '''
    mask = K.cast(mask, K.floatx())
    while K.ndim(mask) != K.ndim(x):
        if K.ndim(mask) > K.ndim(x):
            mask = K.any(mask, axis=-1)
        elif K.ndim(mask) < K.ndim(x):
            mask = K.expand_dims(mask)
    return K.any(mask, axis=-1, keepdims=True)
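Assuming normalize_mask from this example is in scope, a short symbolic check of the shape it produces for hypothetical placeholder shapes:

from keras import backend as K

x = K.placeholder(shape=(None, 5, 10))       # (batch, words, dim)
mask = K.placeholder(shape=(None, 5, 3, 4))  # one dimension too many, e.g. (batch, words, hyps, senses)

m = normalize_mask(x, mask)
# The while loop collapses trailing axes with K.any until ndim matches x; the final
# K.any then reduces the last axis but keeps it as length 1, so m broadcasts against x.
print(K.int_shape(m))  # (None, 5, 1)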
Example 15: compute_mask
# Required import: from keras import backend [as alias]
# Or: from keras.backend import any [as alias]
def compute_mask(self, x, mask=None):
    if mask is None:
        return None
    # import pdb
    # pdb.set_trace()
    target_dim = K.ndim(x) - 2
    num_reducing = K.ndim(mask) - target_dim
    if num_reducing:
        axes = tuple([-i for i in range(1, num_reducing + 1)])
        mask = K.any(mask, axes)
    return mask