This article collects typical usage examples of the Python method keras.backend.all. If you are unsure what backend.all does or how to call it, the curated code examples below may help. You can also explore further usage of the containing module, keras.backend.
The following presents 8 code examples of backend.all, sorted by popularity by default. You can upvote the examples you like or find useful; your votes help the system recommend better Python code samples.
Example 1: _denormalize_tensorflow
# Required import: from keras import backend [as alias]
# Or: from keras.backend import all [as alias]
# This example additionally assumes: import tensorflow as tf
def _denormalize_tensorflow(D, hparams):
    if hparams.allow_clipping_in_normalization:
        if hparams.symmetric_mels:
            return (((tf.clip_by_value(D, -hparams.max_abs_value, hparams.max_abs_value)
                      + hparams.max_abs_value) * -hparams.min_level_db / (2 * hparams.max_abs_value))
                    + hparams.min_level_db)
        else:
            return ((tf.clip_by_value(D, 0, hparams.max_abs_value)
                     * -hparams.min_level_db / hparams.max_abs_value) + hparams.min_level_db)

    if hparams.symmetric_mels:
        return (((D + hparams.max_abs_value) * -hparams.min_level_db / (2 * hparams.max_abs_value))
                + hparams.min_level_db)
    else:
        return ((D * -hparams.min_level_db / hparams.max_abs_value) + hparams.min_level_db)
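For reference, a minimal calling sketch, assuming the hyperparameters are carried by a simple namespace; the field values below are illustrative and not taken from the original project.

import tensorflow as tf
from types import SimpleNamespace

# Illustrative hyperparameters (assumed, not from the original project).
hparams = SimpleNamespace(
    allow_clipping_in_normalization=True,
    symmetric_mels=True,
    max_abs_value=4.0,
    min_level_db=-100.0,
)

# A fake normalized mel spectrogram in [-max_abs_value, max_abs_value].
D = tf.random.uniform((80, 200), minval=-4.0, maxval=4.0)
S = _denormalize_tensorflow(D, hparams)  # values mapped back into [min_level_db, 0] dB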
Example 2: customPooling
# Required import: from keras import backend [as alias]
# Or: from keras.backend import all [as alias]
def customPooling(x):
    target = x[1]
    inputs = x[0]
    maskVal = 0
    # getting the mask by observing the model's inputs
    mask = K.equal(inputs, maskVal)
    mask = K.all(mask, axis=-1, keepdims=True)
    # inverting the mask for getting the valid steps for each sample
    mask = 1 - K.cast(mask, K.floatx())
    # summing the valid steps for each sample
    stepsPerSample = K.sum(mask, axis=1, keepdims=False)
    # applying the mask to the target (to make sure you are summing zeros below)
    target = target * mask
    # calculating the mean of the steps (using our sum of valid steps as averager)
    means = K.sum(target, axis=1, keepdims=False) / stepsPerSample
    return means
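As a usage illustration, here is a minimal sketch (assumed, not from the original source) that wraps customPooling in a Lambda layer to perform masked mean pooling over zero-padded time steps; the shapes are arbitrary.

from keras.layers import Input, Lambda
from keras.models import Model

# Padded feature sequences; time steps that are all zeros count as masked.
seq_in = Input(shape=(None, 8))
# Per-step values whose mean over the valid steps we want.
target_in = Input(shape=(None, 1))

pooled = Lambda(customPooling)([seq_in, target_in])  # shape (batch, 1)
model = Model([seq_in, target_in], pooled)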
Example 3: contingency_table
# Required import: from keras import backend [as alias]
# Or: from keras.backend import all [as alias]
def contingency_table(y, z):
    """Compute contingency table."""
    y = K.round(y)
    z = K.round(z)

    def count_matches(a, b):
        tmp = K.concatenate([a, b])
        return K.sum(K.cast(K.all(tmp, -1), K.floatx()))

    ones = K.ones_like(y)
    zeros = K.zeros_like(y)
    y_ones = K.equal(y, ones)
    y_zeros = K.equal(y, zeros)
    z_ones = K.equal(z, ones)
    z_zeros = K.equal(z, zeros)

    tp = count_matches(y_ones, z_ones)
    tn = count_matches(y_zeros, z_zeros)
    fp = count_matches(y_zeros, z_ones)
    fn = count_matches(y_ones, z_zeros)
    return (tp, tn, fp, fn)
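The table above can be turned into scalar metrics; the helpers below are a hedged sketch (the function names are illustrative and not part of the original module).

from keras import backend as K

def accuracy_from_table(y_true, y_pred):
    tp, tn, fp, fn = contingency_table(y_true, y_pred)
    return (tp + tn) / (tp + tn + fp + fn + K.epsilon())

def precision_from_table(y_true, y_pred):
    tp, tn, fp, fn = contingency_table(y_true, y_pred)
    return tp / (tp + fp + K.epsilon())

Functions like these can then be passed to model.compile(metrics=[...]) as custom metrics.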
Example 4: all_acc
# Required import: from keras import backend [as alias]
# Or: from keras.backend import all [as alias]
def all_acc(y_true, y_pred):
    """
    All Accuracy
    https://github.com/rasmusbergpalm/normalization/blob/master/train.py#L10
    """
    return K.mean(
        K.all(
            K.equal(
                K.max(y_true, axis=-1),
                K.cast(K.argmax(y_pred, axis=-1), K.floatx())
            ),
            axis=1
        )
    )
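A minimal compile sketch, assuming the targets are integer class ids with a trailing singleton axis, i.e. shape (batch, timesteps, 1), which is what the max/argmax comparison above implies; the architecture is purely illustrative.

from keras.models import Sequential
from keras.layers import Embedding, TimeDistributed, Dense

model = Sequential([
    Embedding(input_dim=1000, output_dim=32, input_length=20),
    TimeDistributed(Dense(5, activation="softmax")),
])
model.compile(optimizer="adam",
              loss="sparse_categorical_crossentropy",
              metrics=[all_acc])  # fraction of sequences labelled correctly at every step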
Example 5: squash_mask
# Required import: from keras import backend [as alias]
# Or: from keras.backend import all [as alias]
def squash_mask(self, mask):
    if K.ndim(mask) == 2:
        return mask
    elif K.ndim(mask) == 3:
        return K.all(mask, axis=-1)
    return mask
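To illustrate what squash_mask does with a 3D mask: every feature position of a time step must be unmasked for that step to remain unmasked. The values below are a small, assumed example.

import numpy as np
from keras import backend as K

mask_3d = K.constant(np.array([[[True, True],
                                [True, False],
                                [False, False]]]), dtype="bool")
mask_2d = K.all(mask_3d, axis=-1)  # -> [[True, False, False]]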
Example 6: compute_mask
# Required import: from keras import backend [as alias]
# Or: from keras.backend import all [as alias]
def compute_mask(self, x, mask=None):
    if self.return_probabilities:
        mask2 = mask
        if mask is not None:
            mask2 = K.expand_dims(K.all(mask2, axis=-1))
        return [mask, mask2]
    return mask
Example 7: zero_one_accuracy
# Required import: from keras import backend [as alias]
# Or: from keras.backend import all [as alias]
def zero_one_accuracy(y_true, y_pred):
    y_true, y_pred = tensorify(y_true), tensorify(y_pred)
    n_instances, n_objects = get_instances_objects(y_true)
    equal_ranks = K.cast(K.all(K.equal(y_pred, y_true), axis=1), dtype="float32")
    denominator = K.cast(n_instances, dtype="float32")
    zero_one_loss = K.sum(equal_ranks) / denominator
    return zero_one_loss
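A hedged calling sketch; tensorify and get_instances_objects are helpers from the surrounding module (not shown here), so this only illustrates the calling convention, assuming tensorify simply converts the arrays to backend tensors.

import numpy as np
from keras import backend as K

y_true = np.array([[0, 1, 2], [2, 1, 0]])   # true rankings per instance
y_pred = np.array([[0, 1, 2], [0, 1, 2]])   # predicted rankings
acc = K.eval(zero_one_accuracy(y_true, y_pred))  # 0.5: one of the two rankings matches exactly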
Example 8: zero_one_accuracy_for_scores
# Required import: from keras import backend [as alias]
# Or: from keras.backend import all [as alias]
def zero_one_accuracy_for_scores(y_true, y_pred):
    y_true, y_pred = tensorify(y_true), tensorify(y_pred)
    n_instances, n_objects = get_instances_objects(y_true)
    predicted_rankings = scores_to_rankings(n_objects, y_pred)
    y_true = K.cast(y_true, dtype="float32")
    equal_ranks = K.cast(
        K.all(K.equal(predicted_rankings, y_true), axis=1), dtype="float32"
    )
    denominator = K.cast(n_instances, dtype="float32")
    zero_one_loss = K.sum(equal_ranks) / denominator
    return zero_one_loss
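The score-based variant is called the same way, except that y_pred holds raw per-object scores which scores_to_rankings (defined in the surrounding module) converts to rankings first; a hedged sketch, showing only the calling convention:

import numpy as np
from keras import backend as K

y_true = np.array([[0, 1, 2], [2, 1, 0]])   # true rankings per instance
scores = np.array([[0.9, 0.5, 0.1],         # raw per-object scores from the model
                   [0.2, 0.4, 0.7]])
acc = K.eval(zero_one_accuracy_for_scores(y_true, scores))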