本文整理匯總了Python中tensorflow.keras.backend.tanh方法的典型用法代碼示例。如果您正苦於以下問題:Python backend.tanh方法的具體用法?Python backend.tanh怎麽用?Python backend.tanh使用的例子?那麽, 這裏精選的方法代碼示例或許可以為您提供幫助。您也可以進一步了解該方法所在類tensorflow.keras.backend
的用法示例。
在下文中一共展示了backend.tanh方法的6個代碼示例,這些例子默認根據受歡迎程度排序。您可以為喜歡或者感覺有用的代碼點讚,您的評價將有助於係統推薦出更棒的Python代碼示例。
示例1: __init__
# 需要導入模塊: from tensorflow.keras import backend [as 別名]
# 或者: from tensorflow.keras.backend import tanh [as 別名]
def __init__(self,
             ratio,
             return_mask=False,
             sigmoid_gating=False,
             kernel_initializer='glorot_uniform',
             kernel_regularizer=None,
             kernel_constraint=None,
             **kwargs):
    """Store the layer configuration and resolve kernel options.

    Args:
        ratio: pooling/selection ratio kept on the instance as-is.
        return_mask: if True, the layer is expected to also expose the
            selection mask (used elsewhere in the class — not visible here).
        sigmoid_gating: choose `K.sigmoid` as the gating op instead of
            the default `K.tanh`.
        kernel_initializer/regularizer/constraint: resolved through the
            Keras `get(...)` helpers so strings, dicts, or instances are
            all accepted.
        **kwargs: forwarded to the parent layer constructor.
    """
    super().__init__(**kwargs)
    self.ratio = ratio
    self.return_mask = return_mask
    self.sigmoid_gating = sigmoid_gating
    # Gating non-linearity: sigmoid when requested, tanh otherwise.
    if self.sigmoid_gating:
        self.gating_op = K.sigmoid
    else:
        self.gating_op = K.tanh
    self.kernel_initializer = initializers.get(kernel_initializer)
    self.kernel_regularizer = regularizers.get(kernel_regularizer)
    self.kernel_constraint = constraints.get(kernel_constraint)
示例2: logtanh
# 需要導入模塊: from tensorflow.keras import backend [as 別名]
# 或者: from tensorflow.keras.backend import tanh [as 別名]
def logtanh(x, a=1):
    """Tanh scaled by a logarithmic magnitude term.

    Computes ``tanh(x) * log(2 + a * |x|)`` — tanh bounds the sign/shape
    while the log factor lets the output keep growing slowly with |x|.

    See Also: arcsinh
    """
    log_term = K.log(2 + a * abs(x))
    return K.tanh(x) * log_term
示例3: gelu_tanh
# 需要導入模塊: from tensorflow.keras import backend [as 別名]
# 或者: from tensorflow.keras.backend import tanh [as 別名]
def gelu_tanh(x):
    """GELU activation using the tanh approximation.

    Approximates the Gaussian CDF as
    ``0.5 * (1 + tanh(sqrt(2/pi) * (x + 0.044715 * x^3)))``
    and returns ``x * cdf``.
    """
    inner = np.sqrt(2 / np.pi) * (x + 0.044715 * K.pow(x, 3))
    cdf = 0.5 * (1.0 + K.tanh(inner))
    return x * cdf
示例4: set_gelu
# 需要導入模塊: from tensorflow.keras import backend [as 別名]
# 或者: from tensorflow.keras.backend import tanh [as 別名]
def set_gelu(version):
    """Register the chosen GELU variant as Keras' custom object 'gelu'.

    Args:
        version: 'erf' (exact) or 'tanh' (approximate), case-insensitive.

    Raises:
        AssertionError: if version is neither 'erf' nor 'tanh'.
    """
    version = version.lower()
    assert version in ['erf', 'tanh'], 'gelu version must be erf or tanh'
    chosen = gelu_erf if version == 'erf' else gelu_tanh
    keras.utils.get_custom_objects()['gelu'] = chosen
示例5: mish
# 需要導入模塊: from tensorflow.keras import backend [as 別名]
# 或者: from tensorflow.keras.backend import tanh [as 別名]
def mish(x):
    """Mish activation: ``x * tanh(softplus(x))``."""
    gate = K.tanh(K.softplus(x))
    return x * gate
示例6: __call__
# 需要導入模塊: from tensorflow.keras import backend [as 別名]
# 或者: from tensorflow.keras.backend import tanh [as 別名]
def __call__(self, x):
    """Quantize `x` to a (scaled) sign value with a straight-through gradient.

    Forward value is ``scale * k_sign`` (mapped to {0, 1} when
    ``self.use_01``); the ``x + stop_gradient(-x + ...)`` trick makes the
    backward pass treat the op as identity in `x`.

    NOTE(review): relies on instance attributes not visible here
    (`alpha`, `default_alpha`, `use_stochastic_rounding`, `use_01`) and
    module helpers `_round_through` / `_get_scale` — confirm their
    contracts against the rest of the file.
    """
    # Only the strings "auto" / "auto_po2" are accepted as symbolic alphas.
    if isinstance(self.alpha, six.string_types):
        assert self.alpha in ["auto", "auto_po2"]
    # Resolve an initial scale; for string alphas it is a placeholder (1.0)
    # and the real scale comes from _get_scale below.
    if self.alpha is None:
        scale = self.default_alpha
    elif isinstance(self.alpha, six.string_types):
        scale = 1.0
    elif isinstance(self.alpha, np.ndarray):
        scale = self.alpha
    else:
        scale = float(self.alpha)
    if self.use_stochastic_rounding:
        # Reduction axes: everything except the channel axis
        # (all-but-last for channels_last, all-but-first otherwise);
        # a rank-1 tensor reduces over all elements.
        len_axis = len(x.shape.as_list())
        if len_axis == 1:
            axis = None
        elif K.image_data_format() == "channels_last":
            axis = list(range(len_axis - 1))
        else:
            axis = list(range(1, len_axis))
        # if stochastic_round is through, we need to scale
        # number so that the precision is small enough.
        # This is especially important if range of x is very
        # small, which occurs during initialization of weights.
        m = K.max(tf.abs(x), axis=axis, keepdims=True)
        m = tf.where(m > 1.0, tf.ones_like(m), m)  # clamp per-slice max to 1
        f = 2 * m
        # Stochastic rounding only during training; identity at inference.
        x = tf_utils.smart_cond(
            K.learning_phase(),
            lambda: f * _round_through(
                x / f, use_stochastic_rounding=True, precision=0.125),
            lambda: x)
    # tf.sign yields 0 at exactly 0; the blocks below resolve those zeros.
    k_sign = tf.sign(x)
    if self.use_stochastic_rounding:
        # in inference, we use a biased "1" for stochastic rounding right now
        k_sign += (1.0 - tf.abs(k_sign)) * tf_utils.smart_cond(
            K.learning_phase(),
            lambda: 2.0 * tf.round(tf.random.uniform(tf.shape(x))) - 1.0,
            lambda: tf.ones_like(tf.shape(x), dtype=K.floatx()))
    # if something still remains, just make it positive for now.
    k_sign += (1.0 - tf.abs(k_sign))
    if self.use_01:
        # Map {-1, +1} -> {0, 1}.
        k_sign = (k_sign + 1.0) / 2.0
    # approximate binary by tanh(x) as it has limited range between -1 and +1.
    if self.alpha is None:
        x = K.tanh(x)
    scale = _get_scale(self.alpha, x, k_sign)
    self.scale = scale  # side effect: cache the scale on the instance
    # Straight-through estimator: forward = scale * k_sign, grad = d/dx.
    return x + tf.stop_gradient(-x + scale * k_sign)