This article collects typical usage examples of the Python method tensorflow.python.ops.gen_math_ops._tanh_grad. If you are unsure what gen_math_ops._tanh_grad does or how to use it, the curated code examples below may help; you can also explore the other members of the module tensorflow.python.ops.gen_math_ops.
The following presents 5 code examples of gen_math_ops._tanh_grad, listed by default in order of popularity.
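For context, the underlying TanhGrad op exposed as gen_math_ops._tanh_grad(y, dy) computes dy * (1 - y * y), the chain-rule factor for y = tanh(x). The short NumPy sketch below only mirrors that formula for illustration; tanh_grad_reference is a made-up name, not a TensorFlow API.

import numpy as np

def tanh_grad_reference(y, dy):
    # What the TanhGrad kernel computes: dy * (1 - y**2).
    return dy * (1.0 - np.square(y))

x = np.linspace(-2.0, 2.0, 5)
y = np.tanh(x)
dy = np.ones_like(x)                      # upstream gradient of ones
print(tanh_grad_reference(y, dy))         # equals 1 - tanh(x)**2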
Example 1: _TanhGrad
# Required module import: from tensorflow.python.ops import gen_math_ops [as alias]
# Or: from tensorflow.python.ops.gen_math_ops import _tanh_grad [as alias]
# Also uses: from tensorflow.python.framework import ops
# Also uses: from tensorflow.python.ops import math_ops
def _TanhGrad(op, grad):
  """Returns grad * (1 - tanh(x) * tanh(x))."""
  y = op.outputs[0]  # y = tanh(x)
  with ops.control_dependencies([grad.op]):
    y = math_ops.conj(y)
    # pylint: disable=protected-access
    return gen_math_ops._tanh_grad(y, grad)
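As a quick sanity check, the registered gradient can be exercised end to end through tf.gradients. This is only a sketch and assumes a TF 1.x graph/session environment, which is what these examples target:

import numpy as np
import tensorflow as tf

x = tf.constant(np.linspace(-2.0, 2.0, 5))
y = tf.tanh(x)
dy_dx = tf.gradients(y, x)[0]        # dispatches to the _TanhGrad function above
with tf.Session() as sess:
    got, want = sess.run([dy_dx, 1.0 - tf.square(y)])
    print(np.allclose(got, want))    # True: dy/dx == 1 - tanh(x)**2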
Example 2: _TanhGradGrad
# Required module import: from tensorflow.python.ops import gen_math_ops [as alias]
# Or: from tensorflow.python.ops.gen_math_ops import _tanh_grad [as alias]
def _TanhGradGrad(op, grad):
  with ops.control_dependencies([grad.op]):
    a = math_ops.conj(op.inputs[0])
    b = math_ops.conj(op.inputs[1])
    # pylint: disable=protected-access
    return grad * -2.0 * b * a, gen_math_ops._tanh_grad(a, grad)
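To see where the returned pair comes from: TanhGrad computes z = b * (1 - a**2), where a = tanh(x) and b is the incoming first-order gradient, so dz/da = -2 * a * b and dz/db = 1 - a**2, each scaled by the upstream grad; the second term is itself another TanhGrad. A small NumPy sketch of that algebra (variable names are illustrative only):

import numpy as np

a = np.tanh(np.linspace(-2.0, 2.0, 5))    # a = op.inputs[0] = tanh(x)
b = np.linspace(0.5, 1.5, 5)              # b = op.inputs[1] = first-order gradient
grad = np.ones(5)                         # upstream gradient of the second pass

# z = TanhGrad(a, b) = b * (1 - a**2)
dz_da = grad * (-2.0 * a * b)             # matches: grad * -2.0 * b * a
dz_db = grad * (1.0 - np.square(a))       # matches: gen_math_ops._tanh_grad(a, grad)
print(dz_da, dz_db)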
Example 3: testGradGrad
# Required module import: from tensorflow.python.ops import gen_math_ops [as alias]
# Or: from tensorflow.python.ops.gen_math_ops import _tanh_grad [as alias]
# Also uses: import numpy as np and import tensorflow as tf
def testGradGrad(self):
  np.random.seed(7)
  shape = (5,)
  dtype_tols = [(np.float32, 5e-4), (np.float64, 1e-6), (np.complex64, 5e-4),
                (np.complex128, 1e-6)]
  op_range = [(gen_math_ops._inv_grad, [-2, 2]),
              (gen_math_ops._rsqrt_grad, [0.1, 3]),
              (gen_math_ops._sigmoid_grad, [-2, 2]),
              (gen_math_ops._sqrt_grad, [0.1, 3]),
              (gen_math_ops._tanh_grad, [-2, 2])]

  def rand(dtype):
    # Closes over real_range, bound in the loop below.
    x = np.random.uniform(
        real_range[0], real_range[1], size=shape[0]).astype(dtype)
    if dtype in (np.complex64, np.complex128):
      x += 1j * np.random.uniform(-2, 2, size=shape[0]).astype(dtype)
    return x

  for op, real_range in op_range:
    with self.test_session():
      for dtype, tol in dtype_tols:
        x = tf.constant(rand(dtype))
        y = tf.constant(rand(dtype))
        z = op(x, y)
        grads = tf.test.compute_gradient(
            [x, y], [shape, shape],
            z,
            shape,
            x_init_value=[rand(dtype), rand(dtype)])
        if isinstance(grads, tuple):
          grads = [grads]
        for analytical, numerical in grads:
          self.assertAllClose(analytical, numerical, rtol=tol, atol=tol)
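The test above compares analytical gradients of several *_grad ops against finite differences via tf.test.compute_gradient. The same idea can be illustrated for the tanh case alone in plain NumPy; this is a standalone sketch, not part of the original test:

import numpy as np

def tanh_grad(y, dy):
    return dy * (1.0 - np.square(y))      # the TanhGrad formula

rng = np.random.RandomState(7)
y = rng.uniform(-2.0, 2.0, size=5)
dy = rng.uniform(-2.0, 2.0, size=5)
h = 1e-6

numerical = (tanh_grad(y + h, dy) - tanh_grad(y - h, dy)) / (2.0 * h)
analytical = -2.0 * y * dy                # d/dy [dy * (1 - y**2)]
print(np.allclose(numerical, analytical, atol=1e-4))   # True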
Example 4: _TanhGrad
# Required module import: from tensorflow.python.ops import gen_math_ops [as alias]
# Or: from tensorflow.python.ops.gen_math_ops import _tanh_grad [as alias]
def _TanhGrad(op, grad):
  """Returns grad * (1 - tanh(x) * tanh(x))."""
  y = op.outputs[0]  # y = tanh(x)
  with ops.control_dependencies([grad]):
    y = math_ops.conj(y)
    # pylint: disable=protected-access
    return gen_math_ops._tanh_grad(y, grad)
Author: PacktPublishing | Project: Serverless-Deep-Learning-with-TensorFlow-and-AWS-Lambda | Lines: 9 | Source file: math_grad.py
Example 5: _TanhGradGrad
# Required module import: from tensorflow.python.ops import gen_math_ops [as alias]
# Or: from tensorflow.python.ops.gen_math_ops import _tanh_grad [as alias]
def _TanhGradGrad(op, grad):
  with ops.control_dependencies([grad]):
    a = math_ops.conj(op.inputs[0])
    b = math_ops.conj(op.inputs[1])
    # pylint: disable=protected-access
    return grad * -2.0 * b * a, gen_math_ops._tanh_grad(a, grad)
Author: PacktPublishing | Project: Serverless-Deep-Learning-with-TensorFlow-and-AWS-Lambda | Lines: 8 | Source file: math_grad.py