This article collects typical usage examples of the tensorflow.python.ops.gen_nn_ops._sparse_softmax_cross_entropy_with_logits method in Python. If you are wondering what gen_nn_ops._sparse_softmax_cross_entropy_with_logits does, how to call it, or what real code that uses it looks like, the curated examples below may help. You can also explore further usage examples of the module it belongs to, tensorflow.python.ops.gen_nn_ops.
The sections below show 3 code examples of the gen_nn_ops._sparse_softmax_cross_entropy_with_logits method, sorted by popularity by default. You can upvote the examples you like or find useful; your feedback helps the system recommend better Python code examples.
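Before the examples, here is a minimal standalone sketch of how the op is typically called. This sketch is not taken from the test file and assumes a TensorFlow 1.x graph-mode session. The op takes float logits of shape [batch_size, num_classes] and integer label indices of shape [batch_size], and returns two tensors: the per-example loss and the backprop, i.e. the gradient of the loss with respect to the logits. The public tf.nn.sparse_softmax_cross_entropy_with_logits API, which in TF 1.x sits on top of this generated op, exposes only the loss.

import numpy as np
import tensorflow as tf
from tensorflow.python.ops import gen_nn_ops

# Logits for a batch of 2 examples over 3 classes, plus integer class labels.
logits = np.array([[2.0, 1.0, 0.1],
                   [0.5, 2.5, 0.3]], dtype=np.float32)
labels = np.array([0, 1], dtype=np.int64)

# The generated op returns both the per-example loss and the backprop tensor.
loss, backprop = gen_nn_ops._sparse_softmax_cross_entropy_with_logits(logits, labels)

# The public API exposes only the loss.
public_loss = tf.nn.sparse_softmax_cross_entropy_with_logits(labels=labels, logits=logits)

with tf.Session() as sess:
  loss_val, backprop_val, public_val = sess.run([loss, backprop, public_loss])
  print(loss_val)      # per-example cross-entropy loss
  print(backprop_val)  # softmax(logits) minus one-hot(labels)
  print(public_val)    # should match loss_val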
Example 1: _testXent
# Required import: from tensorflow.python.ops import gen_nn_ops [as alias]
# Or: from tensorflow.python.ops.gen_nn_ops import _sparse_softmax_cross_entropy_with_logits [as alias]
def _testXent(self, np_features, np_labels):
  np_loss, np_backprop = self._npXent(np_features, np_labels)
  with self.test_session(use_gpu=True) as sess:
    loss, backprop = gen_nn_ops._sparse_softmax_cross_entropy_with_logits(
        np_features, np_labels)
    tf_loss, tf_backprop = sess.run([loss, backprop])
  self.assertAllCloseAccordingToType(np_loss, tf_loss)
  self.assertAllCloseAccordingToType(np_backprop, tf_backprop)
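The self._npXent helper used above is not shown in this excerpt. A plausible NumPy reference implementation (an assumption about what the test class computes, not the actual helper) would calculate the per-example softmax cross-entropy and its gradient as follows:

import numpy as np

def _npXent(features, labels):
  # Reference computation: loss[i] = -log softmax(features[i])[labels[i]],
  # backprop[i] = softmax(features[i]) - one_hot(labels[i]).
  features = np.asarray(features, dtype=np.float32)
  labels = np.asarray(labels)
  batch_size = features.shape[0]
  # Numerically stable softmax: subtract the row-wise max before exponentiating.
  shifted = features - features.max(axis=1, keepdims=True)
  exp = np.exp(shifted)
  probs = exp / exp.sum(axis=1, keepdims=True)
  one_hot = np.zeros_like(probs)
  one_hot[np.arange(batch_size), labels] = 1.0
  loss = -np.log(probs[np.arange(batch_size), labels])
  backprop = probs - one_hot
  return loss, backprop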
Example 2: testSingleClass
# Required import: from tensorflow.python.ops import gen_nn_ops [as alias]
# Or: from tensorflow.python.ops.gen_nn_ops import _sparse_softmax_cross_entropy_with_logits [as alias]
def testSingleClass(self):
  for label_dtype in np.int32, np.int64:
    with self.test_session(use_gpu=True) as sess:
      loss, backprop = gen_nn_ops._sparse_softmax_cross_entropy_with_logits(
          np.array([[1.], [-1.], [0.]]).astype(np.float32),
          np.array([0, 0, 0]).astype(label_dtype))
      tf_loss, tf_backprop = sess.run([loss, backprop])
    self.assertAllClose([0.0, 0.0, 0.0], tf_loss)
    self.assertAllClose([[0.0], [0.0], [0.0]], tf_backprop)
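The expected values follow directly from the math: with only one class, the softmax output is always 1, so the loss is -log(1) = 0 for every example and the backprop (softmax minus the one-hot label) is 1 - 1 = 0, regardless of the logit values.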
Example 3: testInvalidLabel
# Required import: from tensorflow.python.ops import gen_nn_ops [as alias]
# Or: from tensorflow.python.ops.gen_nn_ops import _sparse_softmax_cross_entropy_with_logits [as alias]
def testInvalidLabel(self):
  features = [
      [1., 1., 1., 1.],
      [1., 1., 1., 1.],
      [1., 2., 3., 4.],
      [1., 2., 3., 4.]]
  labels = [4, 3, 0, -1]

  if tf.test.is_built_with_cuda() and tf.test.is_gpu_available():
    with self.test_session(use_gpu=True) as sess:
      loss, backprop = (
          gen_nn_ops._sparse_softmax_cross_entropy_with_logits(
              features, labels))
      tf_loss, tf_backprop = sess.run([loss, backprop])
      self.assertAllClose(
          [[np.nan] * 4,
           [0.25, 0.25, 0.25, -0.75],
           [-0.968, 0.087, 0.237, 0.6439],
           [np.nan] * 4],
          tf_backprop, rtol=1e-3, atol=1e-3)
      self.assertAllClose(
          [np.nan, 1.3862, 3.4420, np.nan], tf_loss, rtol=1e-3, atol=1e-3)

  with self.test_session(use_gpu=False) as sess:
    loss, backprop = (
        gen_nn_ops._sparse_softmax_cross_entropy_with_logits(
            features, labels))
    with self.assertRaisesOpError("Received a label value of"):
      sess.run([loss, backprop])
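The two branches reflect the behaviour this test documents for out-of-range labels (here 4 and -1 with only 4 classes): the GPU kernel does not validate the labels and produces NaN loss and backprop rows for the invalid examples, while the CPU kernel raises an op error of the form "Received a label value of ...".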