本文整理匯總了Python中tensorflow.python.ops.clip_ops.global_norm方法的典型用法代碼示例。如果您正苦於以下問題:Python clip_ops.global_norm方法的具體用法?Python clip_ops.global_norm怎麽用?Python clip_ops.global_norm使用的例子?那麽,這裏精選的方法代碼示例或許可以為您提供幫助。您也可以進一步了解該方法所在模塊tensorflow.python.ops.clip_ops的用法示例。
在下文中一共展示了clip_ops.global_norm方法的9個代碼示例,這些例子默認根據受歡迎程度排序。您可以為喜歡或者感覺有用的代碼點讚,您的評價將有助於係統推薦出更棒的Python代碼示例。
示例1: add_gradients_summaries
# 需要導入模塊: from tensorflow.python.ops import clip_ops [as 別名]
# 或者: from tensorflow.python.ops.clip_ops import global_norm [as 別名]
def add_gradients_summaries(grads_and_vars):
    """Create histogram and scalar-norm summaries for gradients.

    Args:
        grads_and_vars: A list of (gradient, variable) pairs (tuples).

    Returns:
        The list of created summary ops.
    """
    created = []
    for grad, var in grads_and_vars:
        if grad is None:
            logging.info('Var %s has no gradient', var.op.name)
            continue
        # IndexedSlices gradients carry their data in `.values`.
        values = grad.values if isinstance(grad, ops.IndexedSlices) else grad
        created.append(
            summary.histogram(var.op.name + '/gradient', values))
        created.append(
            summary.scalar(var.op.name + '/gradient_norm',
                           clip_ops.global_norm([values])))
    return created
示例2: add_gradients_summaries
# 需要導入模塊: from tensorflow.python.ops import clip_ops [as 別名]
# 或者: from tensorflow.python.ops.clip_ops import global_norm [as 別名]
def add_gradients_summaries(grads_and_vars):
    """Create histogram and scalar-norm summaries for gradients.

    Args:
        grads_and_vars: A list of (gradient, variable) pairs (tuples).

    Returns:
        The list of created summary ops.
    """
    out = []
    for grad, var in grads_and_vars:
        if grad is None:
            logging.info('Var %s has no gradient', var.op.name)
            continue
        # Unwrap sparse (IndexedSlices) gradients to their dense values.
        if isinstance(grad, ops.IndexedSlices):
            vals = grad.values
        else:
            vals = grad
        out.append(summary.histogram(var.op.name + '_gradient', vals))
        out.append(summary.scalar(var.op.name + '_gradient_norm',
                                  clip_ops.global_norm([vals])))
    return out
示例3: add_gradients_summaries
# 需要導入模塊: from tensorflow.python.ops import clip_ops [as 別名]
# 或者: from tensorflow.python.ops.clip_ops import global_norm [as 別名]
def add_gradients_summaries(grads_and_vars):
    """Create histogram summaries for gradients and their norms.

    Args:
        grads_and_vars: A list of (gradient, variable) pairs (tuples).

    Returns:
        The list of created summary ops.
    """
    summary_ops = []
    for grad, var in grads_and_vars:
        if grad is None:
            logging.info('Var %s has no gradient', var.op.name)
            continue
        # Sparse gradients arrive as IndexedSlices; use their .values tensor.
        tensor = grad.values if isinstance(grad, ops.IndexedSlices) else grad
        summary_ops.append(
            summary.histogram(var.op.name + '/gradient', tensor))
        summary_ops.append(
            summary.histogram(var.op.name + '/gradient_norm',
                              clip_ops.global_norm([tensor])))
    return summary_ops
示例4: add_gradients_summaries
# 需要導入模塊: from tensorflow.python.ops import clip_ops [as 別名]
# 或者: from tensorflow.python.ops.clip_ops import global_norm [as 別名]
def add_gradients_summaries(grads_and_vars):
    """Create histogram summaries for gradients and their norms.

    Args:
        grads_and_vars: A list of (gradient, variable) pairs (tuples).

    Returns:
        The list of created summary ops.
    """
    collected = []
    for grad, var in grads_and_vars:
        if grad is None:
            logging.info('Var %s has no gradient', var.op.name)
            continue
        if isinstance(grad, ops.IndexedSlices):
            # Sparse gradient: summarize the underlying values tensor.
            grad_tensor = grad.values
        else:
            grad_tensor = grad
        collected.append(
            summary.histogram(var.op.name + '_gradient', grad_tensor))
        collected.append(
            summary.histogram(var.op.name + '_gradient_norm',
                              clip_ops.global_norm([grad_tensor])))
    return collected
示例5: add_gradients_summaries
# 需要導入模塊: from tensorflow.python.ops import clip_ops [as 別名]
# 或者: from tensorflow.python.ops.clip_ops import global_norm [as 別名]
def add_gradients_summaries(grads_and_vars):
    """Create per-gradient histogram and scalar-norm summaries.

    Args:
        grads_and_vars: A list of (gradient, variable) pairs (tuples).

    Returns:
        The list of created summary ops.
    """
    results = []
    for grad, var in grads_and_vars:
        if grad is None:
            logging.info('Var %s has no gradient', var.op.name)
            continue
        # IndexedSlices (sparse) gradients expose their data via .values.
        dense = grad.values if isinstance(grad, ops.IndexedSlices) else grad
        results.append(summary.histogram(var.op.name + '/gradient', dense))
        results.append(
            summary.scalar(var.op.name + '/gradient_norm',
                           clip_ops.global_norm([dense])))
    return results
示例6: add_gradients_summaries
# 需要導入模塊: from tensorflow.python.ops import clip_ops [as 別名]
# 或者: from tensorflow.python.ops.clip_ops import global_norm [as 別名]
def add_gradients_summaries(grads_and_vars):
    """Create histogram summaries for gradients and their norms.

    Args:
        grads_and_vars: A list of (gradient, variable) pairs (tuples).

    Returns:
        The list of created summary ops.
    """
    added = []
    for grad, var in grads_and_vars:
        if grad is None:
            logging.info('Var %s has no gradient', var.op.name)
            continue
        # Reduce sparse IndexedSlices gradients to their dense values.
        g = grad.values if isinstance(grad, ops.IndexedSlices) else grad
        added.append(summary.histogram(var.op.name + ':gradient', g))
        added.append(
            summary.histogram(var.op.name + ':gradient_norm',
                              clip_ops.global_norm([g])))
    return added
示例7: add_gradients_summaries
# 需要導入模塊: from tensorflow.python.ops import clip_ops [as 別名]
# 或者: from tensorflow.python.ops.clip_ops import global_norm [as 別名]
def add_gradients_summaries(grads_and_vars):
    """Create histogram summaries (legacy summary API) for gradients.

    Args:
        grads_and_vars: A list of (gradient, variable) pairs (tuples).

    Returns:
        The list of created summary ops.
    """
    emitted = []
    for grad, var in grads_and_vars:
        if grad is None:
            logging.info('Var %s has no gradient', var.op.name)
            continue
        # Sparse (IndexedSlices) gradients keep their data in .values.
        if isinstance(grad, ops.IndexedSlices):
            grad_values = grad.values
        else:
            grad_values = grad
        emitted.append(
            summary.histogram_summary(var.op.name + ':gradient', grad_values))
        emitted.append(
            summary.histogram_summary(var.op.name + ':gradient_norm',
                                      clip_ops.global_norm([grad_values])))
    return emitted
示例8: add_gradients_summaries
# 需要導入模塊: from tensorflow.python.ops import clip_ops [as 別名]
# 或者: from tensorflow.python.ops.clip_ops import global_norm [as 別名]
def add_gradients_summaries(grads_and_vars):
    """Create histogram and scalar-norm summaries for gradients.

    Args:
        grads_and_vars: A list of (gradient, variable) pairs (tuples).

    Returns:
        The list of created summary ops.
    """
    ops_list = []
    for grad, var in grads_and_vars:
        if grad is None:
            log.warn('Var %s has no gradient', var.op.name)
            continue
        # IndexedSlices gradients carry the actual data in `.values`.
        data = grad.values if isinstance(grad, ops.IndexedSlices) else grad
        ops_list.append(summary.histogram(var.op.name + '/gradient', data))
        ops_list.append(
            summary.scalar(var.op.name + '/gradient_norm',
                           clip_ops.global_norm([data])))
    return ops_list
示例9: optimize
# 需要導入模塊: from tensorflow.python.ops import clip_ops [as 別名]
# 或者: from tensorflow.python.ops.clip_ops import global_norm [as 別名]
def optimize(gradients, optim, global_step, summaries, global_norm=None, global_norm_clipped=None, appendix=''):
    """Apply a gradient-update op (modified from sugartensor).

    Args:
        gradients: A list of (gradient, variable) pairs, as produced by an
            optimizer's compute_gradients().
        optim: An optimizer instance; its apply_gradients() performs the
            update.
        global_step: Variable incremented once per application of the
            gradients; passed through to apply_gradients().
        summaries: Optional list of summary tags; defaults to
            ["loss", "learning_rate"]. Currently unused because all summary
            emission below is disabled.
        global_norm: Optional precomputed global gradient norm (unused while
            summaries are disabled).
        global_norm_clipped: Optional clipped global gradient norm (unused
            while summaries are disabled).
        appendix: Suffix appended to summary tag names (unused while
            summaries are disabled).

    Returns:
        The op returned by optim.apply_gradients().
    """
    if summaries is None:
        summaries = ["loss", "learning_rate"]
    # NOTE(review): the original implementation also looped over `gradients`
    # to derive per-variable names and dense gradient values, but only to
    # feed summary ops that are all commented out below — the loop was dead
    # code and has been removed.  If gradient summaries are needed again,
    # restore the disabled block:
    # if "gradient_norm" in summaries:
    #     if global_norm is None:
    #         tf.summary.scalar("global_norm/gradient_norm" + appendix,
    #                           clip_ops.global_norm(list(zip(*gradients))[0]))
    #     else:
    #         tf.summary.scalar("global_norm/gradient_norm" + appendix,
    #                           global_norm)
    #     if global_norm_clipped is not None:
    #         tf.summary.scalar("global_norm/gradient_norm_clipped" + appendix,
    #                           global_norm_clipped)
    # for gradient, variable in gradients:
    #     grad_values = gradient.values if isinstance(gradient, ops.IndexedSlices) else gradient
    #     if grad_values is not None:
    #         var_name = variable.name.replace(":", "_")
    #         if "gradients" in summaries:
    #             tf.summary.histogram("gradients/%s" % var_name, grad_values)
    #         if "gradient_norm" in summaries:
    #             tf.summary.scalar("gradient_norm/%s" % var_name,
    #                               clip_ops.global_norm([grad_values]))
    # Gradient Update OP
    return optim.apply_gradients(gradients, global_step=global_step)