Python clip_ops.global_norm Method Code Examples

This article compiles typical usage examples of the Python method tensorflow.python.ops.clip_ops.global_norm. If you are unsure what clip_ops.global_norm does, how to call it, or what real-world usage looks like, the curated code examples below may help. You can also explore other usage examples from the containing module, tensorflow.python.ops.clip_ops.


The following presents 9 code examples of the clip_ops.global_norm method, ordered by popularity.
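
Before the examples, here is a minimal sketch of what clip_ops.global_norm computes: the square root of the sum of the squared L2 norms of all tensors in the list. This snippet assumes a TensorFlow 1.x graph-mode environment, and the tensor values are purely illustrative.

import tensorflow as tf
from tensorflow.python.ops import clip_ops

t1 = tf.constant([3.0, 4.0])            # L2 norm = 5
t2 = tf.constant([[0.0], [12.0]])       # L2 norm = 12
norm = clip_ops.global_norm([t1, t2])   # sqrt(5.0**2 + 12.0**2) = 13.0

with tf.Session() as sess:
    print(sess.run(norm))  # prints 13.0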

Example 1: add_gradients_summaries

# Required import: from tensorflow.python.ops import clip_ops [as alias]
# Alternatively: from tensorflow.python.ops.clip_ops import global_norm [as alias]
def add_gradients_summaries(grads_and_vars):
  """Add summaries to gradients.

  Args:
    grads_and_vars: A list of gradient to variable pairs (tuples).

  Returns:
    The list of created summaries.
  """
  summaries = []
  for grad, var in grads_and_vars:
    if grad is not None:
      if isinstance(grad, ops.IndexedSlices):
        grad_values = grad.values
      else:
        grad_values = grad
      summaries.append(
          summary.histogram(var.op.name + '/gradient', grad_values))
      summaries.append(
          summary.scalar(var.op.name + '/gradient_norm',
                         clip_ops.global_norm([grad_values])))
    else:
      logging.info('Var %s has no gradient', var.op.name)

  return summaries 
Developer: yuantailing, Project: ctw-baseline, Code lines: 27, Source: learning.py
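
For context, here is a hedged sketch of how add_gradients_summaries is typically wired into a TensorFlow 1.x training graph. The loss tensor and optimizer choice below are illustrative assumptions, not part of the quoted project.

# Hedged usage sketch (TF 1.x graph mode); `loss` is a hypothetical scalar tensor.
optimizer = tf.train.AdamOptimizer(learning_rate=1e-3)
grads_and_vars = optimizer.compute_gradients(loss)
grad_summaries = add_gradients_summaries(grads_and_vars)  # one histogram + one norm summary per variable
train_op = optimizer.apply_gradients(grads_and_vars)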

Example 2: add_gradients_summaries

# Required import: from tensorflow.python.ops import clip_ops [as alias]
# Alternatively: from tensorflow.python.ops.clip_ops import global_norm [as alias]
def add_gradients_summaries(grads_and_vars):
  """Add summaries to gradients.

  Args:
    grads_and_vars: A list of gradient to variable pairs (tuples).

  Returns:
    The list of created summaries.
  """
  summaries = []
  for grad, var in grads_and_vars:
    if grad is not None:
      if isinstance(grad, ops.IndexedSlices):
        grad_values = grad.values
      else:
        grad_values = grad
      summaries.append(
          summary.histogram(var.op.name + '_gradient', grad_values))
      summaries.append(
          summary.scalar(var.op.name + '_gradient_norm',
                         clip_ops.global_norm([grad_values])))
    else:
      logging.info('Var %s has no gradient', var.op.name)

  return summaries 
Developer: ryfeus, Project: lambda-packs, Code lines: 27, Source: training.py

Example 3: add_gradients_summaries

# Required import: from tensorflow.python.ops import clip_ops [as alias]
# Alternatively: from tensorflow.python.ops.clip_ops import global_norm [as alias]
def add_gradients_summaries(grads_and_vars):
  """Add summaries to gradients.

  Args:
    grads_and_vars: A list of gradient to variable pairs (tuples).

  Returns:
    The list of created summaries.
  """
  summaries = []
  for grad, var in grads_and_vars:
    if grad is not None:
      if isinstance(grad, ops.IndexedSlices):
        grad_values = grad.values
      else:
        grad_values = grad
      summaries.append(
          summary.histogram(var.op.name + '/gradient', grad_values))
      summaries.append(
          summary.histogram(var.op.name + '/gradient_norm',
                            clip_ops.global_norm([grad_values])))
    else:
      logging.info('Var %s has no gradient', var.op.name)

  return summaries 
Developer: abhisuri97, Project: auto-alt-text-lambda-api, Code lines: 27, Source: learning.py

Example 4: add_gradients_summaries

# Required import: from tensorflow.python.ops import clip_ops [as alias]
# Alternatively: from tensorflow.python.ops.clip_ops import global_norm [as alias]
def add_gradients_summaries(grads_and_vars):
  """Add summaries to gradients.

  Args:
    grads_and_vars: A list of gradient to variable pairs (tuples).

  Returns:
    The list of created summaries.
  """
  summaries = []
  for grad, var in grads_and_vars:
    if grad is not None:
      if isinstance(grad, ops.IndexedSlices):
        grad_values = grad.values
      else:
        grad_values = grad
      summaries.append(
          summary.histogram(var.op.name + '_gradient', grad_values))
      summaries.append(
          summary.histogram(var.op.name + '_gradient_norm',
                            clip_ops.global_norm([grad_values])))
    else:
      logging.info('Var %s has no gradient', var.op.name)

  return summaries 
Developer: abhisuri97, Project: auto-alt-text-lambda-api, Code lines: 27, Source: training.py

Example 5: add_gradients_summaries

# Required import: from tensorflow.python.ops import clip_ops [as alias]
# Alternatively: from tensorflow.python.ops.clip_ops import global_norm [as alias]
def add_gradients_summaries(grads_and_vars):
  """Add summaries to gradients.
  Args:
    grads_and_vars: A list of gradient to variable pairs (tuples).
  Returns:
    The list of created summaries.
  """
  summaries = []
  for grad, var in grads_and_vars:
    if grad is not None:
      if isinstance(grad, ops.IndexedSlices):
        grad_values = grad.values
      else:
        grad_values = grad
      summaries.append(
          summary.histogram(var.op.name + '/gradient', grad_values))
      summaries.append(
          summary.scalar(var.op.name + '/gradient_norm',
                         clip_ops.global_norm([grad_values])))
    else:
      logging.info('Var %s has no gradient', var.op.name)

  return summaries 
Developer: autoai-org, Project: CVTron, Code lines: 25, Source: learning.py

Example 6: add_gradients_summaries

# Required import: from tensorflow.python.ops import clip_ops [as alias]
# Alternatively: from tensorflow.python.ops.clip_ops import global_norm [as alias]
def add_gradients_summaries(grads_and_vars):
  """Add summaries to gradients.

  Args:
    grads_and_vars: A list of gradient to variable pairs (tuples).

  Returns:
    The list of created summaries.
  """
  summaries = []
  for grad, var in grads_and_vars:
    if grad is not None:
      if isinstance(grad, ops.IndexedSlices):
        grad_values = grad.values
      else:
        grad_values = grad
      summaries.append(
          summary.histogram(var.op.name + ':gradient', grad_values))
      summaries.append(
          summary.histogram(var.op.name + ':gradient_norm',
                            clip_ops.global_norm([grad_values])))
    else:
      logging.info('Var %s has no gradient', var.op.name)

  return summaries 
Developer: tobegit3hub, Project: deep_image_model, Code lines: 27, Source: learning.py

Example 7: add_gradients_summaries

# Required import: from tensorflow.python.ops import clip_ops [as alias]
# Alternatively: from tensorflow.python.ops.clip_ops import global_norm [as alias]
def add_gradients_summaries(grads_and_vars):
  """Add summaries to gradients.

  Args:
    grads_and_vars: A list of gradient to variable pairs (tuples).

  Returns:
    The list of created summaries.
  """
  summaries = []
  for grad, var in grads_and_vars:
    if grad is not None:
      if isinstance(grad, ops.IndexedSlices):
        grad_values = grad.values
      else:
        grad_values = grad
      summaries.append(summary.histogram_summary(
          var.op.name + ':gradient', grad_values))
      summaries.append(summary.histogram_summary(
          var.op.name + ':gradient_norm', clip_ops.global_norm([grad_values])))
    else:
      logging.info('Var %s has no gradient', var.op.name)

  return summaries 
Developer: tobegit3hub, Project: deep_image_model, Code lines: 26, Source: training.py

Example 8: add_gradients_summaries

# Required import: from tensorflow.python.ops import clip_ops [as alias]
# Alternatively: from tensorflow.python.ops.clip_ops import global_norm [as alias]
def add_gradients_summaries(grads_and_vars):
  """Add summaries to gradients.

  Args:
    grads_and_vars: A list of gradient to variable pairs (tuples).

  Returns:
    The list of created summaries.
  """
  summaries = []
  for grad, var in grads_and_vars:
    if grad is not None:
      if isinstance(grad, ops.IndexedSlices):
        grad_values = grad.values
      else:
        grad_values = grad
      summaries.append(
          summary.histogram(var.op.name + '/gradient', grad_values))
      summaries.append(
          summary.scalar(var.op.name + '/gradient_norm',
                         clip_ops.global_norm([grad_values])))
    else:
      log.warn('Var %s has no gradient', var.op.name)

  return summaries 
Developer: wonheeML, Project: mtl-ssl, Code lines: 27, Source: learning.py

Example 9: optimize

# Required import: from tensorflow.python.ops import clip_ops [as alias]
# Alternatively: from tensorflow.python.ops.clip_ops import global_norm [as alias]
def optimize(gradients, optim, global_step, summaries, global_norm=None, global_norm_clipped=None, appendix=''):
    """Modified from sugartensor.

    Args:
      gradients: A list of (gradient, variable) pairs.
      optim: A tf.train.Optimizer instance.
      global_step: The global step tensor passed to apply_gradients.
      summaries: A list of summary tags; defaults to ["loss", "learning_rate"].
      global_norm: Optional precomputed global gradient norm.
      global_norm_clipped: Optional global gradient norm after clipping.
      appendix: Optional suffix appended to summary names.

    Returns:
      The apply_gradients op.
    """

    # Add Summary
    if summaries is None:
        summaries = ["loss", "learning_rate"]
    # if "gradient_norm" in summaries:
    #     if global_norm is None:
    #         tf.summary.scalar("global_norm/gradient_norm" + appendix,
    #                           clip_ops.global_norm(list(zip(*gradients))[0]))
    #     else:
    #         tf.summary.scalar("global_norm/gradient_norm" + appendix,
    #                           global_norm)
    #     if global_norm_clipped is not None:
    #         tf.summary.scalar("global_norm/gradient_norm_clipped" + appendix,
    #                           global_norm_clipped)

    # Add histograms for variables, gradients and gradient norms.
    for gradient, variable in gradients:
        if isinstance(gradient, ops.IndexedSlices):
            grad_values = gradient.values
        else:
            grad_values = gradient

        if grad_values is not None:
            var_name = variable.name.replace(":", "_")
            # if "gradients" in summaries:
            #     tf.summary.histogram("gradients/%s" % var_name, grad_values)
            # if "gradient_norm" in summaries:
            #     tf.summary.scalar("gradient_norm/%s" % var_name,
            #                       clip_ops.global_norm([grad_values]))

    # Gradient Update OP
    return optim.apply_gradients(gradients, global_step=global_step) 
Developer: SketchyScene, Project: SketchySceneColorization, Code lines: 36, Source: graph_single.py
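
The commented-out blocks in Example 9 suggest that the caller computes and clips the global gradient norm before calling optimize. Below is a hedged sketch of that caller-side pattern; the loss tensor, clip threshold, and optimizer are illustrative assumptions, not part of the quoted project.

# Hedged caller-side sketch implied by Example 9 (TF 1.x graph mode); `loss` is hypothetical.
params = tf.trainable_variables()
grads = tf.gradients(loss, params)
clipped, global_norm = tf.clip_by_global_norm(grads, clip_norm=5.0)
global_step = tf.train.get_or_create_global_step()
train_op = optimize(list(zip(clipped, params)), optim=tf.train.AdamOptimizer(1e-3),
                    global_step=global_step, summaries=["gradient_norm"],
                    global_norm=global_norm)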


Note: The tensorflow.python.ops.clip_ops.global_norm method examples in this article were compiled by 純淨天空 from open-source code and documentation platforms such as GitHub and MSDocs. The snippets were selected from open-source projects contributed by various developers, and copyright of the source code remains with the original authors. Please consult the license of the corresponding project before distributing or using the code; do not reproduce this article without permission.