

Python variable_scope.get_local_variable Method Code Examples

This article collects typical usage examples of the Python method tensorflow.python.ops.variable_scope.get_local_variable. If you are wondering what variable_scope.get_local_variable does, how to call it, or what real code that uses it looks like, the selected examples below may help. You can also explore further usage examples from the containing module, tensorflow.python.ops.variable_scope.


Five code examples of the variable_scope.get_local_variable method are shown below, sorted by popularity.
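As a quick orientation before the examples, here is a minimal sketch (TF 1.x, illustrative names not taken from the snippets below) of what get_local_variable does: it behaves like get_variable, but creates a non-trainable variable that is placed in the LOCAL_VARIABLES collection rather than the trainable one.

import tensorflow as tf
from tensorflow.python.ops import variable_scope

with tf.variable_scope("demo"):
    counter = variable_scope.get_local_variable(
        "counter", shape=[], initializer=tf.zeros_initializer())

# The variable is non-trainable and lives in the LOCAL_VARIABLES collection.
assert counter in tf.get_collection(tf.GraphKeys.LOCAL_VARIABLES)
assert counter not in tf.get_collection(tf.GraphKeys.TRAINABLE_VARIABLES)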

Example 1: testGetLocalVar

# Required import: from tensorflow.python.ops import variable_scope [as alias]
# Or: from tensorflow.python.ops.variable_scope import get_local_variable [as alias]
def testGetLocalVar(self):
    with self.test_session():
      # Check that local variable respects naming.
      with tf.variable_scope("outer") as outer:
        with tf.variable_scope(outer, "default", []):
          local_var = variable_scope.get_local_variable(
              "w", [], collections=["foo"])
          self.assertEqual(local_var.name, "outer/w:0")

      # Since variable is local, it should be in the local variable collection
      # but not the trainable collection.
      self.assertIn(local_var, tf.get_collection(tf.GraphKeys.LOCAL_VARIABLES))
      self.assertIn(local_var, tf.get_collection("foo"))
      self.assertNotIn(
          local_var, tf.get_collection(tf.GraphKeys.TRAINABLE_VARIABLES))

      # Check that local variable respects `reuse`.
      with tf.variable_scope(outer, "default", reuse=True):
        self.assertEqual(variable_scope.get_local_variable("w", []).name,
                         "outer/w:0") 
Author: tobegit3hub, Project: deep_image_model, Lines: 22, Source: variable_scope_test.py
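Because get_local_variable keeps the variable out of the global variables collection, tf.global_variables_initializer() will not initialize it; in a TF 1.x session it would typically be initialized with the local initializer, e.g. (a minimal sketch, reusing local_var from the test above):

with tf.Session() as sess:
    sess.run(tf.local_variables_initializer())  # initializes LOCAL_VARIABLES
    sess.run(local_var)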

Example 2: ornstein_uhlenbeck_noise

# Required import: from tensorflow.python.ops import variable_scope [as alias]
# Or: from tensorflow.python.ops.variable_scope import get_local_variable [as alias]
def ornstein_uhlenbeck_noise(a, t_decay=100000):
    # Temporally correlated exploration noise (Ornstein-Uhlenbeck process) for DDPG;
    # `tt.function.step()` below is a step-counter helper from the surrounding project.
    noise_var = get_local_variable("nm", initializer=tf.zeros(a.get_shape()[1:]))
    ou_theta = get_local_variable("ou_theta", initializer=0.2)
    ou_sigma = get_local_variable("ou_sigma", initializer=0.15)
    # ou_theta = tf.Print(ou_theta, [noise_var], 'noise: ', first_n=2000)
    ou_sigma = tf.train.exponential_decay(ou_sigma, tt.function.step(), t_decay, 1e-6)
    n = noise_var.assign_sub(ou_theta * noise_var - tf.random_normal(a.get_shape()[1:], stddev=ou_sigma))
    return a + n 
Author: rmst, Project: chi, Lines: 10, Source: ddpg.py
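For reference, the snippet above realizes the discrete Ornstein-Uhlenbeck update x ← x − θ·x + N(0, σ) (zero mean, unit time step), with σ additionally decayed over training steps. A self-contained sketch of just the noise-state update, assuming an action dimension of 3 (all names below are illustrative, not from the chi source):

import tensorflow as tf

theta, sigma = 0.2, 0.15
noise = tf.get_variable("ou_noise", shape=[3], trainable=False,
                        initializer=tf.zeros_initializer(),
                        collections=[tf.GraphKeys.LOCAL_VARIABLES])
# x <- x - theta * x + N(0, sigma): temporally correlated exploration noise
update = noise.assign_sub(theta * noise - tf.random_normal([3], stddev=sigma))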

Example 3: weighted_resample

# Required import: from tensorflow.python.ops import variable_scope [as alias]
# Or: from tensorflow.python.ops.variable_scope import get_local_variable [as alias]
def weighted_resample(inputs, weights, overall_rate, scope=None,
                      mean_decay=0.999, seed=None):
  """Performs an approximate weighted resampling of `inputs`.

  This method chooses elements from `inputs` where each item's rate of
  selection is proportional to its value in `weights`, and the average
  rate of selection across all inputs (and many invocations!) is
  `overall_rate`.

  Args:
    inputs: A list of tensors whose first dimension is `batch_size`.
    weights: A `[batch_size]`-shaped tensor with each batch member's weight.
    overall_rate: Desired overall rate of resampling.
    scope: Scope to use for the op.
    mean_decay: How quickly to decay the running estimate of the mean weight.
    seed: Random seed.

  Returns:
    A list of tensors exactly like `inputs`, but with an unknown (and
      possibly zero) first dimension.
    A tensor containing the effective resampling rate used for each output.
  """
  # Algorithm: Just compute rates as weights/mean_weight *
  # overall_rate. This way the average weight corresponds to the
  # overall rate, and a weight twice the average has twice the rate,
  # etc.
  with ops.name_scope(scope, 'weighted_resample', inputs) as opscope:
    # First: Maintain a running estimated mean weight, with zero debiasing
    # enabled (by default) to avoid throwing the average off.

    with variable_scope.variable_scope(scope, 'estimate_mean', inputs):
      estimated_mean = variable_scope.get_local_variable(
          'estimated_mean',
          initializer=math_ops.cast(0, weights.dtype),
          dtype=weights.dtype)

      batch_mean = math_ops.reduce_mean(weights)
      mean = moving_averages.assign_moving_average(
          estimated_mean, batch_mean, mean_decay)

    # Then, normalize the weights into rates using the mean weight and
    # overall target rate:
    rates = weights * overall_rate / mean

    results = resample_at_rate([rates] + inputs, rates,
                               scope=opscope, seed=seed, back_prop=False)

    return (results[1:], results[0]) 
Author: ryfeus, Project: lambda-packs, Lines: 50, Source: resample.py
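A hedged usage sketch of weighted_resample; the input tensors and the 0.3 target rate below are illustrative and not taken from the source:

import tensorflow as tf

# Illustrative batch of 64 feature vectors, labels, and per-example weights.
features = tf.random_normal([64, 10])
labels = tf.random_uniform([64], maxval=5, dtype=tf.int32)
weights = tf.random_uniform([64])

# Keep roughly 30% of the batch on average, favouring high-weight examples.
resampled, used_rates = weighted_resample([features, labels], weights, overall_rate=0.3)
res_features, res_labels = resampled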

Example 4: weighted_resample

# Required import: from tensorflow.python.ops import variable_scope [as alias]
# Or: from tensorflow.python.ops.variable_scope import get_local_variable [as alias]
def weighted_resample(inputs, weights, overall_rate, scope=None,
                      mean_decay=0.999, seed=None):
  """Performs an approximate weighted resampling of `inputs`.

  This method chooses elements from `inputs` where each item's rate of
  selection is proportional to its value in `weights`, and the average
  rate of selection across all inputs (and many invocations!) is
  `overall_rate`.

  Args:
    inputs: A list of tensors whose first dimension is `batch_size`.
    weights: A `[batch_size]`-shaped tensor with each batch member's weight.
    overall_rate: Desired overall rate of resampling.
    scope: Scope to use for the op.
    mean_decay: How quickly to decay the running estimate of the mean weight.
    seed: Random seed.

  Returns:
    A list of tensors exactly like `inputs`, but with an unknown (and
      possibly zero) first dimension.
    A tensor containing the effective resampling rate used for each output.

  """
  # Algorithm: Just compute rates as weights/mean_weight *
  # overall_rate. This way the average weight corresponds to the
  # overall rate, and a weight twice the average has twice the rate,
  # etc.
  with ops.name_scope(scope, 'weighted_resample', inputs) as opscope:
    # First: Maintain a running estimated mean weight, with zero debiasing
    # enabled (by default) to avoid throwing the average off.

    with variable_scope.variable_scope(scope, 'estimate_mean', inputs):
      estimated_mean = variable_scope.get_local_variable(
          'estimated_mean',
          initializer=math_ops.cast(0, weights.dtype),
          dtype=weights.dtype)

      batch_mean = math_ops.reduce_mean(weights)
      mean = moving_averages.assign_moving_average(
          estimated_mean, batch_mean, mean_decay)

    # Then, normalize the weights into rates using the mean weight and
    # overall target rate:
    rates = weights * overall_rate / mean

    results = resample_at_rate([rates] + inputs, rates,
                               scope=opscope, seed=seed, back_prop=False)

    return (results[1:], results[0]) 
Author: abhisuri97, Project: auto-alt-text-lambda-api, Lines: 51, Source: resample.py

Example 5: weighted_resample

# Required import: from tensorflow.python.ops import variable_scope [as alias]
# Or: from tensorflow.python.ops.variable_scope import get_local_variable [as alias]
def weighted_resample(inputs, weights, overall_rate, scope=None,
                      mean_decay=0.999, warmup=10, seed=None):
  """Performs an approximate weighted resampling of `inputs`.

  This method chooses elements from `inputs` where each item's rate of
  selection is proportional to its value in `weights`, and the average
  rate of selection across all inputs (and many invocations!) is
  `overall_rate`.

  Args:
    inputs: A list of tensors whose first dimension is `batch_size`.
    weights: A `[batch_size]`-shaped tensor with each batch member's weight.
    overall_rate: Desired overall rate of resampling.
    scope: Scope to use for the op.
    mean_decay: How quickly to decay the running estimate of the mean weight.
    warmup: Until the resulting tensor has been evaluated `warmup`
      times, the resampling method uses the true mean over all calls
      as its weight estimate, rather than a decayed mean.
    seed: Random seed.

  Returns:
    A list of tensors exactly like `inputs`, but with an unknown (and
      possibly zero) first dimension.
    A tensor containing the effective resampling rate used for each output.

  """
  # Algorithm: Just compute rates as weights/mean_weight *
  # overall_rate. This way the average weight corresponds to the
  # overall rate, and a weight twice the average has twice the rate,
  # etc.
  with ops.name_scope(scope, 'weighted_resample', inputs) as opscope:
    # First: Maintain a running estimated mean weight, with decay
    # adjusted (by also maintaining an invocation count) during the
    # warmup period so that at the beginning, there aren't too many
    # zeros mixed in, throwing the average off.

    with variable_scope.variable_scope(scope, 'estimate_mean', inputs):
      count_so_far = variable_scope.get_local_variable(
          'resample_count', initializer=0)

      estimated_mean = variable_scope.get_local_variable(
          'estimated_mean', initializer=0.0)

      count = count_so_far.assign_add(1)
      real_decay = math_ops.minimum(
          math_ops.truediv((count - 1), math_ops.minimum(count, warmup)),
          mean_decay)

      batch_mean = math_ops.reduce_mean(weights)
      mean = moving_averages.assign_moving_average(
          estimated_mean, batch_mean, real_decay, zero_debias=False)

    # Then, normalize the weights into rates using the mean weight and
    # overall target rate:
    rates = weights * overall_rate / mean

    results = resample_at_rate([rates] + inputs, rates,
                               scope=opscope, seed=seed, back_prop=False)

    return (results[1:], results[0]) 
Author: tobegit3hub, Project: deep_image_model, Lines: 62, Source: resample.py
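For concreteness, with the defaults warmup=10 and mean_decay=0.999, the expression real_decay = min((count − 1) / min(count, warmup), mean_decay) evaluates to 0 on the first call, 4/5 = 0.8 on the fifth, 9/10 = 0.9 on the tenth, and is capped at 0.999 from the eleventh call onward, so the moving average only reaches its long-term decay rate once enough batches have been seen.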


Note: The tensorflow.python.ops.variable_scope.get_local_variable examples in this article were compiled by VimSky (纯净天空) from open-source code and documentation platforms such as GitHub and MSDocs. The snippets come from open-source projects contributed by their respective authors; copyright of the source code remains with the original authors, and any use or redistribution should follow the corresponding project's license. Do not reproduce this article without permission.