

Python ops.batch_norm Method Code Examples

This article collects typical usage examples of the Python method inception.slim.ops.batch_norm. If you are wondering how ops.batch_norm is used in practice, or what calling it looks like in real code, the curated examples below should help. You can also explore further usage examples from the containing module, inception.slim.ops.


The following presents 15 code examples of the ops.batch_norm method, ordered by popularity.
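
Before the examples, here is a minimal usage sketch for orientation. It is an assumption-laden illustration rather than code taken from any of the projects below: it assumes TensorFlow 1.x graph mode and that the inception/slim package from the TensorFlow models repository is importable; shapes and parameter values are arbitrary.

import tensorflow as tf
from inception.slim import ops, scopes

# Normalize a batch of images directly; this creates the beta variable
# (and gamma when scale=True) plus the moving mean/variance statistics.
images = tf.random_uniform((5, 3, 3, 3), seed=1)
net = ops.batch_norm(images, scale=True, is_training=True)

# More commonly, batch_norm is attached to conv layers through an arg_scope,
# as Example 1 below does for inception_v3.
with scopes.arg_scope([ops.conv2d],
                      batch_norm_params={'decay': 0.9997, 'epsilon': 0.001}):
  net = ops.conv2d(images, 32, [3, 3], scope='conv1')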

Example 1: inception_v3_parameters

# Required import: from inception.slim import ops [as alias]
# Or: from inception.slim.ops import batch_norm [as alias]
def inception_v3_parameters(weight_decay=0.00004, stddev=0.1,
                            batch_norm_decay=0.9997, batch_norm_epsilon=0.001):
  """Yields the scope with the default parameters for inception_v3.

  Args:
    weight_decay: the weight decay for weights variables.
    stddev: standard deviation of the truncated Gaussian weight distribution.
    batch_norm_decay: decay for the moving average of batch_norm momentums.
    batch_norm_epsilon: small float added to variance to avoid dividing by zero.

  Yields:
    an arg_scope with the parameters needed for inception_v3.
  """
  # Set weight_decay for weights in Conv and FC layers.
  with scopes.arg_scope([ops.conv2d, ops.fc],
                        weight_decay=weight_decay):
    # Set stddev, activation and parameters for batch_norm.
    with scopes.arg_scope([ops.conv2d],
                          stddev=stddev,
                          activation=tf.nn.relu,
                          batch_norm_params={
                              'decay': batch_norm_decay,
                              'epsilon': batch_norm_epsilon}) as arg_scope:
      yield arg_scope 
Developer: ringringyi, Project: DOTA_models, Lines: 26, Source file: inception_model.py
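
A hedged usage sketch for Example 1 (not taken from the DOTA_models repository): inception_v3_parameters is a generator that yields an arg_scope, so one way to consume it is to wrap it with contextlib.contextmanager; layers built inside the with block then inherit the conv2d/fc defaults, including batch_norm_params for ops.batch_norm. The layer name and shapes below are illustrative.

import contextlib
import tensorflow as tf
from inception.slim import ops

inception_v3_scope = contextlib.contextmanager(inception_v3_parameters)

images = tf.random_uniform((32, 299, 299, 3))
with inception_v3_scope(weight_decay=0.00004):
  # conv2d picks up the stddev, relu activation and batch_norm_params set
  # above, so ops.batch_norm runs after the convolution.
  net = ops.conv2d(images, 32, [3, 3], stride=2, scope='conv0')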

Example 2: _vgg_arg_scope

# Required import: from inception.slim import ops [as alias]
# Or: from inception.slim.ops import batch_norm [as alias]
def _vgg_arg_scope(weight_decay,
                   is_training):
  """Defines the VGG arg scope.
  Args:
    weight_decay: The l2 regularization coefficient.
    is_training: Whether or not the model is being trained.
  Returns:
    An arg_scope.
  """
  with slim.arg_scope([slim.conv2d, slim.fully_connected],
                      activation_fn=tf.nn.relu,
                      weights_regularizer=slim.l2_regularizer(weight_decay),
                      weights_initializer=tf.contrib.layers.xavier_initializer(),
                      biases_initializer=tf.zeros_initializer()):
    with slim.arg_scope([slim.batch_norm], is_training=is_training):
      with slim.arg_scope([slim.conv2d], padding='SAME', normalizer_fn=slim.batch_norm) as arg_sc:
        return arg_sc 
Developer: wenwei202, Project: terngrad, Lines: 18, Source file: models.py
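
A short, hedged usage sketch for Example 2 (the network body is illustrative and not from the terngrad repository): the returned arg_scope is re-entered with slim.arg_scope when the model is built, so every slim.conv2d inside it defaults to SAME padding, ReLU, and slim.batch_norm with the given is_training flag.

import tensorflow as tf
slim = tf.contrib.slim

images = tf.placeholder(tf.float32, [None, 224, 224, 3])
with slim.arg_scope(_vgg_arg_scope(weight_decay=0.0005, is_training=True)):
  net = slim.conv2d(images, 64, [3, 3], scope='conv1_1')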

Example 3: testCreateOp

# Required import: from inception.slim import ops [as alias]
# Or: from inception.slim.ops import batch_norm [as alias]
def testCreateOp(self):
    height, width = 3, 3
    with self.test_session():
      images = tf.random_uniform((5, height, width, 3), seed=1)
      output = ops.batch_norm(images)
      self.assertTrue(output.op.name.startswith('BatchNorm/batchnorm'))
      self.assertListEqual(output.get_shape().as_list(), [5, height, width, 3]) 
Developer: ringringyi, Project: DOTA_models, Lines: 9, Source file: ops_test.py

Example 4: testCreateVariables

# Required import: from inception.slim import ops [as alias]
# Or: from inception.slim.ops import batch_norm [as alias]
def testCreateVariables(self):
    height, width = 3, 3
    with self.test_session():
      images = tf.random_uniform((5, height, width, 3), seed=1)
      ops.batch_norm(images)
      beta = variables.get_variables_by_name('beta')[0]
      self.assertEquals(beta.op.name, 'BatchNorm/beta')
      gamma = variables.get_variables_by_name('gamma')
      self.assertEquals(gamma, [])
      moving_mean = tf.moving_average_variables()[0]
      moving_variance = tf.moving_average_variables()[1]
      self.assertEquals(moving_mean.op.name, 'BatchNorm/moving_mean')
      self.assertEquals(moving_variance.op.name, 'BatchNorm/moving_variance') 
Developer: ringringyi, Project: DOTA_models, Lines: 15, Source file: ops_test.py

Example 5: testCreateVariablesWithScale

# Required import: from inception.slim import ops [as alias]
# Or: from inception.slim.ops import batch_norm [as alias]
def testCreateVariablesWithScale(self):
    height, width = 3, 3
    with self.test_session():
      images = tf.random_uniform((5, height, width, 3), seed=1)
      ops.batch_norm(images, scale=True)
      beta = variables.get_variables_by_name('beta')[0]
      gamma = variables.get_variables_by_name('gamma')[0]
      self.assertEquals(beta.op.name, 'BatchNorm/beta')
      self.assertEquals(gamma.op.name, 'BatchNorm/gamma')
      moving_mean = tf.moving_average_variables()[0]
      moving_variance = tf.moving_average_variables()[1]
      self.assertEquals(moving_mean.op.name, 'BatchNorm/moving_mean')
      self.assertEquals(moving_variance.op.name, 'BatchNorm/moving_variance') 
Developer: ringringyi, Project: DOTA_models, Lines: 15, Source file: ops_test.py

Example 6: testCreateVariablesWithoutCenterWithScale

# Required import: from inception.slim import ops [as alias]
# Or: from inception.slim.ops import batch_norm [as alias]
def testCreateVariablesWithoutCenterWithScale(self):
    height, width = 3, 3
    with self.test_session():
      images = tf.random_uniform((5, height, width, 3), seed=1)
      ops.batch_norm(images, center=False, scale=True)
      beta = variables.get_variables_by_name('beta')
      self.assertEquals(beta, [])
      gamma = variables.get_variables_by_name('gamma')[0]
      self.assertEquals(gamma.op.name, 'BatchNorm/gamma')
      moving_mean = tf.moving_average_variables()[0]
      moving_variance = tf.moving_average_variables()[1]
      self.assertEquals(moving_mean.op.name, 'BatchNorm/moving_mean')
      self.assertEquals(moving_variance.op.name, 'BatchNorm/moving_variance') 
Developer: ringringyi, Project: DOTA_models, Lines: 15, Source file: ops_test.py

Example 7: testMovingAverageVariables

# Required import: from inception.slim import ops [as alias]
# Or: from inception.slim.ops import batch_norm [as alias]
def testMovingAverageVariables(self):
    height, width = 3, 3
    with self.test_session():
      images = tf.random_uniform((5, height, width, 3), seed=1)
      ops.batch_norm(images, scale=True)
      moving_mean = tf.moving_average_variables()[0]
      moving_variance = tf.moving_average_variables()[1]
      self.assertEquals(moving_mean.op.name, 'BatchNorm/moving_mean')
      self.assertEquals(moving_variance.op.name, 'BatchNorm/moving_variance') 
Developer: ringringyi, Project: DOTA_models, Lines: 11, Source file: ops_test.py

Example 8: testUpdateOps

# Required import: from inception.slim import ops [as alias]
# Or: from inception.slim.ops import batch_norm [as alias]
def testUpdateOps(self):
    height, width = 3, 3
    with self.test_session():
      images = tf.random_uniform((5, height, width, 3), seed=1)
      ops.batch_norm(images)
      update_ops = tf.get_collection(ops.UPDATE_OPS_COLLECTION)
      update_moving_mean = update_ops[0]
      update_moving_variance = update_ops[1]
      self.assertEquals(update_moving_mean.op.name,
                        'BatchNorm/AssignMovingAvg')
      self.assertEquals(update_moving_variance.op.name,
                        'BatchNorm/AssignMovingAvg_1') 
Developer: ringringyi, Project: DOTA_models, Lines: 14, Source file: ops_test.py
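
The update ops checked in Example 8 only take effect if they are actually run during training; otherwise the moving statistics stay at their initial values. A hedged sketch of wiring them into a train step (the loss and optimizer are stand-ins for illustration):

import tensorflow as tf
from inception.slim import ops

images = tf.random_uniform((5, 3, 3, 3), seed=1)
net = ops.batch_norm(images)    # registers the two moving-average update ops
loss = tf.reduce_mean(net)      # dummy loss for illustration
update_ops = tf.get_collection(ops.UPDATE_OPS_COLLECTION)
optimizer = tf.train.GradientDescentOptimizer(0.1)
with tf.control_dependencies(update_ops):
  # Running train_op now also refreshes BatchNorm/moving_mean and
  # BatchNorm/moving_variance.
  train_op = optimizer.minimize(loss)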

Example 9: testReuseVariables

# Required import: from inception.slim import ops [as alias]
# Or: from inception.slim.ops import batch_norm [as alias]
def testReuseVariables(self):
    height, width = 3, 3
    with self.test_session():
      images = tf.random_uniform((5, height, width, 3), seed=1)
      ops.batch_norm(images, scale=True, scope='bn')
      ops.batch_norm(images, scale=True, scope='bn', reuse=True)
      beta = variables.get_variables_by_name('beta')
      gamma = variables.get_variables_by_name('gamma')
      self.assertEquals(len(beta), 1)
      self.assertEquals(len(gamma), 1)
      moving_vars = tf.get_collection('moving_vars')
      self.assertEquals(len(moving_vars), 2) 
Developer: ringringyi, Project: DOTA_models, Lines: 14, Source file: ops_test.py

Example 10: testReuseUpdateOps

# Required import: from inception.slim import ops [as alias]
# Or: from inception.slim.ops import batch_norm [as alias]
def testReuseUpdateOps(self):
    height, width = 3, 3
    with self.test_session():
      images = tf.random_uniform((5, height, width, 3), seed=1)
      ops.batch_norm(images, scope='bn')
      self.assertEquals(len(tf.get_collection(ops.UPDATE_OPS_COLLECTION)), 2)
      ops.batch_norm(images, scope='bn', reuse=True)
      self.assertEquals(len(tf.get_collection(ops.UPDATE_OPS_COLLECTION)), 4) 
Developer: ringringyi, Project: DOTA_models, Lines: 10, Source file: ops_test.py

Example 11: testCreateMovingVars

# Required import: from inception.slim import ops [as alias]
# Or: from inception.slim.ops import batch_norm [as alias]
def testCreateMovingVars(self):
    height, width = 3, 3
    with self.test_session():
      images = tf.random_uniform((5, height, width, 3), seed=1)
      _ = ops.batch_norm(images, moving_vars='moving_vars')
      moving_mean = tf.get_collection('moving_vars',
                                      'BatchNorm/moving_mean')
      self.assertEquals(len(moving_mean), 1)
      self.assertEquals(moving_mean[0].op.name, 'BatchNorm/moving_mean')
      moving_variance = tf.get_collection('moving_vars',
                                          'BatchNorm/moving_variance')
      self.assertEquals(len(moving_variance), 1)
      self.assertEquals(moving_variance[0].op.name, 'BatchNorm/moving_variance') 
Developer: ringringyi, Project: DOTA_models, Lines: 15, Source file: ops_test.py

Example 12: testEvalMovingVars

# Required import: from inception.slim import ops [as alias]
# Or: from inception.slim.ops import batch_norm [as alias]
def testEvalMovingVars(self):
    height, width = 3, 3
    with self.test_session() as sess:
      image_shape = (10, height, width, 3)
      image_values = np.random.rand(*image_shape)
      expected_mean = np.mean(image_values, axis=(0, 1, 2))
      expected_var = np.var(image_values, axis=(0, 1, 2))
      images = tf.constant(image_values, shape=image_shape, dtype=tf.float32)
      output = ops.batch_norm(images, decay=0.1, is_training=False)
      update_ops = tf.get_collection(ops.UPDATE_OPS_COLLECTION)
      with tf.control_dependencies(update_ops):
        output = tf.identity(output)
      # Initialize all variables
      sess.run(tf.global_variables_initializer())
      moving_mean = variables.get_variables('BatchNorm/moving_mean')[0]
      moving_variance = variables.get_variables('BatchNorm/moving_variance')[0]
      mean, variance = sess.run([moving_mean, moving_variance])
      # After initialization moving_mean == 0 and moving_variance == 1.
      self.assertAllClose(mean, [0] * 3)
      self.assertAllClose(variance, [1] * 3)
      # Simulate assignment from saver restore.
      init_assigns = [tf.assign(moving_mean, expected_mean),
                      tf.assign(moving_variance, expected_var)]
      sess.run(init_assigns)
      for _ in range(10):
        sess.run([output], {images: np.random.rand(*image_shape)})
      mean = moving_mean.eval()
      variance = moving_variance.eval()
      # Although we feed different images, the moving_mean and moving_variance
      # shouldn't change.
      self.assertAllClose(mean, expected_mean)
      self.assertAllClose(variance, expected_var) 
Developer: ringringyi, Project: DOTA_models, Lines: 34, Source file: ops_test.py

Example 13: testReuseVars

# Required import: from inception.slim import ops [as alias]
# Or: from inception.slim.ops import batch_norm [as alias]
def testReuseVars(self):
    height, width = 3, 3
    with self.test_session() as sess:
      image_shape = (10, height, width, 3)
      image_values = np.random.rand(*image_shape)
      expected_mean = np.mean(image_values, axis=(0, 1, 2))
      expected_var = np.var(image_values, axis=(0, 1, 2))
      images = tf.constant(image_values, shape=image_shape, dtype=tf.float32)
      output = ops.batch_norm(images, decay=0.1, is_training=False)
      update_ops = tf.get_collection(ops.UPDATE_OPS_COLLECTION)
      with tf.control_dependencies(update_ops):
        output = tf.identity(output)
      # Initialize all variables
      sess.run(tf.global_variables_initializer())
      moving_mean = variables.get_variables('BatchNorm/moving_mean')[0]
      moving_variance = variables.get_variables('BatchNorm/moving_variance')[0]
      mean, variance = sess.run([moving_mean, moving_variance])
      # After initialization moving_mean == 0 and moving_variance == 1.
      self.assertAllClose(mean, [0] * 3)
      self.assertAllClose(variance, [1] * 3)
      # Simulate assignment from saver restore.
      init_assigns = [tf.assign(moving_mean, expected_mean),
                      tf.assign(moving_variance, expected_var)]
      sess.run(init_assigns)
      for _ in range(10):
        sess.run([output], {images: np.random.rand(*image_shape)})
      mean = moving_mean.eval()
      variance = moving_variance.eval()
      # Although we feed different images, the moving_mean and moving_variance
      # shouldn't change.
      self.assertAllClose(mean, expected_mean)
      self.assertAllClose(variance, expected_var) 
Developer: ringringyi, Project: DOTA_models, Lines: 34, Source file: ops_test.py

Example 14: testEvalMovingVars

# Required import: from inception.slim import ops [as alias]
# Or: from inception.slim.ops import batch_norm [as alias]
def testEvalMovingVars(self):
    height, width = 3, 3
    with self.test_session() as sess:
      image_shape = (10, height, width, 3)
      image_values = np.random.rand(*image_shape)
      expected_mean = np.mean(image_values, axis=(0, 1, 2))
      expected_var = np.var(image_values, axis=(0, 1, 2))
      images = tf.constant(image_values, shape=image_shape, dtype=tf.float32)
      output = ops.batch_norm(images, decay=0.1, is_training=False)
      update_ops = tf.get_collection(ops.UPDATE_OPS_COLLECTION)
      with tf.control_dependencies(update_ops):
        barrier = tf.no_op(name='gradient_barrier')
        output = control_flow_ops.with_dependencies([barrier], output)
      # Initialize all variables
      sess.run(tf.initialize_all_variables())
      moving_mean = variables.get_variables('BatchNorm/moving_mean')[0]
      moving_variance = variables.get_variables('BatchNorm/moving_variance')[0]
      mean, variance = sess.run([moving_mean, moving_variance])
      # After initialization moving_mean == 0 and moving_variance == 1.
      self.assertAllClose(mean, [0] * 3)
      self.assertAllClose(variance, [1] * 3)
      # Simulate assignment from saver restore.
      init_assigns = [tf.assign(moving_mean, expected_mean),
                      tf.assign(moving_variance, expected_var)]
      sess.run(init_assigns)
      for _ in range(10):
        sess.run([output], {images: np.random.rand(*image_shape)})
      mean = moving_mean.eval()
      variance = moving_variance.eval()
      # Although we feed different images, the moving_mean and moving_variance
      # shouldn't change.
      self.assertAllClose(mean, expected_mean)
      self.assertAllClose(variance, expected_var) 
Developer: Cyber-Neuron, Project: inception_v3, Lines: 35, Source file: ops_test.py

Example 15: testReuseVars

# Required import: from inception.slim import ops [as alias]
# Or: from inception.slim.ops import batch_norm [as alias]
def testReuseVars(self):
    height, width = 3, 3
    with self.test_session() as sess:
      image_shape = (10, height, width, 3)
      image_values = np.random.rand(*image_shape)
      expected_mean = np.mean(image_values, axis=(0, 1, 2))
      expected_var = np.var(image_values, axis=(0, 1, 2))
      images = tf.constant(image_values, shape=image_shape, dtype=tf.float32)
      output = ops.batch_norm(images, decay=0.1, is_training=False)
      update_ops = tf.get_collection(ops.UPDATE_OPS_COLLECTION)
      with tf.control_dependencies(update_ops):
        barrier = tf.no_op(name='gradient_barrier')
        output = control_flow_ops.with_dependencies([barrier], output)
      # Initialize all variables
      sess.run(tf.initialize_all_variables())
      moving_mean = variables.get_variables('BatchNorm/moving_mean')[0]
      moving_variance = variables.get_variables('BatchNorm/moving_variance')[0]
      mean, variance = sess.run([moving_mean, moving_variance])
      # After initialization moving_mean == 0 and moving_variance == 1.
      self.assertAllClose(mean, [0] * 3)
      self.assertAllClose(variance, [1] * 3)
      # Simulate assignment from saver restore.
      init_assigns = [tf.assign(moving_mean, expected_mean),
                      tf.assign(moving_variance, expected_var)]
      sess.run(init_assigns)
      for _ in range(10):
        sess.run([output], {images: np.random.rand(*image_shape)})
      mean = moving_mean.eval()
      variance = moving_variance.eval()
      # Although we feed different images, the moving_mean and moving_variance
      # shouldn't change.
      self.assertAllClose(mean, expected_mean)
      self.assertAllClose(variance, expected_var) 
Developer: Cyber-Neuron, Project: inception_v3, Lines: 35, Source file: ops_test.py


Note: The inception.slim.ops.batch_norm examples in this article were compiled by 纯净天空 from open-source code and documentation platforms such as GitHub and MSDocs. The snippets are selected from open-source projects contributed by various developers, and copyright in the source code remains with the original authors; for distribution and use, please follow the corresponding project's license. Do not reproduce without permission.