当前位置: 首页>>代码示例>>Python>>正文


Python core.dense函数代码示例

本文整理汇总了Python中tensorflow.python.layers.core.dense函数的典型用法代码示例。如果您正苦于以下问题:Python dense函数的具体用法?Python dense怎么用?Python dense使用的例子?那么恭喜您, 这里精选的函数代码示例或许可以为您提供帮助。


在下文中一共展示了dense函数的15个代码示例,这些例子默认根据受欢迎程度排序。您可以为喜欢或者感觉有用的代码点赞,您的评价将有助于系统推荐出更棒的Python代码示例。

示例1: testFunctionalDenseTwiceReuse

 def testFunctionalDenseTwiceReuse(self):
   """Reusing a named functional dense layer must not create new variables."""
   data = random_ops.random_uniform((5, 3), seed=1)
   # First call builds the kernel/bias variables under the name 'my_dense'.
   core_layers.dense(data, 2, name='my_dense')
   before_reuse = variables.trainable_variables()
   # Second call with reuse=True should bind to the existing variables.
   core_layers.dense(data, 2, name='my_dense', reuse=True)
   after_reuse = variables.trainable_variables()
   self.assertEqual(before_reuse, after_reuse)
开发者ID:AliMiraftab,项目名称:tensorflow,代码行数:7,代码来源:core_test.py

示例2: testFunctionalDenseTwice

 def testFunctionalDenseTwice(self):
   """Two unnamed dense calls should each create a fresh kernel/bias pair."""
   data = random_ops.random_uniform((5, 3), seed=1)
   core_layers.dense(data, 2)
   first_pass = variables.trainable_variables()
   core_layers.dense(data, 2)
   second_pass = variables.trainable_variables()
   # One kernel + one bias after the first call ...
   self.assertEqual(len(first_pass), 2)
   # ... and two of each once the second (non-reusing) layer is built.
   self.assertEqual(len(second_pass), 4)
开发者ID:AliMiraftab,项目名称:tensorflow,代码行数:8,代码来源:core_test.py

示例3: testFunctionalDenseTwice

 def testFunctionalDenseTwice(self):
   """Unnamed dense layers get unique names: each call adds to the var store."""
   data = random_ops.random_uniform((5, 3), seed=1)
   core_layers.dense(data, 2)
   store_after_first = _get_variable_dict_from_varstore().values()
   core_layers.dense(data, 2)
   store_after_second = _get_variable_dict_from_varstore().values()
   # Kernel + bias from the first layer ...
   self.assertEqual(len(store_after_first), 2)
   # ... plus another kernel + bias from the second.
   self.assertEqual(len(store_after_second), 4)
开发者ID:AndrewTwinz,项目名称:tensorflow,代码行数:8,代码来源:core_test.py

示例4: dnn_logit_fn

  def dnn_logit_fn(features, mode):
    """Deep Neural Network logit_fn.

    Args:
      features: This is the first item returned from the `input_fn`
                passed to `train`, `evaluate`, and `predict`. This should be a
                single `Tensor` or `dict` of same.
      mode: Optional. Specifies if this training, evaluation or prediction. See
            `ModeKeys`.

    Returns:
      A `Tensor` representing the logits, or a list of `Tensor`'s representing
      multiple logits in the MultiHead case.
    """
    # Convert raw features into one dense input tensor; variables created here
    # are sharded by the closed-over `input_layer_partitioner`.
    with variable_scope.variable_scope(
        'input_from_feature_columns',
        values=tuple(six.itervalues(features)),
        partitioner=input_layer_partitioner):
      net = feature_column_lib.input_layer(
          features=features, feature_columns=feature_columns)

    # Stack one fully connected layer per entry in `hidden_units`.
    for layer_id, num_hidden_units in enumerate(hidden_units):
      with variable_scope.variable_scope(
          'hiddenlayer_%d' % layer_id, values=(net,)) as hidden_layer_scope:
        net = core_layers.dense(
            net,
            units=num_hidden_units,
            activation=activation_fn,
            kernel_initializer=init_ops.glorot_uniform_initializer(),
            name=hidden_layer_scope)
        # Dropout is applied only during training.
        if dropout is not None and mode == model_fn.ModeKeys.TRAIN:
          net = core_layers.dropout(net, rate=dropout, training=True)
      _add_hidden_layer_summary(net, hidden_layer_scope.name)

    if isinstance(units, int):
      # Single-head case: one linear (no activation) logits layer.
      with variable_scope.variable_scope(
          'logits', values=(net,)) as logits_scope:
        logits = core_layers.dense(
            net,
            units=units,
            activation=None,
            kernel_initializer=init_ops.glorot_uniform_initializer(),
            name=logits_scope)
      _add_hidden_layer_summary(logits, logits_scope.name)
    else:
      # MultiHead case: `units` is a sequence of per-head logits dimensions;
      # build a separate linear logits tensor for each head.
      logits = []
      for head_index, logits_dimension in enumerate(units):
        with variable_scope.variable_scope(
            'logits_head_{}'.format(head_index), values=(net,)) as logits_scope:
          these_logits = core_layers.dense(
              net,
              units=logits_dimension,
              activation=None,
              kernel_initializer=init_ops.glorot_uniform_initializer(),
              name=logits_scope)
        _add_hidden_layer_summary(these_logits, logits_scope.name)
        logits.append(these_logits)
    return logits
开发者ID:DjangoPeng,项目名称:tensorflow,代码行数:58,代码来源:dnn.py

示例5: testFunctionalDenseWithCustomGetter

 def testFunctionalDenseWithCustomGetter(self):
   """A scope-level custom getter must be invoked once per created variable."""
   call_count = [0]

   def counting_getter(getter, *args, **kwargs):
     # Count every variable request, then defer to the real getter.
     call_count[0] += 1
     return getter(*args, **kwargs)

   with tf.variable_scope('test', custom_getter=counting_getter):
     data = tf.random_uniform((5, 3), seed=1)
     core_layers.dense(data, 2)
   # dense creates exactly two variables: kernel and bias.
   self.assertEqual(call_count[0], 2)
开发者ID:Hwhitetooth,项目名称:tensorflow,代码行数:9,代码来源:core_test.py

示例6: dnn_logit_fn

  def dnn_logit_fn(features, mode):
    """Deep Neural Network logit_fn.

    Args:
      features: This is the first item returned from the `input_fn`
                passed to `train`, `evaluate`, and `predict`. This should be a
                single `Tensor` or `dict` of same.
      mode: Optional. Specifies if this training, evaluation or prediction. See
            `ModeKeys`.

    Returns:
      A `Tensor` representing the logits, or a list of `Tensor`'s representing
      multiple logits in the MultiHead case.
    """
    is_training = mode == model_fn.ModeKeys.TRAIN
    # Convert raw features into one dense input tensor; variables created here
    # are sharded by the closed-over `input_layer_partitioner`.
    with variable_scope.variable_scope(
        'input_from_feature_columns',
        values=tuple(six.itervalues(features)),
        partitioner=input_layer_partitioner):
      net = feature_column_lib.input_layer(
          features=features, feature_columns=feature_columns)
    # Stack one fully connected layer per entry in `hidden_units`.
    for layer_id, num_hidden_units in enumerate(hidden_units):
      with variable_scope.variable_scope(
          'hiddenlayer_%d' % layer_id, values=(net,)) as hidden_layer_scope:
        net = core_layers.dense(
            net,
            units=num_hidden_units,
            activation=activation_fn,
            kernel_initializer=init_ops.glorot_uniform_initializer(),
            name=hidden_layer_scope)
        # Dropout is applied only during training.
        if dropout is not None and is_training:
          net = core_layers.dropout(net, rate=dropout, training=True)
        if batch_norm:
          # TODO(hjm): In future, if this becomes popular, we can enable
          # customization of the batch normalization params by accepting a
          # list of `BatchNormalization` instances as `batch_norm`.
          net = normalization.batch_normalization(
              net,
              # The default momentum 0.99 actually crashes on certain
              # problem, so here we use 0.999, which is the default of
              # tf.contrib.layers.batch_norm.
              momentum=0.999,
              training=is_training,
              name='batchnorm_%d' % layer_id)
      _add_hidden_layer_summary(net, hidden_layer_scope.name)

    # Final linear (no activation) projection to the logits dimension.
    with variable_scope.variable_scope('logits', values=(net,)) as logits_scope:
      logits = core_layers.dense(
          net,
          units=units,
          activation=None,
          kernel_initializer=init_ops.glorot_uniform_initializer(),
          name=logits_scope)
    _add_hidden_layer_summary(logits, logits_scope.name)

    return logits
开发者ID:AnishShah,项目名称:tensorflow,代码行数:56,代码来源:dnn.py

示例7: testFunctionalDenseTwiceReuseFromScope

 def testFunctionalDenseTwiceReuseFromScope(self):
   """reuse=True on the enclosing scope should behave like dense(..., reuse=True)."""
   with self.test_session():
     # Create the layer's variables inside a named scope.
     with variable_scope.variable_scope('scope'):
       data = random_ops.random_uniform((5, 3), seed=1)
       core_layers.dense(data, 2, name='my_dense')
       created = variables.trainable_variables()
     # Re-entering the same scope with reuse=True must bind to them.
     with variable_scope.variable_scope('scope', reuse=True):
       core_layers.dense(data, 2, name='my_dense')
       reused = variables.trainable_variables()
     self.assertEqual(created, reused)
开发者ID:AndrewTwinz,项目名称:tensorflow,代码行数:10,代码来源:core_test.py

示例8: testKernelRegularizerWithReuse

 def testKernelRegularizerWithReuse(self):
   """Reusing a regularized layer must not duplicate its regularization loss."""
   def scaled_sum(x):
     return math_ops.reduce_sum(x) * 1e-3

   data = random_ops.random_uniform((5, 3), seed=1)
   _ = core_layers.dense(
       data, 2, name='my_dense', kernel_regularizer=scaled_sum)
   self.assertEqual(
       len(ops.get_collection(ops.GraphKeys.REGULARIZATION_LOSSES)), 1)
   # Rebuilding the same layer with reuse=True should add no new loss term.
   _ = core_layers.dense(
       data, 2, name='my_dense', kernel_regularizer=scaled_sum, reuse=True)
   self.assertEqual(
       len(ops.get_collection(ops.GraphKeys.REGULARIZATION_LOSSES)), 1)
开发者ID:AndrewTwinz,项目名称:tensorflow,代码行数:11,代码来源:core_test.py

示例9: testFunctionalDenseInitializerFromScope

 def testFunctionalDenseInitializerFromScope(self):
   """dense should inherit the kernel initializer from the enclosing scope."""
   with self.test_session() as sess:
     with variable_scope.variable_scope(
         'scope', initializer=init_ops.ones_initializer()):
       data = random_ops.random_uniform((5, 3), seed=1)
       core_layers.dense(data, 2)
       sess.run(variables.global_variables_initializer())
       weight_values = sess.run(variables.trainable_variables())
       self.assertEqual(len(weight_values), 2)
       kernel_value, bias_value = weight_values
       # The kernel picks up the scope's ones initializer.
       self.assertAllClose(kernel_value, np.ones((3, 2)))
       # The bias keeps its default zeros initializer.
       self.assertAllClose(bias_value, np.zeros((2)))
开发者ID:AliMiraftab,项目名称:tensorflow,代码行数:13,代码来源:core_test.py

示例10: testFunctionalDenseInitializerFromScope

 def testFunctionalDenseInitializerFromScope(self):
   """Scope-level initializer applies to the kernel but not the bias."""
   with variable_scope.variable_scope(
       'scope', initializer=init_ops.ones_initializer()), self.test_session():
     data = random_ops.random_uniform((5, 3), seed=1)
     core_layers.dense(data, 2)
     variables.global_variables_initializer().run()
     var_store = _get_variable_dict_from_varstore()
     self.assertEqual(len(var_store), 2)
     # The kernel picks up the ones initializer from the scope.
     self.assertAllClose(
         var_store['scope/dense/kernel'].read_value().eval(), np.ones((3, 2)))
     # The bias is still initialized with its default zeros.
     self.assertAllClose(
         var_store['scope/dense/bias'].read_value().eval(), np.zeros((2)))
开发者ID:AndrewTwinz,项目名称:tensorflow,代码行数:14,代码来源:core_test.py

示例11: testFunctionalDenseInitializerFromScope

 def testFunctionalDenseInitializerFromScope(self):
   """Scope initializer hits the kernel only; works in graph and eager mode."""
   with variable_scope.variable_scope(
       'scope', initializer=init_ops.ones_initializer()):
     data = random_ops.random_uniform((5, 3), seed=1)
     core_layers.dense(data, 2)
     if context.in_graph_mode():
       # Graph mode needs an explicit initialization run; eager does not.
       self.evaluate(variables.global_variables_initializer())
     all_weights = variables.trainable_variables()
     self.assertEqual(len(all_weights), 2)
     kernel, bias = all_weights
     # The kernel picks up the ones initializer from the scope.
     self.assertAllClose(self.evaluate(kernel.read_value()), np.ones((3, 2)))
     # The bias keeps its default zeros initializer.
     self.assertAllClose(self.evaluate(bias.read_value()), np.zeros((2)))
开发者ID:keveman,项目名称:tensorflow,代码行数:14,代码来源:core_test.py

示例12: _fn

 def _fn(x, output_units):
   """Fully connected MLP parameterized via `real_nvp_template`."""
   # Hidden stack: one dense layer per entry in `hidden_layers`.
   net = x
   for layer_units in hidden_layers:
     net = layers.dense(
         inputs=net, units=layer_units, activation=activation, *args, **kwargs)
   # Final linear projection; output width doubles when a log-scale head
   # is produced alongside the shift.
   final_units = output_units if shift_only else 2 * output_units
   net = layers.dense(
       inputs=net, units=final_units, activation=None, *args, **kwargs)
   if shift_only:
     return net, None
   shift, log_scale = array_ops.split(net, 2, axis=-1)
   return shift, log_scale
开发者ID:ahmedsaiduk,项目名称:tensorflow,代码行数:15,代码来源:real_nvp.py

示例13: fn

 def fn(a, b, c):
   """Return a bias-free dense projection of `a` plus the matmul of `b` and `c`."""
   # The custom initializer ignores shape/dtype and returns the closed-over
   # tensor `w` so the dense kernel is fixed to it.
   dense_part = core_layers.dense(
       a,
       10,
       use_bias=False,
       kernel_initializer=lambda shape, dtype, partition_info: w)
   return dense_part + math_ops.matmul(b, c)
开发者ID:bikong2,项目名称:tensorflow,代码行数:7,代码来源:rev_block_lib_test.py

示例14: testEagerExecution

 def testEagerExecution(self):
   """dense under an EagerVariableStore creates, then reuses, its variables."""
   with context.eager_mode():
     store = variable_scope.EagerVariableStore()
     data = constant_op.constant([[2.0]])
     with store.as_default():
       output = core_layers.dense(
           data, 1, name='my_dense',
           kernel_initializer=init_ops.ones_initializer())
     # ones kernel + zeros bias: 2.0 * 1 + 0 = 2.0.
     self.assertAllEqual(output, [[2.0]])
     self.assertEqual(len(store.variables()), 2)
     # Recreate the layer to test reuse.
     with store.as_default():
       core_layers.dense(
           data, 1, name='my_dense',
           kernel_initializer=init_ops.ones_initializer())
     # No new variables: the stored pair was reused.
     self.assertEqual(len(store.variables()), 2)
开发者ID:AndrewTwinz,项目名称:tensorflow,代码行数:16,代码来源:core_test.py

示例15: testFunctionalDense

 def testFunctionalDense(self):
   """A functional dense call builds two variables and names its output op."""
   with self.test_session():
     data = random_ops.random_uniform((5, 3), seed=1)
     activations = core_layers.dense(
         data, 2, activation=nn_ops.relu, name='my_dense')
     trainable = ops.get_collection(ops.GraphKeys.TRAINABLE_VARIABLES)
     # Exactly kernel + bias were registered as trainable.
     self.assertEqual(len(trainable), 2)
     # The output op is the activation, scoped under the layer name.
     self.assertEqual(activations.op.name, 'my_dense/Relu')
开发者ID:AndrewTwinz,项目名称:tensorflow,代码行数:8,代码来源:core_test.py


注:本文中的tensorflow.python.layers.core.dense函数示例由纯净天空整理自Github/MSDocs等开源代码及文档管理平台,相关代码片段筛选自各路编程大神贡献的开源项目,源码版权归原作者所有,传播和使用请参考对应项目的License;未经允许,请勿转载。