

Python tensorflow.initialize_variables Function Code Examples

This article collects typical usage examples of the tensorflow.initialize_variables function in Python. If you are unsure how to call initialize_variables, or want to see how it is used in real code, the curated examples below should help.


The following shows 15 code examples of the initialize_variables function, drawn from open-source projects and sorted by popularity by default.
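Before the project-specific examples, here is a minimal, self-contained sketch of the basic call pattern, assuming a legacy TensorFlow release (0.x / early 1.x) in which tf.initialize_variables is still available; in later releases the same operation was renamed tf.variables_initializer. The variables v and w below are illustrative placeholders, not taken from any of the cited projects.

import tensorflow as tf

# Two variables we want to initialize selectively.
v = tf.Variable(tf.zeros([3]), name="v")
w = tf.Variable(tf.ones([3]), name="w")

# initialize_variables returns an Op that initializes only the listed
# variables, unlike initialize_all_variables, which initializes every
# variable in the graph.
init_op = tf.initialize_variables([v, w])

with tf.Session() as sess:
    sess.run(init_op)
    print(sess.run([v, w]))

Running the Op before any read of v or w avoids the FailedPreconditionError raised when an uninitialized variable is used.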

Example 1: __call__

    def __call__(self, flow=None):
        """Constructs the layer in `Tensorflow` graph.

        Args:
            flow: This argument is ignored. (Default value = None)

        Returns:
            Output of this layer.

        """

        with tf.variable_op_scope([flow], self.name, 'Embedding', reuse=self.reuse):
            if not self.reuse:
                self._table_loader = tf.placeholder(tf.float32, shape=self._init_values.shape, name='loader')
                self._lookup_table = tf.get_variable(
                    'lookup_table',
                    initializer=self._table_loader,
                    trainable=self.trainable)
                self.params.append(self._lookup_table)
                tf.initialize_variables(self.params).run(feed_dict={self._table_loader: self._init_values})
                self.reuse = True

            flow = tf.placeholder(tf.int64, [None] + self._input_shape, 'input')
            tf.add_to_collection(GraphKeys.MODEL_INPUTS, flow)
            flow = tf.nn.embedding_lookup(self._lookup_table, flow)

        tf.add_to_collection(tf.GraphKeys.ACTIVATIONS, flow)
        return flow
Author: Anna-Jiang, Project: first-test, Lines: 28, Source: layers.py

Example 2: testInitializeFromValue

  def testInitializeFromValue(self):
    with self.test_session() as sess:
      init = tf.constant(0.1)
      w = tf.get_variable("v", initializer=init)
      sess.run(tf.initialize_variables([w]))
      self.assertAllClose(w.eval(), 0.1)

      with self.assertRaisesRegexp(ValueError, "shape"):
        # We disallow explicit shape specification when initializer is constant.
        tf.get_variable("u", [1], initializer=init)

      with tf.variable_scope("foo", initializer=init):
        # Constant initializer can be passed through scopes if needed.
        v = tf.get_variable("v")
        sess.run(tf.initialize_variables([v]))
        self.assertAllClose(v.eval(), 0.1)

      # Check that non-float32 initializer creates a non-float32 variable.
      init = tf.constant(1, dtype=tf.int32)
      t = tf.get_variable("t", initializer=init)
      self.assertEqual(t.dtype.base_dtype, tf.int32)

      # Raise error if `initializer` dtype and `dtype` are not identical.
      with self.assertRaisesRegexp(ValueError, "don't match"):
        tf.get_variable("s", initializer=init, dtype=tf.float64)
Author: BloodD, Project: tensorflow, Lines: 25, Source: variable_scope_test.py

Example 3: evaluate_model

  def evaluate_model(self, accuracy, num_steps, feed_vars=(), feed_data=None,
                     summary_tag=None, print_every=0):
    """Evaluates the given model.

    Args:
      accuracy: The metric that is being evaluated.
      num_steps: The number of steps to run in the evaluator.
      feed_vars: A list or tuple of the variables that will be fed.
      feed_data: A generator that produces tuples of the same length as
        feed_vars.
      summary_tag: If provided, the final result of running the model will be
        published to this tag.
      print_every: Print a summary every so many steps, use 0 to disable.
    Returns:
      The accuracy.
    """
    test_vars = tf.get_collection(bookkeeper.GraphKeys.TEST_VARIABLES)
    if test_vars:
      tf.initialize_variables(test_vars).run()
    result = self.run_model([accuracy],
                            num_steps,
                            feed_vars=feed_vars,
                            feed_data=feed_data,
                            print_every=print_every,
                            allow_initialize=False)
    if summary_tag and self._summary_writer:
      summary = tf.Summary(
          value=[tf.Summary.Value(tag=summary_tag,
                                  simple_value=float(result[1]))])
      event = tf.Event(wall_time=time.time(),
                       summary=summary,
                       step=int(result[0]))
      self._summary_writer.add_event(event)
    return result[1]
Author: pombredanne, Project: prettytensor, Lines: 34, Source: local_trainer.py

Example 4: testVarScopeRegularizer

 def testVarScopeRegularizer(self):
   with self.test_session() as sess:
     init = tf.constant_initializer(0.3)
     def regularizer1(v):
       return tf.reduce_mean(v) + 0.1
     def regularizer2(v):
       return tf.reduce_mean(v) + 0.2
     with tf.variable_scope("tower", regularizer=regularizer1) as tower:
       with tf.variable_scope("foo", initializer=init):
         v = tf.get_variable("v", [])
         sess.run(tf.initialize_variables([v]))
         losses = tf.get_collection(tf.GraphKeys.REGULARIZATION_LOSSES)
         self.assertEqual(1, len(losses))
         self.assertAllClose(losses[0].eval(), 0.4)
       with tf.variable_scope(tower, initializer=init) as vs:
         u = tf.get_variable("u", [])
         vs.set_regularizer(regularizer2)
         w = tf.get_variable("w", [])
         # The next 3 variables are not regularized, to test disabling regularization.
         x = tf.get_variable("x", [], regularizer=tf.no_regularizer)
         with tf.variable_scope("baz", regularizer=tf.no_regularizer):
           y = tf.get_variable("y", [])
         vs.set_regularizer(tf.no_regularizer)
         z = tf.get_variable("z", [])
         # Check results.
         losses = tf.get_collection(tf.GraphKeys.REGULARIZATION_LOSSES)
         self.assertEqual(3, len(losses))
         sess.run(tf.initialize_variables([u, w, x, y, z]))
         self.assertAllClose(losses[0].eval(), 0.4)
         self.assertAllClose(losses[1].eval(), 0.4)
         self.assertAllClose(losses[2].eval(), 0.5)
       with tf.variable_scope("foo", reuse=True):
         v = tf.get_variable("v", [])  # "v" is already there, reused
         losses = tf.get_collection(tf.GraphKeys.REGULARIZATION_LOSSES)
         self.assertEqual(3, len(losses))  # No new loss added.
Author: 285219011, Project: hello-world, Lines: 35, Source: variable_scope_test.py

Example 5: train

    def train(self, session, text, num_steps):
        """ Train embeddings on given text"""
        generator = bigram_batch.SkipgramGenerator(
            text, self._batch_size, self._num_skips)

        is_own = lambda x: x.name.startswith(self._scope_name)
        tf.initialize_variables(filter(is_own, tf.all_variables())).run()
        print('Initialized')
        average_loss = 0
        step = 0
        while step < num_steps:
            batches_labels = zip(*generator.next())
            for step, (batch, label) in enumerate(batches_labels, step):
                feed_dict = {self._train_dataset: batch,
                             self._train_labels: label.reshape(label.shape[0], 1)}

                _, l = session.run(
                    [self._optimizer, self._loss], feed_dict=feed_dict)
                average_loss += l
                if step % 2000 == 0:
                    if step > 0:
                        average_loss = average_loss / 2000
                    # The average loss is an estimate of the loss over the last
                    # 2000 batches.
                    print('Average loss at step %d: %f' % (step, average_loss))
                    average_loss = 0
Author: frenzykryger, Project: lstm_dorway, Lines: 26, Source: skipgram_embedding.py

Example 6: test_variable

 def test_variable(self):
   with self.test_session() as sess:
     x = tf.Variable(2.0, name="CustomName")
     y = tf.constant(3.0)
     z = x * y
     z_new = copy(z)
     tf.initialize_variables([x]).run()
     self.assertEqual(z_new.eval(), 6.0)
Author: blei-lab, Project: edward, Lines: 8, Source: test_copy.py

Example 7: test_tensor_variable

 def test_tensor_variable(self):
   with self.test_session() as sess:
     x = tf.constant(2.0)
     y = tf.constant(3.0)
     z = x * y
     qx = tf.Variable(4.0, name="CustomName")
     z_new = copy(z, {x: qx})
     tf.initialize_variables([qx]).run()
     self.assertEqual(z_new.eval(), 12.0)
Author: blei-lab, Project: edward, Lines: 9, Source: test_copy.py

Example 8: init_vars

 def init_vars(self, init_hp, session, reset_hp=False):
     print(init_hp)
     init_feed = dict()
     init_feed[self.ph_hypers] = init_hp
     if os.path.exists(self.save_path):
         # Restore variables from disk.
         self.saver.restore(session, self.save_path)
         if reset_hp:
             tf.initialize_variables(var_list=self.reset_vars).run(feed_dict=init_feed)
     else:
         tf.initialize_all_variables().run(feed_dict=init_feed)
Author: ahangchen, Project: NN, Lines: 11, Source: hp2trend.py

Example 9: testVarScopeIntializer

 def testVarScopeIntializer(self):
   with self.test_session() as sess:
     init = tf.constant_initializer(0.3)
     with variable_scope.variable_scope("tower") as tower:
       with variable_scope.variable_scope("foo", initializer=init):
         v = variable_scope.get_variable("v", [])
         sess.run(tf.initialize_variables([v]))
         self.assertAllClose(v.eval(), 0.3)
       with variable_scope.variable_scope(tower, initializer=init):
         w = variable_scope.get_variable("w", [])
         sess.run(tf.initialize_variables([w]))
         self.assertAllClose(w.eval(), 0.3)
Author: peace195, Project: tensorflow, Lines: 12, Source: variable_scope_test.py

Example 10: testInitFromNonTensorValue

    def testInitFromNonTensorValue(self):
        with self.test_session() as sess:
            v = tf.get_variable("v", initializer=4, dtype=tf.int32)
            sess.run(tf.initialize_variables([v]))
            self.assertAllClose(v.eval(), 4)

            w = tf.get_variable("w", initializer=numpy.array([1, 2, 3]), dtype=tf.int32)
            sess.run(tf.initialize_variables([w]))
            self.assertAllClose(w.eval(), [1, 2, 3])

            with self.assertRaises(TypeError):
                tf.get_variable("x", initializer={})
Author: abhishekns, Project: tensorflow, Lines: 12, Source: variable_scope_test.py

Example 11: var_collection_example

def var_collection_example():
    g1 = tf.Graph()
    with g1.as_default():
        with tf.name_scope('scope1') as scope1:
            a = tf.Variable(tf.constant(1.0, shape=[1]), name='a')
            b = tf.Variable(tf.constant(2.0, shape=[1]), name='b')
        with tf.name_scope('scope2') as scope2:
            c = tf.Variable(tf.constant(3.0, shape=[1]), name='c')

    g2 = tf.Graph()
    with g2.as_default():
        with tf.name_scope('scope1') as scope1:
            a = tf.Variable(tf.constant(4.0, shape=[1]), name='a')
            b = tf.Variable(tf.constant(5.0, shape=[1]), name='b')
        with tf.name_scope('scope2') as scope2:
            c = tf.Variable(tf.constant(6.0, shape=[1]), name='c')

    vars_g1 = var_collect.collect_all(graph=g1)
    vars_g1_scope1 = var_collect.collect_scope('scope1', graph=g1)
    var_g1_scope1_a = var_collect.collect_name('scope1/a', graph=g1)

    vars_g2 = var_collect.collect_all(graph=g2)
    vars_g2_dict = var_collect.collect_list(
        ['scope1/a', 'scope1/b', 'scope2/c'],
        graph=g2)

    sess = tf.Session(graph=g1)
    sess.run(tf.initialize_variables(vars_g1))
    y_hat = [var.eval(sess)[0] for var in vars_g1]
    y = [1.0, 2.0, 3.0]
    print('Graph g1: ')
    print('y: [' + ', '.join([str(l) for l in y]) + ']')
    print('y_hat: [' + ', '.join([str(l) for l in y_hat]) + ']')
    sess.close()

    sess = tf.Session(graph=g2)
    sess.run(tf.initialize_variables(vars_g2))
    y_hat = [var.eval(sess)[0] for var in vars_g2]
    y = [4.0, 5.0, 6.0]
    print('Graph g2: ')
    print('y: [' + ', '.join([str(l) for l in y]) + ']')
    print('y_hat: [' + ', '.join([str(l) for l in y_hat]) + ']')

    var_collect.print_var_list(vars_g1, name='vars_g1')
    var_collect.print_var_list(vars_g2, name='vars_g2')
    var_collect.print_var_list(vars_g1_scope1, name='vars_g1_scope1')
    var_collect.print_var_list([var_g1_scope1_a], name='vars_g1_scope1_a')

    print('vars_g2_dict = {')
    for key, value in vars_g2_dict.items():
        print('    {}: {},'.format(key, value.eval(sess)[0]))
    print('}')
    sess.close()
Author: aiUIUC, Project: pyAIUtils, Lines: 53, Source: multi_graph_var_collection.py

Example 12: test_local_variable

 def test_local_variable(self):
   with self.test_session() as sess:
     self.assertEquals([], tf.local_variables())
     value0 = 42
     tf.contrib.framework.local_variable(value0)
     value1 = 43
     tf.contrib.framework.local_variable(value1)
     variables = tf.local_variables()
     self.assertEquals(2, len(variables))
     self.assertRaises(tf.OpError, sess.run, variables)
     tf.initialize_variables(variables).run()
     self.assertAllEqual(set([value0, value1]), set(sess.run(variables)))
Author: 2er0, Project: tensorflow, Lines: 12, Source: variables_test.py

Example 13: __init__

    def __init__(self, settings, session):
        self.s = session

        self.action_type = settings["action"]["type"]
        if self.action_type == "discrete":
            self.num_actions = settings["action"]["num_actions"]
        else:
            assert False, "Unknown action type: %s" % (self.action_type,)

        self.create_variables(settings)
        self.s.run(tf.initialize_variables(self.variables()))
        self.s.run(tf.initialize_variables(self.gradients()))
Author: amoliu, Project: deeprl, Lines: 12, Source: enc_dec.py

Example 14: _create_state

    def _create_state(self):
        """Prepare stateful variables modified during the recurrence."""

        # Both the queue and the stack are flattened stack_size * batch_size
        # tensors. `stack_size` many blocks of `batch_size` values
        stack_shape = (self.stack_size * self.batch_size, self.model_dim)
        self.stack = tf.Variable(tf.zeros(stack_shape, dtype=tf.float32),
                                 trainable=False, name="stack")
        self.queue = tf.Variable(tf.zeros((self.stack_size * self.batch_size,), dtype=tf.float32),
                                 trainable=False, name="queue")

        self.buff_cursors = tf.Variable(tf.zeros((self.batch_size,), dtype=tf.float32),
                                          trainable=False, name="buff_cursors")
        self.cursors = tf.Variable(tf.ones((self.batch_size,), dtype=tf.float32) * - 1,
                                   trainable=False, name="cursors")

        # TODO make parameterizable
        self.tracking_value = tf.Variable(tf.zeros((self.batch_size, self.tracking_dim), dtype=tf.float32),
                                          trainable=False, name="tracking_value")

        # Create an Op which will (re-)initialize the auxiliary variables
        # declared above.
        self._aux_vars = [self.stack, self.queue, self.buff_cursors, self.cursors,
                          self.tracking_value]
        self.variable_initializer = tf.initialize_variables(self._aux_vars)
Author: hans, Project: thinstack-rl, Lines: 25, Source: thin_stack.py

Example 15: __init__

    def __init__(self, session, optimizer_critic, optimizer_actor, critic_network, actor_network, gamma_lmbda,
                 state_dim, num_actions, summary_writer=None, summary_every=5):

        self.session = session
        self.summary_writer = summary_writer
        self.optimizer_critic = optimizer_critic
        self.optimizer_actor = optimizer_actor

        self.actor_network = actor_network
        self.critic_network = critic_network

        self.state_dim = state_dim
        self.num_actions = num_actions
        self.gamma_lmbda = tf.constant(gamma_lmbda)

        # initialize the graph on tensorflow
        self.create_variables()
        var_lists = tf.get_collection(tf.GraphKeys.VARIABLES)
        self.session.run(tf.initialize_variables(var_lists))

        # make sure the variables in graph are initialized
        self.session.run(tf.assert_variables_initialized())

        if self.summary_writer is not None:
            self.summary_writer.add_graph(self.session.graph)
            self.summary_every = summary_every
Author: gauthamvasan, Project: OpenAI-Gym, Lines: 26, Source: actor_critic_nn.py


Note: The tensorflow.initialize_variables examples in this article were compiled by 纯净天空 from open-source code and documentation platforms such as GitHub and MSDocs. The snippets were selected from open-source projects contributed by various developers; copyright remains with the original authors, and distribution or use should follow the corresponding project's license. Please do not reproduce without permission.