

Python tensorflow.variables_initializer Method Code Examples

This article collects typical code examples of the tensorflow.variables_initializer method in Python. If you are wondering how tensorflow.variables_initializer is used in practice, the curated examples below should help. You can also explore further usage examples from the tensorflow module in which this method lives.


A total of 15 code examples of tensorflow.variables_initializer are shown below, sorted by popularity by default.
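Before the project-specific examples, here is a minimal, self-contained sketch of the typical TF1.x graph-mode usage. The variables w and b are illustrative only and do not come from any of the projects below:

import tensorflow as tf

w = tf.get_variable('w', shape=[3, 3], initializer=tf.zeros_initializer())
b = tf.get_variable('b', shape=[3], initializer=tf.zeros_initializer())

# A single op that runs the initializers of exactly these variables.
init_op = tf.variables_initializer([w, b])

with tf.Session() as sess:
    sess.run(init_op)            # must run before reading w or b
    print(sess.run([w, b]))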

Example 1: initialize_interdependent_variables

# Required module: import tensorflow [as alias]
# Or: from tensorflow import variables_initializer [as alias]
def initialize_interdependent_variables(session, vars_list, feed_dict):
    """Initialize a list of variables one at a time, which is useful if
    initialization of some variables depends on initialization of the others.
    """
    vars_left = vars_list
    while len(vars_left) > 0:
        new_vars_left = []
        for v in vars_left:
            try:
                # If using an older version of TensorFlow, uncomment the line
                # below and comment out the line after it.
                # session.run(tf.initialize_variables([v]), feed_dict)
                session.run(tf.variables_initializer([v]), feed_dict)
            except tf.errors.FailedPreconditionError:
                new_vars_left.append(v)
        if len(new_vars_left) >= len(vars_left):
            # This can happen if the variables all depend on each other, or more likely if there's
            # another variable outside of the list that still needs to be initialized. This could be
            # detected here, but life's finite.
            raise Exception("Cycle in variable dependencies, or external precondition unsatisfied.")
        else:
            vars_left = new_vars_left 
Developer ID: xuwd11, Project: cs294-112_hws, Lines of code: 24, Source file: dqn_utils.py
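A hypothetical usage sketch of the helper above: the variables a and b, and the dependency between them, are made up for illustration. b's initial value is computed from a, so b's initializer raises FailedPreconditionError until a has been initialized, and the retry loop resolves the ordering:

import tensorflow as tf

a = tf.get_variable('a', initializer=tf.constant(1.0))
b = tf.get_variable('b', initializer=2.0 * a)  # b's initial value depends on a

with tf.Session() as sess:
    # The loop retries b after a FailedPreconditionError, so the order in the list does not matter.
    initialize_interdependent_variables(sess, [b, a], feed_dict={})
    print(sess.run([a, b]))  # [1.0, 2.0]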

Example 2: create_session

# Required module: import tensorflow [as alias]
# Or: from tensorflow import variables_initializer [as alias]
def create_session(config_dict=dict(), force_as_default=False):
    config = tf.ConfigProto()
    for key, value in config_dict.items():
        fields = key.split('.')
        obj = config
        for field in fields[:-1]:
            obj = getattr(obj, field)
        setattr(obj, fields[-1], value)
    session = tf.Session(config=config)
    if force_as_default:
        session._default_session = session.as_default()
        session._default_session.enforce_nesting = False
        session._default_session.__enter__()
    return session

#----------------------------------------------------------------------------
# Initialize all tf.Variables that have not already been initialized.
# Equivalent to the following, but more efficient and does not bloat the tf graph:
#   tf.variables_initializer(tf.report_uninitialized_variables()).run()
Developer ID: zalandoresearch, Project: disentangling_conditional_gans, Lines of code: 21, Source file: tfutil.py
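The helper that the trailing comment documents is cut off at the excerpt boundary (Example 3 below shows the same idea in full from another project). Purely as a hedged sketch of the approach the comment describes, not the project's actual tfutil.py code:

def init_uninitialized_vars(sess, var_list=None):
    # Hypothetical sketch: initialize only the variables that are not yet initialized.
    if var_list is None:
        var_list = tf.global_variables()
    init_flags = sess.run([tf.is_variable_initialized(v) for v in var_list])
    uninitialized = [v for v, flag in zip(var_list, init_flags) if not flag]
    if uninitialized:
        sess.run(tf.variables_initializer(uninitialized))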

Example 3: initialize_uninitialized_global_variables

# Required module: import tensorflow [as alias]
# Or: from tensorflow import variables_initializer [as alias]
def initialize_uninitialized_global_variables(sess):
    """
    Only initializes the variables of a TensorFlow session that were not
    already initialized.
    :param sess: the TensorFlow session
    :return:
    """
    # List all global variables
    global_vars = tf.global_variables()

    # Find initialized status for all variables
    is_var_init = [tf.is_variable_initialized(var) for var in global_vars]
    is_initialized = sess.run(is_var_init)

    # List all variables that were not initialized previously
    not_initialized_vars = [var for (var, init) in
                            zip(global_vars, is_initialized) if not init]

    # Initialize all uninitialized variables found, if any
    if len(not_initialized_vars):
        sess.run(tf.variables_initializer(not_initialized_vars)) 
Developer ID: StephanZheng, Project: neural-fingerprinting, Lines of code: 23, Source file: utils_tf.py

Example 4: __init__

# Required module: import tensorflow [as alias]
# Or: from tensorflow import variables_initializer [as alias]
def __init__(self, epsilon=1e-4, shape=(), scope=''):
        sess = get_session()

        self._new_mean = tf.placeholder(shape=shape, dtype=tf.float64)
        self._new_var = tf.placeholder(shape=shape, dtype=tf.float64)
        self._new_count = tf.placeholder(shape=(), dtype=tf.float64)

        
        with tf.variable_scope(scope, reuse=tf.AUTO_REUSE):
            self._mean  = tf.get_variable('mean',  initializer=np.zeros(shape, 'float64'),      dtype=tf.float64)
            self._var   = tf.get_variable('std',   initializer=np.ones(shape, 'float64'),       dtype=tf.float64)    
            self._count = tf.get_variable('count', initializer=np.full((), epsilon, 'float64'), dtype=tf.float64)

        self.update_ops = tf.group([
            self._var.assign(self._new_var),
            self._mean.assign(self._new_mean),
            self._count.assign(self._new_count)
        ])

        sess.run(tf.variables_initializer([self._mean, self._var, self._count]))
        self.sess = sess
        self._set_mean_var_count() 
Developer ID: MaxSobolMark, Project: HardRLWithYoutube, Lines of code: 24, Source file: running_mean_std.py

Example 5: __init__

# Required module: import tensorflow [as alias]
# Or: from tensorflow import variables_initializer [as alias]
def __init__(self, ckpt_dir, **kwargs_saver):
        """
        :param ckpt_dir: where to save data
        :param kwargs_saver: Passed on to the tf.train.Saver that will be created
        """
        os.makedirs(ckpt_dir, exist_ok=True)
        self.ckpt_dir = ckpt_dir
        self.ckpt_base_file_path = path.join(ckpt_dir, _CKPT_FN)

        all_saveable_vars = tf_helpers.all_saveable_objects()
        var_list = kwargs_saver.get('var_list', all_saveable_vars)
        var_names = VarNames(ckpt_dir)
        if not var_names.exists():
            print('Saver for {} saves {} variables...'.format(self.ckpt_dir, len(var_list)))
            var_names.write([v.name for v in var_list])

        unrestored_vars = [v for v in all_saveable_vars if v not in var_list]
        if unrestored_vars:
            print('Found {} unrestored variables'.format(len(unrestored_vars)))

        self.init_unrestored_op = (tf.variables_initializer(unrestored_vars)
                                   if unrestored_vars else tf.no_op())

        self.saver = tf.train.Saver(**kwargs_saver) 
Developer ID: fab-jul, Project: imgcomp-cvpr, Lines of code: 26, Source file: saver.py

Example 6: __init__

# Required module: import tensorflow [as alias]
# Or: from tensorflow import variables_initializer [as alias]
def __init__(self, epsilon=1e-4, shape=(), scope=''):
        sess = get_session()

        self._new_mean = tf.placeholder(shape=shape, dtype=tf.float64)
        self._new_var = tf.placeholder(shape=shape, dtype=tf.float64)
        self._new_count = tf.placeholder(shape=(), dtype=tf.float64)


        with tf.variable_scope(scope, reuse=tf.AUTO_REUSE):
            self._mean  = tf.get_variable('mean',  initializer=np.zeros(shape, 'float64'),      dtype=tf.float64)
            self._var   = tf.get_variable('std',   initializer=np.ones(shape, 'float64'),       dtype=tf.float64)
            self._count = tf.get_variable('count', initializer=np.full((), epsilon, 'float64'), dtype=tf.float64)

        self.update_ops = tf.group([
            self._var.assign(self._new_var),
            self._mean.assign(self._new_mean),
            self._count.assign(self._new_count)
        ])

        sess.run(tf.variables_initializer([self._mean, self._var, self._count]))
        self.sess = sess
        self._set_mean_var_count() 
Developer ID: quantumiracle, Project: Reinforcement_Learning_for_Traffic_Light_Control, Lines of code: 24, Source file: running_mean_std.py

Example 7: test_sample_buffer

# Required module: import tensorflow [as alias]
# Or: from tensorflow import variables_initializer [as alias]
def test_sample_buffer():
    capacity = 100
    batch = 17
    lots = 100
    with tf.Graph().as_default(), tf.Session() as sess:
        buffer = utils.SampleBuffer(capacity=capacity, schemas=dict(x=utils.Schema(tf.int32, ())))
        tf.variables_initializer(tf.global_variables() + tf.local_variables()).run()
        i_p = tf.placeholder(dtype=tf.int32, shape=())
        add = buffer.add(x=batch * i_p + tf.range(batch))
        sample = buffer.sample(lots, seed=7)['x']
        all_data_1 = buffer.data()
        all_data_2 = buffer.read(tf.range(buffer.size()))
        for i in range(20):
            add.run(feed_dict={i_p: i})
            samples = sample.eval()
            hi = batch * (i + 1)
            lo = max(0, hi - capacity)
            assert lo <= samples.min() <= lo + 3
            assert hi - 5 <= samples.max() < hi
            np.testing.assert_equal(sess.run(all_data_1), sess.run(all_data_2)) 
Developer ID: openai, Project: lm-human-preferences, Lines of code: 22, Source file: test_core_utils.py

Example 8: variables_initializer

# Required module: import tensorflow [as alias]
# Or: from tensorflow import variables_initializer [as alias]
def variables_initializer(var_list, name="init"):
  """Returns an Op that initializes a list of variables.

  After you launch the graph in a session, you can run the returned Op to
  initialize all the variables in `var_list`. This Op runs all the
  initializers of the variables in `var_list` in parallel.

  Calling `initialize_variables()` is equivalent to passing the list of
  initializers to `Group()`.

  If `var_list` is empty, however, the function still returns an Op that can
  be run. That Op just has no effect.

  Args:
    var_list: List of `Variable` objects to initialize.
    name: Optional name for the returned operation.

  Returns:
    An Op that runs the initializers of all the specified variables.
  """
  if var_list:
    return control_flow_ops.group(*[v.initializer for v in var_list], name=name)
  return control_flow_ops.no_op(name=name) 
Developer ID: ryfeus, Project: lambda-packs, Lines of code: 25, Source file: variables.py
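A small illustration, under assumed variable names, of the two behaviors the docstring above describes: a grouped op that runs all initializers, and a harmless no-op when the list is empty:

import tensorflow as tf

v = tf.get_variable('v', shape=[2], initializer=tf.ones_initializer())

noop_init = tf.variables_initializer([])           # empty list: still a runnable op
group_init = tf.variables_initializer([v], name='init_v')

with tf.Session() as sess:
    sess.run(noop_init)    # runs fine, has no effect
    sess.run(group_init)
    print(sess.run(v))     # [1. 1.]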

Example 9: initialize

# Required module: import tensorflow [as alias]
# Or: from tensorflow import variables_initializer [as alias]
def initialize(sess=None):
    """
    Initialize all the uninitialized variables in the global scope.

    :param sess: (TensorFlow Session)
    """
    if sess is None:
        sess = tf.get_default_session()
    new_variables = set(tf.global_variables()) - ALREADY_INITIALIZED
    sess.run(tf.variables_initializer(new_variables))
    ALREADY_INITIALIZED.update(new_variables)


# ================================================================
# Theano-like Function
# ================================================================ 
Developer ID: Stable-Baselines-Team, Project: stable-baselines, Lines of code: 18, Source file: tf_util.py

Example 10: build_optimization

# Required module: import tensorflow [as alias]
# Or: from tensorflow import variables_initializer [as alias]
def build_optimization(self, regularization_losses, *args, **kwargs):
        # kwargs allow overriding beta1, beta2, and epsilon of Adam;
        # they are passed as-is to Adam's initialization.
        assert self.adv_pred is not None, "build_model must be called before build_optimization"
        assert self.output_shape is not None

        self.attack_target =  tf.placeholder(tf.float32, shape=self.output_shape, name="attack_target")
        self.learning_rate = tf.placeholder(tf.float32, shape=(), name="learning_rate")
        self.reg_names = regularization_losses
        self.total_loss = self._build_loss(regularization_losses, self.adv_pred, self.attack_target, self.noise)
        
        with tf.name_scope("adamoptimizer"):
            self.optimization_op = tf.train.AdamOptimizer(learning_rate=self.learning_rate, **kwargs) \
                                                .minimize(self.total_loss, \
                                                var_list=[self.noise])

        self.init_adam = tf.variables_initializer(filter(lambda x: "adam" in x.name.lower(), tf.global_variables()))
        self.init_noise = tf.variables_initializer(set(tf.global_variables()) - set(self.model_vars))
        return self.optimization_op 
Developer ID: evtimovi, Project: robust_physical_perturbations, Lines of code: 21, Source file: attack_util.py

Example 11: fit

# Required module: import tensorflow [as alias]
# Or: from tensorflow import variables_initializer [as alias]
def fit(self, X):
        """
        Fit a model given data.
        :param X: array-like, shape = (n_samples, n_features)
        :return:
        """
        self.n_visible_units = X.shape[1]

        # Initialize RBM parameters
        self._build_model()

        sess.run(tf.variables_initializer([self.W, self.c, self.b]))

        if self.optimization_algorithm == 'sgd':
            self._stochastic_gradient_descent(X)
        else:
            raise ValueError("Invalid optimization algorithm.")
        return 
Developer ID: PacktPublishing, Project: Deep-Learning-with-TensorFlow-Second-Edition, Lines of code: 20, Source file: tf_models.py

Example 12: _stochastic_gradient_descent

# Required module: import tensorflow [as alias]
# Or: from tensorflow import variables_initializer [as alias]
def _stochastic_gradient_descent(self, _data):
        """
        Performs the stochastic gradient descent optimization algorithm.
        :param _data: array-like, shape = (n_samples, n_features)
        :return:
        """
        for iteration in range(1, self.n_epochs + 1):
            idx = np.random.permutation(len(_data))
            data = _data[idx]
            for batch in batch_generator(self.batch_size, data):
                if len(batch) < self.batch_size:
                    # Pad with zeros
                    pad = np.zeros((self.batch_size - batch.shape[0], batch.shape[1]), dtype=batch.dtype)
                    batch = np.vstack((batch, pad))
                sess.run(tf.variables_initializer(self.random_variables))  # Need to re-sample from uniform distribution
                sess.run([self.update_W, self.update_b, self.update_c],
                         feed_dict={self.visible_units_placeholder: batch})
            if self.verbose:
                error = self._compute_reconstruction_error(data)
                print(">> Epoch %d finished \tRBM Reconstruction error %f" % (iteration, error)) 
Developer ID: PacktPublishing, Project: Deep-Learning-with-TensorFlow-Second-Edition, Lines of code: 22, Source file: tf_models.py

Example 13: test_normalization

# Required module: import tensorflow [as alias]
# Or: from tensorflow import variables_initializer [as alias]
def test_normalization():
    embedding_initializer = tf.contrib.layers.xavier_initializer()

    embedding_layer = tf.get_variable('embeddings', shape=[1024, 100], initializer=embedding_initializer)
    unit_sphere_embeddings = constraints.unit_sphere(embedding_layer, norm=1.0)

    init_op = tf.variables_initializer([embedding_layer])

    with tf.Session() as session:
        for _ in range(256):
            session.run(init_op)

            embeddings = session.run(embedding_layer)

            # Use TensorFlow for normalizing the embeddings
            session.run(unit_sphere_embeddings)
            normalized_v1 = session.run(embedding_layer)

            # Use NumPy for normalizing the embeddings
            normalized_v2 = embeddings / np.linalg.norm(embeddings, axis=1).reshape((-1, 1))

            np.testing.assert_allclose(normalized_v1, normalized_v2, rtol=1e-6) 
Developer ID: uclnlp, Project: inferbeddings, Lines of code: 24, Source file: test_normalization.py

Example 14: _initialize_metrics

# Required module: import tensorflow [as alias]
# Or: from tensorflow import variables_initializer [as alias]
def _initialize_metrics(self):
        """ Initialize the model metrics """
        self.metrics = {}
        self.metric_values = {}
        self.update_metrics = {}
        self.reset_metrics = {}
        for data_scope in (Data.TRAIN, Data.VALIDATE, Data.TEST):
            metrics = self.collect_metrics(data_scope)
            self.metrics[data_scope] = metrics

            self.metric_values[data_scope] = {
                name: metric['scalar']
                for name, metric in iteritems(metrics)}

            self.update_metrics[data_scope] = [
                metric['update_op']
                for metric in itervalues(metrics)]

            metric_variables = []
            with stats_utils.metric_scope(data_scope, graph=self.graph) as scope:
                for local in tf.get_collection(tf.GraphKeys.LOCAL_VARIABLES, scope):
                    metric_variables.append(local)
            self.reset_metrics[data_scope] = tf.variables_initializer(metric_variables) 
Developer ID: dojoteef, Project: dvae, Lines of code: 25, Source file: trainer.py

Example 15: _init_variables

# Required module: import tensorflow [as alias]
# Or: from tensorflow import variables_initializer [as alias]
def _init_variables(self):
        """ Create the initialization operation for the variables """
        # The Adam optimizer uses two variables that can only be accessed through a protected
        # function, since the variables aren't scoped in any way. Trying to add a tf.variable_scope
        # around apply_gradients, where the variables are created, did not help.
        var_list = set(self.optimizer._get_beta_accumulators()) # pylint: disable=protected-access
        slot_names = self.optimizer.get_slot_names()
        for tower in self.towers:
            variables = tower.global_variables
            var_list.update(variables)

            for slot_name in slot_names:
                for variable in variables:
                    slot = self.optimizer.get_slot(variable, slot_name)
                    if slot is not None:
                        var_list.add(slot)

        # Initialize all the variables
        self.initialization_operation = tf.group(
            tf.variables_initializer(var_list),

            # Apparently local variables are not part of 'all' variables... go figure
            # This is needed for metrics for example
            tf.local_variables_initializer()) 
Developer ID: dojoteef, Project: dvae, Lines of code: 26, Source file: trainer.py


Note: The tensorflow.variables_initializer examples in this article were compiled by 纯净天空 from open-source code and documentation platforms such as GitHub and MSDocs. The snippets are taken from open-source projects contributed by various developers, and copyright of the source code remains with the original authors. Please follow each project's license when redistributing or using the code, and do not reproduce this article without permission.