本文整理汇总了Python中tensorflow.python.ops.variables.global_variables方法的典型用法代码示例。如果您正苦于以下问题:Python variables.global_variables方法的具体用法?Python variables.global_variables怎么用?Python variables.global_variables使用的例子?那么恭喜您, 这里精选的方法代码示例或许可以为您提供帮助。您也可以进一步了解该方法所在类tensorflow.python.ops.variables
的用法示例。
在下文中一共展示了variables.global_variables方法的15个代码示例,这些例子默认根据受欢迎程度排序。您可以为喜欢或者感觉有用的代码点赞,您的评价将有助于系统推荐出更棒的Python代码示例。
示例1: _initialize_variables
# 需要导入模块: from tensorflow.python.ops import variables [as 别名]
# 或者: from tensorflow.python.ops.variables import global_variables [as 别名]
def _initialize_variables(session):
    """Initialize any global variables not yet marked as initialized.

    Args:
        session: TensorFlow `Session` used to query initialization state
            and to run the initializer op.

    Side effects:
        Sets the private flag `_keras_initialized = True` on every
        candidate variable, so subsequent calls skip them.
    """
    variables = variables_module.global_variables()
    candidate_vars = [
        v for v in variables if not getattr(v, '_keras_initialized', False)
    ]
    # This step is expensive, so we only run it on variables not already
    # marked as initialized — and skip the session round-trip entirely
    # when there is nothing left to check (Session.run raises ValueError
    # on an empty fetch list).
    if not candidate_vars:
        return
    is_initialized = session.run(
        [variables_module.is_variable_initialized(v) for v in candidate_vars])
    uninitialized_vars = []
    for flag, v in zip(is_initialized, candidate_vars):
        if not flag:
            uninitialized_vars.append(v)
        v._keras_initialized = True
    if uninitialized_vars:
        session.run(variables_module.variables_initializer(uninitialized_vars))
开发者ID:PacktPublishing,项目名称:Serverless-Deep-Learning-with-TensorFlow-and-AWS-Lambda,代码行数:20,代码来源:backend.py
示例2: _init_saver
# 需要导入模块: from tensorflow.python.ops import variables [as 别名]
# 或者: from tensorflow.python.ops.variables import global_variables [as 别名]
def _init_saver(self, saver=USE_DEFAULT):
    """Initializes saver.

    Args:
      saver: A `Saver` object. If set to USE_DEFAULT, create one that
        saves all the variables.
    """
    # Caller supplied an explicit saver (possibly None): use it as-is.
    if saver is not Supervisor.USE_DEFAULT:
        self._saver = saver
        return
    # Prefer a saver already registered in the SAVERS collection;
    # otherwise build a default one, but only when the graph actually
    # has global variables worth saving.
    default_saver = self._get_first_op_from_collection(ops.GraphKeys.SAVERS)
    if default_saver is None and variables.global_variables():
        default_saver = saver_mod.Saver()
        ops.add_to_collection(ops.GraphKeys.SAVERS, default_saver)
    self._saver = default_saver
示例3: _initialize_variables
# 需要导入模块: from tensorflow.python.ops import variables [as 别名]
# 或者: from tensorflow.python.ops.variables import global_variables [as 别名]
def _initialize_variables():
    """Initialize every global variable not yet flagged as Keras-initialized.

    Marks each such variable with `_keras_initialized = True` and runs a
    single initializer op over the whole batch in the shared session.
    """
    pending = [
        v for v in variables_module.global_variables()
        if not getattr(v, '_keras_initialized', False)
    ]
    for v in pending:
        v._keras_initialized = True
    if pending:
        # Fetch the session lazily — only needed when something must run.
        get_session().run(variables_module.variables_initializer(pending))
示例4: _get_saver
# 需要导入模块: from tensorflow.python.ops import variables [as 别名]
# 或者: from tensorflow.python.ops.variables import global_variables [as 别名]
def _get_saver():
    """Lazy init and return saver."""
    # The collection helper may yield None or a (possibly empty) sequence;
    # normalize both cases down to a single saver or None.
    registered = _get_first_op_from_collection(ops.GraphKeys.SAVERS)
    saver = registered[0] if registered else None
    if saver is None and variables.global_variables():
        # Nothing registered yet: create a default saver and cache it in
        # the SAVERS collection for subsequent lookups.
        saver = tf_saver.Saver()
        ops.add_to_collection(ops.GraphKeys.SAVERS, saver)
    return saver
示例5: _get_saver
# 需要导入模块: from tensorflow.python.ops import variables [as 别名]
# 或者: from tensorflow.python.ops.variables import global_variables [as 别名]
def _get_saver():
    """Lazy init and return saver."""
    saver = _get_first_op_from_collection(ops.GraphKeys.SAVERS)
    if saver is not None:
        # Reuse the saver already cached in the SAVERS collection.
        return saver
    if variables.global_variables():
        # Create a default saver and register it for future calls.
        saver = tf_saver.Saver()
        ops.add_to_collection(ops.GraphKeys.SAVERS, saver)
    return saver
示例6: testTrainWithInitFromFn
# 需要导入模块: from tensorflow.python.ops import variables [as 别名]
# 或者: from tensorflow.python.ops.variables import global_variables [as 别名]
def testTrainWithInitFromFn(self):
    """End-to-end check that `learning.train` honors a custom `init_fn`.

    Trains one step (loss high), trains 300 steps to convergence
    (loss low), then restores that checkpoint through an `init_fn`
    in a fresh graph and verifies the loss is still low.
    """
    logdir1 = os.path.join(
        tempfile.mkdtemp(prefix=self.get_temp_dir()), 'tmp_logs1')
    logdir2 = os.path.join(
        tempfile.mkdtemp(prefix=self.get_temp_dir()), 'tmp_logs2')
    # First, train the model one step (make sure the error is high).
    with ops.Graph().as_default():
        random_seed.set_random_seed(0)
        train_op = self.create_train_op()
        loss = learning.train(train_op, logdir1, number_of_steps=1)
        self.assertGreater(loss, .5)
    # Next, train the model to convergence.
    with ops.Graph().as_default():
        random_seed.set_random_seed(1)
        train_op = self.create_train_op()
        loss = learning.train(
            train_op, logdir1, number_of_steps=300, log_every_n_steps=10)
        self.assertIsNotNone(loss)
        self.assertLess(loss, .015)
    # Finally, advance the model a single step and validate that the loss is
    # still low.
    with ops.Graph().as_default():
        random_seed.set_random_seed(2)
        train_op = self.create_train_op()
        model_variables = variables_lib.global_variables()
        # Checkpoint written by the 300-step run above.
        model_path = os.path.join(logdir1, 'model.ckpt-300')
        saver = saver_lib.Saver(model_variables)

        def RestoreFn(sess):
            # init_fn: restore converged weights instead of a fresh init.
            saver.restore(sess, model_path)

        loss = learning.train(
            train_op, logdir2, number_of_steps=1, init_fn=RestoreFn)
        self.assertIsNotNone(loss)
        self.assertLess(loss, .015)
示例7: DISABLED_testShared
# 需要导入模块: from tensorflow.python.ops import variables [as 别名]
# 或者: from tensorflow.python.ops.variables import global_variables [as 别名]
def DISABLED_testShared(self):
    """Shared() should make piped copies of a layer reuse variables.

    A fully-connected layer owns 2 variables (weights + bias); chaining
    the shared layer four times must still create exactly 2 globals.
    NOTE(review): statement nesting reconstructed from stripped
    indentation — confirm against the original specs test.
    """
    with self.test_session():
        with specs.ops:
            # pylint: disable=undefined-variable
            f = Shared(Fr(100))
            g = f | f | f | f
        inputs = constant_op.constant(_rand(10, 100))
        _ = g.funcall(inputs)
        self.assertEqual(len(variables.global_variables()), 2)
示例8: testLocalVariableNotInAllVariables
# 需要导入模块: from tensorflow.python.ops import variables [as 别名]
# 或者: from tensorflow.python.ops.variables import global_variables [as 别名]
def testLocalVariableNotInAllVariables(self):
    """A local variable must appear only in the LOCAL_VARIABLES collection."""
    with self.cached_session():
        with variable_scope.variable_scope('A'):
            a = variables_lib2.local_variable(0)
            self.assertNotIn(a, variables_lib.global_variables())
            self.assertIn(a, variables_lib.local_variables())
示例9: test_global_variable
# 需要导入模块: from tensorflow.python.ops import variables [as 别名]
# 或者: from tensorflow.python.ops.variables import global_variables [as 别名]
def test_global_variable(self):
    """global_variable() registers in GLOBAL_VARIABLES and needs init."""
    with self.cached_session() as sess:
        # Graph starts with no global variables.
        self.assertEqual([], variables_lib.global_variables())
        value0 = 42
        variables_lib2.global_variable(value0)
        value1 = 43
        variables_lib2.global_variable(value1)
        variables = variables_lib.global_variables()
        self.assertEqual(2, len(variables))
        # Reading before running the initializer must fail.
        with self.assertRaises(errors_impl.FailedPreconditionError):
            sess.run(variables)
        variables_lib.variables_initializer(variables).run()
        # Collection order is not asserted, so compare as sets.
        self.assertAllEqual(set([value0, value1]), set(sess.run(variables)))
示例10: testVariableNameAndShape
# 需要导入模块: from tensorflow.python.ops import variables [as 别名]
# 或者: from tensorflow.python.ops.variables import global_variables [as 别名]
def testVariableNameAndShape(self):
    """global_variable() respects the enclosing scope name and value shape."""
    with self.cached_session():
        with variable_scope.variable_scope('A'):
            a = variables_lib2.global_variable([1, 1, 1, 1, 1], name='a')
            # Name is prefixed by the 'A' variable scope.
            self.assertEqual(a.op.name, 'A/a')
            self.assertListEqual(a.get_shape().as_list(), [5])
            self.assertListEqual([a], variables_lib.global_variables())
示例11: testGlobalVariableNotInLocalVariables
# 需要导入模块: from tensorflow.python.ops import variables [as 别名]
# 或者: from tensorflow.python.ops.variables import global_variables [as 别名]
def testGlobalVariableNotInLocalVariables(self):
    """A global variable must appear only in the GLOBAL_VARIABLES collection."""
    with self.cached_session():
        with variable_scope.variable_scope('A'):
            a = variables_lib2.global_variable(0)
            self.assertNotIn(a, variables_lib.local_variables())
            self.assertIn(a, variables_lib.global_variables())
示例12: testNotInLocalVariables
# 需要导入模块: from tensorflow.python.ops import variables [as 别名]
# 或者: from tensorflow.python.ops.variables import global_variables [as 别名]
def testNotInLocalVariables(self):
    """model_variable() lands in GLOBAL and MODEL collections, not LOCAL."""
    with self.cached_session():
        with variable_scope.variable_scope('A'):
            a = variables_lib2.model_variable('a', [5])
            self.assertIn(a, variables_lib.global_variables())
            self.assertIn(a, ops.get_collection(ops.GraphKeys.MODEL_VARIABLES))
            self.assertNotIn(a, variables_lib.local_variables())
示例13: testAverages
# 需要导入模块: from tensorflow.python.ops import variables [as 别名]
# 或者: from tensorflow.python.ops.variables import global_variables [as 别名]
def testAverages(self):
    """adaptive_clipping_fn keeps moving averages of the gradient log-norm.

    NOTE(review): the loop/post-loop placement of the final two
    assertions was reconstructed from stripped indentation — confirm
    against the original optimizers test.
    """
    with self.cached_session() as session:
        scale = 2.
        grad = array_ops.ones([3, 4]) * scale
        # Expected log of the gradient's global norm.
        log_norm = np.log(np.sqrt(scale**2 * grad.get_shape().num_elements()))
        # The gradient tensor doubles as the "variable" slot for this test.
        grads_and_vars = [(grad, grad)]
        grads_and_vars = optimizers_lib.adaptive_clipping_fn(
            decay=0.5)(grads_and_vars)

        # Collect the moving-average variables created by the clipper.
        var_dict = {}
        for var in variables.global_variables():
            if var.name.startswith("AdaptiveMaxNorm"):
                var_dict[var.name.split(":")[0]] = var
        self.assertEqual(2, len(var_dict))
        moving_mean = var_dict["AdaptiveMaxNorm/mean"]
        moving_sq_mean = var_dict["AdaptiveMaxNorm/sq_mean"]
        variables.global_variables_initializer().run()
        mean, sq_mean = session.run([moving_mean, moving_sq_mean])
        # Averages start at zero before any update is applied.
        self.assertEqual([0], mean)
        self.assertEqual([0], sq_mean)
        for i in range(20):
            mean, sq_mean, _ = session.run(
                [moving_mean, moving_sq_mean, grads_and_vars[0][0]])
            if i == 0:
                # After a single update the averages are still well below
                # their limiting values.
                self.assertLess(mean, 0.9 * log_norm)
                self.assertLess(sq_mean, 0.9 * log_norm**2)

        # After 20 updates with decay 0.5 the averages have converged.
        self.assertAlmostEqual(float(mean), log_norm, places=4)
        self.assertAlmostEqual(float(sq_mean), log_norm**2, places=4)
示例14: testReuse
# 需要导入模块: from tensorflow.python.ops import variables [as 别名]
# 或者: from tensorflow.python.ops.variables import global_variables [as 别名]
def testReuse(self):
    """rev_block under reuse=True must not create new variables."""

    def f(x):
        # First half of the reversible pair: dense to half the channels.
        return core_layers.dense(x, self.CHANNELS // 2)

    def g(x):
        # Second half of the reversible pair, same shape as f.
        return core_layers.dense(x, self.CHANNELS // 2)

    x = random_ops.random_uniform(
        [self.BATCH_SIZE, self.CHANNELS], dtype=dtypes.float32)
    x1, x2 = array_ops.split(x, 2, axis=-1)

    # First construction creates the variables.
    with variable_scope.variable_scope("test"):
        y1, y2 = rev_block_lib.rev_block(x1, x2, f, g, num_layers=self.NUM_LAYERS)
    num_vars_before = len(variables.global_variables())

    # Rebuilding with reuse=True must not grow the variable count.
    with variable_scope.variable_scope("test", reuse=True):
        y1, y2 = rev_block_lib.rev_block(x1, x2, f, g, num_layers=self.NUM_LAYERS)
    num_vars_after = len(variables.global_variables())
    self.assertEqual(num_vars_before, num_vars_after)

    loss = math_ops.reduce_mean(y1 + y2)
    # Gradients must be constructible through the reused block.
    _ = gradients_impl.gradients(loss,
                                 [x] + variables.trainable_variables())

    # Still no new variables after building gradients and reusing again.
    with variable_scope.variable_scope("test", reuse=True):
        y1, y2 = rev_block_lib.rev_block(x1, x2, f, g, num_layers=self.NUM_LAYERS)
    num_vars_after = len(variables.global_variables())
    self.assertEqual(num_vars_before, num_vars_after)
示例15: testTrainWithInitFromFn
# 需要导入模块: from tensorflow.python.ops import variables [as 别名]
# 或者: from tensorflow.python.ops.variables import global_variables [as 别名]
def testTrainWithInitFromFn(self):
    """End-to-end check that `learning.train` honors a custom `init_fn`.

    Trains one step (loss high), trains 300 steps to convergence
    (loss low), then restores that checkpoint through an `init_fn`
    in a fresh graph and verifies the loss is still low.
    """
    logdir1 = tempfile.mkdtemp('tmp_logs1')
    logdir2 = tempfile.mkdtemp('tmp_logs2')
    # First, train the model one step (make sure the error is high).
    with ops.Graph().as_default():
        random_seed.set_random_seed(0)
        train_op = self.create_train_op()
        loss = learning.train(train_op, logdir1, number_of_steps=1)
        self.assertGreater(loss, .5)
    # Next, train the model to convergence.
    with ops.Graph().as_default():
        random_seed.set_random_seed(1)
        train_op = self.create_train_op()
        loss = learning.train(
            train_op, logdir1, number_of_steps=300, log_every_n_steps=10)
        self.assertIsNotNone(loss)
        self.assertLess(loss, .015)
    # Finally, advance the model a single step and validate that the loss is
    # still low.
    with ops.Graph().as_default():
        random_seed.set_random_seed(2)
        train_op = self.create_train_op()
        model_variables = variables_lib.global_variables()
        # Checkpoint written by the 300-step run above.
        model_path = os.path.join(logdir1, 'model.ckpt-300')
        saver = saver_lib.Saver(model_variables)

        def RestoreFn(sess):
            # init_fn: restore converged weights instead of a fresh init.
            saver.restore(sess, model_path)

        loss = learning.train(
            train_op, logdir2, number_of_steps=1, init_fn=RestoreFn)
        self.assertIsNotNone(loss)
        self.assertLess(loss, .015)