本文整理汇总了Python中tensorflow.python.summary.summary.merge_all方法的典型用法代码示例。如果您正苦于以下问题:Python summary.merge_all方法的具体用法?Python summary.merge_all怎么用?Python summary.merge_all使用的例子?那么恭喜您, 这里精选的方法代码示例或许可以为您提供帮助。您也可以进一步了解该方法所在模块tensorflow.python.summary.summary的用法示例。
在下文中一共展示了summary.merge_all方法的10个代码示例,这些例子默认根据受欢迎程度排序。您可以为喜欢或者感觉有用的代码点赞,您的评价将有助于系统推荐出更棒的Python代码示例。
示例1: testTrainWithNoneAsLogdirWhenUsingSummariesRaisesError
# 需要导入模块: from tensorflow.python.summary import summary [as 别名]
# 或者: from tensorflow.python.summary.summary import merge_all [as 别名]
def testTrainWithNoneAsLogdirWhenUsingSummariesRaisesError(self):
with ops.Graph().as_default():
random_seed.set_random_seed(0)
tf_inputs = constant_op.constant(self._inputs, dtype=dtypes.float32)
tf_labels = constant_op.constant(self._labels, dtype=dtypes.float32)
tf_predictions = LogisticClassifier(tf_inputs)
loss_ops.log_loss(tf_predictions, tf_labels)
total_loss = loss_ops.get_total_loss()
summary.scalar('total_loss', total_loss)
optimizer = gradient_descent.GradientDescentOptimizer(learning_rate=1.0)
train_op = learning.create_train_op(total_loss, optimizer)
summary_op = summary.merge_all()
with self.assertRaises(ValueError):
learning.train(
train_op, None, number_of_steps=300, summary_op=summary_op)
示例2: testTrainWithNoneAsLogdirWhenUsingSummariesRaisesError
# 需要导入模块: from tensorflow.python.summary import summary [as 别名]
# 或者: from tensorflow.python.summary.summary import merge_all [as 别名]
def testTrainWithNoneAsLogdirWhenUsingSummariesRaisesError(self):
with ops.Graph().as_default():
random_seed.set_random_seed(0)
tf_inputs = tf.constant(self._inputs, dtype=tf.float32)
tf_labels = tf.constant(self._labels, dtype=tf.float32)
tf_predictions = LogisticClassifier(tf_inputs)
loss_ops.log_loss(tf_labels, tf_predictions)
total_loss = loss_ops.get_total_loss()
summary.scalar('total_loss', total_loss)
optimizer = gradient_descent.GradientDescentOptimizer(learning_rate=1.0)
train_op = learning.create_train_op(total_loss, optimizer)
summary_op = summary.merge_all()
with self.assertRaises(ValueError):
learning.train(
train_op, None, number_of_steps=300, summary_op=summary_op)
示例3: verify_scalar_summary_is_written
# 需要导入模块: from tensorflow.python.summary import summary [as 别名]
# 或者: from tensorflow.python.summary.summary import merge_all [as 别名]
def verify_scalar_summary_is_written(self, print_summary):
value = 3
tensor = array_ops.ones([]) * value
name = 'my_score'
prefix = 'eval'
summaries.add_scalar_summary(tensor, name, prefix, print_summary)
output_dir = tempfile.mkdtemp('scalar_summary_no_print_test')
summary_op = summary.merge_all()
summary_writer = summary.FileWriter(output_dir)
with self.cached_session() as sess:
new_summary = sess.run(summary_op)
summary_writer.add_summary(new_summary, 1)
summary_writer.flush()
self.assert_scalar_summary(output_dir, {
'%s/%s' % (prefix, name): value
})
示例4: _init_summary_op
# Required import: from tensorflow.python.summary import summary
# Alternatively: from tensorflow.python.summary.summary import merge_all
def _init_summary_op(self, summary_op=USE_DEFAULT):
    """Initializes summary_op.

    Args:
      summary_op: An Operation that returns a Summary for the event logs.
        If set to USE_DEFAULT, create an op that merges all the summaries.
    """
    if summary_op is Supervisor.USE_DEFAULT:
        # Prefer an op already registered in the SUMMARY_OP collection.
        found_op = self._get_first_op_from_collection(ops.GraphKeys.SUMMARY_OP)
        if found_op is None:
            # Nothing registered yet: merge every summary in the graph and,
            # if that produced an op, register it for later lookups.
            found_op = _summary.merge_all()
            if found_op is not None:
                ops.add_to_collection(ops.GraphKeys.SUMMARY_OP, found_op)
        summary_op = found_op
    # May remain None when the graph defines no summaries at all.
    self._summary_op = summary_op
示例5: begin
# 需要导入模块: from tensorflow.python.summary import summary [as 别名]
# 或者: from tensorflow.python.summary.summary import merge_all [as 别名]
def begin(self):
if self._replace_summary_op:
self._summary_op = summary.merge_all()
self._global_step = variables.get_or_create_global_step()
示例6: begin
# 需要导入模块: from tensorflow.python.summary import summary [as 别名]
# 或者: from tensorflow.python.summary.summary import merge_all [as 别名]
def begin(self):
if self._summary_op is None:
self._summary_op = summary.merge_all()
示例7: begin
# 需要导入模块: from tensorflow.python.summary import summary [as 别名]
# 或者: from tensorflow.python.summary.summary import merge_all [as 别名]
def begin(self):
if self._replace_summary_op:
# This can still remain None if there are no summaries.
self._summary_op = summary.merge_all()
self._global_step = training_util.get_or_create_global_step()
示例8: finalize
# 需要导入模块: from tensorflow.python.summary import summary [as 别名]
# 或者: from tensorflow.python.summary.summary import merge_all [as 别名]
def finalize(self):
"""Creates operations if needed and finalizes the graph."""
if self._init_op is None:
def default_init_op():
return control_flow_ops.group(
variables.global_variables_initializer(),
resources.initialize_resources(resources.shared_resources()))
self._init_op = Scaffold.get_or_default(
'init_op',
ops.GraphKeys.INIT_OP,
default_init_op)
if self._ready_op is None:
def default_ready_op():
return array_ops.concat([
variables.report_uninitialized_variables(),
resources.report_uninitialized_resources()
], 0)
self._ready_op = Scaffold.get_or_default(
'ready_op', ops.GraphKeys.READY_OP,
default_ready_op)
if self._ready_for_local_init_op is None:
def default_ready_for_local_init_op():
return variables.report_uninitialized_variables(
variables.global_variables())
self._ready_for_local_init_op = Scaffold.get_or_default(
'ready_for_local_init_op', ops.GraphKeys.READY_FOR_LOCAL_INIT_OP,
default_ready_for_local_init_op)
if self._local_init_op is None:
self._local_init_op = Scaffold.get_or_default(
'local_init_op', ops.GraphKeys.LOCAL_INIT_OP,
Scaffold._default_local_init_op)
if self._summary_op is None:
self._summary_op = Scaffold.get_or_default('summary_op',
ops.GraphKeys.SUMMARY_OP,
summary.merge_all)
# pylint: disable=g-long-lambda
if self._saver is None:
self._saver = training_saver._get_saver_or_default() # pylint: disable=protected-access
# pylint: enable=g-long-lambda
self._saver.build()
ops.get_default_graph().finalize()
return self
示例9: finalize
# 需要导入模块: from tensorflow.python.summary import summary [as 别名]
# 或者: from tensorflow.python.summary.summary import merge_all [as 别名]
def finalize(self):
"""Creates operations if needed and finalizes the graph."""
if self._init_op is None:
def default_init_op():
return control_flow_ops.group(
variables.global_variables_initializer(),
resources.initialize_resources(resources.shared_resources()))
self._init_op = Scaffold.get_or_default(
'init_op',
ops.GraphKeys.INIT_OP,
default_init_op)
if self._ready_op is None:
def default_ready_op():
return array_ops.concat([
variables.report_uninitialized_variables(),
resources.report_uninitialized_resources()
], 0)
self._ready_op = Scaffold.get_or_default(
'ready_op', ops.GraphKeys.READY_OP,
default_ready_op)
if self._ready_for_local_init_op is None:
def default_ready_for_local_init_op():
return variables.report_uninitialized_variables(
variables.global_variables())
self._ready_for_local_init_op = Scaffold.get_or_default(
'ready_for_local_init_op', ops.GraphKeys.READY_FOR_LOCAL_INIT_OP,
default_ready_for_local_init_op)
if self._local_init_op is None:
self._local_init_op = Scaffold.get_or_default(
'local_init_op', ops.GraphKeys.LOCAL_INIT_OP,
Scaffold._default_local_init_op)
if self._summary_op is None:
self._summary_op = Scaffold.get_or_default('summary_op',
ops.GraphKeys.SUMMARY_OP,
summary.merge_all)
# pylint: disable=g-long-lambda
if self._saver is None:
self._saver = Scaffold.get_or_default(
'saver',
ops.GraphKeys.SAVERS,
lambda: training_saver.Saver(sharded=True, allow_empty=True,
write_version=saver_pb2.SaverDef.V2))
# pylint: enable=g-long-lambda
self._saver.build()
ops.get_default_graph().finalize()
return self
示例10: set_model
# 需要导入模块: from tensorflow.python.summary import summary [as 别名]
# 或者: from tensorflow.python.summary.summary import merge_all [as 别名]
def set_model(self, model):
self.model = model
self.sess = K.get_session()
if self.histogram_freq and self.merged is None:
for layer in self.model.layers:
for weight in layer.weights:
mapped_weight_name = weight.name.replace(':', '_')
tf_summary.histogram(mapped_weight_name, weight)
if self.write_grads:
grads = model.optimizer.get_gradients(model.total_loss, weight)
def is_indexed_slices(grad):
return type(grad).__name__ == 'IndexedSlices'
grads = [grad.values if is_indexed_slices(grad) else grad
for grad in grads]
tf_summary.histogram('{}_grad'.format(mapped_weight_name), grads)
if self.write_images:
w_img = array_ops.squeeze(weight)
shape = K.int_shape(w_img)
if len(shape) == 2: # dense layer kernel case
if shape[0] > shape[1]:
w_img = array_ops.transpose(w_img)
shape = K.int_shape(w_img)
w_img = array_ops.reshape(w_img, [1, shape[0], shape[1], 1])
elif len(shape) == 3: # convnet case
if K.image_data_format() == 'channels_last':
# switch to channels_first to display
# every kernel as a separate image
w_img = array_ops.transpose(w_img, perm=[2, 0, 1])
shape = K.int_shape(w_img)
w_img = array_ops.reshape(w_img,
[shape[0], shape[1], shape[2], 1])
elif len(shape) == 1: # bias case
w_img = array_ops.reshape(w_img, [1, shape[0], 1, 1])
else:
# not possible to handle 3D convnets etc.
continue
shape = K.int_shape(w_img)
assert len(shape) == 4 and shape[-1] in [1, 3, 4]
tf_summary.image(mapped_weight_name, w_img)
if hasattr(layer, 'output'):
tf_summary.histogram('{}_out'.format(layer.name), layer.output)
self.merged = tf_summary.merge_all()
if self.write_graph:
self.writer = tf_summary.FileWriter(self.log_dir, self.sess.graph)
else:
self.writer = tf_summary.FileWriter(self.log_dir)
开发者ID:PacktPublishing,项目名称:Serverless-Deep-Learning-with-TensorFlow-and-AWS-Lambda,代码行数:53,代码来源:callbacks.py