This article collects typical usage examples of tensorflow.summary in Python. If you are wondering what tensorflow.summary does, how to call it, or what real-world usage looks like, the curated code examples below may help. You can also explore further usage examples of the enclosing tensorflow module.
The following shows 15 code examples of tensorflow.summary, sorted by popularity by default.
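Before the individual examples, here is a minimal, self-contained sketch of the classic graph-mode workflow that most of the snippets below build on, assuming the TF 1.x API; the log directory and the toy loss below are placeholders chosen purely for illustration.

import tensorflow as tf  # assumes TensorFlow 1.x (graph mode)

# Attach a scalar summary to a tensor in the graph.
x = tf.placeholder(tf.float32, shape=(), name="x")
loss = tf.square(x)                       # toy "loss" for illustration
tf.summary.scalar("loss", loss)

# Merge every registered summary op and write the results for TensorBoard.
merged = tf.summary.merge_all()
with tf.Session() as sess:
    writer = tf.summary.FileWriter("/tmp/tf_summary_demo", sess.graph)  # hypothetical log dir
    for step in range(3):
        summary_proto, _ = sess.run([merged, loss], feed_dict={x: float(step)})
        writer.add_summary(summary_proto, global_step=step)
    writer.close()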
Example 1: propagate_summary
# Required import: import tensorflow [as alias]
# Or: from tensorflow import summary [as alias]
def propagate_summary(self, summary_key):
    """
    Propagates a single summary op of this Component to its parents' summaries registries.

    Args:
        summary_key (str): The lookup key for the summary to propagate.
    """
    # Return if there is no parent.
    if self.parent_component is None:
        return

    # If already there -> Error.
    if summary_key in self.parent_component.summaries:
        raise RLGraphError("ERROR: Summary registry of '{}' already has a summary under key '{}'!".
                           format(self.parent_component.name, summary_key))
    self.parent_component.summaries[summary_key] = self.summaries[summary_key]

    # Recurse up the container hierarchy.
    self.parent_component.propagate_summary(summary_key)
Example 2: set_summary
# Required import: import tensorflow [as alias]
# Or: from tensorflow import summary [as alias]
def set_summary(self, summary_op, summary_description=None, collections=None):
    """Annotates a tensor with a tf.summary operation.

    This causes self.out_tensor to be logged to Tensorboard.

    Parameters
    ----------
    summary_op: str
        summary operation to annotate node
    summary_description: object, optional
        Optional summary_pb2.SummaryDescription()
    collections: list of graph collections keys, optional
        New summary op is added to these collections. Defaults to [GraphKeys.SUMMARIES].
    """
    supported_ops = {'tensor_summary', 'scalar', 'histogram'}
    if summary_op not in supported_ops:
        raise ValueError(
            "Invalid summary_op arg. Only 'tensor_summary', 'scalar', 'histogram' supported"
        )
    self.summary_op = summary_op
    self.summary_description = summary_description
    self.collections = collections
    self.tensorboard = True
Example 3: add_summary_to_tg
# Required import: import tensorflow [as alias]
# Or: from tensorflow import summary [as alias]
def add_summary_to_tg(self, tb_input=None):
    """
    Create the summary operation for this layer, if set_summary() has been called on it.

    Can only be called after self.create_layer to guarantee that name is not None.

    Parameters
    ----------
    tb_input: tensor
        the tensor to log to Tensorboard. If None, self.out_tensor is used.
    """
    if not self.tensorboard:
        return
    if tb_input is None:
        tb_input = self.out_tensor

    if self.summary_op == "tensor_summary":
        tf.summary.tensor_summary(self.name, tb_input, self.summary_description,
                                  self.collections)
    elif self.summary_op == 'scalar':
        tf.summary.scalar(self.name, tb_input, self.collections)
    elif self.summary_op == 'histogram':
        tf.summary.histogram(self.name, tb_input, self.collections)
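The three branches above map directly onto functions of the TF 1.x summary module. As a rough standalone sketch (the tensor and summary names below are invented for illustration):

import tensorflow as tf  # TF 1.x graph-mode API assumed

t = tf.constant([1.0, 2.0, 3.0], name="activations")
tf.summary.tensor_summary("activations_tensor", t)        # serializes the full tensor
tf.summary.scalar("activations_mean", tf.reduce_mean(t))  # one float per step
tf.summary.histogram("activations_hist", t)               # value distribution per step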
Example 4: summarize_features
# Required import: import tensorflow [as alias]
# Or: from tensorflow import summary [as alias]
def summarize_features(features, num_shards=1):
    """Generate summaries for features."""
    if not common_layers.should_generate_summaries():
        return

    with tf.name_scope("input_stats"):
        for (k, v) in sorted(six.iteritems(features)):
            if (isinstance(v, tf.Tensor) and (v.get_shape().ndims > 1) and
                    (v.dtype != tf.string)):
                tf.summary.scalar("%s_batch" % k, tf.shape(v)[0] // num_shards)
                tf.summary.scalar("%s_length" % k, tf.shape(v)[1])
                nonpadding = tf.to_float(tf.not_equal(v, 0))
                nonpadding_tokens = tf.reduce_sum(nonpadding)
                tf.summary.scalar("%s_nonpadding_tokens" % k, nonpadding_tokens)
                tf.summary.scalar("%s_nonpadding_fraction" % k,
                                  tf.reduce_mean(nonpadding))
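For intuition, the non-padding statistics reported above can be reproduced on a toy padded batch; the id tensor below is invented, and padding is assumed to be id 0, as in the snippet.

import tensorflow as tf  # TF 1.x

ids = tf.constant([[5, 7, 0, 0], [3, 0, 0, 0]])    # batch of 2 sequences, length 4
nonpadding = tf.to_float(tf.not_equal(ids, 0))
tf.summary.scalar("targets_nonpadding_tokens", tf.reduce_sum(nonpadding))     # -> 3.0
tf.summary.scalar("targets_nonpadding_fraction", tf.reduce_mean(nonpadding))  # -> 0.375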
Example 5: estimator_spec_eval
# Required import: import tensorflow [as alias]
# Or: from tensorflow import summary [as alias]
def estimator_spec_eval(self, features, logits, labels, loss, losses_dict):
    """Constructs `tf.estimator.EstimatorSpec` for EVAL (evaluation) mode."""
    estimator_spec = super(TransformerAE, self).estimator_spec_eval(
        features, logits, labels, loss, losses_dict)
    if common_layers.is_xla_compiled():
        # For TPUs (and XLA more broadly?), do not add summary hooks that depend
        # on losses; they are not supported.
        return estimator_spec

    summary_op = tf.get_collection(tf.GraphKeys.SUMMARIES, scope="losses")
    summary_op.extend(tf.get_collection(tf.GraphKeys.SUMMARIES, scope="loss"))
    summary_op.append(tf.summary.scalar("loss", loss))
    summary_saver_hook = tf.train.SummarySaverHook(
        save_steps=100,
        summary_op=summary_op,
        output_dir=os.path.join(self.hparams.model_dir, "eval"))
    hooks = list(estimator_spec.evaluation_hooks)
    hooks.append(summary_saver_hook)
    return estimator_spec._replace(evaluation_hooks=hooks)
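Outside of an Estimator, the same tf.train.SummarySaverHook can be attached to a MonitoredTrainingSession. A rough sketch under that assumption, with invented ops, step counts and log directory:

import tensorflow as tf  # TF 1.x

global_step = tf.train.get_or_create_global_step()  # SummarySaverHook expects a global step
loss = tf.random_uniform(())                         # stand-in for a real loss tensor
summary_op = tf.summary.scalar("loss", loss)
train_op = tf.assign_add(global_step, 1)             # stand-in for a real train op

hook = tf.train.SummarySaverHook(
    save_steps=10,                                   # write summaries every 10 steps
    output_dir="/tmp/summary_hook_demo",             # hypothetical log dir
    summary_op=summary_op)

with tf.train.MonitoredTrainingSession(hooks=[hook]) as sess:
    for _ in range(30):
        sess.run(train_op)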
Example 6: on_epoch_begin
# Required import: import tensorflow [as alias]
# Or: from tensorflow import summary [as alias]
def on_epoch_begin(self, epoch, logs=None):
    """Add user-defined ops to Model eval_function callbacks, reset batch count."""
    # Check whether the histogram summary should be run for this epoch.
    if self.user_defined_freq and epoch % self.user_defined_freq == 0:
        self._epoch = epoch
        # pylint: disable=protected-access
        # Add the user-defined summary ops if they should run this epoch.
        self.model._make_eval_function()
        if self.merged not in self.model._eval_function.fetches:
            self.model._eval_function.fetches.append(self.merged)
        self.model._eval_function.fetch_callbacks[
            self.merged] = self._fetch_callback
        # pylint: enable=protected-access

    super(CustomTensorBoard, self).on_epoch_begin(epoch, logs=None)
Example 7: test_tf_summary_export
# Required import: import tensorflow [as alias]
# Or: from tensorflow import summary [as alias]
def test_tf_summary_export(self):
    # Ensure that TF wasn't already imported, since we want this test to cover
    # the entire flow of "import tensorflow; use tf.summary" and if TF was in
    # fact already imported that reduces the comprehensiveness of the test.
    # This means this test has to be kept in its own file and that no other
    # test methods in this file should import tensorflow.
    self.assertEqual("notfound", sys.modules.get("tensorflow", "notfound"))
    import tensorflow as tf

    if not tf.__version__.startswith("2."):
        if hasattr(tf, "compat") and hasattr(tf.compat, "v2"):
            tf = tf.compat.v2
        else:
            self.skipTest("TF v2 summary API not available")

    # Check that tf.summary contains both TB-provided and TF-provided symbols.
    expected_symbols = frozenset(
        ["scalar", "image", "audio", "histogram", "text"]
        + ["write", "create_file_writer", "SummaryWriter"]
    )
    self.assertLessEqual(expected_symbols, frozenset(dir(tf.summary)))

    # Ensure we can dereference symbols as well.
    print(tf.summary.scalar)
    print(tf.summary.write)
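If the TF 2.x summary API exercised by this test is what you have available, the equivalent eager-mode usage looks roughly like this (log directory and values are placeholders):

import tensorflow as tf  # assumes TF 2.x

writer = tf.summary.create_file_writer("/tmp/tf2_summary_demo")  # hypothetical log dir
with writer.as_default():
    for step in range(3):
        tf.summary.scalar("loss", 1.0 / (step + 1), step=step)
        tf.summary.histogram("weights", tf.random.normal([100]), step=step)
writer.flush()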
Example 8: _try_listen_tf_v1
# Required import: import tensorflow [as alias]
# Or: from tensorflow import summary [as alias]
def _try_listen_tf_v1(self):
    if not _tf_version().startswith("1."):
        raise util.TryFailed()
    try:
        # pylint: disable=import-error,no-name-in-module
        from tensorflow.compat.v1.summary import FileWriter
    except Exception as e:
        self.log.debug(
            "error importing tensorflow.compat.v1.summary.FileWriter: %s", e
        )
        raise util.TryFailed()
    else:
        self.log.debug(
            "wrapping tensorflow.compat.v1.summary.FileWriter.add_summary"
        )
        python_util.listen_method(FileWriter, "add_summary", self._handle_summary)
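What this hook ultimately intercepts are plain add_summary calls on the v1 FileWriter. A minimal sketch of such a call with a hand-built Summary proto (tag, value and log directory are made up):

import tensorflow as tf

summary = tf.compat.v1.Summary(
    value=[tf.compat.v1.Summary.Value(tag="loss", simple_value=0.25)])

writer = tf.compat.v1.summary.FileWriter("/tmp/v1_writer_demo")  # hypothetical log dir
writer.add_summary(summary, global_step=1)  # the method being wrapped above
writer.close()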
Example 9: execute
# Required import: import tensorflow [as alias]
# Or: from tensorflow import summary [as alias]
def execute(self, *api_method_calls):
    # Fetch inputs for the different API-methods.
    fetch_dict, feed_dict = self.graph_builder.get_execution_inputs(*api_method_calls)

    for api_name in fetch_dict.keys():
        if api_name in self.summary_ops:
            fetch_dict[api_name].append(self.summary_ops[api_name])

    fetch_dict["__GLOBAL_TRAINING_TIMESTEP"] = self.global_training_timestep
    ret = self.monitored_session.run(
        fetch_dict, feed_dict=feed_dict, options=self.tf_session_options, run_metadata=self.run_metadata
    )
    global_training_timestep_value = ret["__GLOBAL_TRAINING_TIMESTEP"]
    del ret["__GLOBAL_TRAINING_TIMESTEP"]

    for api_name in fetch_dict.keys():
        if api_name in self.summary_ops:
            assert len(ret[api_name]) > 1, "Expected multiple values, but {} found".format(len(ret[api_name]))
            summary = ret[api_name].pop()
            # Assuming that all API methods are on the training timesteps.
            self.summary_writer.add_summary(summary, global_training_timestep_value)

    if self.profiling_enabled:
        self.update_profiler_if_necessary()
    if self.timeline_enabled:
        self.update_timeline_if_necessary()

    # Return single values instead of lists of one item, but keep inner dicts as they are.
    ret = {key: (value[0] if len(ret[key]) == 1 and not isinstance(ret[key], dict) else tuple(value)
                 if not isinstance(value, dict) else value) for key, value in ret.items()}

    # If only one API-method was called, unwrap the single entry.
    if len(api_method_calls) == 1:
        ret = ret[next(iter(ret))]
    return ret
Example 10: register_variables
# Required import: import tensorflow [as alias]
# Or: from tensorflow import summary [as alias]
def register_variables(self, *variables):
    """
    Adds already created Variables to our registry. This could be useful if the variables are not created
    by our own `self.get_variable` method, but by some backend-specific object (e.g. tf.layers).
    Also auto-creates summaries (regulated by `self.summary_regexp`) for the given variables.

    Args:
        # TODO: check whether we wrap PyTorchVariable.
        variables (Union[PyTorchVariable, SingleDataOp]): The Variable objects to register.
    """
    for var in variables:
        # Use our global_scope plus the var's name without anything in between.
        # e.g. var.name = "dense-layer/dense/kernel:0" -> key = "dense-layer/kernel"
        # key = re.sub(r'({}).*?([\w\-.]+):\d+$'.format(self.global_scope), r'\1/\2', var.name)
        key = re.sub(r':\d+$', "", var.name)

        # Already registered: Must be the same (shared) variable.
        if key in self.variable_registry:
            assert self.variable_registry[key] is var, \
                "ERROR: Key '{}' in {}.variables already exists, but holds a different variable " \
                "({} vs {})!".format(key, self.global_scope, self.variable_registry[key], var)
        # New variable: Register.
        else:
            self.variable_registry[key] = var

            # Auto-create the summary for the variable.
            scope_to_use = self.reuse_variable_scope or self.global_scope
            summary_name = var.name[len(scope_to_use) + (1 if scope_to_use else 0):]
            summary_name = re.sub(r':\d+$', "", summary_name)
            self.create_summary(summary_name, var)
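The registry key is derived by stripping the trailing ':<output-index>' from the variable name with a regex; for example (variable name invented):

import re

var_name = "dense-layer/dense/kernel:0"   # typical tf.Variable.name
key = re.sub(r':\d+$', "", var_name)
print(key)                                # -> "dense-layer/dense/kernel"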
Example 11: create_summary
# Required import: import tensorflow [as alias]
# Or: from tensorflow import summary [as alias]
def create_summary(self, name, values, summary_type="histogram"):
    """
    Creates a summary op (and adds it to the graph).
    Skips summaries whose full name does not match `self.summary_regexp`.

    Args:
        name (str): The name for the summary. This has to match `self.summary_regexp`.
            The name should not contain a "summary"-prefix or any global scope information
            (both will be added automatically by this method).
        values (op): The op to summarize.
        summary_type (str): The summary type to create. Currently supported are:
            "histogram", "scalar" and "text".
    """
    # Prepend the "summaries/"-prefix.
    name = "summaries/" + name
    # Get the global name.
    global_name = ((self.global_scope + "/") if self.global_scope else "") + name

    # Skip non-matching summaries (all of them if summary_regexp is None).
    if self.summary_regexp is None or not re.search(self.summary_regexp, global_name):
        return

    summary = None
    if get_backend() == "tf":
        ctor = getattr(tf.summary, summary_type)
        summary = ctor(name, values)

    # Register the new summary with this Component.
    if global_name in self.summaries:
        raise RLGraphError("ERROR: Summary with name '{}' already exists in {}'s summary "
                           "registry!".format(global_name, self.name))
    self.summaries[global_name] = summary
    self.propagate_summary(global_name)
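The getattr dispatch simply maps the summary_type string onto the matching function of the TF 1.x summary module; a standalone sketch with an invented tensor:

import tensorflow as tf  # TF 1.x

values = tf.random_normal([10])            # invented tensor to summarize
summary_type = "histogram"                 # could also be "scalar" or "text"
ctor = getattr(tf.summary, summary_type)   # -> tf.summary.histogram
summary = ctor("my_values", values)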
Example 12: pop_summary_ops_buffer
# Required import: import tensorflow [as alias]
# Or: from tensorflow import summary [as alias]
def pop_summary_ops_buffer(self):
    """
    *Internal use only!* Pops the last frame of the summary ops buffer stack.

    Returns:
        The accumulated summary ops.
    """
    return self._summary_ops_buffer_stack.pop()
Example 13: start_summary_ops_buffer
# Required import: import tensorflow [as alias]
# Or: from tensorflow import summary [as alias]
def start_summary_ops_buffer(self):
    """
    *Internal use only!* Starts a new frame in the summary ops buffer stack.
    """
    self._summary_ops_buffer_stack.append([])
Example 14: summarize_hparams
# Required import: import tensorflow [as alias]
# Or: from tensorflow import summary [as alias]
def summarize_hparams(self):
    def create_hparams_summary(hparams, name):
        hparams_strs = [tf.convert_to_tensor([k, str(v)])
                        for k, v in hparams.values().items()]
        tf.summary.text(name, tf.cast(tf.stack(hparams_strs), tf.string))

    create_hparams_summary(self._hparams, "%s_hparams" % self.name)
    if self._problem_hparams:
        create_hparams_summary(self._problem_hparams,
                               "%s_problem_hparams" % self.name)

# Replace the two methods below in order to add custom SessionRunHooks to
# the training procedure.
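tf.summary.text renders string tensors in TensorBoard's Text tab, which is what makes the key/value stacking above useful; a minimal sketch with invented hyperparameters:

import tensorflow as tf  # TF 1.x

hparams = {"learning_rate": 0.001, "batch_size": 32}       # invented values
hparams_strs = [tf.convert_to_tensor([k, str(v)]) for k, v in hparams.items()]
tf.summary.text("model_hparams", tf.stack(hparams_strs))   # two-column key/value table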
Example 15: loss
# Required import: import tensorflow [as alias]
# Or: from tensorflow import summary [as alias]
def loss(self, logits, features):
    if isinstance(logits, dict):
        losses = {}
        for k, v in six.iteritems(logits):
            losses[k] = self._loss_single(
                v,
                k,
                features[k],
                weights=features.get(k + "_mask"))

            n, d = losses[k]
            if common_layers.should_generate_summaries():
                tf.summary.scalar(k + "_loss", n / d)
                tf.summary.scalar(k + "_loss_num", n)
                tf.summary.scalar(k + "_loss_den", d)
                if getattr(self.hparams, "visualize_logits_histogram", False):
                    hist = tf.summary.histogram
                    hist(k + "_predict", tf.argmax(tf.squeeze(v), axis=-1))
                    hist(k + "_targets", features[k])

        return tf.add_n([n / d for n, d in losses.values()])
    else:
        return self._loss_single(
            logits,
            "targets",
            features["targets"],
            weights=features.get("targets_mask"))
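Each per-key loss above is a (numerator, denominator) pair, and the three scalars expose both the ratio and its parts; a toy sketch of the same reporting pattern with invented constants:

import tensorflow as tf  # TF 1.x

n = tf.constant(12.5)   # summed loss over non-padding positions (invented)
d = tf.constant(50.0)   # number of non-padding positions (invented)
tf.summary.scalar("targets_loss", n / d)
tf.summary.scalar("targets_loss_num", n)
tf.summary.scalar("targets_loss_den", d)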