This article collects typical usage examples of the Python method tensorflow.python.summary.summary.FileWriter. If you have been wondering what summary.FileWriter does, how to call it, or what real-world usage looks like, the curated code examples below should help. You can also explore the containing module, tensorflow.python.summary.summary, for more details.
The following shows 5 code examples of summary.FileWriter, sorted by popularity by default. You can upvote the examples you like or find useful; your votes help the system recommend better Python code examples.
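Before the examples, here is a minimal, self-contained sketch of the basic FileWriter workflow (a sketch assuming TensorFlow 1.x graph mode; the log directory is a placeholder): register a scalar summary, merge all summary ops, evaluate the merged op in a session, and write the serialized result to an event file.

# Minimal sketch; assumes TensorFlow 1.x / graph mode, paths are placeholders.
import tensorflow as tf
from tensorflow.python.summary import summary

loss = tf.constant(0.25)
summary.scalar('loss', loss)                 # register a scalar summary
merged = summary.merge_all()                 # merge every registered summary op

writer = summary.FileWriter('/tmp/filewriter_demo')
with tf.Session() as sess:
    serialized = sess.run(merged)            # serialized Summary protobuf
    writer.add_summary(serialized, global_step=0)
writer.flush()
# Inspect the result with: tensorboard --logdir=/tmp/filewriter_demo

The same writer also accepts a graph (either in the constructor or via add_graph), which is how several of the examples below use it.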
Example 1: verify_scalar_summary_is_written
# Required import: from tensorflow.python.summary import summary [as alias]
# Or: from tensorflow.python.summary.summary import FileWriter [as alias]
def verify_scalar_summary_is_written(self, print_summary):
    value = 3
    tensor = array_ops.ones([]) * value
    name = 'my_score'
    prefix = 'eval'
    # Register the scalar summary under 'eval/my_score'.
    summaries.add_scalar_summary(tensor, name, prefix, print_summary)

    output_dir = tempfile.mkdtemp('scalar_summary_no_print_test')
    summary_op = summary.merge_all()

    summary_writer = summary.FileWriter(output_dir)
    with self.cached_session() as sess:
        new_summary = sess.run(summary_op)
        summary_writer.add_summary(new_summary, 1)
        summary_writer.flush()

    # The event file on disk should now contain the expected scalar value.
    self.assert_scalar_summary(output_dir, {
        '%s/%s' % (prefix, name): value
    })
Example 2: create_tfevent_from_pb
# Required import: from tensorflow.python.summary import summary [as alias]
# Or: from tensorflow.python.summary.summary import FileWriter [as alias]
def create_tfevent_from_pb(model, optimized=False):
    print("> creating tfevent of model: {}".format(model))
    if optimized:
        model_path = ROOT_DIR + '/models/{}/optimized_inference_graph.pb'.format(model)
        log_dir = ROOT_DIR + '/models/{}/log_opt/'.format(model)
    else:
        model_path = ROOT_DIR + '/models/{}/frozen_inference_graph.pb'.format(model)
        log_dir = ROOT_DIR + '/models/{}/log/'.format(model)
    with session.Session(graph=ops.Graph()) as sess:
        # Parse the frozen GraphDef and import it into the fresh graph.
        with gfile.FastGFile(model_path, "rb") as f:
            graph_def = graph_pb2.GraphDef()
            graph_def.ParseFromString(f.read())
            importer.import_graph_def(graph_def)
        # Write the graph to an event file so TensorBoard can render it.
        pb_visual_writer = summary.FileWriter(log_dir)
        pb_visual_writer.add_graph(sess.graph)
        print("> Model {} Imported.\n"
              "Visualize by running: tensorboard --logdir={}".format(model_path, log_dir))
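A hypothetical call to the helper above might look like the following; the model name and the ROOT_DIR-based directory layout are assumptions for illustration only.

# Hypothetical usage; 'ssd_mobilenet_v1' and the directory layout are assumptions.
create_tfevent_from_pb('ssd_mobilenet_v1', optimized=False)
# Then inspect the written graph with:
#   tensorboard --logdir=<ROOT_DIR>/models/ssd_mobilenet_v1/log/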
Example 3: import_to_tensorboard
# Required import: from tensorflow.python.summary import summary [as alias]
# Or: from tensorflow.python.summary.summary import FileWriter [as alias]
def import_to_tensorboard(saved_model, output_dir):
    """View an imported saved_model.pb as a graph in TensorBoard.

    Args:
      saved_model: The location of the saved_model.pb to visualize.
      output_dir: The location for the TensorBoard log to begin visualization from.

    Usage:
      Call this function with your model location and desired log directory.
      Launch TensorBoard by pointing it to the log directory.
      View your imported `.pb` model as a graph.
    """
    with open(saved_model, "rb") as f:
        sm = saved_model_pb2.SavedModel()
        sm.ParseFromString(f.read())

    if 1 != len(sm.meta_graphs):
        print('More than one graph found. Not sure which to write')
        sys.exit(1)

    # Hand the GraphDef directly to the writer (the deprecated graph_def path).
    graph_def = sm.meta_graphs[0].graph_def
    pb_visual_writer = summary.FileWriter(output_dir)
    pb_visual_writer.add_graph(None, graph_def=graph_def)
    print("Model Imported. Visualize by running: "
          "tensorboard --logdir={}".format(output_dir))
Example 4: dump_graph_into_tensorboard
# Required import: from tensorflow.python.summary import summary [as alias]
# Or: from tensorflow.python.summary.summary import FileWriter [as alias]
def dump_graph_into_tensorboard(tf_graph):
    # type: (_tf.Graph) -> None
    # Only dump the graph when the TB_LOG_DIR environment variable is set.
    _tb_log_dir = os.environ.get('TB_LOG_DIR')
    if _tb_log_dir:
        if is_tf2:
            # TF 2.x: write the graph through the v2 summary writer.
            from tensorflow.python.ops.summary_ops_v2 import graph as write_graph
            pb_visual_writer = _tf.summary.create_file_writer(_tb_log_dir)
            with pb_visual_writer.as_default():
                write_graph(tf_graph)
        else:
            # TF 1.x: fall back to the classic FileWriter.
            from tensorflow.python.summary import summary
            pb_visual_writer = summary.FileWriter(_tb_log_dir)
            pb_visual_writer.add_graph(tf_graph)
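A hypothetical call, assuming _tf is the imported tensorflow module, is_tf2 reflects the installed version (both are defined elsewhere in the original file), and some_tf_graph is a tf.Graph obtained elsewhere:

# Hypothetical usage; the log path and 'some_tf_graph' are placeholders.
os.environ['TB_LOG_DIR'] = '/tmp/graph_dump'
dump_graph_into_tensorboard(some_tf_graph)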
Example 5: set_model
# Required import: from tensorflow.python.summary import summary [as alias]
# Or: from tensorflow.python.summary.summary import FileWriter [as alias]
def set_model(self, model):
    self.model = model
    self.sess = K.get_session()
    if self.histogram_freq and self.merged is None:
        for layer in self.model.layers:
            for weight in layer.weights:
                # Log a histogram of every weight tensor.
                mapped_weight_name = weight.name.replace(':', '_')
                tf_summary.histogram(mapped_weight_name, weight)
                if self.write_grads:
                    grads = model.optimizer.get_gradients(model.total_loss, weight)

                    def is_indexed_slices(grad):
                        return type(grad).__name__ == 'IndexedSlices'

                    grads = [grad.values if is_indexed_slices(grad) else grad
                             for grad in grads]
                    tf_summary.histogram('{}_grad'.format(mapped_weight_name), grads)
                if self.write_images:
                    w_img = array_ops.squeeze(weight)
                    shape = K.int_shape(w_img)
                    if len(shape) == 2:  # dense layer kernel case
                        if shape[0] > shape[1]:
                            w_img = array_ops.transpose(w_img)
                            shape = K.int_shape(w_img)
                        w_img = array_ops.reshape(w_img, [1, shape[0], shape[1], 1])
                    elif len(shape) == 3:  # convnet case
                        if K.image_data_format() == 'channels_last':
                            # switch to channels_first to display
                            # every kernel as a separate image
                            w_img = array_ops.transpose(w_img, perm=[2, 0, 1])
                            shape = K.int_shape(w_img)
                        w_img = array_ops.reshape(w_img,
                                                  [shape[0], shape[1], shape[2], 1])
                    elif len(shape) == 1:  # bias case
                        w_img = array_ops.reshape(w_img, [1, shape[0], 1, 1])
                    else:
                        # not possible to handle 3D convnets etc.
                        continue

                    shape = K.int_shape(w_img)
                    assert len(shape) == 4 and shape[-1] in [1, 3, 4]
                    tf_summary.image(mapped_weight_name, w_img)

            if hasattr(layer, 'output'):
                tf_summary.histogram('{}_out'.format(layer.name), layer.output)

    self.merged = tf_summary.merge_all()
    if self.write_graph:
        # Also write the session graph so TensorBoard can show the model structure.
        self.writer = tf_summary.FileWriter(self.log_dir, self.sess.graph)
    else:
        self.writer = tf_summary.FileWriter(self.log_dir)
Developer: PacktPublishing, Project: Serverless-Deep-Learning-with-TensorFlow-and-AWS-Lambda, Lines of code: 53, Source file: callbacks.py
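A minimal sketch of how a callback exposing this set_model method would be wired into Keras training; the class name TensorBoardCallback, its constructor arguments, and the model and data names are assumptions, since the surrounding class definition is not shown here.

# Hypothetical usage; class name, arguments, model and data are placeholders.
callback = TensorBoardCallback(log_dir='/tmp/keras_logs', histogram_freq=1,
                               write_graph=True, write_images=False)
model.compile(optimizer='adam', loss='categorical_crossentropy')
model.fit(x_train, y_train, epochs=5, callbacks=[callback])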