This article collects and summarizes typical usage examples of the Python method tensorflow.python.framework.meta_graph.export_scoped_meta_graph. If you are wondering how meta_graph.export_scoped_meta_graph is used in practice, or are looking for working examples of it, the curated code samples below should help. You can also explore further usage examples of the module the method belongs to, tensorflow.python.framework.meta_graph.
The following shows 11 code examples of the meta_graph.export_scoped_meta_graph method, sorted by popularity by default. You can upvote the examples you like or find useful; your ratings help the system recommend better Python code examples.
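Most of the examples below follow the same basic pattern: build a graph under a name scope, export that scope as a MetaGraphDef with export_scoped_meta_graph, and re-import it under a new scope with import_scoped_meta_graph. Here is a minimal sketch of that round trip; it assumes TensorFlow 1.x graph mode, and the scope names and ops are purely illustrative.

import tensorflow as tf
from tensorflow.python.framework import meta_graph

# A minimal sketch (TensorFlow 1.x graph mode assumed); scope names are
# illustrative only.
graph1 = tf.Graph()
with graph1.as_default():
  with tf.name_scope("scope1"):
    x = tf.constant(1.0, shape=[2, 2], name="x")
    tf.identity(x, name="y")

# Export only the subgraph under "scope1"; the scope prefix is stripped from
# node names so the subgraph can later be imported under a different scope.
exported_meta_graph, var_list = meta_graph.export_scoped_meta_graph(
    export_scope="scope1", graph=graph1)

# Re-import the exported subgraph into a fresh graph under a new scope.
graph2 = tf.Graph()
with graph2.as_default():
  meta_graph.import_scoped_meta_graph(
      exported_meta_graph, import_scope="new_scope1")
graph2.as_graph_element("new_scope1/y")  # the renamed node is now available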
Example 1: _testScopedImportWithQueue
# Required import: from tensorflow.python.framework import meta_graph [as alias]
# Or: from tensorflow.python.framework.meta_graph import export_scoped_meta_graph [as alias]
def _testScopedImportWithQueue(self, test_dir, exported_filename,
                               new_exported_filename):
  graph = tf.Graph()
  meta_graph.import_scoped_meta_graph(
      os.path.join(test_dir, exported_filename),
      graph=graph,
      import_scope="new_queue1")
  graph.as_graph_element("new_queue1/dequeue:0")
  graph.as_graph_element("new_queue1/close")
  with graph.as_default():
    new_meta_graph, _ = meta_graph.export_scoped_meta_graph(
        filename=os.path.join(test_dir, new_exported_filename),
        graph=graph, export_scope="new_queue1")
  return new_meta_graph

# Verifies that we can export the subgraph containing a FIFOQueue under
# "queue1" and import it into "new_queue1" in a new graph.
Example 2: testPotentialCycle
# Required import: from tensorflow.python.framework import meta_graph [as alias]
# Or: from tensorflow.python.framework.meta_graph import export_scoped_meta_graph [as alias]
def testPotentialCycle(self):
  graph1 = tf.Graph()
  with graph1.as_default():
    a = tf.constant(1.0, shape=[2, 2])
    b = tf.constant(2.0, shape=[2, 2])
    matmul = tf.matmul(a, b)
    with tf.name_scope("hidden1"):
      c = tf.nn.relu(matmul)
      d = tf.constant(3.0, shape=[2, 2])
      matmul = tf.matmul(c, d)

  orig_meta_graph, _ = meta_graph.export_scoped_meta_graph(
      export_scope="hidden1", graph=graph1)

  graph2 = tf.Graph()
  with graph2.as_default():
    with self.assertRaisesRegexp(ValueError, "Graph contains unbound inputs"):
      meta_graph.import_scoped_meta_graph(
          orig_meta_graph, import_scope="new_hidden1")

    meta_graph.import_scoped_meta_graph(
        orig_meta_graph, import_scope="new_hidden1",
        input_map={"$unbound_inputs_MatMul": tf.constant(4.0, shape=[2, 2])})
Example 3: testExportGraphDefWithScope
# Required import: from tensorflow.python.framework import meta_graph [as alias]
# Or: from tensorflow.python.framework.meta_graph import export_scoped_meta_graph [as alias]
def testExportGraphDefWithScope(self):
  test_dir = _TestDir("export_graph_def")
  saver0_ckpt = os.path.join(test_dir, "saver0.ckpt")
  graph1 = tf.Graph()
  with graph1.as_default():
    with tf.name_scope("hidden1"):
      images = tf.constant(1.0, tf.float32, shape=[3, 2], name="images")
      weights1 = tf.Variable([[1.0, 2.0, 3.0], [4.0, 5.0, 6.0]],
                             name="weights")
      biases1 = tf.Variable([0.1] * 3, name="biases")
      tf.nn.relu(tf.matmul(images, weights1) + biases1, name="relu")

  # Run the graph and save scoped checkpoint.
  with self.test_session(graph=graph1) as sess:
    sess.run(tf.global_variables_initializer())
    _, var_list_1 = meta_graph.export_scoped_meta_graph(
        graph_def=graph1.as_graph_def(), export_scope="hidden1")
    saver = saver_module.Saver(var_list=var_list_1, max_to_keep=1)
    saver.save(sess, saver0_ckpt, write_state=False)

  expected = np.reshape([[5.0999999, 7.0999999, 9.10000038] * 3], (3, 3))

  # Verifies that we can run successfully after restoring.
  graph2 = tf.Graph()
  new_var_list_1 = meta_graph.copy_scoped_meta_graph(
      from_scope="hidden1", to_scope="new_hidden1",
      from_graph=graph1, to_graph=graph2)
  with self.test_session(graph=graph2) as sess:
    saver3 = saver_module.Saver(var_list=new_var_list_1, max_to_keep=1)
    saver3.restore(sess, saver0_ckpt)
    self.assertAllClose(expected, sess.run("new_hidden1/relu:0"))
Example 4: testExportNestedNames
# Required import: from tensorflow.python.framework import meta_graph [as alias]
# Or: from tensorflow.python.framework.meta_graph import export_scoped_meta_graph [as alias]
def testExportNestedNames(self):
  graph1 = tf.Graph()
  with graph1.as_default():
    with tf.name_scope("hidden1/hidden2/hidden3"):
      images = tf.constant(1.0, tf.float32, shape=[3, 2], name="images")
      weights1 = tf.Variable([[1.0, 2.0, 3.0], [4.0, 5.0, 6.0]],
                             name="weights")
      biases1 = tf.Variable([0.1] * 3, name="biases")
      tf.nn.relu(tf.matmul(images, weights1) + biases1, name="relu")

  orig_meta_graph, var_list = meta_graph.export_scoped_meta_graph(
      export_scope="hidden1/hidden2", graph=graph1)
  var_names = [v.name for _, v in var_list.items()]
  self.assertEqual(["hidden3/biases:0", "hidden3/weights:0"],
                   sorted(var_list.keys()))
  self.assertEqual(["hidden1/hidden2/hidden3/biases:0",
                    "hidden1/hidden2/hidden3/weights:0"],
                   sorted(var_names))
  for node in orig_meta_graph.graph_def.node:
    self.assertTrue(node.name.startswith("hidden3"))

  graph2 = tf.Graph()
  new_var_list = meta_graph.import_scoped_meta_graph(
      orig_meta_graph, import_scope="new_hidden1/new_hidden2",
      graph=graph2)
  self.assertEqual(["hidden3/biases:0", "hidden3/weights:0"],
                   sorted(new_var_list.keys()))
  new_var_names = [v.name for _, v in new_var_list.items()]
  self.assertEqual(["new_hidden1/new_hidden2/hidden3/biases:0",
                    "new_hidden1/new_hidden2/hidden3/weights:0"],
                   sorted(new_var_names))

  nodes = ["new_hidden1/new_hidden2/hidden3/biases/Assign",
           "new_hidden1/new_hidden2/hidden3/weights/Assign"]
  expected = [b"loc:@new_hidden1/new_hidden2/hidden3/biases",
              b"loc:@new_hidden1/new_hidden2/hidden3/weights"]
  for n, e in zip(nodes, expected):
    self.assertEqual([e], graph2.get_operation_by_name(n).get_attr("_class"))
Example 5: meta_graph_def
# Required import: from tensorflow.python.framework import meta_graph [as alias]
# Or: from tensorflow.python.framework.meta_graph import export_scoped_meta_graph [as alias]
def meta_graph_def(self):
  meta_graph_def = None
  with self._g.as_default():
    with tf.device(self._device):
      for compiler in self._compilers:
        if hasattr(compiler, "finish"):
          compiler.finish()
      meta_graph_def, _ = meta_graph.export_scoped_meta_graph()
  return meta_graph_def
Example 6: export_meta_graph
# Required import: from tensorflow.python.framework import meta_graph [as alias]
# Or: from tensorflow.python.framework.meta_graph import export_scoped_meta_graph [as alias]
def export_meta_graph(filename=None,
                      meta_info_def=None,
                      graph_def=None,
                      saver_def=None,
                      collection_list=None,
                      as_text=False,
                      graph=None,
                      export_scope=None,
                      clear_devices=False,
                      **kwargs):
  """Returns `MetaGraphDef` proto. Optionally writes it to filename.

  This function exports the graph, saver, and collection objects into
  `MetaGraphDef` protocol buffer with the intention of it being imported
  at a later time or location to restart training, run inference, or be
  a subgraph.

  Args:
    filename: Optional filename including the path for writing the
      generated `MetaGraphDef` protocol buffer.
    meta_info_def: `MetaInfoDef` protocol buffer.
    graph_def: `GraphDef` protocol buffer.
    saver_def: `SaverDef` protocol buffer.
    collection_list: List of string keys to collect.
    as_text: If `True`, writes the `MetaGraphDef` as an ASCII proto.
    graph: The `Graph` to export. If `None`, use the default graph.
    export_scope: Optional `string`. Name scope under which to extract
      the subgraph. The scope name will be stripped from the node definitions
      for easy import later into new name scopes. If `None`, the whole graph
      is exported. `graph_def` and `export_scope` cannot both be specified.
    clear_devices: Whether or not to clear the device field for an `Operation`
      or `Tensor` during export.
    **kwargs: Optional keyed arguments.

  Returns:
    A `MetaGraphDef` proto.

  Raises:
    ValueError: When the `GraphDef` is larger than 2GB.
  """
  meta_graph_def, _ = meta_graph.export_scoped_meta_graph(
      filename=filename,
      meta_info_def=meta_info_def,
      graph_def=graph_def,
      saver_def=saver_def,
      collection_list=collection_list,
      as_text=as_text,
      graph=graph,
      export_scope=export_scope,
      clear_devices=clear_devices,
      **kwargs)
  return meta_graph_def
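As a rough illustration of how the wrapper above might be called (the graph contents, output path, and the printed field are assumptions for the sketch, not part of the original example):

graph = tf.Graph()
with graph.as_default():
  tf.constant([1.0, 2.0], name="values")

meta_graph_def = export_meta_graph(
    filename="/tmp/example.meta",  # assumed writable path, illustrative only
    graph=graph,
    as_text=False,
    clear_devices=True)
# MetaGraphDef.meta_info_def records, among other things, the TensorFlow
# version that produced the export.
print(meta_graph_def.meta_info_def.tensorflow_version)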
Example 7: _testScopedSave
# Required import: from tensorflow.python.framework import meta_graph [as alias]
# Or: from tensorflow.python.framework.meta_graph import export_scoped_meta_graph [as alias]
def _testScopedSave(self, test_dir, exported_filename, ckpt_filename):
  graph = tf.Graph()
  with graph.as_default():
    # Creates an inference graph.
    # Hidden 1
    images = tf.constant(1.2, tf.float32, shape=[100, 28], name="images")
    with tf.name_scope("hidden1"):
      weights1 = tf.Variable(
          tf.truncated_normal([28, 128],
                              stddev=1.0 / math.sqrt(float(28))),
          name="weights")
      # The use of control_flow_ops.cond here is purely for adding test
      # coverage of the save and restore of control flow context (which
      # doesn't make any sense here from a machine learning perspective).
      # The typical biases is a simple Variable without the conditions.
      biases1 = tf.Variable(
          control_flow_ops.cond(tf.less(random.random(), 0.5),
                                lambda: tf.ones([128]),
                                lambda: tf.zeros([128])),
          name="biases")
      hidden1 = tf.nn.relu(tf.matmul(images, weights1) + biases1)

    # Hidden 2
    with tf.name_scope("hidden2"):
      weights2 = tf.Variable(
          tf.truncated_normal([128, 32],
                              stddev=1.0 / math.sqrt(float(128))),
          name="weights")

      # The use of control_flow_ops.while_loop here is purely for adding
      # test coverage of the save and restore of control flow context (which
      # doesn't make any sense here from a machine learning perspective).
      # The typical biases is a simple Variable without the conditions.
      def loop_cond(it, _):
        return it < 2

      def loop_body(it, biases2):
        biases2 += tf.constant(0.1, shape=[32])
        return it + 1, biases2

      _, biases2 = control_flow_ops.while_loop(
          loop_cond, loop_body,
          [tf.constant(0), tf.Variable(tf.zeros([32]))])
      hidden2 = tf.nn.relu(tf.matmul(hidden1, weights2) + biases2)

    # Linear
    with tf.name_scope("softmax_linear"):
      weights3 = tf.Variable(
          tf.truncated_normal([32, 10],
                              stddev=1.0 / math.sqrt(float(32))),
          name="weights")
      biases3 = tf.Variable(tf.zeros([10]), name="biases")
      logits = tf.matmul(hidden2, weights3) + biases3
      tf.add_to_collection("logits", logits)

    _, var_list = meta_graph.export_scoped_meta_graph(
        filename=os.path.join(test_dir, exported_filename),
        graph=tf.get_default_graph(), export_scope="hidden1")
    self.assertEqual(["biases:0", "weights:0"], sorted(var_list.keys()))

  with self.test_session(graph=graph) as sess:
    sess.run(tf.global_variables_initializer())
    saver = saver_module.Saver(var_list=var_list, max_to_keep=1)
    saver.save(sess, os.path.join(test_dir, ckpt_filename),
               write_state=False)
Example 8: testCopyScopedGraph
# Required import: from tensorflow.python.framework import meta_graph [as alias]
# Or: from tensorflow.python.framework.meta_graph import export_scoped_meta_graph [as alias]
def testCopyScopedGraph(self):
  test_dir = _TestDir("scoped_copy")
  saver0_ckpt = os.path.join(test_dir, "saver0.ckpt")
  graph1 = tf.Graph()
  with graph1.as_default():
    with tf.name_scope("hidden1"):
      images = tf.constant(1.0, tf.float32, shape=[3, 2], name="images")
      weights1 = tf.Variable([[1.0, 2.0, 3.0], [4.0, 5.0, 6.0]],
                             name="weights")
      biases1 = tf.Variable([0.1] * 3, name="biases")
      tf.nn.relu(tf.matmul(images, weights1) + biases1, name="relu")

  # Run the graph and save scoped checkpoint.
  with self.test_session(graph=graph1) as sess:
    sess.run(tf.global_variables_initializer())
    _, var_list_1 = meta_graph.export_scoped_meta_graph(
        export_scope="hidden1")
    saver = saver_module.Saver(var_list=var_list_1, max_to_keep=1)
    saver.save(sess, saver0_ckpt, write_state=False)

  expected = np.reshape([[5.0999999, 7.0999999, 9.10000038] * 3], (3, 3))

  # Verifies copy to the same graph with the same name fails.
  with graph1.as_default():
    with self.assertRaisesWithPredicateMatch(
        ValueError, lambda e: "need to be different" in str(e)):
      meta_graph.copy_scoped_meta_graph(from_scope="hidden1",
                                        to_scope="hidden1")

  # Verifies copy to the same graph.
  with graph1.as_default():
    var_list_2 = meta_graph.copy_scoped_meta_graph(from_scope="hidden1",
                                                   to_scope="hidden2")

  with self.test_session(graph=graph1) as sess:
    saver1 = saver_module.Saver(var_list=var_list_1, max_to_keep=1)
    saver1.restore(sess, saver0_ckpt)
    saver2 = saver_module.Saver(var_list=var_list_2, max_to_keep=1)
    saver2.restore(sess, saver0_ckpt)
    self.assertAllClose(expected, sess.run("hidden1/relu:0"))
    self.assertAllClose(expected, sess.run("hidden2/relu:0"))

  # Verifies copy to a different graph.
  graph2 = tf.Graph()
  new_var_list_1 = meta_graph.copy_scoped_meta_graph(
      from_scope="hidden1", to_scope="new_hidden1",
      from_graph=graph1, to_graph=graph2)
  with self.test_session(graph=graph2) as sess:
    saver3 = saver_module.Saver(var_list=new_var_list_1, max_to_keep=1)
    saver3.restore(sess, saver0_ckpt)
    self.assertAllClose(expected, sess.run("new_hidden1/relu:0"))
Example 9: testNoVariables
# Required import: from tensorflow.python.framework import meta_graph [as alias]
# Or: from tensorflow.python.framework.meta_graph import export_scoped_meta_graph [as alias]
def testNoVariables(self):
  test_dir = _TestDir("no_variables")
  filename = os.path.join(test_dir, "metafile")

  input_feed_value = -10  # Arbitrary input value for feed_dict.

  orig_graph = tf.Graph()
  with self.test_session(graph=orig_graph) as sess:
    # Create a minimal graph with zero variables.
    input_tensor = tf.placeholder(tf.float32, shape=[], name="input")
    offset = tf.constant(42, dtype=tf.float32, name="offset")
    output_tensor = tf.add(input_tensor, offset, name="add_offset")

    # Add input and output tensors to graph collections.
    tf.add_to_collection("input_tensor", input_tensor)
    tf.add_to_collection("output_tensor", output_tensor)

    output_value = sess.run(output_tensor, {input_tensor: input_feed_value})
    self.assertEqual(output_value, 32)

    # Generates MetaGraphDef.
    meta_graph_def, var_list = meta_graph.export_scoped_meta_graph(
        filename=filename,
        graph_def=tf.get_default_graph().as_graph_def(add_shapes=True),
        collection_list=["input_tensor", "output_tensor"],
        saver_def=None)
    self.assertEqual({}, var_list)

  # Create a clean graph and import the MetaGraphDef nodes.
  new_graph = tf.Graph()
  with self.test_session(graph=new_graph) as sess:
    # Import the previously exported meta graph.
    meta_graph.import_scoped_meta_graph(filename)

    # Re-exports the current graph state for comparison to the original.
    new_meta_graph_def, _ = meta_graph.export_scoped_meta_graph(
        filename + "_new")
    self.assertProtoEquals(meta_graph_def, new_meta_graph_def)

    # Ensures that we can still get a reference to our graph collections.
    new_input_tensor = tf.get_collection("input_tensor")[0]
    new_output_tensor = tf.get_collection("output_tensor")[0]

    # Verifies that the new graph computes the same result as the original.
    new_output_value = sess.run(
        new_output_tensor, {new_input_tensor: input_feed_value})
    self.assertEqual(new_output_value, output_value)
Example 10: testClearDevices
# Required import: from tensorflow.python.framework import meta_graph [as alias]
# Or: from tensorflow.python.framework.meta_graph import export_scoped_meta_graph [as alias]
def testClearDevices(self):
  graph1 = tf.Graph()
  with graph1.as_default():
    with tf.device("/device:CPU:0"):
      a = tf.Variable(tf.constant(1.0, shape=[2, 2]), name="a")
    with tf.device("/job:ps/replica:0/task:0/gpu:0"):
      b = tf.Variable(tf.constant(2.0, shape=[2, 2]), name="b")
    with tf.device("/job:localhost/replica:0/task:0/cpu:0"):
      tf.matmul(a, b, name="matmul")

  self.assertEqual("/device:CPU:0", str(graph1.as_graph_element("a").device))
  self.assertEqual("/job:ps/replica:0/task:0/device:GPU:0",
                   str(graph1.as_graph_element("b").device))
  self.assertEqual("/job:localhost/replica:0/task:0/device:CPU:0",
                   str(graph1.as_graph_element("matmul").device))

  # Verifies that devices are cleared on export.
  orig_meta_graph, _ = meta_graph.export_scoped_meta_graph(
      graph=graph1, clear_devices=True)
  graph2 = tf.Graph()
  with graph2.as_default():
    meta_graph.import_scoped_meta_graph(orig_meta_graph, clear_devices=False)
  self.assertEqual("", str(graph2.as_graph_element("a").device))
  self.assertEqual("", str(graph2.as_graph_element("b").device))
  self.assertEqual("", str(graph2.as_graph_element("matmul").device))

  # Verifies that devices are cleared on export when passing in graph_def.
  orig_meta_graph, _ = meta_graph.export_scoped_meta_graph(
      graph_def=graph1.as_graph_def(), clear_devices=True)
  graph2 = tf.Graph()
  with graph2.as_default():
    meta_graph.import_scoped_meta_graph(orig_meta_graph, clear_devices=False)
  self.assertEqual("", str(graph2.as_graph_element("a").device))
  self.assertEqual("", str(graph2.as_graph_element("b").device))
  self.assertEqual("", str(graph2.as_graph_element("matmul").device))

  # Verifies that devices are cleared on import.
  orig_meta_graph, _ = meta_graph.export_scoped_meta_graph(
      graph=graph1, clear_devices=False)
  graph2 = tf.Graph()
  with graph2.as_default():
    meta_graph.import_scoped_meta_graph(orig_meta_graph, clear_devices=True)
  self.assertEqual("", str(graph2.as_graph_element("a").device))
  self.assertEqual("", str(graph2.as_graph_element("b").device))
  self.assertEqual("", str(graph2.as_graph_element("matmul").device))
Example 11: export_meta_graph
# Required import: from tensorflow.python.framework import meta_graph [as alias]
# Or: from tensorflow.python.framework.meta_graph import export_scoped_meta_graph [as alias]
def export_meta_graph(filename=None,
                      meta_info_def=None,
                      graph_def=None,
                      saver_def=None,
                      collection_list=None,
                      as_text=False,
                      graph=None,
                      export_scope=None,
                      clear_devices=False,
                      clear_extraneous_savers=False,
                      **kwargs):
  """Returns `MetaGraphDef` proto. Optionally writes it to filename.

  This function exports the graph, saver, and collection objects into
  `MetaGraphDef` protocol buffer with the intention of it being imported
  at a later time or location to restart training, run inference, or be
  a subgraph.

  Args:
    filename: Optional filename including the path for writing the
      generated `MetaGraphDef` protocol buffer.
    meta_info_def: `MetaInfoDef` protocol buffer.
    graph_def: `GraphDef` protocol buffer.
    saver_def: `SaverDef` protocol buffer.
    collection_list: List of string keys to collect.
    as_text: If `True`, writes the `MetaGraphDef` as an ASCII proto.
    graph: The `Graph` to export. If `None`, use the default graph.
    export_scope: Optional `string`. Name scope under which to extract
      the subgraph. The scope name will be stripped from the node definitions
      for easy import later into new name scopes. If `None`, the whole graph
      is exported. `graph_def` and `export_scope` cannot both be specified.
    clear_devices: Whether or not to clear the device field for an `Operation`
      or `Tensor` during export.
    clear_extraneous_savers: Remove any Saver-related information from the
      graph (both Save/Restore ops and SaverDefs) that are not associated
      with the provided SaverDef.
    **kwargs: Optional keyed arguments.

  Returns:
    A `MetaGraphDef` proto.

  Raises:
    ValueError: When the `GraphDef` is larger than 2GB.
  """
  if context.in_eager_mode():
    raise ValueError("Exporting/importing meta graphs is not supported when "
                     "eager execution is enabled. No graph exists when eager "
                     "execution is enabled.")
  meta_graph_def, _ = meta_graph.export_scoped_meta_graph(
      filename=filename,
      meta_info_def=meta_info_def,
      graph_def=graph_def,
      saver_def=saver_def,
      collection_list=collection_list,
      as_text=as_text,
      graph=graph,
      export_scope=export_scope,
      clear_devices=clear_devices,
      clear_extraneous_savers=clear_extraneous_savers,
      **kwargs)
  return meta_graph_def
Developer ID: PacktPublishing, Project: Serverless-Deep-Learning-with-TensorFlow-and-AWS-Lambda, Lines of code: 63, Source: saver.py