

Python saver.export_meta_graph Method Code Examples

This article collects typical usage examples of the tensorflow.python.training.saver.export_meta_graph method in Python. If you are wondering what saver.export_meta_graph does, how to call it, or what real-world usage looks like, the curated code examples here may help. You can also explore further usage examples from the enclosing module, tensorflow.python.training.saver.


The following presents 4 code examples of the saver.export_meta_graph method, sorted by popularity by default.
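
Before the collected examples, here is a minimal standalone sketch of the basic call pattern. It assumes a TensorFlow 1.x-style graph-mode environment; the graph contents and the output filename "model.meta" are illustrative assumptions, not taken from any of the projects below.

# Minimal sketch (assumes TF 1.x graph mode); "model.meta" is an illustrative path.
import tensorflow as tf
from tensorflow.python.training import saver as saver_lib

with tf.Graph().as_default() as g:
    x = tf.placeholder(tf.float32, name="x")
    y = tf.identity(2.0 * x, name="y")
    # Serialize the graph structure (plus any requested collections) to a MetaGraphDef;
    # because a filename is given, the proto is also written to disk.
    meta_graph_def = saver_lib.export_meta_graph(
        filename="model.meta", graph_def=g.as_graph_def())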

Example 1: setUp

# Required module import: from tensorflow.python.training import saver [as alias]
# Or: from tensorflow.python.training.saver import export_meta_graph [as alias]
def setUp(self):
    self.base_path = os.path.join(test.get_temp_dir(), "no_vars")
    if not os.path.exists(self.base_path):
      os.mkdir(self.base_path)

    # Create a simple graph with a variable, then convert variables to
    # constants and export the graph.
    with ops.Graph().as_default() as g:
      x = array_ops.placeholder(dtypes.float32, name="x")
      w = variables.Variable(3.0)
      y = math_ops.subtract(w * x, 7.0, name="y")  # pylint: disable=unused-variable
      ops.add_to_collection("meta", "this is meta")

      with self.test_session(graph=g) as session:
        variables.global_variables_initializer().run()
        new_graph_def = graph_util.convert_variables_to_constants(
            session, g.as_graph_def(), ["y"])

      filename = os.path.join(self.base_path, constants.META_GRAPH_DEF_FILENAME)
      saver.export_meta_graph(
          filename, graph_def=new_graph_def, collection_list=["meta"]) 
Contributor: abhisuri97, Project: auto-alt-text-lambda-api, Lines: 23, Source file: session_bundle_test.py
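
A hedged follow-up (not part of the original test): the MetaGraphDef written above can be read back with import_meta_graph from the same saver module. The variable filename below refers to the path built in setUp and is reused purely for illustration.

# Hedged sketch: re-importing the exported MetaGraphDef from the test's `filename`.
with ops.Graph().as_default():
  saver.import_meta_graph(filename)        # rebuilds the frozen graph nodes
  meta_items = ops.get_collection("meta")  # the "meta" collection is restored as well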

Example 2: testGraphFromMetaGraphBecomesAvailable

# Required module import: from tensorflow.python.training import saver [as alias]
# Or: from tensorflow.python.training.saver import export_meta_graph [as alias]
def testGraphFromMetaGraphBecomesAvailable(self):
    """Test accumulator by writing values and then reading them."""

    directory = os.path.join(self.get_temp_dir(), 'metagraph_test_values_dir')
    if gfile.IsDirectory(directory):
      gfile.DeleteRecursively(directory)
    gfile.MkDir(directory)

    writer = tf.train.SummaryWriter(directory, max_queue=100)

    with tf.Graph().as_default() as graph:
      _ = tf.constant([2.0, 1.0])
    # Add a graph to the summary writer.
    meta_graph_def = saver.export_meta_graph(
        graph_def=graph.as_graph_def(add_shapes=True))
    writer.add_meta_graph(meta_graph_def)

    writer.flush()

    # Verify that we can load those events properly
    acc = ea.EventAccumulator(directory)
    acc.Reload()
    self.assertTagsEqual(
        acc.Tags(),
        {
            ea.IMAGES: [],
            ea.AUDIO: [],
            ea.SCALARS: [],
            ea.HISTOGRAMS: [],
            ea.COMPRESSED_HISTOGRAMS: [],
            ea.GRAPH: True,
            ea.META_GRAPH: True,
            ea.RUN_METADATA: []
        })
    self.assertProtoEquals(graph.as_graph_def(add_shapes=True), acc.Graph())
    self.assertProtoEquals(meta_graph_def, acc.MetaGraph()) 
Contributor: tobegit3hub, Project: deep_image_model, Lines: 38, Source file: event_accumulator_test.py

Example 3: _run_tf_optimizer

# Required module import: from tensorflow.python.training import saver [as alias]
# Or: from tensorflow.python.training.saver import export_meta_graph [as alias]
def _run_tf_optimizer(config: ConfigProto,
                      graph: tf.Graph,
                      signature_def: SignatureDef) -> GraphDef:
    """Run the TF optimizer ("grappler") on a graph"""
    graph_def = graph.as_graph_def()
    meta_graph = export_meta_graph(graph_def=graph_def, graph=graph)
    meta_graph.signature_def['not_used_key'].CopyFrom(signature_def)
    return tf_optimizer.OptimizeGraph(config, meta_graph) 
Contributor: patlevin, Project: tfjs-to-tf, Lines: 10, Source file: optimization.py
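
A hedged usage sketch (not taken from the tfjs-to-tf project) showing how the three arguments might be built; the graph contents and the tensor names "x:0" and "y:0" are illustrative assumptions.

# Hedged sketch: constructing a graph, a SignatureDef, and a ConfigProto for the helper.
import tensorflow as tf
from tensorflow.core.protobuf import config_pb2, meta_graph_pb2

graph = tf.Graph()
with graph.as_default():
    x = tf.compat.v1.placeholder(tf.float32, shape=[None, 4], name="x")
    y = tf.identity(tf.matmul(x, tf.ones([4, 2])), name="y")

signature = meta_graph_pb2.SignatureDef()
signature.inputs["x"].name = "x:0"    # grappler reads feeds and fetches from the signature
signature.outputs["y"].name = "y:0"

optimized_graph_def = _run_tf_optimizer(config_pb2.ConfigProto(), graph, signature)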

Example 4: _run_inline_graph_optimization

# Required module import: from tensorflow.python.training import saver [as alias]
# Or: from tensorflow.python.training.saver import export_meta_graph [as alias]
def _run_inline_graph_optimization(func, lower_control_flow):
  """Apply function inline optimization to the graph.

  Returns the GraphDef after Grappler's function inlining optimization is
  applied. This optimization does not work on models with control flow.

  Args:
    func: ConcreteFunction.
    lower_control_flow: Boolean indicating whether or not to lower control flow
      ops such as If and While. (default True)

  Returns:
    GraphDef
  """
  graph_def = func.graph.as_graph_def()
  if not lower_control_flow:
    graph_def = disable_lower_using_switch_merge(graph_def)

  # In some cases, a secondary implementation of the function (e.g. for GPU) is
  # written to the "api_implements" attribute. (e.g. `tf.keras.layers.LSTM` in
  # TF2 produces a CuDNN-based RNN for GPU).
  # This function is supposed to inline all function calls, but "api_implements"
  # prevents this from happening. Removing the attribute solves the problem.
  # To learn more about "api_implements", see:
  #   tensorflow/core/grappler/optimizers/implementation_selector.h
  for function in graph_def.library.function:
    if "api_implements" in function.attr:
      del function.attr["api_implements"]

  meta_graph = export_meta_graph(graph_def=graph_def, graph=func.graph)

  # Clear the initializer_name for the variables collections, since it is no
  # longer needed once the model has been saved to a SavedModel.
  for name in [
      "variables", "model_variables", "trainable_variables", "local_variables"
  ]:
    raw_list = []
    for raw in meta_graph.collection_def["variables"].bytes_list.value:
      variable = variable_pb2.VariableDef()
      variable.ParseFromString(raw)
      variable.ClearField("initializer_name")
      raw_list.append(variable.SerializeToString())
    meta_graph.collection_def[name].bytes_list.value[:] = raw_list

  # Add a collection 'train_op' so that Grappler knows the outputs.
  fetch_collection = meta_graph_pb2.CollectionDef()
  for array in func.inputs + func.outputs:
    fetch_collection.node_list.value.append(array.name)
  meta_graph.collection_def["train_op"].CopyFrom(fetch_collection)

  # Initialize RewriterConfig with everything disabled except function inlining.
  config = config_pb2.ConfigProto()
  rewrite_options = config.graph_options.rewrite_options
  rewrite_options.min_graph_nodes = -1  # do not skip small graphs
  rewrite_options.optimizers.append("function")
  return tf_optimizer.OptimizeGraph(config, meta_graph) 
Contributor: onnx, Project: keras-onnx, Lines: 58, Source file: _graph_cvt.py
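
A hedged usage sketch (not taken from keras-onnx itself): a ConcreteFunction is obtained from a tf.function and passed through the inlining pass; the toy model and the TensorSpec shape are illustrative assumptions.

# Hedged sketch: obtaining a ConcreteFunction and running the function-inlining pass.
import tensorflow as tf

@tf.function
def toy_model(x):
    return tf.nn.relu(x) * 2.0  # any traced, control-flow-free computation works here

concrete_func = toy_model.get_concrete_function(
    tf.TensorSpec(shape=[None, 3], dtype=tf.float32))
inlined_graph_def = _run_inline_graph_optimization(concrete_func, lower_control_flow=True)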


Note: The tensorflow.python.training.saver.export_meta_graph examples in this article were compiled by 純淨天空 from open-source code and documentation platforms such as GitHub and MSDocs. The code snippets were selected from open-source projects contributed by various developers; copyright of the source code remains with the original authors. Please consult the corresponding project's License before distributing or using the code, and do not reproduce this article without permission.