This article collects typical usage examples of the Python attribute tensorflow.python.saved_model.tag_constants.SERVING. If you are wondering what tag_constants.SERVING does, how to use it, or what real code that uses it looks like, the curated examples below may help. You can also browse further usage examples from the containing module, tensorflow.python.saved_model.
Fifteen code examples of the tag_constants.SERVING attribute are shown below, sorted by popularity by default. You can upvote the examples you like or find useful; your feedback helps the system recommend better Python code examples.
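Before the individual examples, here is a minimal round-trip sketch of what the tag is for: tag_constants.SERVING is the string tag ("serve") that marks a MetaGraphDef as intended for serving, and the same tag is used both when writing a SavedModel and when loading it back. The names sess and export_dir below are placeholders, not taken from any of the examples:

# Minimal sketch (TF 1.x API); `sess` holds an already-built graph with
# initialized variables and `export_dir` is an empty, writable directory.
import tensorflow as tf
from tensorflow.python.saved_model import builder as saved_model_builder
from tensorflow.python.saved_model import tag_constants

# Save: attach the SERVING tag so serving tools can find this meta graph.
builder = saved_model_builder.SavedModelBuilder(export_dir)
builder.add_meta_graph_and_variables(sess, tags=[tag_constants.SERVING])
builder.save()

# Load: the same tag selects which meta graph to restore.
with tf.Session(graph=tf.Graph()) as new_sess:
  tf.saved_model.loader.load(new_sess, [tag_constants.SERVING], export_dir)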
Example 1: export_h5_to_pb
# Required module import: from tensorflow.python.saved_model import tag_constants [as alias]
# Or: from tensorflow.python.saved_model.tag_constants import SERVING [as alias]
def export_h5_to_pb(path_to_h5, export_path):
  # Set the learning phase to test since the model is already trained.
  K.set_learning_phase(0)
  # Load the Keras model.
  keras_model = load_model(path_to_h5)
  # Build the protocol-buffer SavedModel at 'export_path'.
  builder = saved_model_builder.SavedModelBuilder(export_path)
  # Create the prediction signature to be used by the TensorFlow Serving Predict API.
  signature = predict_signature_def(inputs={"images": keras_model.input},
                                    outputs={"scores": keras_model.output})
  with K.get_session() as sess:
    # Save the meta graph and the variables.
    builder.add_meta_graph_and_variables(sess=sess, tags=[tag_constants.SERVING],
                                         signature_def_map={"predict": signature})
    builder.save()
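A hedged sketch of how the export above could be consumed from Python; export_path and image_batch are placeholder names, while the signature key 'predict' and the 'images'/'scores' tensor names come from the example itself:

# Hypothetical client-side check of the export above (TF 1.x API).
import tensorflow as tf
from tensorflow.python.saved_model import tag_constants

with tf.Session(graph=tf.Graph()) as sess:
  meta_graph = tf.saved_model.loader.load(sess, [tag_constants.SERVING], export_path)
  sig = meta_graph.signature_def['predict']
  input_name = sig.inputs['images'].name
  output_name = sig.outputs['scores'].name
  # `image_batch` must match the input shape the Keras model was trained with.
  scores = sess.run(output_name, feed_dict={input_name: image_batch})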
Example 2: __init__
# Required module import: from tensorflow.python.saved_model import tag_constants [as alias]
# Or: from tensorflow.python.saved_model.tag_constants import SERVING [as alias]
def __init__(self):
  model_path = os.environ.get('MODEL_PATH', '/model')
  self.sess = tf.Session(graph=tf.Graph())
  saved_metagraphdef = tf.saved_model.loader.load(self.sess,
                                                  [tag_constants.SERVING], model_path)
  self.inputs_tensor_info = signature_def_utils.get_signature_def_by_key(
      saved_metagraphdef,
      signature_constants.DEFAULT_SERVING_SIGNATURE_DEF_KEY).inputs
  outputs_tensor_info = signature_def_utils.get_signature_def_by_key(
      saved_metagraphdef,
      signature_constants.DEFAULT_SERVING_SIGNATURE_DEF_KEY).outputs
  self.output_tensor_keys_sorted = sorted(outputs_tensor_info.keys())
  self.output_tensor_names_sorted = [
      outputs_tensor_info[tensor_key].name
      for tensor_key in self.output_tensor_keys_sorted
  ]
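The constructor above only caches tensor info; no prediction method is shown in the example. The sketch below is an assumed companion method that reuses those cached names, with instances as a placeholder dict mapping input keys to batched values:

def predict(self, instances):
  # Hypothetical companion method: feed each named input, then fetch the
  # outputs in the pre-sorted key order cached in __init__.
  feed_dict = {
      self.inputs_tensor_info[key].name: value
      for key, value in instances.items()
  }
  output_values = self.sess.run(self.output_tensor_names_sorted, feed_dict=feed_dict)
  return dict(zip(self.output_tensor_keys_sorted, output_values))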
Example 3: export
# Required module import: from tensorflow.python.saved_model import tag_constants [as alias]
# Or: from tensorflow.python.saved_model.tag_constants import SERVING [as alias]
def export(self, last_checkpoint, output_dir):
  """Builds a prediction graph and exports the model.

  Args:
    last_checkpoint: Path to the latest checkpoint file from training.
    output_dir: Path to the folder to be used to output the model.
  """
  logging.info('Exporting prediction graph to %s', output_dir)
  with tf.Session(graph=tf.Graph()) as sess:
    # Build and save prediction meta graph and trained variable values.
    inputs, outputs = self.build_prediction_graph()
    init_op = tf.global_variables_initializer()
    sess.run(init_op)
    self.restore_from_checkpoint(sess, self.inception_checkpoint_file,
                                 last_checkpoint)
    signature_def = build_signature(inputs=inputs, outputs=outputs)
    signature_def_map = {
        signature_constants.DEFAULT_SERVING_SIGNATURE_DEF_KEY: signature_def
    }
    builder = saved_model_builder.SavedModelBuilder(output_dir)
    builder.add_meta_graph_and_variables(
        sess,
        tags=[tag_constants.SERVING],
        signature_def_map=signature_def_map)
    builder.save()
Example 4: export_model
# Required module import: from tensorflow.python.saved_model import tag_constants [as alias]
# Or: from tensorflow.python.saved_model.tag_constants import SERVING [as alias]
def export_model(self, model_dir, global_step_val, last_checkpoint):
  """Exports the model so that it can be used for batch predictions."""
  with self.graph.as_default():
    with tf.Session() as session:
      session.run(tf.global_variables_initializer())
      self.saver.restore(session, last_checkpoint)
      signature = signature_def_utils.build_signature_def(
          inputs=self.inputs,
          outputs=self.outputs,
          method_name=signature_constants.PREDICT_METHOD_NAME)
      signature_map = {
          signature_constants.DEFAULT_SERVING_SIGNATURE_DEF_KEY: signature
      }
      model_builder = saved_model_builder.SavedModelBuilder(model_dir)
      model_builder.add_meta_graph_and_variables(
          session,
          tags=[tag_constants.SERVING],
          signature_def_map=signature_map,
          clear_devices=True)
      model_builder.save()
Example 5: to_savedmodel
# Required module import: from tensorflow.python.saved_model import tag_constants [as alias]
# Or: from tensorflow.python.saved_model.tag_constants import SERVING [as alias]
def to_savedmodel(model, export_path):
  """Convert the Keras HDF5 model into a TensorFlow SavedModel."""
  builder = saved_model_builder.SavedModelBuilder(export_path)
  signature = predict_signature_def(
      inputs={'input': model.inputs[0]}, outputs={'income': model.outputs[0]})
  with K.get_session() as sess:
    builder.add_meta_graph_and_variables(
        sess=sess,
        tags=[tag_constants.SERVING],
        signature_def_map={
            signature_constants.DEFAULT_SERVING_SIGNATURE_DEF_KEY: signature
        })
    builder.save()
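A possible driver for to_savedmodel, assuming a standalone Keras installation; the file name 'model.h5' and the export directory 'export/1' are placeholders:

# Hypothetical usage of to_savedmodel(); paths are placeholders.
from keras.models import load_model

model = load_model('model.h5')  # trained Keras HDF5 model (assumed to exist)
to_savedmodel(model, export_path='export/1')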
Example 6: testSavedModelBasic
# Required module import: from tensorflow.python.saved_model import tag_constants [as alias]
# Or: from tensorflow.python.saved_model.tag_constants import SERVING [as alias]
def testSavedModelBasic(self):
  base_path = test.test_src_dir_path(SAVED_MODEL_PATH)
  ops.reset_default_graph()
  sess, meta_graph_def = (
      bundle_shim.load_session_bundle_or_saved_model_bundle_from_path(
          base_path,
          tags=[tag_constants.SERVING],
          target="",
          config=config_pb2.ConfigProto(device_count={"CPU": 2})))
  self.assertTrue(sess)
  # Check basic signature def property.
  signature_def = meta_graph_def.signature_def
  self.assertEqual(len(signature_def), 2)
  self.assertEqual(
      signature_def[signature_constants.REGRESS_METHOD_NAME].method_name,
      signature_constants.REGRESS_METHOD_NAME)
  signature = signature_def["tensorflow/serving/regress"]
  asset_path = os.path.join(base_path, saved_model_constants.ASSETS_DIRECTORY)
  with sess.as_default():
    output1 = sess.run(["filename_tensor:0"])
    self.assertEqual(["foo.txt"], output1)
Example 7: main
# Required module import: from tensorflow.python.saved_model import tag_constants [as alias]
# Or: from tensorflow.python.saved_model.tag_constants import SERVING [as alias]
def main(base_model_name, weights_file, export_path):
  # Load the model and its weights.
  nima = Nima(base_model_name, weights=None)
  nima.build()
  nima.nima_model.load_weights(weights_file)
  # Tell Keras that this will be used for making predictions.
  K.set_learning_phase(0)
  # CustomObjectScope is required by MobileNet.
  with CustomObjectScope({'relu6': relu6, 'DepthwiseConv2D': DepthwiseConv2D}):
    builder = saved_model_builder.SavedModelBuilder(export_path)
    signature = predict_signature_def(
        inputs={'input_image': nima.nima_model.input},
        outputs={'quality_prediction': nima.nima_model.output}
    )
    builder.add_meta_graph_and_variables(
        sess=K.get_session(),
        tags=[tag_constants.SERVING],
        signature_def_map={'image_quality': signature}
    )
    builder.save()
  print(f'TF model exported to: {export_path}')
Example 8: export_model
# Required module import: from tensorflow.python.saved_model import tag_constants [as alias]
# Or: from tensorflow.python.saved_model.tag_constants import SERVING [as alias]
def export_model(self, model_dir, global_step_val, last_checkpoint):
  """Exports the model so that it can be used for batch predictions."""
  with self.graph.as_default():
    with tf.Session() as session:
      session.run(tf.global_variables_initializer())
      self.saver.restore(session, last_checkpoint)
      signature = signature_def_utils.build_signature_def(
          inputs=self.inputs,
          outputs=self.outputs,
          method_name=signature_constants.PREDICT_METHOD_NAME)
      signature_map = {
          signature_constants.DEFAULT_SERVING_SIGNATURE_DEF_KEY: signature
      }
      model_builder = saved_model_builder.SavedModelBuilder(model_dir)
      model_builder.add_meta_graph_and_variables(
          session,
          tags=[tag_constants.SERVING],
          signature_def_map=signature_map,
          clear_devices=True)
      model_builder.save()
Example 9: _store_tf
# Required module import: from tensorflow.python.saved_model import tag_constants [as alias]
# Or: from tensorflow.python.saved_model.tag_constants import SERVING [as alias]
def _store_tf(self, name, session):
  json_model_file = open(os.path.join(self.model_path, name + '.json'), "r").read()
  loaded_model = model_from_json(json_model_file)
  loaded_model.load_weights(os.path.join(self.model_path, name + '.h5'))
  builder = saved_model_builder.SavedModelBuilder(os.path.join(self.model_path, 'tf.txt'))
  signature = predict_signature_def(inputs={'states': loaded_model.input},
                                    outputs={'price': loaded_model.output})
  builder.add_meta_graph_and_variables(sess=session,
                                       tags=[tag_constants.SERVING],
                                       signature_def_map={'helpers': signature})
  builder.save()
  _logger.info("Saved tf.txt model to disk")
Example 10: export_model
# Required module import: from tensorflow.python.saved_model import tag_constants [as alias]
# Or: from tensorflow.python.saved_model.tag_constants import SERVING [as alias]
def export_model(self, model_dir, global_step_val, last_checkpoint):
  """Exports the model so that it can be used for batch predictions."""
  with self.graph.as_default():
    with tf.Session() as session:
      session.run(tf.global_variables_initializer())
      self.saver.restore(session, last_checkpoint)
      signature = signature_def_utils.build_signature_def(
          inputs=self.inputs,
          outputs=self.outputs,
          method_name=signature_constants.PREDICT_METHOD_NAME)
      signature_map = {
          signature_constants.DEFAULT_SERVING_SIGNATURE_DEF_KEY: signature
      }
      model_builder = saved_model_builder.SavedModelBuilder(model_dir)
      model_builder.add_meta_graph_and_variables(
          session,
          tags=[tag_constants.SERVING],
          signature_def_map=signature_map,
          clear_devices=True)
      model_builder.save()
Example 11: __init__
# Required module import: from tensorflow.python.saved_model import tag_constants [as alias]
# Or: from tensorflow.python.saved_model.tag_constants import SERVING [as alias]
def __init__(self, name, export_dir):
  """Initialize the TensorFlow Go engine.

  Args:
    name (str): name of the entity
    export_dir (str): path to the exported TensorFlow model
  """
  assert os.path.isdir(export_dir)
  # We will use GNU Go to guide our network.
  self.assistant = GnuGoEngine('assistant', verbose=False)
  # Create the session and load the network.
  self.sess = tf.Session(graph=tf.Graph(), config=tf.ConfigProto(allow_soft_placement=True))
  tf.saved_model.loader.load(self.sess, [tag_constants.SERVING], export_dir)
  # Get the input and output nodes.
  self.features = self.sess.graph.get_tensor_by_name('board_plhdr:0')
  self.prob = self.sess.graph.get_tensor_by_name('probabilities:0')
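The example stops after caching the input and output tensors. A sketch of how inference might then look, assuming board_features is an array shaped to match the 'board_plhdr:0' placeholder:

def predict_move_probabilities(self, board_features):
  # Hypothetical companion method; `board_features` must match the shape
  # expected by the 'board_plhdr:0' placeholder in the loaded graph.
  return self.sess.run(self.prob, feed_dict={self.features: board_features})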
Example 12: save_signature
# Required module import: from tensorflow.python.saved_model import tag_constants [as alias]
# Or: from tensorflow.python.saved_model.tag_constants import SERVING [as alias]
def save_signature(self, directory):
  signature = signature_def_utils.build_signature_def(
      inputs={
          'input': saved_model_utils.build_tensor_info(self.input),
          'dropout_rate': saved_model_utils.build_tensor_info(self.dropout_rate)
      },
      outputs={
          'output': saved_model_utils.build_tensor_info(self.output)
      },
      method_name=signature_constants.PREDICT_METHOD_NAME)
  signature_map = {
      signature_constants.DEFAULT_SERVING_SIGNATURE_DEF_KEY: signature
  }
  model_builder = saved_model_builder.SavedModelBuilder(directory)
  model_builder.add_meta_graph_and_variables(
      self.sess,
      tags=[tag_constants.SERVING],
      signature_def_map=signature_map,
      clear_devices=True)
  model_builder.save(as_text=False)
Example 13: freeze_model
# Required module import: from tensorflow.python.saved_model import tag_constants [as alias]
# Or: from tensorflow.python.saved_model.tag_constants import SERVING [as alias]
def freeze_model(saved_model_dir, output_node_names, output_filename):
  output_graph_filename = os.path.join(saved_model_dir, output_filename)
  initializer_nodes = ''
  freeze_graph.freeze_graph(
      input_saved_model_dir=saved_model_dir,
      output_graph=output_graph_filename,
      saved_model_tags=tag_constants.SERVING,
      output_node_names=output_node_names,
      initializer_nodes=initializer_nodes,
      input_graph=None,
      input_saver=False,
      input_binary=False,
      input_checkpoint=None,
      restore_op_name=None,
      filename_tensor_name=None,
      clear_devices=True,
      input_meta_graph=False,
  )
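A hedged invocation sketch for freeze_model; the directory, node name, and output file name below are placeholders, not values from the original project:

# Hypothetical call; all three arguments are placeholders.
freeze_model(
    saved_model_dir='export/1',
    output_node_names='probabilities',
    output_filename='frozen_graph.pb',
)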
Example 14: export_h5_to_pb
# Required module import: from tensorflow.python.saved_model import tag_constants [as alias]
# Or: from tensorflow.python.saved_model.tag_constants import SERVING [as alias]
def export_h5_to_pb(path_to_h5, export_path):
  # Set the learning phase to test since the model is already trained.
  K.set_learning_phase(0)
  # Load the Keras model.
  keras_model = load_model(path_to_h5)
  # Build the protocol-buffer SavedModel at 'export_path'.
  builder = saved_model_builder.SavedModelBuilder(export_path)
  # Create the prediction signature to be used by the TensorFlow Serving Predict API.
  signature = predict_signature_def(inputs={"http": keras_model.input},
                                    outputs={"probability": keras_model.output})
  with K.get_session() as sess:
    # Save the meta graph and the variables.
    builder.add_meta_graph_and_variables(sess=sess, tags=[tag_constants.SERVING],
                                         signature_def_map={"predict": signature})
    builder.save()
Example 15: export_model
# Required module import: from tensorflow.python.saved_model import tag_constants [as alias]
# Or: from tensorflow.python.saved_model.tag_constants import SERVING [as alias]
def export_model(self, model_dir, global_step_val, last_checkpoint):
  """Exports the model so that it can be used for batch predictions."""
  with self.graph.as_default():
    with tf.Session() as session:
      session.run(tf.global_variables_initializer())
      self.saver.restore(session, last_checkpoint)
      signature = signature_def_utils.build_signature_def(
          inputs=self.inputs,
          outputs=self.outputs,
          method_name=signature_constants.PREDICT_METHOD_NAME)
      signature_map = {
          signature_constants.DEFAULT_SERVING_SIGNATURE_DEF_KEY: signature
      }
      model_builder = saved_model_builder.SavedModelBuilder(model_dir)
      model_builder.add_meta_graph_and_variables(
          session,
          tags=[tag_constants.SERVING],
          signature_def_map=signature_map,
          clear_devices=True)
      model_builder.save()