This article collects typical usage examples of the Python method tensorflow.python.platform.gfile.FastGFile. If you are unsure how gfile.FastGFile is used in practice, or are looking for concrete examples of calling it, the curated code samples below may help. You can also explore further usage examples of the containing module, tensorflow.python.platform.gfile.
The following presents 15 code examples of the gfile.FastGFile method, ordered by popularity by default.
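Before the individual examples, here is a minimal sketch of the pattern most of them share: open a frozen .pb file with gfile.FastGFile in binary mode, parse the bytes into a GraphDef, and import it into a graph. This assumes TensorFlow 1.x and a placeholder file path (in later TensorFlow releases, tf.io.gfile.GFile supersedes the deprecated FastGFile):

import tensorflow as tf
from tensorflow.python.platform import gfile

# Minimal sketch: read a frozen graph (placeholder path) and import it
# into the default graph under an empty name scope.
with gfile.FastGFile('frozen_graph.pb', 'rb') as f:
    graph_def = tf.GraphDef()
    graph_def.ParseFromString(f.read())
tf.import_graph_def(graph_def, name='')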
Example 1: load_graph
# Required import: from tensorflow.python.platform import gfile [as alias]
# Or: from tensorflow.python.platform.gfile import FastGFile [as alias]
def load_graph(graph_path, tensorboard=False, **kwargs):
    '''
    :param graph_path: the path of the .pb file
    :return: tensorflow graph
    '''
    with gfile.FastGFile(graph_path, 'rb') as f:
        graph_def = graph_pb2.GraphDef()
        graph_def.ParseFromString(f.read())
    with tf.Graph().as_default() as graph:
        tf.import_graph_def(graph_def, name="")
        if tensorboard:
            writer = tf.summary.FileWriter("log/")
            writer.add_graph(graph)
    return graph
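For reference, a hedged usage sketch of the load_graph helper above; the .pb path and the tensor names 'input:0' and 'output:0' are placeholders, not values taken from the original project, and must match the actual frozen graph:

import numpy as np
import tensorflow as tf

# Hypothetical call: load the frozen graph, look up its endpoints, run inference.
graph = load_graph('model.pb', tensorboard=False)
inputs = graph.get_tensor_by_name('input:0')
outputs = graph.get_tensor_by_name('output:0')
with tf.Session(graph=graph) as sess:
    result = sess.run(outputs,
                      feed_dict={inputs: np.zeros((1, 224, 224, 3), dtype=np.float32)})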
Example 2: create_inception_graph
# Required import: from tensorflow.python.platform import gfile [as alias]
# Or: from tensorflow.python.platform.gfile import FastGFile [as alias]
def create_inception_graph():
    """Creates a graph from saved GraphDef file and returns a Graph object.

    Returns:
        Graph holding the trained Inception network, and various tensors we'll be
        manipulating.
    """
    with tf.Graph().as_default() as graph:
        model_filename = os.path.join(
            FLAGS.model_dir, 'classify_image_graph_def.pb')
        with gfile.FastGFile(model_filename, 'rb') as f:
            graph_def = tf.GraphDef()
            graph_def.ParseFromString(f.read())
            bottleneck_tensor, jpeg_data_tensor, resized_input_tensor = (
                tf.import_graph_def(graph_def, name='', return_elements=[
                    BOTTLENECK_TENSOR_NAME, JPEG_DATA_TENSOR_NAME,
                    RESIZED_INPUT_TENSOR_NAME]))
    return graph, bottleneck_tensor, jpeg_data_tensor, resized_input_tensor
Example 3: create_bottleneck_file
# Required import: from tensorflow.python.platform import gfile [as alias]
# Or: from tensorflow.python.platform.gfile import FastGFile [as alias]
def create_bottleneck_file(bottleneck_path, image_lists, label_name, index,
                           image_dir, category, sess, jpeg_data_tensor,
                           bottleneck_tensor):
    """Create a single bottleneck file."""
    print('Creating bottleneck at ' + bottleneck_path)
    image_path = get_image_path(image_lists, label_name, index,
                                image_dir, category)
    if not gfile.Exists(image_path):
        tf.logging.fatal('File does not exist %s', image_path)
    image_data = gfile.FastGFile(image_path, 'rb').read()
    try:
        bottleneck_values = run_bottleneck_on_image(
            sess, image_data, jpeg_data_tensor, bottleneck_tensor)
    except Exception:
        raise RuntimeError('Error during processing file %s' % image_path)
    bottleneck_string = ','.join(str(x) for x in bottleneck_values)
    with open(bottleneck_path, 'w') as bottleneck_file:
        bottleneck_file.write(bottleneck_string)
Example 4: create_inception_graph
# Required import: from tensorflow.python.platform import gfile [as alias]
# Or: from tensorflow.python.platform.gfile import FastGFile [as alias]
def create_inception_graph():
    """Creates a graph from saved GraphDef file and returns a Graph object.

    Returns:
        Graph holding the trained Inception network, and various tensors we'll be
        manipulating.
    """
    with tf.Session() as sess:
        model_filename = os.path.join(
            FLAGS.model_dir, 'classify_image_graph_def.pb')
        with gfile.FastGFile(model_filename, 'rb') as f:
            graph_def = tf.GraphDef()
            graph_def.ParseFromString(f.read())
            bottleneck_tensor, jpeg_data_tensor, resized_input_tensor = (
                tf.import_graph_def(graph_def, name='', return_elements=[
                    BOTTLENECK_TENSOR_NAME, JPEG_DATA_TENSOR_NAME,
                    RESIZED_INPUT_TENSOR_NAME]))
    return sess.graph, bottleneck_tensor, jpeg_data_tensor, resized_input_tensor
Example 5: load_model
# Required import: from tensorflow.python.platform import gfile [as alias]
# Or: from tensorflow.python.platform.gfile import FastGFile [as alias]
def load_model(model, input_map=None):
    # Check if the model is a model directory (containing a metagraph and a checkpoint file)
    # or if it is a protobuf file with a frozen graph
    model_exp = os.path.expanduser(model)
    if os.path.isfile(model_exp):
        print('Model filename: %s' % model_exp)
        with gfile.FastGFile(model_exp, 'rb') as f:
            graph_def = tf.GraphDef()
            graph_def.ParseFromString(f.read())
            tf.import_graph_def(graph_def, input_map=input_map, name='')
    else:
        print('Model directory: %s' % model_exp)
        meta_file, ckpt_file = get_model_filenames(model_exp)
        print('Metagraph file: %s' % meta_file)
        print('Checkpoint file: %s' % ckpt_file)
        saver = tf.train.import_meta_graph(os.path.join(model_exp, meta_file), input_map=input_map)
        saver.restore(tf.get_default_session(), os.path.join(model_exp, ckpt_file))
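Because the checkpoint branch of this load_model calls tf.get_default_session(), it is normally invoked inside an active session. A hedged call-pattern sketch; the model path and the tensor names are placeholders, not values taken from the project above:

import tensorflow as tf

# Hypothetical call pattern; an active default session is required when the
# argument points at a checkpoint directory rather than a frozen .pb file.
with tf.Graph().as_default():
    with tf.Session() as sess:
        load_model('/path/to/model')
        inputs = tf.get_default_graph().get_tensor_by_name('input:0')
        embeddings = tf.get_default_graph().get_tensor_by_name('embeddings:0')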
Example 6: create_model_graph
# Required import: from tensorflow.python.platform import gfile [as alias]
# Or: from tensorflow.python.platform.gfile import FastGFile [as alias]
def create_model_graph(model_info):
    """Creates a graph from saved GraphDef file and returns a Graph object.

    Args:
        model_info: Dictionary containing information about the model architecture.

    Returns:
        Graph holding the trained Inception network, and various tensors we'll be
        manipulating.
    """
    with tf.Graph().as_default() as graph:
        model_path = os.path.join(FLAGS.model_dir, model_info['model_file_name'])
        with gfile.FastGFile(model_path, 'rb') as f:
            graph_def = tf.GraphDef()
            graph_def.ParseFromString(f.read())
            bottleneck_tensor, resized_input_tensor = (tf.import_graph_def(
                graph_def,
                name='',
                return_elements=[
                    model_info['bottleneck_tensor_name'],
                    model_info['resized_input_tensor_name'],
                ]))
    return graph, bottleneck_tensor, resized_input_tensor
Example 7: create_model_graph_bis
# Required import: from tensorflow.python.platform import gfile [as alias]
# Or: from tensorflow.python.platform.gfile import FastGFile [as alias]
def create_model_graph_bis(model_info, model_dir):
    """Creates a graph from saved GraphDef file and returns a Graph object.

    Args:
        model_info: Dictionary containing information about the model architecture.
        model_dir: String containing where the model is saved.

    Returns:
        Graph holding the trained Inception network, and various tensors we'll be
        manipulating.
    """
    with tf.Graph().as_default() as graph:
        model_path = os.path.join(model_dir, model_info['model_file_name'])
        with gfile.FastGFile(model_path, 'rb') as f:
            graph_def = tf.GraphDef()
            graph_def.ParseFromString(f.read())
            bottleneck_tensor, resized_input_tensor = (tf.import_graph_def(
                graph_def,
                name='',
                return_elements=[
                    model_info['bottleneck_tensor_name'],
                    model_info['resized_input_tensor_name'],
                ]))
    return graph, bottleneck_tensor, resized_input_tensor
Example 8: load_model
# Required import: from tensorflow.python.platform import gfile [as alias]
# Or: from tensorflow.python.platform.gfile import FastGFile [as alias]
def load_model(model):
    """Load the deep learning model.

    Args:
        model: model path
    Returns:
        tensorflow graph
    """
    logger.info(msg="load_model called")
    model_exp = os.path.expanduser(model)
    if os.path.isfile(model_exp):
        with gfile.FastGFile(model_exp, 'rb') as f:
            graph_def = tf.GraphDef()
            graph_def.ParseFromString(f.read())
            # import_graph_def returns None when no return_elements are requested,
            # so return the graph the nodes were imported into instead.
            tf.import_graph_def(graph_def, name='')
        return tf.get_default_graph()
    else:
        meta_file, ckpt_file = get_model_filenames(model_exp)
        saver = tf.train.import_meta_graph(os.path.join(model_exp, meta_file))
        # Saver.restore returns None; the restored graph is the default graph.
        saver.restore(tf.get_default_session(),
                      os.path.join(model_exp, ckpt_file))
        return tf.get_default_graph()
Example 9: load_model
# Required import: from tensorflow.python.platform import gfile [as alias]
# Or: from tensorflow.python.platform.gfile import FastGFile [as alias]
def load_model(model, input_map=None):
    logger.info(msg="load_model called")
    # Check if the model is a model directory (containing a metagraph and
    # a checkpoint file) or if it is a protobuf file with a frozen graph
    model_exp = os.path.expanduser(model)
    if os.path.isfile(model_exp):
        print('Model filename: %s' % model_exp)
        with gfile.FastGFile(model_exp, 'rb') as f:
            graph_def = tf.GraphDef()
            graph_def.ParseFromString(f.read())
            tf.import_graph_def(graph_def, input_map=input_map, name='')
    else:
        print('Model directory: %s' % model_exp)
        meta_file, ckpt_file = get_model_filenames(model_exp)
        print('Metagraph file: %s' % meta_file)
        print('Checkpoint file: %s' % ckpt_file)
        saver = tf.train.import_meta_graph(os.path.join(model_exp, meta_file),
                                           input_map=input_map)
        saver.restore(tf.get_default_session(),
                      os.path.join(model_exp, ckpt_file))
Example 10: create_model_graph
# Required import: from tensorflow.python.platform import gfile [as alias]
# Or: from tensorflow.python.platform.gfile import FastGFile [as alias]
def create_model_graph(model_info):
    """Creates a graph from saved GraphDef file and returns a Graph object.

    Args:
        model_info: Dictionary containing information about the model architecture.

    Returns:
        Graph holding the trained Inception network, and various tensors we'll be
        manipulating.
    """
    with tf.Graph().as_default() as graph:
        model_path = os.path.join(FLAGS.model_dir, model_info['model_file_name'])
        with gfile.FastGFile(model_path, 'rb') as f:
            graph_def = tf.GraphDef()
            graph_def.ParseFromString(f.read())
            bottleneck_tensor, resized_input_tensor = (tf.import_graph_def(
                graph_def,
                name='',
                return_elements=[
                    model_info['bottleneck_tensor_name'],
                    model_info['resized_input_tensor_name'],
                ]))
    return graph, bottleneck_tensor, resized_input_tensor
Example 11: create_bottleneck_file
# Required import: from tensorflow.python.platform import gfile [as alias]
# Or: from tensorflow.python.platform.gfile import FastGFile [as alias]
def create_bottleneck_file(bottleneck_path, image_lists, label_name, index,
                           image_dir, category, sess, jpeg_data_tensor,
                           decoded_image_tensor, resized_input_tensor,
                           bottleneck_tensor):
    """Create a single bottleneck file."""
    tf.logging.info('Creating bottleneck at ' + bottleneck_path)
    image_path = get_image_path(image_lists, label_name, index,
                                image_dir, category)
    if not gfile.Exists(image_path):
        tf.logging.fatal('File does not exist %s', image_path)
    image_data = gfile.FastGFile(image_path, 'rb').read()
    try:
        bottleneck_values = run_bottleneck_on_image(
            sess, image_data, jpeg_data_tensor, decoded_image_tensor,
            resized_input_tensor, bottleneck_tensor)
    except Exception as e:
        raise RuntimeError('Error during processing file %s (%s)' % (image_path,
                                                                     str(e)))
    bottleneck_string = ','.join(str(x) for x in bottleneck_values)
    with open(bottleneck_path, 'w') as bottleneck_file:
        bottleneck_file.write(bottleneck_string)
Example 12: load_model
# Required import: from tensorflow.python.platform import gfile [as alias]
# Or: from tensorflow.python.platform.gfile import FastGFile [as alias]
def load_model(model):
    # Check if the model is a model directory (containing a metagraph and a checkpoint file)
    # or if it is a protobuf file with a frozen graph
    model_exp = os.path.expanduser(model)
    if os.path.isfile(model_exp):
        print('Model filename: %s' % model_exp)
        with gfile.FastGFile(model_exp, 'rb') as f:
            graph_def = tf.GraphDef()
            graph_def.ParseFromString(f.read())
            tf.import_graph_def(graph_def, name='')
    else:
        print('Model directory: %s' % model_exp)
        meta_file, ckpt_file = get_model_filenames(model_exp)
        print('Metagraph file: %s' % meta_file)
        print('Checkpoint file: %s' % ckpt_file)
        saver = tf.train.import_meta_graph(os.path.join(model_exp, meta_file))
        saver.restore(tf.get_default_session(), os.path.join(model_exp, ckpt_file))
Example 13: create_tfevent_from_pb
# Required import: from tensorflow.python.platform import gfile [as alias]
# Or: from tensorflow.python.platform.gfile import FastGFile [as alias]
def create_tfevent_from_pb(model, optimized=False):
    print("> creating tfevent of model: {}".format(model))
    if optimized:
        model_path = ROOT_DIR + '/models/{}/optimized_inference_graph.pb'.format(model)
        log_dir = ROOT_DIR + '/models/{}/log_opt/'.format(model)
    else:
        model_path = ROOT_DIR + '/models/{}/frozen_inference_graph.pb'.format(model)
        log_dir = ROOT_DIR + '/models/{}/log/'.format(model)
    with session.Session(graph=ops.Graph()) as sess:
        with gfile.FastGFile(model_path, "rb") as f:
            graph_def = graph_pb2.GraphDef()
            graph_def.ParseFromString(f.read())
            importer.import_graph_def(graph_def)
        pb_visual_writer = summary.FileWriter(log_dir)
        pb_visual_writer.add_graph(sess.graph)
        print("> Model {} Imported.\nVisualize by running: "
              "tensorboard --logdir={}".format(model_path, log_dir))

# Gather all model names in models/
Example 14: create_model_graph
# Required import: from tensorflow.python.platform import gfile [as alias]
# Or: from tensorflow.python.platform.gfile import FastGFile [as alias]
def create_model_graph(model_info):
    """Creates a graph from saved GraphDef file and returns a Graph object.

    Args:
        model_info: Dictionary containing information about the model architecture.

    Returns:
        Graph holding the trained Inception network, and various tensors we'll be
        manipulating.
    """
    with tf.Graph().as_default() as graph:
        model_path = os.path.join(FLAGS.model_dir, model_info['model_file_name'])
        print('Model path: ', model_path)
        with gfile.FastGFile(model_path, 'rb') as f:
            graph_def = tf.compat.v1.GraphDef()
            graph_def.ParseFromString(f.read())
            bottleneck_tensor, resized_input_tensor = (tf.import_graph_def(
                graph_def,
                name='',
                return_elements=[
                    model_info['bottleneck_tensor_name'],
                    model_info['resized_input_tensor_name'],
                ]))
    return graph, bottleneck_tensor, resized_input_tensor
Example 15: RewriteContext
# Required import: from tensorflow.python.platform import gfile [as alias]
# Or: from tensorflow.python.platform.gfile import FastGFile [as alias]
def RewriteContext():
    context = task_spec_pb2.TaskSpec()
    with gfile.FastGFile(FLAGS.task_context, 'rb') as fin:
        text_format.Merge(fin.read(), context)
    for resource in context.input:
        if resource.creator == StageName():
            del resource.part[:]
            part = resource.part.add()
            part.file_pattern = os.path.join(OutputPath(resource.name))
    with gfile.FastGFile(OutputPath('context'), 'w') as fout:
        fout.write(str(context))