This article collects typical usage examples of the Python method tensorflow.compat.v1.tables_initializer. If you are wondering what v1.tables_initializer does, how to call it, or how it is used in practice, the curated examples below may help. You can also explore further usage examples from the module it belongs to, tensorflow.compat.v1.
Below, 15 code examples of v1.tables_initializer are shown, sorted by popularity by default.
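Before the examples, a minimal self-contained sketch of the op itself may help (the table contents below are invented for illustration, and the snippet assumes TF 1.x-style graph mode, hence tf.disable_v2_behavior() when running under TF2): tf.tables_initializer() returns a single op that initializes every lookup table registered in the default graph, and it must be run once per session before any table lookup is evaluated.

import tensorflow.compat.v1 as tf

tf.disable_v2_behavior()

# Build a string-to-id lookup table; unknown keys map to default_value.
keys = tf.constant(['cat', 'dog'])
values = tf.constant([0, 1], dtype=tf.int64)
table = tf.lookup.StaticHashTable(
    tf.lookup.KeyValueTensorInitializer(keys, values), default_value=-1)
ids = table.lookup(tf.constant(['dog', 'lizard']))

with tf.Session() as sess:
  sess.run(tf.tables_initializer())  # initializes all tables in the graph
  print(sess.run(ids))  # [ 1 -1]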
Example 1: testSingleElement
# Required import: from tensorflow.compat import v1 [as alias]
# Or: from tensorflow.compat.v1 import tables_initializer [as alias]
def testSingleElement(self):
  with tf.Session() as sess:
    with tempfile.NamedTemporaryFile(mode='w+t', delete=False) as vocab:
      vocab.writelines([word + '\n' for word in self.vocab])
      vocab.flush()
      preprocessing_fn = utils.metrics_preprocessing_fn(
          vocab.name, 'text_a', 'lang')
      outputs = preprocessing_fn(self.raw_data)
      tf.tables_initializer().run()
      outputs = sess.run(outputs)
      self.assertEqual(outputs['lang'], 'en')
      self.assertEqual(outputs['num_non_unk_wordpieces'], 7)
      self.assertEqual(outputs['num_preserved_chars'], 20)
      self.assertEqual(outputs['num_dropped_chars'], 3)
      self.assertSequenceAlmostEqual(outputs['wordpieces'].values,
                                     self.expected_wordpieces)
Example 2: test_string_to_int_mapper
# Required import: from tensorflow.compat import v1 [as alias]
# Or: from tensorflow.compat.v1 import tables_initializer [as alias]
def test_string_to_int_mapper(self):
  with tf.Graph().as_default():
    dataset = tf.data.Dataset.from_tensor_slices({
        "s": [["a", "b"], ["c", "d"]]
    })
    dataset = dataset.map(dataset_utils.string_to_int_mapper(
        ["s"], ["a", "c"]))
    dataset = dataset.batch(2)
    self.assertDictEqual(dataset.output_types, {"s": tf.string,
                                                "s_id": tf.int32})
    iterator = dataset.make_initializable_iterator()
    features = iterator.get_next()
    with tf.Session() as sess:
      sess.run([tf.tables_initializer(), iterator.initializer])
      tf_s, tf_s_id = sess.run([features["s"], features["s_id"]])
      self.assertAllEqual(tf_s, [["a", "b"], ["c", "d"]])
      self.assertAllEqual(tf_s_id, [[0, 2], [1, 2]])
Example 3: testExportTokenEmbeddingModule
# Required import: from tensorflow.compat import v1 [as alias]
# Or: from tensorflow.compat.v1 import tables_initializer [as alias]
def testExportTokenEmbeddingModule(self):
  export.export_module_from_file(
      embedding_file=self._embedding_file_path,
      export_path=self.get_temp_dir(),
      parse_line_fn=export.parse_line,
      num_oov_buckets=1,
      preprocess_text=False)
  with tf.Graph().as_default():
    hub_module = hub.Module(self.get_temp_dir())
    tokens = tf.constant(["cat", "lizard", "dog"])
    embeddings = hub_module(tokens)
    with tf.Session() as session:
      session.run(tf.tables_initializer())
      session.run(tf.global_variables_initializer())
      self.assertAllClose(
          session.run(embeddings),
          [[1.11, 2.56, 3.45], [0.0, 0.0, 0.0], [1.0, 2.0, 3.0]])
Example 4: testExportFulltextEmbeddingModule
# Required import: from tensorflow.compat import v1 [as alias]
# Or: from tensorflow.compat.v1 import tables_initializer [as alias]
def testExportFulltextEmbeddingModule(self):
  export.export_module_from_file(
      embedding_file=self._embedding_file_path,
      export_path=self.get_temp_dir(),
      parse_line_fn=export.parse_line,
      num_oov_buckets=1,
      preprocess_text=True)
  with tf.Graph().as_default():
    hub_module = hub.Module(self.get_temp_dir())
    tokens = tf.constant(["cat", "cat cat", "lizard. dog", "cat? dog", ""])
    embeddings = hub_module(tokens)
    with tf.Session() as session:
      session.run(tf.tables_initializer())
      session.run(tf.global_variables_initializer())
      self.assertAllClose(
          session.run(embeddings),
          [[1.11, 2.56, 3.45], [1.57, 3.62, 4.88], [0.70, 1.41, 2.12],
           [1.49, 3.22, 4.56], [0.0, 0.0, 0.0]],
          rtol=0.02)
Example 5: testEmptyInput
# Required import: from tensorflow.compat import v1 [as alias]
# Or: from tensorflow.compat.v1 import tables_initializer [as alias]
def testEmptyInput(self):
  export.export_module_from_file(
      embedding_file=self._embedding_file_path,
      export_path=self.get_temp_dir(),
      parse_line_fn=export.parse_line,
      num_oov_buckets=1,
      preprocess_text=True)
  with tf.Graph().as_default():
    hub_module = hub.Module(self.get_temp_dir())
    tokens = tf.constant(["", "", ""])
    embeddings = hub_module(tokens)
    with tf.Session() as session:
      session.run(tf.tables_initializer())
      session.run(tf.global_variables_initializer())
      self.assertAllClose(
          session.run(embeddings),
          [[0.0, 0.0, 0.0], [0.0, 0.0, 0.0], [0.0, 0.0, 0.0]],
          rtol=0.02)
Example 6: testEmptyLeading
# Required import: from tensorflow.compat import v1 [as alias]
# Or: from tensorflow.compat.v1 import tables_initializer [as alias]
def testEmptyLeading(self):
  export.export_module_from_file(
      embedding_file=self._embedding_file_path,
      export_path=self.get_temp_dir(),
      parse_line_fn=export.parse_line,
      num_oov_buckets=1,
      preprocess_text=True)
  with tf.Graph().as_default():
    hub_module = hub.Module(self.get_temp_dir())
    tokens = tf.constant(["", "cat dog"])
    embeddings = hub_module(tokens)
    with tf.Session() as session:
      session.run(tf.tables_initializer())
      session.run(tf.global_variables_initializer())
      self.assertAllClose(
          session.run(embeddings),
          [[0.0, 0.0, 0.0], [1.49, 3.22, 4.56]],
          rtol=0.02)
Example 7: _build_eval_graph
# Required import: from tensorflow.compat import v1 [as alias]
# Or: from tensorflow.compat.v1 import tables_initializer [as alias]
def _build_eval_graph(self, scope_name=None):
  """Build the evaluation graph.

  Args:
    scope_name: String to filter what summaries are collected. Only summary
      ops whose name contains `scope_name` will be added, which is useful for
      only including evaluation ops.

  Returns:
    A GraphInfo named_tuple containing various useful ops and tensors of the
    evaluation graph.
  """
  with self._do_eval():
    input_producer_op, enqueue_ops, fetches = self._build_model()
    local_var_init_op = tf.local_variables_initializer()
    table_init_ops = tf.tables_initializer()
    variable_mgr_init_ops = [local_var_init_op]
    if table_init_ops:
      variable_mgr_init_ops.extend([table_init_ops])
    with tf.control_dependencies([local_var_init_op]):
      variable_mgr_init_ops.extend(self.variable_mgr.get_post_init_ops())
    local_var_init_op_group = tf.group(*variable_mgr_init_ops)
    summary_op = tf.summary.merge_all(scope=scope_name)
    # The eval graph has no execution barrier because it doesn't run in
    # distributed mode.
    execution_barrier = None
    # We do not use the global step during evaluation.
    global_step = None
    return GraphInfo(input_producer_op, enqueue_ops, fetches,
                     execution_barrier, global_step, local_var_init_op_group,
                     summary_op)

# TODO(reedwm): For consistency, we should have a similar
# "_initialize_train_graph" function. They can likely be the same function.
Example 8: compute_data_mean_and_std
# Required import: from tensorflow.compat import v1 [as alias]
# Or: from tensorflow.compat.v1 import tables_initializer [as alias]
def compute_data_mean_and_std(data, axis, num_samples):
  """Computes data mean and std."""
  with tf.Session() as sess:
    sess.run([
        tf.global_variables_initializer(),
        tf.local_variables_initializer(),
        tf.tables_initializer()
    ])
    with tf_slim.queues.QueueRunners(sess):
      data_value = np.concatenate(
          [sess.run(data) for _ in range(num_samples)], axis=0)
  mean = np.mean(data_value, axis=tuple(axis), keepdims=True)
  std = np.std(data_value, axis=tuple(axis), keepdims=True)
  return mean, std
Example 9: __init__
# Required import: from tensorflow.compat import v1 [as alias]
# Or: from tensorflow.compat.v1 import tables_initializer [as alias]
def __init__(self, config_dict, model_location, max_padded_length=0,
             num_perturbations=0):
  self.graph_tensor_producer = robust_model.RobustModel(**config_dict)
  self.batch_size = self.graph_tensor_producer.batch_size
  if max_padded_length:
    self.graph_tensor_producer.config.max_padded_length = max_padded_length
  if num_perturbations:
    self.graph_tensor_producer.config.num_perturbations = num_perturbations
  self.graph_tensors = self.graph_tensor_producer()
  network_saver = tf.train.Saver(self.graph_tensor_producer.variables)
  self.open_session = tf.Session()
  self.open_session.run(tf.tables_initializer())
  network_saver.restore(self.open_session, model_location)
Example 10: testLargerBatchSize
# Required import: from tensorflow.compat import v1 [as alias]
# Or: from tensorflow.compat.v1 import tables_initializer [as alias]
def testLargerBatchSize(self):
  with tf.Session() as sess:
    with tempfile.NamedTemporaryFile(mode='w+t', delete=False) as vocab:
      raw_data = {
          'label': ['1', '2'],
          'text_a': ['The boy jumped into the air.', 'The cat sat on a hat.'],
          'lang': ['en', 'en'],
      }
      expected_wordpieces = ['The', '[UNK]', 'jumped', 'in', '##to', 'the',
                             'air', '.', 'The', 'cat', 'sat', 'on', 'a', 'h',
                             '##at', '.']
      vocab.writelines([word + '\n' for word in self.vocab])
      vocab.flush()
      preprocessing_fn = utils.metrics_preprocessing_fn(
          vocab.name, 'text_a', 'lang')
      outputs = preprocessing_fn(raw_data)
      tf.tables_initializer().run()
      outputs = sess.run(outputs)
      self.assertSequenceAlmostEqual(outputs['lang'], ['en', 'en'])
      self.assertSequenceAlmostEqual(outputs['num_preserved_chars'], [20, 16])
      self.assertSequenceAlmostEqual(outputs['num_dropped_chars'], [3, 0])
      self.assertSequenceAlmostEqual(outputs['wordpieces'].values,
                                     expected_wordpieces)
      self.assertSequenceAlmostEqual(outputs['num_non_unk_wordpieces'],
                                     [7, 8])
Example 11: evaluate
# Required import: from tensorflow.compat import v1 [as alias]
# Or: from tensorflow.compat.v1 import tables_initializer [as alias]
def evaluate():
  """Evaluate a model on the dev set."""
  sess = tf.Session()
  tf.logging.info("Building graph...")

  embeddings = load_embeddings()
  tf_data = load_batched_dataset(False, embeddings)
  it = tf_data.make_initializable_iterator()
  features, labels = it.get_next()
  logits = predict(False, embeddings, features["premise"],
                   features["hypothesis"])
  accuracy, update_ops = tf.metrics.accuracy(
      tf.argmax(logits, 1, output_type=tf.int32), tf.to_int32(labels))

  tf.logging.info("Running initializers...")
  checkpoint_file = FLAGS.checkpoint_file
  if checkpoint_file is not None:
    saver = tf.train.Saver(tf.trainable_variables())
    tf.logging.info("Restoring from checkpoint: " + checkpoint_file)
    saver.restore(sess, checkpoint_file)
  else:
    tf.logging.warning("No checkpoint given, evaluating model with random "
                       "weights")
    sess.run(tf.global_variables_initializer())
  sess.run(tf.local_variables_initializer())
  sess.run(tf.tables_initializer())
  sess.run(it.initializer)

  tf.logging.info("Starting loop....")
  while True:
    try:
      sess.run(update_ops)
    except tf.errors.OutOfRangeError:
      break
  tf.logging.info("Done")

  accuracy = sess.run(accuracy)
  print("Accuracy: %f" % accuracy)
Example 12: execute_tpu_tf1
# Required import: from tensorflow.compat import v1 [as alias]
# Or: from tensorflow.compat.v1 import tables_initializer [as alias]
def execute_tpu_tf1(self, compute_fn, inputs, graph=None):
  """Executes compute_fn on TPU with Tensorflow 1.X.

  Args:
    compute_fn: a function containing Tensorflow computation that takes a list
      of input numpy tensors, performs computation and returns output numpy
      tensors.
    inputs: a list of numpy arrays to feed input to the `compute_fn`.
    graph: (optional) If not None, provided `graph` is used for computation
      instead of a brand new tf.Graph().

  Returns:
    A list of numpy arrays or a single numpy array.
  """
  with self.session(graph=(graph or tf.Graph())) as sess:
    placeholders = [tf.placeholder_with_default(v, v.shape) for v in inputs]

    def wrap_graph_fn(*args, **kwargs):
      results = compute_fn(*args, **kwargs)
      if (not (isinstance(results, dict) or isinstance(results, tf.Tensor))
          and hasattr(results, '__iter__')):
        results = list(results)
      return results

    tpu_computation = contrib_tpu.rewrite(wrap_graph_fn, placeholders)
    sess.run(contrib_tpu.initialize_system())
    sess.run([tf.global_variables_initializer(), tf.tables_initializer(),
              tf.local_variables_initializer()])
    materialized_results = sess.run(tpu_computation,
                                    feed_dict=dict(zip(placeholders, inputs)))
    sess.run(contrib_tpu.shutdown_system())
    return self.maybe_extract_single_output(materialized_results)
Example 13: execute_cpu_tf1
# Required import: from tensorflow.compat import v1 [as alias]
# Or: from tensorflow.compat.v1 import tables_initializer [as alias]
def execute_cpu_tf1(self, compute_fn, inputs, graph=None):
  """Executes compute_fn on CPU with Tensorflow 1.X.

  Args:
    compute_fn: a function containing Tensorflow computation that takes a list
      of input numpy tensors, performs computation and returns output numpy
      tensors.
    inputs: a list of numpy arrays to feed input to the `compute_fn`.
    graph: (optional) If not None, provided `graph` is used for computation
      instead of a brand new tf.Graph().

  Returns:
    A list of numpy arrays or a single numpy array.
  """
  if self.is_tf2():
    raise ValueError('Required version Tensorflow 1.X is not available.')
  with self.session(graph=(graph or tf.Graph())) as sess:
    placeholders = [tf.placeholder_with_default(v, v.shape) for v in inputs]
    results = compute_fn(*placeholders)
    if (not (isinstance(results, dict) or isinstance(results, tf.Tensor)) and
        hasattr(results, '__iter__')):
      results = list(results)
    sess.run([tf.global_variables_initializer(), tf.tables_initializer(),
              tf.local_variables_initializer()])
    materialized_results = sess.run(results, feed_dict=dict(zip(placeholders,
                                                                inputs)))
    return self.maybe_extract_single_output(materialized_results)
Example 14: main
# Required import: from tensorflow.compat import v1 [as alias]
# Or: from tensorflow.compat.v1 import tables_initializer [as alias]
def main(_):
  tf.gfile.MakeDirs(os.path.dirname(FLAGS.output_tfrecord))
  tokenizer = tokenization.FullTokenizer(vocab_file=FLAGS.vocab_path,
                                         do_lower_case=True)
  annotations_zipfn = os.path.join(FLAGS.data_dir, "vcr1annots.zip")
  images_zipfn = os.path.join(FLAGS.data_dir, "vcr1images.zip")

  # Generate data for all splits:
  for split in ["train", "val", "test"]:
    jsonl_file = split + ".jsonl"
    output_tfrecord = "-".join([FLAGS.output_tfrecord,
                                split,
                                "%05d" % FLAGS.shard,
                                "of",
                                "%05d" % FLAGS.num_shards])
    with tf.python_io.TFRecordWriter(output_tfrecord) as writer:
      with tf.Session() as sess:
        sess.run(tf.global_variables_initializer())
        sess.run(tf.tables_initializer())
        with zipfile.ZipFile(
            tf.gfile.Open(annotations_zipfn)) as annotations_zip:
          with zipfile.ZipFile(tf.gfile.Open(images_zipfn)) as images_zip:
            with annotations_zip.open(jsonl_file) as jsonl:
              for idx, line in enumerate(jsonl):
                if idx % FLAGS.num_shards != FLAGS.shard:
                  continue
                example = json.loads(line)
                meta_filename = "vcr1images/" + example["metadata_fn"]
                meta = json.loads(images_zip.open(meta_filename).read())
                del meta["segms"]
                try:
                  image_filename = "vcr1images/" + example["img_fn"]
                  tf.logging.info("Reading %s", image_filename)
                  with images_zip.open(image_filename) as image:
                    image_string = image.read()
                except zipfile.BadZipfile as e:
                  tf.logging.error("Bad Zip file: " + str(e))
                  image_string = BLANK_JPEG
                for box in meta["boxes"]:
                  box[0] = 0.0
                  box[1] = 0.0
                  box[2] = 1.0
                  box[3] = 1.0
                is_test = (split == "test")
                for tf_example in create_tf_examples(tokenizer, example,
                                                     image_string, meta,
                                                     is_test=is_test):
                  writer.write(tf_example.SerializeToString())
Example 15: __init__
# Required import: from tensorflow.compat import v1 [as alias]
# Or: from tensorflow.compat.v1 import tables_initializer [as alias]
def __init__(self, annoy_index_path, unique_strings,
             use_sentence_piece, module_path):
  self.annoy_index_path = annoy_index_path
  self.unique_strings = unique_strings

  # Load the annoy index for mmap speed.
  # 512 is the length of each item vector that will be indexed.
  self.annoy_index = AnnoyIndex(512, metric='angular')
  # Super fast, will just mmap the file.
  self.annoy_index.load(self.annoy_index_path)

  g = tf.Graph()
  with g.as_default():
    # Define the module.
    module = hub.Module(module_path, trainable=False)
    if use_sentence_piece:
      # Build an input placeholder.
      self.input_placeholder = tf.sparse_placeholder(
          tf.int64, shape=[None, None])
      # Build an input / output from the placeholders.
      self.embeddings = module(inputs=dict(
          values=self.input_placeholder.values,
          indices=self.input_placeholder.indices,
          dense_shape=self.input_placeholder.dense_shape))
    else:
      # Build an input placeholder.
      self.input_placeholder = tf.placeholder(
          tf.string, shape=(None))
      self.embeddings = module(self.input_placeholder)
    init_op = tf.group([tf.global_variables_initializer(),
                        tf.tables_initializer()])

  # Do not finalize the graph if using a SentencePiece module.
  if not use_sentence_piece:
    g.finalize()

  # Define the session configuration.
  config = tf.ConfigProto(allow_soft_placement=True)
  self.sess = tf.Session(graph=g, config=config)
  self.sess.run(init_op)

  if use_sentence_piece:
    # spm_path now contains a path to the SentencePiece
    # model stored inside the TF-Hub module.
    with g.as_default():
      spm_path = self.sess.run(module(signature="spm_path"))
    self.sp = spm.SentencePieceProcessor()
    self.sp.Load(spm_path)

  tf.logging.info('Interactive session is initialized...')