This article collects typical usage examples of the Python method beam_search.BeamSearch. If you are unsure exactly what beam_search.BeamSearch does, how to call it, or what it looks like in practice, the curated examples below may help. You can also explore further usage of the beam_search module it belongs to.
The two code examples of beam_search.BeamSearch shown below are ordered by popularity by default.
Example 1: _Decode
# Required module: import beam_search [as alias]
# Or: from beam_search import BeamSearch [as alias]
def _Decode(self, saver, sess):
  """Restore a checkpoint and decode it.

  Args:
    saver: Tensorflow checkpoint saver.
    sess: Tensorflow session.
  Returns:
    If success, returns true, otherwise, false.
  """
  ckpt_state = tf.train.get_checkpoint_state(FLAGS.log_root)
  if not (ckpt_state and ckpt_state.model_checkpoint_path):
    tf.logging.info('No model to decode yet at %s', FLAGS.log_root)
    return False

  tf.logging.info('checkpoint path %s', ckpt_state.model_checkpoint_path)
  ckpt_path = os.path.join(
      FLAGS.log_root, os.path.basename(ckpt_state.model_checkpoint_path))
  tf.logging.info('renamed checkpoint path %s', ckpt_path)
  saver.restore(sess, ckpt_path)

  self._decode_io.ResetFiles()
  for _ in xrange(FLAGS.decode_batches_per_ckpt):
    (article_batch, _, _, article_lens, _, _, origin_articles,
     origin_abstracts) = self._batch_reader.NextBatch()
    for i in xrange(self._hps.batch_size):
      # One BeamSearch per article; batch_size doubles as the beam width here.
      bs = beam_search.BeamSearch(
          self._model, self._hps.batch_size,
          self._vocab.WordToId(data.SENTENCE_START),
          self._vocab.WordToId(data.SENTENCE_END),
          self._hps.dec_timesteps)

      # Replicate the i-th article across the whole batch so beam search
      # decodes a single example at a time.
      article_batch_cp = article_batch.copy()
      article_batch_cp[:] = article_batch[i:i+1]
      article_lens_cp = article_lens.copy()
      article_lens_cp[:] = article_lens[i:i+1]
      # Take the best hypothesis and drop the leading start-of-sentence token.
      best_beam = bs.BeamSearch(sess, article_batch_cp, article_lens_cp)[0]
      decode_output = [int(t) for t in best_beam.tokens[1:]]
      self._DecodeBatch(
          origin_articles[i], origin_abstracts[i], decode_output)
  return True
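To summarize the call pattern in Example 1: the wrapper is constructed as BeamSearch(model, beam_size, start_id, end_id, max_decode_steps) and invoked as bs.BeamSearch(sess, enc_inputs, enc_input_lens), which returns hypotheses sorted by score. The minimal sketch below isolates that pattern in a hypothetical helper; decode_one and all of its parameters are stand-ins, not part of the original code.

# Hypothetical helper isolating the call pattern from Example 1; all parameter
# names are placeholders rather than objects from the original class.
import beam_search
import data

def decode_one(sess, model, vocab, hps, enc_batch, enc_lens):
    bs = beam_search.BeamSearch(
        model,                                # seq2seq attention model
        hps.batch_size,                       # beam width (batch_size reused as beam size)
        vocab.WordToId(data.SENTENCE_START),  # id of the sentence-start token
        vocab.WordToId(data.SENTENCE_END),    # id of the sentence-end token
        hps.dec_timesteps)                    # maximum number of decoding steps
    # Hypotheses come back sorted by score; keep the best and drop the start token.
    best_hyp = bs.BeamSearch(sess, enc_batch, enc_lens)[0]
    return [int(t) for t in best_hyp.tokens[1:]]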
Example 2: _init_graph
# Required module: import beam_search [as alias]
# Or: from beam_search import BeamSearch [as alias]
def _init_graph(self):
    """Build the inference graph and restore the latest checkpoint."""
    self.ys = (self.input_y, None, None)
    self.xs = (self.input_x, None)
    self.memory = self.model.encode(self.xs, False)[0]
    self.logits = self.model.decode(self.xs, self.ys, self.memory, False)[0]

    # Pick the most recent checkpoint recorded in the checkpoint state file.
    ckpt = self.tf.train.get_checkpoint_state(self.model_dir).all_model_checkpoint_paths[-1]
    graph = self.logits.graph
    sess_config = self.tf.ConfigProto(allow_soft_placement=True)
    sess_config.gpu_options.allow_growth = True

    saver = self.tf.train.Saver()
    self.sess = self.tf.Session(config=sess_config, graph=graph)
    self.sess.run(self.tf.global_variables_initializer())
    self.tf.reset_default_graph()
    saver.restore(self.sess, ckpt)

    # Indices 2 and 3 are presumably the start/end token ids in this vocabulary layout.
    self.bs = BeamSearch(self.model,
                         self.hp.beam_size,
                         list(self.idx2token.keys())[2],
                         list(self.idx2token.keys())[3],
                         self.idx2token,
                         self.hp.maxlen2,
                         self.input_x,
                         self.input_y,
                         self.logits)
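Neither example shows the internals of the BeamSearch classes themselves. As a rough, framework-free illustration of the algorithm both wrappers implement, the sketch below scores candidate continuations step by step and keeps only the best beam_size partial hypotheses; step_fn, the token ids, and every other name here are hypothetical and not taken from either project.

import math

def beam_search(step_fn, start_id, end_id, beam_size, max_steps):
    """Minimal beam search sketch (hypothetical, framework-free).

    step_fn(prefix) must return a dict mapping next-token id -> log-probability.
    Returns the highest-scoring token sequence found.
    """
    # Each beam entry is (accumulated log-prob, token sequence).
    beams = [(0.0, [start_id])]
    completed = []
    for _ in range(max_steps):
        # Expand every live hypothesis by every candidate next token.
        candidates = []
        for score, seq in beams:
            for tok, logp in step_fn(seq).items():
                candidates.append((score + logp, seq + [tok]))
        # Keep only the top `beam_size` partial hypotheses.
        candidates.sort(key=lambda c: c[0], reverse=True)
        beams = []
        for score, seq in candidates[:beam_size]:
            if seq[-1] == end_id:
                completed.append((score, seq))
            else:
                beams.append((score, seq))
        if not beams:
            break
    best = max(completed or beams, key=lambda c: c[0])
    return best[1]

# Toy usage: a fake step function that always prefers token 3 (the "end" id here).
fake_step = lambda seq: {2: math.log(0.3), 3: math.log(0.7)}
print(beam_search(fake_step, start_id=1, end_id=3, beam_size=2, max_steps=5))  # -> [1, 3]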