This page collects typical usage examples of the Python method beam_search.BeamSearch. If you are unsure what beam_search.BeamSearch does or how to use it, the curated method examples below may help. You can also explore further usage examples from the module beam_search that the method belongs to.
Two code examples of the beam_search.BeamSearch method are shown below, sorted by popularity by default.
Example 1: _Decode
# Required import: import beam_search [as alias]
# Or: from beam_search import BeamSearch [as alias]
def _Decode(self, saver, sess):
  """Restore a checkpoint and decode it.

  Args:
    saver: Tensorflow checkpoint saver.
    sess: Tensorflow session.
  Returns:
    True on success, False otherwise.
  """
  ckpt_state = tf.train.get_checkpoint_state(FLAGS.log_root)
  if not (ckpt_state and ckpt_state.model_checkpoint_path):
    tf.logging.info('No model to decode yet at %s', FLAGS.log_root)
    return False

  tf.logging.info('checkpoint path %s', ckpt_state.model_checkpoint_path)
  ckpt_path = os.path.join(
      FLAGS.log_root, os.path.basename(ckpt_state.model_checkpoint_path))
  tf.logging.info('renamed checkpoint path %s', ckpt_path)
  saver.restore(sess, ckpt_path)

  self._decode_io.ResetFiles()
  for _ in xrange(FLAGS.decode_batches_per_ckpt):
    (article_batch, _, _, article_lens, _, _, origin_articles,
     origin_abstracts) = self._batch_reader.NextBatch()
    for i in xrange(self._hps.batch_size):
      bs = beam_search.BeamSearch(
          self._model, self._hps.batch_size,
          self._vocab.WordToId(data.SENTENCE_START),
          self._vocab.WordToId(data.SENTENCE_END),
          self._hps.dec_timesteps)

      # Decode one article at a time: replicate the i-th example across
      # the whole batch so the graph's fixed batch size is satisfied.
      article_batch_cp = article_batch.copy()
      article_batch_cp[:] = article_batch[i:i+1]
      article_lens_cp = article_lens.copy()
      article_lens_cp[:] = article_lens[i:i+1]
      # Take the highest-scoring beam and drop the leading start token.
      best_beam = bs.BeamSearch(sess, article_batch_cp, article_lens_cp)[0]
      decode_output = [int(t) for t in best_beam.tokens[1:]]
      self._DecodeBatch(
          origin_articles[i], origin_abstracts[i], decode_output)
  return True
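
For context on what bs.BeamSearch(sess, ...) computes, the framework-free sketch below shows the core of the algorithm: repeatedly expand each live hypothesis with its most likely next tokens and keep the beam_size best by accumulated log-probability until the end token appears. This is only a minimal illustration under assumed names (beam_search_sketch, step_fn), not the textsum implementation, which additionally batches model calls and tracks decoder state.

import numpy as np

def beam_search_sketch(step_fn, start_id, end_id, beam_size, max_steps):
    """Return hypotheses (token list, score) sorted by log-probability.

    step_fn(prefix) must return a 1-D numpy array of log-probabilities
    over the vocabulary for the next token given the token prefix.
    """
    beams = [([start_id], 0.0)]   # live hypotheses: (tokens, cumulative log-prob)
    finished = []                 # hypotheses that already emitted end_id
    for _ in range(max_steps):
        candidates = []
        for tokens, score in beams:
            log_probs = step_fn(tokens)
            # Expand each live hypothesis with its beam_size most likely next tokens.
            for tok in np.argsort(log_probs)[-beam_size:]:
                candidates.append((tokens + [int(tok)], score + float(log_probs[tok])))
        # Keep only the beam_size best candidates overall.
        candidates.sort(key=lambda c: c[1], reverse=True)
        beams = []
        for tokens, score in candidates[:beam_size]:
            (finished if tokens[-1] == end_id else beams).append((tokens, score))
        if not beams:
            break
    return sorted(finished + beams, key=lambda c: c[1], reverse=True)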
Example 2: _init_graph
# Required import: import beam_search [as alias]
# Or: from beam_search import BeamSearch [as alias]
def _init_graph(self):
    """Build the inference graph, restore the latest checkpoint, and
    create the BeamSearch decoder."""
    self.ys = (self.input_y, None, None)
    self.xs = (self.input_x, None)
    self.memory = self.model.encode(self.xs, False)[0]
    self.logits = self.model.decode(self.xs, self.ys, self.memory, False)[0]

    # Restore the most recent checkpoint into a dedicated session.
    ckpt = self.tf.train.get_checkpoint_state(self.model_dir).all_model_checkpoint_paths[-1]
    graph = self.logits.graph
    sess_config = self.tf.ConfigProto(allow_soft_placement=True)
    sess_config.gpu_options.allow_growth = True
    saver = self.tf.train.Saver()
    self.sess = self.tf.Session(config=sess_config, graph=graph)
    self.sess.run(self.tf.global_variables_initializer())
    self.tf.reset_default_graph()
    saver.restore(self.sess, ckpt)

    # The vocabulary ids at positions 2 and 3 of idx2token are passed in
    # as the start and end symbols for beam search.
    self.bs = BeamSearch(self.model,
                         self.hp.beam_size,
                         list(self.idx2token.keys())[2],
                         list(self.idx2token.keys())[3],
                         self.idx2token,
                         self.hp.maxlen2,
                         self.input_x,
                         self.input_y,
                         self.logits)
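
A side note on the constructor arguments above: list(self.idx2token.keys())[2] and [3] pick the third and fourth vocabulary ids, relying on the dictionary's insertion order placing the start and end symbols there. A more explicit lookup by token name could look like the hypothetical helper below; the helper name and the "<s>" / "</s>" strings are assumptions for illustration, not something the original repository guarantees.

def special_token_ids(idx2token, start_token="<s>", end_token="</s>"):
    """Look up start/end ids by token string instead of by dict position."""
    token2idx = {tok: idx for idx, tok in idx2token.items()}
    return token2idx[start_token], token2idx[end_token]

# Toy vocabulary; the real token strings and ordering may differ.
idx2token = {0: "<pad>", 1: "<unk>", 2: "<s>", 3: "</s>", 4: "hello"}
start_id, end_id = special_token_ids(idx2token)
assert (start_id, end_id) == (2, 3)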