This article collects typical usage examples of the onmt.translate method in Python. If you are unsure exactly what onmt.translate does, how to call it, or what real uses look like, the code samples selected below may help. You can also explore further usage examples from the onmt module itself.
The following presents 5 code examples of the onmt.translate method, ordered by popularity by default.
Example 1: run
# Required import: import onmt [as alias]
# Or: from onmt import translate [as alias]
def run(self, inputs):
    """Translate `inputs`.

    We keep the same format as the Lua version, i.e.
    [{"id": model_id, "src": "sequence to translate"}, { ... }]

    We use inputs[0]["id"] as the model id.
    """
    model_id = inputs[0].get("id", 0)
    if model_id in self.models and self.models[model_id] is not None:
        return self.models[model_id].run(inputs)
    else:
        print("Error No such model '%s'" % str(model_id))
        raise ServerModelError("No such model '%s'" % str(model_id))
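For illustration, the dispatch pattern in `run` can be exercised in isolation with stub objects. This is only a minimal sketch: `FakeServer` and `DummyModel` are hypothetical stand-ins for TranslationServer and a loaded model, and a plain KeyError replaces ServerModelError.

class DummyModel:
    """Stand-in for a loaded model; only mimics the run(inputs) interface."""
    def run(self, inputs):
        return [{"src": item["src"], "tgt": item["src"].upper()} for item in inputs]


class FakeServer:
    """Mirrors the model-id dispatch of the run method above."""
    def __init__(self):
        self.models = {0: DummyModel()}

    def run(self, inputs):
        model_id = inputs[0].get("id", 0)
        if model_id in self.models and self.models[model_id] is not None:
            return self.models[model_id].run(inputs)
        raise KeyError("No such model '%s'" % str(model_id))


# Same request format as the Lua server: a list of {"id": ..., "src": ...} dicts.
print(FakeServer().run([{"id": 0, "src": "sequence to translate"}]))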
Example 2: main
# Required import: import onmt [as alias]
# Or: from onmt import translate [as alias]
def main(opt):
    translator = make_translator(opt, report_score=True)
    translator.translate(opt.src_dir, opt.src, opt.tgt,
                         opt.batch_size, opt.attn_debug)
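This `main` expects a fully populated `opt` namespace built from the translation options. Below is a minimal sketch of the usual command-line wiring; note that the import location of `translate_opts` differs between OpenNMT-py releases (a top-level opts.py in older checkouts, onmt.opts later), and `parse_translate_args` is a hypothetical helper name.

import argparse

from onmt.opts import translate_opts  # assumed location; adjust for your checkout


def parse_translate_args():
    parser = argparse.ArgumentParser(
        description='translate.py',
        formatter_class=argparse.ArgumentDefaultsHelpFormatter)
    translate_opts(parser)  # registers the model/src/tgt/batch_size/... options
    return parser.parse_args()


if __name__ == "__main__":
    main(parse_translate_args())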
Example 3: __init__
# Required import: import onmt [as alias]
# Or: from onmt import translate [as alias]
def __init__(self, model_loc, gpu=-1, beam_size=5, k=5):
    # Simulate all command-line args
    parser = argparse.ArgumentParser(
        description='translate.py',
        formatter_class=argparse.ArgumentDefaultsHelpFormatter)
    translate_opts(parser)
    self.opt = parser.parse_known_args()[0]
    self.opt.model = model_loc
    self.opt.beam_size = beam_size
    self.opt.batch_size = 1
    self.opt.n_best = k

    dummy_parser = argparse.ArgumentParser(description='train.py')
    model_opts(dummy_parser)
    self.dummy_opt = dummy_parser.parse_known_args([])[0]

    # Load the model.
    self.fields, self.model, self.model_opt = \
        onmt.ModelConstructor.load_test_model(
            self.opt, self.dummy_opt.__dict__)

    # Make GPU decoding possible
    self.opt.gpu = gpu
    self.opt.cuda = self.opt.gpu > -1
    if self.opt.cuda:
        torch.cuda.set_device(self.opt.gpu)

    # Translator
    self.scorer = onmt.translate.GNMTGlobalScorer(
        self.opt.alpha,
        self.opt.beta)
    self.translator = onmt.translate.Translator(
        self.model, self.fields,
        beam_size=self.opt.beam_size,
        n_best=self.opt.n_best,
        global_scorer=self.scorer,
        max_length=self.opt.max_sent_length,
        copy_attn=self.model_opt.copy_attn,
        cuda=self.opt.cuda,
        beam_trace=self.opt.dump_beam != "")
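The `alpha` and `beta` handed to `GNMTGlobalScorer` parameterize the length and coverage penalties from the GNMT paper (Wu et al., 2016). The standalone sketch below only illustrates those two formulas; the function names are hypothetical and this is not OpenNMT-py's actual scorer code.

import math


def gnmt_length_penalty(length, alpha):
    # lp(Y) = ((5 + |Y|) / 6) ** alpha: normalizes log-probabilities by hypothesis length.
    return ((5.0 + length) / 6.0) ** alpha


def gnmt_coverage_penalty(attention_sums, beta):
    # cp = beta * sum_i log(min(total attention on source token i, 1.0)):
    # penalizes source tokens that received little attention overall.
    return beta * sum(math.log(min(a, 1.0)) for a in attention_sums)


# A hypothesis is ranked roughly by log_prob / lp + cp.
log_prob, hyp_len = -4.2, 7
attn = [0.9, 1.1, 0.4, 1.0, 0.8, 1.2, 0.6]  # attention mass per source token
score = log_prob / gnmt_length_penalty(hyp_len, alpha=0.6) \
        + gnmt_coverage_penalty(attn, beta=0.2)
print(round(score, 4))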
Example 4: main
# Required import: import onmt [as alias]
# Or: from onmt import translate [as alias]
def main():
    # model = ONMTmodelAPI("model/date_acc_100.00_ppl_1.00_e7.pt")
    model = ONMTmodelAPI(
        "../S2Splay/model_api/processing/s2s_iwslt_ende/baseline-brnn.en-de.s154_acc_61.58_ppl_7.43_e21.pt")
    # Simple case
    # reply = model.translate(["This is a test ."], dump_data=False)
    # Case with attn overwrite OR partial
    reply = model.translate(["this is madness ."], attn_overwrite=[{2: 0}])
    # reply = model.translate(["this is madness ."], partial_decode=["das ist"])
    # Complex case with attn and partial
    # reply = model.translate(["this is madness ."],
    #                         attn_overwrite=[{2: 0}],
    #                         partial_decode=["das ist"])
    # Cases with multiple inputs
    # reply = model.translate(["This is a test .", "and another one ."])
    # Partial
    # reply = model.translate(["This is a test .", "this is a second test ."],
    #                         partial_decode=["Dies ist", "Ein zweiter"])
    # Attn overwrite
    # reply = model.translate(["this is madness .", "i am awesome ."],
    #                         attn_overwrite=[{2: 0}, {}])
    # All together - phew
    # reply = model.translate(["this is madness .", "i am awesome ."],
    #                         partial_decode=["heute ist", "du bist"],
    #                         attn_overwrite=[{2: 0}, {2: 2}])
    # Debug options
    # print("______")
    # print(len(reply[0]['decoder']))
    # print(len(reply[0]['decoder'][0]))
    # print(reply[0]['beam_trace'])
    # print(json.dumps(reply, indent=2, sort_keys=True))
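Putting Examples 3 and 4 together: the wrapper is constructed with an explicit model path and decoding settings, then queried with plain and constrained requests. In this sketch the checkpoint path is a placeholder, and `ONMTmodelAPI` is the wrapper class whose constructor is shown in Example 3.

# Placeholder path -- substitute a real OpenNMT-py checkpoint.
model = ONMTmodelAPI("path/to/model.pt", gpu=-1, beam_size=5, k=5)

# Plain translation: 5-best hypotheses per source sentence (k above).
reply = model.translate(["This is a test ."])

# Constrained variants from Example 4: force a target prefix, or redirect
# attention for a given decoder step.
reply = model.translate(["this is madness ."], partial_decode=["das ist"])
reply = model.translate(["this is madness ."], attn_overwrite=[{2: 0}])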
Example 5: main
# Required import: import onmt [as alias]
# Or: from onmt import translate [as alias]
def main(opt):
    translator = build_translator(opt, report_score=True)
    translator.translate(src_path=opt.src,
                         tgt_path=opt.tgt,
                         template_path=opt.template,
                         src_dir=opt.src_dir,
                         batch_size=opt.batch_size,
                         attn_debug=opt.attn_debug)
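The `template_path` keyword is not part of the upstream OpenNMT-py `Translator.translate` signature and appears to come from a project-specific extension; the remaining keyword arguments match the stock API. Stripped down to the standard call, the same entry point would look roughly like this:

def main(opt):
    translator = build_translator(opt, report_score=True)
    # Standard OpenNMT-py keyword arguments, without the fork-specific template_path.
    translator.translate(src_path=opt.src,
                         tgt_path=opt.tgt,
                         src_dir=opt.src_dir,
                         batch_size=opt.batch_size,
                         attn_debug=opt.attn_debug)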