本文整理匯總了Python中tensor2tensor.models.transformer.transformer_big方法的典型用法代碼示例。如果您正苦於以下問題:Python transformer.transformer_big方法的具體用法?Python transformer.transformer_big怎麼用?Python transformer.transformer_big使用的例子?那麼,這裏精選的方法代碼示例或許可以為您提供幫助。您也可以進一步了解該方法所在模塊tensor2tensor.models.transformer
的用法示例。
在下文中一共展示了transformer.transformer_big方法的8個代碼示例,這些例子默認根據受歡迎程度排序。您可以為喜歡或者感覺有用的代碼點讚,您的評價將有助於系統推薦出更棒的Python代碼示例。
示例1: transformer_revnet_base
# 需要導入模塊: from tensor2tensor.models import transformer [as 別名]
# 或者: from tensor2tensor.models.transformer import transformer_big [as 別名]
def transformer_revnet_base():
  """Base hparams for TransformerRevnet, derived from transformer_big."""
  hp = transformer.transformer_big()
  # Adopt the transformer_n_da layer ordering: normalize ("n") before each
  # layer, dropout + residual add ("da") after it.
  hp.layer_preprocess_sequence = "n"
  hp.layer_postprocess_sequence = "da"
  hp.learning_rate = 0.4
  return hp
示例2: universal_transformer_base
# 需要導入模塊: from tensor2tensor.models import transformer [as 別名]
# 或者: from tensor2tensor.models.transformer import transformer_big [as 別名]
def universal_transformer_base():
  """Universal Transformer hparams built on top of transformer_big."""
  # Start from the big Transformer config and apply the UT-specific tweaks.
  return update_hparams_for_universal_transformer(transformer.transformer_big())
示例3: universal_transformer_big
# 需要導入模塊: from tensor2tensor.models import transformer [as 別名]
# 或者: from tensor2tensor.models.transformer import transformer_big [as 別名]
def universal_transformer_big():
  """Universal Transformer hparams with enlarged hidden and filter sizes."""
  hp = update_hparams_for_universal_transformer(transformer.transformer_big())
  # Scale the model up beyond transformer_big's defaults.
  hp.hidden_size = 2048
  hp.filter_size = 8192
  return hp
示例4: transformer_big_bs1
# 需要導入模塊: from tensor2tensor.models import transformer [as 別名]
# 或者: from tensor2tensor.models.transformer import transformer_big [as 別名]
def transformer_big_bs1():
  """transformer_big with an extra ``block_size`` hparam set to 1."""
  hp = transformer.transformer_big()
  # ``block_size`` is not part of the base config, so it must be added
  # rather than assigned.
  hp.add_hparam("block_size", 1)
  return hp
示例5: evolved_transformer_big
# 需要導入模塊: from tensor2tensor.models import transformer [as 別名]
# 或者: from tensor2tensor.models.transformer import transformer_big [as 別名]
def evolved_transformer_big():
  """Big parameters for Evolved Transformer model on WMT."""
  hp = transformer.transformer_big()
  return add_evolved_transformer_hparams(hp)
示例6: evolved_transformer_deep
# 需要導入模塊: from tensor2tensor.models import transformer [as 別名]
# 或者: from tensor2tensor.models.transformer import transformer_big [as 別名]
def evolved_transformer_deep():
  """Deep parameters for Evolved Transformer model on WMT."""
  hp = add_evolved_transformer_hparams(transformer.transformer_big())
  # Trade width for depth relative to the "big" config: more layers,
  # smaller hidden size.
  hp.num_encoder_layers = 9
  hp.num_decoder_layers = 10
  hp.hidden_size = 640
  return hp
示例7: universal_transformer_base1
# 需要導入模塊: from tensor2tensor.models import transformer [as 別名]
# 或者: from tensor2tensor.models.transformer import transformer_big [as 別名]
def universal_transformer_base1():
  """Universal Transformer base hparams (variant name, same recipe)."""
  # Identical recipe to universal_transformer_base: big Transformer config
  # plus the UT-specific adjustments.
  return update_hparams_for_universal_transformer(transformer.transformer_big())
示例8: universal_transformer_big1
# 需要導入模塊: from tensor2tensor.models import transformer [as 別名]
# 或者: from tensor2tensor.models.transformer import transformer_big [as 別名]
def universal_transformer_big1():
  """Universal Transformer big hparams (variant name, same recipe)."""
  hp = update_hparams_for_universal_transformer(transformer.transformer_big())
  # Scale up to the "big" UT dimensions.
  hp.hidden_size = 2048
  hp.filter_size = 8192
  return hp