当前位置: 首页>>代码示例>>Python>>正文


Python transformer.transformer_base方法代码示例

本文整理汇总了Python中tensor2tensor.models.transformer.transformer_base方法的典型用法代码示例。如果您正苦于以下问题:Python transformer.transformer_base方法的具体用法?Python transformer.transformer_base怎么用?Python transformer.transformer_base使用的例子?那么恭喜您, 这里精选的方法代码示例或许可以为您提供帮助。您也可以进一步了解该方法所在tensor2tensor.models.transformer的用法示例。


在下文中一共展示了transformer.transformer_base方法的15个代码示例,这些例子默认根据受欢迎程度排序。您可以为喜欢或者感觉有用的代码点赞,您的评价将有助于系统推荐出更棒的Python代码示例。

示例1: iwslt_base

# 需要导入模块: from tensor2tensor.models import transformer [as 别名]
# 或者: from tensor2tensor.models.transformer import transformer_base [as 别名]
def iwslt_base():
  """Hyperparameters for IWSLT, derived from the Transformer base config."""
  hparams = transformer.transformer_base()
  # Apply all plain-attribute overrides table-style; setattr(h, k, v) is
  # identical to `h.k = v`.
  overrides = [
      # Model architecture flags.
      ("num_hidden_layers", 5),
      ("hidden_size", 256),
      ("filter_size", 1024),
      ("num_heads", 4),
      # Other flags.
      ("summarize_grads", False),
      ("summarize_vars", False),
      # Optimization-related flags.
      ("clip_grad_norm", 1.0),
      ("learning_rate_decay_scheme", "noam"),
      ("learning_rate_warmup_steps", 8000),
      ("learning_rate", 0.2),
      ("learning_rate_schedule",
       "constant*linear_warmup*rsqrt_decay*rsqrt_hidden_size"),
      ("learning_rate_constant", 2.0),
  ]
  for key, value in overrides:
    setattr(hparams, key, value)
  # New hparams (not present in the base config) must go through add_hparam.
  hparams.add_hparam("predict_target_length", True)
  hparams.add_hparam("lendiff_bound", 30)
  # TPU adjustments happen before pos_attn is added, as in the original flow.
  hparams = update_hparams_for_tpu(hparams)
  hparams.add_hparam("pos_attn", False)
  return hparams
开发者ID:tensorflow,项目名称:tensor2tensor,代码行数:26,代码来源:transformer_vae_flow_prior.py

示例2: _apply_encoder_layer

# 需要导入模块: from tensor2tensor.models import transformer [as 别名]
# 或者: from tensor2tensor.models.transformer import transformer_base [as 别名]
def _apply_encoder_layer(translation_layer, output_depth, nonpadding_list):
  """Applies an encoder layer with basic arguments."""
  # Inputs are scaled down by 4 to keep activations small for the test.
  encoder_input = tf.random_uniform(
      [_BATCH_SIZE, _TOTAL_SEQUENCE_LENGTH, _INPUT_DEPTH]) / 4.0
  nonpadding_mask = tf.constant(nonpadding_list)
  # Residual branch already has the layer's output depth.
  residual_input = tf.random_uniform(
      [_BATCH_SIZE, _TOTAL_SEQUENCE_LENGTH, output_depth])
  base_hparams = transformer.transformer_base()

  return translation_layer.apply_layer(
      encoder_input,
      residual_input,
      output_depth,
      tf.nn.relu,
      base_hparams,
      "",
      mask_future=False,
      nonpadding=nonpadding_mask,
      layer_preprocess_fn=None,
      postprocess_dropout=True)
开发者ID:tensorflow,项目名称:tensor2tensor,代码行数:23,代码来源:nas_layers_test.py

示例3: universal_transformer_small

# 需要导入模块: from tensor2tensor.models import transformer [as 别名]
# 或者: from tensor2tensor.models.transformer import transformer_base [as 别名]
def universal_transformer_small():
  """Transformer base hparams adapted for the Universal Transformer."""
  return update_hparams_for_universal_transformer(
      transformer.transformer_base())
开发者ID:akzaidi,项目名称:fine-lm,代码行数:6,代码来源:universal_transformer.py

示例4: transformer_teeny

# 需要导入模块: from tensor2tensor.models import transformer [as 别名]
# 或者: from tensor2tensor.models.transformer import transformer_base [as 别名]
def transformer_teeny():
  """A drastically scaled-down configuration for quick experimentation."""
  hp = transformer.transformer_base()
  # Shrink recurrence depth and model width.
  hp.num_rec_steps = 2
  hp.hidden_size = 128
  hp.filter_size = 128
  hp.num_heads = 2
  return hp
开发者ID:akzaidi,项目名称:fine-lm,代码行数:9,代码来源:universal_transformer.py

示例5: transformer_aux_base

# 需要导入模块: from tensor2tensor.models import transformer [as 别名]
# 或者: from tensor2tensor.models.transformer import transformer_base [as 别名]
def transformer_aux_base():
  """Set of hyperparameters for the auxiliary-loss Transformer."""
  hparams = transformer.transformer_base()
  # Untie embedding and softmax weights (the base config shares them).
  hparams.shared_embedding_and_softmax_weights = False
  # Comma-separated shift offsets consumed by the auxiliary objective.
  hparams.add_hparam("shift_values", "1,2,3,4")
  return hparams
开发者ID:tensorflow,项目名称:tensor2tensor,代码行数:8,代码来源:transformer_aux.py

示例6: wmt_enro_tpu

# 需要导入模块: from tensor2tensor.models import transformer [as 别名]
# 或者: from tensor2tensor.models.transformer import transformer_base [as 别名]
def wmt_enro_tpu():
  """HParams for Transformer model on TPU."""
  # Chained form: the original also used update_hparams_for_tpu's return value.
  hp = transformer.update_hparams_for_tpu(transformer.transformer_base())
  hp.batch_size = 512
  return hp
开发者ID:tensorflow,项目名称:tensor2tensor,代码行数:8,代码来源:transformer_vae_flow_prior.py

示例7: iwslt_baseline_gpu

# 需要导入模块: from tensor2tensor.models import transformer [as 别名]
# 或者: from tensor2tensor.models.transformer import transformer_base [as 别名]
def iwslt_baseline_gpu():
  """Baseline Transformer hparams for IWSLT on GPU.

  Fixes a copy-paste docstring that claimed "on TPU": unlike
  `iwslt_baseline_tpu`, this variant never applies the TPU-specific
  hparam adjustments.

  Returns:
    HParams with a reduced architecture and a uniform 0.1 dropout rate.
  """
  hparams = transformer.transformer_base()
  # Smaller architecture suited to the relatively small IWSLT dataset.
  hparams.hidden_size = 256
  hparams.filter_size = 1024
  hparams.num_hidden_layers = 5
  hparams.num_heads = 2
  # Uniform dropout across every dropout site.
  hparams.layer_prepostprocess_dropout = 0.1
  hparams.attention_dropout = 0.1
  hparams.relu_dropout = 0.1
  hparams.dropout = 0.1
  return hparams
开发者ID:tensorflow,项目名称:tensor2tensor,代码行数:14,代码来源:transformer_vae_flow_prior.py

示例8: iwslt_baseline_tpu

# 需要导入模块: from tensor2tensor.models import transformer [as 别名]
# 或者: from tensor2tensor.models.transformer import transformer_base [as 别名]
def iwslt_baseline_tpu():
  """HParams for Transformer model on TPU."""
  hp = transformer.transformer_base()
  # NOTE: as in the original, the return value of update_hparams_for_tpu is
  # intentionally discarded here.
  transformer.update_hparams_for_tpu(hp)
  # Smaller architecture for IWSLT.
  hp.hidden_size = 256
  hp.filter_size = 1024
  hp.num_hidden_layers = 5
  hp.num_heads = 2
  # Set every dropout site to the same 0.1 rate.
  for dropout_site in ("layer_prepostprocess_dropout", "attention_dropout",
                       "relu_dropout", "dropout"):
    setattr(hp, dropout_site, 0.1)
  hp.add_hparam("pos_attn", False)
  return hp
开发者ID:tensorflow,项目名称:tensor2tensor,代码行数:16,代码来源:transformer_vae_flow_prior.py

示例9: universal_transformer_base_fp16

# 需要导入模块: from tensor2tensor.models import transformer [as 别名]
# 或者: from tensor2tensor.models.transformer import transformer_base [as 别名]
def universal_transformer_base_fp16():
  """Universal Transformer base hparams with float16 activations."""
  hp = update_hparams_for_universal_transformer(
      transformer.transformer_base())
  hp.activation_dtype = "float16"
  return hp
开发者ID:tensorflow,项目名称:tensor2tensor,代码行数:7,代码来源:universal_transformer.py

示例10: transformer_teeny

# 需要导入模块: from tensor2tensor.models import transformer [as 别名]
# 或者: from tensor2tensor.models.transformer import transformer_base [as 别名]
def transformer_teeny():
  """A tiny Transformer configuration for fast runs."""
  hp = transformer.transformer_base()
  # Narrow model with few heads; setattr(h, k, v) == `h.k = v`.
  for key, value in (("hidden_size", 128),
                     ("filter_size", 128),
                     ("num_heads", 2)):
    setattr(hp, key, value)
  return hp
开发者ID:tensorflow,项目名称:tensor2tensor,代码行数:8,代码来源:universal_transformer.py

示例11: transformer_base_bs1

# 需要导入模块: from tensor2tensor.models import transformer [as 别名]
# 或者: from tensor2tensor.models.transformer import transformer_base [as 别名]
def transformer_base_bs1():
  """Transformer base hparams with block_size fixed at 1."""
  base_hparams = transformer.transformer_base()
  base_hparams.add_hparam("block_size", 1)
  return base_hparams
开发者ID:tensorflow,项目名称:tensor2tensor,代码行数:6,代码来源:transformer_parallel.py

示例12: transformer_base_bs2

# 需要导入模块: from tensor2tensor.models import transformer [as 别名]
# 或者: from tensor2tensor.models.transformer import transformer_base [as 别名]
def transformer_base_bs2():
  """Transformer base hparams with block_size fixed at 2."""
  base_hparams = transformer.transformer_base()
  base_hparams.add_hparam("block_size", 2)
  return base_hparams
开发者ID:tensorflow,项目名称:tensor2tensor,代码行数:6,代码来源:transformer_parallel.py

示例13: transformer_base_bs4

# 需要导入模块: from tensor2tensor.models import transformer [as 别名]
# 或者: from tensor2tensor.models.transformer import transformer_base [as 别名]
def transformer_base_bs4():
  """Transformer base hparams with block_size fixed at 4."""
  base_hparams = transformer.transformer_base()
  base_hparams.add_hparam("block_size", 4)
  return base_hparams
开发者ID:tensorflow,项目名称:tensor2tensor,代码行数:6,代码来源:transformer_parallel.py

示例14: transformer_base_bs5

# 需要导入模块: from tensor2tensor.models import transformer [as 别名]
# 或者: from tensor2tensor.models.transformer import transformer_base [as 别名]
def transformer_base_bs5():
  """Transformer base hparams with block_size fixed at 5."""
  base_hparams = transformer.transformer_base()
  base_hparams.add_hparam("block_size", 5)
  return base_hparams
开发者ID:tensorflow,项目名称:tensor2tensor,代码行数:6,代码来源:transformer_parallel.py

示例15: transformer_base_bs6

# 需要导入模块: from tensor2tensor.models import transformer [as 别名]
# 或者: from tensor2tensor.models.transformer import transformer_base [as 别名]
def transformer_base_bs6():
  """Transformer base hparams with block_size fixed at 6."""
  base_hparams = transformer.transformer_base()
  base_hparams.add_hparam("block_size", 6)
  return base_hparams
开发者ID:tensorflow,项目名称:tensor2tensor,代码行数:6,代码来源:transformer_parallel.py


注:本文中的tensor2tensor.models.transformer.transformer_base方法示例由纯净天空整理自Github/MSDocs等开源代码及文档管理平台,相关代码片段筛选自各路编程大神贡献的开源项目,源码版权归原作者所有,传播和使用请参考对应项目的License;未经允许,请勿转载。