

Python transformer.transformer_base Method Code Examples

This article collects typical usage examples of the Python method tensor2tensor.models.transformer.transformer_base. If you are unsure how to use transformer.transformer_base, or what it looks like in real code, the curated examples below should help. You can also browse further usage examples from the containing module, tensor2tensor.models.transformer.


Below are 15 code examples of transformer.transformer_base, sorted by popularity by default. You can upvote the examples you like or find useful; your ratings help the system recommend better Python code examples.
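Before diving into the examples, here is a minimal sketch of the pattern they all share: start from the base hyperparameter set, override existing fields, and register any new ones with add_hparam. The function and hparam names (my_transformer_hparams, my_flag) are illustrative, not part of tensor2tensor:

from tensor2tensor.models import transformer

def my_transformer_hparams():
  hparams = transformer.transformer_base()  # base Transformer hparams
  hparams.num_hidden_layers = 4             # override an existing hparam
  hparams.add_hparam("my_flag", True)       # register a brand-new hparam
  return hparams

hparams = my_transformer_hparams()
print(hparams.hidden_size, hparams.num_heads)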

Example 1: iwslt_base

# Required import: from tensor2tensor.models import transformer [as alias]
# Or: from tensor2tensor.models.transformer import transformer_base [as alias]
def iwslt_base():
  """Set of hyperparameters."""
  # Model architecture flags.
  hparams = transformer.transformer_base()
  hparams.num_hidden_layers = 5
  hparams.hidden_size = 256
  hparams.filter_size = 1024
  hparams.num_heads = 4
  # Other flags.
  hparams.summarize_grads = False
  hparams.summarize_vars = False
  # Optimization-related flags.
  hparams.clip_grad_norm = 1.0
  hparams.learning_rate_decay_scheme = "noam"
  hparams.learning_rate_warmup_steps = 8000
  hparams.learning_rate = 0.2
  hparams.learning_rate_schedule = (
      "constant*linear_warmup*rsqrt_decay*rsqrt_hidden_size")
  hparams.learning_rate_constant = 2.0
  hparams.add_hparam("predict_target_length", True)
  hparams.add_hparam("lendiff_bound", 30)
  hparams = update_hparams_for_tpu(hparams)
  hparams.add_hparam("pos_attn", False)
  return hparams 
Developer: tensorflow, Project: tensor2tensor, Lines: 26, Source: transformer_vae_flow_prior.py
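In the source projects, hparam functions like iwslt_base are registered with tensor2tensor's registry so they can be selected by name on the command line. A minimal sketch of that standard pattern (the function name iwslt_base_demo is a stand-in, not the actual source):

from tensor2tensor.models import transformer
from tensor2tensor.utils import registry

@registry.register_hparams
def iwslt_base_demo():
  hparams = transformer.transformer_base()
  hparams.hidden_size = 256
  return hparams

# The registered name is then passed to the trainer, e.g.:
#   t2t-trainer --hparams_set=iwslt_base_demo ...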

Example 2: _apply_encoder_layer

# Required import: from tensor2tensor.models import transformer [as alias]
# Or: from tensor2tensor.models.transformer import transformer_base [as alias]
def _apply_encoder_layer(translation_layer, output_depth, nonpadding_list):
  """Applies an encoder layer with basic arguments."""

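  # Note: _BATCH_SIZE, _TOTAL_SEQUENCE_LENGTH and _INPUT_DEPTH are
  # module-level constants defined in the surrounding nas_layers_test.py.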
  input_tensor = tf.random_uniform(
      [_BATCH_SIZE, _TOTAL_SEQUENCE_LENGTH, _INPUT_DEPTH]) / 4.0
  nonpadding = tf.constant(nonpadding_list)
  residual_tensor = tf.random_uniform(
      [_BATCH_SIZE, _TOTAL_SEQUENCE_LENGTH, output_depth])
  hparams = transformer.transformer_base()

  return translation_layer.apply_layer(
      input_tensor,
      residual_tensor,
      output_depth,
      tf.nn.relu,
      hparams,
      "",
      mask_future=False,
      nonpadding=nonpadding,
      layer_preprocess_fn=None,
      postprocess_dropout=True) 
Developer: tensorflow, Project: tensor2tensor, Lines: 23, Source: nas_layers_test.py

Example 3: universal_transformer_small

# Required import: from tensor2tensor.models import transformer [as alias]
# Or: from tensor2tensor.models.transformer import transformer_base [as alias]
def universal_transformer_small():
  hparams = transformer.transformer_base()
  hparams = update_hparams_for_universal_transformer(hparams)
  return hparams 
Developer: akzaidi, Project: fine-lm, Lines: 6, Source: universal_transformer.py

Example 4: transformer_teeny

# Required import: from tensor2tensor.models import transformer [as alias]
# Or: from tensor2tensor.models.transformer import transformer_base [as alias]
def transformer_teeny():
  hparams = transformer.transformer_base()
  hparams.num_rec_steps = 2
  hparams.hidden_size = 128
  hparams.filter_size = 128
  hparams.num_heads = 2
  return hparams 
Developer: akzaidi, Project: fine-lm, Lines: 9, Source: universal_transformer.py

Example 5: transformer_aux_base

# Required import: from tensor2tensor.models import transformer [as alias]
# Or: from tensor2tensor.models.transformer import transformer_base [as alias]
def transformer_aux_base():
  """Set of hyperparameters."""
  hparams = transformer.transformer_base()
  hparams.shared_embedding_and_softmax_weights = False
  hparams.add_hparam("shift_values", "1,2,3,4")
  return hparams 
Developer: tensorflow, Project: tensor2tensor, Lines: 8, Source: transformer_aux.py
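The shift_values hparam is a comma-separated string; presumably the model parses it into a list of integer shifts. A hedged sketch of that parsing (parse_shift_values is an illustrative helper, not the transformer_aux source):

def parse_shift_values(hparams):
  # "1,2,3,4" -> [1, 2, 3, 4]
  return [int(s) for s in hparams.shift_values.split(",")]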

Example 6: wmt_enro_tpu

# Required import: from tensor2tensor.models import transformer [as alias]
# Or: from tensor2tensor.models.transformer import transformer_base [as alias]
def wmt_enro_tpu():
  """HParams for Transformer model on TPU."""
  hparams = transformer.transformer_base()
  hparams = transformer.update_hparams_for_tpu(hparams)
  hparams.batch_size = 512
  return hparams 
Developer: tensorflow, Project: tensor2tensor, Lines: 8, Source: transformer_vae_flow_prior.py

Example 7: iwslt_baseline_gpu

# Required import: from tensor2tensor.models import transformer [as alias]
# Or: from tensor2tensor.models.transformer import transformer_base [as alias]
def iwslt_baseline_gpu():
  """HParams for Transformer model on TPU."""
  hparams = transformer.transformer_base()
  hparams.hidden_size = 256
  hparams.filter_size = 1024
  hparams.num_hidden_layers = 5
  hparams.num_heads = 2
  hparams.layer_prepostprocess_dropout = 0.1
  hparams.attention_dropout = 0.1
  hparams.relu_dropout = 0.1
  hparams.dropout = 0.1
  return hparams 
Developer: tensorflow, Project: tensor2tensor, Lines: 14, Source: transformer_vae_flow_prior.py

Example 8: iwslt_baseline_tpu

# Required import: from tensor2tensor.models import transformer [as alias]
# Or: from tensor2tensor.models.transformer import transformer_base [as alias]
def iwslt_baseline_tpu():
  """HParams for Transformer model on TPU."""
  hparams = transformer.transformer_base()
  transformer.update_hparams_for_tpu(hparams)
  hparams.hidden_size = 256
  hparams.filter_size = 1024
  hparams.num_hidden_layers = 5
  hparams.num_heads = 2
  hparams.layer_prepostprocess_dropout = 0.1
  hparams.attention_dropout = 0.1
  hparams.relu_dropout = 0.1
  hparams.dropout = 0.1
  hparams.add_hparam("pos_attn", False)
  return hparams 
Developer: tensorflow, Project: tensor2tensor, Lines: 16, Source: transformer_vae_flow_prior.py

Example 9: universal_transformer_base_fp16

# Required import: from tensor2tensor.models import transformer [as alias]
# Or: from tensor2tensor.models.transformer import transformer_base [as alias]
def universal_transformer_base_fp16():
  hparams = transformer.transformer_base()
  hparams = update_hparams_for_universal_transformer(hparams)
  hparams.activation_dtype = "float16"
  return hparams 
Developer: tensorflow, Project: tensor2tensor, Lines: 7, Source: universal_transformer.py

Example 10: transformer_teeny

# Required import: from tensor2tensor.models import transformer [as alias]
# Or: from tensor2tensor.models.transformer import transformer_base [as alias]
def transformer_teeny():
  hparams = transformer.transformer_base()
  hparams.hidden_size = 128
  hparams.filter_size = 128
  hparams.num_heads = 2
  return hparams 
Developer: tensorflow, Project: tensor2tensor, Lines: 8, Source: universal_transformer.py

Example 11: transformer_base_bs1

# Required import: from tensor2tensor.models import transformer [as alias]
# Or: from tensor2tensor.models.transformer import transformer_base [as alias]
def transformer_base_bs1():
  hparams = transformer.transformer_base()
  hparams.add_hparam("block_size", 1)
  return hparams 
Developer: tensorflow, Project: tensor2tensor, Lines: 6, Source: transformer_parallel.py

Example 12: transformer_base_bs2

# Required import: from tensor2tensor.models import transformer [as alias]
# Or: from tensor2tensor.models.transformer import transformer_base [as alias]
def transformer_base_bs2():
  hparams = transformer.transformer_base()
  hparams.add_hparam("block_size", 2)
  return hparams 
Developer: tensorflow, Project: tensor2tensor, Lines: 6, Source: transformer_parallel.py

Example 13: transformer_base_bs4

# Required import: from tensor2tensor.models import transformer [as alias]
# Or: from tensor2tensor.models.transformer import transformer_base [as alias]
def transformer_base_bs4():
  hparams = transformer.transformer_base()
  hparams.add_hparam("block_size", 4)
  return hparams 
Developer: tensorflow, Project: tensor2tensor, Lines: 6, Source: transformer_parallel.py

Example 14: transformer_base_bs5

# Required import: from tensor2tensor.models import transformer [as alias]
# Or: from tensor2tensor.models.transformer import transformer_base [as alias]
def transformer_base_bs5():
  hparams = transformer.transformer_base()
  hparams.add_hparam("block_size", 5)
  return hparams 
Developer: tensorflow, Project: tensor2tensor, Lines: 6, Source: transformer_parallel.py

Example 15: transformer_base_bs6

# Required import: from tensor2tensor.models import transformer [as alias]
# Or: from tensor2tensor.models.transformer import transformer_base [as alias]
def transformer_base_bs6():
  hparams = transformer.transformer_base()
  hparams.add_hparam("block_size", 6)
  return hparams 
Developer: tensorflow, Project: tensor2tensor, Lines: 6, Source: transformer_parallel.py
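Examples 11 through 15 differ only in the value of block_size. A hedged sketch of generating such a family programmatically, assuming registry.register_hparams accepts an explicit name (the _demo names are illustrative and chosen to avoid clashing with the already-registered sets):

from tensor2tensor.models import transformer
from tensor2tensor.utils import registry

def _transformer_base_bs(block_size):
  hparams = transformer.transformer_base()
  hparams.add_hparam("block_size", block_size)
  return hparams

# One registered hparams set per block size, mirroring examples 11-15.
for bs in (1, 2, 4, 5, 6):
  registry.register_hparams("transformer_base_bs%d_demo" % bs)(
      lambda bs=bs: _transformer_base_bs(bs))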


Note: The tensor2tensor.models.transformer.transformer_base method examples in this article were compiled by 純淨天空 from open-source code and documentation platforms such as GitHub and MSDocs. The snippets were selected from open-source projects contributed by the programming community; copyright of the source code belongs to the original authors. For distribution and use, please refer to each project's license. Do not reproduce without permission.