This article collects typical usage examples of the Python method transformers.get_linear_schedule_with_warmup. If you are wondering what transformers.get_linear_schedule_with_warmup does or how to use it in practice, the curated code examples below may help. You can also browse further usage examples from the transformers module.
Three code examples of transformers.get_linear_schedule_with_warmup are shown below, sorted by popularity by default.
Example 1: _get_scheduler
# Required import: import transformers [as alias]
# or: from transformers import get_linear_schedule_with_warmup [as alias]
def _get_scheduler(self, optimizer, scheduler: str, warmup_steps: int, t_total: int):
    """
    Returns the correct learning rate scheduler
    """
    scheduler = scheduler.lower()
    if scheduler == 'constantlr':
        return transformers.get_constant_schedule(optimizer)
    elif scheduler == 'warmupconstant':
        return transformers.get_constant_schedule_with_warmup(optimizer, num_warmup_steps=warmup_steps)
    elif scheduler == 'warmuplinear':
        return transformers.get_linear_schedule_with_warmup(optimizer, num_warmup_steps=warmup_steps, num_training_steps=t_total)
    elif scheduler == 'warmupcosine':
        return transformers.get_cosine_schedule_with_warmup(optimizer, num_warmup_steps=warmup_steps, num_training_steps=t_total)
    elif scheduler == 'warmupcosinewithhardrestarts':
        return transformers.get_cosine_with_hard_restarts_schedule_with_warmup(optimizer, num_warmup_steps=warmup_steps, num_training_steps=t_total)
    else:
        raise ValueError("Unknown scheduler {}".format(scheduler))
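For reference, the 'warmuplinear' branch above is the one that calls transformers.get_linear_schedule_with_warmup. The following is a minimal standalone sketch of that call; the model, learning rate, and step counts are placeholder values and not part of the original example:

import torch
import transformers

model = torch.nn.Linear(10, 2)  # placeholder model
optimizer = torch.optim.AdamW(model.parameters(), lr=2e-5)

t_total = 1000      # assumed total number of training steps
warmup_steps = 100  # assumed warmup length (10% of training)
scheduler = transformers.get_linear_schedule_with_warmup(
    optimizer, num_warmup_steps=warmup_steps, num_training_steps=t_total
)

for step in range(t_total):
    # ... forward/backward pass would go here ...
    optimizer.step()   # update the parameters first
    scheduler.step()   # then advance the learning-rate schedule by one step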
Example 2: configure_optimizers
# Required import: import transformers [as alias]
# or: from transformers import get_linear_schedule_with_warmup [as alias]
def configure_optimizers(self):
    "Prepare optimizer"
    no_decay = ["bias", "LayerNorm.weight"]
    optimizer_grouped_parameters = [
        {
            "params": [
                p
                for n, p in self.model.named_parameters()
                if not any(nd in n for nd in no_decay)
            ],
            "weight_decay": self.hparams["weight_decay"],
        },
        {
            "params": [
                p
                for n, p in self.model.named_parameters()
                if any(nd in n for nd in no_decay)
            ],
            "weight_decay": 0.0,
        },
    ]
    optimizer = AdamW(
        optimizer_grouped_parameters,
        lr=self.hparams["learning_rate"],
        eps=self.hparams["adam_epsilon"],
    )
    scheduler = get_linear_schedule_with_warmup(
        optimizer,
        num_warmup_steps=self.hparams["warmup_steps"],
        num_training_steps=self.hparams["num_steps"],
    )
    return [optimizer], [scheduler]
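The AdamW used here is imported from transformers (or, equivalently, torch.optim). Note that recent PyTorch Lightning versions usually return the scheduler as a configuration dict so that it steps once per training batch rather than once per epoch; a possible variant of the final lines, a sketch that assumes the same hparams keys as the example above:

    scheduler = get_linear_schedule_with_warmup(
        optimizer,
        num_warmup_steps=self.hparams["warmup_steps"],
        num_training_steps=self.hparams["num_steps"],
    )
    # step the schedule every training batch instead of once per epoch
    return [optimizer], [{"scheduler": scheduler, "interval": "step", "frequency": 1}]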
Example 3: get_default_scheduler
# Required import: import transformers [as alias]
# or: from transformers import get_linear_schedule_with_warmup [as alias]
def get_default_scheduler(optimizer, warmup_steps, num_training_steps):
    scheduler = get_linear_schedule_with_warmup(
        optimizer,
        num_warmup_steps=warmup_steps,
        num_training_steps=num_training_steps,
    )
    return scheduler
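A minimal usage sketch of the helper above; the model, optimizer, and step counts are placeholders chosen only for illustration:

import torch
from transformers import get_linear_schedule_with_warmup

model = torch.nn.Linear(8, 2)  # placeholder model
optimizer = torch.optim.AdamW(model.parameters(), lr=5e-5)
scheduler = get_default_scheduler(optimizer, warmup_steps=50, num_training_steps=500)
print(scheduler.get_last_lr())  # [0.0] at step 0, i.e. the start of the warmup phase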