This article collects typical usage examples of the Python method transformers.get_linear_schedule_with_warmup. If you are asking how transformers.get_linear_schedule_with_warmup works or how to use it in practice, the curated examples below should help. You can also explore further usage examples from the transformers module.
Three code examples of transformers.get_linear_schedule_with_warmup are shown below, sorted by popularity by default.
Example 1: _get_scheduler
# Required import: import transformers
import transformers

def _get_scheduler(self, optimizer, scheduler: str, warmup_steps: int, t_total: int):
    """
    Returns the correct learning rate scheduler
    """
    scheduler = scheduler.lower()
    if scheduler == 'constantlr':
        return transformers.get_constant_schedule(optimizer)
    elif scheduler == 'warmupconstant':
        return transformers.get_constant_schedule_with_warmup(optimizer, num_warmup_steps=warmup_steps)
    elif scheduler == 'warmuplinear':
        return transformers.get_linear_schedule_with_warmup(optimizer, num_warmup_steps=warmup_steps, num_training_steps=t_total)
    elif scheduler == 'warmupcosine':
        return transformers.get_cosine_schedule_with_warmup(optimizer, num_warmup_steps=warmup_steps, num_training_steps=t_total)
    elif scheduler == 'warmupcosinewithhardrestarts':
        return transformers.get_cosine_with_hard_restarts_schedule_with_warmup(optimizer, num_warmup_steps=warmup_steps, num_training_steps=t_total)
    else:
        raise ValueError("Unknown scheduler {}".format(scheduler))
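A minimal usage sketch of the 'warmuplinear' branch follows; the toy model, optimizer, and step counts are illustrative assumptions, not part of the original example.

import torch
import transformers

model = torch.nn.Linear(10, 2)                       # toy model, for illustration only
optimizer = torch.optim.SGD(model.parameters(), lr=0.01)

# Ramp the LR from 0 to 0.01 over the first 100 steps, then decay it
# linearly to 0 at step 1000.
scheduler = transformers.get_linear_schedule_with_warmup(
    optimizer, num_warmup_steps=100, num_training_steps=1000
)

for step in range(1000):
    optimizer.step()    # update weights first
    scheduler.step()    # then advance the schedule once per optimizer step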
Example 2: configure_optimizers
# Required import: from transformers import get_linear_schedule_with_warmup
from transformers import AdamW, get_linear_schedule_with_warmup

def configure_optimizers(self):
    """Prepare optimizer and LR scheduler."""
    # Parameters whose names match these substrings are exempt from weight decay.
    no_decay = ["bias", "LayerNorm.weight"]
    optimizer_grouped_parameters = [
        {
            "params": [
                p
                for n, p in self.model.named_parameters()
                if not any(nd in n for nd in no_decay)
            ],
            "weight_decay": self.hparams["weight_decay"],
        },
        {
            "params": [
                p
                for n, p in self.model.named_parameters()
                if any(nd in n for nd in no_decay)
            ],
            "weight_decay": 0.0,
        },
    ]
    optimizer = AdamW(
        optimizer_grouped_parameters,
        lr=self.hparams["learning_rate"],
        eps=self.hparams["adam_epsilon"],
    )
    scheduler = get_linear_schedule_with_warmup(
        optimizer,
        num_warmup_steps=self.hparams["warmup_steps"],
        num_training_steps=self.hparams["num_steps"],
    )
    return [optimizer], [scheduler]
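Returning a bare scheduler as above leaves the stepping cadence to the framework default, which in PyTorch Lightning is once per epoch; a step-based warmup schedule usually wants to advance once per batch instead. The sketch below assumes Lightning's 1.x scheduler-dictionary format and the same hypothetical hparams keys.

from transformers import AdamW, get_linear_schedule_with_warmup

def configure_optimizers(self):
    # Hedged variant: same optimizer/scheduler, but wrapped in Lightning's
    # scheduler dict so the LR is updated after every optimizer step.
    optimizer = AdamW(self.model.parameters(), lr=self.hparams["learning_rate"])
    scheduler = get_linear_schedule_with_warmup(
        optimizer,
        num_warmup_steps=self.hparams["warmup_steps"],
        num_training_steps=self.hparams["num_steps"],
    )
    return [optimizer], [{"scheduler": scheduler, "interval": "step", "frequency": 1}]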
Example 3: get_default_scheduler
# Required import: from transformers import get_linear_schedule_with_warmup
from transformers import get_linear_schedule_with_warmup

def get_default_scheduler(optimizer, warmup_steps, num_training_steps):
    # Linear warmup to the base LR, then linear decay to zero over training.
    scheduler = get_linear_schedule_with_warmup(
        optimizer,
        num_warmup_steps=warmup_steps,
        num_training_steps=num_training_steps,
    )
    return scheduler
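A short end-to-end sketch of calling get_default_scheduler (defined above) follows; the toy model, the step counts, and the 10% warmup heuristic are illustrative assumptions rather than part of the original snippet.

import torch

model = torch.nn.Linear(768, 2)                      # toy model
optimizer = torch.optim.AdamW(model.parameters(), lr=5e-5)

epochs = 3
steps_per_epoch = 250                                # e.g. len(train_dataloader)
num_training_steps = epochs * steps_per_epoch
warmup_steps = int(0.1 * num_training_steps)         # common heuristic: 10% warmup

scheduler = get_default_scheduler(optimizer, warmup_steps, num_training_steps)
print(scheduler.get_last_lr())                       # LR is ~0 at the start of warmup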