This page collects typical usage examples of the Python method utility.make_scheduler. If you are wondering exactly what utility.make_scheduler does, how to call it, or what real uses of it look like, the selected examples below may help. You can also explore further usage examples of utility, the module this method belongs to.
Five code examples of utility.make_scheduler are shown below, sorted by popularity by default. You can upvote the examples you like or find useful; your feedback helps the system recommend better Python code examples.
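None of the examples below show the definition of make_scheduler itself; they only call it with the parsed command-line args and an optimizer, then step the returned scheduler once per epoch. As a rough orientation, the sketch below shows one plausible implementation built on torch.optim.lr_scheduler; the argument names decay_type, lr_decay and gamma are assumptions, not taken from this page.

# A plausible sketch of utility.make_scheduler (assumed, not from this page).
import torch.optim.lr_scheduler as lrs

def make_scheduler(args, my_optimizer):
    if args.decay_type == 'step':
        # Decay the learning rate by `gamma` every `lr_decay` epochs.
        scheduler = lrs.StepLR(
            my_optimizer,
            step_size=args.lr_decay,
            gamma=args.gamma
        )
    else:
        # e.g. decay_type == 'multistep_200_400' -> milestones [200, 400]
        milestones = [int(m) for m in args.decay_type.split('_')[1:]]
        scheduler = lrs.MultiStepLR(
            my_optimizer,
            milestones=milestones,
            gamma=args.gamma
        )
    return scheduler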
Example 1: __init__
# Required import: import utility [as an alias]
# Or: from utility import make_scheduler [as an alias]
# This snippet also uses `import os` and `import torch` from the surrounding module.
def __init__(self, args, loader, my_model, my_loss, ckp):
    self.args = args
    self.scale = args.scale

    self.ckp = ckp
    self.loader_train = loader.loader_train
    self.loader_test = loader.loader_test
    self.model = my_model
    self.loss = my_loss
    self.optimizer = utility.make_optimizer(args, self.model)
    self.scheduler = utility.make_scheduler(args, self.optimizer)

    if self.args.load != '':
        # Resume training: restore the optimizer state and fast-forward the
        # scheduler by the number of epochs already recorded in the log.
        self.optimizer.load_state_dict(
            torch.load(os.path.join(ckp.dir, 'optimizer.pt'))
        )
        for _ in range(len(ckp.log)):
            self.scheduler.step()

    self.error_last = 1e8
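For context, a trainer built this way is normally driven by an outer loop that advances the scheduler once per epoch and saves the optimizer state, which is exactly what the resume branch above undoes when it reloads optimizer.pt and steps the scheduler len(ckp.log) times. The sketch below is a hypothetical driver; the class name Trainer, the train()/test() methods, and args.epochs are assumptions for illustration, not part of the example.

# Hypothetical outer loop around the Trainer shown above (names assumed).
trainer = Trainer(args, loader, model, loss, checkpoint)
for epoch in range(args.epochs):
    trainer.scheduler.step()   # advance the learning-rate schedule once per epoch
    trainer.train()            # one pass over loader_train
    trainer.test()             # evaluate on loader_test; typically appends to ckp.log
    torch.save(
        trainer.optimizer.state_dict(),
        os.path.join(checkpoint.dir, 'optimizer.pt')
    )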
Example 2: __init__
# Required import: import utility [as an alias]
# Or: from utility import make_scheduler [as an alias]
def __init__(self, args, loader, my_model, my_loss, ckp):
    super(TrainerFT, self).__init__(args, loader, my_model, my_loss, ckp)
    # The base Trainer already performs these assignments (including the
    # utility.make_scheduler call), so they are kept only as commented-out reference:
    # self.args = args
    # self.scale = args.scale
    #
    # self.ckp = ckp
    # self.loader_train = loader.loader_train
    # self.loader_test = loader.loader_test
    # self.model = my_model
    # self.loss = my_loss
    if self.args.model.lower() == 'finetune':
        # For fine-tuning, only the optimizer is rebuilt; the scheduler
        # created by the base class is reused.
        self.optimizer = self.make_optimizer(args, self.model)
    # self.scheduler = utility.make_scheduler(args, self.optimizer)
    #
    # if self.args.load != '.':
    #     self.optimizer.load_state_dict(
    #         torch.load(os.path.join(ckp.dir, 'optimizer.pt'))
    #     )
    #     for _ in range(len(ckp.log)): self.scheduler.step()
    #
    # self.error_last = 1e8
Example 3: __init__
# Required import: import utility [as an alias]
# Or: from utility import make_scheduler [as an alias]
# This snippet also uses `import os` and `import torch` from the surrounding module.
def __init__(self, args, loader, my_model, my_loss, ckp):
    self.args = args
    self.scale = args.scale

    self.ckp = ckp
    self.loader_train = loader.loader_train
    self.loader_test = loader.loader_test
    self.model = my_model
    self.loss = my_loss
    self.optimizer = utility.make_optimizer(args, self.model)
    self.scheduler = utility.make_scheduler(args, self.optimizer)

    if self.args.load != '.':
        # Resume: reload the optimizer state and replay one scheduler step
        # per logged epoch so the learning rate matches the checkpoint.
        self.optimizer.load_state_dict(
            torch.load(os.path.join(ckp.dir, 'optimizer.pt'))
        )
        for _ in range(len(ckp.log)):
            self.scheduler.step()

    self.error_last = 1e8
Example 4: __init__
# Required import: import utility [as an alias]
# Or: from utility import make_scheduler [as an alias]
# This snippet also uses `import os` and `import torch` from the surrounding module.
def __init__(self, args, loader, my_model, my_loss, ckp):
    self.args = args
    self.scale = args.scale

    self.ckp = ckp
    self.loader_train = loader.loader_train
    self.loader_test = loader.loader_test
    self.model = my_model
    self.loss = my_loss
    self.optimizer = utility.make_optimizer(args, self.model)
    self.scheduler = utility.make_scheduler(args, self.optimizer)

    if self.args.load != '.':
        self.optimizer.load_state_dict(
            torch.load(os.path.join(ckp.dir, 'optimizer.pt'))
        )
        for _ in range(len(ckp.log)):
            self.scheduler.step()

    self.error_last = 1e8
    # Drop the local references now that everything is stored on self.
    del args, ckp, my_model, my_loss
Example 5: __init__
# Required import: import utility [as an alias]
# Or: from utility import make_scheduler [as an alias]
# This snippet also relies on torch.optim (imported as `optim`) and the project's `discriminator` module.
def __init__(self, args, gan_type):
    super(Adversarial, self).__init__()
    self.gan_type = gan_type
    self.gan_k = args.gan_k
    self.discriminator = discriminator.Discriminator(args, gan_type)
    if gan_type != 'WGAN_GP':
        self.optimizer = utility.make_optimizer(args, self.discriminator)
    else:
        # WGAN-GP uses its own Adam settings (zero momentum term, small
        # learning rate) instead of the shared optimizer factory.
        self.optimizer = optim.Adam(
            self.discriminator.parameters(),
            betas=(0, 0.9), eps=1e-8, lr=1e-5
        )
    self.scheduler = utility.make_scheduler(args, self.optimizer)
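In adversarial losses of this kind, gan_k usually controls how many discriminator updates are taken each time the loss is evaluated, using the optimizer built above. The forward method below is a simplified, assumed sketch of that pattern, not the actual implementation behind this example; it presumes `import torch` and `import torch.nn.functional as F` and a plain non-saturating GAN objective.

# Hypothetical continuation of the Adversarial module above (an assumed sketch).
def forward(self, fake, real):
    fake_detach = fake.detach()
    # Update the discriminator gan_k times per call.
    for _ in range(self.gan_k):
        self.optimizer.zero_grad()
        d_fake = self.discriminator(fake_detach)
        d_real = self.discriminator(real)
        loss_d = (
            F.binary_cross_entropy_with_logits(d_fake, torch.zeros_like(d_fake))
            + F.binary_cross_entropy_with_logits(d_real, torch.ones_like(d_real))
        )
        loss_d.backward()
        self.optimizer.step()

    # Generator adversarial loss: the generator wants the discriminator
    # to classify its output as real.
    d_fake_for_g = self.discriminator(fake)
    loss_g = F.binary_cross_entropy_with_logits(
        d_fake_for_g, torch.ones_like(d_fake_for_g)
    )
    return loss_g

On a WGAN_GP configuration the discriminator objective would instead use the Wasserstein estimate plus a gradient penalty, which is why that branch in the constructor above builds its own Adam optimizer rather than going through utility.make_optimizer.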