This article collects typical usage examples of the Python method utility.make_scheduler. If you are unsure what utility.make_scheduler does or how to call it, the curated code examples below should help. You can also explore further usage examples from the utility module that this method belongs to.
The following shows 5 code examples of utility.make_scheduler, sorted by popularity by default. You can upvote the examples you like or find useful; your feedback helps the system recommend better Python code samples.
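Before the examples, here is a minimal, hypothetical sketch of what a make_scheduler helper of this kind typically looks like: it reads learning-rate decay settings from args and wraps the optimizer in a torch.optim.lr_scheduler. The argument names decay_type, lr_decay and gamma are assumptions for illustration and may not match the actual utility module used by these projects.

import torch.optim.lr_scheduler as lrs

def make_scheduler(args, my_optimizer):
    # Assumed interface: build a learning-rate scheduler from command-line args.
    if args.decay_type == 'step':
        # Decay the learning rate by gamma every lr_decay epochs.
        scheduler = lrs.StepLR(
            my_optimizer,
            step_size=args.lr_decay,
            gamma=args.gamma
        )
    else:
        # e.g. decay_type == 'multistep_200_400_600': decay at the listed epochs.
        milestones = [int(m) for m in args.decay_type.split('_')[1:]]
        scheduler = lrs.MultiStepLR(
            my_optimizer,
            milestones=milestones,
            gamma=args.gamma
        )
    return scheduler

A trainer typically calls scheduler.step() once per finished epoch, which is why the examples below fast-forward the scheduler by len(ckp.log) steps when resuming from a checkpoint.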
Example 1: __init__
# Required import: import utility [as alias]
# Or: from utility import make_scheduler [as alias]
def __init__(self, args, loader, my_model, my_loss, ckp):
    self.args = args
    self.scale = args.scale

    self.ckp = ckp
    self.loader_train = loader.loader_train
    self.loader_test = loader.loader_test
    self.model = my_model
    self.loss = my_loss
    self.optimizer = utility.make_optimizer(args, self.model)
    self.scheduler = utility.make_scheduler(args, self.optimizer)

    if self.args.load != '':
        self.optimizer.load_state_dict(
            torch.load(os.path.join(ckp.dir, 'optimizer.pt'))
        )
        # When resuming, fast-forward the scheduler to the epoch recorded in the checkpoint log.
        for _ in range(len(ckp.log)):
            self.scheduler.step()

    self.error_last = 1e8
Example 2: __init__
# Required import: import utility [as alias]
# Or: from utility import make_scheduler [as alias]
def __init__(self, args, loader, my_model, my_loss, ckp):
    super(TrainerFT, self).__init__(args, loader, my_model, my_loss, ckp)
    # self.args = args
    # self.scale = args.scale
    #
    # self.ckp = ckp
    # self.loader_train = loader.loader_train
    # self.loader_test = loader.loader_test
    # self.model = my_model
    # self.loss = my_loss
    if self.args.model.lower() == 'finetune':
        self.optimizer = self.make_optimizer(args, self.model)
    # self.scheduler = utility.make_scheduler(args, self.optimizer)
    #
    # if self.args.load != '.':
    #     self.optimizer.load_state_dict(
    #         torch.load(os.path.join(ckp.dir, 'optimizer.pt'))
    #     )
    #     for _ in range(len(ckp.log)): self.scheduler.step()
    #
    # self.error_last = 1e8
Example 3: __init__
# Required import: import utility [as alias]
# Or: from utility import make_scheduler [as alias]
def __init__(self, args, loader, my_model, my_loss, ckp):
    self.args = args
    self.scale = args.scale

    self.ckp = ckp
    self.loader_train = loader.loader_train
    self.loader_test = loader.loader_test
    self.model = my_model
    self.loss = my_loss
    self.optimizer = utility.make_optimizer(args, self.model)
    self.scheduler = utility.make_scheduler(args, self.optimizer)

    if self.args.load != '.':
        self.optimizer.load_state_dict(
            torch.load(os.path.join(ckp.dir, 'optimizer.pt'))
        )
        for _ in range(len(ckp.log)):
            self.scheduler.step()

    self.error_last = 1e8
Example 4: __init__
# Required import: import utility [as alias]
# Or: from utility import make_scheduler [as alias]
def __init__(self, args, loader, my_model, my_loss, ckp):
    self.args = args
    self.scale = args.scale

    self.ckp = ckp
    self.loader_train = loader.loader_train
    self.loader_test = loader.loader_test
    self.model = my_model
    self.loss = my_loss
    self.optimizer = utility.make_optimizer(args, self.model)
    self.scheduler = utility.make_scheduler(args, self.optimizer)

    if self.args.load != '.':
        self.optimizer.load_state_dict(
            torch.load(os.path.join(ckp.dir, 'optimizer.pt'))
        )
        for _ in range(len(ckp.log)):
            self.scheduler.step()

    self.error_last = 1e8
    # Drop the local references; everything needed is now stored on self.
    del args, ckp, my_model, my_loss
Example 5: __init__
# Required import: import utility [as alias]
# Or: from utility import make_scheduler [as alias]
def __init__(self, args, gan_type):
    super(Adversarial, self).__init__()
    self.gan_type = gan_type
    self.gan_k = args.gan_k
    self.discriminator = discriminator.Discriminator(args, gan_type)
    if gan_type != 'WGAN_GP':
        self.optimizer = utility.make_optimizer(args, self.discriminator)
    else:
        # WGAN-GP uses its own Adam hyper-parameters instead of the shared optimizer factory.
        self.optimizer = optim.Adam(
            self.discriminator.parameters(),
            betas=(0, 0.9), eps=1e-8, lr=1e-5
        )
    self.scheduler = utility.make_scheduler(args, self.optimizer)
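As a closing note, here is a brief, hypothetical usage sketch of how a scheduler produced this way is typically driven in a training loop; the toy model, optimizer settings and epoch count below are illustrative assumptions and do not come from the examples above.

import torch
import torch.nn as nn
import torch.optim as optim
import torch.optim.lr_scheduler as lrs

model = nn.Linear(10, 1)                                      # toy model standing in for the real network
optimizer = optim.Adam(model.parameters(), lr=1e-4)
scheduler = lrs.StepLR(optimizer, step_size=200, gamma=0.5)   # stand-in for utility.make_scheduler(args, optimizer)

for epoch in range(1, 301):
    # One (dummy) training step per epoch.
    optimizer.zero_grad()
    loss = model(torch.randn(4, 10)).mean()
    loss.backward()
    optimizer.step()
    scheduler.step()                                          # advance the learning-rate schedule once per epoch
    if epoch % 100 == 0:
        print('epoch {}: lr = {:.2e}'.format(epoch, optimizer.param_groups[0]['lr']))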