

Python lr_scheduler.ExponentialLR Method Code Examples

This article collects typical usage examples of the Python method torch.optim.lr_scheduler.ExponentialLR. If you are wondering what lr_scheduler.ExponentialLR does, how to call it, or how it is used in practice, the curated code examples below may help. You can also explore further usage examples from the module it belongs to, torch.optim.lr_scheduler.


The following presents 15 code examples of lr_scheduler.ExponentialLR, sorted by popularity by default. You can upvote the examples you like or find useful; your feedback helps the system recommend better Python code examples.
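Before the project-specific examples, here is a minimal, self-contained sketch of the basic pattern (the model, learning rate, and gamma below are illustrative values, not taken from any project on this page): ExponentialLR multiplies every parameter group's learning rate by gamma on each call to step(), so after n steps the learning rate is initial_lr * gamma**n.

import torch
from torch import nn, optim
from torch.optim.lr_scheduler import ExponentialLR

model = nn.Linear(10, 2)                              # illustrative model
optimizer = optim.SGD(model.parameters(), lr=0.1)
scheduler = ExponentialLR(optimizer, gamma=0.9)

for epoch in range(5):
    # ... train for one epoch, calling optimizer.step() per batch ...
    scheduler.step()                                   # lr <- lr * gamma
    print(epoch, optimizer.param_groups[0]['lr'])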

Example 1: load_opt_sched

# Required import: from torch.optim import lr_scheduler [as alias]
# Or: from torch.optim.lr_scheduler import ExponentialLR [as alias]
def load_opt_sched(train_config, model):

    inf_params = model.inference_parameters()
    gen_params = model.generative_parameters()

    inf_opt = Optimizer(train_config['optimizer'], inf_params,
                        lr=train_config['inference_learning_rate'],
                        clip_grad_norm=train_config['clip_grad_norm'])
    inf_sched = ExponentialLR(inf_opt.opt, 0.999)

    gen_opt = Optimizer(train_config['optimizer'], gen_params,
                        lr=train_config['generation_learning_rate'],
                        clip_grad_norm=train_config['clip_grad_norm'])
    gen_sched = ExponentialLR(gen_opt.opt, 0.999)

    return (inf_opt, gen_opt), (inf_sched, gen_sched) 
Author: joelouismarino, Project: amortized-variational-filtering, Lines: 18, Source: load_opt_sched.py

Example 2: set_params

# Required import: from torch.optim import lr_scheduler [as alias]
# Or: from torch.optim.lr_scheduler import ExponentialLR [as alias]
def set_params(self, transformer, validation_datagen):
        self.validation_datagen = validation_datagen
        self.model = transformer.model
        self.optimizer = transformer.optimizer
        self.loss_function = transformer.loss_function
        self.lr_scheduler = ExponentialLR(self.optimizer, self.gamma, last_epoch=-1) 
Author: minerva-ml, Project: steppy-toolkit, Lines: 8, Source: callbacks.py

Example 3: configure_lr_scheduler

# Required import: from torch.optim import lr_scheduler [as alias]
# Or: from torch.optim.lr_scheduler import ExponentialLR [as alias]
def configure_lr_scheduler(self, optimizer, cfg):
        if cfg.SCHEDULER == 'step':
            scheduler = lr_scheduler.StepLR(optimizer, step_size=cfg.STEPS[0], gamma=cfg.GAMMA)
        elif cfg.SCHEDULER == 'multi_step':
            scheduler = lr_scheduler.MultiStepLR(optimizer, milestones=cfg.STEPS, gamma=cfg.GAMMA)
        elif cfg.SCHEDULER == 'exponential':
            scheduler = lr_scheduler.ExponentialLR(optimizer, gamma=cfg.GAMMA)
        elif cfg.SCHEDULER == 'SGDR':
            scheduler = lr_scheduler.CosineAnnealingLR(optimizer, T_max=cfg.MAX_EPOCHS)
        else:
            raise AssertionError('scheduler cannot be recognized.')
        return scheduler 
Author: ShuangXieIrene, Project: ssds.pytorch, Lines: 14, Source: ssds_train.py

Example 4: exponential

# Required import: from torch.optim import lr_scheduler [as alias]
# Or: from torch.optim.lr_scheduler import ExponentialLR [as alias]
def exponential(optimizer, last_epoch, gamma=0.995, **_):
  return lr_scheduler.ExponentialLR(optimizer, gamma=gamma, last_epoch=last_epoch) 
Author: pudae, Project: kaggle-humpback, Lines: 4, Source: scheduler_factory.py

Example 5: set_params

# Required import: from torch.optim import lr_scheduler [as alias]
# Or: from torch.optim.lr_scheduler import ExponentialLR [as alias]
def set_params(self, transformer, validation_datagen, *args, **kwargs):
        self.validation_datagen = validation_datagen
        self.model = transformer.model
        self.optimizer = transformer.optimizer
        self.loss_function = transformer.loss_function
        self.lr_scheduler = ExponentialLR(self.optimizer, self.gamma, last_epoch=-1) 
Author: neptune-ai, Project: open-solution-salt-identification, Lines: 8, Source: callbacks.py

Example 6: __init__

# Required import: from torch.optim import lr_scheduler [as alias]
# Or: from torch.optim.lr_scheduler import ExponentialLR [as alias]
def __init__(self, gamma, step_on_iteration=False):
        super().__init__(
            lambda opt: _scheduler.ExponentialLR(opt,
                                                 gamma),
            step_on_iteration=step_on_iteration
        ) 
Author: lRomul, Project: argus, Lines: 8, Source: lr_schedulers.py

Example 7: __init__

# Required import: from torch.optim import lr_scheduler [as alias]
# Or: from torch.optim.lr_scheduler import ExponentialLR [as alias]
def __init__(self, net_path=None, **kwargs):
        super(TrackerSiamFC, self).__init__('SiamFC', True)
        self.cfg = self.parse_args(**kwargs)

        # setup GPU device if available
        self.cuda = torch.cuda.is_available()
        self.device = torch.device('cuda:0' if self.cuda else 'cpu')

        # setup model
        self.net = Net(
            backbone=AlexNetV1(),
            head=SiamFC(self.cfg.out_scale))
        ops.init_weights(self.net)
        
        # load checkpoint if provided
        if net_path is not None:
            self.net.load_state_dict(torch.load(
                net_path, map_location=lambda storage, loc: storage))
        self.net = self.net.to(self.device)

        # setup criterion
        self.criterion = BalancedLoss()

        # setup optimizer
        self.optimizer = optim.SGD(
            self.net.parameters(),
            lr=self.cfg.initial_lr,
            weight_decay=self.cfg.weight_decay,
            momentum=self.cfg.momentum)
        
        # setup lr scheduler
        gamma = np.power(
            self.cfg.ultimate_lr / self.cfg.initial_lr,
            1.0 / self.cfg.epoch_num)
        self.lr_scheduler = ExponentialLR(self.optimizer, gamma) 
Author: huanglianghua, Project: siamfc-pytorch, Lines: 37, Source: siamfc.py
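The gamma computed in this example follows from the constraint initial_lr * gamma**epoch_num == ultimate_lr, i.e. the learning rate decays geometrically from initial_lr to ultimate_lr over epoch_num epochs. A quick check with illustrative values (not the tracker's actual configuration):

import numpy as np

initial_lr, ultimate_lr, epoch_num = 1e-2, 1e-5, 50    # illustrative values
gamma = np.power(ultimate_lr / initial_lr, 1.0 / epoch_num)
print(gamma)                                           # per-epoch decay factor, ~0.871
print(initial_lr * gamma ** epoch_num)                 # recovers ultimate_lr (1e-5)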

Example 8: load_sched

# Required import: from torch.optim import lr_scheduler [as alias]
# Or: from torch.optim.lr_scheduler import ExponentialLR [as alias]
def load_sched(optimizers, last_epoch):
    inf_opt, gen_opt = optimizers
    inf_sched = ExponentialLR(inf_opt.opt, 0.999, last_epoch=last_epoch)
    gen_sched = ExponentialLR(gen_opt.opt, 0.999, last_epoch=last_epoch)
    return (inf_sched, gen_sched) 
Author: joelouismarino, Project: amortized-variational-filtering, Lines: 7, Source: load_opt_sched.py

Example 9: configure_optimizers

# Required import: from torch.optim import lr_scheduler [as alias]
# Or: from torch.optim.lr_scheduler import ExponentialLR [as alias]
def configure_optimizers(self):
        optimizer = optim.SGD(
            self.parameters(),
            lr=self.lr,
            momentum=self.momentum,
            weight_decay=self.weight_decay
        )
        scheduler = lr_scheduler.ExponentialLR(optimizer, gamma=0.1)
        return [optimizer], [scheduler] 
Author: PyTorchLightning, Project: pytorch-lightning, Lines: 11, Source: imagenet.py

Example 10: lr_decay

# Required import: from torch.optim import lr_scheduler [as alias]
# Or: from torch.optim.lr_scheduler import ExponentialLR [as alias]
def lr_decay(cfg, optimizer):
    if cfg.SIAMRPN.TRAIN.LR_POLICY == 'exp':
        scheduler = ExponentialLR(optimizer, gamma=0.8685)
    elif cfg.SIAMRPN.TRAIN.LR_POLICY == 'cos':
        scheduler = CosineAnnealingLR(optimizer, T_max=args.epochs)
    elif cfg.SIAMRPN.TRAIN.LR_POLICY == 'Reduce':
        scheduler = ReduceLROnPlateau(optimizer, patience=5)
    elif cfg.SIAMRPN.TRAIN.LR_POLICY == 'log':
        # note: this branch returns a numpy array of per-epoch learning rates
        # rather than a scheduler object (see the sketch after this example)
        scheduler = np.logspace(math.log10(cfg.SIAMRPN.TRAIN.LR), math.log10(cfg.SIAMRPN.TRAIN.LR_END), cfg.SIAMRPN.TRAIN.END_EPOCH)
    else:
        raise ValueError('unsupported learning rate scheduler')

    return scheduler 
Author: researchmm, Project: SiamDW, Lines: 15, Source: train_siamrpn.py
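When the 'log' policy is chosen above, the function returns a plain numpy array of per-epoch learning rates instead of a scheduler object, so the caller would have to apply it manually. A hedged sketch of what such a loop might look like (the optimizer, learning-rate range, and epoch count are illustrative, not SiamDW's actual training code):

import numpy as np
from torch import nn, optim

lrs = np.logspace(np.log10(1e-2), np.log10(1e-5), 50)              # per-epoch learning rates
optimizer = optim.SGD(nn.Linear(4, 2).parameters(), lr=lrs[0])

for epoch in range(50):
    for param_group in optimizer.param_groups:
        param_group['lr'] = lrs[epoch]                              # set this epoch's learning rate
    # ... one epoch of training ...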

Example 11: lr_decay

# Required import: from torch.optim import lr_scheduler [as alias]
# Or: from torch.optim.lr_scheduler import ExponentialLR [as alias]
def lr_decay(cfg, optimizer):
    if cfg.SIAMFC.TRAIN.LR_POLICY == 'exp':
        scheduler = ExponentialLR(optimizer, gamma=0.8685)
    elif cfg.SIAMFC.TRAIN.LR_POLICY == 'cos':
        scheduler = CosineAnnealingLR(optimizer, T_max=args.epochs)
    elif cfg.SIAMFC.TRAIN.LR_POLICY == 'Reduce':
        scheduler = ReduceLROnPlateau(optimizer, patience=5)
    elif cfg.SIAMFC.TRAIN.LR_POLICY == 'log':
        # note: this branch returns a numpy array of per-epoch learning rates rather than a scheduler object
        scheduler = np.logspace(math.log10(cfg.SIAMFC.TRAIN.LR), math.log10(cfg.SIAMFC.TRAIN.LR_END), cfg.SIAMFC.TRAIN.END_EPOCH)
    else:
        raise ValueError('unsupported learning rate scheduler')

    return scheduler 
Author: researchmm, Project: SiamDW, Lines: 15, Source: train_siamfc.py

Example 12: build_lr_scheduler

# Required import: from torch.optim import lr_scheduler [as alias]
# Or: from torch.optim.lr_scheduler import ExponentialLR [as alias]
def build_lr_scheduler(optimizer: Optimizer, args: Namespace, total_epochs: List[int] = None) -> _LRScheduler:
    """
    Builds a learning rate scheduler.

    :param optimizer: The Optimizer whose learning rate will be scheduled.
    :param args: Arguments.
    :return: An initialized learning rate scheduler.
    """
    # Learning rate scheduler
    if args.scheduler == 'noam':
        return NoamLR(
            optimizer=optimizer,
            warmup_epochs=args.warmup_epochs,
            total_epochs=total_epochs or [args.epochs] * args.num_lrs,
            steps_per_epoch=args.train_data_size // args.batch_size,
            init_lr=args.init_lr,
            max_lr=args.max_lr,
            final_lr=args.final_lr
        )

    if args.scheduler == 'none':
        return MockLR(optimizer=optimizer, lr=args.init_lr)

    if args.scheduler == 'decay':
        return ExponentialLR(optimizer, args.lr_decay_rate)

    raise ValueError(f'Learning rate scheduler "{args.scheduler}" not supported.') 
Author: wengong-jin, Project: chemprop, Lines: 29, Source: utils.py

Example 13: ExponentialLR

# Required import: from torch.optim import lr_scheduler [as alias]
# Or: from torch.optim.lr_scheduler import ExponentialLR [as alias]
def ExponentialLR(gamma,
                  last_epoch=-1):
    # returns a factory; the optimizer is supplied later when the partial is called
    return partial(_lr_scheduler.ExponentialLR, **locals())
Author: moskomule, Project: homura, Lines: 6, Source: lr_scheduler.py
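A hypothetical use of this factory-style wrapper (the optimizer and gamma values are illustrative): the wrapper is configured first, and the returned partial is called later with an optimizer to produce the actual scheduler.

from functools import partial

from torch import nn, optim
from torch.optim import lr_scheduler as _lr_scheduler

def ExponentialLR(gamma, last_epoch=-1):
    return partial(_lr_scheduler.ExponentialLR, **locals())

sched_factory = ExponentialLR(gamma=0.95)                        # configure the decay factor now
optimizer = optim.SGD(nn.Linear(4, 2).parameters(), lr=0.01)     # illustrative optimizer
scheduler = sched_factory(optimizer)                             # attach the optimizer later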

Example 14: __init__

# Required import: from torch.optim import lr_scheduler [as alias]
# Or: from torch.optim.lr_scheduler import ExponentialLR [as alias]
def __init__(self, *, gamma, last_epoch=-1):
        """Decays the learning rate of each parameter group by gamma every epoch.
        When last_epoch=-1, sets initial lr as lr.

        Args:
            gamma (float): Multiplicative factor of learning rate decay.
            last_epoch (int): The index of last epoch. Default: -1.
        """
        super().__init__(lr_scheduler.ExponentialLR, gamma=gamma, last_epoch=last_epoch) 
Author: yoshida-lab, Project: XenonPy, Lines: 11, Source: lr_scheduler.py

Example 15: _get_scheduler

# Required import: from torch.optim import lr_scheduler [as alias]
# Or: from torch.optim.lr_scheduler import ExponentialLR [as alias]
def _get_scheduler(self, optimizer, config):
        return lr_scheduler.ExponentialLR(optimizer=optimizer, gamma=config['gamma'], last_epoch=-1) 
Author: automl, Project: Auto-PyTorch, Lines: 4, Source: lr_schedulers.py


Note: The torch.optim.lr_scheduler.ExponentialLR examples in this article were compiled by 纯净天空 from open-source code and documentation hosted on GitHub, MSDocs, and similar platforms. The snippets were selected from open-source projects contributed by various developers, and copyright remains with the original authors. Please refer to each project's license before redistributing or using the code; do not reproduce this article without permission.