

Python lr_scheduler.CyclicLR Method Code Examples

This article collects typical usage examples of the Python method torch.optim.lr_scheduler.CyclicLR. If you are wondering what exactly lr_scheduler.CyclicLR does, how to use it, or what it looks like in real code, the curated examples below should help. You can also explore further usage examples from the containing module, torch.optim.lr_scheduler.


A total of 11 code examples of lr_scheduler.CyclicLR are shown below, sorted by popularity by default.
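
Before working through the collected examples, here is a minimal, self-contained sketch of the typical CyclicLR pattern. The model, synthetic data, and hyperparameter values are placeholders chosen for illustration, not taken from any example below:

import torch
from torch import nn
from torch.optim import SGD
from torch.optim.lr_scheduler import CyclicLR

model = nn.Linear(10, 2)                       # placeholder model
optimizer = SGD(model.parameters(), lr=0.1, momentum=0.9)

# Triangular cycle: lr ramps from 0.001 up to 0.1 over 100 batches, then back down.
scheduler = CyclicLR(optimizer, base_lr=0.001, max_lr=0.1,
                     step_size_up=100, mode='triangular')

for step in range(300):                        # stand-in for a real training loop
    data = torch.randn(32, 10)                 # synthetic batch
    target = torch.randint(0, 2, (32,))
    optimizer.zero_grad()
    loss = nn.functional.cross_entropy(model(data), target)
    loss.backward()
    optimizer.step()
    scheduler.step()                           # CyclicLR steps once per batch

Note that, unlike most schedulers, CyclicLR is designed to be stepped after every batch; several of the examples below (skorch's step_every='batch', the batch_step/epoch_step dispatch) exist precisely to honor that convention.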

Example 1: test_lr_scheduler_record_batch_step

# Required import: from torch.optim import lr_scheduler [as alias]
# Or: from torch.optim.lr_scheduler import CyclicLR [as alias]
def test_lr_scheduler_record_batch_step(self, classifier_module, classifier_data):
        X, y = classifier_data
        batch_size = 128

        scheduler = LRScheduler(
            TorchCyclicLR,
            base_lr=1,
            max_lr=5,
            step_size_up=4,
            step_every='batch'
        )
        net = NeuralNetClassifier(
            classifier_module,
            max_epochs=1,
            lr=123.,
            batch_size=batch_size,
            callbacks=[('scheduler', scheduler)]
        )
        net.fit(X, y)
        new_lrs = scheduler.simulate(
            net.history[-1, 'train_batch_count'],
            initial_lr=123.,
        )
        assert np.all(net.history[-1, 'batches', :, 'event_lr'] == new_lrs) 
Author: skorch-dev, Project: skorch, Code lines: 26, Source: test_lr_scheduler.py

Example 2: test_cyclic_lr_with_epoch_step_warning

# Required import: from torch.optim import lr_scheduler [as alias]
# Or: from torch.optim.lr_scheduler import CyclicLR [as alias]
def test_cyclic_lr_with_epoch_step_warning(self,
                                               classifier_module,
                                               classifier_data):
        msg = ("The LRScheduler now makes a step every epoch by default. "
               "To have the cyclic lr scheduler update "
               "every batch set step_every='batch'")
        with pytest.warns(FutureWarning, match=msg) as record:
            scheduler = LRScheduler(
                TorchCyclicLR, base_lr=123, max_lr=999)
            net = NeuralNetClassifier(
                classifier_module,
                max_epochs=0,
                callbacks=[('scheduler', scheduler)],
            )
            net.initialize()
        assert len(record) == 1 
Author: skorch-dev, Project: skorch, Code lines: 18, Source: test_lr_scheduler.py

Example 3: find_bounds_clr

# Required import: from torch.optim import lr_scheduler [as alias]
# Or: from torch.optim.lr_scheduler import CyclicLR [as alias]
def find_bounds_clr(model, loader, optimizer, criterion, device, dtype, min_lr=8e-6, max_lr=8e-5, step_size=2000,
                    mode='triangular', save_path='.'):
    model.train()
    correct1, correct5 = 0, 0
    scheduler = CyclicLR(optimizer, base_lr=min_lr, max_lr=max_lr, step_size_up=step_size, mode=mode)
    epoch_count = step_size // len(loader)  # assumes step_size is a multiple of the number of batches per epoch
    accuracy = []
    for _ in trange(epoch_count):
        for batch_idx, (data, target) in enumerate(tqdm(loader)):
            if scheduler is not None:
                scheduler.step()
            data, target = data.to(device=device, dtype=dtype), target.to(device=device)

            optimizer.zero_grad()
            output = model(data)

            loss = criterion(output, target)
            loss.backward()
            optimizer.step()

            corr = correct(output, target)
            accuracy.append(corr[0] / data.shape[0])

    lrs = np.linspace(min_lr, max_lr, step_size)
    plt.plot(lrs, accuracy)
    # Save before calling show(): show() releases the figure, so saving
    # afterwards would write an empty file.
    plt.savefig(os.path.join(save_path, 'find_bounds_clr.pdf'))
    plt.show()
    np.save(os.path.join(save_path, 'acc.npy'), accuracy)
    return 
Author: Randl, Project: MobileNetV3-pytorch, Code lines: 31, Source: run.py
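
A hypothetical invocation of this range-finding helper might look as follows; model, train_loader, optimizer, and criterion are assumed to exist, and the bounds are placeholders (only the signature comes from the example above):

find_bounds_clr(model, train_loader, optimizer, criterion,
                device=torch.device('cuda'), dtype=torch.float32,
                min_lr=8e-6, max_lr=8e-5, step_size=2000,
                mode='triangular', save_path='./clr_search')

The plot of accuracy against learning rate then lets you read off sensible base_lr/max_lr bounds for the real cyclic schedule.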

Example 4: batch_step

# Required import: from torch.optim import lr_scheduler [as alias]
# Or: from torch.optim.lr_scheduler import CyclicLR [as alias]
def batch_step(self, closure=None):
        if isinstance(self.scheduler, (CyclicLR, CosineLR)):
            self.scheduler_step()
        self.optimizer_step(closure) 
Author: Randl, Project: MobileNetV3-pytorch, Code lines: 6, Source: optimizer_wrapper.py

Example 5: epoch_step

# Required import: from torch.optim import lr_scheduler [as alias]
# Or: from torch.optim.lr_scheduler import CyclicLR [as alias]
def epoch_step(self):
        if not isinstance(self.scheduler, (CyclicLR, CosineLR)):
            self.scheduler_step() 
Author: Randl, Project: MobileNetV3-pytorch, Code lines: 5, Source: optimizer_wrapper.py
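
Examples 4 and 5 together implement a common dispatch pattern: batch-granularity schedulers (CyclicLR, plus the project's own CosineLR) advance once per batch, while every other scheduler advances once per epoch. A minimal self-contained sketch of the same pattern, using a hypothetical wrapper class that is not part of the project:

from torch.optim.lr_scheduler import CyclicLR

# Hypothetical wrapper illustrating the batch/epoch dispatch of Examples 4 and 5.
class SchedulerWrapper:
    def __init__(self, optimizer, scheduler):
        self.optimizer = optimizer
        self.scheduler = scheduler

    def batch_step(self):
        self.optimizer.step()
        if isinstance(self.scheduler, CyclicLR):
            self.scheduler.step()      # cyclic schedules advance per batch

    def epoch_step(self):
        if not isinstance(self.scheduler, CyclicLR):
            self.scheduler.step()      # everything else advances per epoch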

Example 6: __init__

# Required import: from torch.optim import lr_scheduler [as alias]
# Or: from torch.optim.lr_scheduler import CyclicLR [as alias]
def __init__(self,
                 base_lr,
                 max_lr,
                 step_size_up=2000,
                 step_size_down=None,
                 mode='triangular',
                 gamma=1.,
                 scale_fn=None,
                 scale_mode='cycle',
                 cycle_momentum=True,
                 base_momentum=0.8,
                 max_momentum=0.9,
                 step_on_iteration=True):
        super().__init__(
            lambda opt: _scheduler.CyclicLR(opt,
                                            base_lr,
                                            max_lr,
                                            step_size_up=step_size_up,
                                            step_size_down=step_size_down,
                                            mode=mode,
                                            gamma=gamma,
                                            scale_fn=scale_fn,
                                            scale_mode=scale_mode,
                                            cycle_momentum=cycle_momentum,
                                            base_momentum=base_momentum,
                                            max_momentum=max_momentum),
            step_on_iteration=step_on_iteration
        ) 
Author: lRomul, Project: argus, Code lines: 30, Source: lr_schedulers.py
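
Instantiating the wrapper defined above might look like the following; the parameter values are placeholders, and how the resulting object is attached to training is argus-specific and not shown here:

scheduler = CyclicLR(base_lr=1e-4, max_lr=1e-2,
                     step_size_up=2000, mode='triangular2',
                     step_on_iteration=True)

Note that the wrapper defers optimizer binding: it passes a lambda that constructs the underlying torch CyclicLR only once an optimizer is available.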

Example 7: test_lr_scheduler_set_params

# Required import: from torch.optim import lr_scheduler [as alias]
# Or: from torch.optim.lr_scheduler import CyclicLR [as alias]
def test_lr_scheduler_set_params(self, classifier_module, classifier_data):
        scheduler = LRScheduler(
            TorchCyclicLR, base_lr=123, max_lr=999, step_every='batch')
        net = NeuralNetClassifier(
            classifier_module,
            max_epochs=0,
            callbacks=[('scheduler', scheduler)],
        )
        net.set_params(callbacks__scheduler__base_lr=456)
        net.fit(*classifier_data)  # we need to trigger on_train_begin
        assert net.callbacks[0][1].lr_scheduler_.base_lrs[0] == 456 
Author: skorch-dev, Project: skorch, Code lines: 13, Source: test_lr_scheduler.py

Example 8: initialize

# Required import: from torch.optim import lr_scheduler [as alias]
# Or: from torch.optim.lr_scheduler import CyclicLR [as alias]
def initialize(self):
        self.policy_ = self._get_policy_cls()
        self.lr_scheduler_ = None
        self.batch_idx_ = 0
        # TODO: Remove this warning on 0.10 release
        if (self.policy_ in (TorchCyclicLR, "TorchCyclicLR")
                and self.step_every == 'epoch'):
            warnings.warn(
                "The LRScheduler now makes a step every epoch by default. "
                "To have the cyclic lr scheduler update "
                "every batch set step_every='batch'",
                FutureWarning
            )

        return self 
Author: skorch-dev, Project: skorch, Code lines: 17, Source: lr_scheduler.py

Example 9: __init__

# Required import: from torch.optim import lr_scheduler [as alias]
# Or: from torch.optim.lr_scheduler import CyclicLR [as alias]
def __init__(self, optimizer, lr, warmup_steps, momentum, decay):
    # cyclic params
    self.optimizer = optimizer
    self.lr = lr
    self.warmup_steps = warmup_steps
    self.momentum = momentum
    self.decay = decay

    # clamp to a minimum of one
    if self.warmup_steps < 1:
      self.warmup_steps = 1

    # cyclic lr
    self.initial_scheduler = toptim.CyclicLR(self.optimizer,
                                             base_lr=0,
                                             max_lr=self.lr,
                                             step_size_up=self.warmup_steps,
                                             step_size_down=self.warmup_steps,
                                             cycle_momentum=False,
                                             base_momentum=self.momentum,
                                             max_momentum=self.momentum)

    # our params
    self.last_epoch = -1  # fix for pytorch 1.1 and below
    self.finished = False  # am i done
    super().__init__(optimizer) 
Author: PRBonn, Project: lidar-bonnetal, Code lines: 28, Source: warmupLR.py
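
Examples 9 through 11 reuse CyclicLR as a linear warmup: with base_lr=0 and a symmetric triangle (step_size_up == step_size_down == warmup_steps), the learning rate ramps from 0 to the target over the warmup phase. A small self-contained sketch, with placeholder values not taken from these projects, shows the resulting schedule:

import torch
from torch.optim.lr_scheduler import CyclicLR

params = [torch.nn.Parameter(torch.zeros(1))]
optimizer = torch.optim.SGD(params, lr=0.01)

warmup_steps, target_lr = 5, 0.01              # placeholder values
scheduler = CyclicLR(optimizer, base_lr=0.0, max_lr=target_lr,
                     step_size_up=warmup_steps, step_size_down=warmup_steps,
                     cycle_momentum=False)     # momentum is not cycled here

for step in range(warmup_steps + 1):
    print(step, scheduler.get_last_lr()[0])    # 0.0, 0.002, ..., 0.01
    optimizer.step()
    scheduler.step()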

Example 10: __init__

# Required import: from torch.optim import lr_scheduler [as alias]
# Or: from torch.optim.lr_scheduler import CyclicLR [as alias]
def __init__(self, optimizer, base_lr, max_lr, step_size_up, step_size_down, cycle_momentum, base_momentum, max_momentum, post_decay):
    # cyclic params
    self.optimizer = optimizer
    self.initial_lr = base_lr
    self.max_lr = max_lr
    self.step_size_up = step_size_up
    self.step_size_down = step_size_down
    self.cycle_momentum = cycle_momentum
    self.base_momentum = base_momentum
    self.max_momentum = max_momentum
    self.post_decay = post_decay

    # clamp to a minimum of one
    if self.step_size_up < 1:
      self.step_size_up = 1
    if self.step_size_down < 1:
      self.step_size_down = 1

    # cyclic lr
    self.initial_scheduler = toptim.CyclicLR(self.optimizer,
                                             base_lr=self.initial_lr,
                                             max_lr=self.max_lr,
                                             step_size_up=self.step_size_up,
                                             step_size_down=self.step_size_down,
                                             cycle_momentum=self.cycle_momentum,
                                             base_momentum=self.base_momentum,
                                             max_momentum=self.max_momentum)

    # our params
    self.last_epoch = -1  # fix for pytorch 1.1 and below
    self.oneshot_n = self.step_size_up + self.step_size_down   # steps to warm up for
    self.finished = False  # am i done
    super().__init__(optimizer) 
Author: PRBonn, Project: bonnetal, Code lines: 35, Source: oneshot.py

Example 11: __init__

# Required import: from torch.optim import lr_scheduler [as alias]
# Or: from torch.optim.lr_scheduler import CyclicLR [as alias]
def __init__(self, optimizer, lr, warmup_steps, momentum, decay):
        # cyclic params
        self.optimizer = optimizer
        self.lr = lr
        self.warmup_steps = warmup_steps
        self.momentum = momentum
        self.decay = decay

        # clamp to a minimum of one
        if self.warmup_steps < 1:
            self.warmup_steps = 1

        # cyclic lr
        self.initial_scheduler = toptim.CyclicLR(self.optimizer,
                                                 base_lr=0,
                                                 max_lr=self.lr,
                                                 step_size_up=self.warmup_steps,
                                                 step_size_down=self.warmup_steps,
                                                 cycle_momentum=False,
                                                 base_momentum=self.momentum,
                                                 max_momentum=self.momentum)

        # our params
        self.last_epoch = -1  # fix for pytorch 1.1 and below
        self.finished = False  # am i done
        super().__init__(optimizer) 
Author: TiagoCortinhal, Project: SalsaNext, Code lines: 28, Source: warmupLR.py


Note: The torch.optim.lr_scheduler.CyclicLR examples in this article were compiled by 纯净天空 from open-source code and documentation platforms such as GitHub and MSDocs. The snippets were selected from open-source projects contributed by many developers; copyright in the source code belongs to the original authors, and any distribution or use should follow the corresponding project's license. Do not reproduce without permission.