This page collects typical usage examples of the Python class mxnet.lr_scheduler.LRScheduler. If you are wondering what lr_scheduler.LRScheduler does, how to use it, or are looking for concrete examples, the curated code samples below may help. You can also explore further usage examples from the containing module, mxnet.lr_scheduler.
The following shows 4 code examples of lr_scheduler.LRScheduler, sorted by popularity by default.
Example 1: __init__
# Required import: from mxnet import lr_scheduler [as alias]
# Or: from mxnet.lr_scheduler import LRScheduler [as alias]
def __init__(self, mode, baselr, niters, nepochs,
             step=(30, 60, 90), step_factor=0.1, targetlr=0, power=0.9,
             warmup_epochs=0, warmup_lr=0, warmup_mode='linear'):
    super(LRScheduler, self).__init__()
    assert mode in ['step', 'poly', 'cosine']
    assert warmup_mode in ['linear', 'constant']
    self.mode = mode
    self.baselr = baselr
    self.learning_rate = self.baselr
    self.niters = niters  # iterations per epoch
    self.step = step
    self.step_factor = step_factor
    self.targetlr = targetlr
    self.power = power
    self.warmup_epochs = warmup_epochs
    self.warmup_lr = warmup_lr
    self.warmup_mode = warmup_mode
    self.N = nepochs * niters  # total number of updates
    self.warmup_N = warmup_epochs * niters  # updates spent in warmup
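The snippet above only shows the constructor; in MXNet, a scheduler subclass also needs a __call__(num_update) method that returns the learning rate for a given update count. Below is a minimal, self-contained sketch of the 'step' branch of such a scheduler. The class name StepScheduler and the decay logic are assumptions for illustration, modeled on the fields set in the constructor above, not the original author's code.

# A minimal sketch, assuming MXNet's scheduler protocol: the optimizer
# calls the scheduler with the update count and uses the returned value
# as the learning rate. StepScheduler is a hypothetical name.
from mxnet import lr_scheduler

class StepScheduler(lr_scheduler.LRScheduler):
    def __init__(self, baselr, niters, nepochs, step=(30, 60, 90),
                 step_factor=0.1):
        super(StepScheduler, self).__init__()
        self.baselr = baselr
        self.niters = niters        # iterations per epoch
        self.step = step            # epochs at which the lr drops
        self.step_factor = step_factor
        self.N = nepochs * niters   # total number of updates

    def __call__(self, num_update):
        # Multiply baselr by step_factor once per boundary already passed.
        epoch = num_update // self.niters
        count = sum(1 for s in self.step if epoch >= s)
        return self.baselr * (self.step_factor ** count)

Constructed, for example, as StepScheduler(baselr=0.1, niters=500, nepochs=90), this would drop the learning rate by 10x at epochs 30, 60, and 90.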
Example 2: add
# Required import: from mxnet import lr_scheduler [as alias]
# Or: from mxnet.lr_scheduler import LRScheduler [as alias]
def add(self, scheduler):
    assert isinstance(scheduler, LRScheduler)
    scheduler.offset = self.count
    self.count += scheduler.niters
    self.update_sep.append(self.count)
    self.schedulers.append(scheduler)
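This add() belongs to a composite scheduler that runs several schedulers back to back, using count as a running iteration total and update_sep as phase boundaries. The surrounding class is not shown, so the sketch below reconstructs a plausible container: the name SequentialScheduler, its constructor, and the __call__ dispatch are all assumptions. It assumes each added scheduler exposes niters and offset attributes, as the custom LRScheduler in Example 3 does.

# A hypothetical container for the add() method above (an assumption,
# not the original class): schedulers are chained end to end, and
# update_sep records the cumulative boundary after each phase.
from bisect import bisect_right
from mxnet.lr_scheduler import LRScheduler

class SequentialScheduler(object):
    def __init__(self, schedulers=()):
        self.schedulers = []
        self.update_sep = []  # cumulative iteration boundaries between phases
        self.count = 0        # total iterations covered by added schedulers
        for scheduler in schedulers:
            self.add(scheduler)

    def add(self, scheduler):
        assert isinstance(scheduler, LRScheduler)
        scheduler.offset = self.count
        self.count += scheduler.niters
        self.update_sep.append(self.count)
        self.schedulers.append(scheduler)

    def __call__(self, num_update):
        # Dispatch to the scheduler whose phase contains num_update.
        idx = min(bisect_right(self.update_sep, num_update),
                  len(self.schedulers) - 1)
        return self.schedulers[idx](num_update)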
Example 3: __init__
# Required import: from mxnet import lr_scheduler [as alias]
# Or: from mxnet.lr_scheduler import LRScheduler [as alias]
# Also requires: import warnings (used for the deprecation warnings below)
def __init__(self, mode, base_lr=0.1, target_lr=0,
             niters=0, nepochs=0, iters_per_epoch=0, offset=0,
             power=2, step_iter=None, step_epoch=None, step_factor=0.1,
             baselr=None, targetlr=None):
    super(LRScheduler, self).__init__()
    assert mode in ['constant', 'step', 'linear', 'poly', 'cosine']
    self.mode = mode
    if mode == 'step':
        assert step_iter is not None or step_epoch is not None
    if baselr is not None:
        warnings.warn("baselr is deprecated. Please use base_lr.")
        if base_lr == 0.1:
            base_lr = baselr
    self.base_lr = base_lr
    if targetlr is not None:
        warnings.warn("targetlr is deprecated. Please use target_lr.")
        if target_lr == 0:
            target_lr = targetlr
    self.target_lr = target_lr
    if self.mode == 'constant':
        self.target_lr = self.base_lr
    self.niters = niters
    self.step = step_iter
    epoch_iters = nepochs * iters_per_epoch
    if epoch_iters > 0:
        # Epoch-based arguments take precedence over raw iteration counts.
        self.niters = epoch_iters
        if step_epoch is not None:
            self.step = [s * iters_per_epoch for s in step_epoch]
    self.offset = offset
    self.power = power
    self.step_factor = step_factor
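A short usage sketch of this constructor (the values are hypothetical, and it assumes the class is named LRScheduler, as implied by its own super() call). It exercises the two behaviors worth noting above: epoch-based arguments override raw iteration counts, and the deprecated baselr/targetlr spellings are still accepted with a warning.

# Epoch-based configuration: niters and step are derived from
# iters_per_epoch, so step_epoch=[30, 60] becomes [15000, 30000].
sched = LRScheduler(mode='step', base_lr=0.1,
                    nepochs=90, iters_per_epoch=500,
                    step_epoch=[30, 60], step_factor=0.1)
assert sched.niters == 45000          # 90 * 500
assert sched.step == [15000, 30000]

# Deprecated spelling: warns, then copies baselr into base_lr because
# base_lr was left at its default of 0.1.
old = LRScheduler(mode='poly', baselr=0.05)
assert old.base_lr == 0.05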
Example 4: __init__
# Required import: from mxnet import lr_scheduler [as alias]
# Or: from mxnet.lr_scheduler import LRScheduler [as alias]
def __init__(self,
             mode,
             base_lr,
             n_iters,
             n_epochs,
             step=(30, 60, 90),
             step_factor=0.1,
             target_lr=0,
             power=0.9,
             warmup_epochs=0,
             warmup_lr=0,
             warmup_mode="linear"):
    super(LRScheduler, self).__init__(base_lr=base_lr)
    assert mode in ["step", "poly", "cosine"]
    assert warmup_mode in ["constant", "linear", "poly", "cosine"]
    self.mode = mode
    self.learning_rate = self.base_lr
    self.n_iters = n_iters  # iterations per epoch
    self.step = step
    self.step_factor = step_factor
    self.target_lr = target_lr
    self.power = power
    self.warmup_epochs = warmup_epochs
    self.warmup_lr = warmup_lr
    self.warmup_mode = warmup_mode
    self.N = n_epochs * n_iters  # total number of updates
    self.warmup_N = warmup_epochs * n_iters  # updates spent in warmup
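Unlike Examples 1 and 3, this variant passes base_lr through to mxnet.lr_scheduler.LRScheduler's own constructor, so it composes directly with MXNet optimizers. Below is a hedged sketch of how the warmup bookkeeping (warmup_N versus N) would typically drive a __call__, and how the scheduler plugs into an optimizer. The class name WarmupCosineScheduler and the warmup/decay formulas are assumptions for illustration, not the original code.

# A minimal end-to-end sketch (hypothetical subclass name and __call__;
# the snippet above only shows the constructor). Updates before warmup_N
# ramp the lr linearly; the rest follow a cosine decay down to target_lr.
import math
import mxnet as mx
from mxnet.lr_scheduler import LRScheduler

class WarmupCosineScheduler(LRScheduler):
    def __init__(self, base_lr, n_iters, n_epochs,
                 target_lr=0, warmup_epochs=0, warmup_lr=0):
        super(WarmupCosineScheduler, self).__init__(base_lr=base_lr)
        self.target_lr = target_lr
        self.warmup_lr = warmup_lr
        self.N = n_epochs * n_iters
        self.warmup_N = warmup_epochs * n_iters

    def __call__(self, num_update):
        if num_update < self.warmup_N:
            # Linear warmup from warmup_lr up to base_lr.
            return self.warmup_lr + (self.base_lr - self.warmup_lr) \
                * num_update / self.warmup_N
        # Cosine decay from base_lr down to target_lr over the remainder.
        t = min(num_update - self.warmup_N, self.N - self.warmup_N)
        return self.target_lr + (self.base_lr - self.target_lr) \
            * (1 + math.cos(math.pi * t / (self.N - self.warmup_N))) / 2

sched = WarmupCosineScheduler(base_lr=0.1, n_iters=500, n_epochs=90,
                              warmup_epochs=5, warmup_lr=0.001)
opt = mx.optimizer.SGD(learning_rate=0.1, lr_scheduler=sched)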