

Python optimizer.GradientMethod Code Examples

This article collects typical usage examples of chainer.optimizer.GradientMethod in Python. If you are unsure how optimizer.GradientMethod is used in practice, or are looking for concrete examples of it in real code, the selected examples below may help. You can also explore further usage examples from its containing module, chainer.optimizer.


Four code examples of optimizer.GradientMethod are shown below, ordered by popularity by default.
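For context, GradientMethod is Chainer's base class for gradient-based optimizers such as SGD; a subclass only needs to implement create_update_rule(), which returns one UpdateRule per parameter. The following is a minimal, hypothetical sketch of that pattern, assuming a standard Chainer installation (MySGD and MySGDRule are illustrative names, not part of Chainer):

import numpy as np

import chainer
from chainer import optimizer


class MySGDRule(optimizer.UpdateRule):
    # Plain SGD update: p <- p - lr * grad (CPU path only, for brevity).
    def update_core_cpu(self, param):
        grad = param.grad
        if grad is None:
            return
        param.data -= self.hyperparam.lr * grad


class MySGD(optimizer.GradientMethod):
    def __init__(self, lr=0.01):
        super(MySGD, self).__init__()
        self.hyperparam.lr = lr

    def create_update_rule(self):
        # Called once per parameter by setup(); the rule shares the
        # optimizer's hyperparameters via parent_hyperparam.
        return MySGDRule(self.hyperparam)


# Typical usage: setup() against a link, backprop, then update().
link = chainer.links.Linear(3, 2)
opt = MySGD(lr=0.1)
opt.setup(link)

x = np.random.rand(1, 3).astype(np.float32)
loss = chainer.functions.sum(link(x))
link.cleargrads()
loss.backward()
opt.update()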

Example 1: test_cleargrad

# Required import: from chainer import optimizer [as alias]
# Or alternatively: from chainer.optimizer import GradientMethod [as alias]
def test_cleargrad(self, backend_config):

        class CleargradHook(object):

            name = 'Cleargrad'
            timing = 'pre'

            def __init__(self, _):
                pass

            def __call__(self, opt):
                for param in opt.target.params():
                    # Clear all grads
                    param.cleargrad()

        target = self.target
        target.to_device(backend_config.device)
        # TODO(niboshi): Do not use SGD in GradientMethod test
        opt = optimizers.SGD(lr=1)
        opt.setup(target)
        opt.add_hook(CleargradHook(self))
        opt.add_hook(DummyHook(self))

        opt.update() 
Author ID: chainer, Project: chainer, Lines of code: 26, Source file: test_optimizer.py
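Note that DummyHook, self.target, and backend_config in Example 1 are defined elsewhere in test_optimizer.py and are not shown on this page. As a rough idea of what the snippet assumes, a hypothetical minimal stand-in for the hook (not the real definition) could look like this:

class DummyHook(object):
    # Hypothetical stand-in: a no-op post-update hook exposing the
    # attributes that Optimizer.add_hook() expects (name and timing).
    name = 'Dummy'
    timing = 'post'

    def __init__(self, _):
        pass

    def __call__(self, opt):
        pass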

Example 2: setUp

# Required import: from chainer import optimizer [as alias]
# Or alternatively: from chainer.optimizer import GradientMethod [as alias]
def setUp(self):
        self.optimizer = chainer.GradientMethod()
        self.target = chainer.ChainList(
            SimpleLink(np.arange(3).astype(np.float32),
                       np.arange(3).astype(np.float32)),
            SimpleLink(np.arange(3).astype(np.float32),
                       np.arange(3).astype(np.float32)))
        self.optimizer.create_update_rule = mock.MagicMock 
Author ID: chainer, Project: chainer, Lines of code: 10, Source file: test_optimizer.py
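SimpleLink in Example 2 is another helper defined elsewhere in test_optimizer.py. Given how it is constructed from a parameter array and a gradient array, a plausible minimal version might look like the following (an assumption for illustration, not the original definition):

import chainer


class SimpleLink(chainer.Link):
    # Hypothetical stand-in: holds one parameter initialized from w,
    # with its gradient preset to g.
    def __init__(self, w, g):
        super(SimpleLink, self).__init__()
        with self.init_scope():
            self.param = chainer.Parameter(w)
        self.param.grad = g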

Example 3: test_update

# Required import: from chainer import optimizer [as alias]
# Or alternatively: from chainer.optimizer import GradientMethod [as alias]
def test_update(self):
        initial_p = np.array([1., 2., 3.], np.float32)
        x = chainerx.array([2., 4., 6.], np.float32)

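        # For y = 3 * x * p, dy/dp = 3 * x, so the rule below gives
        # p <- 4 * p - 2 * (3 * x) = 4 * p - 6 * x: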
        expected_p = 4. * initial_p - 6. * backend.CpuDevice().send(x)

        class ChainerxUpdateRule(optimizer.UpdateRule):
            call_count = 0

            def update_core_chainerx(self, param):
                # delta = 3 * p - 2 * (dy/dp), so p <- p + delta = 4 * p - 2 * (dy/dp)
                array = param.array
                t1 = param.array.as_grad_stopped() * 3.
                t2 = param.grad.as_grad_stopped() * 2.
                delta = t1 - t2
                array += delta
                self.call_count += 1

        class ChainerxOptimizer(optimizer.GradientMethod):
            def create_update_rule(self):
                return ChainerxUpdateRule(self.hyperparam)

        class Link(chainer.Link):
            def __init__(self):
                super(Link, self).__init__()
                with self.init_scope():
                    self.p = chainer.Parameter(initial_p)

            def forward(self, x):
                return 3. * x * self.p

        link = Link()
        link.to_device('native:0')
        y = link(x)
        y.backward()
        optimizer_ = ChainerxOptimizer()
        optimizer_.setup(link)
        optimizer_.update()

        assert link.p.update_rule.call_count == 1
        np.testing.assert_array_equal(
            backend.CpuDevice().send(link.p.array), expected_p) 
Author ID: chainer, Project: chainer, Lines of code: 44, Source file: test_optimizer.py

Example 4: create

# Required import: from chainer import optimizer [as alias]
# Or alternatively: from chainer.optimizer import GradientMethod [as alias]
def create(self, device):

        class MyLink(chainer.Link):
            def __init__(self):
                super(MyLink, self).__init__()
                with self.init_scope():
                    self.p1 = chainer.Parameter()  # uninitialized

                    self.p2 = chainer.Parameter(  # initialized, with grad
                        np.array([3, 2], np.float32))
                    self.p2.grad = np.array([13, 12], np.float32)

                    self.p3 = chainer.Parameter(  # initialized, without grad
                        np.array([5, 7], np.float32))

        call_record = []
        override_pattern = self.override_pattern

        class MyUpdateRule(optimizer.UpdateRule):
            if override_pattern == 'generic':
                def update_core(self, param):
                    call_record.append(('update_core', param))

            elif override_pattern == 'cpu_gpu':
                def update_core_cpu(self, param):
                    call_record.append(('update_core_cpu', param))

                def update_core_gpu(self, param):
                    call_record.append(('update_core_gpu', param))

            elif override_pattern == 'cpu_gpu_chx':
                def update_core_cpu(self, param):
                    call_record.append(('update_core_cpu', param))

                def update_core_gpu(self, param):
                    call_record.append(('update_core_gpu', param))

                def update_core_chainerx(self, param):
                    call_record.append(('update_core_chainerx', param))

            else:
                assert False, override_pattern

        class MyOptimizer(optimizer.GradientMethod):
            def create_update_rule(self):
                return MyUpdateRule()

        optimizer_ = MyOptimizer()
        target = MyLink()
        target.to_device(device)
        optimizer_.setup(target)

        return optimizer_, call_record 
Author ID: chainer, Project: chainer, Lines of code: 55, Source file: test_optimizer.py
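The create() helper above only builds the optimizer, the target link, and the call_record list; the actual assertions live in the surrounding test case, which is not shown here. A hypothetical check method in the same test class (an assumption about how the helper is driven, not the original code) might look like:

def check_update_dispatch(self, device):
        # Hypothetical driver: run one update and inspect which
        # update_core* override handled each parameter.
        optimizer_, call_record = self.create(device)
        optimizer_.update()
        method_names = [name for name, _ in call_record]
        assert all(name.startswith('update_core') for name in method_names)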


Note: The chainer.optimizer.GradientMethod examples in this article were collected by 纯净天空 from open-source code and documentation platforms such as GitHub and MSDocs. The code snippets are taken from open-source projects contributed by their authors; copyright in the source code remains with the original authors, and any distribution or use should follow the corresponding project's license. Do not reproduce without permission.