This page collects typical usage examples of the chainer.backend.get_array_module method in Python. If you are wondering what backend.get_array_module is for, how to call it, or what real code that uses it looks like, the curated examples below should help. You can also look further into usage examples of the module it lives in, chainer.backend.
The following presents 15 code examples of the backend.get_array_module method, sorted by popularity by default.
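Before the collected examples, a minimal, self-contained sketch of what get_array_module does may be useful: it inspects its array (or Variable) arguments and returns the matching array module, numpy for CPU data and cupy for data on a GPU, so the same arithmetic can run on either backend. The scale helper below is purely illustrative and not taken from any of the examples that follow.

import numpy

from chainer import backend


def scale(x, factor):
    # get_array_module returns numpy for CPU arrays and cupy for GPU
    # arrays, so the arithmetic below is device-agnostic.
    xp = backend.get_array_module(x)
    return factor * xp.asarray(x)


y = scale(numpy.arange(3, dtype=numpy.float32), 2.0)  # dispatches to numpy here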
Example 1: backward
# Required import: from chainer import backend [as alias]
# Or: from chainer.backend import get_array_module [as alias]
def backward(self, inputs, grad_outputs):
    xp = backend.get_array_module(*inputs)
    grad_dtype = grad_outputs[0].dtype
    # convert to float32 for communication
    if numpy.float16 == grad_dtype:
        grad_outputs = tuple([item.astype(numpy.float32)
                              for item in grad_outputs])
    gxs = self.comm.alltoall(grad_outputs)
    gx = xp.stack(gxs).sum(axis=0)
    # convert back
    if numpy.float16 == grad_dtype:
        gx = gx.astype(grad_dtype)
    return gx,
Example 2: forward
# Required import: from chainer import backend [as alias]
# Or: from chainer.backend import get_array_module [as alias]
def forward(self, inputs):
    xp = backend.get_array_module(*inputs)
    x, = inputs
    # convert to float32 for communication
    x_dtype = x.dtype
    if numpy.float16 == x_dtype:
        x = x.astype(numpy.float32)
    ys = self.comm.gather(x, self.root)
    if self.comm.rank == self.root:
        # convert back
        if numpy.float16 == x_dtype:
            ys = tuple([item.astype(x_dtype) for item in ys])
        return ys
    else:
        # Return an empty variable, which serves as "delegate_variable."
        return xp.array([], dtype=x_dtype),
Example 3: test_update
# Required import: from chainer import backend [as alias]
# Or: from chainer.backend import get_array_module [as alias]
def test_update(self, backend_config):
    if backend_config.xp is chainerx:
        # ChainerX performs the loss scaling in its own backward
        # method, so the optimizer should not divide the parameters
        # back. This test does not actually build a ChainerX
        # computation graph, so no actual loss scaling is done.
        self.optimizer.lr = 1.0
    target = self.target
    optimizer = self.optimizer
    target.to_device(backend_config.device)
    optimizer.setup(target)
    optimizer.update()

    xp = backend.get_array_module(target[0].param)
    expected_data = xp.zeros(self.shape, dtype=self.dtype)
    rtol, atol = 1e-4, 1e-5
    if self.dtype is np.float16:
        rtol, atol = 1e-1, 1e-2
    for i in range(2):
        testing.assert_allclose(
            target[i].param.data, expected_data,
            rtol=rtol, atol=atol)
Example 4: check_type_mismatch
# Required import: from chainer import backend [as alias]
# Or: from chainer.backend import get_array_module [as alias]
def check_type_mismatch(self, x_data, retain):
    xp = backend.get_array_module(x_data)

    class DummyFunction(chainer.Function):
        label = 'dummy_function'

        def forward(self, inputs):
            if not retain:
                self.retain_inputs(())
            return xp.array(1, np.float32),

        def backward(self, inputs, grads):
            return [1]

    x = chainer.Variable(x_data)
    y = DummyFunction()(x)
    with six.assertRaisesRegex(self, TypeError, 'dummy_function'):
        y.backward()
Example 5: check_dtype_mismatch
# Required import: from chainer import backend [as alias]
# Or: from chainer.backend import get_array_module [as alias]
def check_dtype_mismatch(self, x_data, retain):
    xp = backend.get_array_module(x_data)

    class DummyFunction(chainer.Function):
        label = 'dummy_function'

        def forward(self, inputs):
            if not retain:
                self.retain_inputs(())
            return xp.array(1, np.float32),

        def backward(self, inputs, grads):
            return xp.array([1], np.int32),

    x = chainer.Variable(x_data)
    y = DummyFunction()(x)
    with six.assertRaisesRegex(self, TypeError, 'dummy_function'):
        y.backward()
Example 6: check_shape_mismatch
# Required import: from chainer import backend [as alias]
# Or: from chainer.backend import get_array_module [as alias]
def check_shape_mismatch(self, x_data, retain):
    xp = backend.get_array_module(x_data)

    class DummyFunction(chainer.Function):
        label = 'dummy_function'

        def forward(self, inputs):
            if not retain:
                self.retain_inputs(())
            return xp.array(1, np.float32),

        def backward(self, inputs, grads):
            return xp.array([1, 2], np.float32),

    x = chainer.Variable(x_data)
    y = DummyFunction()(x)
    with six.assertRaisesRegex(self, ValueError, 'dummy_function'):
        y.backward()
Example 7: check_traceback
# Required import: from chainer import backend [as alias]
# Or: from chainer.backend import get_array_module [as alias]
def check_traceback(self, x_data):
    xp = backend.get_array_module(x_data)

    class DummyFunction(chainer.Function):
        label = 'dummy_function'

        def forward(self, inputs):
            return xp.array(1, np.float32),

        def backward(self, inputs, grads):
            return xp.array([1, 2], np.float32),

    x = chainer.Variable(x_data)
    line = inspect.currentframe().f_lineno + 1
    y = DummyFunction()(x)  # `line` is THIS line
    try:
        y.backward()
        self.fail()
    except ValueError as e:
        assert 'Stacktrace' in str(e)
        assert 'line %d' % line in str(e)
Example 8: test_forward_consistency
# Required import: from chainer import backend [as alias]
# Or: from chainer.backend import get_array_module [as alias]
def test_forward_consistency(self):
    x_data = self.x
    xp = backend.get_array_module(x_data)
    if not self.c_contiguous:
        x_data = xp.asfortranarray(x_data)
        self.assertFalse(x_data.flags.c_contiguous)

    x_cpu = chainer.Variable(x_data)
    y_cpu = shift.shift(
        x_cpu, ksize=self.ksize, dilate=self.dilate)

    x_gpu = chainer.Variable(cuda.to_gpu(x_data))
    y_gpu = shift.shift(
        x_gpu, ksize=self.ksize, dilate=self.dilate)

    testing.assert_allclose(
        y_cpu.data, y_gpu.data.get(), atol=5e-4, rtol=5e-3)
Example 9: forward_expected
# Required import: from chainer import backend [as alias]
# Or: from chainer.backend import get_array_module [as alias]
def forward_expected(self, inputs):
    if self.test_partial:
        e1, e2, W = inputs
        V1 = None
        V2 = None
        b = None
    else:
        e1, e2, W, V1, V2, b = inputs
    e1 = e1.reshape(e1.shape[0], -1)
    e2 = e2.reshape(e2.shape[0], -1)
    xp = backend.get_array_module(e1)
    y_expect = xp.einsum('ij,ik,jkl->il', e1, e2, W)

    flags = V1 is None, V2 is None, b is None
    if any(flags):
        if not all(flags):
            raise ValueError(
                'Test either all or none of the optional parameters.')
    else:
        y_expect += e1.dot(V1)
        y_expect += e2.dot(V2)
        y_expect += b
    return y_expect,
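As a side note on Example 9, the einsum subscripts 'ij,ik,jkl->il' compute the bilinear interaction y[i, l] = sum over j, k of e1[i, j] * e2[i, k] * W[j, k, l], which is the reference output this forward_expected helper produces (plus the optional linear terms V1, V2 and bias b). A small numpy-only sanity check, with arbitrary illustrative shapes:

import numpy as np

e1 = np.random.rand(2, 3).astype(np.float32)
e2 = np.random.rand(2, 4).astype(np.float32)
W = np.random.rand(3, 4, 5).astype(np.float32)

y = np.einsum('ij,ik,jkl->il', e1, e2, W)

# explicit double loop over the j and k axes for comparison
y_ref = np.empty((2, 5), dtype=np.float32)
for i in range(2):
    for l in range(5):
        y_ref[i, l] = sum(e1[i, j] * e2[i, k] * W[j, k, l]
                          for j in range(3) for k in range(4))

assert np.allclose(y, y_ref, atol=1e-4)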
Example 10: check_backward
# Required import: from chainer import backend [as alias]
# Or: from chainer.backend import get_array_module [as alias]
def check_backward(self, x_data, W_data, b_data, y_grad):
    args = x_data, W_data
    if b_data is not None:
        args += b_data,

    if self.use_batchwise_mask:
        mask_shape = (x_data.shape[0],) + W_data.shape
    else:
        mask_shape = W_data.shape

    xp = backend.get_array_module(x_data)
    mask = xp.random.rand(*mask_shape) >= self.ratio

    def f(x, W, b=None):
        return functions.simplified_dropconnect(
            x, W, b, self.ratio, self.train, mask,
            self.use_batchwise_mask)

    gradient_check.check_backward(
        f, args, y_grad, eps=1e-2, **self.check_backward_options)
Example 11: check_double_backward
# Required import: from chainer import backend [as alias]
# Or: from chainer.backend import get_array_module [as alias]
def check_double_backward(
        self, h_data, x_data, y_grad, h_grad_grad, x_grad_grad):
    xp = backend.get_array_module(h_data)
    flag_x = xp.random.rand(*x_data.shape)

    def f(h, x):
        # As forward computation is executed multiple times in
        # check_double_backward, use a fixed flag.
        xp_str = 'numpy' if xp is numpy else 'cupy'
        with mock.patch(
                '{}.random.rand'.format(xp_str),
                return_value=flag_x) as mock_rand:
            y = functions.zoneout(h, x, self.ratio)
        mock_rand.assert_called_once_with(*x.shape)
        return y

    gradient_check.check_double_backward(
        f, (h_data, x_data), y_grad, (h_grad_grad, x_grad_grad),
        dtype=numpy.float64)
Example 12: __call__
# Required import: from chainer import backend [as alias]
# Or: from chainer.backend import get_array_module [as alias]
def __call__(self, x):
    if self.resize_identity:
        identity = self.identity_conv(x)
    else:
        identity = x
    x = self.body(x)
    if config.train:
        xp = backend.get_array_module(x)
        b = xp.random.binomial(n=1, p=self.life_prob)
        x = float(b) / self.life_prob * x
    x = x + identity
    x = self.activ(x)
    return x
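A note on the training branch in Example 12: xp.random.binomial(n=1, p=self.life_prob) draws a single keep/drop decision for the whole residual body, and dividing the kept activations by life_prob keeps the expected output unchanged (since E[b] = life_prob, E[b / life_prob * body(x)] = body(x)), so training-time and inference-time behaviour agree on average. This looks like the usual stochastic-depth construction, with get_array_module making the random draw use the same backend as x.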
Example 13: forward
# Required import: from chainer import backend [as alias]
# Or: from chainer.backend import get_array_module [as alias]
def forward(self, inputs):
    x1, x2 = inputs
    if chainer.config.train:
        xp = backend.get_array_module(x1)
        alpha = xp.empty((x1.shape[0], 1, 1, 1), dtype=x1.dtype)
        for i in range(len(alpha)):
            alpha[i] = xp.random.rand()
        return alpha * x1 + (1 - alpha) * x2,
    else:
        return 0.5 * (x1 + x2),
Example 14: backward
# Required import: from chainer import backend [as alias]
# Or: from chainer.backend import get_array_module [as alias]
def backward(self, inputs, grad_outputs):
    dy, = grad_outputs
    xp = backend.get_array_module(dy)
    beta = xp.empty((dy.shape[0], 1, 1, 1), dtype=dy.dtype)
    for i in range(len(beta)):
        beta[i] = xp.random.rand()
    return beta * dy, (xp.ones(dy.shape, dtype=dy.dtype) - beta) * dy
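Taken together, Examples 13 and 14 appear to implement shake-shake style mixing of two branches: the forward pass blends x1 and x2 with a per-sample random alpha drawn only in training mode (falling back to the deterministic average 0.5 * (x1 + x2) otherwise), while the backward pass redistributes the incoming gradient with an independently drawn beta; get_array_module ensures alpha and beta are allocated with the same backend as the data.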
Example 15: forward
# Required import: from chainer import backend [as alias]
# Or: from chainer.backend import get_array_module [as alias]
def forward(self, inputs):
    x, = inputs
    xp = backend.get_array_module(x)
    # scale the elementwise sign of x by sqrt(2 / (shape[1] * shape[2] * shape[3]))
    return math.sqrt(
        2.0 / (x.shape[1] * x.shape[2] * x.shape[3])) * xp.sign(x),