This page collects typical usage examples of the Python class chainer.Function. If you are wondering what chainer.Function is, how to use it, or what idiomatic usage looks like, the curated code examples below may help. You can also explore further usage examples from the enclosing chainer module.
The following presents 15 code examples of chainer.Function, sorted by popularity by default. You can upvote the examples you like or find useful; your feedback helps the system recommend better Python code examples.
Example 1: check_type_mismatch
# Required module: import chainer [as alias]
# Or: from chainer import Function [as alias]
def check_type_mismatch(self, x_data, retain):
    xp = backend.get_array_module(x_data)

    class DummyFunction(chainer.Function):
        label = 'dummy_function'

        def forward(self, inputs):
            if not retain:
                self.retain_inputs(())
            return xp.array(1, np.float32),

        def backward(self, inputs, grads):
            # A list of Python ints is not a valid gradient; this is the
            # type mismatch the test expects to surface.
            return [1]

    x = chainer.Variable(x_data)
    y = DummyFunction()(x)
    with six.assertRaisesRegex(self, TypeError, 'dummy_function'):
        y.backward()
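For context, a driver for this check might look like the following sketch; the test-class name and the concrete input are hypothetical, and it assumes the check above is available at module scope:

import unittest
import numpy as np

class TestBackwardTypeMismatch(unittest.TestCase):
    # Reuse the check defined above as a test-case method.
    check_type_mismatch = check_type_mismatch

    def test_cpu(self):
        self.check_type_mismatch(np.zeros((2, 3), np.float32), retain=True)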
Example 2: check_dtype_mismatch
# Required module: import chainer [as alias]
# Or: from chainer import Function [as alias]
def check_dtype_mismatch(self, x_data, retain):
    xp = backend.get_array_module(x_data)

    class DummyFunction(chainer.Function):
        label = 'dummy_function'

        def forward(self, inputs):
            if not retain:
                self.retain_inputs(())
            return xp.array(1, np.float32),

        def backward(self, inputs, grads):
            # An int32 gradient for a float input: the dtype mismatch
            # this test expects to surface.
            return xp.array([1], np.int32),

    x = chainer.Variable(x_data)
    y = DummyFunction()(x)
    with six.assertRaisesRegex(self, TypeError, 'dummy_function'):
        y.backward()
Example 3: check_traceback
# Required module: import chainer [as alias]
# Or: from chainer import Function [as alias]
def check_traceback(self, x_data):
    xp = backend.get_array_module(x_data)

    class DummyFunction(chainer.Function):
        label = 'dummy_function'

        def forward(self, inputs):
            return xp.array(1, np.float32),

        def backward(self, inputs, grads):
            # Wrong gradient shape: the resulting error should carry a
            # stacktrace pointing at the line that applied the function.
            return xp.array([1, 2], np.float32),

    x = chainer.Variable(x_data)
    line = inspect.currentframe().f_lineno + 1
    y = DummyFunction()(x)  # `line` is THIS line
    try:
        y.backward()
        self.fail()
    except ValueError as e:
        assert 'Stacktrace' in str(e)
        assert 'line %d' % line in str(e)
Example 4: check_positive
# Required module: import chainer [as alias]
# Or: from chainer import Function [as alias]
def check_positive(self, xp, func_name, input, eps, nout):
    # The function is expected to be non-differentiable at `input`.
    func = getattr(self, '_func_{}'.format(func_name))
    grad_outputs = [
        xp.random.uniform(-1, 1, input.shape).astype(input.dtype)
        for _ in range(nout)]

    def f():
        # numerical_grad expects a tuple of outputs, one per grad_output.
        return (func(input),) * nout

    try:
        gradient_check.numerical_grad(
            f, (input,), grad_outputs, eps=eps,
            detect_nondifferentiable=True)
    except gradient_check.NondifferentiableError:
        pass
    else:
        raise AssertionError(
            'Function `{}` is expected to be non-differentiable, '
            'but determined to be differentiable.\n\n'
            'eps: {}\n'
            'input: {}\n'
            'xp: {}\n'
            ''.format(
                func_name, eps, input, xp.__name__))
Example 5: check_negative
# Required module: import chainer [as alias]
# Or: from chainer import Function [as alias]
def check_negative(self, xp, func_name, input, eps, nout):
    # The function is expected to be differentiable at `input`.
    func = getattr(self, '_func_{}'.format(func_name))
    grad_outputs = [
        xp.random.uniform(-1, 1, input.shape).astype(input.dtype)
        for _ in range(nout)]

    def f():
        # numerical_grad expects a tuple of outputs, one per grad_output.
        return (func(input),) * nout

    try:
        gradient_check.numerical_grad(
            f, (input,), grad_outputs, eps=eps,
            detect_nondifferentiable=True)
    except gradient_check.NondifferentiableError as e:
        raise AssertionError(
            'Function `{}` is expected to be differentiable, '
            'but determined to be non-differentiable.\n\n'
            'eps: {}\n'
            'input: {}\n'
            'xp: {}\n\n'
            '{}: {}'
            .format(
                func_name, eps, input, xp.__name__,
                e.__class__.__name__, e))
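Both checks above funnel into gradient_check.numerical_grad with detect_nondifferentiable=True. A minimal direct call, assuming plain numpy and using abs(), which has a kink at 0, might look roughly like this:

import numpy as np
from chainer import gradient_check

x = np.zeros((1,), np.float32)   # abs() is non-differentiable exactly at 0
gy = np.ones((1,), np.float32)

try:
    gradient_check.numerical_grad(
        lambda: (np.abs(x),), (x,), (gy,),
        eps=1e-3, detect_nondifferentiable=True)
except gradient_check.NondifferentiableError:
    print('kink detected at x = 0')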
Example 6: forward
# Required module: import chainer [as alias]
# Or: from chainer import Function [as alias]
def forward(self, inputs):
    """Applies forward propagation to input arrays.

    It delegates the procedure to :meth:`forward_cpu` or
    :meth:`forward_gpu` by default. Which it selects is determined by the
    type of input arrays.
    Implementations of :class:`Function` must implement either CPU/GPU
    methods or this method.

    Args:
        inputs: Tuple of input array(s).

    Returns:
        Tuple of output array(s).

    .. warning::
        Implementations of :class:`Function` must take care that the
        return value must be a tuple even if it returns only one array.

    """
    if any(isinstance(x, cuda.ndarray) for x in inputs):
        return self.forward_gpu(inputs)
    else:
        return self.forward_cpu(inputs)
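As a concrete illustration of the contract described above, here is a minimal old-style Function; `SquareFunction` is a hypothetical name, and only the CPU path is implemented:

import numpy as np
import chainer

class SquareFunction(chainer.Function):
    label = 'square'

    def forward_cpu(self, inputs):
        x, = inputs
        return x * x,   # trailing comma: the return value must be a tuple

    def backward_cpu(self, inputs, grad_outputs):
        x, = inputs
        gy, = grad_outputs
        return 2 * x * gy,

x = chainer.Variable(np.array([1., 2., 3.], np.float32))
y = SquareFunction()(x)   # forward() dispatches to forward_cpu here
y.grad = np.ones_like(y.data)
y.backward()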
Example 7: forward_cpu
# Required module: import chainer [as alias]
# Or: from chainer import Function [as alias]
def forward_cpu(self, inputs):
    """Applies forward propagation to input arrays on CPU.

    Args:
        inputs: Tuple of :class:`numpy.ndarray` object(s).

    Returns:
        tuple: Tuple of :class:`numpy.ndarray` object(s).

    .. warning::
        Implementations of :class:`Function` must take care that the
        return value must be a tuple even if it returns only one array.

    """
    raise NotImplementedError()
Example 8: backward_cpu
# Required module: import chainer [as alias]
# Or: from chainer import Function [as alias]
def backward_cpu(self, inputs, grad_outputs):
    """Applies backprop to output gradient arrays on CPU.

    Args:
        inputs: Tuple of input :class:`numpy.ndarray` object(s).
        grad_outputs: Tuple of output gradient :class:`numpy.ndarray`
            object(s).

    Returns:
        tuple: Tuple of input gradient :class:`numpy.ndarray` object(s).
        Some or all of them can be ``None``, if the function is not
        differentiable on corresponding inputs.

    .. warning::
        Implementations of :class:`Function` must take care that the
        return value must be a tuple even if it returns only one array.

    """
    return tuple(None for _ in inputs)
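To illustrate the ``None`` convention mentioned in the Returns section, here is a sketch of a backward_cpu for a hypothetical gather-style function whose second input is an integer index and therefore has no gradient:

import numpy as np

def backward_cpu(self, inputs, grad_outputs):
    x, idx = inputs
    gy, = grad_outputs
    gx = np.zeros_like(x)
    gx[idx] = gy      # route the output gradient to the selected element
    return gx, None   # no gradient for the integer index input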
Example 9: backward_gpu
# Required module: import chainer [as alias]
# Or: from chainer import Function [as alias]
def backward_gpu(self, inputs, grad_outputs):
    """Applies backprop to output gradient arrays on GPU.

    Args:
        inputs: Tuple of input :class:`cupy.ndarray` object(s).
        grad_outputs: Tuple of output gradient :class:`cupy.ndarray`
            object(s).

    Returns:
        tuple: Tuple of input gradient :class:`cupy.ndarray` object(s).
        Some or all of them can be ``None``, if the function is not
        differentiable on corresponding inputs.

    .. warning::
        Implementations of :class:`Function` must take care that the
        return value must be a tuple even if it returns only one array.

    """
    return tuple(None for _ in inputs)
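The GPU variant usually mirrors the CPU implementation, with cupy arrays in place of numpy ones; a sketch for the hypothetical square function from Example 6:

def backward_gpu(self, inputs, grad_outputs):
    x, = inputs
    gy, = grad_outputs
    return 2 * x * gy,   # cupy broadcasting mirrors the numpy code path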
Example 10: check_layout_forward
# Required module: import chainer [as alias]
# Or: from chainer import Function [as alias]
def check_layout_forward(self, inputs):
    if self.is_elementwise:
        if not all([x.layout == inputs[0].layout for x in inputs]):
            raise RuntimeError(
                'Inputs with mixed memory layouts were given to '
                'an elementwise function.\n'
                'Function: {}\n'
                'Input layouts: {}\n'.format(
                    self.label,
                    ', '.join(str(x.layout) for x in inputs),
                ))
    else:
        if not all([x.layout is None for x in inputs]):
            raise RuntimeError(
                'Inputs with non-standard layouts were given to '
                'a function without explicit `check_layout_forward` '
                'implementation.\n'
                'Function: {}\n'
                'Input layouts: {}\n'.format(
                    self.label,
                    ', '.join(str(x.layout) for x in inputs),
                ))
Example 11: retain_inputs
# Required module: import chainer [as alias]
# Or: from chainer import Function [as alias]
def retain_inputs(self, indexes):
    """Lets specified input variable nodes keep data arrays.

    By calling this method from :meth:`forward`, the function node can
    specify which inputs are required for backprop. The input variables
    with retained arrays can then be obtained by calling
    :meth:`get_retained_inputs` from inside :meth:`backward`.

    Unlike :class:`~chainer.Function`, the function node **DOES NOT**
    keep input arrays by default. If you want to keep some or all input
    arrays, do not forget to call this method.

    Note that **this method must not be called from the outside of**
    :meth:`forward`.

    Args:
        indexes (iterable of int): Indexes of input variables that the
            function will require for backprop.

    """
    self._input_indexes_to_retain = indexes
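A sketch of the intended pairing in a new-style FunctionNode; `MulByItself` is a hypothetical name:

import numpy as np
import chainer

class MulByItself(chainer.FunctionNode):

    def forward(self, inputs):
        x, = inputs
        self.retain_inputs((0,))   # keep x; FunctionNode drops it otherwise
        return x * x,

    def backward(self, target_input_indexes, grad_outputs):
        x, = self.get_retained_inputs()   # the Variable retained above
        gy, = grad_outputs
        return 2 * x * gy,

x = chainer.Variable(np.arange(3, dtype=np.float32))
y, = MulByItself().apply((x,))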
Example 12: add_hook
# Required module: import chainer [as alias]
# Or: from chainer import Function [as alias]
def add_hook(self, hook, name=None):
    """Registers a function hook.

    Args:
        hook (~chainer.FunctionHook): Function hook to be registered.
        name (str): Name of the function hook. The name must be unique
            among function hooks registered to this function. If
            ``None``, the default name of the function hook is used.

    """
    if not isinstance(hook, function_hook.FunctionHook):
        raise TypeError('Hook must be of type FunctionHook')
    if name is None:
        name = hook.name
    hooks = self.local_function_hooks
    if name in hooks:
        raise KeyError('Hook %s already exists' % name)
    hooks[name] = hook
    hook.added(self)
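A sketch of the hook protocol in use; `LoggingHook` is a hypothetical hook, registered globally here via the context-manager form (add_hook gives per-function registration instead):

import numpy as np
import chainer
import chainer.functions as F

class LoggingHook(chainer.FunctionHook):
    name = 'logging'

    def forward_preprocess(self, function, in_data):
        # Invoked just before each wrapped forward pass.
        print('forward of', function.label)

x = chainer.Variable(np.ones((2,), np.float32))
with LoggingHook():   # fires for every function applied in this block
    y = F.exp(x)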
Example 13: creator
# Required module: import chainer [as alias]
# Or: from chainer import Function [as alias]
@property
def creator(self):
    """Function implementation that created this variable.

    When this variable has been created by an old-style function (i.e.,
    it is implemented as a subclass of :class:`Function`), this property
    returns that :class:`Function` object.

    When this variable has been created by a new-style function (i.e.,
    it is implemented as a subclass of :class:`FunctionNode` class),
    this property returns that node object.

    """
    if self._has_chainerx_array:
        raise RuntimeError(
            'A variable of ChainerX does not provide a creator.')
    return self._node.creator
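A small usage sketch with a built-in function:

import numpy as np
import chainer
import chainer.functions as F

x = chainer.Variable(np.ones((2,), np.float32))
y = F.exp(x)
print(type(y.creator))    # the FunctionNode that produced y
print(y.creator.label)    # its human-readable label, e.g. 'exp'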
Example 14: _find_old_style_function
# Required module: import chainer [as alias]
# Or: from chainer import Function [as alias]
def _find_old_style_function(outputs):
    """Find old-style functions in the computational graph."""
    found = []
    for v in outputs:
        assert isinstance(
            v, (chainer.Variable, chainer.variable.VariableNode))
        if v.creator is None:
            continue
        if isinstance(v.creator, chainer.Function):
            found.append(v.creator)
        else:
            assert isinstance(v.creator, chainer.FunctionNode)
            found.extend(_find_old_style_function(v.creator.inputs))
    return found
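A sketch of driving this helper; `AddOne` is a hypothetical old-style function planted in the graph:

import numpy as np
import chainer

class AddOne(chainer.Function):
    def forward(self, inputs):
        x, = inputs
        return x + 1,

    def backward(self, inputs, grads):
        return grads[0],

x = chainer.Variable(np.zeros((2,), np.float32))
y = AddOne()(x)
assert _find_old_style_function([y]) == [y.creator]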
Example 15: setUp
# Required module: import chainer [as alias]
# Or: from chainer import Function [as alias]
def setUp(self):
    y_shape = self.y_shape
    x_shape = self.x_shape
    y1 = make_array(1, y_shape, numpy.float32)
    y2 = make_array(2, y_shape, numpy.float32)
    gx1 = make_array(1, x_shape, numpy.float32)
    gx2 = None
    gy1 = make_array(1, y_shape, numpy.float32)
    gy2 = make_array(1, y_shape, numpy.float32)

    f = chainer.Function()
    f.check_type_forward = mock.MagicMock()
    f.forward_cpu = mock.MagicMock(return_value=(y1, y2))
    f.forward_gpu = mock.MagicMock()
    f.backward_cpu = mock.MagicMock(return_value=(gx1, gx2))
    f.backward_gpu = mock.MagicMock()
    self.f = f

    self.x1 = make_array(0, x_shape, numpy.float32)
    self.x2 = make_array(0, x_shape, numpy.int32)
    self.y1 = y1
    self.y2 = y2
    self.gx1 = gx1
    self.gx2 = gx2
    self.gy1 = gy1
    self.gy2 = gy2
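A sketch of a test method that could sit next to this setUp and exercise the CPU dispatch through the mocks (the method name is hypothetical):

def test_forward_cpu_dispatch(self):
    x1 = chainer.Variable(self.x1)
    x2 = chainer.Variable(self.x2)
    y1, y2 = self.f(x1, x2)
    # forward() saw numpy inputs, so only the CPU path should be taken.
    self.assertTrue(self.f.forward_cpu.called)
    self.assertFalse(self.f.forward_gpu.called)
    self.assertIs(y1.data, self.y1)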