This page collects typical usage examples of the Python class chainer.function_node.FunctionNode. If you are wondering what function_node.FunctionNode is for or how to use it, the curated code samples below may help. You can also explore further usage examples from its containing module, chainer.function_node.
The following shows 8 code examples of function_node.FunctionNode, sorted by popularity by default. You can upvote the examples you like or find useful; your votes help the system recommend better Python code samples.
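Before the examples, a minimal sketch of the class itself may help: a new-style Chainer function is written by subclassing FunctionNode and overriding forward and backward. The Square class below is a hypothetical illustration, not part of Chainer, but every API it touches (apply, retain_inputs, get_retained_inputs) is the standard FunctionNode interface.

import numpy as np

import chainer
from chainer import function_node


class Square(function_node.FunctionNode):
    """Element-wise square as a FunctionNode (hypothetical example)."""

    def forward(self, inputs):
        x, = inputs
        self.retain_inputs((0,))  # keep x; the backward pass needs it
        return x * x,

    def backward(self, indexes, grad_outputs):
        x, = self.get_retained_inputs()
        gy, = grad_outputs
        return 2 * x * gy,


x = chainer.Variable(np.array([1., 2., 3.], np.float32))
y, = Square().apply((x,))     # apply() always returns a tuple
y.grad = np.ones(3, np.float32)
y.backward()
print(x.grad)  # [2. 4. 6.]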
Example 1: _skip_variable
# Required import: from chainer import function_node [as alias]
# Or: from chainer.function_node import FunctionNode [as alias]
def _skip_variable(nodes, edges):
    func_edges = []
    for edge_i, edge in enumerate(edges):
        head, tail = edge
        if isinstance(head, variable.VariableNode):
            if head.creator_node is not None:
                head = head.creator_node
            else:
                continue
        if isinstance(tail, variable.VariableNode):
            for node in nodes:
                if isinstance(node, function_node.FunctionNode):
                    for input_var in node.inputs:
                        if input_var is tail:
                            tail = node
                            break
                    if isinstance(tail, function_node.FunctionNode):
                        break
            else:
                continue
        func_edges.append((head, tail))
    return nodes, func_edges
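_skip_variable is an internal helper of chainer.computational_graph; you normally reach it through the public entry point below. A minimal sketch, assuming the standard Chainer API:

import numpy as np

import chainer
import chainer.functions as F
from chainer import computational_graph

x = chainer.Variable(np.zeros((2, 3), np.float32))
y = F.relu(x) + 1
# With remove_variable=True, dump() routes through _skip_variable so
# that only FunctionNode-to-FunctionNode edges appear in the output.
g = computational_graph.build_computational_graph(
    (y,), remove_variable=True)
print(g.dump())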
Example 2: retain_inputs
# Required import: from chainer import function_node [as alias]
# Or: from chainer.function_node import FunctionNode [as alias]
def retain_inputs(self, indexes):
    """Lets specified input variable nodes keep data arrays.

    By calling this method from :meth:`forward`, the function can specify
    which inputs are required for backprop.

    If this method is not called, the function keeps all input arrays. If
    you want to release all input arrays, call this method by passing an
    empty sequence. *Note that this behavior is different from that of*
    :meth:`FunctionNode.retain_inputs() \
    <chainer.FunctionNode.retain_inputs>`.

    Note that **this method must not be called from the outside of**
    :meth:`forward`.

    Args:
        indexes (iterable of int): Indexes of input variables that the
            function will require for backprop.

    """
    self.node.retain_inputs(indexes)
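A minimal sketch of the release-all-inputs case described above, using a hypothetical old-style Function whose backward needs no input arrays:

import numpy as np

from chainer import function


class TripleOldStyle(function.Function):
    """Hypothetical old-style Function; backward needs no inputs."""

    def forward(self, inputs):
        x, = inputs
        # Release all input arrays by passing an empty sequence; an
        # old-style Function would otherwise keep every input array.
        self.retain_inputs(())
        return 3 * x,

    def backward(self, inputs, grad_outputs):
        gy, = grad_outputs
        return 3 * gy,


y = TripleOldStyle()(np.array([1., 2.], np.float32))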
Example 3: __init__
# Required import: from chainer import function_node [as alias]
# Or: from chainer.function_node import FunctionNode [as alias]
def __init__(self, node, attribute=None, show_name=True):
    assert isinstance(node, (variable.VariableNode,
                             function_node.FunctionNode))
    self.node = node
    self.id_ = id(node)
    self.attribute = {'label': node.label}
    if isinstance(node, variable.VariableNode):
        if show_name and node.name is not None:
            self.attribute['label'] = '{}: {}'.format(
                node.name, self.attribute['label'])
        self.attribute.update({'shape': 'oval'})
    else:
        self.attribute.update({'shape': 'box'})
    if attribute is not None:
        self.attribute.update(attribute)
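Assuming this is the constructor of the DotNode helper in chainer.computational_graph (the same class that Examples 1 and 8 render with), a short usage sketch; the attribute dict is an arbitrary example:

import numpy as np

import chainer
from chainer.computational_graph import DotNode

x = chainer.Variable(np.zeros(3, np.float32), name='x')
y = x * 2
var_dot = DotNode(x.node)                  # VariableNode -> oval
func_dot = DotNode(y.creator_node,         # FunctionNode -> box
                   attribute={'color': 'red'})
print(var_dot.label)
print(func_dot.label)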
Example 4: no_backprop_mode
# Required import: from chainer import function_node [as alias]
# Or: from chainer.function_node import FunctionNode [as alias]
def no_backprop_mode():
    """Make a context manager which disables back-propagation.

    In this context, Chainer does not make a computational graph. It has the
    benefit of reducing memory consumption. However, a
    :class:`~chainer.Variable` created in this context does not hold a
    reference to the :class:`~chainer.FunctionNode` that created itself, so no
    gradients are accumulated by :func:`~chainer.Variable.backward`.

    In the following example, ``y`` is created in this context, which means
    that calling :func:`~chainer.Variable.backward` on ``y`` has no effect on
    the gradients of ``x``.

    >>> x = chainer.Variable(np.array([1,], np.float32))
    >>> with chainer.no_backprop_mode():
    ...     y = x + 1
    >>> y.backward()
    >>> x.grad is None
    True

    .. note::

        ``chainer.no_backprop_mode()`` implicitly applies ChainerX's
        counterpart :func:`chainerx.no_backprop_mode()`, but not vice versa.
        Also, setting the ``enable_backprop`` :ref:`configuration
        <configuration>` does not affect ChainerX.

    .. seealso::

        See :func:`chainer.force_backprop_mode` for details on how to override
        this context.

    """
    c = configuration.using_config('enable_backprop', False)
    if chainerx.is_available():
        return _BackpropModeContext((c, chainerx.no_backprop_mode()))
    return _BackpropModeContext((c,))
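As the seealso says, chainer.force_backprop_mode re-enables graph construction inside this context; a quick sketch:

import numpy as np

import chainer

x = chainer.Variable(np.array([1.], np.float32))
with chainer.no_backprop_mode():
    with chainer.force_backprop_mode():
        y = x + 1  # the graph IS built here despite the outer context
y.backward()
print(x.grad)  # [1.]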
Example 5: __call__
# Required import: from chainer import function_node [as alias]
# Or: from chainer.function_node import FunctionNode [as alias]
def __call__(self, *inputs):
    """Applies forward propagation with chaining backward references.

    This method creates a new :class:`~chainer.FunctionAdapter`
    object and runs the forward propagation using it.

    See :class:`~chainer.FunctionNode` for the detailed
    behavior of building the computational graph.

    Args:
        inputs: Tuple of input :class:`Variable` or :ref:`ndarray` objects.
            If the input is :ref:`ndarray`, it is automatically wrapped
            with :class:`Variable`.

    Returns:
        One :class:`Variable` object or a tuple of multiple
        :class:`Variable` objects.

    """
    node = self.node

    # Swap the ownership
    node._function = self
    node._weak_function = None
    self._node = weakref.ref(node)
    self._owned_node = None

    ret = node.apply(inputs)
    if len(ret) == 1:
        return ret[0]
    else:
        return tuple(ret)
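A sketch of the calling convention with a hypothetical one-output old-style Function; ndarray inputs are wrapped into Variables automatically:

import numpy as np

from chainer import function


class AddOne(function.Function):
    """Hypothetical old-style Function used to exercise __call__."""

    def forward(self, inputs):
        x, = inputs
        return x + 1,

    def backward(self, inputs, grad_outputs):
        return grad_outputs


y = AddOne()(np.zeros(3, np.float32))  # one output -> one Variable
print(type(y))  # <class 'chainer.variable.Variable'>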
Example 6: unchain
# Required import: from chainer import function_node [as alias]
# Or: from chainer.function_node import FunctionNode [as alias]
def unchain(self):
    """Purges in/out nodes and this function itself from the graph.

    See :meth:`FunctionNode.unchain() <chainer.FunctionNode.unchain>`
    for the detail.

    """
    self.node.unchain()
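A sketch of what unchaining does to backprop, using the underlying FunctionNode.unchain() on a small graph (standard chainer.functions API assumed):

import numpy as np

import chainer
import chainer.functions as F

x = chainer.Variable(np.zeros(3, np.float32))
h = F.relu(x)
y = F.sum(h)
h.creator_node.unchain()  # sever the edge between x and h
y.backward()
print(x.grad)  # None: backprop stopped at the unchained function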
Example 7: add_hook
# Required import: from chainer import function_node [as alias]
# Or: from chainer.function_node import FunctionNode [as alias]
def add_hook(self, hook, name=None):
    """Registers a function hook.

    See :meth:`FunctionNode.add_hook` for the detail.

    Args:
        hook (~chainer.FunctionHook): Function hook to be registered.
        name (str): Name of the function hook. The name must be unique
            among function hooks registered to the function. If ``None``,
            the default name of the function hook is used.

    """
    self.node.add_hook(hook, name)
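add_hook attaches a hook to this one function instance; the same hook classes also work globally as context managers. A sketch with the built-in chainer.function_hooks.TimerHook:

import numpy as np

import chainer.functions as F
from chainer.function_hooks import TimerHook

hook = TimerHook()
with hook:  # global registration: observes every function call inside
    y = F.relu(np.zeros((8, 8), np.float32))
hook.print_report()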
Example 8: _to_dot
# Required import: from chainer import function_node [as alias]
# Or: from chainer.function_node import FunctionNode [as alias]
def _to_dot(self):
    """Converts the graph to dot format.

    The `label` property is used as a short description of each node.

    Returns:
        str: The graph in dot format.

    """
    ret = 'digraph graphname{rankdir=%s;' % self.rankdir
    if self.remove_variable:
        self.nodes, self.edges = _skip_variable(self.nodes, self.edges)
    for node in self.nodes:
        assert isinstance(node, (variable.VariableNode,
                                 function_node.FunctionNode))
        if isinstance(node, variable.VariableNode):
            if not self.remove_variable:
                ret += DotNode(
                    node, self.variable_style, self.show_name).label
        else:
            ret += DotNode(node, self.function_style, self.show_name).label
    drawn_edges = []
    for edge in self.edges:
        head, tail = edge
        if (isinstance(head, variable.VariableNode) and
                isinstance(tail, function_node.FunctionNode)):
            head_attr = self.variable_style
            tail_attr = self.function_style
        elif (isinstance(head, function_node.FunctionNode) and
                isinstance(tail, variable.VariableNode)):
            head_attr = self.function_style
            tail_attr = self.variable_style
        else:
            if not self.remove_variable:
                raise TypeError('head and tail should be the set of '
                                'VariableNode and Function')
            else:
                head_attr = self.function_style
                tail_attr = self.function_style
        head_node = DotNode(head, head_attr, self.show_name)
        tail_node = DotNode(tail, tail_attr, self.show_name)
        edge = (head_node.id_, tail_node.id_)
        if edge in drawn_edges:
            continue
        ret += '%s -> %s;' % edge
        drawn_edges.append(edge)
    ret += '}'
    return ret
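_to_dot backs the public dump() method of the computational graph object; a minimal end-to-end sketch:

import numpy as np

import chainer
import chainer.functions as F
from chainer import computational_graph as cg

x = chainer.Variable(np.zeros((1, 3), np.float32), name='x')
y = F.sigmoid(F.relu(x))
g = cg.build_computational_graph((y,), rankdir='LR')
with open('graph.dot', 'w') as f:
    f.write(g.dump())  # dump() renders via _to_dot()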