本文整理汇总了Python中chainer.variable.Parameter方法的典型用法代码示例。如果您正苦于以下问题:Python variable.Parameter方法的具体用法?Python variable.Parameter怎么用?Python variable.Parameter使用的例子?那么,这里精选的方法代码示例或许可以为您提供帮助。您也可以进一步了解该方法所在类chainer.variable的用法示例。
在下文中一共展示了variable.Parameter方法的15个代码示例,这些例子默认根据受欢迎程度排序。您可以为喜欢或者感觉有用的代码点赞,您的评价将有助于系统推荐出更棒的Python代码示例。
示例1: __init__
# Required import: from chainer import variable
# Or: from chainer.variable import Parameter
def __init__(self, axis=1, W_shape=None, bias_term=False, bias_shape=None,
             initialW=None, initial_bias=None):
    """Initialize the Scale link.

    Args:
        axis (int): First axis of the input along which the scale (and
            optional bias) is applied.
        W_shape (tuple or None): Shape of the learnable scale parameter
            ``W``. If ``None``, no ``W`` is created and the input is
            expected to provide the scale.
        bias_term (bool): If ``True``, also add a ``Bias`` child link.
        bias_shape (tuple or None): Shape of the bias; required only when
            ``W_shape`` is ``None`` and ``bias_term`` is ``True``
            (otherwise the bias reuses ``W_shape``).
        initialW: Initializer for ``W``; defaults to 1 (identity scale).
        initial_bias: Initializer forwarded to the ``Bias`` link.

    Raises:
        ValueError: If ``bias_term`` is ``True`` but neither ``W_shape``
            nor ``bias_shape`` is given.
    """
    super(Scale, self).__init__()
    self.axis = axis

    with self.init_scope():
        # Add W parameter and/or bias term.
        if W_shape is not None:
            if initialW is None:
                initialW = 1
            W_initializer = initializers._get_initializer(initialW)
            self.W = variable.Parameter(W_initializer, W_shape)
            if bias_term:
                # Bias shares the shape of the learnt W.
                self.bias = Bias(axis, W_shape, initial_bias)
        else:
            if bias_term:
                if bias_shape is None:
                    raise ValueError(
                        'bias_shape should be given if W is not '
                        'learnt parameter and bias_term is True.')
                # Bug fix: use bias_shape here — W_shape is None in this
                # branch, which is exactly why bias_shape is required.
                self.bias = Bias(axis, bias_shape, initial_bias)
示例2: test_serialize
# Required import: from chainer import variable
# Or: from chainer.variable import Parameter
def test_serialize(self):
    """Sequential.serialize must index the serializer per child and
    delegate every parameter of each child to that sub-serializer."""
    l1 = links.Linear(None, 1)
    l2 = links.Linear(None, 3)
    with l2.init_scope():
        l2.x = variable.Parameter(0, 2)
    seq = chainer.Sequential(l1, l2)

    child_mocks = {key: mock.MagicMock() for key in ('0', '1')}
    for child in child_mocks.values():
        child.return_value = None
    serializer = mock.MagicMock()
    serializer.__getitem__.side_effect = child_mocks.__getitem__
    serializer.return_value = None

    seq.serialize(serializer)

    # The top-level serializer is only indexed, never called directly.
    self.assertEqual(serializer.call_count, 0)
    self.assertEqual(serializer.__getitem__.call_count, 2)
    serializer.__getitem__.assert_any_call('0')
    serializer.__getitem__.assert_any_call('1')
    # Uninitialized W parameters serialize their data as None; the
    # initialized b and x parameters pass their arrays through.
    child_mocks['0'].assert_any_call('W', None)
    child_mocks['0'].assert_any_call('b', l1.b.data)
    child_mocks['1'].assert_any_call('W', None)
    child_mocks['1'].assert_any_call('b', l2.b.data)
    child_mocks['1'].assert_any_call('x', l2.x.data)
示例3: params
# Required import: from chainer import variable
# Or: from chainer.variable import Parameter
def params(
        self,
        include_uninit: bool = True
) -> tp.Iterator['chainer.Parameter']:
    """Returns a generator of all parameters under the link hierarchy.

    Args:
        include_uninit (bool): If ``True``, it also generates uninitialized
            parameters.

    Returns:
        A generator object that generates all parameters.
    """
    attributes = self.__dict__  # type: tp.Dict[str, chainer.Parameter]
    # Sorted order keeps iteration deterministic across runs.
    for name in sorted(self._params):
        param = attributes[name]
        if include_uninit or param.data is not None:
            yield param
示例4: namedparams
# Required import: from chainer import variable
# Or: from chainer.variable import Parameter
def namedparams(
        self,
        include_uninit: bool = True
) -> tp.Iterator[tp.Tuple[str, 'chainer.Parameter']]:
    """Returns a generator of all (path, param) pairs under the hierarchy.

    Args:
        include_uninit (bool): If ``True``, it also generates uninitialized
            parameters.

    Returns:
        A generator object that generates all (path, parameter) pairs. The
        paths are relative from this link.
    """
    attributes = self.__dict__  # type: tp.Dict[str, chainer.Parameter]
    # Sorted order keeps iteration deterministic across runs.
    for name in sorted(self._params):
        param = attributes[name]
        if include_uninit or param.is_initialized:
            # Paths are rooted at this link, hence the leading slash.
            yield '/' + name, param
示例5: serialize
# Required import: from chainer import variable
# Or: from chainer.variable import Parameter
def serialize(self, serializer: 'chainer.AbstractSerializer') -> None:
    """Serializes the link object.

    Args:
        serializer (~chainer.AbstractSerializer): Serializer object.
    """
    attributes = self.__dict__  # type: tp.Dict[str, chainer.Parameter]
    for name in self._params:
        param = attributes[name]
        data = serializer(name, param.data)  # type: types.NdArray
        if param.data is None and data is not None:
            # Deserializing into an uninitialized parameter: take the
            # shape from the loaded array, then copy it onto the
            # parameter's own device.
            param.initialize(data.shape)
            with chainer.using_device(param.device):
                param.data[...] = param.device.send(data)
    # Persistent values are plain attributes; the serializer may return a
    # replacement object, so assign its result back.
    for name in self._persistent:
        attributes[name] = serializer(name, attributes[name])
示例6: count_params
# Required import: from chainer import variable
# Or: from chainer.variable import Parameter
def count_params(self) -> int:
    """Counts the total number of parameters.

    This method counts the total number of scalar values included in all
    the :class:`~chainer.Parameter`\\ s held by this link and its
    descendants.

    If the link contains uninitialized parameters, this method raises a
    warning and excludes them from the count.

    Returns:
        The total size of parameters (int)
    """
    total = 0
    for name, param in self.namedparams():
        if param.array is None:
            # Shape — and hence size — is unknown until initialization.
            warnings.warn(
                "Parameter '{}' has not been initialized, so the "
                "resulting count will not include the number of parameters"
                " in it.".format(name))
        else:
            total += param.size
    return total
示例7: __init__
# Required import: from chainer import variable
# Or: from chainer.variable import Parameter
def __init__(self, in_size, out_size, ratio=.5, nobias=False,
             initialW=None, initial_bias=None):
    """Initialize the SimplifiedDropconnect link.

    Args:
        in_size (int or None): Input dimension; if ``None``, the weight is
            initialized lazily on the first forward pass.
        out_size (int): Output dimension.
        ratio (float): Dropconnect ratio.
        nobias (bool): If ``True``, no bias parameter is created.
        initialW: Weight initializer; defaults to scaled He-normal.
        initial_bias: Bias initializer; defaults to zeros.
    """
    super(SimplifiedDropconnect, self).__init__()
    self.out_size = out_size
    self.ratio = ratio

    if initialW is None:
        initialW = initializers.HeNormal(1. / numpy.sqrt(2))

    with self.init_scope():
        self.W = variable.Parameter(
            initializers._get_initializer(initialW))
        if in_size is not None:
            self._initialize_params(in_size)

        if nobias:
            self.b = None
        else:
            bias_init = initial_bias
            if bias_init is None:
                bias_init = initializers.Constant(0)
            self.b = variable.Parameter(
                initializers._get_initializer(bias_init), out_size)
示例8: forward
# Required import: from chainer import variable
# Or: from chainer.variable import Parameter
def forward(self, x):
    """Applies the convolution layer.

    Args:
        x (~chainer.Variable): Input image.

    Returns:
        ~chainer.Variable: Output of the convolution.
    """
    x = chainer.as_variable(x)
    assert x.layout == self.x_layout
    # self.W can be a Variable instead of Parameter: #8462
    # TODO(niboshi): Use Parameter.is_initialized.
    if self.W.raw_array is None:
        # Lazily initialize W from the input's channel count, read from
        # the semantic (layout-independent) NCHW shape.
        in_channels = memory_layouts.get_semantic_shape(
            x, assumed_layout=self.x_layout)[1]
        self._initialize_params(in_channels)
    return convolution_2d.convolution_2d(
        x, self.W, self.b, self.stride, self.pad, dilate=self.dilate,
        groups=self.groups, cudnn_fast=self.cudnn_fast)
示例9: __init__
# Required import: from chainer import variable
# Or: from chainer.variable import Parameter
def __init__(self, in_channels, out_channels, in_size=None, ksize=None,
             stride=1, nobias=False, initialW=None, initial_bias=None,
             **kwargs):
    """Initialize the LocalConvolution2D link.

    Args:
        in_channels (int or None): Input channels; ``None`` defers weight
            initialization until the input is seen.
        out_channels (int): Output channels.
        in_size: Spatial size of the input (scalar or pair).
        ksize: Kernel size.
        stride: Stride (scalar or pair).
        nobias (bool): If ``True``, no bias parameter is created.
        initialW: Weight initializer.
        initial_bias: Bias initializer; defaults to zeros.
    """
    super(LocalConvolution2D, self).__init__()
    self.ksize = ksize
    self.stride = _pair(stride)
    self.nobias = nobias
    self.out_channels = out_channels

    with self.init_scope():
        self.W = variable.Parameter(
            initializers._get_initializer(initialW))
        if nobias:
            self.b = None
        else:
            bias_init = 0 if initial_bias is None else initial_bias
            self.b = variable.Parameter(
                initializers._get_initializer(bias_init))
        # Both channel count and spatial size are needed to shape the
        # unshared weights; otherwise initialization stays deferred.
        if in_channels is not None and in_size is not None:
            self._initialize_params(in_channels, _pair(in_size))
示例10: __init__
# Required import: from chainer import variable
# Or: from chainer.variable import Parameter
def __init__(self, groups, size=None, eps=1e-5, initial_gamma=None,
             initial_beta=None):
    """Initialize the GroupNormalization link.

    Args:
        groups (int): Number of channel groups to normalize over.
        size (int or None): Channel count; ``None`` defers parameter
            initialization to the first forward pass.
        eps (float): Numerical-stability epsilon.
        initial_gamma: Scale initializer; defaults to ones.
        initial_beta: Shift initializer; defaults to zeros.
    """
    super(GroupNormalization, self).__init__()
    gamma_init = 1 if initial_gamma is None else initial_gamma
    beta_init = 0 if initial_beta is None else initial_beta
    # gamma/beta are kept in float32 even under mixed16 mode.
    highprec_dtype = chainer.get_dtype(None, map_mixed16=numpy.float32)

    with self.init_scope():
        self.groups = groups
        gamma_initializer = initializers._get_initializer(gamma_init)
        gamma_initializer.dtype = highprec_dtype
        beta_initializer = initializers._get_initializer(beta_init)
        beta_initializer.dtype = highprec_dtype
        self.gamma = variable.Parameter(gamma_initializer)
        self.beta = variable.Parameter(beta_initializer)
    self.eps = eps
    if size is not None:
        self._initialize_params(size)
示例11: __init__
# Required import: from chainer import variable
# Or: from chainer.variable import Parameter
def __init__(self, n_label):
    """Initialize the CRF layer with a random transition matrix.

    Args:
        n_label (int): Number of unique labels in the training data
            (e.g. B-Person, I-Person, O).
    """
    super(My_CRF, self).__init__(n_label)
    with self.init_scope():
        # Transition matrix of shape (n_label+2, n_label+2): the extra
        # "2" covers the added START and END labels (see 3.2). Values
        # are drawn uniformly from [-drange, drange) (Glorot-style
        # range: sqrt(6 / fan_sum)).
        n_total = n_label + 2
        drange = np.sqrt(6. / (2 * n_total))
        value = drange * np.random.uniform(
            low=-1.0, high=1.0, size=(n_total, n_total))
        self.cost = variable.Parameter(np.array(value, dtype=np.float32))
        # The number of unique labels in the training data set.
        self.n_label = n_label
        # Fill value for the expanded emission score matrix (see 3.2).
        self.small = -1000
示例12: __init__
# Required import: from chainer import variable
# Or: from chainer.variable import Parameter
def __init__(self, inp = 256, mid = 128, sz = 3):
    """Initialize a convolutional LSTM cell.

    Args:
        inp (int): Number of input channels.
        mid (int): Number of hidden/state channels.
        sz (int): Convolution kernel size (odd, so padding preserves
            spatial size).
    """
    pad = sz // 2  # "same" padding for an odd kernel
    # Input-to-gate convolutions carry the bias; hidden-to-gate ones
    # are bias-free so each gate has exactly one bias term.
    super(ConvLSTM, self).__init__(
        Wxi = L.Convolution2D(inp, mid, sz, pad = pad),
        Whi = L.Convolution2D(mid, mid, sz, pad = pad, nobias = True),
        Wxf = L.Convolution2D(inp, mid, sz, pad = pad),
        Whf = L.Convolution2D(mid, mid, sz, pad = pad, nobias = True),
        Wxc = L.Convolution2D(inp, mid, sz, pad = pad),
        Whc = L.Convolution2D(mid, mid, sz, pad = pad, nobias = True),
        Wxo = L.Convolution2D(inp, mid, sz, pad = pad),
        Who = L.Convolution2D(mid, mid, sz, pad = pad, nobias = True)
    )
    self.inp = inp
    self.mid = mid
    # Previous cell state / hidden state; None until the first step.
    self.pc = None
    self.ph = None
    with self.init_scope():
        # Peephole weights, zero-initialized and lazily shaped.
        self.Wci = variable.Parameter(initializers.Zero())
        self.Wcf = variable.Parameter(initializers.Zero())
        self.Wco = variable.Parameter(initializers.Zero())
示例13: __init__
# Required import: from chainer import variable
# Or: from chainer.variable import Parameter
def __init__(self, ndim, in_channels, out_channels, ksize, stride=1, pad=0,
             nobias=False, initialW=None, initial_bias=None,
             cover_all=False, use_gamma=False, Ip=1, factor=None):
    """Initialize the spectrally-normalized N-D convolution link.

    Args:
        ndim (int): Number of spatial dimensions.
        in_channels (int): Input channels.
        out_channels (int): Output channels.
        ksize: Kernel size (scalar or ndim-tuple).
        stride: Convolution stride.
        pad: Convolution padding.
        nobias (bool): If ``True``, no bias parameter is created.
        initialW: Weight initializer.
        initial_bias: Bias initializer; defaults to zeros.
        cover_all (bool): Cover-all flag forwarded to the convolution.
        use_gamma (bool): If ``True``, add a learnable scale ``gamma``
            seeded with W's largest singular value.
        Ip (int): Number of power iterations per step.
        factor: Optional scaling factor for the normalization.
    """
    super(SNConvolutionND, self).__init__()
    ksize = conv_nd.as_tuple(ksize, ndim)
    self.stride = stride
    self.pad = pad
    self.cover_all = cover_all
    self.use_gamma = use_gamma
    self.Ip = Ip
    # Persistent power-iteration vector for spectral normalization.
    self.u = np.random.normal(size=(1, out_channels)).astype(dtype="f")
    self.register_persistent('u')
    self.factor = factor

    with self.init_scope():
        self.W = variable.Parameter(
            initializers._get_initializer(initialW),
            (out_channels, in_channels) + ksize)

        if nobias:
            self.b = None
        else:
            bias_init = 0 if initial_bias is None else initial_bias
            self.b = variable.Parameter(
                initializers._get_initializer(bias_init), out_channels)

        if self.use_gamma:
            # Seed gamma with the spectral norm (largest singular value)
            # of the flattened weight matrix.
            W_mat = self.W.data.reshape(self.W.shape[0], -1)
            singular_values = np.linalg.svd(W_mat, compute_uv=False)
            self.gamma = variable.Parameter(
                singular_values[0], (1,) * len(self.W.shape))
示例14: __init__
# Required import: from chainer import variable
# Or: from chainer.variable import Parameter
def __init__(self, axis=1, shape=None, initial_bias=None):
    """Initialize the Bias link.

    Args:
        axis (int): First axis of the input along which b is applied.
        shape (tuple or None): Shape of the bias parameter; if ``None``,
            no parameter is registered.
        initial_bias: Bias initializer; defaults to zeros.
    """
    super(Bias, self).__init__()
    # Register the b parameter only when its shape is known upfront.
    if shape is not None:
        bias_init = 0 if initial_bias is None else initial_bias
        with self.init_scope():
            self.b = variable.Parameter(
                initializers._get_initializer(bias_init), shape)
    self.axis = axis
示例15: __init__
# Required import: from chainer import variable
# Or: from chainer.variable import Parameter
def __init__(self, size, comm, decay=0.9, eps=2e-5, dtype=None,
             use_gamma=True, use_beta=True,
             initial_gamma=None, initial_beta=None,
             communication_backend='auto'):
    """Initialize multi-node batch normalization.

    Args:
        size: Shape of the normalized channels.
        comm: ChainerMN communicator used to aggregate statistics.
        decay (float): Decay rate of the running statistics.
        eps (float): Numerical-stability epsilon.
        dtype: Parameter dtype; mixed16 is promoted to float32.
        use_gamma (bool): If ``True``, add a learnable scale.
        use_beta (bool): If ``True``, add a learnable shift.
        initial_gamma: Scale initializer; defaults to ones.
        initial_beta: Shift initializer; defaults to zeros.
        communication_backend (str): Backend selector ('auto', ...).
    """
    chainer.utils.experimental(
        'chainermn.links.MultiNodeBatchNormalization')
    super(MultiNodeBatchNormalization, self).__init__()
    self._highprec_dtype = chainer.get_dtype(
        dtype, map_mixed16=numpy.float32)
    self.comm = comm

    # Running statistics and the sample counter are persistent state.
    self.avg_mean = numpy.zeros(size, dtype=self._highprec_dtype)
    self.register_persistent('avg_mean')
    self.avg_var = numpy.zeros(size, dtype=self._highprec_dtype)
    self.register_persistent('avg_var')
    self.N = 0
    self.register_persistent('N')

    self.decay = decay
    self.eps = eps
    self._communication_backend = \
        chainermn_batch_normalization.get_communication_backend(
            comm, communication_backend)

    with self.init_scope():
        if use_gamma:
            gamma_init = initializers._get_initializer(
                1 if initial_gamma is None else initial_gamma)
            gamma_init.dtype = self._highprec_dtype
            self.gamma = variable.Parameter(gamma_init, size)
        if use_beta:
            beta_init = initializers._get_initializer(
                0 if initial_beta is None else initial_beta)
            beta_init.dtype = self._highprec_dtype
            self.beta = variable.Parameter(beta_init, size)