This article collects typical usage examples of the Python method chainer.initializers._get_initializer. If you are wondering what this method does, how to call it, or what it looks like in real code, the curated examples below may help. You can also explore further usage examples of the chainer.initializers module, where this method is defined.
The following presents 15 code examples of initializers._get_initializer, sorted by popularity by default.
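Before the examples, here is a minimal sketch of what the helper does: it normalizes an initializer specification (a scalar, a NumPy array, or a callable) into a callable initializer that fills an array in place. The scalar, array, and callable branches are exactly what Examples 12-14 below verify; the behavior for None (falling back to Chainer's default weight initializer) is stated as an assumption about recent Chainer versions and hedged in the comments.

# Minimal usage sketch of _get_initializer (assumes chainer and numpy are installed).
import numpy
from chainer import initializers

# A scalar is wrapped in a Constant initializer.
init = initializers._get_initializer(10)
x = numpy.empty((2, 3), dtype=numpy.float32)
init(x)  # x is now filled with 10

# A NumPy array is likewise wrapped in a Constant initializer.
init = initializers._get_initializer(numpy.array([1., 2., 3.]))
y = numpy.empty((3,), dtype=numpy.float32)
init(y)  # y == [1., 2., 3.]

# A callable is returned as-is.
def fill_hundred(arr):
    arr[...] = 100

init = initializers._get_initializer(fill_hundred)

# None falls back to Chainer's default weight initializer
# (LeCunNormal in recent Chainer releases).
default_init = initializers._get_initializer(None)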
Example 1: __init__
# Required import: from chainer import initializers [as alias]
# Or: from chainer.initializers import _get_initializer [as alias]
def __init__(self, axis=1, W_shape=None, bias_term=False, bias_shape=None,
             initialW=None, initial_bias=None):
    super(Scale, self).__init__()
    self.axis = axis

    with self.init_scope():
        # Add W parameter and/or bias term.
        if W_shape is not None:
            if initialW is None:
                initialW = 1
            W_initializer = initializers._get_initializer(initialW)
            self.W = variable.Parameter(W_initializer, W_shape)
            if bias_term:
                self.bias = Bias(axis, W_shape, initial_bias)
        else:
            if bias_term:
                if bias_shape is None:
                    raise ValueError(
                        'bias_shape should be given if W is not '
                        'learnt parameter and bias_term is True.')
                self.bias = Bias(axis, bias_shape, initial_bias)
Example 2: _initialize_params
# Required import: from chainer import initializers [as alias]
# Or: from chainer.initializers import _get_initializer [as alias]
def _initialize_params(self):
    lateral_init = initializers._get_initializer(self.lateral_init)
    upward_init = initializers._get_initializer(self.upward_init)
    bias_init = initializers._get_initializer(self.bias_init)
    forget_bias_init = initializers._get_initializer(self.forget_bias_init)

    for i in six.moves.range(0, 4 * self.state_size, self.state_size):
        lateral_init(self.lateral.W.data[i:i + self.state_size, :])
        upward_init(self.upward.W.data[i:i + self.state_size, :])

    a, i, f, o = lstm._extract_gates(
        self.upward.b.data.reshape(1, 4 * self.state_size, 1))
    bias_init(a)
    bias_init(i)
    forget_bias_init(f)
    bias_init(o)
Example 3: __init__
# Required import: from chainer import initializers [as alias]
# Or: from chainer.initializers import _get_initializer [as alias]
def __init__(self,
             units,
             in_units,
             drop_rate=0.5):
    super(CondenseLinear, self).__init__()
    drop_in_units = int(in_units * drop_rate)
    with self.init_scope():
        self.dense = L.Linear(
            in_size=drop_in_units,
            out_size=units)
        self.index = initializers.generate_array(
            initializer=initializers._get_initializer(0),
            shape=(drop_in_units,),
            xp=self.xp,
            dtype=np.int32)
        self.register_persistent("index")
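The example above also uses chainer.initializers.generate_array, which takes the initializer returned by _get_initializer and materializes an array of the requested shape and dtype. A small standalone sketch of that combination follows; the shape and dtype here are illustrative, not taken from the example.

# Sketch: materializing an integer index buffer from a constant initializer.
import numpy
from chainer import initializers

index = initializers.generate_array(
    initializer=initializers._get_initializer(0),
    shape=(5,),
    xp=numpy,
    dtype=numpy.int32)
# index == array([0, 0, 0, 0, 0], dtype=int32)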
Example 4: _initialize_params
# Required import: from chainer import initializers [as alias]
# Or: from chainer.initializers import _get_initializer [as alias]
def _initialize_params(self):
    lateral_init = initializers._get_initializer(self.lateral_init)
    upward_init = initializers._get_initializer(self.upward_init)
    bias_init = initializers._get_initializer(self.bias_init)
    forget_bias_init = initializers._get_initializer(self.forget_bias_init)

    for i in six.moves.range(0, 4 * self.state_size, self.state_size):
        lateral_init(self.lateral.W.array[i:i + self.state_size, :])
        upward_init(self.upward.W.array[i:i + self.state_size, :])

    a, i, f, o = lstm._extract_gates(
        self.upward.b.array.reshape(1, 4 * self.state_size, 1))
    bias_init(a)
    bias_init(i)
    forget_bias_init(f)
    bias_init(o)
Example 5: __init__
# Required import: from chainer import initializers [as alias]
# Or: from chainer.initializers import _get_initializer [as alias]
def __init__(self, in_size, out_size, ratio=.5, nobias=False,
             initialW=None, initial_bias=None):
    super(SimplifiedDropconnect, self).__init__()
    self.out_size = out_size
    self.ratio = ratio

    if initialW is None:
        initialW = initializers.HeNormal(1. / numpy.sqrt(2))

    with self.init_scope():
        W_initializer = initializers._get_initializer(initialW)
        self.W = variable.Parameter(W_initializer)
        if in_size is not None:
            self._initialize_params(in_size)

        if nobias:
            self.b = None
        else:
            if initial_bias is None:
                initial_bias = initializers.Constant(0)
            bias_initializer = initializers._get_initializer(initial_bias)
            self.b = variable.Parameter(bias_initializer, out_size)
Example 6: __init__
# Required import: from chainer import initializers [as alias]
# Or: from chainer.initializers import _get_initializer [as alias]
def __init__(self, in_channels, out_channels, in_size=None, ksize=None,
             stride=1, nobias=False, initialW=None, initial_bias=None,
             **kwargs):
    super(LocalConvolution2D, self).__init__()
    self.ksize = ksize
    self.stride = _pair(stride)
    self.nobias = nobias
    self.out_channels = out_channels

    with self.init_scope():
        W_initializer = initializers._get_initializer(initialW)
        self.W = variable.Parameter(W_initializer)
        if nobias:
            self.b = None
        else:
            if initial_bias is None:
                initial_bias = 0
            bias_initializer = initializers._get_initializer(initial_bias)
            self.b = variable.Parameter(bias_initializer)

        if in_channels is not None and in_size is not None:
            self._initialize_params(in_channels, _pair(in_size))
Example 7: __init__
# Required import: from chainer import initializers [as alias]
# Or: from chainer.initializers import _get_initializer [as alias]
def __init__(self, groups, size=None, eps=1e-5, initial_gamma=None,
             initial_beta=None):
    super(GroupNormalization, self).__init__()
    if initial_gamma is None:
        initial_gamma = 1
    if initial_beta is None:
        initial_beta = 0

    highprec_dtype = chainer.get_dtype(
        None, map_mixed16=numpy.float32)

    with self.init_scope():
        self.groups = groups
        gamma_initializer = \
            initializers._get_initializer(initial_gamma)
        gamma_initializer.dtype = highprec_dtype
        beta_initializer = \
            initializers._get_initializer(initial_beta)
        beta_initializer.dtype = highprec_dtype
        self.gamma = variable.Parameter(gamma_initializer)
        self.beta = variable.Parameter(beta_initializer)
        self.eps = eps

    if size is not None:
        self._initialize_params(size)
Example 8: __init__
# Required import: from chainer import initializers [as alias]
# Or: from chainer.initializers import _get_initializer [as alias]
def __init__(self, in_size, out_size=None, nobias=True, initialW=None,
             initial_bias=None):
    super(GraphConvolution, self).__init__()

    if out_size is None:
        in_size, out_size = None, in_size
    self.out_size = out_size

    with self.init_scope():
        if initialW is None:
            initialW = initializers.GlorotUniform()
        self.W = chainer.Parameter(initialW, (in_size, out_size))
        if nobias:
            self.b = None
        else:
            if initial_bias is None:
                initial_bias = 0
            bias_initializer = initializers._get_initializer(initial_bias)
            self.b = chainer.Parameter(bias_initializer, out_size)
Example 9: __init__
# Required import: from chainer import initializers [as alias]
# Or: from chainer.initializers import _get_initializer [as alias]
def __init__(self, ndim, in_channels, out_channels, ksize, stride=1, pad=0,
             nobias=False, initialW=None, initial_bias=None,
             cover_all=False, use_gamma=False, Ip=1, factor=None):
    super(SNConvolutionND, self).__init__()
    ksize = conv_nd.as_tuple(ksize, ndim)
    self.stride = stride
    self.pad = pad
    self.cover_all = cover_all
    self.use_gamma = use_gamma
    self.Ip = Ip
    self.u = np.random.normal(size=(1, out_channels)).astype(dtype="f")
    self.register_persistent('u')
    self.factor = factor

    with self.init_scope():
        W_shape = (out_channels, in_channels) + ksize
        self.W = variable.Parameter(
            initializers._get_initializer(initialW), W_shape)
        if nobias:
            self.b = None
        else:
            if initial_bias is None:
                initial_bias = 0
            initial_bias = initializers._get_initializer(initial_bias)
            self.b = variable.Parameter(initial_bias, out_channels)

        if self.use_gamma:
            W_mat = self.W.data.reshape(self.W.shape[0], -1)
            _, s, _ = np.linalg.svd(W_mat)
            self.gamma = variable.Parameter(s[0], (1,) * len(self.W.shape))
Example 10: __init__
# Required import: from chainer import initializers [as alias]
# Or: from chainer.initializers import _get_initializer [as alias]
def __init__(self, axis=1, shape=None, initial_bias=None):
    super(Bias, self).__init__()

    # Add b parameter if given.
    if shape is not None:
        with self.init_scope():
            if initial_bias is None:
                initial_bias = 0
            bias_initializer = initializers._get_initializer(initial_bias)
            self.b = variable.Parameter(bias_initializer, shape)

    self.axis = axis
Example 11: __init__
# Required import: from chainer import initializers [as alias]
# Or: from chainer.initializers import _get_initializer [as alias]
def __init__(self, size, comm, decay=0.9, eps=2e-5, dtype=None,
             use_gamma=True, use_beta=True,
             initial_gamma=None, initial_beta=None,
             communication_backend='auto'):
    chainer.utils.experimental(
        'chainermn.links.MultiNodeBatchNormalization')
    super(MultiNodeBatchNormalization, self).__init__()
    self._highprec_dtype = chainer.get_dtype(
        dtype, map_mixed16=numpy.float32)
    self.comm = comm

    self.avg_mean = numpy.zeros(size, dtype=self._highprec_dtype)
    self.register_persistent('avg_mean')
    self.avg_var = numpy.zeros(size, dtype=self._highprec_dtype)
    self.register_persistent('avg_var')
    self.N = 0
    self.register_persistent('N')
    self.decay = decay
    self.eps = eps

    self._communication_backend = \
        chainermn_batch_normalization.get_communication_backend(
            comm, communication_backend)

    with self.init_scope():
        if use_gamma:
            if initial_gamma is None:
                initial_gamma = 1
            initial_gamma = initializers._get_initializer(initial_gamma)
            initial_gamma.dtype = self._highprec_dtype
            self.gamma = variable.Parameter(initial_gamma, size)
        if use_beta:
            if initial_beta is None:
                initial_beta = 0
            initial_beta = initializers._get_initializer(initial_beta)
            initial_beta.dtype = self._highprec_dtype
            self.beta = variable.Parameter(initial_beta, size)
Example 12: test_scalar
# Required import: from chainer import initializers [as alias]
# Or: from chainer.initializers import _get_initializer [as alias]
def test_scalar(self):
    init = initializers._get_initializer(10)
    self.assertIsInstance(init, initializers.Constant)

    x = numpy.empty((2, 3), dtype=numpy.int32)
    init(x)

    expected = numpy.full((2, 3), 10, dtype=numpy.int32)
    numpy.testing.assert_array_equal(x, expected)
Example 13: test_numpy_array
# Required import: from chainer import initializers [as alias]
# Or: from chainer.initializers import _get_initializer [as alias]
def test_numpy_array(self):
    c = numpy.array([1, 2, 3])
    init = initializers._get_initializer(c)
    self.assertIsInstance(init, initializers.Constant)

    x = numpy.empty((3,), dtype=numpy.int32)
    init(x)

    expected = numpy.array([1, 2, 3], dtype=numpy.int32)
    numpy.testing.assert_array_equal(x, expected)
Example 14: test_callable
# Required import: from chainer import initializers [as alias]
# Or: from chainer.initializers import _get_initializer [as alias]
def test_callable(self):
    def initializer(arr):
        arr[...] = 100

    init = initializers._get_initializer(initializer)
    self.assertTrue(callable(init))

    x = numpy.empty((2, 3), dtype=numpy.int32)
    init(x)

    expected = numpy.full((2, 3), 100, dtype=numpy.int32)
    numpy.testing.assert_array_equal(x, expected)
Example 15: __init__
# Required import: from chainer import initializers [as alias]
# Or: from chainer.initializers import _get_initializer [as alias]
def __init__(
        self,
        in_size: tp.Optional[int],
        out_size: tp.Optional[int] = None,
        nobias: bool = False,
        initialW: tp.Optional[types.InitializerSpec] = None,
        initial_bias: tp.Optional[types.InitializerSpec] = None
) -> None:
    super(Linear, self).__init__()

    if out_size is None:
        in_size, out_size = None, in_size
    self.in_size = in_size
    self.out_size = out_size

    with self.init_scope():
        W_initializer = initializers._get_initializer(initialW)
        self.W = variable.Parameter(W_initializer)  # type: variable.Variable # NOQA
        if in_size is not None:
            self._initialize_params(in_size)

        if nobias:
            self.b = None  # type: tp.Optional[variable.Variable]
        else:
            if initial_bias is None:
                initial_bias = 0
            bias_initializer = initializers._get_initializer(initial_bias)
            self.b = variable.Parameter(bias_initializer, out_size)
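To close, here is a small usage sketch of the deferred-initialization pattern that Example 15 implements: passing initialW=None lets _get_initializer supply Chainer's default weight initializer, and leaving in_size as None defers creating W until the first forward pass. The sketch uses the public chainer.links.Linear, which follows the constructor shown above.

# Usage sketch (assumes chainer and numpy are installed).
import numpy
import chainer.links as L

layer = L.Linear(None, 4)  # in_size deferred; initialW=None -> default initializer
x = numpy.random.rand(2, 3).astype(numpy.float32)
y = layer(x)               # W is created here with shape (out_size, in_size) = (4, 3)
print(layer.W.shape, y.shape)  # (4, 3) (2, 4)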