This page collects typical usage examples of the Python method chainer.initializers.Normal. If you have been wondering what initializers.Normal does, how to call it, or what real code that uses it looks like, the curated samples below should help. You can also browse the other members of the chainer.initializers module for further context.
Below are 14 code examples of initializers.Normal, sorted by popularity by default. You can upvote the examples you like or find useful; your feedback helps surface better Python examples.
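Before diving into the extracted examples, here is a minimal, self-contained sketch (assuming only that Chainer and NumPy are installed) of what initializers.Normal itself does: it fills an array in place with samples drawn from a Gaussian with mean 0 and standard deviation scale.

import numpy
from chainer import initializers

init = initializers.Normal(scale=0.01)             # stddev 0.01
W = numpy.empty((1024, 256), dtype=numpy.float32)
init(W)                                            # fills W in place
print(W.std())                                     # roughly 0.01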
Example 1: __init__
# Required import: from chainer import initializers [as alias]
# Or: from chainer.initializers import Normal [as alias]
def __init__(self, n_class, scales):
    super(BboxHead, self).__init__()

    fc_init = {
        'initialW': Caffe2FCUniform(),
        'initial_bias': Caffe2FCUniform(),
    }
    with self.init_scope():
        self.fc1 = L.Linear(1024, **fc_init)
        self.fc2 = L.Linear(1024, **fc_init)
        self.loc = L.Linear(
            n_class * 4, initialW=initializers.Normal(0.001))
        self.conf = L.Linear(n_class, initialW=initializers.Normal(0.01))

    self._n_class = n_class
    self._scales = scales
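A hedged standalone sketch of the initialization pattern above, outside the full BboxHead class: the localization head uses a much narrower Gaussian (0.001) than the classification head (0.01), and because in_size is omitted, the weights are only materialized on the first call. The n_class value is illustrative.

import numpy as np
import chainer.links as L
from chainer import initializers

n_class = 81  # illustrative value (e.g. 80 classes + background)
loc = L.Linear(n_class * 4, initialW=initializers.Normal(0.001))
conf = L.Linear(n_class, initialW=initializers.Normal(0.01))

x = np.zeros((2, 1024), dtype=np.float32)
loc(x); conf(x)                  # first call materializes W lazily
print(loc.W.array.std())         # ~0.001
print(conf.W.array.std())        # ~0.01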
Example 2: __init__
# Required import: from chainer import initializers [as alias]
# Or: from chainer.initializers import Normal [as alias]
def __init__(self, vocab_size, hidden_size, dropout_ratio, ignore_label):
    super(LSTMLanguageModel, self).__init__()
    with self.init_scope():
        self.embed_word = L.EmbedID(
            vocab_size,
            hidden_size,
            initialW=initializers.Normal(1.0),
            ignore_label=ignore_label
        )
        self.embed_img = L.Linear(
            hidden_size,
            initialW=initializers.Normal(0.01)
        )
        self.lstm = L.LSTM(hidden_size, hidden_size)
        self.out_word = L.Linear(
            hidden_size,
            vocab_size,
            initialW=initializers.Normal(0.01)
        )

    self.dropout_ratio = dropout_ratio
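A small sketch of the ignore_label mechanism the language model above relies on (sizes are hypothetical): ids equal to ignore_label embed to all-zero rows and receive no gradient, which is how padded positions are skipped.

import numpy as np
import chainer.links as L
from chainer import initializers

embed = L.EmbedID(100, 16, initialW=initializers.Normal(1.0),
                  ignore_label=-1)
ids = np.array([3, 7, -1], dtype=np.int32)   # -1 marks padding
vecs = embed(ids)
print(vecs.shape)                            # (3, 16); the -1 row is zeros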
Example 3: test_copy_with_init_mode
# Required import: from chainer import initializers [as alias]
# Or: from chainer.initializers import Normal [as alias]
def test_copy_with_init_mode(self):
    self.link.u.initializer = initializers.Normal(
        dtype=self.link.u.initializer.dtype)
    self.link.u.initialize((2, 3))

    link = self.link.copy(mode='init')
    self.assertFalse(numpy.array_equal(self.link.u.array, link.u.array))
    self.assertIsInstance(link._params, set)
    self.assertIsInstance(link._persistent, set)
    self.assertTrue(hasattr(link, 'x'))
    self.assertTrue(hasattr(link, 'y'))
    self.assertTrue(hasattr(link, 'u'))
    self.assertTrue(hasattr(link, 'p'))
    self.assertIsNot(link.x, self.link.x)
    self.assertIsNot(link.x.array, self.link.x.array)
    self.assertIsNot(link.y, self.link.y)
    self.assertIsNot(link.y.array, self.link.y.array)
    self.assertIsNot(link.p, self.link.p)
    self.assertIsNot(link.name, None)
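What the test checks, as a standalone sketch: copy(mode='init') re-runs each parameter's initializer, so the copy gets freshly sampled values rather than shared or duplicated arrays.

import numpy
import chainer
from chainer import initializers

class Layer(chainer.Link):
    def __init__(self):
        super(Layer, self).__init__()
        with self.init_scope():
            self.u = chainer.Parameter(initializers.Normal(), shape=(2, 3))

link = Layer()
fresh = link.copy(mode='init')
print(numpy.array_equal(link.u.array, fresh.u.array))  # False: re-sampled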
Example 4: create_initializer
# Required import: from chainer import initializers [as alias]
# Or: from chainer.initializers import Normal [as alias]
def create_initializer(init_type, scale=None, fillvalue=None):
    if init_type == 'identity':
        return initializers.Identity() if scale is None else initializers.Identity(scale=scale)
    if init_type == 'constant':
        return initializers.Constant(fillvalue)
    if init_type == 'zero':
        return initializers.Zero()
    if init_type == 'one':
        return initializers.One()
    if init_type == 'normal':
        return initializers.Normal() if scale is None else initializers.Normal(scale)
    if init_type == 'glorotNormal':
        return initializers.GlorotNormal() if scale is None else initializers.GlorotNormal(scale)
    if init_type == 'heNormal':
        return initializers.HeNormal() if scale is None else initializers.HeNormal(scale)
    # For the remaining types, fall back to the library default when no
    # scale is given instead of passing scale=None through.
    if init_type == 'orthogonal':
        return initializers.Orthogonal() if scale is None else initializers.Orthogonal(scale)
    if init_type == 'uniform':
        return initializers.Uniform() if scale is None else initializers.Uniform(scale)
    if init_type == 'leCunUniform':
        return initializers.LeCunUniform() if scale is None else initializers.LeCunUniform(scale)
    if init_type == 'glorotUniform':
        return initializers.GlorotUniform() if scale is None else initializers.GlorotUniform(scale)
    if init_type == 'heUniform':
        return initializers.HeUniform() if scale is None else initializers.HeUniform(scale)
    raise ValueError("Unknown initializer type: {0}".format(init_type))
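Typical calls, assuming the factory above is in scope; 'normal' with an explicit scale maps straight onto initializers.Normal(scale).

fc_init = create_initializer('normal', scale=0.02)    # Normal, stddev 0.02
he_init = create_initializer('heNormal')              # library-default scale
zero_init = create_initializer('constant', fillvalue=0.0)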
Example 5: __init__
# Required import: from chainer import initializers [as alias]
# Or: from chainer.initializers import Normal [as alias]
def __init__(self, scales):
    super(RPN, self).__init__()

    init = {'initialW': initializers.Normal(0.01)}
    with self.init_scope():
        self.conv = L.Convolution2D(256, 3, pad=1, **init)
        self.loc = L.Convolution2D(len(self._anchor_ratios) * 4, 1, **init)
        self.conf = L.Convolution2D(len(self._anchor_ratios), 1, **init)

    self._scales = scales
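A hedged reading of the head shapes above, with _anchor_ratios assumed to be the usual ChainerCV class attribute (0.5, 1, 2): per spatial location the RPN predicts 4 box offsets per anchor plus 1 objectness score per anchor, all initialized from Normal(0.01).

import chainer.links as L
from chainer import initializers

anchor_ratios = (0.5, 1, 2)   # assumed value, matching ChainerCV's FPN
init = {'initialW': initializers.Normal(0.01)}
loc = L.Convolution2D(len(anchor_ratios) * 4, 1, **init)   # 12 channels
conf = L.Convolution2D(len(anchor_ratios), 1, **init)      # 3 channels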
Example 6: __init__
# Required import: from chainer import initializers [as alias]
# Or: from chainer.initializers import Normal [as alias]
def __init__(self, n_units, n_vocab, encoder, max_memory, hops):
    super(MemNN, self).__init__()

    with self.init_scope():
        self.embeds = chainer.ChainList()
        self.temporals = chainer.ChainList()

    normal = initializers.Normal()
    # Share both embedding matrices between adjacent layers
    for _ in six.moves.range(hops + 1):
        self.embeds.append(L.EmbedID(n_vocab, n_units, initialW=normal))
        self.temporals.append(
            L.EmbedID(max_memory, n_units, initialW=normal))

    self.memories = [
        Memory(self.embeds[i], self.embeds[i + 1],
               self.temporals[i], self.temporals[i + 1], encoder)
        for i in six.moves.range(hops)
    ]
    # The question embedding is the same as the input embedding of the
    # first layer
    self.B = self.embeds[0]
    # The answer prediction matrix W is the same as the final output layer
    self.W = lambda u: F.linear(u, self.embeds[-1].W)

    self.encoder = encoder
    self.n_units = n_units
    self.max_memory = max_memory
    self.hops = hops
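A sketch of the weight tying used above: the answer projection simply reuses the last embedding matrix through F.linear, so no separate output parameter is trained (shapes are illustrative).

import numpy as np
import chainer.functions as F
import chainer.links as L
from chainer import initializers

embed = L.EmbedID(50, 8, initialW=initializers.Normal())
u = np.zeros((4, 8), dtype=np.float32)
scores = F.linear(u, embed.W)   # (4, 50): one score per vocabulary entry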
Example 7: setUp
# Required import: from chainer import initializers [as alias]
# Or: from chainer.initializers import Normal [as alias]
def setUp(self):
    class Layer(chainer.Link):
        def __init__(self):
            super(Layer, self).__init__()
            with self.init_scope():
                self.x = chainer.Parameter(
                    chainer.initializers.Normal(), shape=(2, 3))

        def forward(self):
            pass

    self.link = Layer()
Example 8: test_copy_with_share_mode
# Required import: from chainer import initializers [as alias]
# Or: from chainer.initializers import Normal [as alias]
def test_copy_with_share_mode(self):
    c2 = self.c2.copy(mode='share')
    self.l1.x.initializer = initializers.Normal(
        dtype=self.l1.x.initializer.dtype)
    self.l1.x.initialize(self.l1.x.shape)
    self.l2.x.initializer = initializers.Normal(
        dtype=self.l2.x.initializer.dtype)
    self.l2.x.initialize(self.l2.x.shape)

    self.assertIs(c2.name, None)
    self.assertIsInstance(c2._children, list)
    self.assertIsNot(c2[0], self.c1)
    self.assertEqual(c2[0].name, '0')
    self.assertIsInstance(c2[0]._children, list)
    self.assertIsNot(c2[0][0], self.l1)
    self.assertEqual(c2[0][0].name, '0')
    self.assertIsNot(c2[0][0].x, self.l1.x)
    self.assertIs(c2[0][0].x.data, self.l1.x.data)
    self.assertIs(c2[0][0].x.grad, None)
    self.assertIsNot(c2[0][1], self.l2)
    self.assertEqual(c2[0][1].name, '1')
    self.assertIsNot(c2[0][1].x, self.l2.x)
    self.assertIs(c2[0][1].x.data, self.l2.x.data)
    self.assertIs(c2[0][1].x.grad, None)
    self.assertIsNot(c2[1], self.l3)
    self.assertEqual(c2[1].name, '1')
    self.assertIsNot(c2[1].x, self.l3.x)
    self.assertIs(c2[1].x.data, self.l3.x.data)
    self.assertIs(c2[1].x.grad, None)
Example 9: test_copy_with_copy_mode
# Required import: from chainer import initializers [as alias]
# Or: from chainer.initializers import Normal [as alias]
def test_copy_with_copy_mode(self):
    self.l1.x.initializer = initializers.Normal(
        dtype=self.l1.x.initializer.dtype)
    self.l1.x.initialize(self.l1.x.shape)
    self.l2.x.initializer = initializers.Normal(
        dtype=self.l2.x.initializer.dtype)
    self.l2.x.initialize(self.l2.x.shape)

    c2 = self.c2.copy(mode='copy')
    self.assertIs(c2.name, None)
    self.assertIsInstance(c2._children, list)
    self.assertEqual(c2[0].name, '0')
    self.assertIsInstance(c2[0]._children, list)
    self.assertIsNot(c2[0][0], self.l1)
    self.assertEqual(c2[0][0].name, '0')
    self.assertIsNot(c2[0][0].x, self.l1.x)
    self.assertIsNot(c2[0][0].x.data, self.l1.x.data)
    self.assertTrue(numpy.array_equal(c2[0][0].x.data, self.l1.x.data))
    self.assertIs(c2[0][0].x.grad, None)
    self.assertIsNot(c2[0][1], self.l2)
    self.assertEqual(c2[0][1].name, '1')
    self.assertIsNot(c2[0][1].x, self.l2.x)
    self.assertIsNot(c2[0][1].x.data, self.l2.x.data)
    self.assertTrue(numpy.array_equal(c2[0][1].x.data, self.l2.x.data))
    self.assertIs(c2[0][1].x.grad, None)
    self.assertIsNot(c2[1], self.l3)
    self.assertEqual(c2[1].name, '1')
    self.assertIsNot(c2[1].x, self.l3.x)
    self.assertIsNot(c2[1].x.data, self.l3.x.data)
    # l3 is constructed with shape argument but not initialized
    self.assertTrue(numpy.isnan(c2[1].x.grad).all())
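Examples 8 and 9 contrast two of the three copy modes; here is a compact sketch of all three on the same Normal-initialized parameter: 'share' aliases the underlying arrays, 'copy' deep-copies them, and 'init' (Example 3) re-samples them.

import numpy
import chainer
from chainer import initializers

class Layer(chainer.Link):
    def __init__(self):
        super(Layer, self).__init__()
        with self.init_scope():
            self.x = chainer.Parameter(initializers.Normal(), shape=(2, 3))

l = Layer()
shared = l.copy(mode='share')
copied = l.copy(mode='copy')
print(shared.x.array is l.x.array)                    # True: same buffer
print(copied.x.array is l.x.array)                    # False: deep copy
print(numpy.array_equal(copied.x.array, l.x.array))   # True: same values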
Example 10: __init__
# Required import: from chainer import initializers [as alias]
# Or: from chainer.initializers import Normal [as alias]
def __init__(self, nf, rf, nx):
    super(Conv1D, self).__init__()
    self.rf = rf
    self.nf = nf
    if rf == 1:  # faster 1x1 conv
        with self.init_scope():
            self.w = chainer.Parameter(
                initializers.Normal(scale=0.02), (nf, nx))  # transposed
            self.b = chainer.Parameter(0., nf)
    else:  # was used to train LM
        raise NotImplementedError
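What the rf == 1 branch computes, as a hedged standalone sketch: with the (nf, nx) weight stored transposed, the 1x1 "convolution" over the feature axis is just an affine map, i.e. F.linear.

import numpy as np
import chainer
import chainer.functions as F
from chainer import initializers

nf, nx = 12, 8
w = chainer.Parameter(initializers.Normal(scale=0.02), (nf, nx))
b = chainer.Parameter(0., nf)
x = np.zeros((4, nx), dtype=np.float32)
y = F.linear(x, w, b)   # equivalent 1x1 conv; y.shape == (4, nf)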
Example 11: __init__
# Required import: from chainer import initializers [as alias]
# Or: from chainer.initializers import Normal [as alias]
def __init__(self, n_layer,
             n_class=None,
             pretrained_model=None,
             mean=None, initialW=None, fc_kwargs={}):
    blocks = self._blocks[n_layer]

    param, path = utils.prepare_pretrained_model(
        {'n_class': n_class, 'mean': mean},
        pretrained_model, self._models[n_layer],
        {'n_class': 1000, 'mean': _imagenet_mean})
    self.mean = param['mean']

    if initialW is None:
        initialW = initializers.HeNormal(scale=1., fan_option='fan_out')
    if 'initialW' not in fc_kwargs:
        fc_kwargs['initialW'] = initializers.Normal(scale=0.01)
    if pretrained_model:
        # As a sampling process is time-consuming,
        # we employ a zero initializer for faster computation.
        initialW = initializers.constant.Zero()
        fc_kwargs['initialW'] = initializers.constant.Zero()
    kwargs = {
        'initialW': initialW, 'stride_first': True, 'add_seblock': True}

    super(SEResNet, self).__init__()
    with self.init_scope():
        self.conv1 = Conv2DBNActiv(None, 64, 7, 2, 3, nobias=True,
                                   initialW=initialW)
        self.pool1 = lambda x: F.max_pooling_2d(x, ksize=3, stride=2)
        self.res2 = ResBlock(blocks[0], None, 64, 256, 1, **kwargs)
        self.res3 = ResBlock(blocks[1], None, 128, 512, 2, **kwargs)
        self.res4 = ResBlock(blocks[2], None, 256, 1024, 2, **kwargs)
        self.res5 = ResBlock(blocks[3], None, 512, 2048, 2, **kwargs)
        self.pool5 = lambda x: F.average(x, axis=(2, 3))
        self.fc6 = L.Linear(None, param['n_class'], **fc_kwargs)
        self.prob = F.softmax

    if path:
        chainer.serializers.load_npz(path, self)
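The zero-initializer shortcut used above (and in the three constructors that follow), isolated as a sketch: when load_npz will overwrite every weight anyway, sampling Gaussians is wasted work, so Zero() is substituted first.

import numpy as np
from chainer import initializers

init = initializers.constant.Zero()
W = np.empty((64, 3, 7, 7), dtype=np.float32)
init(W)
print(W.any())   # False: all zeros until load_npz fills them in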
Example 12: __init__
# Required import: from chainer import initializers [as alias]
# Or: from chainer.initializers import Normal [as alias]
def __init__(self, n_layer,
             n_class=None,
             pretrained_model=None,
             mean=None, initialW=None, fc_kwargs={}):
    blocks = self._blocks[n_layer]

    param, path = utils.prepare_pretrained_model(
        {'n_class': n_class, 'mean': mean},
        pretrained_model, self._models[n_layer],
        {'n_class': 1000, 'mean': _imagenet_mean})
    self.mean = param['mean']

    if initialW is None:
        initialW = initializers.HeNormal(scale=1., fan_option='fan_out')
    if 'initialW' not in fc_kwargs:
        fc_kwargs['initialW'] = initializers.Normal(scale=0.01)
    if pretrained_model:
        # As a sampling process is time-consuming,
        # we employ a zero initializer for faster computation.
        initialW = initializers.constant.Zero()
        fc_kwargs['initialW'] = initializers.constant.Zero()
    kwargs = {
        'groups': 32, 'initialW': initialW, 'stride_first': False,
        'add_seblock': True}

    super(SEResNeXt, self).__init__()
    with self.init_scope():
        self.conv1 = Conv2DBNActiv(None, 64, 7, 2, 3, nobias=True,
                                   initialW=initialW)
        self.pool1 = lambda x: F.max_pooling_2d(x, ksize=3, stride=2)
        self.res2 = ResBlock(blocks[0], None, 128, 256, 1, **kwargs)
        self.res3 = ResBlock(blocks[1], None, 256, 512, 2, **kwargs)
        self.res4 = ResBlock(blocks[2], None, 512, 1024, 2, **kwargs)
        self.res5 = ResBlock(blocks[3], None, 1024, 2048, 2, **kwargs)
        self.pool5 = lambda x: F.average(x, axis=(2, 3))
        self.fc6 = L.Linear(None, param['n_class'], **fc_kwargs)
        self.prob = F.softmax

    if path:
        chainer.serializers.load_npz(path, self)
Example 13: __init__
# Required import: from chainer import initializers [as alias]
# Or: from chainer.initializers import Normal [as alias]
def __init__(self, n_layer,
             n_class=None,
             pretrained_model=None,
             mean=None, initialW=None, fc_kwargs={}, arch='fb'):
    if arch == 'fb':
        stride_first = False
        conv1_no_bias = True
    elif arch == 'he':
        stride_first = True
        # Kaiming He uses bias only for ResNet50
        conv1_no_bias = n_layer != 50
    else:
        raise ValueError('arch is expected to be one of [\'he\', \'fb\']')
    blocks = self._blocks[n_layer]

    param, path = prepare_pretrained_model(
        {'n_class': n_class, 'mean': mean},
        pretrained_model, self._models[arch][n_layer],
        {'n_class': 1000, 'mean': _imagenet_mean})
    self.mean = param['mean']

    if initialW is None:
        initialW = initializers.HeNormal(scale=1., fan_option='fan_out')
    if 'initialW' not in fc_kwargs:
        fc_kwargs['initialW'] = initializers.Normal(scale=0.01)
    if pretrained_model:
        # As a sampling process is time-consuming,
        # we employ a zero initializer for faster computation.
        initialW = initializers.constant.Zero()
        fc_kwargs['initialW'] = initializers.constant.Zero()
    kwargs = {'initialW': initialW, 'stride_first': stride_first}

    super(ResNet, self).__init__()
    with self.init_scope():
        self.conv1 = Conv2DBNActiv(None, 64, 7, 2, 3, nobias=conv1_no_bias,
                                   initialW=initialW)
        self.pool1 = lambda x: F.max_pooling_2d(x, ksize=3, stride=2)
        self.res2 = ResBlock(blocks[0], None, 64, 256, 1, **kwargs)
        self.res3 = ResBlock(blocks[1], None, 128, 512, 2, **kwargs)
        self.res4 = ResBlock(blocks[2], None, 256, 1024, 2, **kwargs)
        self.res5 = ResBlock(blocks[3], None, 512, 2048, 2, **kwargs)
        self.pool5 = lambda x: F.average(x, axis=(2, 3))
        self.fc6 = L.Linear(None, param['n_class'], **fc_kwargs)
        self.prob = F.softmax

    self._pick = ('prob',)
    self._return_tuple = False

    if path:
        chainer.serializers.load_npz(path, self)
Example 14: __init__
# Required import: from chainer import initializers [as alias]
# Or: from chainer.initializers import Normal [as alias]
def __init__(self, n_layer,
             n_class=None,
             pretrained_model=None,
             mean=None, initialW=None, fc_kwargs={}, arch='fb'):
    if arch == 'fb':
        stride_first = False
        conv1_no_bias = True
    elif arch == 'he':
        stride_first = True
        # Kaiming He uses bias only for ResNet50
        conv1_no_bias = n_layer != 50
    else:
        raise ValueError('arch is expected to be one of [\'he\', \'fb\']')
    blocks = self._blocks[n_layer]

    param, path = utils.prepare_pretrained_model(
        {'n_class': n_class, 'mean': mean},
        pretrained_model, self._models[arch][n_layer],
        {'n_class': 1000, 'mean': _imagenet_mean})
    self.mean = param['mean']

    if initialW is None:
        initialW = initializers.HeNormal(scale=1., fan_option='fan_out')
    if 'initialW' not in fc_kwargs:
        fc_kwargs['initialW'] = initializers.Normal(scale=0.01)
    if pretrained_model:
        # As a sampling process is time-consuming,
        # we employ a zero initializer for faster computation.
        initialW = initializers.constant.Zero()
        fc_kwargs['initialW'] = initializers.constant.Zero()
    kwargs = {'initialW': initialW, 'stride_first': stride_first}

    super(ResNet, self).__init__()
    with self.init_scope():
        self.conv1 = Conv2DBNActiv(None, 64, 7, 2, 3, nobias=conv1_no_bias,
                                   initialW=initialW)
        self.pool1 = lambda x: F.max_pooling_2d(x, ksize=3, stride=2)
        self.res2 = ResBlock(blocks[0], None, 64, 256, 1, **kwargs)
        self.res3 = ResBlock(blocks[1], None, 128, 512, 2, **kwargs)
        self.res4 = ResBlock(blocks[2], None, 256, 1024, 2, **kwargs)
        self.res5 = ResBlock(blocks[3], None, 512, 2048, 2, **kwargs)
        self.pool5 = lambda x: F.average(x, axis=(2, 3))
        self.fc6 = L.Linear(None, param['n_class'], **fc_kwargs)
        self.prob = F.softmax

    if path:
        chainer.serializers.load_npz(path, self)
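Finally, a sketch of the two defaults all four ResNet-style constructors above share, checked in isolation: convolution weights use HeNormal with fan_option='fan_out', while the final fully connected layer falls back to Normal(scale=0.01).

import numpy as np
from chainer import initializers

conv_init = initializers.HeNormal(scale=1., fan_option='fan_out')
fc_init = initializers.Normal(scale=0.01)

W_conv = np.empty((64, 3, 7, 7), dtype=np.float32)
conv_init(W_conv)                 # std ~ sqrt(2 / fan_out)
W_fc = np.empty((1000, 2048), dtype=np.float32)
fc_init(W_fc)
print(W_conv.std(), W_fc.std())   # second value is roughly 0.01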