This page collects typical usage examples of the Python method chainer.initializers.HeNormal. If you are wondering what initializers.HeNormal does, how to call it, or what real code that uses it looks like, the curated examples below should help. You can also explore other members of the chainer.initializers module for related usage examples.
Fifteen code examples of initializers.HeNormal are shown below, sorted by popularity by default.
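Before the examples, here is a minimal self-contained sketch of what HeNormal actually does (assuming chainer and numpy are installed). An HeNormal instance is a callable initializer: calling it on an existing array fills that array in place with samples from a normal distribution whose standard deviation is scale * sqrt(2 / fan_in).

import numpy as np
from chainer import initializers

# Weight tensor for a 3x3 conv with 3 input and 64 output channels.
W = np.empty((64, 3, 3, 3), dtype=np.float32)
initializers.HeNormal()(W)   # fills W in place
print(float(W.std()))        # roughly sqrt(2 / (3 * 3 * 3)) ~= 0.27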
Example 1: __init__

# Required imports: from chainer import initializers
# Alternatively: from chainer.initializers import HeNormal
def __init__(self, in_size, ch, out_size, stride=2, groups=1):
    super(BottleNeckA, self).__init__()
    initialW = initializers.HeNormal()

    with self.init_scope():
        self.conv1 = L.Convolution2D(
            in_size, ch, 1, stride, 0, initialW=initialW, nobias=True)
        self.bn1 = L.BatchNormalization(ch)
        self.conv2 = L.Convolution2D(
            ch, ch, 3, 1, 1, initialW=initialW, nobias=True,
            groups=groups)
        self.bn2 = L.BatchNormalization(ch)
        self.conv3 = L.Convolution2D(
            ch, out_size, 1, 1, 0, initialW=initialW, nobias=True)
        self.bn3 = L.BatchNormalization(out_size)

        self.conv4 = L.Convolution2D(
            in_size, out_size, 1, stride, 0,
            initialW=initialW, nobias=True)
        self.bn4 = L.BatchNormalization(out_size)
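The snippet above only shows the constructor; the forward pass is omitted. For orientation, a typical residual forward for this kind of projection-shortcut block looks roughly like the sketch below (an illustration, not part of the original example; assumes `import chainer.functions as F` and a constructed `BottleNeckA` instance).

def bottleneck_a_forward(block, x):
    # main branch: 1x1 reduce -> 3x3 (grouped) -> 1x1 expand
    h = F.relu(block.bn1(block.conv1(x)))
    h = F.relu(block.bn2(block.conv2(h)))
    h = block.bn3(block.conv3(h))
    # projection shortcut matches channels/stride, then residual add
    return F.relu(h + block.bn4(block.conv4(x)))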
Example 2: __init__

# Required imports: from chainer import initializers
# Alternatively: from chainer.initializers import HeNormal
def __init__(self):
    chainer.Chain.__init__(self)
    self.dtype = np.float16
    W = initializers.HeNormal(1 / np.sqrt(2), self.dtype)
    bias = initializers.Zero(self.dtype)
    with self.init_scope():
        self.conv1 = L.Convolution2D(None, 96, 11, stride=4,
                                     initialW=W, initial_bias=bias)
        self.conv2 = L.Convolution2D(None, 256, 5, pad=2,
                                     initialW=W, initial_bias=bias)
        self.conv3 = L.Convolution2D(None, 384, 3, pad=1,
                                     initialW=W, initial_bias=bias)
        self.conv4 = L.Convolution2D(None, 384, 3, pad=1,
                                     initialW=W, initial_bias=bias)
        self.conv5 = L.Convolution2D(None, 256, 3, pad=1,
                                     initialW=W, initial_bias=bias)
        self.fc6 = L.Linear(None, 4096, initialW=W, initial_bias=bias)
        self.fc7 = L.Linear(None, 4096, initialW=W, initial_bias=bias)
        self.fc8 = L.Linear(None, 1000, initialW=W, initial_bias=bias)
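A small check of the initializer settings in this example (a sketch assuming numpy and chainer): HeNormal constructed with an explicit dtype only accepts arrays of that dtype, and the extra 1/sqrt(2) scale halves the variance relative to the default.

import numpy as np
from chainer import initializers

W = np.empty((96, 3, 11, 11), dtype=np.float16)
initializers.HeNormal(1 / np.sqrt(2), np.float16)(W)
print(W.dtype, float(W.std()))  # float16, roughly sqrt(1 / (3 * 11 * 11)) ~= 0.05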
Example 3: __init__

# Required imports: from chainer import initializers
# Alternatively: from chainer.initializers import HeNormal
def __init__(self, in_size, ch, out_size, stride=2):
    super(BottleNeckA, self).__init__()
    initialW = initializers.HeNormal()

    with self.init_scope():
        self.conv1 = L.Convolution2D(
            in_size, ch, 1, stride, 0, initialW=initialW, nobias=True)
        self.bn1 = L.BatchNormalization(ch)
        self.conv2 = L.Convolution2D(
            ch, ch, 3, 1, 1, initialW=initialW, nobias=True)
        self.bn2 = L.BatchNormalization(ch)
        self.conv3 = L.Convolution2D(
            ch, out_size, 1, 1, 0, initialW=initialW, nobias=True)
        self.bn3 = L.BatchNormalization(out_size)

        self.conv4 = L.Convolution2D(
            in_size, out_size, 1, stride, 0,
            initialW=initialW, nobias=True)
        self.bn4 = L.BatchNormalization(out_size)
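A hypothetical instantiation of this block (assuming the constructor above lives in a `chainer.Chain` subclass named `BottleNeckA`, plus `import chainer.links as L`, `import numpy as np`, and the imports listed in the comments). Because `in_size` is given, HeNormal fills the convolution weights at construction time:

block = BottleNeckA(in_size=64, ch=64, out_size=256, stride=1)
print(block.conv1.W.shape)               # (64, 64, 1, 1)
print(float(block.conv1.W.array.std()))  # roughly sqrt(2 / 64) ~= 0.18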
Example 4: __init__

# Required imports: from chainer import initializers
# Alternatively: from chainer.initializers import HeNormal
def __init__(self, in_size, out_size, ratio=.5, nobias=False,
             initialW=None, initial_bias=None):
    super(SimplifiedDropconnect, self).__init__()

    self.out_size = out_size
    self.ratio = ratio

    if initialW is None:
        initialW = initializers.HeNormal(1. / numpy.sqrt(2))

    with self.init_scope():
        W_initializer = initializers._get_initializer(initialW)
        self.W = variable.Parameter(W_initializer)
        if in_size is not None:
            self._initialize_params(in_size)

        if nobias:
            self.b = None
        else:
            if initial_bias is None:
                initial_bias = initializers.Constant(0)
            bias_initializer = initializers._get_initializer(initial_bias)
            self.b = variable.Parameter(bias_initializer, out_size)
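The `variable.Parameter(W_initializer)` call above registers a parameter whose shape is not known yet; the weight is only materialized once `in_size` becomes available. A minimal sketch of that deferred-initialization pattern (assumes chainer and numpy; the shape (4, 8) is arbitrary):

import numpy as np
from chainer import initializers, variable

W_init = initializers._get_initializer(initializers.HeNormal(1. / np.sqrt(2)))
W = variable.Parameter(W_init)          # shape deferred
W.initialize((4, 8))                    # resolved once the input size is known
print(W.shape, float(W.array.std()))    # (4, 8), std roughly sqrt(1 / 8) ~= 0.35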
Example 5: create_initializer

# Required imports: from chainer import initializers
# Alternatively: from chainer.initializers import HeNormal
def create_initializer(init_type, scale=None, fillvalue=None):
    if init_type == 'identity':
        return initializers.Identity() if scale is None else initializers.Identity(scale=scale)
    if init_type == 'constant':
        return initializers.Constant(fillvalue)
    if init_type == 'zero':
        return initializers.Zero()
    if init_type == 'one':
        return initializers.One()
    if init_type == 'normal':
        return initializers.Normal() if scale is None else initializers.Normal(scale)
    if init_type == 'glorotNormal':
        return initializers.GlorotNormal() if scale is None else initializers.GlorotNormal(scale)
    if init_type == 'heNormal':
        return initializers.HeNormal() if scale is None else initializers.HeNormal(scale)
    if init_type == 'orthogonal':
        return initializers.Orthogonal() if scale is None else initializers.Orthogonal(scale)
    if init_type == 'uniform':
        return initializers.Uniform() if scale is None else initializers.Uniform(scale)
    if init_type == 'leCunUniform':
        return initializers.LeCunUniform() if scale is None else initializers.LeCunUniform(scale)
    if init_type == 'glorotUniform':
        return initializers.GlorotUniform() if scale is None else initializers.GlorotUniform(scale)
    if init_type == 'heUniform':
        return initializers.HeUniform() if scale is None else initializers.HeUniform(scale)
    raise ValueError("Unknown initializer type: {0}".format(init_type))
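A hypothetical usage of the factory above (assumes `import numpy` and `from chainer import initializers`). Every object it returns is a Chainer initializer, i.e. a callable that fills an existing array in place:

init = create_initializer('heNormal', scale=1.0)
w = numpy.empty((128, 64), dtype=numpy.float32)
init(w)                 # fills w in place with He-scaled normal noise
print(float(w.std()))   # roughly sqrt(2 / 64) ~= 0.18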
Example 6: lazy_init_conv_to_join

# Required imports: from chainer import initializers
# Alternatively: from chainer.initializers import HeNormal
def lazy_init_conv_to_join(block, x):
    if not hasattr(block, 'Conv2d_1x1'):
        with block.init_scope():
            block.Conv2d_1x1 = L.Convolution2D(x.shape[1], 1, initialW=I.HeNormal())
        if isinstance(x.data, cuda.ndarray):
            block.Conv2d_1x1.to_gpu(x.data.device)
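A hypothetical call of the helper above on CPU data (assumes `import numpy as np`, `import chainer`, `import chainer.links as L`, `import chainer.initializers as I`, and that `cuda` comes from `chainer.backends`):

block = chainer.Chain()
x = chainer.Variable(np.zeros((1, 16, 32, 32), dtype=np.float32))
lazy_init_conv_to_join(block, x)
print(hasattr(block, 'Conv2d_1x1'))   # True -- the 1x1 conv is created on demand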
Example 7: __init__

# Required imports: from chainer import initializers
# Alternatively: from chainer.initializers import HeNormal
def __init__(self, depth, ksize, stride=1, pad=0, initialW=I.HeNormal()):
    super(ConvBnRelu, self).__init__()
    with self.init_scope():
        self.conv = L.Convolution2D(None, depth, ksize=ksize, stride=stride,
                                    pad=pad, initialW=initialW, nobias=True)
        self.bn = L.BatchNormalization(depth, decay=0.9997, eps=0.001,
                                       use_gamma=False)
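As with the other constructor-only snippets, the forward pass is not shown; given the class name it presumably chains convolution, batch normalization, and ReLU, roughly as sketched below (assumes `import chainer.functions as F`):

def conv_bn_relu_forward(block, x):
    return F.relu(block.bn(block.conv(x)))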
Example 8: main

# Required imports: from chainer import initializers
# Alternatively: from chainer.initializers import HeNormal
def main():
    np.random.seed(314)
    model = ResBlock(3, None, 64, 256, 1, initialW=initializers.HeNormal(scale=1., fan_option='fan_out'), stride_first=False)
    v = np.random.rand(2, 64, 56, 56).astype(np.float32)
    testtools.generate_testcase(model, [v])
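A quick illustration of the `fan_option='fan_out'` argument used above (assumes numpy and chainer): the standard deviation is computed from the number of output connections rather than the input connections.

import numpy as np
from chainer import initializers

W = np.empty((256, 64, 1, 1), dtype=np.float32)
initializers.HeNormal(scale=1., fan_option='fan_out')(W)
print(float(W.std()))   # roughly sqrt(2 / 256) ~= 0.09, not sqrt(2 / 64)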
Example 9: __init__

# Required imports: from chainer import initializers
# Alternatively: from chainer.initializers import HeNormal
def __init__(self, compute_accuracy=False):
    super(NIN, self).__init__()
    self.compute_accuracy = compute_accuracy
    conv_init = I.HeNormal()  # MSRA scaling
    with self.init_scope():
        self.mlpconv1 = L.MLPConvolution2D(
            None, (96, 96, 96), 11, stride=4, conv_init=conv_init)
        self.mlpconv2 = L.MLPConvolution2D(
            None, (256, 256, 256), 5, pad=2, conv_init=conv_init)
        self.mlpconv3 = L.MLPConvolution2D(
            None, (384, 384, 384), 3, pad=1, conv_init=conv_init)
        self.mlpconv4 = L.MLPConvolution2D(
            None, (1024, 1024, 1000), 3, pad=1, conv_init=conv_init)
Example 10: __init__

# Required imports: from chainer import initializers
# Alternatively: from chainer.initializers import HeNormal
def __init__(self):
    super(NIN, self).__init__()
    conv_init = I.HeNormal()  # MSRA scaling
    with self.init_scope():
        self.mlpconv1 = L.MLPConvolution2D(
            None, (96, 96, 96), 11, stride=4, conv_init=conv_init)
        self.mlpconv2 = L.MLPConvolution2D(
            None, (256, 256, 256), 5, pad=2, conv_init=conv_init)
        self.mlpconv3 = L.MLPConvolution2D(
            None, (384, 384, 384), 3, pad=1, conv_init=conv_init)
        self.mlpconv4 = L.MLPConvolution2D(
            None, (1024, 1024, 1000), 3, pad=1, conv_init=conv_init)
Example 11: __init__

# Required imports: from chainer import initializers
# Alternatively: from chainer.initializers import HeNormal
def __init__(self, in_size, ch, groups=1):
    super(BottleNeckB, self).__init__()
    initialW = initializers.HeNormal()

    with self.init_scope():
        self.conv1 = L.Convolution2D(
            in_size, ch, 1, 1, 0, initialW=initialW, nobias=True)
        self.bn1 = L.BatchNormalization(ch)
        self.conv2 = L.Convolution2D(
            ch, ch, 3, 1, 1, initialW=initialW, nobias=True,
            groups=groups)
        self.bn2 = L.BatchNormalization(ch)
        self.conv3 = L.Convolution2D(
            ch, in_size, 1, 1, 0, initialW=initialW, nobias=True)
        self.bn3 = L.BatchNormalization(in_size)
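Unlike `BottleNeckA`, this variant keeps the input and output channel counts equal, so the usual forward pass uses an identity shortcut rather than a projection. A rough sketch, not part of the original example (assumes `import chainer.functions as F`):

def bottleneck_b_forward(block, x):
    h = F.relu(block.bn1(block.conv1(x)))
    h = F.relu(block.bn2(block.conv2(h)))
    h = block.bn3(block.conv3(h))
    return F.relu(h + x)   # identity shortcut: no conv4/bn4 needed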
Example 12: __init__

# Required imports: from chainer import initializers
# Alternatively: from chainer.initializers import HeNormal
def __init__(self, dtype=numpy.float32):
    super(SimpleNet, self).__init__()
    self.dtype = dtype
    W = initializers.HeNormal(1 / numpy.sqrt(2), self.dtype)
    bias = initializers.Zero(self.dtype)
    with self.init_scope():
        self.conv = chainer.links.Convolution2D(2, 2, 3, initialW=W,
                                                initial_bias=bias)
        self.fc = chainer.links.Linear(18, 2, initialW=W,
                                       initial_bias=bias)
    self.train = True
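A hypothetical check of the dtype handling above (assumes `import numpy` and that `SimpleNet` is a `chainer.Chain` subclass as defined): because both initializers carry an explicit dtype, the parameters they create inherit it.

net = SimpleNet(dtype=numpy.float16)
print(net.conv.W.dtype, net.fc.W.dtype)   # float16 float16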
Example 13: __init__

# Required imports: from chainer import initializers
# Alternatively: from chainer.initializers import HeNormal
def __init__(self, optimizer, dtype, use_placeholder):
    self.dtype = dtype
    weight = initializers.HeNormal(1 / numpy.sqrt(2), dtype)
    bias = initializers.Constant(0, dtype)
    in_size = None if use_placeholder else self.UNIT_NUM
    self.model = L.Linear(in_size, 2, initialW=weight, initial_bias=bias)
    self.optimizer = optimizer

    # true parameters
    self.w = numpy.random.uniform(
        -1, 1, (self.UNIT_NUM, 1)).astype(dtype)
    self.b = numpy.random.uniform(-1, 1, (1, )).astype(dtype)
Example 14: __init__

# Required imports: from chainer import initializers
# Alternatively: from chainer.initializers import HeNormal
def __init__(self, in_channels, channel_multiplier, ksize, stride=1, pad=0,
             nobias=False, initialW=None, initial_bias=None):
    super(DepthwiseConvolution2D, self).__init__()
    self.ksize = ksize
    self.stride = _pair(stride)
    self.pad = _pair(pad)
    self.channel_multiplier = channel_multiplier
    self.nobias = nobias

    if initialW is None:
        initialW = initializers.HeNormal(1. / numpy.sqrt(2))

    with self.init_scope():
        W_initializer = initializers._get_initializer(initialW)
        self.W = variable.Parameter(W_initializer)

        if nobias:
            self.b = None
        else:
            if initial_bias is None:
                initial_bias = initializers.Constant(0)
            bias_initializer = initializers._get_initializer(initial_bias)
            self.b = variable.Parameter(bias_initializer)

    if in_channels is not None:
        self._initialize_params(in_channels)
Example 15: __init__

# Required imports: from chainer import initializers
# Alternatively: from chainer.initializers import HeNormal
def __init__(self, n_layer,
             n_class=None,
             pretrained_model=None,
             mean=None, initialW=None, fc_kwargs={}):
    blocks = self._blocks[n_layer]

    param, path = utils.prepare_pretrained_model(
        {'n_class': n_class, 'mean': mean},
        pretrained_model, self._models[n_layer],
        {'n_class': 1000, 'mean': _imagenet_mean})
    self.mean = param['mean']

    if initialW is None:
        initialW = initializers.HeNormal(scale=1., fan_option='fan_out')
    if 'initialW' not in fc_kwargs:
        fc_kwargs['initialW'] = initializers.Normal(scale=0.01)
    if pretrained_model:
        # As a sampling process is time-consuming,
        # we employ a zero initializer for faster computation.
        initialW = initializers.constant.Zero()
        fc_kwargs['initialW'] = initializers.constant.Zero()
    kwargs = {
        'initialW': initialW, 'stride_first': True, 'add_seblock': True}

    super(SEResNet, self).__init__()
    with self.init_scope():
        self.conv1 = Conv2DBNActiv(None, 64, 7, 2, 3, nobias=True,
                                   initialW=initialW)
        self.pool1 = lambda x: F.max_pooling_2d(x, ksize=3, stride=2)
        self.res2 = ResBlock(blocks[0], None, 64, 256, 1, **kwargs)
        self.res3 = ResBlock(blocks[1], None, 128, 512, 2, **kwargs)
        self.res4 = ResBlock(blocks[2], None, 256, 1024, 2, **kwargs)
        self.res5 = ResBlock(blocks[3], None, 512, 2048, 2, **kwargs)
        self.pool5 = lambda x: F.average(x, axis=(2, 3))
        self.fc6 = L.Linear(None, param['n_class'], **fc_kwargs)
        self.prob = F.softmax

    if path:
        chainer.serializers.load_npz(path, self)
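A quick illustration of the shortcut in the pretrained branch above (a sketch assuming numpy and chainer): when serialized weights are about to overwrite every parameter anyway, a zero initializer avoids the cost of sampling from HeNormal.

import numpy as np
from chainer import initializers

W = np.empty((512, 512, 3, 3), dtype=np.float32)
initializers.constant.Zero()(W)   # no random sampling, just a constant fill
print(float(W.sum()))             # 0.0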