This article collects typical usage examples of the Python method chainer.functions.max_pooling_2d. If you are wondering how functions.max_pooling_2d is used in practice, the curated code examples below may help. You can also explore further usage examples of its parent module, chainer.functions.
The following presents 15 code examples of the functions.max_pooling_2d method, sorted by popularity.
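Before the examples, here is a minimal, self-contained sketch (not taken from any example below, with made-up tensor sizes): ksize and stride control the output spatial size, pad adds border padding, and cover_all decides whether partial windows at the border are kept.

import numpy as np
import chainer.functions as F

# Toy input: one 3-channel 32x32 "image" in NCHW layout.
x = np.random.rand(1, 3, 32, 32).astype(np.float32)

# A 2x2 window with stride 2 halves the spatial resolution.
y = F.max_pooling_2d(x, ksize=2, stride=2)
print(y.shape)  # (1, 3, 16, 16)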
Example 1: __call__
# Required import: from chainer import functions [as alias]
# Or: from chainer.functions import max_pooling_2d [as alias]
def __call__(self, x, t):
    h = F.relu(self.conv1_1(x))
    h = F.relu(self.conv1_2(h))
    h = F.max_pooling_2d(h, 2, 2)
    h = F.relu(self.conv2_1(h))
    h = F.relu(self.conv2_2(h))
    h = F.max_pooling_2d(h, 2, 2)
    h = F.relu(self.conv3_1(h))
    h = F.relu(self.conv3_2(h))
    h = F.relu(self.conv3_3(h))
    h = F.max_pooling_2d(h, 2, 2)
    h = F.relu(self.conv4_1(h))
    h = F.relu(self.conv4_2(h))
    h = F.relu(self.conv4_3(h))
    h = F.max_pooling_2d(h, 2, 2)
    h = F.relu(self.conv5_1(h))
    h = F.relu(self.conv5_2(h))
    h = F.relu(self.conv5_3(h))
    h = F.max_pooling_2d(h, 2, 2)
    h = F.dropout(F.relu(self.fc6(h)), ratio=0.5, train=self.train)
    h = F.dropout(F.relu(self.fc7(h)), ratio=0.5, train=self.train)
    h = self.score_fr(h)
    h = self.upsample(h)
    return h
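A quick hedged sketch (added here, not part of the example, with toy shapes): the five ksize=2, stride=2 poolings shrink the feature map by a factor of 2**5 = 32, which is presumably why the head above ends with an upsampling layer.

import numpy as np
import chainer.functions as F

h = np.random.rand(1, 3, 224, 224).astype(np.float32)
for _ in range(5):
    h = F.max_pooling_2d(h, 2, 2)  # each 2x2 / stride-2 pooling halves H and W
print(h.shape)  # (1, 3, 7, 7), i.e. 224 / 2**5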
Example 2: __call__
# Required import: from chainer import functions [as alias]
# Or: from chainer.functions import max_pooling_2d [as alias]
def __call__(self, x, t):
    h = F.relu(self.conv1(x))
    h = F.max_pooling_2d(h, 2, 1)
    h = F.relu(self.conv2(h))
    h = F.relu(self.conv3(h))
    h = F.dropout(F.relu(self.fc4(h)), train=self.train)
    h = self.fc5(h)
    h = F.reshape(h, (x.data.shape[0], 3, 16, 16))
    h = self.channelwise_inhibited(h)
    if self.train:
        self.loss = F.softmax_cross_entropy(h, t, normalize=False)
        return self.loss
    else:
        self.pred = F.softmax(h)
        return self.pred
Example 3: __call__
# Required import: from chainer import functions [as alias]
# Or: from chainer.functions import max_pooling_2d [as alias]
def __call__(self, x):
    h = F.relu(self.conv1_1(x))
    h = F.relu(self.conv1_2(h))
    h = F.max_pooling_2d(h, 2, stride=2)
    h = F.relu(self.conv2_1(h))
    h = F.relu(self.conv2_2(h))
    h = F.max_pooling_2d(h, 2, stride=2)
    h = F.relu(self.conv3_1(h))
    h = F.relu(self.conv3_2(h))
    h = F.max_pooling_2d(h, 2, stride=2)
    h = F.relu(self.conv4_1(h))
    h = F.relu(self.conv4_2(h))
    h = F.spatial_pyramid_pooling_2d(h, 3, F.MaxPooling2D)
    h = F.tanh(self.fc4(h))
    h = F.dropout(h, ratio=.5, train=self.train)
    h = F.tanh(self.fc5(h))
    h = F.dropout(h, ratio=.5, train=self.train)
    h = self.fc6(h)
    return h
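A hedged side note (not from the example, toy shapes only): spatial pyramid pooling turns a feature map of arbitrary spatial size into a fixed-length descriptor, which is what lets fc4 above accept variable-sized inputs; with a pyramid height of 3 the output carries 1 + 4 + 16 = 21 bins per channel. The class-style F.MaxPooling2D argument follows the older Chainer API this example targets (newer releases take a string such as 'max' instead).

import numpy as np
import chainer.functions as F

x = np.random.rand(1, 64, 13, 17).astype(np.float32)   # arbitrary spatial size
y = F.spatial_pyramid_pooling_2d(x, 3, F.MaxPooling2D)  # class-based argument, as in the example
print(y.shape)  # (1, 64 * 21, 1, 1) = (1, 1344, 1, 1)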
Example 4: forward
# Required import: from chainer import functions [as alias]
# Or: from chainer.functions import max_pooling_2d [as alias]
def forward(self, x, t):
    # def forward(self, x):
    h = F.max_pooling_2d(F.local_response_normalization(
        F.relu(self.conv1(x))), 3, stride=2)
    h = F.max_pooling_2d(F.local_response_normalization(
        F.relu(self.conv2(h))), 3, stride=2)
    h = F.relu(self.conv3(h))
    h = F.relu(self.conv4(h))
    h = F.max_pooling_2d(F.relu(self.conv5(h)), 3, stride=2)
    h = F.dropout(F.relu(self.fc6(h)))
    h = F.dropout(F.relu(self.fc7(h)))
    h = self.fc8(h)
    loss = F.softmax_cross_entropy(h, t)
    #loss = h
    # chainer.report({'loss': loss, 'accuracy': F.accuracy(h, t)}, self)
    return loss

# from https://github.com/chainer/chainer/blob/master/examples/imagenet/alex.py
Example 5: forward
# Required import: from chainer import functions [as alias]
# Or: from chainer.functions import max_pooling_2d [as alias]
def forward(self, x):
    """Computes the output of the Inception module.

    Args:
        x (~chainer.Variable): Input variable.

    Returns:
        Variable: Output variable. Its array has the same spatial size and
        the same minibatch size as the input array. The channel dimension
        has size ``out1 + out3 + out5 + proj_pool``.

    """
    out1 = self.conv1(x)
    out3 = self.conv3(relu.relu(self.proj3(x)))
    out5 = self.conv5(relu.relu(self.proj5(x)))
    pool = self.projp(F.max_pooling_2d(
        x, 3, stride=1, pad=1))
    y = relu.relu(concat.concat((out1, out3, out5, pool), axis=1))
    return y
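A small hedged check (added here, not part of the example) of the claim in the docstring: pooling with ksize=3, stride=1, pad=1 preserves the spatial size, so the pooled branch can be concatenated with the convolution branches along the channel axis.

import numpy as np
import chainer.functions as F

x = np.random.rand(2, 16, 28, 28).astype(np.float32)
pooled = F.max_pooling_2d(x, 3, stride=1, pad=1)
print(pooled.shape)  # (2, 16, 28, 28): same spatial size as the input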
Example 6: forward
# Required import: from chainer import functions [as alias]
# Or: from chainer.functions import max_pooling_2d [as alias]
def forward(self, x):
    hs = self.base(x)
    with flags.for_unroll():
        for i in range(self.n_base_output_minus1, -1, -1):
            hs[i] = self.inner[i](hs[i])
            if i < self.n_base_output_minus1:
                hs[i] += F.unpooling_2d(hs[i + 1], 2, cover_all=False)
        for i in range(self.n_base_output):
            hs[i] = self.outer[i](hs[i])
        for _ in range(self.scales_minus_n_base_output):
            hs.append(F.max_pooling_2d(hs[-1], 1, stride=2, cover_all=False))
    return hs
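A hedged sketch (added, not from the example, with made-up shapes) of the trick on the last loop above: max pooling with ksize=1 and stride=2 simply keeps every other pixel, giving a cheap 2x downsampling for the extra pyramid levels.

import numpy as np
import chainer.functions as F

h = np.random.rand(1, 256, 8, 8).astype(np.float32)
extra = F.max_pooling_2d(h, 1, stride=2, cover_all=False)
print(extra.shape)  # (1, 256, 4, 4)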
# ======================================
Example 7: forward
# Required import: from chainer import functions [as alias]
# Or: from chainer.functions import max_pooling_2d [as alias]
def forward(self, x, t):
    # def forward(self, x):
    h = F.max_pooling_2d(F.local_response_normalization(
        F.relu(self.conv1(x))), 3, stride=2)
    h = F.max_pooling_2d(F.local_response_normalization(
        F.relu(self.conv2(h))), 3, stride=2)
    h = F.relu(self.conv3(h))
    h = F.relu(self.conv4(h))
    h = F.max_pooling_2d(F.relu(self.conv5(h)), 3, stride=2)
    h = F.dropout(F.relu(self.fc6(h)))
    h = F.dropout(F.relu(self.fc7(h)))
    h = self.fc8(h)
    loss = F.softmax_cross_entropy(h, t)
    #loss = h
    # chainer.report({'loss': loss, 'accuracy': F.accuracy(h, t)}, self)
    return loss
Example 8: forward
# Required import: from chainer import functions [as alias]
# Or: from chainer.functions import max_pooling_2d [as alias]
def forward(self, x, t):
    h = self.bn1(self.conv1(x))
    h = F.max_pooling_2d(F.relu(h), 3, stride=2)
    h = self.res2(h)
    h = self.res3(h)
    h = self.res4(h)
    h = self.res5(h)
    h = F.average_pooling_2d(h, 7, stride=1)
    h = self.fc(h)
    #loss = F.softmax_cross_entropy(h, t)
    loss = self.softmax_cross_entropy(h, t)
    if self.compute_accuracy:
        chainer.report({'loss': loss, 'accuracy': F.accuracy(h, np.argmax(t, axis=1))}, self)
    else:
        chainer.report({'loss': loss}, self)
    return loss
Example 9: forward
# Required import: from chainer import functions [as alias]
# Or: from chainer.functions import max_pooling_2d [as alias]
def forward(self, x, t):
    h = F.max_pooling_2d(F.local_response_normalization(
        F.relu(self.conv1(x))), 3, stride=2)
    h = F.max_pooling_2d(F.local_response_normalization(
        F.relu(self.conv2(h))), 3, stride=2)
    h = F.relu(self.conv3(h))
    h = F.relu(self.conv4(h))
    h = F.max_pooling_2d(F.relu(self.conv5(h)), 3, stride=2)
    h = F.dropout(F.relu(self.fc6(h)))
    h = F.dropout(F.relu(self.fc7(h)))
    h = self.fc8(h)
    # EDIT(hamaji): ONNX-chainer cannot output SoftmaxCrossEntropy.
    # loss = F.softmax_cross_entropy(h, t)
    loss = self.softmax_cross_entropy(h, t)
    if self.compute_accuracy:
        chainer.report({'loss': loss, 'accuracy': F.accuracy(h, t)}, self)
    else:
        chainer.report({'loss': loss}, self)
    return loss
Example 10: __init__
# Required import: from chainer import functions [as alias]
# Or: from chainer.functions import max_pooling_2d [as alias]
def __init__(self,
             in_channels,
             out_channels,
             ksize,
             pad):
    super(DPNInitBlock, self).__init__()
    with self.init_scope():
        self.conv = L.Convolution2D(
            in_channels=in_channels,
            out_channels=out_channels,
            ksize=ksize,
            stride=2,
            pad=pad,
            nobias=True)
        self.bn = dpn_batch_norm(channels=out_channels)
        self.activ = F.relu
        self.pool = partial(
            F.max_pooling_2d,
            ksize=3,
            stride=2,
            pad=1,
            cover_all=False)
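A hedged, standalone illustration (with made-up tensor sizes) of the functools.partial pattern used here and in the remaining examples: the pooling hyperparameters are bound once, so self.pool can later be applied like a parameter-free layer, e.g. x = self.pool(x) inside the block's forward pass.

from functools import partial

import numpy as np
import chainer.functions as F

# Bind the pooling hyperparameters once; the result is a plain callable.
pool = partial(F.max_pooling_2d, ksize=3, stride=2, pad=1, cover_all=False)

x = np.random.rand(1, 64, 56, 56).astype(np.float32)
print(pool(x).shape)  # (1, 64, 28, 28)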
Example 11: __init__
# Required import: from chainer import functions [as alias]
# Or: from chainer.functions import max_pooling_2d [as alias]
def __init__(self,
             in_channels,
             out_channels):
    super(WRNInitBlock, self).__init__()
    with self.init_scope():
        self.conv = WRNConv(
            in_channels=in_channels,
            out_channels=out_channels,
            ksize=7,
            stride=2,
            pad=3,
            activate=True)
        self.pool = partial(
            F.max_pooling_2d,
            ksize=3,
            stride=2,
            pad=1,
            cover_all=False)
Example 12: __init__
# Required import: from chainer import functions [as alias]
# Or: from chainer.functions import max_pooling_2d [as alias]
def __init__(self,
             in_channels,
             out_channels):
    super(SEInitBlock, self).__init__()
    mid_channels = out_channels // 2
    with self.init_scope():
        self.conv1 = conv3x3_block(
            in_channels=in_channels,
            out_channels=mid_channels,
            stride=2)
        self.conv2 = conv3x3_block(
            in_channels=mid_channels,
            out_channels=mid_channels)
        self.conv3 = conv3x3_block(
            in_channels=mid_channels,
            out_channels=out_channels)
        self.pool = partial(
            F.max_pooling_2d,
            ksize=3,
            stride=2,
            pad=1,
            cover_all=False)
Example 13: __init__
# Required import: from chainer import functions [as alias]
# Or: from chainer.functions import max_pooling_2d [as alias]
def __init__(self,
             in_channels,
             out_channels_list):
    super(DownUnit, self).__init__()
    with self.init_scope():
        self.blocks = SimpleSequential()
        with self.blocks.init_scope():
            for i, out_channels in enumerate(out_channels_list):
                setattr(self.blocks, "block{}".format(i + 1), FishBlock(
                    in_channels=in_channels,
                    out_channels=out_channels))
                in_channels = out_channels
        self.pool = partial(
            F.max_pooling_2d,
            ksize=2,
            stride=2,
            cover_all=False)
Example 14: __init__
# Required import: from chainer import functions [as alias]
# Or: from chainer.functions import max_pooling_2d [as alias]
def __init__(self,
             in_channels,
             out_channels):
    super(PreResInitBlock, self).__init__()
    with self.init_scope():
        self.conv = L.Convolution2D(
            in_channels=in_channels,
            out_channels=out_channels,
            ksize=7,
            stride=2,
            pad=3,
            nobias=True)
        self.bn = L.BatchNormalization(size=out_channels)
        self.activ = F.relu
        self.pool = partial(
            F.max_pooling_2d,
            ksize=3,
            stride=2,
            pad=1,
            cover_all=False)
Example 15: __init__
# Required import: from chainer import functions [as alias]
# Or: from chainer.functions import max_pooling_2d [as alias]
def __init__(self,
             in_channels,
             out_channels):
    super(AirInitBlock, self).__init__()
    mid_channels = out_channels // 2
    with self.init_scope():
        self.conv1 = conv3x3_block(
            in_channels=in_channels,
            out_channels=mid_channels,
            stride=2)
        self.conv2 = conv3x3_block(
            in_channels=mid_channels,
            out_channels=mid_channels)
        self.conv3 = conv3x3_block(
            in_channels=mid_channels,
            out_channels=out_channels)
        self.pool = partial(
            F.max_pooling_2d,
            ksize=3,
            stride=2,
            pad=1,
            cover_all=False)