This article collects typical usage examples of mxnet.gluon.nn.GlobalAvgPool2D in Python. If you are unsure what nn.GlobalAvgPool2D does or how to call it, the curated code examples below should help; you can also browse further usage examples for the containing module, mxnet.gluon.nn.
The following shows 15 code examples of nn.GlobalAvgPool2D, sorted by popularity by default.
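Before the examples, a quick sketch of the layer itself (a minimal demonstration, not taken from any of the projects below): nn.GlobalAvgPool2D averages each channel over its entire spatial extent, turning an (N, C, H, W) input into an (N, C, 1, 1) output, i.e. the same result as a mean over the H and W axes with the dimensions kept. The layer has no parameters, so it can be called without initialization:

import mxnet as mx
from mxnet.gluon import nn

x = mx.nd.random.uniform(shape=(2, 8, 7, 7))
pool = nn.GlobalAvgPool2D()
y = pool(x)
print(y.shape)                                               # (2, 8, 1, 1)
print((y - x.mean(axis=(2, 3), keepdims=True)).abs().max())  # ~0
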
Example 1: __init__
# Required import: from mxnet.gluon import nn [as alias]
# Or: from mxnet.gluon.nn import GlobalAvgPool2D [as alias]
def __init__(self, depth, ctx, pretrained=True, num_classes=0):
    super(ResNet, self).__init__()
    self.pretrained = pretrained

    with self.name_scope():
        network = ResNet.__factory[depth](pretrained=pretrained, ctx=ctx).features[0:-1]
        # keep stride 1 in the last residual stage so the final feature map stays larger
        network[-1][0].body[0]._kwargs['stride'] = (1, 1)
        network[-1][0].downsample[0]._kwargs['stride'] = (1, 1)
        self.base = nn.HybridSequential()
        for n in network:
            self.base.add(n)

        # head: global average pooling -> flatten -> BN -> linear classifier
        self.avgpool = nn.GlobalAvgPool2D()
        self.flatten = nn.Flatten()
        self.bn = nn.BatchNorm(center=False, scale=True)
        self.bn.initialize(init=init.Zero(), ctx=ctx)
        self.classifier = nn.Dense(num_classes, use_bias=False)
        self.classifier.initialize(init=init.Normal(0.001), ctx=ctx)

Example 2: __init__
# Required import: from mxnet.gluon import nn [as alias]
# Or: from mxnet.gluon.nn import GlobalAvgPool2D [as alias]
def __init__(self, multiplier=1.0, classes=1000,
             norm_layer=BatchNorm, norm_kwargs=None, **kwargs):
    super(MobileNet, self).__init__(**kwargs)
    with self.name_scope():
        self.features = nn.HybridSequential(prefix='')
        with self.features.name_scope():
            _add_conv(self.features, channels=int(32 * multiplier), kernel=3, pad=1, stride=2,
                      norm_layer=norm_layer, norm_kwargs=norm_kwargs)
            # 13 depthwise-separable blocks: per-block depthwise channels, pointwise channels and stride
            dw_channels = [int(x * multiplier) for x in
                           [32, 64] + [128] * 2 + [256] * 2 + [512] * 6 + [1024]]
            channels = [int(x * multiplier) for x in
                        [64] + [128] * 2 + [256] * 2 + [512] * 6 + [1024] * 2]
            strides = [1, 2] * 3 + [1] * 5 + [2, 1]
            for dwc, c, s in zip(dw_channels, channels, strides):
                _add_conv_dw(self.features, dw_channels=dwc, channels=c, stride=s,
                             norm_layer=norm_layer, norm_kwargs=norm_kwargs)
            self.features.add(nn.GlobalAvgPool2D())
            self.features.add(nn.Flatten())

        self.output = nn.Dense(classes)

Example 3: __init__
# Required import: from mxnet.gluon import nn [as alias]
# Or: from mxnet.gluon.nn import GlobalAvgPool2D [as alias]
def __init__(self, block, layers, channels, classes=1000, thumbnail=False,
             last_gamma=False, use_se=False, norm_layer=BatchNorm, norm_kwargs=None, **kwargs):
    super(ResNetV1, self).__init__(**kwargs)
    assert len(layers) == len(channels) - 1
    with self.name_scope():
        self.features = nn.HybridSequential(prefix='')
        if thumbnail:
            self.features.add(_conv3x3(channels[0], 1, 0))
        else:
            self.features.add(nn.Conv2D(channels[0], 7, 2, 3, use_bias=False))
            self.features.add(norm_layer(**({} if norm_kwargs is None else norm_kwargs)))
            self.features.add(nn.Activation('relu'))
            self.features.add(nn.MaxPool2D(3, 2, 1))

        for i, num_layer in enumerate(layers):
            stride = 1 if i == 0 else 2
            self.features.add(self._make_layer(block, num_layer, channels[i+1],
                                               stride, i+1, in_channels=channels[i],
                                               last_gamma=last_gamma, use_se=use_se,
                                               norm_layer=norm_layer, norm_kwargs=norm_kwargs))
        self.features.add(nn.GlobalAvgPool2D())

        self.output = nn.Dense(classes, in_units=channels[-1])

Example 4: __init__
# Required import: from mxnet.gluon import nn [as alias]
# Or: from mxnet.gluon.nn import GlobalAvgPool2D [as alias]
def __init__(self, block, layers, channels, classes=10,
             norm_layer=BatchNorm, norm_kwargs=None, **kwargs):
    super(CIFARResNetV1, self).__init__(**kwargs)
    assert len(layers) == len(channels) - 1
    with self.name_scope():
        self.features = nn.HybridSequential(prefix='')
        self.features.add(nn.Conv2D(channels[0], 3, 1, 1, use_bias=False))
        self.features.add(norm_layer(**({} if norm_kwargs is None else norm_kwargs)))

        for i, num_layer in enumerate(layers):
            stride = 1 if i == 0 else 2
            self.features.add(self._make_layer(block, num_layer, channels[i+1],
                                               stride, i+1, in_channels=channels[i],
                                               norm_layer=norm_layer, norm_kwargs=norm_kwargs))
        self.features.add(nn.GlobalAvgPool2D())

        self.output = nn.Dense(classes, in_units=channels[-1])

Example 5: __init__
# Required import: from mxnet.gluon import nn [as alias]
# Or: from mxnet.gluon.nn import GlobalAvgPool2D [as alias]
def __init__(self, block, layers, channels, classes=1000, thumbnail=False,
             norm_layer=BatchNorm, norm_kwargs=None, **kwargs):
    super(SE_ResNetV1, self).__init__(**kwargs)
    assert len(layers) == len(channels) - 1
    with self.name_scope():
        self.features = nn.HybridSequential(prefix='')
        if thumbnail:
            self.features.add(_conv3x3(channels[0], 1, 0))
        else:
            self.features.add(nn.Conv2D(channels[0], 7, 2, 3, use_bias=False))
            self.features.add(norm_layer(**({} if norm_kwargs is None else norm_kwargs)))
            self.features.add(nn.Activation('relu'))
            self.features.add(nn.MaxPool2D(3, 2, 1))

        for i, num_layer in enumerate(layers):
            stride = 1 if i == 0 else 2
            self.features.add(self._make_layer(block, num_layer, channels[i+1],
                                               stride, i+1, in_channels=channels[i],
                                               norm_layer=norm_layer, norm_kwargs=norm_kwargs))
        self.features.add(nn.GlobalAvgPool2D())

        self.output = nn.Dense(classes, in_units=channels[-1])

Example 6: __init__
# Required import: from mxnet.gluon import nn [as alias]
# Or: from mxnet.gluon.nn import GlobalAvgPool2D [as alias]
def __init__(self, layers, cardinality, bottleneck_width, classes=10,
             norm_layer=BatchNorm, norm_kwargs=None, **kwargs):
    super(CIFARResNext, self).__init__(**kwargs)
    self.cardinality = cardinality
    self.bottleneck_width = bottleneck_width
    channels = 64

    with self.name_scope():
        self.features = nn.HybridSequential(prefix='')
        self.features.add(nn.Conv2D(channels, 3, 1, 1, use_bias=False))
        self.features.add(norm_layer(**({} if norm_kwargs is None else norm_kwargs)))
        self.features.add(nn.Activation('relu'))

        for i, num_layer in enumerate(layers):
            stride = 1 if i == 0 else 2
            self.features.add(self._make_layer(channels, num_layer, stride, i+1,
                                               norm_layer=norm_layer, norm_kwargs=norm_kwargs))
            channels *= 2
        self.features.add(nn.GlobalAvgPool2D())

        self.output = nn.Dense(classes)

Example 7: resnet18
# Required import: from mxnet.gluon import nn [as alias]
# Or: from mxnet.gluon.nn import GlobalAvgPool2D [as alias]
def resnet18(num_classes):
    net = nn.HybridSequential()
    with net.name_scope():
        net.add(
            nn.BatchNorm(),
            nn.Conv2D(64, kernel_size=3, strides=1),
            nn.MaxPool2D(pool_size=3, strides=2),
            Residual(64),
            Residual(64),
            Residual(128, same_shape=False),
            Residual(128),
            Residual(256, same_shape=False),
            Residual(256),
            nn.GlobalAvgPool2D(),
            nn.Dense(num_classes)
        )
    return net

Example 8: __init__
# Required import: from mxnet.gluon import nn [as alias]
# Or: from mxnet.gluon.nn import GlobalAvgPool2D [as alias]
def __init__(self,
             channels,
             bn_use_global_stats,
             reduction_ratio=16,
             num_layers=1,
             **kwargs):
    super(ChannelGate, self).__init__(**kwargs)
    mid_channels = channels // reduction_ratio

    with self.name_scope():
        # "squeeze": global average pooling reduces each channel to a single value
        self.pool = nn.GlobalAvgPool2D()
        self.flatten = nn.Flatten()
        self.init_fc = DenseBlock(
            in_channels=channels,
            out_channels=mid_channels,
            bn_use_global_stats=bn_use_global_stats)
        self.main_fcs = nn.HybridSequential(prefix="")
        for i in range(num_layers - 1):
            self.main_fcs.add(DenseBlock(
                in_channels=mid_channels,
                out_channels=mid_channels,
                bn_use_global_stats=bn_use_global_stats))
        self.final_fc = nn.Dense(
            units=channels,
            in_units=mid_channels)

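The ChannelGate constructor above only builds the layers; the snippet does not include the forward pass. For orientation, the sketch below shows the squeeze-and-excitation style gating that this kind of block typically implements, written with plain Gluon layers (SimpleChannelGate, its Dense layers and the sigmoid gating are illustrative assumptions, not the original DenseBlock-based implementation):

import mxnet as mx
from mxnet import gluon
from mxnet.gluon import nn

class SimpleChannelGate(gluon.HybridBlock):
    """Illustrative squeeze-and-excitation style channel gate."""
    def __init__(self, channels, reduction_ratio=16, **kwargs):
        super(SimpleChannelGate, self).__init__(**kwargs)
        with self.name_scope():
            self.pool = nn.GlobalAvgPool2D()   # squeeze: (N, C, H, W) -> (N, C, 1, 1)
            self.fc1 = nn.Dense(channels // reduction_ratio, activation='relu')
            self.fc2 = nn.Dense(channels)

    def hybrid_forward(self, F, x):
        w = self.pool(x)
        w = self.fc2(self.fc1(w))                 # excitation
        w = F.sigmoid(w).reshape((0, 0, 1, 1))    # per-channel weight in [0, 1]
        return F.broadcast_mul(x, w)              # re-weight the input channels

gate = SimpleChannelGate(channels=64)
gate.initialize()
x = mx.nd.random.uniform(shape=(2, 64, 16, 16))
print(gate(x).shape)   # (2, 64, 16, 16)
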
Example 9: __init__
# Required import: from mxnet.gluon import nn [as alias]
# Or: from mxnet.gluon.nn import GlobalAvgPool2D [as alias]
def __init__(self, layers, cardinality, bottleneck_width,
             classes=1000, last_gamma=False, use_se=False,
             norm_layer=BatchNorm, norm_kwargs=None, **kwargs):
    super(ResNext, self).__init__(**kwargs)
    self.cardinality = cardinality
    self.bottleneck_width = bottleneck_width
    channels = 64

    with self.name_scope():
        self.features = nn.HybridSequential(prefix='')
        self.features.add(nn.Conv2D(channels, 7, 2, 3, use_bias=False))
        self.features.add(norm_layer(**({} if norm_kwargs is None else norm_kwargs)))
        self.features.add(nn.Activation('relu'))
        self.features.add(nn.MaxPool2D(3, 2, 1))

        for i, num_layer in enumerate(layers):
            stride = 1 if i == 0 else 2
            self.features.add(self._make_layer(channels, num_layer, stride,
                                               last_gamma, use_se, i+1,
                                               norm_layer=norm_layer, norm_kwargs=norm_kwargs))
            channels *= 2
        self.features.add(nn.GlobalAvgPool2D())

        self.output = nn.Dense(classes)

Example 10: __init__
# Required import: from mxnet.gluon import nn [as alias]
# Or: from mxnet.gluon.nn import GlobalAvgPool2D [as alias]
def __init__(self, block, layers, channels, drop_rate, classes=10,
             norm_layer=BatchNorm, norm_kwargs=None, **kwargs):
    super(CIFARWideResNet, self).__init__(**kwargs)
    assert len(layers) == len(channels) - 1
    with self.name_scope():
        self.features = nn.HybridSequential(prefix='')
        self.features.add(norm_layer(scale=False, center=False,
                                     **({} if norm_kwargs is None else norm_kwargs)))
        self.features.add(nn.Conv2D(channels[0], 3, 1, 1, use_bias=False))
        self.features.add(norm_layer(**({} if norm_kwargs is None else norm_kwargs)))

        in_channels = channels[0]
        for i, num_layer in enumerate(layers):
            stride = 1 if i == 0 else 2
            self.features.add(self._make_layer(block, num_layer, channels[i+1], drop_rate,
                                               stride, i+1, in_channels=in_channels,
                                               norm_layer=norm_layer, norm_kwargs=norm_kwargs))
            in_channels = channels[i+1]

        self.features.add(norm_layer(**({} if norm_kwargs is None else norm_kwargs)))
        self.features.add(nn.Activation('relu'))
        self.features.add(nn.GlobalAvgPool2D())
        self.features.add(nn.Flatten())

        self.output = nn.Dense(classes)

Example 11: __init__
# Required import: from mxnet.gluon import nn [as alias]
# Or: from mxnet.gluon.nn import GlobalAvgPool2D [as alias]
def __init__(self, multiplier=1.0, classes=1000,
             norm_layer=BatchNorm, norm_kwargs=None, **kwargs):
    super(MobileNet, self).__init__(**kwargs)
    with self.name_scope():
        self.features = nn.HybridSequential(prefix='')
        with self.features.name_scope():
            _add_conv(self.features, channels=int(32 * multiplier), kernel=3, pad=1, stride=2,
                      in_channels=3, norm_layer=BatchNorm, norm_kwargs=None, quantized=False)
            dw_channels = [int(x * multiplier) for x in
                           [32, 64] + [128] * 2 + [256] * 2 + [512] * 6 + [1024]]
            channels = [int(x * multiplier) for x in
                        [64] + [128] * 2 + [256] * 2 + [512] * 6 + [1024] * 2]
            strides = [1, 2] * 3 + [1] * 5 + [2, 1]
            for dwc, c, s in zip(dw_channels, channels, strides):
                _add_conv_dw(self.features, dw_channels=dwc, channels=c, stride=s,
                             norm_layer=BatchNorm, norm_kwargs=None)
            self.features.add(nn.GlobalAvgPool2D())
            self.features.add(nn.Flatten())

        self.output = nn.Dense(classes)

Example 12: __init__
# Required import: from mxnet.gluon import nn [as alias]
# Or: from mxnet.gluon.nn import GlobalAvgPool2D [as alias]
def __init__(self, **kwargs):
    super(ResNet50V2, self).__init__(**kwargs)
    with self.name_scope():
        self.rescale = nn.HybridSequential(prefix='')
        self.rescale.add(Rescale(prefix=''))
        self.layer0 = nn.HybridSequential(prefix='')
        self.layer0.add(nn.BatchNorm(scale=False, epsilon=2e-5, use_global_stats=True))
        self.layer0.add(nn.Conv2D(64, 7, 2, 3, use_bias=False))
        self.layer0.add(nn.BatchNorm(epsilon=2e-5, use_global_stats=True))
        self.layer0.add(nn.Activation('relu'))
        self.layer0.add(nn.MaxPool2D(3, 2, 1))

        self.layer1 = self._make_layer(stage_index=1, layers=3, in_channels=64,
                                       channels=256, stride=1)
        self.layer2 = self._make_layer(stage_index=2, layers=4, in_channels=256,
                                       channels=512, stride=2)
        self.layer3 = self._make_layer(stage_index=3, layers=6, in_channels=512,
                                       channels=1024, stride=2)
        self.layer4 = self._make_layer(stage_index=4, layers=3, in_channels=1024,
                                       channels=2048, stride=1)
        self.layer4.add(nn.BatchNorm(epsilon=2e-5, use_global_stats=True))
        self.layer4.add(nn.Activation('relu'))
        # self.layer4.add(nn.GlobalAvgPool2D())
        # self.layer4.add(nn.Flatten())

Example 13: resnet18
# Required import: from mxnet.gluon import nn [as alias]
# Or: from mxnet.gluon.nn import GlobalAvgPool2D [as alias]
def resnet18(num_classes):
    """The ResNet-18 model."""
    net = nn.Sequential()
    net.add(nn.Conv2D(64, kernel_size=3, strides=1, padding=1),
            nn.BatchNorm(), nn.Activation('relu'))

    def resnet_block(num_channels, num_residuals, first_block=False):
        blk = nn.Sequential()
        for i in range(num_residuals):
            if i == 0 and not first_block:
                blk.add(Residual(num_channels, use_1x1conv=True, strides=2))
            else:
                blk.add(Residual(num_channels))
        return blk

    net.add(resnet_block(64, 2, first_block=True),
            resnet_block(128, 2),
            resnet_block(256, 2),
            resnet_block(512, 2))
    net.add(nn.GlobalAvgPool2D(), nn.Dense(num_classes))
    return net

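Both ResNet-18 examples (Example 7 and this one) put the Dense classifier directly behind nn.GlobalAvgPool2D, so the classifier always receives one value per channel no matter what the input resolution is. A minimal self-contained sketch of that property (the tiny three-layer network below is illustrative, not the ResNet-18 defined above):

import mxnet as mx
from mxnet.gluon import nn

net = nn.Sequential()
net.add(nn.Conv2D(16, kernel_size=3, padding=1, activation='relu'),
        nn.GlobalAvgPool2D(),   # (N, 16, H, W) -> (N, 16, 1, 1)
        nn.Dense(10))
net.initialize()

for size in (32, 96):           # two different input resolutions
    x = mx.nd.random.uniform(shape=(1, 3, size, size))
    print(net(x).shape)         # (1, 10) in both cases
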
Example 14: test_reshape_pooling2d
# Required import: from mxnet.gluon import nn [as alias]
# Or: from mxnet.gluon.nn import GlobalAvgPool2D [as alias]
def test_reshape_pooling2d():
    max_pooling = nn.MaxPool2D(strides=(2, 3), padding=(1, 1))
    avg_pooling = nn.AvgPool2D(strides=(2, 2), padding=(1, 1))
    global_maxpooling = nn.GlobalMaxPool2D()
    global_avgpooling = nn.GlobalAvgPool2D()
    pooling_layers = [max_pooling, avg_pooling, global_maxpooling, global_avgpooling]

    class Net(gluon.HybridBlock):
        def __init__(self,
                     shape,
                     pooling_layer,
                     **kwargs):
            super(Net, self).__init__(**kwargs)
            with self.name_scope():
                self.reshape = shape
                self.pool0 = pooling_layer

        def hybrid_forward(self, F, x):
            x_reshape = x.reshape(self.reshape)
            out = self.pool0(x_reshape)
            return out

    x = mx.nd.random.uniform(shape=(4, 32, 32, 32))
    shape = (4, 64, 64, -1)
    for i in range(len(pooling_layers)):
        net = Net(shape, pooling_layers[i])
        check_layer_forward_withinput(net, x)

Example 15: test_slice_pooling2d
# Required import: from mxnet.gluon import nn [as alias]
# Or: from mxnet.gluon.nn import GlobalAvgPool2D [as alias]
def test_slice_pooling2d():
    max_pooling = nn.MaxPool2D(strides=(2, 3), padding=(1, 1))
    avg_pooling = nn.AvgPool2D(strides=(2, 2), padding=(1, 1))
    global_maxpooling = nn.GlobalMaxPool2D()
    global_avgpooling = nn.GlobalAvgPool2D()
    pooling_layers = [max_pooling, avg_pooling, global_maxpooling, global_avgpooling]

    class Net(gluon.HybridBlock):
        def __init__(self,
                     slice,
                     pooling_layer,
                     **kwargs):
            super(Net, self).__init__(**kwargs)
            with self.name_scope():
                self.slice = slice
                self.pool0 = pooling_layer

        def hybrid_forward(self, F, x):
            x_slice = x.slice(begin=self.slice[0], end=self.slice[1])
            out = self.pool0(x_slice)
            return out

    x = mx.nd.random.uniform(shape=(16, 128, 256, 256))
    slice = [(0, 0, 0, 0), (4, 16, 32, 64)]
    for i in range(len(pooling_layers)):
        net = Net(slice, pooling_layers[i])
        check_layer_forward_withinput(net, x)