This page collects typical usage examples of the Python method lasagne.layers.GlobalPoolLayer. If you are wondering what layers.GlobalPoolLayer does and how to use it, the curated code examples below may help. You can also explore the other methods of the lasagne.layers module.
The following shows 5 code examples of layers.GlobalPoolLayer, ordered by popularity.
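Before the examples, a minimal sketch (not part of the listing below) of what GlobalPoolLayer does: it pools over all trailing spatial dimensions, so a (batch, channels, height, width) input collapses to (batch, channels); the default pool_function is theano.tensor.mean.

import theano.tensor as T
from lasagne.layers import InputLayer, GlobalPoolLayer, get_output_shape

l_in = InputLayer((None, 64, 8, 8))                 # batch x channels x H x W
l_avg = GlobalPoolLayer(l_in)                       # mean over the 8x8 spatial grid
l_max = GlobalPoolLayer(l_in, pool_function=T.max)  # max-pooling variant
print(get_output_shape(l_avg))                      # (None, 64)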
Example 1: ResNet_FullPreActivation
# Required imports: from lasagne import layers [as alias]
# Or: from lasagne.layers import GlobalPoolLayer [as alias]
def ResNet_FullPreActivation(input_shape=(None, 3, PIXELS, PIXELS), input_var=None, n_classes=10, n=18):
    """
    Adapted from https://github.com/Lasagne/Recipes/tree/master/papers/deep_residual_learning.
    Tweaked to be consistent with 'Identity Mappings in Deep Residual Networks',
    Kaiming He et al. 2016 (https://arxiv.org/abs/1603.05027).

    Formula to figure out depth: 6n + 2
    """
    # Building the network
    l_in = InputLayer(shape=input_shape, input_var=input_var)

    # first layer, output is 16 x 32 x 32
    l = batch_norm(ConvLayer(l_in, num_filters=16, filter_size=(3, 3), stride=(1, 1), nonlinearity=rectify, pad='same', W=he_norm))

    # first stack of residual blocks, output is 16 x 32 x 32
    l = residual_block(l, first=True)
    for _ in range(1, n):
        l = residual_block(l)

    # second stack of residual blocks, output is 32 x 16 x 16
    l = residual_block(l, increase_dim=True)
    for _ in range(1, n):
        l = residual_block(l)

    # third stack of residual blocks, output is 64 x 8 x 8
    l = residual_block(l, increase_dim=True)
    for _ in range(1, n):
        l = residual_block(l)

    bn_post_conv = BatchNormLayer(l)
    bn_post_relu = NonlinearityLayer(bn_post_conv, rectify)

    # average pooling
    avg_pool = GlobalPoolLayer(bn_post_relu)

    # fully connected layer
    network = DenseLayer(avg_pool, num_units=n_classes, W=HeNormal(), nonlinearity=softmax)

    return network
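The residual_block helper is not shown in this listing. The following is a hypothetical reconstruction along the lines of the Lasagne Recipes pre-activation ResNet (BN -> ReLU -> conv ordering); the he_norm initializer used above is also assumed and defined here.

from lasagne.layers import (BatchNormLayer, NonlinearityLayer, ElemwiseSumLayer,
                            Conv2DLayer as ConvLayer, batch_norm)
from lasagne.nonlinearities import rectify
from lasagne.init import HeNormal

he_norm = HeNormal(gain='relu')

def residual_block(l, increase_dim=False, first=False):
    input_num_filters = l.output_shape[1]
    if increase_dim:
        first_stride = (2, 2)                    # halve the spatial size ...
        out_num_filters = input_num_filters * 2  # ... and double the filters
    else:
        first_stride = (1, 1)
        out_num_filters = input_num_filters

    if first:
        # the first block follows a batch-normalized conv, so skip pre-activation
        bn_pre_relu = l
    else:
        bn_pre_relu = NonlinearityLayer(BatchNormLayer(l), rectify)

    conv_1 = batch_norm(ConvLayer(bn_pre_relu, num_filters=out_num_filters,
                                  filter_size=(3, 3), stride=first_stride,
                                  nonlinearity=rectify, pad='same', W=he_norm))
    conv_2 = ConvLayer(conv_1, num_filters=out_num_filters, filter_size=(3, 3),
                       stride=(1, 1), nonlinearity=None, pad='same', W=he_norm)

    if increase_dim:
        # 1x1 projection shortcut to match the halved size / doubled filters
        projection = ConvLayer(bn_pre_relu, num_filters=out_num_filters,
                               filter_size=(1, 1), stride=(2, 2),
                               nonlinearity=None, pad='same', b=None)
        return ElemwiseSumLayer([conv_2, projection])
    return ElemwiseSumLayer([conv_2, l])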
Example 2: build_model
# Required imports: from lasagne import layers [as alias]
# Or: from lasagne.layers import GlobalPoolLayer [as alias]
def build_model():
    net = {}
    net['input'] = InputLayer((None, 3, None, None))
    net['conv1/7x7_s2'] = ConvLayer(
        net['input'], 64, 7, stride=2, pad=3, flip_filters=False)
    net['pool1/3x3_s2'] = PoolLayer(
        net['conv1/7x7_s2'], pool_size=3, stride=2, ignore_border=False)
    net['pool1/norm1'] = LRNLayer(net['pool1/3x3_s2'], alpha=0.00002, k=1)
    net['conv2/3x3_reduce'] = ConvLayer(
        net['pool1/norm1'], 64, 1, flip_filters=False)
    net['conv2/3x3'] = ConvLayer(
        net['conv2/3x3_reduce'], 192, 3, pad=1, flip_filters=False)
    net['conv2/norm2'] = LRNLayer(net['conv2/3x3'], alpha=0.00002, k=1)
    net['pool2/3x3_s2'] = PoolLayer(
        net['conv2/norm2'], pool_size=3, stride=2, ignore_border=False)

    net.update(build_inception_module('inception_3a',
                                      net['pool2/3x3_s2'],
                                      [32, 64, 96, 128, 16, 32]))
    net.update(build_inception_module('inception_3b',
                                      net['inception_3a/output'],
                                      [64, 128, 128, 192, 32, 96]))
    net['pool3/3x3_s2'] = PoolLayer(
        net['inception_3b/output'], pool_size=3, stride=2, ignore_border=False)

    net.update(build_inception_module('inception_4a',
                                      net['pool3/3x3_s2'],
                                      [64, 192, 96, 208, 16, 48]))
    net.update(build_inception_module('inception_4b',
                                      net['inception_4a/output'],
                                      [64, 160, 112, 224, 24, 64]))
    net.update(build_inception_module('inception_4c',
                                      net['inception_4b/output'],
                                      [64, 128, 128, 256, 24, 64]))
    net.update(build_inception_module('inception_4d',
                                      net['inception_4c/output'],
                                      [64, 112, 144, 288, 32, 64]))
    net.update(build_inception_module('inception_4e',
                                      net['inception_4d/output'],
                                      [128, 256, 160, 320, 32, 128]))
    net['pool4/3x3_s2'] = PoolLayer(
        net['inception_4e/output'], pool_size=3, stride=2, ignore_border=False)

    net.update(build_inception_module('inception_5a',
                                      net['pool4/3x3_s2'],
                                      [128, 256, 160, 320, 32, 128]))
    net.update(build_inception_module('inception_5b',
                                      net['inception_5a/output'],
                                      [128, 384, 192, 384, 48, 128]))

    net['pool5/7x7_s1'] = GlobalPoolLayer(net['inception_5b/output'])
    net['loss3/classifier'] = DenseLayer(net['pool5/7x7_s1'],
                                         num_units=1000,
                                         nonlinearity=linear)
    net['prob'] = NonlinearityLayer(net['loss3/classifier'],
                                    nonlinearity=softmax)
    return net
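build_inception_module is defined elsewhere in the GoogLeNet recipe this example comes from. Roughly, it wires up the four parallel branches of an inception module, with nfilters ordered as (pool_proj, 1x1, 3x3_reduce, 3x3, 5x5_reduce, 5x5). A sketch under those assumptions (using the stock Pool2DLayer in place of any DNN-specific pooling layer):

from lasagne.layers import Conv2DLayer as ConvLayer, Pool2DLayer, ConcatLayer

def build_inception_module(name, input_layer, nfilters):
    # nfilters: (pool_proj, 1x1, 3x3_reduce, 3x3, 5x5_reduce, 5x5)
    net = {}
    net['pool'] = Pool2DLayer(input_layer, pool_size=3, stride=1, pad=1)
    net['pool_proj'] = ConvLayer(net['pool'], nfilters[0], 1, flip_filters=False)
    net['1x1'] = ConvLayer(input_layer, nfilters[1], 1, flip_filters=False)
    net['3x3_reduce'] = ConvLayer(input_layer, nfilters[2], 1, flip_filters=False)
    net['3x3'] = ConvLayer(net['3x3_reduce'], nfilters[3], 3, pad=1, flip_filters=False)
    net['5x5_reduce'] = ConvLayer(input_layer, nfilters[4], 1, flip_filters=False)
    net['5x5'] = ConvLayer(net['5x5_reduce'], nfilters[5], 5, pad=2, flip_filters=False)
    # concatenate the four branches along the channel axis
    net['output'] = ConcatLayer([net['1x1'], net['3x3'], net['5x5'], net['pool_proj']])
    return {'{}/{}'.format(name, k): v for k, v in net.items()}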
Example 3: ResNet_BottleNeck_FullPreActivation
# Required imports: from lasagne import layers [as alias]
# Or: from lasagne.layers import GlobalPoolLayer [as alias]
def ResNet_BottleNeck_FullPreActivation(input_shape=(None, 3, PIXELS, PIXELS), input_var=None, n_classes=10, n=18):
    '''
    Adapted from https://github.com/Lasagne/Recipes/tree/master/papers/deep_residual_learning.
    Tweaked to be consistent with 'Identity Mappings in Deep Residual Networks',
    Kaiming He et al. 2016 (https://arxiv.org/abs/1603.05027), judging from
    https://github.com/KaimingHe/resnet-1k-layers/blob/master/resnet-pre-act.lua.

    The number of filters goes 16 -> 64 -> 128 -> 256.
    Formula to figure out depth: 9n + 2
    '''
    # Building the network
    l_in = InputLayer(shape=input_shape, input_var=input_var)

    # first layer, output is 16x16x16
    l = batch_norm(ConvLayer(l_in, num_filters=16, filter_size=(3, 3), stride=(1, 1), nonlinearity=rectify, pad='same', W=he_norm))

    # first stack of residual blocks, output is 64x16x16
    l = residual_bottleneck_block(l, first=True)
    for _ in range(1, n):
        l = residual_bottleneck_block(l)

    # second stack of residual blocks, output is 128x8x8
    l = residual_bottleneck_block(l, increase_dim=True)
    for _ in range(1, n):
        l = residual_bottleneck_block(l)

    # third stack of residual blocks, output is 256x4x4
    l = residual_bottleneck_block(l, increase_dim=True)
    for _ in range(1, n):
        l = residual_bottleneck_block(l)

    bn_post_conv = BatchNormLayer(l)
    bn_post_relu = NonlinearityLayer(bn_post_conv, rectify)

    # average pooling
    avg_pool = GlobalPoolLayer(bn_post_relu)

    # fully connected layer
    network = DenseLayer(avg_pool, num_units=n_classes, W=HeNormal(), nonlinearity=softmax)

    return network
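A minimal usage sketch for these ResNet constructors (assuming PIXELS = 32, and that the residual-block helpers are defined): build the network, then compile a Theano prediction function.

import numpy as np
import theano
import theano.tensor as T
import lasagne

X = T.tensor4('X')
network = ResNet_BottleNeck_FullPreActivation(input_var=X, n_classes=10, n=2)
prediction = lasagne.layers.get_output(network, deterministic=True)
predict_fn = theano.function([X], prediction)

dummy = np.zeros((1, 3, 32, 32), dtype=theano.config.floatX)
print(predict_fn(dummy).shape)   # (1, 10)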
Example 4: ResNet_FullPre_Wide
# Required imports: from lasagne import layers [as alias]
# Or: from lasagne.layers import GlobalPoolLayer [as alias]
def ResNet_FullPre_Wide(input_shape=(None, 3, PIXELS, PIXELS), input_var=None, n_classes=10, n=6, k=4):
    """
    Adapted from https://github.com/Lasagne/Recipes/tree/master/papers/deep_residual_learning.
    Tweaked to be consistent with 'Identity Mappings in Deep Residual Networks',
    Kaiming He et al. 2016 (https://arxiv.org/abs/1603.05027), and
    'Wide Residual Networks', Sergey Zagoruyko, Nikos Komodakis 2016
    (http://arxiv.org/pdf/1605.07146v1.pdf).

    Depth = 6n + 2
    """
    n_filters = {0: 16, 1: 16 * k, 2: 32 * k, 3: 64 * k}

    # Building the network
    l_in = InputLayer(shape=input_shape, input_var=input_var)

    # first layer, output is 16 x 64 x 64
    l = batch_norm(ConvLayer(l_in, num_filters=n_filters[0], filter_size=(3, 3), stride=(1, 1), nonlinearity=rectify, pad='same', W=he_norm))

    # first stack of residual blocks, output is 32 x 64 x 64
    l = residual_wide_block(l, first=True, filters=n_filters[1])
    for _ in range(1, n):
        l = residual_wide_block(l, filters=n_filters[1])

    # second stack of residual blocks, output is 64 x 32 x 32
    l = residual_wide_block(l, increase_dim=True, filters=n_filters[2])
    for _ in range(1, (n + 2)):
        l = residual_wide_block(l, filters=n_filters[2])

    # third stack of residual blocks, output is 128 x 16 x 16
    l = residual_wide_block(l, increase_dim=True, filters=n_filters[3])
    for _ in range(1, (n + 2)):
        l = residual_wide_block(l, filters=n_filters[3])

    bn_post_conv = BatchNormLayer(l)
    bn_post_relu = NonlinearityLayer(bn_post_conv, rectify)

    # average pooling
    avg_pool = GlobalPoolLayer(bn_post_relu)

    # fully connected layer
    network = DenseLayer(avg_pool, num_units=n_classes, W=HeNormal(), nonlinearity=softmax)

    return network
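To see how the width multiplier k trades parameters for depth, one can compare parameter counts (again a sketch; PIXELS must be defined, e.g. 64 to match the feature-map sizes in the comments above):

import lasagne

wide = ResNet_FullPre_Wide(n=6, k=4)
thin = ResNet_FullPre_Wide(n=6, k=1)
print(lasagne.layers.count_params(wide))   # conv parameters grow roughly with k**2
print(lasagne.layers.count_params(thin))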
Example 5: build_resnet_model
# Required imports: from lasagne import layers [as alias]
# Or: from lasagne.layers import GlobalPoolLayer [as alias]
def build_resnet_model():
    log.i('BUILDING RESNET MODEL...')

    # Random Seed
    lasagne_random.set_rng(cfg.getRandomState())

    # Input layer for images
    net = l.InputLayer((None, cfg.IM_DIM, cfg.IM_SIZE[1], cfg.IM_SIZE[0]))

    # First Convolution
    net = l.Conv2DLayer(net,
                        num_filters=cfg.FILTERS[0],
                        filter_size=cfg.KERNEL_SIZES[0],
                        pad='same',
                        W=initialization(cfg.NONLINEARITY),
                        nonlinearity=None)
    log.i(("\tFIRST CONV OUT SHAPE:", l.get_output_shape(net), "LAYER:", len(l.get_all_layers(net)) - 1))

    # Residual Stacks
    for i in range(0, len(cfg.FILTERS)):
        net = resblock(net, filters=cfg.FILTERS[i] * cfg.RESNET_K, kernel_size=cfg.KERNEL_SIZES[i], stride=2, num_groups=cfg.NUM_OF_GROUPS[i])
        for _ in range(1, cfg.RESNET_N):
            net = resblock(net, filters=cfg.FILTERS[i] * cfg.RESNET_K, kernel_size=cfg.KERNEL_SIZES[i], num_groups=cfg.NUM_OF_GROUPS[i], preactivated=False)
        log.i(("\tRES STACK", i + 1, "OUT SHAPE:", l.get_output_shape(net), "LAYER:", len(l.get_all_layers(net)) - 1))

    # Post Activation
    net = batch_norm(net)
    net = l.NonlinearityLayer(net, nonlinearity=nonlinearity(cfg.NONLINEARITY))

    # Pooling
    net = l.GlobalPoolLayer(net)
    log.i(("\tFINAL POOLING SHAPE:", l.get_output_shape(net), "LAYER:", len(l.get_all_layers(net)) - 1))

    # Classification Layer
    net = l.DenseLayer(net, len(cfg.CLASSES), nonlinearity=nonlinearity('identity'), W=initialization('identity'))
    net = l.NonlinearityLayer(net, nonlinearity=nonlinearity('softmax'))
    log.i(("\tFINAL NET OUT SHAPE:", l.get_output_shape(net), "LAYER:", len(l.get_all_layers(net))))
    log.i("...DONE!")

    # Model stats
    log.i(("MODEL HAS", sum(hasattr(layer, 'W') for layer in l.get_all_layers(net)), "WEIGHTED LAYERS"))
    log.i(("MODEL HAS", l.count_params(net), "PARAMS"))

    return net
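Example 5 is driven entirely by an external cfg module that is not part of this listing. For orientation only, a hypothetical configuration with the fields the function reads might look like the following; every value is made up for illustration.

# Hypothetical cfg values -- all numbers are illustrative, not from the source repo
IM_DIM = 3                                       # input channels
IM_SIZE = (512, 256)                             # input (width, height)
FILTERS = [16, 32, 64, 128]                      # base filters per residual stack
KERNEL_SIZES = [(5, 5), (3, 3), (3, 3), (3, 3)]  # kernel size per stack
NUM_OF_GROUPS = [1, 1, 1, 1]                     # grouped-convolution factor per stack
RESNET_K = 2                                     # width multiplier
RESNET_N = 3                                     # residual blocks per stack
NONLINEARITY = 'relu'
CLASSES = ['class_a', 'class_b']                 # one output unit per class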
################## RASPBERRY PI NET #####################