This page collects typical usage examples of the Python method tensorflow.contrib.slim.python.slim.nets.resnet_v1.resnet_v1. If you are wondering what exactly resnet_v1.resnet_v1 does, how to call it, or what real code that uses it looks like, the curated examples below should help. You can also browse the module that hosts the method, tensorflow.contrib.slim.python.slim.nets.resnet_v1, for related usage examples.
Below are 15 code examples of resnet_v1.resnet_v1, ordered by popularity.
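Before the examples, here is a minimal sketch (not taken from any of the projects below) of how resnet_v1.resnet_v1 is typically wired up: the network is described as a list of resnet_utils.Block objects and passed, together with an input tensor, to the function, which returns the final feature tensor and an end_points dict. The placeholder shape, block sizes and the 'resnet_v1_demo' scope are illustrative assumptions; the call follows the TF 1.x contrib API used throughout the examples.
import tensorflow as tf
from tensorflow.contrib import slim
from tensorflow.contrib.slim.python.slim.nets import resnet_utils, resnet_v1

images = tf.placeholder(tf.float32, [None, 224, 224, 3])
bottleneck = resnet_v1.bottleneck
# Each tuple is (depth, depth_bottleneck, stride) for one residual unit.
blocks = [
    resnet_utils.Block('block1', bottleneck, [(256, 64, 1)] * 2 + [(256, 64, 2)]),
    resnet_utils.Block('block2', bottleneck, [(512, 128, 1)] * 2),
]
with slim.arg_scope(resnet_utils.resnet_arg_scope()):
    net, end_points = resnet_v1.resnet_v1(images, blocks,
                                          num_classes=None,
                                          global_pool=True,
                                          include_root_block=True,
                                          scope='resnet_v1_demo')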
Example 1: _resnet_small
# Required import: from tensorflow.contrib.slim.python.slim.nets import resnet_v1 [as alias]
# Or: from tensorflow.contrib.slim.python.slim.nets.resnet_v1 import resnet_v1 [as alias]
def _resnet_small(self,
inputs,
num_classes=None,
global_pool=True,
output_stride=None,
include_root_block=True,
reuse=None,
scope='resnet_v1_small'):
"""A shallow and thin ResNet v1 for faster tests."""
bottleneck = resnet_v1.bottleneck
blocks = [
resnet_utils.Block('block1', bottleneck, [(4, 1, 1)] * 2 + [(4, 1, 2)]),
resnet_utils.Block('block2', bottleneck, [(8, 2, 1)] * 2 + [(8, 2, 2)]),
resnet_utils.Block('block3', bottleneck,
[(16, 4, 1)] * 2 + [(16, 4, 2)]),
resnet_utils.Block('block4', bottleneck, [(32, 8, 1)] * 2)
]
return resnet_v1.resnet_v1(inputs, blocks, num_classes, global_pool,
output_stride, include_root_block, reuse, scope)
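A hedged usage sketch for the helper above, written as if inside another method of the same test class (create_test_input, arg_scope and resnet_utils come from that test module and are assumed here):
inputs = create_test_input(2, 224, 224, 3)
with arg_scope(resnet_utils.resnet_arg_scope()):
    logits, end_points = self._resnet_small(inputs, num_classes=10,
                                            scope='resnet_small_usage')
# With global_pool=True and num_classes set, the contrib implementation keeps a
# [2, 1, 1, 10] logits shape and typically also exposes a 'predictions' softmax end point.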
Example 2: _head_to_tail
# Required import: from tensorflow.contrib.slim.python.slim.nets import resnet_v1 [as alias]
# Or: from tensorflow.contrib.slim.python.slim.nets.resnet_v1 import resnet_v1 [as alias]
def _head_to_tail(self, pool5, is_training, reuse=None):
with slim.arg_scope(resnet_arg_scope(is_training=is_training)):
fc7_bef_pool, _ = resnet_v1.resnet_v1(pool5,
self._blocks[-1:],
global_pool=False,
include_root_block=False,
reuse=reuse,
scope=self._resnet_scope)
# average pooling done by reduce_mean
fc7 = tf.reduce_mean(fc7_bef_pool, axis=[1, 2])
if cfg.DEBUG_ALL:
self._for_debug['fc7'] = fc7
self._for_debug['fc7_before_pool'] = fc7_bef_pool
return fc7
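As the comment notes, taking reduce_mean over the spatial axes is simply a global average pool; a small illustrative check of that equivalence (shapes chosen arbitrarily):
x = tf.random_normal([4, 7, 7, 2048])
gap_via_reduce = tf.reduce_mean(x, axis=[1, 2])                      # shape [4, 2048]
gap_via_pool = tf.squeeze(slim.avg_pool2d(x, [7, 7]), axis=[1, 2])   # shape [4, 2048]
# Both tensors hold the per-channel mean over the 7x7 feature map.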
Example 3: res5
# Required import: from tensorflow.contrib.slim.python.slim.nets import resnet_v1 [as alias]
# Or: from tensorflow.contrib.slim.python.slim.nets.resnet_v1 import resnet_v1 [as alias]
def res5(self, pool5_H, pool5_O, sp, is_training, name):
with slim.arg_scope(resnet_arg_scope(is_training=is_training)):
fc7_H, _ = resnet_v1.resnet_v1(pool5_H,
self.blocks[-2:-1],
global_pool=False,
include_root_block=False,
reuse=False,
scope=self.scope)
fc7_H = tf.reduce_mean(fc7_H, axis=[1, 2])
fc7_O, _ = resnet_v1.resnet_v1(pool5_O,
self.blocks[-1:],
global_pool=False,
include_root_block=False,
reuse=False,
scope=self.scope)
fc7_O = tf.reduce_mean(fc7_O, axis=[1, 2])
return fc7_H, fc7_O
Example 4: image_to_head
# Required import: from tensorflow.contrib.slim.python.slim.nets import resnet_v1 [as alias]
# Or: from tensorflow.contrib.slim.python.slim.nets.resnet_v1 import resnet_v1 [as alias]
def image_to_head(self, is_training):
with slim.arg_scope(resnet_arg_scope(is_training=False)):
net = self.build_base()
net, _ = resnet_v1.resnet_v1(net,
self.blocks[0:cfg.RESNET.FIXED_BLOCKS],
global_pool=False,
include_root_block=False,
scope=self.scope)
with slim.arg_scope(resnet_arg_scope(is_training=is_training)):
head, _ = resnet_v1.resnet_v1(net,
self.blocks[cfg.RESNET.FIXED_BLOCKS:-2],
global_pool=False,
include_root_block=False,
scope=self.scope)
return head
Example 5: res5
# Required import: from tensorflow.contrib.slim.python.slim.nets import resnet_v1 [as alias]
# Or: from tensorflow.contrib.slim.python.slim.nets.resnet_v1 import resnet_v1 [as alias]
def res5(self, pool5_H, pool5_O, sp, is_training, name):
with slim.arg_scope(resnet_arg_scope(is_training=is_training)):
fc7_H, _ = resnet_v1.resnet_v1(pool5_H, # H input, one block
self.blocks[-2:-1],
global_pool=False,
include_root_block=False,
reuse=False,
scope=self.scope)
fc7_H = tf.reduce_mean(fc7_H, axis=[1, 2])
fc7_O, _ = resnet_v1.resnet_v1(pool5_O, # O input, one block
self.blocks[-1:],
global_pool=False,
include_root_block=False,
reuse=False,
scope=self.scope)
fc7_O = tf.reduce_mean(fc7_O, axis=[1, 2])
return fc7_H, fc7_O
Example 6: testEndPointsV1
# Required import: from tensorflow.contrib.slim.python.slim.nets import resnet_v1 [as alias]
# Or: from tensorflow.contrib.slim.python.slim.nets.resnet_v1 import resnet_v1 [as alias]
def testEndPointsV1(self):
"""Test the end points of a tiny v1 bottleneck network."""
bottleneck = resnet_v1.bottleneck
blocks = [
resnet_utils.Block('block1', bottleneck, [(4, 1, 1), (4, 1, 2)]),
resnet_utils.Block('block2', bottleneck, [(8, 2, 1), (8, 2, 1)])
]
inputs = create_test_input(2, 32, 16, 3)
with arg_scope(resnet_utils.resnet_arg_scope()):
_, end_points = self._resnet_plain(inputs, blocks, scope='tiny')
expected = [
'tiny/block1/unit_1/bottleneck_v1/shortcut',
'tiny/block1/unit_1/bottleneck_v1/shortcut/BatchNorm',
'tiny/block1/unit_1/bottleneck_v1/conv1',
'tiny/block1/unit_1/bottleneck_v1/conv2',
'tiny/block1/unit_1/bottleneck_v1/conv3',
'tiny/block1/unit_1/bottleneck_v1/conv3/BatchNorm',
'tiny/block1/unit_2/bottleneck_v1/conv1',
'tiny/block1/unit_2/bottleneck_v1/conv2',
'tiny/block1/unit_2/bottleneck_v1/conv3',
'tiny/block1/unit_2/bottleneck_v1/conv3/BatchNorm',
'tiny/block2/unit_1/bottleneck_v1/shortcut',
'tiny/block2/unit_1/bottleneck_v1/shortcut/BatchNorm',
'tiny/block2/unit_1/bottleneck_v1/conv1',
'tiny/block2/unit_1/bottleneck_v1/conv2',
'tiny/block2/unit_1/bottleneck_v1/conv3',
'tiny/block2/unit_1/bottleneck_v1/conv3/BatchNorm',
'tiny/block2/unit_2/bottleneck_v1/conv1',
'tiny/block2/unit_2/bottleneck_v1/conv2',
'tiny/block2/unit_2/bottleneck_v1/conv3',
'tiny/block2/unit_2/bottleneck_v1/conv3/BatchNorm'
]
self.assertItemsEqual(expected, end_points)
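A small hedged follow-up inside the same test: with the names asserted above, an individual activation can be pulled straight out of the returned dict, for example the 3x3 convolution of the second unit in block1.
conv2_unit2 = end_points['tiny/block1/unit_2/bottleneck_v1/conv2']
print(conv2_unit2.get_shape())   # a 4-D feature map from the tiny network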
Example 7: testAtrousValuesBottleneck
# Required import: from tensorflow.contrib.slim.python.slim.nets import resnet_v1 [as alias]
# Or: from tensorflow.contrib.slim.python.slim.nets.resnet_v1 import resnet_v1 [as alias]
def testAtrousValuesBottleneck(self):
self._atrousValues(resnet_v1.bottleneck)
Example 8: _image_to_head
# Required import: from tensorflow.contrib.slim.python.slim.nets import resnet_v1 [as alias]
# Or: from tensorflow.contrib.slim.python.slim.nets.resnet_v1 import resnet_v1 [as alias]
def _image_to_head(self, is_training, reuse=None):
assert (0 <= cfg.RESNET.FIXED_BLOCKS <= 3)
# Now the base is always fixed during training
with slim.arg_scope(resnet_arg_scope(is_training=False)):
net_conv = self._build_base()
end_points = {}
if cfg.RESNET.FIXED_BLOCKS > 0:
with slim.arg_scope(resnet_arg_scope(is_training=False)):
net_conv, end_point = resnet_v1.resnet_v1(net_conv,
self._blocks[0:cfg.RESNET.FIXED_BLOCKS],
global_pool=False,
include_root_block=False,
reuse=reuse,
scope=self._scope)
end_points.update(end_point)
if cfg.RESNET.FIXED_BLOCKS < 3:
with slim.arg_scope(resnet_arg_scope(is_training=is_training)):
net_conv, end_point = resnet_v1.resnet_v1(net_conv,
self._blocks[cfg.RESNET.FIXED_BLOCKS:],
global_pool=False,
include_root_block=False,
reuse=reuse,
scope=self._scope)
end_points.update(end_point)
self.end_points['block1'] = end_points[self._scope + '/' + 'block1']
self.end_points['block2'] = end_points[self._scope + '/' + 'block2']
self.end_points['block3'] = end_points[self._scope + '/' + 'block3']
self.end_points['block4'] = end_points[self._scope + '/' + 'block4']
self._act_summaries.append(net_conv)
self._layers['head'] = net_conv
return net_conv
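A hypothetical downstream use of the per-block activations saved above, e.g. feeding an intermediate feature map to an extra head; the stride arithmetic in the comment assumes the block strides shown in Example 15:
block3_feat = self.end_points['block3']
# With a stride-2 stem plus stride-2 block1/block2 and a stride-1 block3,
# block3_feat is roughly [N, H/16, W/16, 1024] for a ResNet-50/101 backbone.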
Example 9: _head_to_tail
# Required import: from tensorflow.contrib.slim.python.slim.nets import resnet_v1 [as alias]
# Or: from tensorflow.contrib.slim.python.slim.nets.resnet_v1 import resnet_v1 [as alias]
def _head_to_tail(self, pool5, is_training, reuse=None):
with slim.arg_scope(resnet_arg_scope(is_training=is_training)):
fc7, _ = resnet_v1.resnet_v1(pool5,
self._blocks[-1:],
global_pool=False,
include_root_block=False,
reuse=reuse,
scope=self._scope)
# average pooling done by reduce_mean
fc7 = tf.reduce_mean(fc7, axis=[1, 2])
return fc7
Example 10: _image_to_head
# Required import: from tensorflow.contrib.slim.python.slim.nets import resnet_v1 [as alias]
# Or: from tensorflow.contrib.slim.python.slim.nets.resnet_v1 import resnet_v1 [as alias]
def _image_to_head(self, is_training, reuse=None):
assert (0 <= cfg.RESNET.FIXED_BLOCKS <= 3)
# Now the base is always fixed during training
with slim.arg_scope(resnet_arg_scope(is_training=False)):
net_conv = self._build_base()
if cfg.RESNET.FIXED_BLOCKS > 0:
with slim.arg_scope(resnet_arg_scope(is_training=False)):
net_conv, _ = resnet_v1.resnet_v1(net_conv,
self._blocks[0:cfg.RESNET.FIXED_BLOCKS],
global_pool=False,
include_root_block=False,
reuse=reuse,
scope=self._scope)
if cfg.RESNET.FIXED_BLOCKS < 3:
with slim.arg_scope(resnet_arg_scope(is_training=is_training)):
net_conv, _ = resnet_v1.resnet_v1(net_conv,
self._blocks[cfg.RESNET.FIXED_BLOCKS:-1],
global_pool=False,
include_root_block=False,
reuse=reuse,
scope=self._scope)
self._act_summaries.append(net_conv)
self._layers['head'] = net_conv
return net_conv
Example 11: _image_to_head
# Required import: from tensorflow.contrib.slim.python.slim.nets import resnet_v1 [as alias]
# Or: from tensorflow.contrib.slim.python.slim.nets.resnet_v1 import resnet_v1 [as alias]
def _image_to_head(self, is_training, reuse=None):
assert (0 <= cfg.RESNET.FIXED_BLOCKS <= 3)
# Now the base is always fixed during training
with slim.arg_scope(resnet_arg_scope(is_training=False)):
net_conv = self._build_base()
print("Fixing %s blocks." % cfg.RESNET.FIXED_BLOCKS)
if cfg.RESNET.FIXED_BLOCKS > 0:
with slim.arg_scope(resnet_arg_scope(is_training=False)):
net_conv, _ = resnet_v1.resnet_v1(net_conv,
self._blocks[0:cfg.RESNET.FIXED_BLOCKS],
global_pool=False,
include_root_block=False,
reuse=reuse,
scope=self._resnet_scope)
if cfg.RESNET.FIXED_BLOCKS < 3:
with slim.arg_scope(resnet_arg_scope(is_training=is_training)):
net_conv, _ = resnet_v1.resnet_v1(net_conv,
self._blocks[cfg.RESNET.FIXED_BLOCKS:-1],
global_pool=False,
include_root_block=False,
reuse=reuse,
scope=self._resnet_scope)
self._act_summaries.append(net_conv)
self._layers['head'] = net_conv
if cfg.DEBUG_ALL:
self._for_debug['head'] = net_conv
return net_conv
Example 12: _image_to_head
# Required import: from tensorflow.contrib.slim.python.slim.nets import resnet_v1 [as alias]
# Or: from tensorflow.contrib.slim.python.slim.nets.resnet_v1 import resnet_v1 [as alias]
def _image_to_head(self, is_training, reuse=None):
assert (0 <= cfg.RESNET.FIXED_BLOCKS <= 3)
# Now the base is always fixed during training
with slim.arg_scope(resnet_arg_scope(is_training=False)):
net_conv = self._build_base()
if cfg.RESNET.FIXED_BLOCKS > 0:
with slim.arg_scope(resnet_arg_scope(is_training=False)):
net_conv, _ = resnet_v1.resnet_v1(net_conv,
self._blocks[0:cfg.RESNET.FIXED_BLOCKS],
global_pool=False,
include_root_block=False,
reuse=reuse,
scope=self._scope)
if cfg.RESNET.FIXED_BLOCKS < 3:
with slim.arg_scope(resnet_arg_scope(is_training=is_training)):
net_conv, _ = resnet_v1.resnet_v1(net_conv,
self._blocks[cfg.RESNET.FIXED_BLOCKS:-1],
global_pool=False,
include_root_block=False,
reuse=reuse,
scope=self._scope)
self._act_summaries.append(net_conv)
self._layers['head'] = net_conv
return net_conv
Example 13: _head_to_tail
# Required import: from tensorflow.contrib.slim.python.slim.nets import resnet_v1 [as alias]
# Or: from tensorflow.contrib.slim.python.slim.nets.resnet_v1 import resnet_v1 [as alias]
def _head_to_tail(self, pool5, is_training, reuse=None):
with slim.arg_scope(resnet_arg_scope(is_training=is_training)):
fc7, _ = resnet_v1.resnet_v1(pool5,
self._blocks[-1:],
global_pool=False,
include_root_block=False,
reuse=reuse,
scope=self._scope)
# average pooling done by reduce_mean
fc7 = tf.reduce_mean(fc7, axis=[1, 2])
return fc7
Example 14: resnet_arg_scope
# Required import: from tensorflow.contrib.slim.python.slim.nets import resnet_v1 [as alias]
# Or: from tensorflow.contrib.slim.python.slim.nets.resnet_v1 import resnet_v1 [as alias]
def resnet_arg_scope(is_training=True,
weight_decay=cfg.TRAIN.WEIGHT_DECAY,
batch_norm_decay=0.997,
batch_norm_epsilon=1e-5,
batch_norm_scale=True):
batch_norm_params = {
# NOTE 'is_training' here does not work because inside resnet it gets reset:
# https://github.com/tensorflow/models/blob/master/slim/nets/resnet_v1.py#L187
'is_training': False,
'decay': batch_norm_decay,
'epsilon': batch_norm_epsilon,
'scale': batch_norm_scale,
'trainable': cfg.RESNET.BN_TRAIN,
'updates_collections': ops.GraphKeys.UPDATE_OPS
}
with arg_scope(
[slim.conv2d],
weights_regularizer=regularizers.l2_regularizer(weight_decay),
weights_initializer=initializers.variance_scaling_initializer(),
trainable=is_training,
activation_fn=nn_ops.relu,
normalizer_fn=layers.batch_norm,
normalizer_params=batch_norm_params):
with arg_scope([layers.batch_norm], **batch_norm_params) as arg_sc:
return arg_sc
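A minimal sketch of how the returned scope is consumed (images and blocks are assumed to be defined elsewhere). Per the NOTE above, is_training only toggles whether the convolution weights are trainable; batch norm always uses its stored moving statistics, and its scale/offset are trainable only if cfg.RESNET.BN_TRAIN is set.
with slim.arg_scope(resnet_arg_scope(is_training=True)):
    net, _ = resnet_v1.resnet_v1(images, blocks,
                                 global_pool=False,
                                 include_root_block=False,
                                 scope='resnet_v1_101')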
Example 15: _decide_blocks
# Required import: from tensorflow.contrib.slim.python.slim.nets import resnet_v1 [as alias]
# Or: from tensorflow.contrib.slim.python.slim.nets.resnet_v1 import resnet_v1 [as alias]
def _decide_blocks(self):
# choose different blocks for different number of layers
if self._num_layers == 50:
if tf.__version__ == '1.1.0':
self._blocks = [resnet_utils.Block('block1', resnet_v1.bottleneck,[(256, 64, 1)] * 2 + [(256, 64, 2)]),
resnet_utils.Block('block2', resnet_v1.bottleneck,[(512, 128, 1)] * 3 + [(512, 128, 2)]),
resnet_utils.Block('block3', resnet_v1.bottleneck,[(1024, 256, 1)] * 5 + [(1024, 256, 1)]),
resnet_utils.Block('block4', resnet_v1.bottleneck,[(2048, 512, 1)] * 3)]
else:
from tensorflow.contrib.slim.python.slim.nets.resnet_v1 import resnet_v1_block
self._blocks = [resnet_v1_block('block1', base_depth=64, num_units=3, stride=2),
resnet_v1_block('block2', base_depth=128, num_units=4, stride=2),
resnet_v1_block('block3', base_depth=256, num_units=6, stride=1),
resnet_v1_block('block4', base_depth=512, num_units=3, stride=1)]
elif self._num_layers == 101:
self._blocks = [resnet_v1_block('block1', base_depth=64, num_units=3, stride=2),
resnet_v1_block('block2', base_depth=128, num_units=4, stride=2),
# use stride 1 for the last conv4 layer
resnet_v1_block('block3', base_depth=256, num_units=23, stride=1),
resnet_v1_block('block4', base_depth=512, num_units=3, stride=1)]
elif self._num_layers == 152:
self._blocks = [resnet_v1_block('block1', base_depth=64, num_units=3, stride=2),
resnet_v1_block('block2', base_depth=128, num_units=8, stride=2),
# use stride 1 for the last conv4 layer
resnet_v1_block('block3', base_depth=256, num_units=36, stride=1),
resnet_v1_block('block4', base_depth=512, num_units=3, stride=1)]
else:
# other numbers are not supported
raise NotImplementedError
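For reference, a hedged sketch of how the two notations above line up: resnet_v1_block(scope, base_depth, num_units, stride) builds num_units bottleneck units with depth = 4 * base_depth and applies the stride to the last unit, which is what the explicit tuple lists in the TF 1.1.0 branch spell out.
try:
    from tensorflow.contrib.slim.python.slim.nets.resnet_v1 import resnet_v1_block
    block1 = resnet_v1_block('block1', base_depth=64, num_units=3, stride=2)
except ImportError:
    # Older TF (e.g. 1.1.0): list the units explicitly as
    # (depth, depth_bottleneck, stride) tuples, with the stride on the last unit.
    block1 = resnet_utils.Block('block1', resnet_v1.bottleneck,
                                [(256, 64, 1)] * 2 + [(256, 64, 2)])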