This article collects typical usage examples of the Python method tensorflow.contrib.slim.nets.resnet_utils.stack_blocks_dense. If you are unsure what resnet_utils.stack_blocks_dense does or how to call it, the curated examples below may help. You can also explore further usage examples from the module that contains this method, tensorflow.contrib.slim.nets.resnet_utils.
The following presents 3 code examples of resnet_utils.stack_blocks_dense.
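Before the examples, here is a minimal sketch of the basic call pattern. Everything in the sketch is illustrative rather than taken from the examples below, and it assumes a TensorFlow 1.x release whose stack_blocks_dense accepts dict-style unit arguments (older releases expect (depth, depth_bottleneck, stride) tuples instead, as in Example 3).

# Minimal, hedged sketch of the call pattern; names and shapes are illustrative.
import tensorflow as tf
from tensorflow.contrib import slim
from tensorflow.contrib.slim.nets import resnet_utils, resnet_v1

inputs = tf.placeholder(tf.float32, [1, 224, 224, 3])
blocks = [
    resnet_utils.Block(
        'block1', resnet_v1.bottleneck,
        [{'depth': 256, 'depth_bottleneck': 64, 'stride': 2}] * 2),
]
with slim.arg_scope(resnet_utils.resnet_arg_scope()):
    # output_stride=None disables the atrous trick: each unit keeps its own stride.
    net = resnet_utils.stack_blocks_dense(inputs, blocks, output_stride=None)

Passing an output_stride (e.g. 8) instead makes stack_blocks_dense switch to dilated (atrous) convolution once the accumulated stride reaches that value, which is what Example 3 verifies.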
Example 1: _build_tail
# Required import: from tensorflow.contrib.slim.nets import resnet_utils
# Alternatively: from tensorflow.contrib.slim.nets.resnet_utils import stack_blocks_dense
# This example also relies on: import tensorflow as tf, from tensorflow.contrib import slim,
# and from tensorflow.contrib.slim.nets import resnet_v1
def _build_tail(self, inputs, is_training=False):
    if not self._use_tail:
        return inputs

    if self._architecture == 'resnet_v1_101':
        train_batch_norm = (
            is_training and self._config.get('train_batch_norm')
        )
        with self._enter_variable_scope():
            weight_decay = (
                self._config.get('arg_scope', {}).get('weight_decay', 0)
            )
            with tf.variable_scope(self._architecture, reuse=True):
                resnet_arg_scope = resnet_utils.resnet_arg_scope(
                    batch_norm_epsilon=1e-5,
                    batch_norm_scale=True,
                    weight_decay=weight_decay
                )
                with slim.arg_scope(resnet_arg_scope):
                    with slim.arg_scope(
                        [slim.batch_norm], is_training=train_batch_norm
                    ):
                        blocks = [
                            resnet_utils.Block(
                                'block4',
                                resnet_v1.bottleneck,
                                [{
                                    'depth': 2048,
                                    'depth_bottleneck': 512,
                                    'stride': 1
                                }] * 3
                            )
                        ]
                        proposal_classifier_features = (
                            resnet_utils.stack_blocks_dense(inputs, blocks)
                        )
    else:
        proposal_classifier_features = inputs

    return proposal_classifier_features
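A quick, hedged shape sketch of what the tail above does to ROI-pooled proposal features (shapes and variable names are illustrative, and the class-specific scoping with reuse=True is omitted): because every block4 unit is built with stride 1, the spatial size is preserved and only the depth grows to 2048.

# Hedged standalone sketch of the block4 tail; assumes dict-style unit args
# and omits the reuse/variable-scope handling of the class above.
import tensorflow as tf
from tensorflow.contrib import slim
from tensorflow.contrib.slim.nets import resnet_utils, resnet_v1

roi_pooled = tf.placeholder(tf.float32, [64, 7, 7, 1024])  # illustrative shape
tail_blocks = [
    resnet_utils.Block(
        'block4', resnet_v1.bottleneck,
        [{'depth': 2048, 'depth_bottleneck': 512, 'stride': 1}] * 3)
]
with slim.arg_scope(resnet_utils.resnet_arg_scope()):
    tail = resnet_utils.stack_blocks_dense(roi_pooled, tail_blocks)
print(tail.get_shape().as_list())  # expected: [64, 7, 7, 2048]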
Example 2: _resnet_plain
# Required import: from tensorflow.contrib.slim.nets import resnet_utils
# Alternatively: from tensorflow.contrib.slim.nets.resnet_utils import stack_blocks_dense
# This example also relies on: import tensorflow as tf and from tensorflow.contrib import slim
def _resnet_plain(self, inputs, blocks, output_stride=None, scope=None):
  """A plain ResNet without extra layers before or after the ResNet blocks."""
  with tf.variable_scope(scope, values=[inputs]):
    with slim.arg_scope([slim.conv2d], outputs_collections='end_points'):
      net = resnet_utils.stack_blocks_dense(inputs, blocks, output_stride)
      end_points = slim.utils.convert_collection_to_dict('end_points')
      return net, end_points
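A hedged usage sketch for this helper: the block list mirrors Example 3, create_test_input is the helper used there (defined elsewhere in the test module), and 'tiny' is an illustrative scope name. The sketch assumes the older tuple-style (depth, depth_bottleneck, stride) unit arguments this test file is written against and is meant to run inside the same test class.

# Illustrative call of _resnet_plain from within the test class.
blocks = [
    resnet_utils.Block('block1', resnet_v1.bottleneck, [(4, 1, 1), (4, 1, 2)]),
    resnet_utils.Block('block2', resnet_v1.bottleneck, [(8, 2, 1), (8, 2, 2)]),
]
inputs = create_test_input(2, 21, 21, 3)
with slim.arg_scope(resnet_utils.resnet_arg_scope()):
  net, end_points = self._resnet_plain(inputs, blocks, scope='tiny')
# end_points maps conv scopes such as 'tiny/block1/unit_1/bottleneck_v1/conv1'
# to their output tensors.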
Example 3: _atrousValues
# Required import: from tensorflow.contrib.slim.nets import resnet_utils
# Alternatively: from tensorflow.contrib.slim.nets.resnet_utils import stack_blocks_dense
# This example also relies on: import tensorflow as tf, from tensorflow.contrib import slim,
# and a create_test_input() helper defined elsewhere in the test module
def _atrousValues(self, bottleneck):
  """Verify the values of dense feature extraction by atrous convolution.

  Make sure that dense feature extraction by stack_blocks_dense() followed by
  subsampling gives identical results to feature extraction at the nominal
  network output stride using the simple self._stack_blocks_nondense() above.

  Args:
    bottleneck: The bottleneck function.
  """
  blocks = [
      resnet_utils.Block('block1', bottleneck, [(4, 1, 1), (4, 1, 2)]),
      resnet_utils.Block('block2', bottleneck, [(8, 2, 1), (8, 2, 2)]),
      resnet_utils.Block('block3', bottleneck, [(16, 4, 1), (16, 4, 2)]),
      resnet_utils.Block('block4', bottleneck, [(32, 8, 1), (32, 8, 1)])
  ]
  nominal_stride = 8

  # Test both odd and even input dimensions.
  height = 30
  width = 31
  with slim.arg_scope(resnet_utils.resnet_arg_scope(is_training=False)):
    for output_stride in [1, 2, 4, 8, None]:
      with tf.Graph().as_default():
        with self.test_session() as sess:
          tf.set_random_seed(0)
          inputs = create_test_input(1, height, width, 3)
          # Dense feature extraction followed by subsampling.
          output = resnet_utils.stack_blocks_dense(inputs,
                                                   blocks,
                                                   output_stride)
          if output_stride is None:
            factor = 1
          else:
            factor = nominal_stride // output_stride
          output = resnet_utils.subsample(output, factor)
          # Make the two networks use the same weights.
          tf.get_variable_scope().reuse_variables()
          # Feature extraction at the nominal network rate.
          expected = self._stack_blocks_nondense(inputs, blocks)
          sess.run(tf.global_variables_initializer())
          output, expected = sess.run([output, expected])
          self.assertAllClose(output, expected, atol=1e-4, rtol=1e-4)
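The comparison target self._stack_blocks_nondense() is not included in this excerpt. A minimal sketch of what such a nominal-stride (non-atrous) reference stacker might look like, assuming the same tuple-style unit arguments used above, is:

# Hedged sketch of the reference helper the test compares against; it applies
# each unit at its nominal stride with no dilation (rate=1) and no
# output_stride control.
def _stack_blocks_nondense(self, net, blocks):
  """Sketch of a simplified block stacker without output_stride control."""
  for block in blocks:
    with tf.variable_scope(block.scope, 'block', [net]):
      for i, unit in enumerate(block.args):
        depth, depth_bottleneck, stride = unit
        with tf.variable_scope('unit_%d' % (i + 1), values=[net]):
          net = block.unit_fn(net,
                              depth=depth,
                              depth_bottleneck=depth_bottleneck,
                              stride=stride,
                              rate=1)
  return net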