This article collects typical usage examples of the model_base.ResNet method in Python. If you are wondering how exactly model_base.ResNet is used, how to call it, or where to find concrete examples, the curated code samples below may help. You can also explore further usage examples from the module model_base in which ResNet is defined.
Two code examples of the model_base.ResNet method are shown below, sorted by popularity by default.
Example 1: forward_pass
# Required module: import model_base [as alias]
# Or: from model_base import ResNet [as alias]
# The snippet also uses TensorFlow: import tensorflow as tf
def forward_pass(self, x, input_data_format='channels_last'):
    """Build the core model within the graph."""
    if self._data_format != input_data_format:
        if input_data_format == 'channels_last':
            # Computation requires channels_first.
            x = tf.transpose(x, [0, 3, 1, 2])
        else:
            # Computation requires channels_last.
            x = tf.transpose(x, [0, 2, 3, 1])

    # Image standardization.
    x = x / 128 - 1

    x = self._conv(x, 3, 16, 1)
    x = self._batch_norm(x)
    x = self._relu(x)

    # Use basic (non-bottleneck) block and ResNet V1 (post-activation).
    res_func = self._residual_v1

    # 3 stages of block stacking.
    for i in range(3):
        with tf.name_scope('stage'):
            for j in range(self.n):
                if j == 0:
                    # First block in a stage, filters and strides may change.
                    x = res_func(x, 3, self.filters[i], self.filters[i + 1],
                                 self.strides[i])
                else:
                    # Following blocks in a stage, constant filters and unit stride.
                    x = res_func(x, 3, self.filters[i + 1], self.filters[i + 1], 1)

    x = self._global_avg_pool(x)
    x = self._fully_connected(x, self.num_classes)
    return x
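The forward_pass above is defined on a subclass of model_base.ResNet, which is assumed to supply the helpers it calls (_conv, _batch_norm, _relu, _residual_v1, _global_avg_pool, _fully_connected) as well as self._data_format, self.n, self.filters, self.strides and self.num_classes. The sketch below shows one way such a subclass might be assembled for a CIFAR-style 20-layer network; the class name CifarResNet, the base-class constructor signature, and the hyperparameter wiring are illustrative assumptions, not code from the original repository.

import tensorflow as tf

import model_base


class CifarResNet(model_base.ResNet):  # hypothetical subclass name
    """Small CIFAR-style ResNet built on helpers assumed on model_base.ResNet."""

    def __init__(self, num_layers, num_classes, is_training,
                 data_format='channels_first'):
        # The base-class constructor arguments are an assumption.
        super(CifarResNet, self).__init__(is_training, data_format)
        self.n = (num_layers - 2) // 6      # residual blocks per stage; ResNet-20 -> 3
        self.num_classes = num_classes
        self.filters = [16, 16, 32, 64]     # input width, then the width of each stage
        self.strides = [1, 2, 2]            # stride of the first block in each stage

    def forward_pass(self, x, input_data_format='channels_last'):
        # The body from Example 1 above goes here.
        raise NotImplementedError


# Intended usage (illustrative): NHWC image batch in, per-class logits out.
# network = CifarResNet(num_layers=20, num_classes=10, is_training=True)
# logits = network.forward_pass(images, input_data_format='channels_last')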
Example 2: forward_pass
# Required module: import model_base [as alias]
# Or: from model_base import ResNet [as alias]
# The snippet also uses TensorFlow: import tensorflow as tf
def forward_pass(self, x, input_data_format='channels_last'):
    """Build the core model within the graph."""
    if self._data_format != input_data_format:
        if input_data_format == 'channels_last':
            # Computation requires channels_first.
            x = tf.transpose(x, [0, 3, 1, 2])
        else:
            # Computation requires channels_last.
            x = tf.transpose(x, [0, 2, 3, 1])

    # Image standardization.
    x = x / 128 - 1

    x = self._conv(x, 3, 16, 1)
    x = self._batch_norm(x)
    x = self._relu(x)

    if self.version == 'v1':
        # Use basic (non-bottleneck) block and ResNet V1 (post-activation).
        res_func = self._residual_v1
    elif self.version == 'v2':
        # Use basic (non-bottleneck) block and ResNet V2 (pre-activation).
        res_func = self._residual_v2
    else:  # 'bv2'
        # Use bottleneck block and ResNet V2 (pre-activation).
        res_func = self._bottleneck_residual_v2

    # 3 stages of block stacking.
    for i in range(3):
        with tf.name_scope('stage'):
            for j in range(self.n):
                if j == 0:
                    # First block in a stage, filters and strides may change.
                    x = res_func(x, self.filters[i], self.filters[i + 1],
                                 self.strides[i])
                else:
                    # Following blocks in a stage, constant filters and unit stride.
                    x = res_func(x, self.filters[i + 1], self.filters[i + 1], 1)

    x = self._global_avg_pool(x)
    x = self._fully_connected(x, self.num_classes, self.loss_type)
    return x
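Example 2 follows the same layout as Example 1 but selects the residual function from self.version ('v1' post-activation basic block, 'v2' pre-activation basic block, 'bv2' pre-activation bottleneck block) and forwards self.loss_type to the final fully connected layer. The same dispatch can also be expressed as a lookup table, which keeps the version-to-block mapping in one place; the helper below is an illustrative rewrite under that assumption, not code from the original source.

def select_residual_fn(model, version):
    """Map a version string to a residual-block helper on the model (illustrative)."""
    residual_fns = {
        'v1': model._residual_v1,              # basic block, ResNet V1 (post-activation)
        'v2': model._residual_v2,              # basic block, ResNet V2 (pre-activation)
        'bv2': model._bottleneck_residual_v2,  # bottleneck block, ResNet V2 (pre-activation)
    }
    try:
        return residual_fns[version]
    except KeyError:
        raise ValueError('Unknown ResNet version: %r' % version)

With such a helper, the if/elif chain in forward_pass could be replaced by res_func = select_residual_fn(self, self.version).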