This page collects typical usage examples of the Python method maskrcnn_benchmark.layers.FrozenBatchNorm2d. If you are wondering what layers.FrozenBatchNorm2d does, how to use it, or what it looks like in real code, the curated examples below may help. You can also browse the containing module, maskrcnn_benchmark.layers,
for further usage examples.
The following shows 5 code examples of layers.FrozenBatchNorm2d, ordered by popularity by default.
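For context, FrozenBatchNorm2d is a drop-in replacement for nn.BatchNorm2d in which the running statistics and the affine parameters are registered as fixed buffers, so nothing is updated during training and the layer reduces to a fixed per-channel affine transform. The sketch below is a simplified re-implementation for illustration only; it mirrors the idea, not necessarily the library's exact source.

import torch
import torch.nn as nn

class FrozenBatchNorm2dSketch(nn.Module):
    """BatchNorm2d with frozen statistics and affine parameters (illustrative sketch)."""

    def __init__(self, num_features):
        super(FrozenBatchNorm2dSketch, self).__init__()
        # Buffers, not Parameters: the optimizer never updates them.
        self.register_buffer("weight", torch.ones(num_features))
        self.register_buffer("bias", torch.zeros(num_features))
        self.register_buffer("running_mean", torch.zeros(num_features))
        self.register_buffer("running_var", torch.ones(num_features))

    def forward(self, x):
        # y = (x - mean) / sqrt(var) * weight + bias, folded into one affine op.
        scale = self.weight * self.running_var.rsqrt()
        shift = self.bias - self.running_mean * scale
        return x * scale.reshape(1, -1, 1, 1) + shift.reshape(1, -1, 1, 1)

x = torch.randn(2, 8, 4, 4)
print(FrozenBatchNorm2dSketch(8)(x).shape)  # torch.Size([2, 8, 4, 4])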
Example 1: __init__
# Required module: from maskrcnn_benchmark import layers [as alias]
# Or: from maskrcnn_benchmark.layers import FrozenBatchNorm2d [as alias]
def __init__(
    self,
    in_channels,
    bottleneck_channels,
    out_channels,
    num_groups=1,
    stride_in_1x1=True,
    stride=1,
    dilation=1,
    dcn_config={}
):
    super(BottleneckWithFixedBatchNorm, self).__init__(
        in_channels=in_channels,
        bottleneck_channels=bottleneck_channels,
        out_channels=out_channels,
        num_groups=num_groups,
        stride_in_1x1=stride_in_1x1,
        stride=stride,
        dilation=dilation,
        norm_func=FrozenBatchNorm2d,
        dcn_config=dcn_config
    )
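This constructor simply forwards its arguments to the generic Bottleneck base class and pins norm_func to FrozenBatchNorm2d, so every normalization layer inside the block stays frozen. A hypothetical usage sketch follows; the import path assumes the class is defined in maskrcnn_benchmark.modeling.backbone.resnet as in the upstream repository, and the channel sizes and input shape are made up for illustration.

import torch
from maskrcnn_benchmark.modeling.backbone.resnet import BottleneckWithFixedBatchNorm

# Hypothetical sizes; a ResNet-50 stage-2 block commonly uses 256/64/256.
block = BottleneckWithFixedBatchNorm(
    in_channels=256,
    bottleneck_channels=64,
    out_channels=256,
    num_groups=1,
    stride_in_1x1=True,
    stride=1,
    dilation=1,
)
out = block(torch.randn(1, 256, 56, 56))
print(out.shape)  # expected: torch.Size([1, 256, 56, 56])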
Example 2: __init__
# Required module: from maskrcnn_benchmark import layers [as alias]
# Or: from maskrcnn_benchmark.layers import FrozenBatchNorm2d [as alias]
def __init__(self, inplanes, planes, stride=1):
    super(BasicBlock, self).__init__()
    self.inplanes = inplanes
    self.planes = planes
    self.conv1 = Conv2d(
        inplanes, planes, kernel_size=3,
        stride=stride, padding=1, bias=False)
    self.bn1 = FrozenBatchNorm2d(planes)
    self.relu = nn.ReLU(inplace=True)
    self.conv2 = Conv2d(
        planes, planes, kernel_size=3,
        stride=stride, padding=1, bias=False)
    self.bn2 = FrozenBatchNorm2d(planes)
    if self.inplanes != self.planes*self.expansion:
        self.downsample = nn.Sequential(
            Conv2d(self.inplanes, self.planes * self.expansion,
                   kernel_size=1, stride=stride, bias=False),
            FrozenBatchNorm2d(self.planes * self.expansion),
        )
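Two things are worth noting about this excerpt. First, it relies on an expansion class attribute (1 for a BasicBlock) that is defined on the class but not shown here, and it only creates self.downsample when the channel counts differ. Second, conv2 is built with stride=stride, whereas the canonical ResNet BasicBlock keeps the second 3x3 convolution at stride 1; for stride > 1 this would downsample the main path twice, so it is worth checking against the class's forward(), which the excerpt omits. The stand-alone sketch below is an assumption for illustration, not code from the repository: it follows the canonical choices (stride 1 for conv2, downsample whenever shape changes) and adds an assumed residual forward().

import torch
import torch.nn as nn
from maskrcnn_benchmark.layers import Conv2d, FrozenBatchNorm2d

class BasicBlockSketch(nn.Module):
    """Illustrative stand-alone variant of the block above, with an assumed forward()."""
    expansion = 1  # the excerpt relies on this class attribute, which it does not show

    def __init__(self, inplanes, planes, stride=1):
        super(BasicBlockSketch, self).__init__()
        self.conv1 = Conv2d(inplanes, planes, kernel_size=3,
                            stride=stride, padding=1, bias=False)
        self.bn1 = FrozenBatchNorm2d(planes)
        self.relu = nn.ReLU(inplace=True)
        # Canonical BasicBlock keeps the second conv at stride 1 (see note above).
        self.conv2 = Conv2d(planes, planes, kernel_size=3,
                            stride=1, padding=1, bias=False)
        self.bn2 = FrozenBatchNorm2d(planes)
        self.downsample = None
        # Slight deviation from the excerpt: also downsample when stride changes resolution.
        if stride != 1 or inplanes != planes * self.expansion:
            self.downsample = nn.Sequential(
                Conv2d(inplanes, planes * self.expansion,
                       kernel_size=1, stride=stride, bias=False),
                FrozenBatchNorm2d(planes * self.expansion),
            )

    def forward(self, x):
        identity = x if self.downsample is None else self.downsample(x)
        out = self.relu(self.bn1(self.conv1(x)))
        out = self.bn2(self.conv2(out))
        return self.relu(out + identity)

print(BasicBlockSketch(64, 64)(torch.randn(1, 64, 56, 56)).shape)  # torch.Size([1, 64, 56, 56])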
Example 3: __init__
# Required module: from maskrcnn_benchmark import layers [as alias]
# Or: from maskrcnn_benchmark.layers import FrozenBatchNorm2d [as alias]
def __init__(self, cfg):
    super(StemWithFixedBatchNorm, self).__init__()
    out_channels = cfg.MODEL.RESNETS.STEM_OUT_CHANNELS

    self.conv1 = Conv2d(
        3, out_channels, kernel_size=7, stride=2, padding=3, bias=False
    )
    self.bn1 = FrozenBatchNorm2d(out_channels)
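A hypothetical way to instantiate this stem from the project's default config is sketched below; the import paths assume the upstream repository layout (maskrcnn_benchmark.config.cfg and maskrcnn_benchmark.modeling.backbone.resnet), and the forward pass, which typically applies conv1, bn1, a ReLU and a stride-2 max-pool, is not part of the excerpt.

import torch
from maskrcnn_benchmark.config import cfg  # MODEL.RESNETS.STEM_OUT_CHANNELS defaults to 64
from maskrcnn_benchmark.modeling.backbone.resnet import StemWithFixedBatchNorm

stem = StemWithFixedBatchNorm(cfg)
x = torch.randn(1, 3, 224, 224)
print(stem(x).shape)  # expected roughly torch.Size([1, 64, 56, 56]) after the stride-2 conv and max-pool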
Example 4: _make_fuse_layers
# Required module: from maskrcnn_benchmark import layers [as alias]
# Or: from maskrcnn_benchmark.layers import FrozenBatchNorm2d [as alias]
def _make_fuse_layers(self):
    # Build HRNet-style cross-resolution fusion modules; fuse_layers[i][j]
    # maps branch j's feature map to branch i's resolution and channel width.
    if self.num_branches == 1:
        return None

    num_branches = self.num_branches
    num_inchannels = self.num_inchannels
    fuse_layers = []
    for i in range(num_branches if self.multi_scale_output else 1):
        fuse_layer = []
        for j in range(num_branches):
            if j > i:
                # Lower-resolution branch: 1x1 conv + frozen BN, then nearest upsampling.
                fuse_layer.append(nn.Sequential(
                    Conv2d(num_inchannels[j], num_inchannels[i], 1, 1, 0, bias=False),
                    FrozenBatchNorm2d(num_inchannels[i]),
                    nn.Upsample(scale_factor=2**(j-i), mode='nearest')))
            elif j == i:
                # Same branch: no transformation needed.
                fuse_layer.append(None)
            else:
                # Higher-resolution branch: downsample with a chain of stride-2 3x3 convs.
                conv3x3s = []
                for k in range(i-j):
                    if k == i - j - 1:
                        # Last downsampling step switches to branch i's channel width.
                        num_outchannels_conv3x3 = num_inchannels[i]
                        conv3x3s.append(nn.Sequential(
                            Conv2d(num_inchannels[j], num_outchannels_conv3x3,
                                   3, 2, 1, bias=False),
                            FrozenBatchNorm2d(num_outchannels_conv3x3)))
                    else:
                        num_outchannels_conv3x3 = num_inchannels[j]
                        conv3x3s.append(nn.Sequential(
                            Conv2d(num_inchannels[j], num_outchannels_conv3x3,
                                   3, 2, 1, bias=False),
                            FrozenBatchNorm2d(num_outchannels_conv3x3),
                            nn.ReLU(True)))
                fuse_layer.append(nn.Sequential(*conv3x3s))
        fuse_layers.append(nn.ModuleList(fuse_layer))
    return nn.ModuleList(fuse_layers)
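The nested loops build HRNet-style cross-resolution fusion: a lower-resolution branch (j > i) is aligned to branch i with a 1x1 conv, a frozen BN and nearest-neighbour upsampling; j == i is left untouched; and a higher-resolution branch (j < i) is repeatedly downsampled with stride-2 3x3 convs, switching to branch i's channel width only on the last step. The stand-alone sketch below shows the two non-trivial paths with made-up channel widths (32 and 64 are assumptions, not values from the snippet):

import torch
import torch.nn as nn
from maskrcnn_benchmark.layers import Conv2d, FrozenBatchNorm2d

c_hi, c_lo = 32, 64  # hypothetical widths: branch 0 (high resolution), branch 1 (low resolution)

# j > i path: bring the low-resolution branch up to branch 0's resolution and width.
up = nn.Sequential(
    Conv2d(c_lo, c_hi, 1, 1, 0, bias=False),
    FrozenBatchNorm2d(c_hi),
    nn.Upsample(scale_factor=2, mode='nearest'),
)

# j < i path: bring the high-resolution branch down to branch 1's resolution and width.
down = nn.Sequential(
    Conv2d(c_hi, c_lo, 3, 2, 1, bias=False),
    FrozenBatchNorm2d(c_lo),
)

x_hi = torch.randn(1, c_hi, 64, 64)
x_lo = torch.randn(1, c_lo, 32, 32)
print(up(x_lo).shape)    # torch.Size([1, 32, 64, 64])
print(down(x_hi).shape)  # torch.Size([1, 64, 32, 32])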
Example 5: _make_transition_layer
# Required module: from maskrcnn_benchmark import layers [as alias]
# Or: from maskrcnn_benchmark.layers import FrozenBatchNorm2d [as alias]
def _make_transition_layer(
        self, num_channels_pre_layer, num_channels_cur_layer):
    # Adapt the previous stage's branches to the channel layout of the next stage.
    num_branches_cur = len(num_channels_cur_layer)
    num_branches_pre = len(num_channels_pre_layer)

    transition_layers = []
    for i in range(num_branches_cur):
        if i < num_branches_pre:
            if num_channels_cur_layer[i] != num_channels_pre_layer[i]:
                # Existing branch whose width changes: 3x3 stride-1 conv + frozen BN + ReLU.
                transition_layers.append(nn.Sequential(
                    Conv2d(num_channels_pre_layer[i],
                           num_channels_cur_layer[i],
                           3,
                           1,
                           1,
                           bias=False),
                    FrozenBatchNorm2d(num_channels_cur_layer[i]),
                    nn.ReLU(inplace=True)))
            else:
                # Unchanged branch: no transition module.
                transition_layers.append(None)
        else:
            # New, lower-resolution branch: downsample from the last existing branch.
            conv3x3s = []
            for j in range(i+1-num_branches_pre):
                inchannels = num_channels_pre_layer[-1]
                outchannels = num_channels_cur_layer[i] \
                    if j == i-num_branches_pre else inchannels
                conv3x3s.append(nn.Sequential(
                    Conv2d(
                        inchannels, outchannels, 3, 2, 1, bias=False),
                    FrozenBatchNorm2d(outchannels),
                    nn.ReLU(inplace=True)))
            transition_layers.append(nn.Sequential(*conv3x3s))
    return nn.ModuleList(transition_layers)
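_make_transition_layer adapts the previous stage's branches to the next stage: an existing branch whose channel count changes gets a 3x3 stride-1 conv + frozen BN + ReLU, an unchanged branch maps to None, and each newly added (lower-resolution) branch is created from the previous stage's last branch with stride-2 3x3 convs. The sketch below exercises only the "new branch" case with made-up widths (32 and 64 are assumptions, not values from the snippet):

import torch
import torch.nn as nn
from maskrcnn_benchmark.layers import Conv2d, FrozenBatchNorm2d

pre, cur = [32], [32, 64]  # hypothetical: previous stage has 1 branch, next stage has 2

# i = 0: channel counts already match, so that transition entry would be None.
# i = 1: a new, lower-resolution branch is built from the last existing branch.
new_branch = nn.Sequential(
    Conv2d(pre[-1], cur[1], 3, 2, 1, bias=False),
    FrozenBatchNorm2d(cur[1]),
    nn.ReLU(inplace=True),
)
x = torch.randn(1, pre[-1], 56, 56)
print(new_branch(x).shape)  # torch.Size([1, 64, 28, 28])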