本文整理汇总了Python中torch.nn.functional.relu_方法的典型用法代码示例。如果您正苦于以下问题:Python functional.relu_方法的具体用法?Python functional.relu_怎么用?Python functional.relu_使用的例子?那么,这里精选的方法代码示例或许可以为您提供帮助。您也可以进一步了解该方法所在模块 torch.nn.functional 的用法示例。
在下文中一共展示了functional.relu_方法的9个代码示例,这些例子默认根据受欢迎程度排序。您可以为喜欢或者感觉有用的代码点赞,您的评价将有助于系统推荐出更棒的Python代码示例。
示例1: forward
# 需要导入模块: from torch.nn import functional [as 别名]
# 或者: from torch.nn.functional import relu_ [as 别名]
def forward(self, x):
    """Bottleneck-style residual forward pass.

    Runs three conv+bn stages with in-place ReLUs, then adds the skip
    connection (downsampled when ``self.downsample`` is set) and applies
    a final in-place ReLU.
    """
    shortcut = x
    # Stage 1 and 2: conv -> bn -> in-place relu.
    y = F.relu_(self.bn1(self.conv1(x)))
    y = F.relu_(self.bn2(self.conv2(y)))
    # Stage 3: no activation until after the residual add.
    y = self.bn3(self.conv3(y))
    # Project the skip path when channel/spatial dims change.
    if self.downsample is not None:
        shortcut = self.downsample(x)
    y += shortcut
    return F.relu_(y)
示例2: forward
# 需要导入模块: from torch.nn import functional [as 别名]
# 或者: from torch.nn.functional import relu_ [as 别名]
def forward(self, x):
    """Residual bottleneck forward: conv1/bn1/relu -> conv2/bn2/relu ->
    conv3/bn3, add the (optionally downsampled) input, then a final
    in-place ReLU."""
    skip = x
    h = self.conv1(x)
    h = F.relu_(self.bn1(h))
    h = self.conv2(h)
    h = F.relu_(self.bn2(h))
    h = self.bn3(self.conv3(h))
    if self.downsample is not None:
        # Match the main path's shape on the skip connection.
        skip = self.downsample(x)
    h += skip
    return F.relu_(h)
示例3: forward
# 需要导入模块: from torch.nn import functional [as 别名]
# 或者: from torch.nn.functional import relu_ [as 别名]
def forward(self, x):
    """Standard bottleneck residual block forward pass.

    Three conv+bn stages (in-place ReLU after the first two), a residual
    add with the possibly-downsampled input, and a closing in-place ReLU.
    """
    identity = x
    out = F.relu_(self.bn1(self.conv1(x)))
    out = F.relu_(self.bn2(self.conv2(out)))
    out = self.bn3(self.conv3(out))
    if self.downsample is not None:
        identity = self.downsample(x)
    out += identity
    return F.relu_(out)
示例4: forward
# 需要导入模块: from torch.nn import functional [as 别名]
# 或者: from torch.nn.functional import relu_ [as 别名]
def forward(self, input):
    """Convolutional tagger forward pass.

    Input: (batch_size, time_steps, freq_bins). A channel axis is added,
    four conv/bn/relu/avg-pool stages run, frequency is averaged out,
    time is max-pooled out, and a sigmoid-activated fc layer produces the
    per-class outputs.
    """
    # (batch_size, 1, time_steps, freq_bins)
    x = input[:, None, :, :]
    # NOTE(review): the final (1, 1) avg-pool is a no-op; kept for
    # fidelity with the original code.
    stages = (
        (self.conv1, self.bn1, (2, 2)),
        (self.conv2, self.bn2, (2, 2)),
        (self.conv3, self.bn3, (2, 2)),
        (self.conv4, self.bn4, (1, 1)),
    )
    for conv, bn, pool_size in stages:
        x = F.avg_pool2d(F.relu_(bn(conv(x))), kernel_size=pool_size)
    # (batch_size, feature_maps, time_steps): average across frequency.
    x = torch.mean(x, dim=3)
    # (batch_size, feature_maps): max across time.
    (x, _) = torch.max(x, dim=2)
    return torch.sigmoid(self.fc(x))
示例5: forward
# 需要导入模块: from torch.nn import functional [as 别名]
# 或者: from torch.nn.functional import relu_ [as 别名]
def forward(self, x):
    """Caffe-naming residual bottleneck forward (branch2a/2b/2c main path,
    optional branch1 projection on the skip path)."""
    residual = x
    h = F.relu_(self.branch2a_bn(self.branch2a(x)))
    h = F.relu_(self.branch2b_bn(self.branch2b(h)))
    h = self.branch2c_bn(self.branch2c(h))
    # branch1 only exists when the skip path needs a projection.
    if hasattr(self, "branch1"):
        residual = self.branch1_bn(self.branch1(x))
    h += residual
    return F.relu_(h)
示例6: forward
# 需要导入模块: from torch.nn import functional [as 别名]
# 或者: from torch.nn.functional import relu_ [as 别名]
def forward(self, x):
    """Multi-branch (trident-style) residual forward pass.

    A plain tensor input is replicated into a list of branches; each
    branch runs conv1 -> relu, the whole list goes through conv2, then
    relu, conv3, a per-branch residual add and a final relu. Branch
    outputs are concatenated when ``self.concat_output`` is set.
    """
    # At test time with a fixed branch index only one branch is run.
    num_branch = self.num_branch if self.training or self.test_branch_idx == -1 else 1
    if not isinstance(x, list):
        x = [x] * num_branch
    feats = [F.relu_(self.conv1(b)) for b in x]
    # NOTE(review): conv2 appears to consume the whole branch list at
    # once (trident conv), unlike conv1/conv3 — confirm against the
    # conv2 module's definition.
    feats = self.conv2(feats)
    feats = [F.relu_(b) for b in feats]
    feats = [self.conv3(b) for b in feats]
    if self.shortcut is not None:
        shortcuts = [self.shortcut(b) for b in x]
    else:
        shortcuts = x
    feats = [F.relu_(f + s) for f, s in zip(feats, shortcuts)]
    if self.concat_output:
        feats = torch.cat(feats)
    return feats
示例7: forward
# 需要导入模块: from torch.nn import functional [as 别名]
# 或者: from torch.nn.functional import relu_ [as 别名]
def forward(self, x):
    """Bottleneck residual forward without batch norm: three convs with
    in-place ReLUs after the first two, a residual add (via an optional
    shortcut projection), and a final in-place ReLU."""
    h = F.relu_(self.conv1(x))
    h = F.relu_(self.conv2(h))
    h = self.conv3(h)
    skip = self.shortcut(x) if self.shortcut is not None else x
    h += skip
    return F.relu_(h)
示例8: forward
# 需要导入模块: from torch.nn import functional [as 别名]
# 或者: from torch.nn.functional import relu_ [as 别名]
def forward(self, x):
    """ASPP-style forward: every branch but the last sees the raw input;
    the last branch sees the globally pooled input, is broadcast back to
    the other branches' spatial size, and all outputs are concatenated
    along the channel dim."""
    pooled = self.gap(x)
    last = len(self.aspp) - 1
    feats = []
    for idx, branch in enumerate(self.aspp):
        src = pooled if idx == last else x
        feats.append(F.relu_(branch(src)))
    # Broadcast the global-pooling branch to match its siblings.
    feats[-1] = feats[-1].expand_as(feats[-2])
    return torch.cat(feats, dim=1)
示例9: forward
# 需要导入模块: from torch.nn import functional [as 别名]
# 或者: from torch.nn.functional import relu_ [as 别名]
def forward(self, x):
    """Res2Net-style bottleneck forward pass.

    After conv1/bn1/relu the channels are split into chunks of
    ``self.width``; each of the first ``self.nums`` chunks is processed
    by its own conv/bn/relu, with 'normal' blocks feeding the running
    branch output into the next chunk (hierarchical residual) and
    'stage' blocks restarting from the raw chunk. The leftover chunk
    (when ``scale != 1``) is appended untouched ('normal') or pooled
    ('stage'). Finally conv3/bn3, the residual add, and an in-place ReLU.
    """
    identity = x
    out = F.relu_(self.bn1(self.conv1(x)))
    chunks = torch.split(out, self.width, 1)
    for i in range(self.nums):
        # 'stage' (or the first chunk) starts fresh; otherwise carry the
        # previous branch output forward.
        sp = chunks[i] if (i == 0 or self.stype == 'stage') else sp + chunks[i]
        sp = F.relu_(self.bns[i](self.convs[i](sp)))
        out = sp if i == 0 else torch.cat((out, sp), 1)
    if self.scale != 1 and self.stype == 'normal':
        out = torch.cat((out, chunks[self.nums]), 1)
    elif self.scale != 1 and self.stype == 'stage':
        out = torch.cat((out, self.pool(chunks[self.nums])), 1)
    out = self.bn3(self.conv3(out))
    if self.downsample is not None:
        identity = self.downsample(x)
    out += identity
    return F.relu_(out)