

Python functional.relu6 Method Code Examples

This article collects typical usage examples of the Python method torch.nn.functional.relu6. If you are unsure what functional.relu6 does, how to call it, or want to see it used in context, the curated code examples below may help. You can also explore further usage examples from the containing module, torch.nn.functional.


The following presents 15 code examples of functional.relu6, sorted by popularity by default. You can upvote the examples you like or find useful; your feedback helps the system recommend better Python code examples.
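For orientation before the examples: F.relu6 clamps its input element-wise to the range [0, 6], i.e. relu6(x) = min(max(0, x), 6). A minimal sketch (the tensor values below are only illustrative):

import torch
import torch.nn.functional as F

x = torch.tensor([-2.0, 0.5, 3.0, 7.0])
# Negative values clip to 0, values above 6 clip to 6
print(F.relu6(x))  # tensor([0.0000, 0.5000, 3.0000, 6.0000])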

Example 1: forward

# Required import: from torch.nn import functional [as alias]
# Or: from torch.nn.functional import relu6 [as alias]
def forward(self, source_features):
        outputs = []
        if self.weight_type == 'const':
            for w in F.softplus(self.weights.mul(10)):
                outputs.append(w.view(1, 1))
        else:
            for i, (idx, _) in enumerate(self.pairs):
                f = source_features[idx]
                f = F.avg_pool2d(f, f.size(2)).view(-1, f.size(1))
                if self.weight_type == 'relu':
                    outputs.append(F.relu(self[i](f)))
                elif self.weight_type == 'relu-avg':
                    outputs.append(F.relu(self[i](f.div(f.size(1)))))
                elif self.weight_type == 'relu6':
                    outputs.append(F.relu6(self[i](f)))
        return outputs 
Developer: alinlab, Project: L2T-ww, Lines of code: 18, Source: train_l2t_ww.py

Example 2: get_activation

# Required import: from torch.nn import functional [as alias]
# Or: from torch.nn.functional import relu6 [as alias]
def get_activation(name):
    if isinstance(name, nn.Module):
        return name
    if name == 'default':
        return get_activation(get_default_activation())
    elif name == 'relu':
        return nn.ReLU(inplace=True)
    elif name == 'relu6':
        return nn.ReLU6(inplace=True)
    elif name == 'leaky_relu':
        return nn.LeakyReLU(negative_slope=0.1, inplace=True)
    elif name == 'sigmoid':
        return nn.Sigmoid()
    elif name == 'hswish':
        return HardSwish(inplace=True)
    elif name == 'swish':
        return Swish()
    else:
        raise NotImplementedError("No activation named %s" % name) 
Developer: qixuxiang, Project: Pytorch_Lightweight_Network, Lines of code: 21, Source: modules.py

Example 3: activation

# Required import: from torch.nn import functional [as alias]
# Or: from torch.nn.functional import relu6 [as alias]
def activation(input, kind):
  #print("Activation: {}".format(kind))
  if kind == 'selu':
    return F.selu(input)
  elif kind == 'relu':
    return F.relu(input)
  elif kind == 'relu6':
    return F.relu6(input)
  elif kind == 'sigmoid':
    return F.sigmoid(input)
  elif kind == 'tanh':
    return F.tanh(input)
  elif kind == 'elu':
    return F.elu(input)
  elif kind == 'lrelu':
    return F.leaky_relu(input)
  elif kind == 'swish':
    return input*F.sigmoid(input)
  elif kind == 'none':
    return input
  else:
    raise ValueError('Unknown non-linearity type') 
Developer: NVIDIA, Project: DeepRecommender, Lines of code: 24, Source: model.py

Example 4: forward

# Required import: from torch.nn import functional [as alias]
# Or: from torch.nn.functional import relu6 [as alias]
def forward(self, x):

        out = self.conv1(x)
        out = self.bn1(out)
        out = F.relu6(out)

        out = self.conv2(out)
        out = self.bn2(out)
        out = F.relu6(out)

        out = self.conv3(out)
        out = self.bn3(out)

        if self.inp == self.oup and self.stride == 1:
            return (out + x)

        else:
            return out 
Developer: liuzechun, Project: MetaPruning, Lines of code: 20, Source: mobilenet_v2.py

Example 5: forward

# Required import: from torch.nn import functional [as alias]
# Or: from torch.nn.functional import relu6 [as alias]
def forward(self, x, inp_scale_id):

        inp_scale = overall_channel_scale[inp_scale_id]

        inp = int(self.base_inp * inp_scale)

        scale_tensor = torch.FloatTensor([inp_scale/self.max_overall_scale]).to(x.device)

        fc11_out = F.relu(self.fc11(scale_tensor))
        conv1_weight = self.fc12(fc11_out).view(self.base_oup, self.max_inp_channel, 1, 1)

        out = F.conv2d(x, conv1_weight[:, :inp, :, :], bias=None, stride=self.stride, padding=0)
        out = self.first_bn[inp_scale_id](out)
        out = F.relu6(out)

        return out 
Developer: liuzechun, Project: MetaPruning, Lines of code: 18, Source: mobilenet_v2.py

Example 6: forward

# Required import: from torch.nn import functional [as alias]
# Or: from torch.nn.functional import relu6 [as alias]
def forward(self, x):
        return x * F.relu6(x + 3., inplace=self.inplace) / 6. 
Developer: PistonY, Project: torch-toolbox, Lines of code: 4, Source: activation.py
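Example 6 is the hard-swish activation expressed via relu6. For reference, PyTorch 1.6+ ships this as the built-in F.hardswish; a quick sketch checking that the relu6 formulation matches (assuming PyTorch >= 1.6):

import torch
import torch.nn.functional as F

x = torch.randn(8)
manual = x * F.relu6(x + 3.) / 6.   # hard-swish via relu6, as in Example 6
builtin = F.hardswish(x)            # built-in since PyTorch 1.6
print(torch.allclose(manual, builtin))  # True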

Example 7: __init__

# Required import: from torch.nn import functional [as alias]
# Or: from torch.nn.functional import relu6 [as alias]
def __init__(self, act_type, auto_optimize=True, **kwargs):
        super(Activation, self).__init__()
        if act_type == 'relu':
            self.act = nn.ReLU(
                inplace=True) if auto_optimize else nn.ReLU(**kwargs)
        elif act_type == 'relu6':
            self.act = nn.ReLU6(
                inplace=True) if auto_optimize else nn.ReLU6(**kwargs)
        elif act_type == 'h_swish':
            self.act = HardSwish(
                inplace=True) if auto_optimize else HardSwish(**kwargs)
        elif act_type == 'h_sigmoid':
            self.act = HardSigmoid(
                inplace=True) if auto_optimize else HardSigmoid(**kwargs)
        elif act_type == 'swish':
            self.act = Swish(**kwargs)
        elif act_type == 'sigmoid':
            self.act = nn.Sigmoid()
        elif act_type == 'lrelu':
            self.act = nn.LeakyReLU(inplace=True, **kwargs) if auto_optimize \
                else nn.LeakyReLU(**kwargs)
        elif act_type == 'prelu':
            self.act = nn.PReLU(**kwargs)
        else:
            raise NotImplementedError(
                '{} activation is not implemented.'.format(act_type)) 
Developer: PistonY, Project: torch-toolbox, Lines of code: 28, Source: activation.py

Example 8: hard_sigmoid

# Required import: from torch.nn import functional [as alias]
# Or: from torch.nn.functional import relu6 [as alias]
def hard_sigmoid(x, inplace=False):
    return F.relu6(x + 3, inplace) / 6 
Developer: Randl, Project: MobileNetV3-pytorch, Lines of code: 4, Source: MobileNetV3.py

Example 9: hard_sigmoid

# Required import: from torch.nn import functional [as alias]
# Or: from torch.nn.functional import relu6 [as alias]
def hard_sigmoid(x, inplace: bool = False):
    if inplace:
        return x.add_(3.).clamp_(0., 6.).div_(6.)
    else:
        return F.relu6(x + 3.) / 6. 
Developer: huawei-noah, Project: ghostnet, Lines of code: 7, Source: ghostnet.py
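Examples 8 and 9 both implement hard-sigmoid, relu6(x + 3) / 6, which squashes the input into [0, 1]. As with hard-swish, PyTorch 1.6+ provides a built-in; a sketch verifying the equivalence (assuming PyTorch >= 1.6):

import torch
import torch.nn.functional as F

x = torch.randn(8)
manual = F.relu6(x + 3.) / 6.   # hard-sigmoid via relu6, as in Examples 8 and 9
builtin = F.hardsigmoid(x)      # built-in since PyTorch 1.6
print(torch.allclose(manual, builtin))  # True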

Example 10: forward

# Required import: from torch.nn import functional [as alias]
# Or: from torch.nn.functional import relu6 [as alias]
def forward(self, x):
        out = x * F.relu6(x + 3, inplace=True) / 6
        return out 
Developer: soeaver, Project: Parsing-R-CNN, Lines of code: 5, Source: mobilenet_v3.py

Example 11: forward

# Required import: from torch.nn import functional [as alias]
# Or: from torch.nn.functional import relu6 [as alias]
def forward(self, x):
        out = F.relu6(x + 3, inplace=True) / 6
        return out 
Developer: soeaver, Project: Parsing-R-CNN, Lines of code: 5, Source: mixture_batchnorm.py

Example 12: forward

# Required import: from torch.nn import functional [as alias]
# Or: from torch.nn.functional import relu6 [as alias]
def forward(self, x):
        return F.relu6(self.conv(x)) 
Developer: rwightman, Project: posenet-pytorch, Lines of code: 4, Source: mobilenet_v1.py

Example 13: hard_swish

# Required import: from torch.nn import functional [as alias]
# Or: from torch.nn.functional import relu6 [as alias]
def hard_swish(x, inplace: bool = False):
    inner = F.relu6(x + 3.).div_(6.)
    return x.mul_(inner) if inplace else x.mul(inner) 
Developer: rwightman, Project: gen-efficientnet-pytorch, Lines of code: 5, Source: activations.py

Example 14: forward

# Required import: from torch.nn import functional [as alias]
# Or: from torch.nn.functional import relu6 [as alias]
def forward(self, x):
        return MishJitAutoFn.apply(x)


# @torch.jit.script
# def hard_swish_jit(x, inplace: bool = False):
#     return x.mul(F.relu6(x + 3.).mul_(1./6.))
#
#
# class HardSwishJit(nn.Module):
#     def __init__(self, inplace: bool = False):
#         super(HardSwishJit, self).__init__()
#
#     def forward(self, x):
#         return hard_swish_jit(x)
#
#
# @torch.jit.script
# def hard_sigmoid_jit(x, inplace: bool = False):
#     return F.relu6(x + 3.).mul(1./6.)
#
#
# class HardSigmoidJit(nn.Module):
#     def __init__(self, inplace: bool = False):
#         super(HardSigmoidJit, self).__init__()
#
#     def forward(self, x):
#         return hard_sigmoid_jit(x) 
Developer: rwightman, Project: gen-efficientnet-pytorch, Lines of code: 30, Source: activations_jit.py

Example 15: forward

# Required import: from torch.nn import functional [as alias]
# Or: from torch.nn.functional import relu6 [as alias]
def forward(self, x):
        return F.relu6(x + 3.0, inplace=True) / 6.0 
Developer: osmr, Project: imgclsmob, Lines of code: 4, Source: common.py


Note: The torch.nn.functional.relu6 examples in this article were compiled by 纯净天空 from open-source code and documentation platforms such as GitHub and MSDocs. The snippets were selected from open-source projects contributed by various developers; copyright remains with the original authors. Refer to each project's license before distributing or using the code, and do not reproduce without permission.