

Python nn.RReLU Method Code Examples

This article collects typical usage examples of the torch.nn.RReLU method in Python. If you are wondering how nn.RReLU is used in practice, how to call it, or what real-world examples look like, the curated code examples below may help. You can also explore further usage examples from its containing module, torch.nn.


The following presents 8 code examples of the nn.RReLU method, sorted by popularity by default. You can upvote the examples you like or find useful; your feedback helps the system recommend better Python code examples.
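Before the examples, here is a minimal sketch of nn.RReLU itself (under the assumption of a recent PyTorch release): in training mode the negative-side slope is sampled uniformly from [lower, upper] for each element, while in eval mode a fixed slope of (lower + upper) / 2 is used. PyTorch's defaults are lower=1/8 and upper=1/3.

import torch
from torch import nn

# Defaults are lower=1/8, upper=1/3; inplace=False leaves the input tensor untouched.
rrelu = nn.RReLU(lower=0.1, upper=0.3)
x = torch.randn(4)

rrelu.train()          # negative values are scaled by a slope sampled from [0.1, 0.3]
y_train = rrelu(x)

rrelu.eval()           # negative values are scaled by the fixed slope (0.1 + 0.3) / 2 = 0.2
y_eval = rrelu(x)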

Example 1: get_activation

# Required import: from torch import nn [as alias]
# Or: from torch.nn import RReLU [as alias]
def get_activation(self, act):
        if act == 'tanh':
            act = nn.Tanh()
        elif act == 'relu':
            act = nn.ReLU()
        elif act == 'softplus':
            act = nn.Softplus()
        elif act == 'rrelu':
            act = nn.RReLU()
        elif act == 'leakyrelu':
            act = nn.LeakyReLU()
        elif act == 'elu':
            act = nn.ELU()
        elif act == 'selu':
            act = nn.SELU()
        elif act == 'glu':
            act = nn.GLU()
        else:
            print('Defaulting to tanh activations...')
            act = nn.Tanh()
        return act 
Developer: blei-lab, Project: causal-text-embeddings, Lines: 23, Source: supervised_topic_model.py
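A brief usage sketch for Example 1 (the enclosing model class is not shown above, so the object name below is illustrative only):

# Illustrative only: pick the activation from a config string; unknown strings fall back to Tanh.
act = model.get_activation('rrelu')               # returns an nn.RReLU() instance
hidden = nn.Sequential(nn.Linear(16, 16), act)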

Example 2: act_fn

# Required import: from torch import nn [as alias]
# Or: from torch.nn import RReLU [as alias]
def act_fn(act):
    if act == 'relu':
        act_ = nn.ReLU(inplace=False)
    elif act == 'lrelu':
        act_ = nn.LeakyReLU(inplace=True)
    elif act == 'prelu':
        act_ = nn.PReLU()
    elif act == 'rrelu':
        act_ = nn.RReLU(inplace=True)
    elif act == 'elu':
        act_ = nn.ELU(inplace=True)
    elif act == 'selu':
        act_ = nn.SELU(inplace=True)
    elif act == 'tanh':
        act_ = nn.Tanh()
    elif act == 'sigmoid':
        act_ = nn.Sigmoid()
    else:
        print('\n\nActivation function {} is not supported/understood\n\n'.format(act))
        act_ = None
    return act_ 
Developer: juefeix, Project: pnn.pytorch.update, Lines: 23, Source: utils.py
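Most branches in Example 2 request inplace=True, which overwrites the input tensor to save memory; this is fine when the pre-activation tensor is not needed elsewhere (for instance right after a Conv or Linear layer), but it can trigger autograd errors otherwise. A hedged sketch with illustrative layer sizes:

# In-place RReLU directly after a convolution, where the pre-activation is not reused.
block = nn.Sequential(nn.Conv2d(3, 16, kernel_size=3), act_fn('rrelu'))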

Example 3: test_parse_net_kwargs

# Required import: from torch import nn [as alias]
# Or: from torch.nn import RReLU [as alias]
def test_parse_net_kwargs(self, parse_net_kwargs):
        kwargs = {
            'lr': 0.05,
            'max_epochs': 5,
            'module__num_units': 10,
            'module__nonlin': 'torch.nn.RReLU(0.123, upper=0.456)',
        }
        parsed_kwargs = parse_net_kwargs(kwargs)

        assert len(parsed_kwargs) == 4
        assert np.isclose(parsed_kwargs['lr'], 0.05)
        assert parsed_kwargs['max_epochs'] == 5
        assert parsed_kwargs['module__num_units'] == 10
        assert isinstance(parsed_kwargs['module__nonlin'], RReLU)
        assert np.isclose(parsed_kwargs['module__nonlin'].lower, 0.123)
        assert np.isclose(parsed_kwargs['module__nonlin'].upper, 0.456) 
Developer: skorch-dev, Project: skorch, Lines: 18, Source: test_cli.py

Example 4: get_act

# Required import: from torch import nn [as alias]
# Or: from torch.nn import RReLU [as alias]
def get_act(act):
    if act == 'ReLU':
        act_layer = nn.ReLU
    elif act == 'LeakyReLU':
        act_layer = nn.LeakyReLU
    elif act == 'PReLU':
        act_layer = nn.PReLU
    elif act == 'RReLU':
        act_layer = nn.RReLU
    elif act == 'ELU':
        act_layer = nn.ELU
    elif act == 'SELU':
        act_layer = nn.SELU
    elif act == 'Tanh':
        act_layer = nn.Tanh
    elif act == 'Hardtanh':
        act_layer = nn.Hardtanh
    elif act == 'Sigmoid':
        act_layer = nn.Sigmoid
    else:
        print("Invalid activation function")
        raise Exception("Invalid activation function")
    return act_layer 
Developer: SinghJasdeep, Project: Attention-on-Attention-for-VQA, Lines: 25, Source: fc.py
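Unlike Examples 1 and 2, get_act returns the activation class rather than an instance, so the caller instantiates it and can pass its own arguments. A short sketch of that pattern (layer sizes are illustrative):

act_layer = get_act('RReLU')                       # the nn.RReLU class itself
net = nn.Sequential(nn.Linear(32, 32), act_layer(lower=0.1, upper=0.3))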

Example 5: create_str_to_activations_converter

# Required import: from torch import nn [as alias]
# Or: from torch.nn import RReLU [as alias]
def create_str_to_activations_converter(self):
        """Creates a dictionary which converts strings to activations"""
        str_to_activations_converter = {"elu": nn.ELU(), "hardshrink": nn.Hardshrink(), "hardtanh": nn.Hardtanh(),
                                        "leakyrelu": nn.LeakyReLU(), "logsigmoid": nn.LogSigmoid(), "prelu": nn.PReLU(),
                                        "relu": nn.ReLU(), "relu6": nn.ReLU6(), "rrelu": nn.RReLU(), "selu": nn.SELU(),
                                        "sigmoid": nn.Sigmoid(), "softplus": nn.Softplus(), "logsoftmax": nn.LogSoftmax(),
                                        "softshrink": nn.Softshrink(), "softsign": nn.Softsign(), "tanh": nn.Tanh(),
                                        "tanhshrink": nn.Tanhshrink(), "softmin": nn.Softmin(), "softmax": nn.Softmax(dim=1),
                                         "none": None}
        return str_to_activations_converter 
Developer: p-christ, Project: nn_builder, Lines: 12, Source: Base_Network.py

Example 6: test_resolve_dotted_name_instantiated

# Required import: from torch import nn [as alias]
# Or: from torch.nn import RReLU [as alias]
def test_resolve_dotted_name_instantiated(self, resolve_dotted_name):
        result = resolve_dotted_name('torch.nn.RReLU(0.123, upper=0.456)')
        assert isinstance(result, RReLU)
        assert np.isclose(result.lower, 0.123)
        assert np.isclose(result.upper, 0.456) 
Developer: skorch-dev, Project: skorch, Lines: 7, Source: test_cli.py
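Both skorch tests (Examples 3 and 6) rely on an instantiated nn.RReLU exposing its bounds as attributes, which can be checked directly; a minimal sketch, independent of the skorch helpers:

from torch import nn

act = nn.RReLU(0.123, upper=0.456)
assert act.lower == 0.123 and act.upper == 0.456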

Example 7: get_activation

# Required import: from torch import nn [as alias]
# Or: from torch.nn import RReLU [as alias]
def get_activation(activation):
    if isinstance(activation, str):
        if activation == 'relu':
            return nn.ReLU()
        elif activation == 'leaky':
            return nn.LeakyReLU(negative_slope=0.1)
        elif activation == 'prelu':
            return nn.PReLU(num_parameters=1)
        elif activation == 'rrelu':
            return nn.RReLU()
        elif activation == 'lin':
            return nn.Identity()
    else:
        # Deep copy is necessary in case of parametrized activations
        return copy.deepcopy(activation) 
Developer: ELEKTRONN, Project: elektronn3, Lines: 17, Source: unet.py
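The deep copy in Example 7 matters because parametrized activations such as nn.PReLU carry learnable weights; reusing a single instance in several places would silently share (and jointly train) those weights. A minimal sketch of the distinction:

import copy
from torch import nn

template = nn.PReLU(num_parameters=1)
shared = template                       # same module: weight is shared
independent = copy.deepcopy(template)   # separate module: weight is independent

assert shared.weight is template.weight
assert independent.weight is not template.weight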

Example 8: get_AF

# Required import: from torch import nn [as alias]
# Or: from torch.nn import RReLU [as alias]
def get_AF(af_str):
    """
    Given the string identifier, get PyTorch-supported activation function.

    """
    if af_str == 'R':
        return nn.ReLU()         # ReLU(x)=max(0,x)

    elif af_str == 'LR':
        return nn.LeakyReLU()    # LeakyReLU(x)=max(0,x)+negative_slope∗min(0,x)

    elif af_str == 'RR':
        return nn.RReLU()        # the randomized leaky rectified linear unit function

    elif af_str == 'E':          # ELU(x)=max(0,x)+min(0,α∗(exp(x)−1))
        return nn.ELU()

    elif af_str == 'SE':         # SELU(x)=scale∗(max(0,x)+min(0,α∗(exp(x)−1)))
        return nn.SELU()

    elif af_str == 'CE':         # CELU(x)=max(0,x)+min(0,α∗(exp(x/α)−1))
        return nn.CELU()

    elif af_str == 'S':
        return nn.Sigmoid()

    elif af_str == 'SW':
        #return SWISH()
        raise NotImplementedError

    elif af_str == 'T':
        return nn.Tanh()

    elif af_str == 'ST':         # Softmax: rescales an n-dimensional input so the outputs lie in (0, 1) and sum to 1
        return nn.Softmax(dim=1)

    elif af_str == 'EP':
        #return Exp()
        raise NotImplementedError

    else:
        raise NotImplementedError 
Developer: pt-ranking, Project: pt-ranking.github.io, Lines: 44, Source: base_utils.py
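A brief usage sketch for get_AF, combining the two-letter codes defined above (layer sizes are illustrative):

net = nn.Sequential(
    nn.Linear(64, 64), get_AF('RR'),   # randomized leaky ReLU
    nn.Linear(64, 1),  get_AF('S'),    # sigmoid output
)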


Note: The torch.nn.RReLU method examples in this article were compiled by 純淨天空 from GitHub, MSDocs, and other open-source code and documentation platforms. The code snippets were selected from open-source projects contributed by various developers; copyright of the source code remains with the original authors, and distribution and use should follow the corresponding project's license. Do not reproduce without permission.