

Python nn.Softsign Method Code Examples

This article collects typical usage examples of the torch.nn.Softsign method in Python. If you are wondering how nn.Softsign is used in practice, what it is good for, or how to call it, the curated code examples below may help. You can also explore further usage examples from the module it belongs to, torch.nn.


The following shows 5 code examples of the nn.Softsign method, sorted by popularity by default. You can upvote the examples you like or find useful; your ratings help the system recommend better Python code examples.
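
For reference, nn.Softsign applies the element-wise function x / (1 + |x|), which squashes its input into (-1, 1); it behaves much like Tanh but saturates polynomially rather than exponentially. A minimal demonstration:

import torch
from torch import nn

act = nn.Softsign()
x = torch.tensor([-10.0, -1.0, 0.0, 1.0, 10.0])
print(act(x))  # tensor([-0.9091, -0.5000,  0.0000,  0.5000,  0.9091])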

Example 1: get_activation

# Required import: from torch import nn [as alias]
# Or: from torch.nn import Softsign [as alias]
def get_activation(act):
    """Get the activation based on the act string

    Parameters
    ----------
    act: str or callable function

    Returns
    -------
    ret: callable function
    """
    if act is None:
        return lambda x: x
    if isinstance(act, str):
        if act == 'leaky':
            return nn.LeakyReLU(0.1)
        elif act == 'relu':
            return nn.ReLU()
        elif act == 'tanh':
            return nn.Tanh()
        elif act == 'sigmoid':
            return nn.Sigmoid()
        elif act == 'softsign':
            return nn.Softsign()
        else:
            raise NotImplementedError
    else:
        return act 
Developer ID: dmlc, Project: dgl, Lines of code: 30, Source file: utils.py
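
A short usage sketch, assuming get_activation is in scope (the tensor below is illustrative):

import torch

act = get_activation('softsign')    # returns nn.Softsign()
y = act(torch.randn(4))             # values now lie in (-1, 1)
identity = get_activation(None)     # passthrough: identity(x) is x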

Example 2: create_str_to_activations_converter

# Required import: from torch import nn [as alias]
# Or: from torch.nn import Softsign [as alias]
def create_str_to_activations_converter(self):
    """Creates a dictionary which converts strings to activations."""
    # dim=1 is passed explicitly below; relying on the implicit dim of
    # LogSoftmax/Softmin is deprecated in recent PyTorch releases.
    str_to_activations_converter = {
        "elu": nn.ELU(), "hardshrink": nn.Hardshrink(), "hardtanh": nn.Hardtanh(),
        "leakyrelu": nn.LeakyReLU(), "logsigmoid": nn.LogSigmoid(), "prelu": nn.PReLU(),
        "relu": nn.ReLU(), "relu6": nn.ReLU6(), "rrelu": nn.RReLU(), "selu": nn.SELU(),
        "sigmoid": nn.Sigmoid(), "softplus": nn.Softplus(), "logsoftmax": nn.LogSoftmax(dim=1),
        "softshrink": nn.Softshrink(), "softsign": nn.Softsign(), "tanh": nn.Tanh(),
        "tanhshrink": nn.Tanhshrink(), "softmin": nn.Softmin(dim=1), "softmax": nn.Softmax(dim=1),
        "none": None,
    }
    return str_to_activations_converter
Developer ID: p-christ, Project: nn_builder, Lines of code: 12, Source file: Base_Network.py
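
A usage sketch; Net below is a hypothetical stand-in for nn_builder's Base_Network, present only so the method defined above can be called:

import torch
from torch import nn

class Net:
    # hypothetical host class; borrows the method defined above
    create_str_to_activations_converter = create_str_to_activations_converter

converter = Net().create_str_to_activations_converter()
act = converter["softsign"]         # an nn.Softsign() instance
print(act(torch.tensor([2.0])))     # tensor([0.6667])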

Example 3: __init__

# Required import: from torch import nn [as alias]
# Or: from torch.nn import Softsign [as alias]
# Head and DEFAULT_LAYERS_PARAMS are defined elsewhere in the project
def __init__(self, n_heads=3,
             layer_channels=DEFAULT_LAYERS_PARAMS,
             pre_conv_channels=[64, 32, 16, 8, 4],
             pre_residuals=64,
             up_residuals=0,
             post_residuals=3):
    super(CNNVocoder, self).__init__()
    self.head = Head(layer_channels,
                     pre_conv_channels=pre_conv_channels,
                     pre_residuals=pre_residuals, up_residuals=up_residuals,
                     post_residuals=post_residuals)
    self.linear = nn.Linear(layer_channels[-1], 1)
    self.act_fn = nn.Softsign()
Developer ID: tuan3w, Project: cnn_vocoder, Lines of code: 15, Source file: model.py
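
Here Softsign serves as the output activation: it bounds the predicted waveform samples to (-1, 1), the usual range for audio. A minimal sketch of just the output stage (Head is project-specific and omitted; the 16-channel feature tensor is illustrative):

import torch
from torch import nn

linear = nn.Linear(16, 1)             # 16 stands in for layer_channels[-1]
act_fn = nn.Softsign()

features = torch.randn(8, 100, 16)    # (batch, time, channels), made-up shapes
samples = act_fn(linear(features))    # waveform samples bounded to (-1, 1)
print(samples.shape)                  # torch.Size([8, 100, 1])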

Example 4: __init__

# Required import: from torch import nn [as alias]
# Or: from torch.nn import Softsign [as alias]
# Also requires: import torch and import numpy as np
def __init__(self, opt):
    super(RewardModel, self).__init__()
    self.vocab_size = opt.vocab_size
    self.word_embed_dim = 300
    self.feat_size = opt.feat_size
    self.kernel_num = 512
    self.kernels = [2, 3, 4, 5]
    self.out_dim = len(self.kernels) * self.kernel_num + self.word_embed_dim

    self.emb = nn.Embedding(self.vocab_size, self.word_embed_dim)
    self.emb.weight.data.copy_(torch.from_numpy(np.load("VIST/embedding.npy")))

    self.proj = nn.Linear(self.feat_size, self.word_embed_dim)

    # nn.ModuleList (not a plain list) so the conv parameters are registered
    # with the module and follow .to()/.cuda() and .parameters().
    self.convs = nn.ModuleList(
        nn.Conv2d(1, self.kernel_num, (k, self.word_embed_dim)) for k in self.kernels)

    self.dropout = nn.Dropout(opt.dropout)

    self.fc = nn.Linear(self.out_dim, 1, bias=True)

    if opt.activation.lower() == "linear":
        self.activation = None
    elif opt.activation.lower() == "sign":
        self.activation = nn.Softsign()
    elif opt.activation.lower() == "tanh":
        self.activation = nn.Tanh()
Developer ID: eric-xw, Project: AREL, Lines of code: 28, Source file: RewardModel.py
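
The forward pass is not shown here. As a hedged sketch only (not the AREL project's actual code), convolutions with kernel widths [2, 3, 4, 5] over word embeddings are typically combined as conv → ReLU → max-pool over time → concatenate:

import torch
import torch.nn.functional as F
from torch import nn

kernel_num, embed_dim = 512, 300
convs = nn.ModuleList(
    nn.Conv2d(1, kernel_num, (k, embed_dim)) for k in [2, 3, 4, 5])

x = torch.randn(8, 20, embed_dim).unsqueeze(1)           # (batch, 1, seq, embed)
feats = [F.relu(conv(x)).squeeze(3) for conv in convs]   # (batch, 512, seq-k+1) each
pooled = [F.max_pool1d(f, f.size(2)).squeeze(2) for f in feats]  # (batch, 512) each
out = torch.cat(pooled, dim=1)                           # (batch, 4 * 512)
print(out.shape)                                         # torch.Size([8, 2048])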

Example 5: get_activation_fn

# Required import: from torch import nn [as alias]
# Or: from torch.nn import Softsign [as alias]
def get_activation_fn(name):
    """ PyTorch built-in activation functions """

    activation_functions = {
        "linear": lambda: lambda x: x,
        "relu": nn.ReLU,
        "relu6": nn.ReLU6,
        "elu": nn.ELU,
        "prelu": nn.PReLU,
        "leaky_relu": nn.LeakyReLU,
        "threshold": nn.Threshold,
        "hardtanh": nn.Hardtanh,
        "sigmoid": nn.Sigmoid,
        "tanh": nn.Tanh,
        "log_sigmoid": nn.LogSigmoid,
        "softplus": nn.Softplus,
        "softshrink": nn.Softshrink,
        "softsign": nn.Softsign,
        "tanhshrink": nn.Tanhshrink,
    }

    if name not in activation_functions:
        raise ValueError(
            f"'{name}' is not a supported activation function. "
            f"Choose one of: {list(activation_functions)}"
        )

    return activation_functions[name] 
Developer ID: naver, Project: claf, Lines of code: 29, Source file: activation.py
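
Note that, unlike Examples 1 and 2, this function returns the activation class rather than an instance, so it must be instantiated before use. A short usage sketch, assuming get_activation_fn is in scope:

import torch

act_cls = get_activation_fn("softsign")   # nn.Softsign, the class itself
act = act_cls()                           # instantiate it
print(act(torch.tensor([3.0])))           # tensor([0.7500])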


Note: The torch.nn.Softsign method examples in this article were compiled by 純淨天空 from open-source code and documentation platforms such as GitHub and MSDocs. The snippets were selected from open-source projects contributed by many developers; copyright of the source code remains with the original authors. For distribution and use, please refer to each project's license. Do not republish without permission.