This article collects typical usage examples of the nn.Softshrink method from Python's torch.nn module. If you are wondering what nn.Softshrink does, how to call it, or what it looks like in real code, the curated examples below may help. You can also explore further usage of its parent package, torch.nn.
Two code examples of nn.Softshrink are shown below, sorted by popularity by default.
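Before the two examples, here is a minimal standalone sketch (an addition for illustration, not one of the collected examples) of what nn.Softshrink itself computes:

import torch
from torch import nn

# Softshrink(x) = x - lambd if x > lambd; x + lambd if x < -lambd; 0 otherwise.
softshrink = nn.Softshrink(lambd=0.5)  # lambd defaults to 0.5
x = torch.tensor([-1.0, -0.3, 0.0, 0.3, 1.0])
print(softshrink(x))  # tensor([-0.5000, 0.0000, 0.0000, 0.0000, 0.5000])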
Example 1: create_str_to_activations_converter
# Required import: from torch import nn [as alias]
# Or: from torch.nn import Softshrink [as alias]
def create_str_to_activations_converter(self):
    """Creates a dictionary which converts strings to activations"""
    str_to_activations_converter = {"elu": nn.ELU(), "hardshrink": nn.Hardshrink(), "hardtanh": nn.Hardtanh(),
                                    "leakyrelu": nn.LeakyReLU(), "logsigmoid": nn.LogSigmoid(), "prelu": nn.PReLU(),
                                    "relu": nn.ReLU(), "relu6": nn.ReLU6(), "rrelu": nn.RReLU(), "selu": nn.SELU(),
                                    "sigmoid": nn.Sigmoid(), "softplus": nn.Softplus(), "logsoftmax": nn.LogSoftmax(),
                                    "softshrink": nn.Softshrink(), "softsign": nn.Softsign(), "tanh": nn.Tanh(),
                                    "tanhshrink": nn.Tanhshrink(), "softmin": nn.Softmin(), "softmax": nn.Softmax(dim=1),
                                    "none": None}
    return str_to_activations_converter
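A brief usage sketch for Example 1 (an addition, not from the source). Since self is never used in the body, the function can be exercised on its own:

import torch

converter = create_str_to_activations_converter(None)  # self is unused in the body
act = converter["softshrink"]               # an nn.Softshrink() instance
print(act(torch.tensor([-1.0, 0.2, 1.0])))  # tensor([-0.5000, 0.0000, 0.5000])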
Example 2: get_activation_fn
# Required import: from torch import nn [as alias]
# Or: from torch.nn import Softshrink [as alias]
def get_activation_fn(name):
    """PyTorch built-in activation functions"""
    activation_functions = {
        "linear": lambda: lambda x: x,
        "relu": nn.ReLU,
        "relu6": nn.ReLU6,
        "elu": nn.ELU,
        "prelu": nn.PReLU,
        "leaky_relu": nn.LeakyReLU,
        "threshold": nn.Threshold,
        "hardtanh": nn.Hardtanh,
        "sigmoid": nn.Sigmoid,
        "tanh": nn.Tanh,
        "log_sigmoid": nn.LogSigmoid,
        "softplus": nn.Softplus,
        "softshrink": nn.Softshrink,
        "softsign": nn.Softsign,
        "tanhshrink": nn.Tanhshrink,
    }
    if name not in activation_functions:
        raise ValueError(
            f"'{name}' is not included in activation_functions. Use one of the following:\n{activation_functions.keys()}"
        )
    return activation_functions[name]
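A short usage sketch for Example 2 (an addition, not from the source). Note that the lookup returns the activation class (or factory), not an instance, so the result still needs to be called:

import torch

act = get_activation_fn("softshrink")()   # nn.Softshrink, instantiated with default lambd=0.5
print(act(torch.tensor([-1.0, 1.0])))     # tensor([-0.5000, 0.5000])

identity = get_activation_fn("linear")()  # "linear" maps to a factory for the identity function
print(identity(torch.tensor([3.0])))      # tensor([3.])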