

Python nn.LogSigmoid Method Code Examples

This article collects typical usage examples of the torch.nn.LogSigmoid method in Python. If you are wondering how nn.LogSigmoid works, how to call it, or what real-world uses look like, the curated examples below may help. You can also explore further usage examples from torch.nn, the module this method belongs to.


The following presents 11 code examples of the nn.LogSigmoid method, sorted by popularity by default.
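
Before the project examples, here is a minimal, self-contained sketch (not drawn from any of the projects below) of what nn.LogSigmoid computes: the element-wise log(sigmoid(x)), fused into one numerically stable operation.

import torch
from torch import nn

log_sigmoid = nn.LogSigmoid()
x = torch.tensor([-200.0, -1.0, 0.0, 1.0, 200.0])

# Element-wise log(sigmoid(x)).
print(log_sigmoid(x))               # ~[-200.0000, -1.3133, -0.6931, -0.3133, -0.0000]

# The fused op is more stable than the naive composition, which
# underflows to log(0) = -inf for very negative inputs.
print(torch.log(torch.sigmoid(x)))  # ~[-inf, -1.3133, -0.6931, -0.3133, 0.0000]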

Example 1: __init__

# Required import: from torch import nn [as alias]
# Or: from torch.nn import LogSigmoid [as alias]
def __init__(self,
                 embed_size,
                 counter,
                 n_negatives,
                 power,
                 device,
                 ignore_index):
        super(NegativeSampling, self).__init__()

        self.counter = counter
        self.n_negatives = n_negatives
        self.power = power
        self.device = device

        self.W = nn.Embedding(num_embeddings=len(counter),
                              embedding_dim=embed_size,
                              padding_idx=ignore_index)
        self.W.weight.data.zero_()
        self.logsigmoid = nn.LogSigmoid()
        self.sampler = WalkerAlias(np.power(counter, power)) 
Developer: shibing624, Project: pycorrector, Lines of code: 22, Source file: loss.py
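
Example 1 shows only the constructor; the project's forward pass is not reproduced on this page. As a rough sketch of how a logsigmoid-based negative-sampling loss is typically computed (word2vec-style scoring; all names below are hypothetical and not taken from pycorrector), it could look like this:

import torch
from torch import nn

def negative_sampling_loss(log_sigmoid: nn.LogSigmoid,
                           embed: nn.Embedding,
                           context: torch.Tensor,     # (batch, embed_dim)
                           targets: torch.Tensor,     # (batch,)
                           negatives: torch.Tensor):  # (batch, n_negatives)
    # Maximize log(sigmoid(context . target)) for true targets and
    # log(sigmoid(-context . negative)) for sampled negatives.
    pos = embed(targets)                                            # (batch, embed_dim)
    neg = embed(negatives)                                          # (batch, n_negatives, embed_dim)
    pos_score = (context * pos).sum(dim=-1)                         # (batch,)
    neg_score = torch.bmm(neg, context.unsqueeze(-1)).squeeze(-1)   # (batch, n_negatives)
    return -(log_sigmoid(pos_score) + log_sigmoid(-neg_score).sum(dim=-1)).mean()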

Example 2: __init__

# Required import: from torch import nn [as alias]
# Or: from torch.nn import LogSigmoid [as alias]
def __init__(self):
        nn.Module.__init__(self)
        self.m = nn.LogSigmoid() 
Developer: THUDM, Project: ScenarioMeta, Lines of code: 5, Source file: loss.py

Example 3: create_str_to_activations_converter

# Required import: from torch import nn [as alias]
# Or: from torch.nn import LogSigmoid [as alias]
def create_str_to_activations_converter(self):
        """Creates a dictionary which converts strings to activations"""
        str_to_activations_converter = {"elu": nn.ELU(), "hardshrink": nn.Hardshrink(), "hardtanh": nn.Hardtanh(),
                                        "leakyrelu": nn.LeakyReLU(), "logsigmoid": nn.LogSigmoid(), "prelu": nn.PReLU(),
                                        "relu": nn.ReLU(), "relu6": nn.ReLU6(), "rrelu": nn.RReLU(), "selu": nn.SELU(),
                                        "sigmoid": nn.Sigmoid(), "softplus": nn.Softplus(), "logsoftmax": nn.LogSoftmax(),
                                        "softshrink": nn.Softshrink(), "softsign": nn.Softsign(), "tanh": nn.Tanh(),
                                        "tanhshrink": nn.Tanhshrink(), "softmin": nn.Softmin(), "softmax": nn.Softmax(dim=1),
                                         "none": None}
        return str_to_activations_converter 
Developer: p-christ, Project: nn_builder, Lines of code: 12, Source file: Base_Network.py
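
The returned dictionary maps lowercase names to ready-made module instances, so callers can look up "logsigmoid" directly. A hypothetical usage sketch (the network object stands in for any class built on Base_Network):

import torch

converter = network.create_str_to_activations_converter()
activation = converter["logsigmoid"]      # an nn.LogSigmoid() instance
y = activation(torch.randn(4))            # element-wise log(sigmoid(x))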

Example 4: __init__

# Required import: from torch import nn [as alias]
# Or: from torch.nn import LogSigmoid [as alias]
def __init__(self):
        super(NegativeSampling, self).__init__()
        self._log_sigmoid = nn.LogSigmoid() 
Developer: inejc, Project: paragraph-vectors, Lines of code: 5, Source file: loss.py

Example 5: __init__

# Required import: from torch import nn [as alias]
# Or: from torch.nn import LogSigmoid [as alias]
def __init__(self, vocab_size, original_hidden_size, num_layers, tau=1):
        super().__init__()
        self.bert_layer = BertLayer(BertConfig(
            vocab_size_or_config_json_file=vocab_size,
            hidden_size=original_hidden_size * num_layers,
        ))
        self.linear_layer = nn.Linear(original_hidden_size * num_layers, 1)
        self.log_sigmoid = nn.LogSigmoid()
        self.tau = tau 
Developer: zphang, Project: bert_on_stilts, Lines of code: 11, Source file: adv_masker.py

Example 6: __init__

# Required import: from torch import nn [as alias]
# Or: from torch.nn import LogSigmoid [as alias]
def __init__(self, weights=None):
        self.weights = weights
        self.logsigmoid = nn.LogSigmoid() 
Developer: Arseha, Project: peakonly, Lines of code: 5, Source file: training.py

Example 7: get_activation

# Required import: from torch import nn [as alias]
# Or: from torch.nn import LogSigmoid [as alias]
def get_activation(name: str = "relu") -> nn.Module:
    """Get back an `nn.Module` by string name of the activation operator

    :param name: A string name of the operation
    :return: A module associated with that string
    """
    if name is None or name == "ident":
        return nn.Identity()
    if name == "tanh":
        return nn.Tanh()
    if name == "gelu":
        return GeLU()
    if name == "hardtanh":
        return nn.Hardtanh()
    if name == "leaky_relu":
        return nn.LeakyReLU()
    if name == "prelu":
        return nn.PReLU()
    if name == "sigmoid":
        return nn.Sigmoid()
    if name == "log_sigmoid":
        return nn.LogSigmoid()
    if name == "log_softmax":
        return nn.LogSoftmax(dim=-1)
    if name == "softmax":
        return nn.Softmax(dim=-1)
    return nn.ReLU() 
Developer: dpressel, Project: mead-baseline, Lines of code: 29, Source file: layers.py
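
Note that get_activation returns an already-instantiated module and falls back to nn.ReLU() for unrecognized names. A short usage sketch:

import torch
from torch import nn

act = get_activation("log_sigmoid")       # returns nn.LogSigmoid()
y = act(torch.randn(2, 3))                # element-wise log(sigmoid(x))
assert isinstance(get_activation("no_such_name"), nn.ReLU)  # fallback behaviour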

Example 8: __init__

# Required import: from torch import nn [as alias]
# Or: from torch.nn import LogSigmoid [as alias]
def __init__(self, vocab_size, emb_dim):
        super(SkipGramNeg, self).__init__()
        self.input_emb = nn.Embedding(vocab_size, emb_dim)
        self.output_emb = nn.Embedding(vocab_size, emb_dim)
        self.log_sigmoid = nn.LogSigmoid()

        initrange = (2.0 / (vocab_size + emb_dim)) ** 0.5  # Xavier init
        self.input_emb.weight.data.uniform_(-initrange, initrange)
        self.output_emb.weight.data.uniform_(-0, 0)  # zero-initialize output embeddings 
Developer: blackredscarf, Project: pytorch-SkipGram, Lines of code: 11, Source file: model.py

Example 9: get_activation_fn

# Required import: from torch import nn [as alias]
# Or: from torch.nn import LogSigmoid [as alias]
def get_activation_fn(name):
    """ PyTorch built-in activation functions """

    activation_functions = {
        "linear": lambda: lambda x: x,
        "relu": nn.ReLU,
        "relu6": nn.ReLU6,
        "elu": nn.ELU,
        "prelu": nn.PReLU,
        "leaky_relu": nn.LeakyReLU,
        "threshold": nn.Threshold,
        "hardtanh": nn.Hardtanh,
        "sigmoid": nn.Sigmoid,
        "tanh": nn.Tanh,
        "log_sigmoid": nn.LogSigmoid,
        "softplus": nn.Softplus,
        "softshrink": nn.Softshrink,
        "softsign": nn.Softsign,
        "tanhshrink": nn.Tanhshrink,
    }

    if name not in activation_functions:
        raise ValueError(
            f"'{name}' is not included in activation_functions. use below one. \n {activation_functions.keys()}"
        )

    return activation_functions[name] 
Developer: naver, Project: claf, Lines of code: 29, Source file: activation.py
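
Unlike Example 7, this helper returns the activation class itself rather than an instance, so the caller instantiates it; unknown names raise a ValueError listing the supported keys. A short usage sketch:

import torch

activation_cls = get_activation_fn("log_sigmoid")  # the nn.LogSigmoid class, not an instance
activation = activation_cls()                      # instantiate before use
y = activation(torch.randn(5))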

Example 10: __init__

# Required import: from torch import nn [as alias]
# Or: from torch.nn import LogSigmoid [as alias]
def __init__(self):
        super(LayerLogSigmoidTest, self).__init__()
        self.sig = nn.LogSigmoid() 
Developer: nerox8664, Project: onnx2keras, Lines of code: 5, Source file: log_sigmoid.py

Example 11: __init__

# Required import: from torch import nn [as alias]
# Or: from torch.nn import LogSigmoid [as alias]
def __init__(self, num_input, num_hidden, num_context):
    super().__init__()
    self.made = MADE(num_input=num_input, num_output=num_input * 2,
                     num_hidden=num_hidden, num_context=num_context)
    # init such that sigmoid(s) is close to 1 for stability
    self.sigmoid_arg_bias = nn.Parameter(torch.ones(num_input) * 2)
    self.sigmoid = nn.Sigmoid()
    self.log_sigmoid = nn.LogSigmoid() 
Developer: altosaar, Project: variational-autoencoder, Lines of code: 10, Source file: flow.py
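
In flows of this kind, log_sigmoid is typically used for the log-determinant term of a sigmoid-gated (IAF-style) update, where each dimension contributes log(sigmoid(s)). The sketch below illustrates that pattern; it is an inference from the constructor and its stability comment, not the project's actual forward code.

import torch
from torch import nn

log_sigmoid = nn.LogSigmoid()

def gated_flow_step(x, m, s):
    # Hypothetical IAF-style update: z = sigmoid(s) * x + (1 - sigmoid(s)) * m.
    gate = torch.sigmoid(s)
    z = gate * x + (1.0 - gate) * m
    # The log-Jacobian of this element-wise transform is sum(log(sigmoid(s))),
    # computed stably with LogSigmoid.
    log_det = log_sigmoid(s).sum(dim=-1)
    return z, log_det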


Note: The torch.nn.LogSigmoid method examples in this article were compiled by 純淨天空 from open-source code and documentation platforms such as GitHub and MSDocs. The code snippets are selected from open-source projects contributed by many developers, and copyright of the source code remains with the original authors. For distribution and use, please refer to the corresponding project's license; do not republish without permission.