This article collects typical usage examples of the torch.erf method in Python. If you are wondering how torch.erf is used in practice, or are looking for concrete examples of torch.erf, the curated code examples below may help. You can also explore further usage examples of the torch module to which this method belongs.
Eleven code examples of the torch.erf method are shown below, sorted by popularity by default. You can upvote the examples you like or find useful; your feedback helps the system recommend better Python code examples.
Example 1: cdf
# Required import: import torch [as alias]
# Or alternatively: from torch import erf [as alias]
def cdf(self, value):
    """
    Evaluate the cumulative distribution function at the value.

    Args:
        value (Variable, tensor): the value at which to evaluate the cdf
    """
    n_samples = value.data.shape[1]
    mean = self.mean
    std = self.log_var.mul(0.5).exp_()
    # unsqueeze the parameters along the sample dimension
    if len(mean.size()) == 2:
        mean = mean.unsqueeze(1).repeat(1, n_samples, 1)
        std = std.unsqueeze(1).repeat(1, n_samples, 1)
    elif len(mean.size()) == 4:
        mean = mean.unsqueeze(1).repeat(1, n_samples, 1, 1, 1)
        std = std.unsqueeze(1).repeat(1, n_samples, 1, 1, 1)
    return (1 + torch.erf((value - mean) / (math.sqrt(2) * std).add(1e-5))).mul_(0.5)
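The erf expression above is the standard closed form of the Gaussian CDF. As a quick sanity check, a minimal standalone sketch (assuming plain mean/std tensors rather than this class's log_var parameterization, and with the small epsilon omitted) can be compared against torch.distributions.Normal.cdf:

import math
import torch

def normal_cdf(value, mean, std):
    # Gaussian CDF via the error function: 0.5 * (1 + erf((x - mu) / (sigma * sqrt(2))))
    return 0.5 * (1 + torch.erf((value - mean) / (math.sqrt(2) * std)))

value = torch.linspace(-2.0, 2.0, 5)
mean, std = torch.tensor(0.0), torch.tensor(1.0)
print(normal_cdf(value, mean, std))
print(torch.distributions.Normal(mean, std).cdf(value))  # should match closely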
Example 2: gelu
# Required import: import torch [as alias]
# Or alternatively: from torch import erf [as alias]
def gelu(x):
    """Implementation of the gelu activation function.
    For information: OpenAI GPT's gelu is slightly different (and gives slightly different results):
    0.5 * x * (1 + torch.tanh(math.sqrt(2 / math.pi) * (x + 0.044715 * torch.pow(x, 3))))
    """
    return x * 0.5 * (1.0 + torch.erf(x / math.sqrt(2.0)))
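The docstring mentions the OpenAI GPT tanh approximation. Both formulas come directly from the example above; the small comparison harness below is illustrative and shows how close the two variants are:

import math
import torch

def gelu_erf(x):
    # exact GELU from the example above
    return x * 0.5 * (1.0 + torch.erf(x / math.sqrt(2.0)))

def gelu_tanh(x):
    # OpenAI GPT-style tanh approximation quoted in the docstring
    return 0.5 * x * (1 + torch.tanh(math.sqrt(2 / math.pi) * (x + 0.044715 * torch.pow(x, 3))))

x = torch.linspace(-3.0, 3.0, 7)
print((gelu_erf(x) - gelu_tanh(x)).abs().max())  # small but nonzero difference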
Example 3: gelu
# Required import: import torch [as alias]
# Or alternatively: from torch import erf [as alias]
def gelu(x):
    return x * 0.5 * (1.0 + torch.erf(x / math.sqrt(2.0)))
Example 4: gelu
# Required import: import torch [as alias]
# Or alternatively: from torch import erf [as alias]
def gelu(x):
    "Implementation of the gelu activation function by Hugging Face"
    return x * 0.5 * (1.0 + torch.erf(x / math.sqrt(2.0)))
Example 5: gelu
# Required import: import torch [as alias]
# Or alternatively: from torch import erf [as alias]
def gelu(x):
    """ Original implementation of the gelu activation function in the Google BERT repo when initially created.
    For information: OpenAI GPT's gelu is slightly different (and gives slightly different results):
    0.5 * x * (1 + torch.tanh(math.sqrt(2 / math.pi) * (x + 0.044715 * torch.pow(x, 3))))
    Also see https://arxiv.org/abs/1606.08415
    """
    return x * 0.5 * (1.0 + torch.erf(x / math.sqrt(2.0)))
Example 6: gelu
# Required import: import torch [as alias]
# Or alternatively: from torch import erf [as alias]
def gelu(x):
    """Implementation of the gelu activation function.
    For information: OpenAI GPT's gelu is slightly different (and gives slightly different results):
    0.5 * x * (1 + torch.tanh(math.sqrt(2 / math.pi) * (x + 0.044715 * torch.pow(x, 3))))
    Also see https://arxiv.org/abs/1606.08415
    """
    return x * 0.5 * (1.0 + torch.erf(x / math.sqrt(2.0)))
Example 7: gelu
# Required import: import torch [as alias]
# Or alternatively: from torch import erf [as alias]
def gelu(x):
    return x * 0.5 * (1.0 + torch.erf(x / math.sqrt(2.0)))

# The paper defines this module as `FFN = max(0, x * W1 + b1) * W2 + b2`,
# but GELU is used here instead of ReLU as the activation.
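The trailing comment describes a position-wise feed-forward block with GELU in place of ReLU. Below is a minimal sketch of such a block; the class name and the layer sizes d_model/d_ff are illustrative assumptions, not taken from the original repository:

import math
import torch
import torch.nn as nn

def gelu(x):
    return x * 0.5 * (1.0 + torch.erf(x / math.sqrt(2.0)))

class PositionwiseFeedForward(nn.Module):
    # Sketch of `FFN = max(0, x * W1 + b1) * W2 + b2`, using GELU instead of ReLU.
    def __init__(self, d_model=512, d_ff=2048):
        super().__init__()
        self.w1 = nn.Linear(d_model, d_ff)
        self.w2 = nn.Linear(d_ff, d_model)

    def forward(self, x):
        return self.w2(gelu(self.w1(x)))

ffn = PositionwiseFeedForward()
print(ffn(torch.randn(2, 10, 512)).shape)  # torch.Size([2, 10, 512])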
Example 8: _F1F2_no_inf
# Required import: import torch [as alias]
# Or alternatively: from torch import erf [as alias]
def _F1F2_no_inf(x: Tensor, y: Tensor) -> Tuple[Tensor, Tensor]:
    if (x.abs() > 3).any() or (y.abs() > 3).any():
        raise RuntimeError("_F1F2_no_inf not stable for inputs with abs(value) > 3")
    numer_1 = torch.exp(-x ** 2) - torch.exp(-y ** 2)
    numer_2 = x * torch.exp(-x ** 2) - y * torch.exp(-y ** 2)
    denom = torch.erf(y) - torch.erf(x)
    F1 = numer_1 / denom
    F2 = numer_2 / denom
    return F1, F2
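A quick usage sketch, assuming the _F1F2_no_inf function above is in scope (together with `from torch import Tensor` and `from typing import Tuple` for its annotations): bounds with absolute value at most 3 pass the stability guard and the two ratios are returned directly.

import torch

x = torch.tensor([-0.5, 0.0, 0.5])
y = torch.tensor([0.5, 1.0, 1.5])
F1, F2 = _F1F2_no_inf(x, y)  # ratios of exp(-x**2)-type differences over erf(y) - erf(x)
print(F1)
print(F2)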
Example 9: gelu
# Required import: import torch [as alias]
# Or alternatively: from torch import erf [as alias]
def gelu(x):
    """ Implementation of the gelu activation function by Hugging Face """
    return x * 0.5 * (1.0 + torch.erf(x / math.sqrt(2.0)))
Example 10: gelu
# Required import: import torch [as alias]
# Or alternatively: from torch import erf [as alias]
def gelu(x):
    """
    GELU activation
    https://arxiv.org/abs/1606.08415
    https://github.com/huggingface/pytorch-openai-transformer-lm/blob/master/model_pytorch.py#L14
    https://github.com/huggingface/pytorch-transformers/blob/master/modeling.py
    """
    # return 0.5 * x * (1 + torch.tanh(math.sqrt(2 / math.pi) * (x + 0.044715 * torch.pow(x, 3))))
    return 0.5 * x * (1.0 + torch.erf(x / math.sqrt(2.0)))
Example 11: _gelu_python
# Required import: import torch [as alias]
# Or alternatively: from torch import erf [as alias]
def _gelu_python(x):
    """ Original implementation of the gelu activation function in the Google BERT repo when initially created.
    For information: OpenAI GPT's gelu is slightly different (and gives slightly different results):
    0.5 * x * (1 + torch.tanh(math.sqrt(2 / math.pi) * (x + 0.044715 * torch.pow(x, 3))))
    This is now written in C in torch.nn.functional
    Also see https://arxiv.org/abs/1606.08415
    """
    return x * 0.5 * (1.0 + torch.erf(x / math.sqrt(2.0)))
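The docstring notes that this activation is now implemented in C in torch.nn.functional. As a hedged check on PyTorch versions where torch.nn.functional.gelu defaults to the exact erf formulation, the Python fallback should agree with the built-in:

import math
import torch
import torch.nn.functional as F

def _gelu_python(x):
    return x * 0.5 * (1.0 + torch.erf(x / math.sqrt(2.0)))

x = torch.linspace(-3.0, 3.0, 7)
print(torch.allclose(_gelu_python(x), F.gelu(x)))  # expected: True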