This article collects and summarizes typical usage examples of the torch.erf method in Python. If you have been wondering what exactly torch.erf does, how to call it, or what torch.erf looks like in real code, the curated examples here may help. You can also explore further usage examples of the containing module, torch.
The following presents 11 code examples of the torch.erf method, sorted by popularity by default.
Example 1: cdf
# Required imports: import torch [as alias]
# Or: from torch import erf [as alias]
def cdf(self, value):
    """
    Evaluate the cumulative distribution function at the value.

    Args:
        value (Variable, tensor): the value at which to evaluate the cdf
    """
    n_samples = value.data.shape[1]
    mean = self.mean
    std = self.log_var.mul(0.5).exp_()
    # unsqueeze the parameters along the sample dimension
    if len(mean.size()) == 2:
        mean = mean.unsqueeze(1).repeat(1, n_samples, 1)
        std = std.unsqueeze(1).repeat(1, n_samples, 1)
    elif len(mean.size()) == 4:
        mean = mean.unsqueeze(1).repeat(1, n_samples, 1, 1, 1)
        std = std.unsqueeze(1).repeat(1, n_samples, 1, 1, 1)
    return (1 + torch.erf((value - mean) / (math.sqrt(2) * std).add(1e-5))).mul_(0.5)
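For reference, the method above implements the standard Gaussian CDF, Phi(x) = 0.5 * (1 + erf((x - mean) / (std * sqrt(2)))), with a small epsilon on the denominator for numerical stability. A minimal self-contained sketch of the same computation outside the class (the helper name and values here are illustrative, not from the original source):

import math
import torch

def gaussian_cdf(value, mean, std, eps=1e-5):
    # Gaussian CDF via the error function (illustrative helper)
    return 0.5 * (1 + torch.erf((value - mean) / (math.sqrt(2) * std + eps)))

x = torch.linspace(-3, 3, steps=7)
print(gaussian_cdf(x, mean=0.0, std=1.0))  # roughly [0.001, 0.023, 0.159, 0.5, 0.841, 0.977, 0.999]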
Example 2: gelu
# Required imports: import torch [as alias]
# Or: from torch import erf [as alias]
import math
import torch

def gelu(x):
    """Implementation of the gelu activation function.

    For information: OpenAI GPT's gelu is slightly different (and gives slightly different results):
    0.5 * x * (1 + torch.tanh(math.sqrt(2 / math.pi) * (x + 0.044715 * torch.pow(x, 3))))
    """
    return x * 0.5 * (1.0 + torch.erf(x / math.sqrt(2.0)))
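To make the "slightly different results" in the docstring concrete, the exact erf-based GELU and the tanh approximation can be compared directly; a quick sketch (function names here are illustrative):

import math
import torch

def gelu_erf(x):
    # exact GELU, as in the example above
    return x * 0.5 * (1.0 + torch.erf(x / math.sqrt(2.0)))

def gelu_tanh(x):
    # the OpenAI GPT / approximate form quoted in the docstring above
    return 0.5 * x * (1 + torch.tanh(math.sqrt(2 / math.pi) * (x + 0.044715 * torch.pow(x, 3))))

x = torch.linspace(-4, 4, steps=9)
print((gelu_erf(x) - gelu_tanh(x)).abs().max())  # small but nonzero difference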
Example 3: gelu
# Required imports: import torch [as alias]
# Or: from torch import erf [as alias]
import math
import torch

def gelu(x):
    return x * 0.5 * (1.0 + torch.erf(x / math.sqrt(2.0)))
Example 4: gelu
# Required imports: import torch [as alias]
# Or: from torch import erf [as alias]
import math
import torch

def gelu(x):
    """Implementation of the gelu activation function by Hugging Face."""
    return x * 0.5 * (1.0 + torch.erf(x / math.sqrt(2.0)))
Example 5: gelu
# Required imports: import torch [as alias]
# Or: from torch import erf [as alias]
import math
import torch

def gelu(x):
    """Original implementation of the gelu activation function in the Google BERT repo when initially created.

    For information: OpenAI GPT's gelu is slightly different (and gives slightly different results):
    0.5 * x * (1 + torch.tanh(math.sqrt(2 / math.pi) * (x + 0.044715 * torch.pow(x, 3))))
    Also see https://arxiv.org/abs/1606.08415
    """
    return x * 0.5 * (1.0 + torch.erf(x / math.sqrt(2.0)))
Example 6: gelu
# Required imports: import torch [as alias]
# Or: from torch import erf [as alias]
import math
import torch

def gelu(x):
    """Implementation of the gelu activation function.

    For information: OpenAI GPT's gelu is slightly different (and gives slightly different results):
    0.5 * x * (1 + torch.tanh(math.sqrt(2 / math.pi) * (x + 0.044715 * torch.pow(x, 3))))
    Also see https://arxiv.org/abs/1606.08415
    """
    return x * 0.5 * (1.0 + torch.erf(x / math.sqrt(2.0)))
Example 7: gelu
# Required imports: import torch [as alias]
# Or: from torch import erf [as alias]
import math
import torch

def gelu(x):
    return x * 0.5 * (1.0 + torch.erf(x / math.sqrt(2.0)))

# The paper defines this module as `FFN = max(0, x * W1 + b1) * W2 + b2`,
# but gelu is used here instead of relu as the activation; see the sketch below.
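A minimal sketch of such a position-wise feed-forward block with gelu as the activation, reusing the gelu function defined above (the class name and dimensions are illustrative, not from the original source):

import torch.nn as nn

class PositionwiseFeedForward(nn.Module):
    """Illustrative FFN block: w_2(gelu(w_1(x)))."""
    def __init__(self, d_model=512, d_ff=2048):
        super().__init__()
        self.w_1 = nn.Linear(d_model, d_ff)
        self.w_2 = nn.Linear(d_ff, d_model)

    def forward(self, x):
        # gelu replaces the relu of the original FFN formulation
        return self.w_2(gelu(self.w_1(x)))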
Example 8: _F1F2_no_inf
# Required imports: import torch [as alias]
# Or: from torch import erf [as alias]
from typing import Tuple

import torch
from torch import Tensor

def _F1F2_no_inf(x: Tensor, y: Tensor) -> Tuple[Tensor, Tensor]:
    if (x.abs() > 3).any() or (y.abs() > 3).any():
        raise RuntimeError("_F1F2_no_inf not stable for inputs with abs(value) > 3")
    numer_1 = torch.exp(-x ** 2) - torch.exp(-y ** 2)
    numer_2 = x * torch.exp(-x ** 2) - y * torch.exp(-y ** 2)
    denom = torch.erf(y) - torch.erf(x)
    F1 = numer_1 / denom
    F2 = numer_2 / denom
    return F1, F2
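This helper also assumes that x and y differ elementwise, since erf(y) - erf(x) appears in the denominator; ratios of this form show up, for example, in truncated-normal moment formulas, though the surrounding code is not shown here. A quick call, just to illustrate the inputs and outputs (values are illustrative):

x = torch.tensor([-1.0, 0.0, 0.5])
y = torch.tensor([0.5, 1.0, 2.0])  # stays within abs(value) <= 3 and differs from x elementwise
F1, F2 = _F1F2_no_inf(x, y)
print(F1, F2)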
Example 9: gelu
# Required imports: import torch [as alias]
# Or: from torch import erf [as alias]
import math
import torch

def gelu(x):
    """Implementation of the gelu activation function by Hugging Face."""
    return x * 0.5 * (1.0 + torch.erf(x / math.sqrt(2.0)))
Example 10: gelu
# Required imports: import torch [as alias]
# Or: from torch import erf [as alias]
import math
import torch

def gelu(x):
    """
    GELU activation
    https://arxiv.org/abs/1606.08415
    https://github.com/huggingface/pytorch-openai-transformer-lm/blob/master/model_pytorch.py#L14
    https://github.com/huggingface/pytorch-transformers/blob/master/modeling.py
    """
    # return 0.5 * x * (1 + torch.tanh(math.sqrt(2 / math.pi) * (x + 0.044715 * torch.pow(x, 3))))
    return 0.5 * x * (1.0 + torch.erf(x / math.sqrt(2.0)))
Example 11: _gelu_python
# Required imports: import torch [as alias]
# Or: from torch import erf [as alias]
import math
import torch

def _gelu_python(x):
    """Original implementation of the gelu activation function in the Google BERT repo when initially created.

    For information: OpenAI GPT's gelu is slightly different (and gives slightly different results):
    0.5 * x * (1 + torch.tanh(math.sqrt(2 / math.pi) * (x + 0.044715 * torch.pow(x, 3))))
    This is now written in C in torch.nn.functional.
    Also see https://arxiv.org/abs/1606.08415
    """
    return x * 0.5 * (1.0 + torch.erf(x / math.sqrt(2.0)))
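As the docstring notes, recent PyTorch versions expose this activation as torch.nn.functional.gelu; a quick check that the pure-Python version above matches the built-in (assuming a PyTorch version that provides F.gelu):

import torch.nn.functional as F

x = torch.randn(1000)
# F.gelu defaults to the exact erf-based GELU, so the two should agree to float precision
assert torch.allclose(_gelu_python(x), F.gelu(x), atol=1e-6)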