This article collects typical usage examples of the Python method chainer.functions.softplus. If you have been wondering what functions.softplus does, how to call it, or where to find working examples, the curated code samples below may help. You can also explore further usage examples from chainer.functions, the module this method belongs to.
The following presents 11 code examples of functions.softplus, sorted by popularity by default. You can upvote the examples you like or find useful; your feedback helps the system recommend better Python code examples.
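Before the examples, a minimal sketch of what chainer.functions.softplus itself computes, namely (1/beta) * log(1 + exp(beta * x)), a smooth approximation of ReLU (the input values below are made up for illustration):

import numpy as np
import chainer.functions as F

x = np.array([-2.0, 0.0, 2.0], dtype=np.float32)
y = F.softplus(x, beta=1.0)  # elementwise (1/beta) * log(1 + exp(beta * x))
print(y.array)               # ~ [0.127, 0.693, 2.127]; note softplus(0) = log 2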
Example 1: shifted_softplus
# Required import: from chainer import functions [as an alias]
# Or: from chainer.functions import softplus [as an alias]
def shifted_softplus(x, beta=1, shift=0.5, threshold=20):
    """Shifted softplus function, which satisfies f(0) = 0.

    Args:
        x (Variable): Input variable.
        beta (float): Parameter :math:`\\beta`.
        shift (float): Shift parameter.
        threshold (float): Threshold above which ``x`` is returned
            directly, to avoid overflow.

    Returns:
        output (Variable): Output variable with the same shape as ``x``.
    """
    xp = chainer.cuda.get_array_module(x)
    # For large inputs softplus(x) ~ x, so bypass it to avoid overflow.
    cond = chainer.as_variable(x).array > threshold
    x = functions.where(cond, x,
                        functions.softplus(x, beta=beta))
    # log(shift) = log(0.5) cancels softplus(0) = log(2), so f(0) = 0.
    x += xp.log(shift)
    return x
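A quick sanity check of the f(0) = 0 property promised by the docstring, assuming the shifted_softplus definition above (with its chainer and functions imports) is in scope:

import numpy as np

x = np.zeros(3, dtype=np.float32)
y = shifted_softplus(x)  # softplus(0) + log(0.5) = log(2) - log(2) = 0
print(y.array)           # ~ [0. 0. 0.]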
Example 2: __init__
# Required import: from chainer import functions [as an alias]
# Or: from chainer.functions import softplus [as an alias]
def __init__(self, n_actions, n_input_channels=4, activation=F.relu,
             bias=0.1, var_param_init=0,  # var_func=F.softplus,
             hiddens=None):
    self.n_input_channels = n_input_channels
    self.activation = activation
    self.hiddens = [512] if hiddens is None else hiddens
    # self.var_func = var_func
    super(ActorTRPONetForContinuous, self).__init__()
    with self.init_scope():
        self.conv_layers = chainer.ChainList(
            L.Convolution2D(n_input_channels, 32, 8, stride=4,
                            initial_bias=bias),
            L.Convolution2D(32, 64, 4, stride=2, initial_bias=bias),
            L.Convolution2D(64, 64, 3, stride=1, initial_bias=bias))
        self.a_stream = chainerrl.links.mlp.MLP(None, n_actions, self.hiddens)
        self.var_param = chainer.Parameter(initializer=var_param_init,
                                           shape=(1,))
        # self.var_param = chainer.Parameter(
        #     initializer=var_param_init, shape=(n_actions,))  # independent
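The commented-out var_func=F.softplus hints at how var_param is typically turned into a positive variance elsewhere in the class. A minimal sketch of that pattern (the snippet below is illustrative, not part of the class):

import chainer
import chainer.functions as F

var_param = chainer.Parameter(initializer=0.0, shape=(1,))
var = F.softplus(var_param)  # maps any real-valued parameter to a positive variance
print(var.array)             # ~ [0.693], i.e. softplus(0) = log 2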
Example 3: _tanh_forward_log_det_jacobian
# Required import: from chainer import functions [as an alias]
# Or: from chainer.functions import softplus [as an alias]
def _tanh_forward_log_det_jacobian(x):
    """Compute log|det(dy/dx)|, except the summation over dimensions, where y = tanh(x)."""
    # For the derivation of this formula, see:
    # https://github.com/tensorflow/probability/blob/master/tensorflow_probability/python/bijectors/tanh.py  # NOQA
    return 2. * (np.log(2.) - x - F.softplus(-2. * x))
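The returned expression is a numerically stable rewrite of log(1 - tanh(x)^2): since 1 - tanh(x)^2 = 4 * exp(-2x) / (1 + exp(-2x))^2, taking logs gives 2 * (log 2 - x - softplus(-2x)). A small check of the identity (inputs made up):

import numpy as np
import chainer.functions as F

x = np.array([-3.0, 0.0, 3.0], dtype=np.float32)
stable = (2. * (np.log(2.) - x - F.softplus(-2. * x))).array
naive = np.log(1. - np.tanh(x) ** 2)
print(np.allclose(stable, naive, atol=1e-5))  # True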
Example 4: compute_mean_and_var
# Required import: from chainer import functions [as an alias]
# Or: from chainer.functions import softplus [as an alias]
def compute_mean_and_var(self, x):
    h = x
    for layer in self.hidden_layers:
        h = self.nonlinearity(layer(h))
    mean = self.mean_layer(h)
    if self.bound_mean:
        mean = bound_by_tanh(mean, self.min_action, self.max_action)
    # softplus keeps the variance strictly positive; min_var sets a floor.
    var = F.broadcast_to(F.softplus(self.var_layer(h)), mean.shape) + \
        self.min_var
    return mean, var
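Why softplus here: it maps the unconstrained output of var_layer to a strictly positive value, and adding min_var enforces a hard floor. A toy illustration (the raw values and the 0.01 floor are made up):

import numpy as np
import chainer.functions as F

raw = np.array([[-5.0, 0.0, 5.0]], dtype=np.float32)  # pretend var_layer output
min_var = 0.01
var = F.softplus(raw) + min_var
print(var.array)  # ~ [[0.0167, 0.7031, 5.0167]]; every entry >= min_var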
Example 5: __init__
# Required import: from chainer import functions [as an alias]
# Or: from chainer.functions import softplus [as an alias]
def __init__(self, n_input_channels, action_size,
             n_hidden_layers=0, n_hidden_channels=None,
             min_action=None, max_action=None, bound_mean=False,
             var_type='spherical',
             nonlinearity=F.relu,
             mean_wscale=1,
             var_func=F.softplus,
             var_param_init=0,
             ):
    self.n_input_channels = n_input_channels
    self.action_size = action_size
    self.n_hidden_layers = n_hidden_layers
    self.n_hidden_channels = n_hidden_channels
    self.min_action = min_action
    self.max_action = max_action
    self.bound_mean = bound_mean
    self.nonlinearity = nonlinearity
    self.var_func = var_func
    # 'spherical' shares one variance across dimensions;
    # 'diagonal' learns one variance per action dimension.
    var_size = {'spherical': 1, 'diagonal': action_size}[var_type]
    layers = []
    layers.append(L.Linear(n_input_channels, n_hidden_channels))
    for _ in range(n_hidden_layers - 1):
        layers.append(self.nonlinearity)
        layers.append(L.Linear(n_hidden_channels, n_hidden_channels))
    layers.append(self.nonlinearity)
    # The last layer is used to compute the mean
    layers.append(
        L.Linear(n_hidden_channels, action_size,
                 initialW=LeCunNormal(mean_wscale)))
    if self.bound_mean:
        layers.append(lambda x: bound_by_tanh(
            x, self.min_action, self.max_action))
    super().__init__()
    with self.init_scope():
        self.hidden_layers = links.Sequence(*layers)
        self.var_param = chainer.Parameter(
            initializer=var_param_init, shape=(var_size,))
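How the two var_type options differ in practice: a 'spherical' head learns one scalar that is broadcast across all action dimensions, while 'diagonal' learns one value per dimension. A shape-only sketch of the broadcast (the sizes below are hypothetical):

import chainer
import chainer.functions as F

action_size = 3
spherical = chainer.Parameter(initializer=0.0, shape=(1,))
diagonal = chainer.Parameter(initializer=0.0, shape=(action_size,))
var_s = F.broadcast_to(F.softplus(spherical), (1, action_size))
var_d = F.broadcast_to(F.softplus(diagonal), (1, action_size))
print(var_s.shape, var_d.shape)  # (1, 3) (1, 3)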
Example 6: compute_mean_and_var
# Required import: from chainer import functions [as an alias]
# Or: from chainer.functions import softplus [as an alias]
def compute_mean_and_var(self, x):
    # mean = F.relu(self.mean_layer_1(x))
    # mean = self.bn_mean(mean)
    mean = self.mean_layer_2(x)
    # var = F.relu(self.var_layer_1(x))
    # var = self.bn_var(var)
    var = F.softplus(self.var_layer_2(x))
    return mean, var
Example 7: forward
# Required import: from chainer import functions [as an alias]
# Or: from chainer.functions import softplus [as an alias]
def forward(self, inputs, device):
    x, = inputs
    return functions.softplus(x, beta=self.beta),
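Example 7 looks like the forward method of a Chainer function test case, where the output is compared against softplus's definition. The definitional check can be written directly (beta and the inputs below are made up):

import numpy as np
import chainer.functions as F

beta = 2.0
x = np.array([-1.0, 0.0, 1.0], dtype=np.float32)
y = F.softplus(x, beta=beta)
expected = np.log(1. + np.exp(beta * x)) / beta
print(np.allclose(y.array, expected, atol=1e-5))  # True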
Example 8: _encode
# Required import: from chainer import functions [as an alias]
# Or: from chainer.functions import softplus [as an alias]
def _encode(self, xs):
    exs = self.embed_mat(xs)
    h = F.tanh(self.l1(exs))
    logits = F.softplus(self.l2(h))
    # Small epsilon keeps the log finite when softplus is close to zero.
    logits = F.log(logits + 1e-10).reshape(-1, self.M, self.K)
    return logits, exs
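The softplus-then-log pipeline in _encode yields finite logits for M codebooks of K codes each. A standalone sketch of just that transformation (the layer and sizes here are hypothetical, not the model's own):

import numpy as np
import chainer
import chainer.functions as F
import chainer.links as L

M, K, H = 4, 8, 16
l2 = L.Linear(H, M * K)
h = chainer.Variable(np.random.randn(2, H).astype(np.float32))
logits = F.log(F.softplus(l2(h)) + 1e-10)
logits = F.reshape(logits, (-1, M, K))
print(logits.shape)  # (2, 4, 8)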
Example 9: loss_func_dcgan_dis_real
# Required import: from chainer import functions [as an alias]
# Or: from chainer.functions import softplus [as an alias]
def loss_func_dcgan_dis_real(y_real):
    # softplus(-y) = -log(sigmoid(y)): discriminator loss on real samples.
    return F.sum(F.softplus(-y_real)) / np.prod(y_real.data.shape)
Example 10: loss_func_dcgan_dis_fake
# Required import: from chainer import functions [as an alias]
# Or: from chainer.functions import softplus [as an alias]
def loss_func_dcgan_dis_fake(y_fake):
    # softplus(y) = -log(1 - sigmoid(y)): discriminator loss on fake samples.
    return F.sum(F.softplus(y_fake)) / np.prod(y_fake.data.shape)
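Examples 9 and 10 are the softplus form of the standard GAN discriminator loss: softplus(-y) = -log(sigmoid(y)) for real samples and softplus(y) = -log(1 - sigmoid(y)) for fake ones. A quick numeric confirmation of both identities (logits made up):

import numpy as np
import chainer.functions as F

y = np.array([-2.0, 0.5, 3.0], dtype=np.float32)
sig = 1. / (1. + np.exp(-y))
print(np.allclose(F.softplus(-y).array, -np.log(sig), atol=1e-5))      # True
print(np.allclose(F.softplus(y).array, -np.log(1. - sig), atol=1e-5))  # True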
Example 11: loss_sigmoid_cross_entropy_with_logits
# Required import: from chainer import functions [as an alias]
# Or: from chainer.functions import softplus [as an alias]
def loss_sigmoid_cross_entropy_with_logits(x, t):
    return F.average(x - x * t + F.softplus(-x))  # / x.data.shape[0]
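Example 11 relies on the identity x - x*t + softplus(-x) = -(t * log(sigmoid(x)) + (1 - t) * log(1 - sigmoid(x))), the elementwise sigmoid cross entropy with logits; F.average then reduces it to a scalar. A numeric check (logits and targets made up):

import numpy as np
import chainer.functions as F

x = np.array([-1.0, 0.0, 2.0], dtype=np.float32)
t = np.array([1.0, 0.0, 1.0], dtype=np.float32)
lhs = (x - x * t + F.softplus(-x)).array
sig = 1. / (1. + np.exp(-x))
rhs = -(t * np.log(sig) + (1. - t) * np.log(1. - sig))
print(np.allclose(lhs, rhs, atol=1e-5))  # True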