This article collects typical usage examples of the chainer.functions.sigmoid method in Python: what functions.sigmoid does, how to call it, and how it is used in practice. The curated examples below should answer those questions; you can also look further into the containing module, chainer.functions, for related usage examples.
The sections below present 15 code examples of the functions.sigmoid method, sorted by popularity by default.
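Before the examples from real projects, here is a minimal, self-contained sketch of what chainer.functions.sigmoid does: it applies the element-wise logistic function 1 / (1 + exp(-x)) to its input and returns a chainer.Variable. The input values below are only illustrative.

import numpy as np
import chainer.functions as F

x = np.array([[-1.0, 0.0, 1.0]], dtype=np.float32)
y = F.sigmoid(x)    # element-wise 1 / (1 + exp(-x))
print(y.array)      # approximately [[0.269, 0.5, 0.731]]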
Example 1: __call__
# Required import: from chainer import functions [as alias]
# Or: from chainer.functions import sigmoid [as alias]
def __call__(self, prev_hg, prev_he, prev_ce, x, v, r, u):
    xu = cf.concat((x, u), axis=1)
    xu = self.downsample_xu(xu)
    v = self.broadcast_v(v)
    if r.shape[2] == 1:
        r = self.broadcast_r(r)
    lstm_input = cf.concat((prev_he, prev_hg, xu, v, r), axis=1)
    gate_inputs = self.lstm(lstm_input)
    if self.use_cuda_kernel:
        next_h, next_c = CoreFunction()(gate_inputs, prev_ce)
    else:
        forget_gate_input, input_gate_input, tanh_input, output_gate_input = cf.split_axis(
            gate_inputs, 4, axis=1)
        forget_gate = cf.sigmoid(forget_gate_input)
        input_gate = cf.sigmoid(input_gate_input)
        next_c = forget_gate * prev_ce + input_gate * cf.tanh(tanh_input)
        output_gate = cf.sigmoid(output_gate_input)
        next_h = output_gate * cf.tanh(next_c)
    return next_h, next_c
Example 2: __call__
# Required import: from chainer import functions [as alias]
# Or: from chainer.functions import sigmoid [as alias]
def __call__(self, prev_hg, prev_cg, prev_z, v, r, prev_u):
    v = self.broadcast_v(v)
    if r.shape[2] == 1:
        r = self.broadcast_r(r)
    lstm_input = cf.concat((prev_hg, v, r, prev_z), axis=1)
    gate_inputs = self.lstm(lstm_input)
    forget_gate_input, input_gate_input, tanh_input, output_gate_input = cf.split_axis(
        gate_inputs, 4, axis=1)
    forget_gate = cf.sigmoid(forget_gate_input)
    input_gate = cf.sigmoid(input_gate_input)
    next_c = forget_gate * prev_cg + input_gate * cf.tanh(tanh_input)
    output_gate = cf.sigmoid(output_gate_input)
    next_h = output_gate * cf.tanh(next_c)
    next_u = self.upsample_h(next_h) + prev_u
    return next_h, next_c, next_u
Example 3: faster_call2
# Required import: from chainer import functions [as alias]
# Or: from chainer.functions import sigmoid [as alias]
def faster_call2(self, h, x):
    r_z_h_x = self.W_r_z_h(x)
    r_z_h = self.U_r_z(h)
    r_x, z_x, h_x = split_axis(r_z_h_x, (self.n_units, self.n_units * 2), axis=1)
    assert r_x.data.shape[1] == self.n_units
    assert z_x.data.shape[1] == self.n_units
    assert h_x.data.shape[1] == self.n_units
    r_h, z_h = split_axis(r_z_h, (self.n_units,), axis=1)
    # r = sigmoid.sigmoid(r_x + r_h)
    # z = sigmoid.sigmoid(z_x + z_h)
    # h_bar = tanh.tanh(h_x + self.U(sigm_a_plus_b_by_h(r_x, r_h, h)))
    # h_new = (1 - z) * h + z * h_bar
    # return h_new
    return compute_output_GRU(z_x, z_h, h_x, h, self.U(sigm_a_plus_b_by_h_fast(r_x, r_h, h)))
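The commented-out lines above spell out the reference GRU update that the fused helpers (compute_output_GRU, sigm_a_plus_b_by_h_fast) replace. For comparison, here is a minimal sketch of that reference update written with plain chainer calls; the class and link names are illustrative, not taken from the original repository.

import numpy as np
import chainer
import chainer.functions as F
import chainer.links as L

class PlainGRU(chainer.Chain):
    # Unfused reference GRU step: two sigmoid gates plus a tanh candidate.
    def __init__(self, n_units):
        super(PlainGRU, self).__init__()
        with self.init_scope():
            self.W_r = L.Linear(n_units, n_units)
            self.W_z = L.Linear(n_units, n_units)
            self.W_h = L.Linear(n_units, n_units)
            self.U_r = L.Linear(n_units, n_units)
            self.U_z = L.Linear(n_units, n_units)
            self.U = L.Linear(n_units, n_units)

    def step(self, h, x):
        r = F.sigmoid(self.W_r(x) + self.U_r(h))      # reset gate
        z = F.sigmoid(self.W_z(x) + self.U_z(h))      # update gate
        h_bar = F.tanh(self.W_h(x) + self.U(r * h))   # candidate state
        return (1 - z) * h + z * h_bar

gru = PlainGRU(4)
h = np.zeros((1, 4), dtype=np.float32)
x = np.random.randn(1, 4).astype(np.float32)
h_new = gru.step(h, x)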
Example 4: __init__
# Required import: from chainer import functions [as alias]
# Or: from chainer.functions import sigmoid [as alias]
def __init__(self,
             channels,
             reduction=16,
             round_mid=False,
             mid_activation=(lambda: F.relu),
             out_activation=(lambda: F.sigmoid)):
    super(SEBlock, self).__init__()
    self.use_conv2 = (reduction > 1)
    mid_channels = channels // reduction if not round_mid else round_channels(float(channels) / reduction)
    with self.init_scope():
        self.fc1 = L.Linear(
            in_size=channels,
            out_size=mid_channels)
        if self.use_conv2:
            self.activ = get_activation_layer(mid_activation)
            self.fc2 = L.Linear(
                in_size=mid_channels,
                out_size=channels)
        self.sigmoid = get_activation_layer(out_activation)
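Example 4 only builds the layers; at call time the sigmoid output is used as a per-channel gate that re-weights the feature map. A minimal standalone sketch of that squeeze-and-excitation pattern (this TinySE class is illustrative and is not the SEBlock defined above):

import numpy as np
import chainer
import chainer.functions as F
import chainer.links as L

class TinySE(chainer.Chain):
    def __init__(self, channels, reduction=4):
        super(TinySE, self).__init__()
        with self.init_scope():
            self.fc1 = L.Linear(channels, channels // reduction)
            self.fc2 = L.Linear(channels // reduction, channels)

    def __call__(self, x):
        w = F.mean(x, axis=(2, 3))                    # squeeze: global average pool
        w = F.sigmoid(self.fc2(F.relu(self.fc1(w))))  # excite: per-channel gate in (0, 1)
        w = F.reshape(w, w.shape + (1, 1))
        return x * F.broadcast_to(w, x.shape)         # channel-wise re-weighting

x = np.random.randn(2, 8, 5, 5).astype(np.float32)
y = TinySE(8)(x)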
Example 5: __init__
# Required import: from chainer import functions [as alias]
# Or: from chainer.functions import sigmoid [as alias]
def __init__(self,
             in_channels,
             out_channels,
             do_nms,
             **kwargs):
    super(CenterNetHeatmapBlock, self).__init__(**kwargs)
    self.do_nms = do_nms
    with self.init_scope():
        self.head = CenterNetHeadBlock(
            in_channels=in_channels,
            out_channels=out_channels)
        self.sigmoid = F.sigmoid
        if self.do_nms:
            self.pool = partial(
                F.max_pooling_2d,
                ksize=3,
                stride=1,
                pad=1,
                cover_all=False)
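In Example 5, the sigmoid turns raw heatmap logits into per-pixel probabilities, and the stride-1 max pooling implements a simple non-maximum suppression: a location survives only if it equals the maximum of its 3x3 neighbourhood. A minimal sketch of that idea as a standalone function (an assumed illustration, not the block's actual forward pass):

import numpy as np
import chainer.functions as F

def heatmap_nms(scores, ksize=3):
    heat = F.sigmoid(scores)                 # logits -> probabilities in (0, 1)
    pooled = F.max_pooling_2d(
        heat, ksize=ksize, stride=1, pad=ksize // 2, cover_all=False)
    keep = (pooled.array == heat.array).astype(heat.dtype)
    return heat * keep                       # zero out non-peak locations

scores = np.random.randn(1, 2, 8, 8).astype(np.float32)
peaks = heatmap_nms(scores)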
Example 6: get_loss_func
# Required import: from chainer import functions [as alias]
# Or: from chainer.functions import sigmoid [as alias]
def get_loss_func(self, C=1.0, k=1):
    """Get loss function of VAE.

    The loss value is equal to ELBO (Evidence Lower Bound)
    multiplied by -1.

    Args:
        C (int): Usually this is 1.0. Can be changed to control the
            second term of ELBO bound, which works as regularization.
        k (int): Number of Monte Carlo samples used in encoded vector.
    """
    def lf(x):
        mu, ln_var = self.encode(x)
        batchsize = len(mu.data)
        # reconstruction loss
        rec_loss = 0
        for l in six.moves.range(k):
            z = F.gaussian(mu, ln_var)
            rec_loss += F.bernoulli_nll(x, self.decode(z, sigmoid=False)) \
                / (k * batchsize)
        self.rec_loss = rec_loss
        self.loss = self.rec_loss + \
            C * gaussian_kl_divergence(mu, ln_var) / batchsize
        return self.loss
    return lf
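A note on why decode is called with sigmoid=False inside the loss: F.bernoulli_nll expects pre-sigmoid logits and applies the sigmoid internally in a numerically stable form, so passing already-squashed probabilities would apply it twice. A small sketch of the equivalence (shapes and values are arbitrary):

import numpy as np
import chainer.functions as F

x = np.random.uniform(size=(2, 4)).astype(np.float32)   # targets in [0, 1]
logits = np.random.randn(2, 4).astype(np.float32)       # raw decoder output

nll = F.bernoulli_nll(x, logits)                         # consumes logits directly

p = F.sigmoid(logits)                                    # explicit but less stable form
nll_manual = -F.sum(x * F.log(p) + (1 - x) * F.log(1 - p))
assert np.allclose(nll.array, nll_manual.array, atol=1e-4)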
Example 7: test_fake_as_funcnode_without_replace
# Required import: from chainer import functions [as alias]
# Or: from chainer.functions import sigmoid [as alias]
def test_fake_as_funcnode_without_replace():
    class Model(chainer.Chain):
        def __init__(self):
            super().__init__()

        def add(self, xs, value=0.01):
            return xs.array + value

        def __call__(self, xs):
            return F.sigmoid(self.add(xs))

    model = Model()
    x = input_generator.increasing(3, 4)

    onnx_model = export(model, x)
    sigmoid_nodes = [
        node for node in onnx_model.graph.node if node.op_type == 'Sigmoid']
    assert len(sigmoid_nodes) == 1
    # sigmoid node should be expected to connect with input
    # but the connection is cut because `add` method takes array.
    assert not sigmoid_nodes[0].input[0] == 'Input_0'
Example 8: region_loss
# Required import: from chainer import functions [as alias]
# Or: from chainer.functions import sigmoid [as alias]
def region_loss(output, gt_points):
    # rpoints lie in [0, feat_size]
    # points lie in [0, 1]
    # anchors = [0.1067, 0.9223]
    B, C, H, W = output.shape
    det_confs = F.sigmoid(output[:, 18])
    rpoints = output[:, :18].reshape(B, 9, 2, H, W)
    rpoints0 = F.sigmoid(rpoints[:, 0])
    rpoints = F.concat(
        (rpoints0[:, None], rpoints[:, 1:]), axis=1)
    rpoints_data = rpoints.data
    points_data = rpoints_to_points(rpoints_data)

    gt_rpoints, gt_confs, coord_mask, conf_mask = create_target(
        points_data, gt_points)

    point_loss = F.sum(
        coord_mask[:, None, None] * (rpoints - gt_rpoints) ** 2) / (2 * B)
    conf_loss = F.sum(conf_mask * (det_confs - gt_confs) ** 2) / (2 * B)
    return point_loss, conf_loss
Example 9: __call__
# Required import: from chainer import functions [as alias]
# Or: from chainer.functions import sigmoid [as alias]
def __call__(self, imgs):
    with chainer.using_config('train', False), \
            chainer.function.no_backprop_mode():
        transform = BatchTransform(self.model.mean)
        imgs = transform(imgs)
        imgs = self.model.xp.array(imgs)
        scores = self.model(imgs)
        probs = chainer.cuda.to_cpu(F.sigmoid(scores).data)

    labels = []
    scores = []
    for prob in probs:
        label = np.where(prob >= self.thresh)[0]
        labels.append(label)
        scores.append(prob[label])
    return labels, scores
Example 10: calc_accuracy
# Required import: from chainer import functions [as alias]
# Or: from chainer.functions import sigmoid [as alias]
def calc_accuracy(pred_scores, gt_labels):
    # https://arxiv.org/pdf/1612.03663.pdf
    # end of section 3.1
    pred_probs = F.sigmoid(pred_scores).data
    accs = []
    n_pos = []
    n_pred = []
    for pred_prob, gt_label in zip(pred_probs, gt_labels):
        gt_label = chainer.cuda.to_cpu(gt_label)
        pred_prob = chainer.cuda.to_cpu(pred_prob)
        pred_label = np.where(pred_prob > 0.5)[0]

        correct = np.intersect1d(gt_label, pred_label)
        diff_gt = np.setdiff1d(gt_label, correct)
        diff_pred = np.setdiff1d(pred_label, correct)
        accs.append(
            len(correct) / (len(correct) + len(diff_gt) + len(diff_pred)))
        n_pos.append(len(gt_label))
        n_pred.append(len(pred_label))
    return {
        'accuracy': np.mean(accs),
        'n_pos': np.mean(n_pos),
        'n_pred': np.mean(n_pred)}
Example 11: forward
# Required import: from chainer import functions [as alias]
# Or: from chainer.functions import sigmoid [as alias]
def forward(self, inputs):
    current_input = inputs
    for layer in self._layers:
        projected_input = layer(current_input)
        linear_part = current_input
        # NOTE: if you modify this, think about whether you should modify the initialization
        # above, too.
        nonlinear_part = projected_input[:,
                                         (0 * self._input_dim):
                                         (1 * self._input_dim)]
        gate = projected_input[:,
                               (1 * self._input_dim):
                               (2 * self._input_dim)]
        nonlinear_part = self._activation(nonlinear_part)
        gate = F.sigmoid(gate)
        current_input = gate * linear_part + (1 - gate) * nonlinear_part
    return current_input
Example 12: __init__
# Required import: from chainer import functions [as alias]
# Or: from chainer.functions import sigmoid [as alias]
def __init__(self, base=64):
    super(Generator, self).__init__(
        CLBlock (1, base*1, 128),
        CLBlock (base*1, base*1, 128),
        CLBlock (base*1, base*1, 128),
        CLBlock (base*1, base*1, 128),
        CLBlock (base*1, base*1, 128),
        CLBlock (base*1, base*1, 128),
        ConvBlock(base*1, 1, mode='none', activation=F.sigmoid, bn=False)
    )
Example 13: __init__
# Required import: from chainer import functions [as alias]
# Or: from chainer.functions import sigmoid [as alias]
def __init__(self, base=64):
    super(Generator, self).__init__(
        ConvBlock(1, base*1, mode='none'),
        ConvBlock(base*1, base*2, mode='down'),
        ConvBlock(base*2, base*4, mode='down'),
        ResBlock (base*4, base*4),
        ResBlock (base*4, base*4),
        ResBlock (base*4, base*4),
        ResBlock (base*4, base*4),
        ResBlock (base*4, base*4),
        ConvBlock(base*4, base*2, mode='up'),
        ConvBlock(base*2, base*1, mode='up'),
        ConvBlock(base*1, 1, mode='none', bn=False, activation=F.sigmoid)
    )
Example 14: __call__
# Required import: from chainer import functions [as alias]
# Or: from chainer.functions import sigmoid [as alias]
def __call__(self, x):
    image, steps = x
    # sigmoid of the embedded step count acts as a feature-wise gate on the image features
    h = self.image2hidden(image) * F.sigmoid(self.embed(steps))
    return self.hidden2out(h)
Example 15: __call__
# Required import: from chainer import functions [as alias]
# Or: from chainer.functions import sigmoid [as alias]
def __call__(self, x, t):
    h = F.relu(self.conv1(x))
    h = F.relu(self.conv2(h))
    h = F.relu(self.conv3(h))
    h = F.dropout(F.relu(self.fc4(h)), train=self.train)
    h = self.fc5(h)
    self.pred = F.reshape(h, (x.data.shape[0], 16, 16))

    if t is not None:
        self.loss = F.sigmoid_cross_entropy(self.pred, t, normalize=False)
        return self.loss
    else:
        self.pred = F.sigmoid(self.pred)
        return self.pred
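The same logits-versus-probabilities split appears here: F.sigmoid_cross_entropy consumes the raw scores together with integer 0/1 targets, so F.sigmoid is only applied when the caller wants probabilities at prediction time. A minimal sketch with made-up shapes:

import numpy as np
import chainer.functions as F

logits = np.random.randn(3, 16, 16).astype(np.float32)
targets = np.random.randint(0, 2, size=(3, 16, 16)).astype(np.int32)

loss = F.sigmoid_cross_entropy(logits, targets, normalize=False)   # training branch
probs = F.sigmoid(logits)                                          # inference branch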