This article collects typical usage examples of the Python method chainer.functions.softmax_cross_entropy. If you have been wondering what functions.softmax_cross_entropy does, how to call it, or what it looks like in real code, the curated examples below may help. You can also explore the other members of the chainer.functions module.
The following 15 code examples of functions.softmax_cross_entropy are shown, sorted by popularity by default. You can upvote the examples you like or find useful; your ratings help the system recommend better Python code examples.
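Before the collected examples, here is a minimal, self-contained sketch of the basic call; the toy logits and labels are made up for illustration. The function takes raw (unnormalized) logits and integer class labels and returns the mean cross-entropy loss as a scalar Variable:

import numpy as np
import chainer.functions as F

logits = np.array([[2.0, 0.5, -1.0],
                   [0.1, 1.2,  0.3]], dtype=np.float32)  # shape (batch, classes)
labels = np.array([0, 1], dtype=np.int32)                # ground-truth class ids

loss = F.softmax_cross_entropy(logits, labels)  # scalar Variable (mean over batch)
print(loss.array)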
Example 1: forward_one_step
# Required import: from chainer import functions [as alias]
# Or: from chainer.functions import softmax_cross_entropy [as alias]
def forward_one_step(self, x_data, y_data, state, train=True, dropout_ratio=0.5):
    # Chainer v1-style API: `volatile` and F.dropout's `train` keyword were
    # removed in Chainer v2 in favor of chainer.using_config('train', ...).
    x = Variable(x_data, volatile=not train)
    t = Variable(y_data, volatile=not train)
    h0 = self.embed(x)
    h1_in = self.l1_x(F.dropout(h0, ratio=dropout_ratio, train=train)) + self.l1_h(state['h1'])
    c1, h1 = F.lstm(state['c1'], h1_in)
    h2_in = self.l2_x(F.dropout(h1, ratio=dropout_ratio, train=train)) + self.l2_h(state['h2'])
    c2, h2 = F.lstm(state['c2'], h2_in)
    y = self.l3(F.dropout(h2, ratio=dropout_ratio, train=train))
    state = {'c1': c1, 'h1': h1, 'c2': c2, 'h2': h2}
    if train:
        # Training: return the new state and the cross-entropy loss.
        return state, F.softmax_cross_entropy(y, t)
    else:
        # Inference: return the softmax probabilities instead.
        return state, F.softmax(y)
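The `state` dict threaded through this function holds the cell and hidden states of the two LSTM layers. A minimal sketch of how it might be initialized before the first call, in the same Chainer v1 style; `make_initial_state` and `n_units` are hypothetical names, not part of the original example:

import numpy as np
from chainer import Variable

def make_initial_state(batchsize, n_units, train=True):
    # One zero-filled cell state and hidden state per LSTM layer.
    return {name: Variable(np.zeros((batchsize, n_units), dtype=np.float32),
                           volatile=not train)
            for name in ('c1', 'h1', 'c2', 'h2')}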
Example 2: __call__
# Required import: from chainer import functions [as alias]
# Or: from chainer.functions import softmax_cross_entropy [as alias]
def __call__(self, x, t):
    h = F.relu(self.conv1(x))
    h = F.max_pooling_2d(h, 2, 1)
    h = F.relu(self.conv2(h))
    h = F.relu(self.conv3(h))
    h = F.relu(self.fc4(h))
    h = self.fc5(h)
    # Reshape the flat output into per-pixel class scores: (batch, 3, 16, 16).
    h = F.reshape(h, (x.data.shape[0], 3, 16, 16))
    h = self.channelwise_inhibited(h)
    if self.train:
        self.loss = F.softmax_cross_entropy(h, t, normalize=False)
        return self.loss
    else:
        self.pred = F.softmax(h)
        return self.pred
Example 3: __call__
# Required import: from chainer import functions [as alias]
# Or: from chainer.functions import softmax_cross_entropy [as alias]
def __call__(self, x, t):
    h = F.relu(self.conv1(x))
    h = F.max_pooling_2d(h, 2, 1)
    h = F.relu(self.conv2(h))
    h = F.relu(self.conv3(h))
    h = F.dropout(F.relu(self.fc4(h)), train=self.train)
    h = self.fc5(h)
    h = F.reshape(h, (x.data.shape[0], 3, 16, 16))
    h = self.channelwise_inhibited(h)
    if self.train:
        self.loss = F.softmax_cross_entropy(h, t, normalize=False)
        return self.loss
    else:
        self.pred = F.softmax(h)
        return self.pred
Example 4: _get_loss_gen
# Required import: from chainer import functions [as alias]
# Or: from chainer.functions import softmax_cross_entropy [as alias]
def _get_loss_gen(self):
    batchsize = self.y_fake.data.shape[0]
    # Multi-class cross-entropy on the predicted label map, plus an
    # adversarial term that pushes the discriminator output toward "real" (1).
    L_mce = F.softmax_cross_entropy(self.pred_label_map, self.ground_truth, normalize=False)
    L_bce = F.softmax_cross_entropy(
        self.y_fake,
        Variable(self.xp.ones(batchsize, dtype=self.xp.int32), volatile=not self.gen.train))
    loss = L_mce + self.L_bce_weight * L_bce

    # Log report
    label_true = chainer.cuda.to_cpu(self.ground_truth.data)
    label_pred = chainer.cuda.to_cpu(self.pred_label_map.data).argmax(axis=1)
    logs = []
    for i in six.moves.range(batchsize):
        acc, acc_cls, iu, fwavacc = utils.label_accuracy_score(
            label_true[i], label_pred[i], self.n_class)
        logs.append((acc, acc_cls, iu, fwavacc))
    log = np.array(logs).mean(axis=0)
    values = {
        'loss': loss,
        'accuracy': log[0],
        'accuracy_cls': log[1],
        'iu': log[2],
        'fwavacc': log[3],
    }
    chainer.report(values, self.gen)
    return loss
Example 5: calc_loss
# Required import: from chainer import functions [as alias]
# Or: from chainer.functions import softmax_cross_entropy [as alias]
def calc_loss(self):
    batchsize = self.ground_truth.shape[0]
    self.loss = F.softmax_cross_entropy(self.pred_label_map, self.ground_truth, normalize=False)

    # Log report
    label_true = chainer.cuda.to_cpu(self.ground_truth.data)
    label_pred = chainer.cuda.to_cpu(self.pred_label_map.data).argmax(axis=1)
    logs = []
    for i in six.moves.range(batchsize):
        acc, acc_cls, iu, fwavacc = utils.label_accuracy_score(
            label_true[i], label_pred[i], self.n_class)
        logs.append((acc, acc_cls, iu, fwavacc))
    log = np.array(logs).mean(axis=0)
    values = {
        'loss': self.loss,
        'accuracy': log[0],
        'accuracy_cls': log[1],
        'iu': log[2],
        'fwavacc': log[3],
    }
    chainer.report(values, self.model)
Example 6: forward
# Required import: from chainer import functions [as alias]
# Or: from chainer.functions import softmax_cross_entropy [as alias]
def forward(self, x, t):
    # def forward(self, x):
    h = F.max_pooling_2d(F.local_response_normalization(
        F.relu(self.conv1(x))), 3, stride=2)
    h = F.max_pooling_2d(F.local_response_normalization(
        F.relu(self.conv2(h))), 3, stride=2)
    h = F.relu(self.conv3(h))
    h = F.relu(self.conv4(h))
    h = F.max_pooling_2d(F.relu(self.conv5(h)), 3, stride=2)
    h = F.dropout(F.relu(self.fc6(h)))
    h = F.dropout(F.relu(self.fc7(h)))
    h = self.fc8(h)
    loss = F.softmax_cross_entropy(h, t)
    # loss = h
    # chainer.report({'loss': loss, 'accuracy': F.accuracy(h, t)}, self)
    return loss

# from https://github.com/chainer/chainer/blob/master/examples/imagenet/alex.py
Example 7: forward
# Required import: from chainer import functions [as alias]
# Or: from chainer.functions import softmax_cross_entropy [as alias]
def forward(self, x, t):
    # def forward(self, x):
    h = F.max_pooling_2d(F.local_response_normalization(
        F.relu(self.conv1(x))), 3, stride=2)
    h = F.max_pooling_2d(F.local_response_normalization(
        F.relu(self.conv2(h))), 3, stride=2)
    h = F.relu(self.conv3(h))
    h = F.relu(self.conv4(h))
    h = F.max_pooling_2d(F.relu(self.conv5(h)), 3, stride=2)
    h = F.dropout(F.relu(self.fc6(h)))
    h = F.dropout(F.relu(self.fc7(h)))
    h = self.fc8(h)
    loss = F.softmax_cross_entropy(h, t)
    # loss = h
    # chainer.report({'loss': loss, 'accuracy': F.accuracy(h, t)}, self)
    return loss
Example 8: softmax_cross_entropy
# Required import: from chainer import functions [as alias]
# Or: from chainer.functions import softmax_cross_entropy [as alias]
def softmax_cross_entropy(self, y, t):
    import numpy as np

    log_softmax = F.log_softmax(y)
    # SelectItem is not supported by onnx-chainer.
    # TODO(hamaji): Support it?
    # log_prob = F.select_item(log_softmax, t)

    # TODO(hamaji): Currently, F.sum with axis=1 cannot be
    # backpropped properly.
    # log_prob = F.sum(log_softmax * t, axis=1)
    # self.batch_size = chainer.Variable(np.array(t.size, np.float32),
    #                                    name='batch_size')
    # return -F.sum(log_prob, axis=0) / self.batch_size
    log_prob = F.sum(log_softmax * t, axis=(0, 1))
    batch_size = chainer.Variable(np.array(t.shape[0], np.float32),
                                  name='batch_size')
    self.extra_inputs = [batch_size]
    loss = -log_prob / batch_size
    loss.name = 'loss'
    return loss
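Assuming `t` here is a one-hot target matrix of shape (N, C), as the elementwise product with the log-softmax suggests, this helper computes the standard mean cross-entropy:

    loss = -\frac{1}{N} \sum_{i=1}^{N} \sum_{c=1}^{C} t_{ic} \, \log\big(\mathrm{softmax}(y_i)_c\big)

Summing over both axes at once (axis=(0, 1)) gives the same result as selecting the target entries, because the one-hot mask zeroes every non-target term.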
Example 9: forward
# Required import: from chainer import functions [as alias]
# Or: from chainer.functions import softmax_cross_entropy [as alias]
def forward(self, x, t):
    h = self.bn1(self.conv1(x))
    h = F.max_pooling_2d(F.relu(h), 3, stride=2)
    h = self.res2(h)
    h = self.res3(h)
    h = self.res4(h)
    h = self.res5(h)
    h = F.average_pooling_2d(h, 7, stride=1)
    h = self.fc(h)
    # loss = F.softmax_cross_entropy(h, t)
    loss = self.softmax_cross_entropy(h, t)
    if self.compute_accuracy:
        # `t` is one-hot here, so recover class ids with argmax for F.accuracy.
        chainer.report({'loss': loss, 'accuracy': F.accuracy(h, np.argmax(t, axis=1))}, self)
    else:
        chainer.report({'loss': loss}, self)
    return loss
Example 10: softmax_cross_entropy
# Required import: from chainer import functions [as alias]
# Or: from chainer.functions import softmax_cross_entropy [as alias]
def softmax_cross_entropy(self, y, t):
    import numpy as np

    log_softmax = F.log_softmax(y)
    # SelectItem is not supported by onnx-chainer.
    # TODO(hamaji): Support it?
    # log_prob = F.select_item(log_softmax, t)

    # TODO(hamaji): Currently, F.sum with axis=1 cannot be
    # backpropped properly.
    # log_prob = F.sum(log_softmax * t, axis=1)
    # self.batch_size = chainer.Variable(np.array(t.size, np.float32),
    #                                    name='batch_size')
    # return -F.sum(log_prob, axis=0) / self.batch_size
    log_prob = F.sum(log_softmax * t, axis=(0, 1))
    batch_size = chainer.Variable(self.xp.array(t.shape[0], np.float32),
                                  name='batch_size')
    self.extra_inputs = [batch_size]
    loss = -log_prob / batch_size
    loss.name = 'loss'
    return loss
Example 11: forward
# Required import: from chainer import functions [as alias]
# Or: from chainer.functions import softmax_cross_entropy [as alias]
def forward(self, x, t):
    h = F.max_pooling_2d(F.local_response_normalization(
        F.relu(self.conv1(x))), 3, stride=2)
    h = F.max_pooling_2d(F.local_response_normalization(
        F.relu(self.conv2(h))), 3, stride=2)
    h = F.relu(self.conv3(h))
    h = F.relu(self.conv4(h))
    h = F.max_pooling_2d(F.relu(self.conv5(h)), 3, stride=2)
    h = F.dropout(F.relu(self.fc6(h)))
    h = F.dropout(F.relu(self.fc7(h)))
    h = self.fc8(h)
    # EDIT(hamaji): ONNX-chainer cannot output SoftmaxCrossEntropy.
    # loss = F.softmax_cross_entropy(h, t)
    loss = self.softmax_cross_entropy(h, t)
    if self.compute_accuracy:
        chainer.report({'loss': loss, 'accuracy': F.accuracy(h, t)}, self)
    else:
        chainer.report({'loss': loss}, self)
    return loss
Example 12: loss
# Required import: from chainer import functions [as alias]
# Or: from chainer.functions import softmax_cross_entropy [as alias]
def loss(self, es, x, y, t):
    """Forward propagation and loss calculation.

    Args:
        es (pair of ~chainer.Variable): encoder state
        x (list of ~chainer.Variable): list of input sequences
        y (list of ~chainer.Variable): list of output sequences
        t (list of ~chainer.Variable): list of target sequences
            (if t is None, only the states are returned)

    Returns:
        es (pair of ~chainer.Variable(s)): encoder state
        ds (pair of ~chainer.Variable(s)): decoder state
        loss (~chainer.Variable): cross-entropy loss
    """
    es, ey = self.encoder(es, x)
    ds, dy = self.decoder(es, y)
    if t is not None:
        loss = F.softmax_cross_entropy(dy, t)
        # Avoid NaN gradients by keeping the otherwise-unused encoder outputs
        # in the graph (see https://github.com/pfnet/chainer/issues/2505).
        if chainer.config.train:
            loss += F.sum(F.concat(ey, axis=0)) * 0
        return es, ds, loss
    else:  # if target is None, it only returns states
        return es, ds
Example 13: forward
# Required import: from chainer import functions [as alias]
# Or: from chainer.functions import softmax_cross_entropy [as alias]
def forward(net, image_batch, sentence_batch, train=True):
    images = xp.asarray(image_batch)
    n, sentence_length = sentence_batch.shape
    net.initialize(images)
    loss = 0
    acc = 0
    size = 0
    for i in range(sentence_length - 1):
        # target is 1 where the current token is not <eos>; finished
        # sequences are masked out of the accuracy and size counts.
        target = xp.where(xp.asarray(sentence_batch[:, i]) != eos, 1, 0).astype(np.float32)
        if (target == 0).all():
            break
        with chainer.using_config('train', train):
            with chainer.using_config('enable_backprop', train):
                x = xp.asarray(sentence_batch[:, i])
                t = xp.asarray(sentence_batch[:, i + 1])
                y = net(x)
                y_max_index = xp.argmax(y.data, axis=1)
                mask = target.reshape((len(target), 1)).repeat(y.data.shape[1], axis=1)
                y = y * mask
                loss += F.softmax_cross_entropy(y, t)
                acc += xp.sum((y_max_index == t) * target)
                size += xp.sum(target)
    return loss / size, float(acc) / size, float(size)
Example 14: forward
# Required import: from chainer import functions [as alias]
# Or: from chainer.functions import softmax_cross_entropy [as alias]
def forward(self, img_feats, captions):
    """Batch of image features and image captions to a single loss.

    Compute the softmax cross-entropy captioning loss in a single pass
    without iterating over the sequences.
    """
    hx, cx, _ = self.reset(img_feats)
    # Extract all inputs and targets for all captions in the batch
    xs = [c[:-1] for c in captions]  # del eos
    ts = [c[1:] for c in captions]   # del bos
    # Get the predictions `ys`
    _, _, ys = self.step(hx, cx, xs)
    # Since `ys` is concatenated, we also concatenate the target tokens
    # before computing the loss
    ts = F.concat(ts, axis=0)
    loss = F.softmax_cross_entropy(ys, ts)
    return loss
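A small illustration of the slicing convention above, assuming each caption is a token-id array wrapped in <bos>/<eos> markers; the token ids are made up:

import numpy as np

# Hypothetical ids: <bos>=0, <eos>=1, "a"=5, "cat"=9
c = np.array([0, 5, 9, 1], dtype=np.int32)
xs = c[:-1]  # [0, 5, 9] -> decoder inputs (the trailing <eos> is dropped)
ts = c[1:]   # [5, 9, 1] -> targets, shifted by one (the leading <bos> is dropped)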
Example 15: forward
# Required import: from chainer import functions [as alias]
# Or: from chainer.functions import softmax_cross_entropy [as alias]
def forward(self, inputs, device):
    x, = inputs
    t = device.send(self.t)
    class_weight = device.send(self.class_weight)
    loss = functions.softmax_cross_entropy(
        x, t, normalize=self.normalize, reduce=self.reduce,
        cache_score=self.cache_score, class_weight=class_weight,
        enable_double_backprop=self.enable_double_backprop)
    if not (self.enable_double_backprop or device.xp is chainerx):
        assert (loss.creator.y is not None) == self.cache_score

    # All the loss values except those corresponding to the ignored label
    # must be positive.
    # TODO(niboshi): Use device.xp.where once chainerx supports it.
    assert numpy.where(
        backend.CpuDevice().send(t == -1),
        True,
        backend.CpuDevice().send(loss.array) > 0).all()
    return loss,
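To close, a minimal, self-contained sketch of calling the function directly with the keyword arguments exercised in this test; the shapes, weights, and label values are made up for illustration. By default, entries whose label equals ignore_label (-1) are excluded from the loss:

import numpy as np
import chainer.functions as F

x = np.random.randn(4, 3).astype(np.float32)     # logits: (batch, classes)
t = np.array([0, 2, 1, -1], dtype=np.int32)      # -1 is the default ignore_label
w = np.array([1.0, 2.0, 0.5], dtype=np.float32)  # per-class weights

# Mean loss over the non-ignored entries, with per-class weighting.
loss = F.softmax_cross_entropy(x, t, class_weight=w, reduce='mean')
print(loss.array)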