This article collects typical usage examples of the Python method chainer.functions.transpose_sequence. If you have been wondering what functions.transpose_sequence does and how to use it, the curated code samples below may help. You can also explore the containing module, chainer.functions, for more context.
The following shows 9 code examples of functions.transpose_sequence, sorted by popularity by default. You can upvote the examples you like or find useful; your feedback helps the system recommend better Python code samples.
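Before the examples, here is a minimal, self-contained sketch (data made up for illustration) of what functions.transpose_sequence does: it turns a batch-major list of variable-length sequences, sorted longest first, into a time-major list with one array per time step.

import numpy as np
import chainer.functions as F

# Hypothetical input: three sequences of lengths 3, 2 and 1, longest first.
xs = [np.array([1, 2, 3], np.float32),
      np.array([4, 5], np.float32),
      np.array([6], np.float32)]
ys = F.transpose_sequence(xs)
print([y.array for y in ys])
# [array([1., 4., 6.]), array([2., 5.]), array([3.])]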
Example 1: run_with_n_step_lstm
# Required import: from chainer import functions [as alias]
# Or: from chainer.functions import transpose_sequence [as alias]
def run_with_n_step_lstm(xs, h, c, w, b):
    xs = F.transpose_sequence(xs)
    print(w.shape)
    # Split the packed weight into input and hidden parts, then into the four gates.
    wx, wh = F.split_axis(w, 2, 1)
    ws = F.split_axis(wx, 4, 0) + F.split_axis(wh, 4, 0)
    # n_step_lstm adds two bias terms per gate, so halve the bias and use it twice.
    b = b / 2
    bs = F.split_axis(b, 4, 0) * 2
    print(bs)
    h, _, _ = F.n_step_lstm(1, 0.0, h, c, ws, bs, xs)
    return h
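The weight split above can be tried in isolation. The following is a small sketch with hypothetical values; n_units, the random data, and the assumed packing of w (input-to-hidden and hidden-to-hidden blocks concatenated along axis 1) are not taken from the example's test setup.

import numpy as np
import chainer.functions as F

n_units = 3  # hypothetical hidden size
w = np.random.rand(4 * n_units, 2 * n_units).astype(np.float32)
wx, wh = F.split_axis(w, 2, 1)                         # two (4*n_units, n_units) blocks
ws = F.split_axis(wx, 4, 0) + F.split_axis(wh, 4, 0)   # eight per-gate matrices
print(len(ws), ws[0].shape)                            # 8 (3, 3)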
Example 2: check_forward
# Required import: from chainer import functions [as alias]
# Or: from chainer.functions import transpose_sequence [as alias]
def check_forward(self, xs_data):
    xs = [chainer.Variable(x) for x in xs_data]
    ys = functions.transpose_sequence(xs)
    self.assertEqual(len(ys), len(self.trans_lengths))
    for y, l in zip(ys, self.trans_lengths):
        self.assertEqual(len(y.data), l)

    for i, l in enumerate(self.trans_lengths):
        for j in six.moves.range(l):
            testing.assert_allclose(ys[i].data[j], self.xs[j][i])
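The trans_lengths checked above can be derived from the input lengths: the t-th transposed output holds one element for every sequence that is at least t+1 long. A tiny sketch with made-up lengths:

lengths = [4, 3, 1]  # hypothetical input lengths, longest first
trans_lengths = [sum(1 for l in lengths if l > t) for t in range(max(lengths))]
print(trans_lengths)  # [3, 2, 2, 1]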
Example 3: check_backward
# Required import: from chainer import functions [as alias]
# Or: from chainer.functions import transpose_sequence [as alias]
def check_backward(self, xs_data, gs_data):
    # In this situation the function returns no result
    if len(self.trans_lengths) == 0:
        return

    def f(*xs):
        return functions.transpose_sequence(xs)

    gradient_check.check_backward(
        f, tuple(xs_data), tuple(gs_data))
Example 4: test_output
# Required import: from chainer import functions [as alias]
# Or: from chainer.functions import transpose_sequence [as alias]
def test_output(self):
    class Model(chainer.Chain):

        def __init__(self):
            super(Model, self).__init__()

        def __call__(self, *xs):
            return F.transpose_sequence(xs)

    model = Model()
    xs = [input_generator.increasing(*shape)
          for shape in self.in_shapes]
    self.expect(model, xs, name=self.name)
Example 5: __call__
# Required import: from chainer import functions [as alias]
# Or: from chainer.functions import transpose_sequence [as alias]
def __call__(self, xs, ys):
    xs = permutate_list(xs, argsort_list_descent(xs), inv=False)
    xs = F.transpose_sequence(xs)
    ys = permutate_list(ys, argsort_list_descent(ys), inv=False)
    ys = F.transpose_sequence(ys)
    return super(CRF, self).__call__(xs, ys)
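Note that permutate_list and argsort_list_descent are project helpers, not part of chainer.functions. A plausible minimal implementation of their assumed semantics (order the sequences longest first, and optionally undo that ordering) looks like this:

def argsort_list_descent(xs):
    # Indices that order the sequences by length, longest first.
    return sorted(range(len(xs)), key=lambda i: -len(xs[i]))


def permutate_list(xs, indices, inv=False):
    # inv=False: reorder xs by indices; inv=True: undo that reordering.
    if inv:
        out = [None] * len(xs)
        for new_pos, old_pos in enumerate(indices):
            out[old_pos] = xs[new_pos]
        return out
    return [xs[i] for i in indices]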
Example 6: argmax
# Required import: from chainer import functions [as alias]
# Or: from chainer.functions import transpose_sequence [as alias]
def argmax(self, xs):
    xs = permutate_list(xs, argsort_list_descent(xs), inv=False)
    xs = F.transpose_sequence(xs)
    score, path = super(CRF, self).argmax(xs)
    path = F.transpose_sequence(path)
    return score, path
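For reference, the call delegated to by super(CRF, self).argmax is presumably chainer.links.CRF1d.argmax, which expects time-major inputs. A small sketch with hypothetical shapes (two sequences of lengths 2 and 1; the label count and random scores are made up):

import numpy as np
import chainer.links as L

n_labels = 3
crf = L.CRF1d(n_labels)
# Time-major scores: step 0 has both sequences, step 1 only the longer one.
xs = [np.random.rand(2, n_labels).astype(np.float32),
      np.random.rand(1, n_labels).astype(np.float32)]
score, path = crf.argmax(xs)
print(score.shape, path)  # per-sequence scores and one label array per time step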
Example 7: forward
# Required import: from chainer import functions [as alias]
# Or: from chainer.functions import transpose_sequence [as alias]
def forward(self, *inputs):
    batch = len(inputs) // 6
    lefts = inputs[0: batch]
    rights = inputs[batch: batch * 2]
    dests = inputs[batch * 2: batch * 3]
    labels = inputs[batch * 3: batch * 4]
    sequences = inputs[batch * 4: batch * 5]
    leaf_labels = inputs[batch * 5: batch * 6]

    inds = numpy.argsort([-len(l) for l in lefts])
    # Sort all arrays in descending order and transpose them
    lefts = F.transpose_sequence([lefts[i] for i in inds])
    rights = F.transpose_sequence([rights[i] for i in inds])
    dests = F.transpose_sequence([dests[i] for i in inds])
    labels = F.transpose_sequence([labels[i] for i in inds])
    sequences = F.transpose_sequence([sequences[i] for i in inds])
    leaf_labels = F.transpose_sequence(
        [leaf_labels[i] for i in inds])

    batch = len(inds)
    maxlen = len(sequences)

    loss = 0
    count = 0
    correct = 0

    dtype = chainer.get_dtype()
    stack = self.xp.zeros((batch, maxlen * 2, self.n_units), dtype)
    for i, (word, label) in enumerate(zip(sequences, leaf_labels)):
        batch = word.shape[0]
        es = self.leaf(word)
        ds = self.xp.full((batch,), i, self.xp.int32)
        y = self.label(es)
        loss += F.softmax_cross_entropy(y, label, normalize=False) * batch
        count += batch
        predict = self.xp.argmax(y.array, axis=1)
        correct += (predict == label.array).sum()

        stack = thin_stack.thin_stack_set(stack, ds, es)

    for left, right, dest, label in zip(lefts, rights, dests, labels):
        l, stack = thin_stack.thin_stack_get(stack, left)
        r, stack = thin_stack.thin_stack_get(stack, right)
        o = self.node(l, r)
        y = self.label(o)
        batch = l.shape[0]
        loss += F.softmax_cross_entropy(y, label, normalize=False) * batch
        count += batch
        predict = self.xp.argmax(y.array, axis=1)
        correct += (predict == label.array).sum()

        stack = thin_stack.thin_stack_set(stack, dest, o)

    loss /= count
    reporter.report({'loss': loss}, self)
    reporter.report({'total': count}, self)
    reporter.report({'correct': correct}, self)
    return loss
Example 8: forward
# Required import: from chainer import functions [as alias]
# Or: from chainer.functions import transpose_sequence [as alias]
def forward(self, *inputs):
    batch = len(inputs) // 6
    lefts = inputs[0: batch]
    rights = inputs[batch: batch * 2]
    dests = inputs[batch * 2: batch * 3]
    labels = inputs[batch * 3: batch * 4]
    sequences = inputs[batch * 4: batch * 5]
    leaf_labels = inputs[batch * 5: batch * 6]

    inds = numpy.argsort([-len(l) for l in lefts])
    # Sort all arrays in descending order and transpose them
    lefts = F.transpose_sequence([lefts[i] for i in inds])
    rights = F.transpose_sequence([rights[i] for i in inds])
    dests = F.transpose_sequence([dests[i] for i in inds])
    labels = F.transpose_sequence([labels[i] for i in inds])
    sequences = F.transpose_sequence([sequences[i] for i in inds])
    leaf_labels = F.transpose_sequence(
        [leaf_labels[i] for i in inds])

    batch = len(inds)
    maxlen = len(sequences)

    loss = 0
    count = 0
    correct = 0

    stack = self.xp.zeros((batch, maxlen * 2, self.n_units), 'f')
    for i, (word, label) in enumerate(zip(sequences, leaf_labels)):
        batch = word.shape[0]
        es = self.leaf(word)
        ds = self.xp.full((batch,), i, 'i')
        y = self.label(es)
        loss += F.softmax_cross_entropy(y, label, normalize=False) * batch
        count += batch
        predict = self.xp.argmax(y.data, axis=1)
        correct += (predict == label.data).sum()

        stack = thin_stack.thin_stack_set(stack, ds, es)

    for left, right, dest, label in zip(lefts, rights, dests, labels):
        l, stack = thin_stack.thin_stack_get(stack, left)
        r, stack = thin_stack.thin_stack_get(stack, right)
        o = self.node(l, r)
        y = self.label(o)
        batch = l.shape[0]
        loss += F.softmax_cross_entropy(y, label, normalize=False) * batch
        count += batch
        predict = self.xp.argmax(y.data, axis=1)
        correct += (predict == label.data).sum()

        stack = thin_stack.thin_stack_set(stack, dest, o)

    loss /= count
    reporter.report({'loss': loss}, self)
    reporter.report({'total': count}, self)
    reporter.report({'correct': correct}, self)
    return loss
Example 9: predict
# Required import: from chainer import functions [as alias]
# Or: from chainer.functions import transpose_sequence [as alias]
def predict(self, y_list, t, compute_loss=True):
    predict_list = []
    cnt = 0
    for n_len in self.n_length:
        pred = F.concat(y_list[cnt:cnt + n_len], axis=0)
        predict_list.append(pred)
        cnt += n_len

    inds = self.inds
    # inds_trans = [inds[i] for i in inds]
    inds_rev = sorted([(i, ind) for i, ind in enumerate(inds)],
                      key=lambda x: x[1])
    hs = [predict_list[i] for i in inds]

    ts_original = None
    if compute_loss:
        ts_original = [self.xp.array(t[i], self.xp.int32) for i in inds]

    hs = F.transpose_sequence(hs)

    loss = None
    if compute_loss and ts_original is not None:
        # loss
        ts = F.transpose_sequence(ts_original)
        loss = self.lossfun(hs, ts)

    # predict
    score, predicts_trans = self.lossfun.argmax(hs)
    predicts = F.transpose_sequence(predicts_trans)

    gold_predict_pairs = []
    if compute_loss:
        for pred, gold in zip(predicts, ts_original):
            pred = to_cpu(pred.data)
            gold = to_cpu(gold)
            gold_predict_pairs.append([gold, pred])
    else:
        for pred in predicts:
            pred = to_cpu(pred.data)
            gold_predict_pairs.append([pred])

    gold_predict_pairs = [gold_predict_pairs[e_i] for e_i, _ in inds_rev]
    self.y = gold_predict_pairs
    return gold_predict_pairs, loss
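The final reordering via inds_rev restores the caller's original batch order after the length-descending sort. A toy sketch of that bookkeeping, with data made up for illustration:

inds = [2, 0, 1]                 # hypothetical sort order (longest sequence first)
sorted_items = ['c', 'a', 'b']   # results produced in sorted order
inds_rev = sorted([(i, ind) for i, ind in enumerate(inds)], key=lambda x: x[1])
restored = [sorted_items[e_i] for e_i, _ in inds_rev]
print(restored)                  # ['a', 'b', 'c'] -- back in the original order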