

Python functions.transpose_sequence method code examples

This article collects typical usage examples of the chainer.functions.transpose_sequence method in Python. If you are wondering what functions.transpose_sequence is for, how to call it, or what it looks like in real code, the curated examples below may help. You can also explore other usage examples from the chainer.functions module.


The following presents 9 code examples of the functions.transpose_sequence method, sorted by popularity by default.
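Before diving into the examples, here is a minimal sketch (not taken from any of the projects below) of what transpose_sequence does: given a list of variable-length sequences sorted by descending length, it returns one Variable per time step, containing the elements of every sequence that is still "alive" at that step.

import numpy as np
import chainer
import chainer.functions as F

# Three sequences, already sorted by descending length as
# transpose_sequence expects.
xs = [
    chainer.Variable(np.array([0, 1, 2, 3], np.int32)),
    chainer.Variable(np.array([4, 5], np.int32)),
    chainer.Variable(np.array([6], np.int32)),
]

ys = F.transpose_sequence(xs)
for y in ys:
    print(y.data)  # [0 4 6], then [1 5], then [2], then [3]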

Example 1: run_with_n_step_lstm

# Required import: from chainer import functions [as alias]
# Or: from chainer.functions import transpose_sequence [as alias]
def run_with_n_step_lstm(xs, h, c, w, b):
    # Transpose the list of per-example sequences into time-major form.
    xs = F.transpose_sequence(xs)
    print(w.shape)
    # w stacks the input-side and hidden-side weights of the four LSTM gates;
    # split it into the eight matrices expected by n_step_lstm.
    wx, wh = F.split_axis(w, 2, 1)
    ws = F.split_axis(wx, 4, 0) + F.split_axis(wh, 4, 0)
    # Halve b and duplicate the four gate biases, so that the input-side and
    # hidden-side bias pairs sum back to the original b.
    b = b / 2
    bs = F.split_axis(b, 4, 0) * 2
    print(bs)
    # One-layer LSTM, no dropout.
    h, _, _ = F.n_step_lstm(1, 0.0, h, c, ws, bs, xs)
    return h
Developer: pfnet-research, Project: chainer-compiler, Lines of code: 12, Source file: MyLSTM.py
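As a side note on Example 1, the weight and bias reshuffling can be inspected on its own. The following sketch uses hypothetical shapes (hidden size n = 4, input size assumed equal to n) and shows only the splitting logic, not the full LSTM call:

import numpy as np
import chainer.functions as F

n = 4  # hypothetical hidden size; input size assumed equal to n
w = np.random.randn(4 * n, 2 * n).astype(np.float32)
b = np.random.randn(4 * n).astype(np.float32)

wx, wh = F.split_axis(w, 2, 1)                        # input-side / hidden-side blocks
ws = F.split_axis(wx, 4, 0) + F.split_axis(wh, 4, 0)  # eight gate matrices
bs = F.split_axis(b / 2, 4, 0) * 2                    # eight biases; pairs sum back to b
print([v.shape for v in ws])  # eight (4, 4) matrices
print([v.shape for v in bs])  # eight (4,) vectors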

Example 2: check_forward

# Required import: from chainer import functions [as alias]
# Or: from chainer.functions import transpose_sequence [as alias]
def check_forward(self, xs_data):
        xs = [chainer.Variable(x) for x in xs_data]
        ys = functions.transpose_sequence(xs)
        # One output per time step, each as long as the number of input
        # sequences that are still alive at that step.
        self.assertEqual(len(ys), len(self.trans_lengths))
        for y, l in zip(ys, self.trans_lengths):
            self.assertEqual(len(y.data), l)

        # Row j of transposed output i equals element i of input sequence j.
        for i, l in enumerate(self.trans_lengths):
            for j in six.moves.range(l):
                testing.assert_allclose(ys[i].data[j], self.xs[j][i])
Developer: chainer, Project: chainer, Lines of code: 12, Source file: test_transpose_sequence.py
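For context, a hypothetical fixture matching what this test checks could be built as follows; trans_lengths are the first dimensions of the transposed outputs:

import numpy as np

lengths = [4, 3, 1]  # hypothetical sequence lengths, descending
xs_data = [np.random.randn(l, 3).astype(np.float32) for l in lengths]
# Output t collects one row from every sequence longer than t.
trans_lengths = [sum(l > t for l in lengths) for t in range(max(lengths))]
print(trans_lengths)  # [3, 2, 2, 1]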

Example 3: check_backward

# Required import: from chainer import functions [as alias]
# Or: from chainer.functions import transpose_sequence [as alias]
def check_backward(self, xs_data, gs_data):
        # In this situation the function returns no result
        if len(self.trans_lengths) == 0:
            return

        def f(*xs):
            return functions.transpose_sequence(xs)

        gradient_check.check_backward(
            f, tuple(xs_data), tuple(gs_data)) 
Developer: chainer, Project: chainer, Lines of code: 12, Source file: test_transpose_sequence.py
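Calling gradient_check.check_backward directly with concrete (hypothetical) inputs might look like this; the upstream gradients must match the shapes of the transposed outputs:

import numpy as np
from chainer import functions, gradient_check

lengths = [4, 3, 1]           # hypothetical sequence lengths, descending
trans_lengths = [3, 2, 2, 1]  # first dimensions of the transposed outputs
xs_data = tuple(np.random.randn(l, 3).astype(np.float32) for l in lengths)
gs_data = tuple(np.random.randn(l, 3).astype(np.float32) for l in trans_lengths)

gradient_check.check_backward(
    lambda *xs: functions.transpose_sequence(xs), xs_data, gs_data)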

Example 4: test_output

# Required import: from chainer import functions [as alias]
# Or: from chainer.functions import transpose_sequence [as alias]
def test_output(self):

        class Model(chainer.Chain):
            def __init__(self):
                super(Model, self).__init__()

            def __call__(self, *xs):
                return F.transpose_sequence(xs)

        model = Model()
        xs = [input_generator.increasing(*shape) for
              shape in self.in_shapes]

        self.expect(model, xs, name=self.name) 
Developer: chainer, Project: chainer, Lines of code: 16, Source file: test_arrays.py
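Outside the export-test harness (input_generator and self.expect belong to that harness), the same tiny model can be exercised directly; a sketch with hypothetical input shapes:

import numpy as np
import chainer
import chainer.functions as F

class Model(chainer.Chain):
    def __call__(self, *xs):
        return F.transpose_sequence(xs)

model = Model()
xs = [np.arange(l * 2, dtype=np.float32).reshape(l, 2) for l in (4, 3, 1)]
ys = model(*xs)
print([y.shape for y in ys])  # [(3, 2), (2, 2), (2, 2), (1, 2)]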

Example 5: __call__

# Required import: from chainer import functions [as alias]
# Or: from chainer.functions import transpose_sequence [as alias]
def __call__(self, xs, ys):
        # Sort both inputs and labels by descending length, then transpose
        # them into the time-major form the underlying CRF layer expects.
        xs = permutate_list(xs, argsort_list_descent(xs), inv=False)
        xs = F.transpose_sequence(xs)
        ys = permutate_list(ys, argsort_list_descent(ys), inv=False)
        ys = F.transpose_sequence(ys)
        return super(CRF, self).__call__(xs, ys)
Developer: chantera, Project: blstm-cws, Lines of code: 8, Source file: model.py
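permutate_list and argsort_list_descent are helpers from the blstm-cws project. The underlying idea, sketched here with plain chainer.links.CRF1d and hypothetical data, is to sort by descending length and feed time-major unary scores and labels to the CRF:

import numpy as np
import chainer.functions as F
import chainer.links as L

n_labels = 4
crf = L.CRF1d(n_labels)

lengths = [5, 3, 2]  # hypothetical, already in descending order
xs = [np.random.randn(l, n_labels).astype(np.float32) for l in lengths]     # unary scores
ys = [np.random.randint(0, n_labels, l).astype(np.int32) for l in lengths]  # gold labels

loss = crf(F.transpose_sequence(xs), F.transpose_sequence(ys))
print(float(loss.data))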

Example 6: argmax

# Required import: from chainer import functions [as alias]
# Or: from chainer.functions import transpose_sequence [as alias]
def argmax(self, xs):
        # Same sorting and transposition as in __call__, followed by Viterbi
        # decoding; the returned path is transposed back into one label
        # array per input sequence.
        xs = permutate_list(xs, argsort_list_descent(xs), inv=False)
        xs = F.transpose_sequence(xs)
        score, path = super(CRF, self).argmax(xs)
        path = F.transpose_sequence(path)
        return score, path
Developer: chantera, Project: blstm-cws, Lines of code: 8, Source file: model.py
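Decoding follows the same pattern. Continuing the hypothetical CRF1d setup from the previous sketch, argmax returns a time-major best path, which transpose_sequence turns back into one label array per (length-sorted) input sequence:

score, path = crf.argmax(F.transpose_sequence(xs))
labels = F.transpose_sequence(path)  # one label sequence per input, in sorted order
print([l.data for l in labels])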

Example 7: forward

# Required import: from chainer import functions [as alias]
# Or: from chainer.functions import transpose_sequence [as alias]
def forward(self, *inputs):
        batch = len(inputs) // 6
        lefts = inputs[0: batch]
        rights = inputs[batch: batch * 2]
        dests = inputs[batch * 2: batch * 3]
        labels = inputs[batch * 3: batch * 4]
        sequences = inputs[batch * 4: batch * 5]
        leaf_labels = inputs[batch * 5: batch * 6]

        inds = numpy.argsort([-len(l) for l in lefts])
        # Sort all arrays in descending order and transpose them
        lefts = F.transpose_sequence([lefts[i] for i in inds])
        rights = F.transpose_sequence([rights[i] for i in inds])
        dests = F.transpose_sequence([dests[i] for i in inds])
        labels = F.transpose_sequence([labels[i] for i in inds])
        sequences = F.transpose_sequence([sequences[i] for i in inds])
        leaf_labels = F.transpose_sequence(
            [leaf_labels[i] for i in inds])

        batch = len(inds)
        maxlen = len(sequences)

        loss = 0
        count = 0
        correct = 0

        dtype = chainer.get_dtype()
        stack = self.xp.zeros((batch, maxlen * 2, self.n_units), dtype)
        for i, (word, label) in enumerate(zip(sequences, leaf_labels)):
            batch = word.shape[0]
            es = self.leaf(word)
            ds = self.xp.full((batch,), i, self.xp.int32)
            y = self.label(es)
            loss += F.softmax_cross_entropy(y, label, normalize=False) * batch
            count += batch
            predict = self.xp.argmax(y.array, axis=1)
            correct += (predict == label.array).sum()

            stack = thin_stack.thin_stack_set(stack, ds, es)

        for left, right, dest, label in zip(lefts, rights, dests, labels):
            l, stack = thin_stack.thin_stack_get(stack, left)
            r, stack = thin_stack.thin_stack_get(stack, right)
            o = self.node(l, r)
            y = self.label(o)
            batch = l.shape[0]
            loss += F.softmax_cross_entropy(y, label, normalize=False) * batch
            count += batch
            predict = self.xp.argmax(y.array, axis=1)
            correct += (predict == label.array).sum()

            stack = thin_stack.thin_stack_set(stack, dest, o)

        loss /= count
        reporter.report({'loss': loss}, self)
        reporter.report({'total': count}, self)
        reporter.report({'correct': correct}, self)
        return loss 
Developer: chainer, Project: chainer, Lines of code: 60, Source file: train_recursive_minibatch.py
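The sorting step at the top of this example is the part that matters for transpose_sequence; in isolation, with hypothetical data, it looks like this:

import numpy as np
import chainer.functions as F

seqs = [np.arange(l, dtype=np.int32) for l in (2, 5, 3)]  # hypothetical data
order = np.argsort([-len(s) for s in seqs])               # descending by length
transposed = F.transpose_sequence([seqs[i] for i in order])
print([t.shape for t in transposed])  # [(3,), (3,), (2,), (1,), (1,)]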

Example 8: forward

# Required import: from chainer import functions [as alias]
# Or: from chainer.functions import transpose_sequence [as alias]
def forward(self, *inputs):
        batch = len(inputs) // 6
        lefts = inputs[0: batch]
        rights = inputs[batch: batch * 2]
        dests = inputs[batch * 2: batch * 3]
        labels = inputs[batch * 3: batch * 4]
        sequences = inputs[batch * 4: batch * 5]
        leaf_labels = inputs[batch * 5: batch * 6]

        inds = numpy.argsort([-len(l) for l in lefts])
        # Sort all arrays in descending order and transpose them
        lefts = F.transpose_sequence([lefts[i] for i in inds])
        rights = F.transpose_sequence([rights[i] for i in inds])
        dests = F.transpose_sequence([dests[i] for i in inds])
        labels = F.transpose_sequence([labels[i] for i in inds])
        sequences = F.transpose_sequence([sequences[i] for i in inds])
        leaf_labels = F.transpose_sequence(
            [leaf_labels[i] for i in inds])

        batch = len(inds)
        maxlen = len(sequences)

        loss = 0
        count = 0
        correct = 0

        stack = self.xp.zeros((batch, maxlen * 2, self.n_units), 'f')
        for i, (word, label) in enumerate(zip(sequences, leaf_labels)):
            batch = word.shape[0]
            es = self.leaf(word)
            ds = self.xp.full((batch,), i, 'i')
            y = self.label(es)
            loss += F.softmax_cross_entropy(y, label, normalize=False) * batch
            count += batch
            predict = self.xp.argmax(y.data, axis=1)
            correct += (predict == label.data).sum()

            stack = thin_stack.thin_stack_set(stack, ds, es)

        for left, right, dest, label in zip(lefts, rights, dests, labels):
            l, stack = thin_stack.thin_stack_get(stack, left)
            r, stack = thin_stack.thin_stack_get(stack, right)
            o = self.node(l, r)
            y = self.label(o)
            batch = l.shape[0]
            loss += F.softmax_cross_entropy(y, label, normalize=False) * batch
            count += batch
            predict = self.xp.argmax(y.data, axis=1)
            correct += (predict == label.data).sum()

            stack = thin_stack.thin_stack_set(stack, dest, o)

        loss /= count
        reporter.report({'loss': loss}, self)
        reporter.report({'total': count}, self)
        reporter.report({'correct': correct}, self)
        return loss 
Developer: pfnet, Project: pfio, Lines of code: 59, Source file: train_recursive_minibatch.py

Example 9: predict

# Required import: from chainer import functions [as alias]
# Or: from chainer.functions import transpose_sequence [as alias]
def predict(self, y_list, t, compute_loss=True):
        # Re-assemble per-sentence score matrices from the flat y_list,
        # using the stored sentence lengths.
        predict_list = []
        cnt = 0
        for n_len in self.n_length:
            pred = F.concat(y_list[cnt:cnt + n_len], axis=0)
            predict_list.append(pred)
            cnt += n_len

        inds = self.inds
        # Pairs (position, original index) sorted by original index give the
        # inverse permutation used later to restore the original order.
        inds_rev = sorted([(i, ind) for i, ind in enumerate(inds)], key=lambda x: x[1])

        hs = [predict_list[i] for i in inds]
        ts_original = None
        if compute_loss:
            ts_original = [self.xp.array(t[i], self.xp.int32) for i in inds]

        hs = F.transpose_sequence(hs)

        loss = None
        if compute_loss and ts_original is not None:
            # loss
            ts = F.transpose_sequence(ts_original)
            loss = self.lossfun(hs, ts)

        # predict
        score, predicts_trans = self.lossfun.argmax(hs)
        predicts = F.transpose_sequence(predicts_trans)
        gold_predict_pairs = []
        if compute_loss:
            for pred, gold in zip(predicts, ts_original):
                pred = to_cpu(pred.data)
                gold = to_cpu(gold)
                gold_predict_pairs.append([gold, pred])
        else:
            for pred in predicts:
                pred = to_cpu(pred.data)
                gold_predict_pairs.append([pred])

        # Put the (gold, prediction) pairs back into the original input order.
        gold_predict_pairs = [gold_predict_pairs[e_i] for e_i, _ in inds_rev]
        self.y = gold_predict_pairs

        return gold_predict_pairs, loss 
Developer: aonotas, Project: deep-crf, Lines of code: 45, Source file: bi_lstm.py
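The final reordering (the inds_rev trick) is just an inverse permutation; an equivalent sketch with hypothetical data:

import numpy as np

lengths = [2, 5, 3]                        # original order
order = np.argsort([-l for l in lengths])  # descending-length order
results_sorted = ["len %d" % lengths[i] for i in order]
restore = np.argsort(order)                # inverse permutation
results = [results_sorted[j] for j in restore]
print(results)  # ['len 2', 'len 5', 'len 3'] -- back in the original order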


Note: The chainer.functions.transpose_sequence examples in this article were compiled by 純淨天空 from open-source code and documentation platforms such as GitHub and MSDocs. The code snippets were selected from open-source projects contributed by various developers; copyright of the source code belongs to the original authors. When distributing or using the code, please follow the License of the corresponding project; do not republish without permission.