This article collects typical usage examples of the theano.tensor.lvector method in Python. If you have been wondering what tensor.lvector does, how to call it, or where it shows up in real code, the curated examples below should help. You can also explore the other members of the theano.tensor module.
The section below presents 15 code examples of tensor.lvector, drawn from test suites and ordered by popularity.
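Before the test-suite excerpts, a minimal sketch of the method itself may help: tensor.lvector declares a symbolic one-dimensional int64 vector (the "l" prefix denotes the long/int64 dtype), typically used for index or label arrays. The variable name idx and the doubling expression are illustrative assumptions, not taken from the examples below.

import numpy
import theano
import theano.tensor as tensor

idx = tensor.lvector('idx')           # symbolic 1-D int64 vector (hypothetical name)
expr = idx * 2                        # build a symbolic expression on it
f = theano.function([idx], expr)      # compile the graph into a callable
print(f(numpy.asarray([1, 2, 3], dtype='int64')))  # -> [2 4 6]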
Example 1: test_op
# Required import: from theano import tensor [as alias]
# Or: from theano.tensor import lvector [as alias]
def test_op(self):
    # Scalar count: every row of the multinomial gets the same total.
    n = tensor.lscalar()
    f = theano.function([self.p, n], multinomial(n, self.p))
    _n = 5
    tested = f(self._p, _n)
    assert tested.shape == self._p.shape
    assert numpy.allclose(numpy.floor(tested.todense()), tested.todense())
    assert tested[2, 1] == _n

    # Vector count: one total per row of p.
    n = tensor.lvector()
    f = theano.function([self.p, n], multinomial(n, self.p))
    _n = numpy.asarray([1, 2, 3, 4], dtype='int64')
    tested = f(self._p, _n)
    assert tested.shape == self._p.shape
    assert numpy.allclose(numpy.floor(tested.todense()), tested.todense())
    assert tested[2, 1] == _n[2]
Example 2: test_softmax_optimizations
# Required import: from theano import tensor [as alias]
# Or: from theano.tensor import lvector [as alias]
def test_softmax_optimizations(self):
    x = tensor.matrix('x')
    one_of_n = tensor.lvector('one_of_n')
    op = crossentropy_categorical_1hot
    xe = op(x, one_of_n)
    fgraph = gof.FunctionGraph(
        [x, one_of_n],
        [op(softmax_op(x), one_of_n)])
    assert fgraph.outputs[0].owner.op == op

    theano.compile.mode.optdb.query(
        theano.compile.mode.OPT_FAST_RUN).optimize(fgraph)

    assert str(fgraph.outputs[0].owner.op) == 'OutputGuard'
    assert (fgraph.outputs[0].owner.inputs[0].owner.op ==
            crossentropy_softmax_argmax_1hot_with_bias)
Example 3: test_softmax_optimizations_w_bias_vector
# Required import: from theano import tensor [as alias]
# Or: from theano.tensor import lvector [as alias]
def test_softmax_optimizations_w_bias_vector(self):
    x = tensor.vector('x')
    b = tensor.vector('b')
    one_of_n = tensor.lvector('one_of_n')
    op = crossentropy_categorical_1hot
    fgraph = gof.FunctionGraph(
        [x, b, one_of_n],
        [op(softmax_op(x + b), one_of_n)])
    assert fgraph.outputs[0].owner.op == op
    # print 'BEFORE'
    # for node in fgraph.toposort():
    #     print node.op
    #     print printing.pprint(node.outputs[0])
    # print '----'

    theano.compile.mode.optdb.query(
        theano.compile.mode.OPT_FAST_RUN).optimize(fgraph)
    # print 'AFTER'
    # for node in fgraph.toposort():
    #     print node.op
    # print '===='

    assert len(fgraph.toposort()) == 3
    assert str(fgraph.outputs[0].owner.op) == 'OutputGuard'
    assert (fgraph.outputs[0].owner.inputs[0].owner.op ==
            crossentropy_softmax_argmax_1hot_with_bias)
Example 4: test_err_bound_list
# Required import: from theano import tensor [as alias]
# Or: from theano.tensor import lvector [as alias]
def test_err_bound_list(self):
    n = self.shared(numpy.ones((2, 3), dtype=self.dtype) * 5)
    l = lvector()
    t = n[l]
    # We again test AdvancedSubtensor1 as we transfer data to the cpu.
    self.assertTrue(isinstance(t.owner.op, tensor.AdvancedSubtensor1))

    f = self.function([l], t, op=self.adv_sub1)
    # the grad
    g = self.function([l],
                      inc_subtensor(t, numpy.asarray([[1.]], self.dtype)),
                      op=self.adv_incsub1)

    for shp in [[0, 4], [0, -3], [-10]]:
        self.assertRaises(IndexError, f, shp)
        self.assertRaises(IndexError, g, shp)
Example 5: test_grad
# Required import: from theano import tensor [as alias]
# Or: from theano.tensor import lvector [as alias]
def test_grad(self):
    ones = numpy.ones((1, 3), dtype=self.dtype)
    n = self.shared(ones * 5, broadcastable=(True, False))
    idx = tensor.lvector()
    idx2 = tensor.lvector()
    t = n[idx, idx2]
    self.assertTrue(isinstance(t.owner.op, tensor.AdvancedSubtensor))

    utt.verify_grad(lambda m: m[[1, 3], [2, 4]],
                    [numpy.random.rand(5, 5).astype(self.dtype)])

    def fun(x, y):
        return advanced_inc_subtensor(x, y, [1, 3], [2, 4])
    utt.verify_grad(fun, [numpy.random.rand(5, 5).astype(self.dtype),
                          numpy.random.rand(2).astype(self.dtype)])

    def fun(x, y):
        return advanced_set_subtensor(x, y, [1, 3], [2, 4])
    utt.verify_grad(fun, [numpy.random.rand(5, 5).astype(self.dtype),
                          numpy.random.rand(2).astype(self.dtype)])
Example 6: test_softmax_optimizations
# Required import: from theano import tensor [as alias]
# Or: from theano.tensor import lvector [as alias]
def test_softmax_optimizations():
    # Note: this GPU test assumes `cuda` (theano.sandbox.cuda) and
    # `mode_with_gpu` are provided by the enclosing test module.
    from theano.tensor.nnet.nnet import softmax, crossentropy_categorical_1hot
    x = tensor.fmatrix('x')
    one_of_n = tensor.lvector('one_of_n')
    op = crossentropy_categorical_1hot
    xe = op(x, one_of_n)
    fgraph = theano.gof.FunctionGraph(
        [x, one_of_n],
        [op(softmax(x), one_of_n)])
    assert fgraph.outputs[0].owner.op == op

    mode_with_gpu.optimizer.optimize(fgraph)

    assert str(fgraph.outputs[0].owner.op) == 'OutputGuard'
    assert fgraph.outputs[0].owner.inputs[0].owner.op == cuda.host_from_gpu
    assert (fgraph.outputs[0].owner.inputs[0].owner.inputs[0].owner.op ==
            cuda.nnet.gpu_crossentropy_softmax_argmax_1hot_with_bias)
Example 7: test_sparse_from_list
# Required import: from theano import tensor [as alias]
# Or: from theano.tensor import lvector [as alias]
def test_sparse_from_list(self):
    x = tensor.matrix('x')
    vals = tensor.matrix('vals')
    ilist = tensor.lvector('ilist')
    out = construct_sparse_from_list(x, vals, ilist)
    self._compile_and_check(
        [x, vals, ilist],
        [out],
        [numpy.zeros((40, 10), dtype=config.floatX),
         numpy.random.randn(12, 10).astype(config.floatX),
         numpy.random.randint(low=0, high=40, size=(12,))],
        ConstructSparseFromList
    )
Example 8: test_infer_shape
# Required import: from theano import tensor [as alias]
# Or: from theano.tensor import lvector [as alias]
def test_infer_shape(self):
    admat = matrix()
    advec = vector()
    alvec = lvector()
    rng = numpy.random.RandomState(utt.fetch_seed())
    admat_val = rng.rand(10, 5).astype(config.floatX)
    admat_val /= admat_val.sum(axis=1).reshape(10, 1)
    advec_val = rng.rand(10).astype(config.floatX)
    alvec_val = rng.randint(low=0, high=5, size=10)
    self._compile_and_check(
        [advec, admat, alvec],
        [CrossentropySoftmax1HotWithBiasDx()(advec, admat, alvec)],
        [advec_val, admat_val, alvec_val],
        CrossentropySoftmax1HotWithBiasDx)
Example 9: test_neg_idx
# Required import: from theano import tensor [as alias]
# Or: from theano.tensor import lvector [as alias]
def test_neg_idx(self):
    admat = matrix()
    advec = vector()
    alvec = lvector()
    rng = numpy.random.RandomState(utt.fetch_seed())
    admat_val = rng.rand(10, 5).astype(config.floatX)
    admat_val /= admat_val.sum(axis=1).reshape(10, 1)
    advec_val = rng.rand(10).astype(config.floatX)
    alvec_val = rng.randint(low=0, high=5, size=10)
    # A negative label is out of range for the one-hot index and must raise.
    alvec_val[1] = -1
    out = CrossentropySoftmax1HotWithBiasDx()(advec, admat, alvec)
    f = theano.function([advec, admat, alvec], out)
    self.assertRaises(ValueError, f, advec_val, admat_val, alvec_val)
Example 10: test_grad
# Required import: from theano import tensor [as alias]
# Or: from theano.tensor import lvector [as alias]
def test_grad(self):
    x = tensor.matrix('x')
    one_of_n = tensor.lvector('one_of_n')
    op = crossentropy_categorical_1hot
    xe = op(x, one_of_n)
    f = theano.function([x, one_of_n], xe)
    x_val = numpy.asarray([[.4, .6, .0], [.1, .8, .1]],
                          dtype=config.floatX)
    xe_val = f(x_val, [0, 1])
    assert numpy.allclose(xe_val, -numpy.log([.4, .8]))

    def oplike(x):
        return op(x, [0, 1])
    tensor.verify_grad(oplike, [x_val], rng=numpy.random)
Example 11: test_softmax_optimizations_vector
# Required import: from theano import tensor [as alias]
# Or: from theano.tensor import lvector [as alias]
def test_softmax_optimizations_vector(self):
    x = tensor.vector('x')
    one_of_n = tensor.lvector('one_of_n')
    op = crossentropy_categorical_1hot
    fgraph = gof.FunctionGraph(
        [x, one_of_n],
        [op(softmax_op(x), one_of_n)])
    assert fgraph.outputs[0].owner.op == op

    theano.compile.mode.optdb.query(
        theano.compile.mode.OPT_FAST_RUN).optimize(fgraph)
    assert str(fgraph.outputs[0].owner.op) == 'OutputGuard'
    assert (fgraph.outputs[0].owner.inputs[0].owner.op ==
            crossentropy_softmax_argmax_1hot_with_bias)
Example 12: test_softmax_optimizations_w_bias2
# Required import: from theano import tensor [as alias]
# Or: from theano.tensor import lvector [as alias]
def test_softmax_optimizations_w_bias2(self):
    x = tensor.matrix('x')
    b = tensor.vector('b')
    c = tensor.vector('c')
    one_of_n = tensor.lvector('one_of_n')
    op = crossentropy_categorical_1hot
    fgraph = gof.FunctionGraph(
        [x, b, c, one_of_n],
        [op(softmax_op(T.add(x, b, c)), one_of_n)])
    assert fgraph.outputs[0].owner.op == op
    # print 'BEFORE'
    # for node in fgraph.toposort():
    #     print node.op
    # print '----'

    theano.compile.mode.optdb.query(
        theano.compile.mode.OPT_FAST_RUN).optimize(fgraph)
    # print 'AFTER'
    # for node in fgraph.toposort():
    #     print node.op
    # print '===='

    assert len(fgraph.toposort()) == 3
    assert str(fgraph.outputs[0].owner.op) == 'OutputGuard'
    assert (fgraph.outputs[0].owner.inputs[0].owner.op ==
            crossentropy_softmax_argmax_1hot_with_bias)
Example 13: test_softmax_grad_optimizations
# Required import: from theano import tensor [as alias]
# Or: from theano.tensor import lvector [as alias]
def test_softmax_grad_optimizations(self):
    x = tensor.matrix('x')
    one_of_n = tensor.lvector('one_of_n')
    op = crossentropy_categorical_1hot
    xe = op(softmax_op(x), one_of_n)
    sum_xe = tensor.sum(xe)
    g_x = tensor.grad(sum_xe, x)
    fgraph = gof.FunctionGraph(
        [x, one_of_n],
        [g_x])
    self.assertTrue(hasattr(fgraph.outputs[0].tag, 'trace'))
    # print 'BEFORE'
    # for node in fgraph.toposort():
    #     print node.op, node.inputs
    # print '----'

    theano.compile.mode.optdb.query(
        theano.compile.mode.OPT_FAST_RUN).optimize(fgraph)
    # print 'AFTER'
    # for node in fgraph.toposort():
    #     print node.op, node.inputs

    has_cx1hot = False
    has_cx1hotdx = False
    has_softmax = False
    has_softmaxdx = False
    for node in fgraph.toposort():
        if node.op == crossentropy_softmax_argmax_1hot_with_bias:
            has_cx1hot = True
        if node.op == crossentropy_softmax_1hot_with_bias_dx:
            has_cx1hotdx = True
        if node.op == softmax_op:
            has_softmax = True
        if node.op == softmax_grad:
            has_softmaxdx = True
    assert not has_cx1hot
    assert has_cx1hotdx
    assert has_softmax
    assert not has_softmaxdx
Example 14: test_softmax_grad_optimizations_vector
# Required import: from theano import tensor [as alias]
# Or: from theano.tensor import lvector [as alias]
def test_softmax_grad_optimizations_vector(self):
    x = tensor.vector('x')
    one_of_n = tensor.lvector('one_of_n')
    op = crossentropy_categorical_1hot
    xe = op(softmax_op(x), one_of_n)
    sum_xe = tensor.sum(xe)
    g_x = tensor.grad(sum_xe, x)
    fgraph = gof.FunctionGraph(
        [x, one_of_n],
        [g_x])
    # print 'BEFORE'
    # for node in fgraph.toposort():
    #     print node.op, node.inputs
    # print '----'

    theano.compile.mode.optdb.query(
        theano.compile.mode.OPT_FAST_RUN).optimize(fgraph)
    # print 'AFTER'
    # for node in fgraph.toposort():
    #     print node.op, node.inputs

    has_cx1hot = False
    has_cx1hotdx = False
    has_softmax = False
    has_softmaxdx = False
    for node in fgraph.toposort():
        if node.op == crossentropy_softmax_argmax_1hot_with_bias:
            has_cx1hot = True
        if node.op == crossentropy_softmax_1hot_with_bias_dx:
            has_cx1hotdx = True
        if node.op == softmax_op:
            has_softmax = True
        if node.op == softmax_grad:
            has_softmaxdx = True
    assert not has_cx1hot
    assert has_cx1hotdx
    assert has_softmax
    assert not has_softmaxdx
Example 15: test_bug_2009_06_02_trac_387
# Required import: from theano import tensor [as alias]
# Or: from theano.tensor import lvector [as alias]
def test_bug_2009_06_02_trac_387():
    y = tensor.lvector('y')
    f = theano.function([y],
                        tensor.int_div(
                            tensor.DimShuffle(y[0].broadcastable,
                                              ['x'])(y[0]), 2))
    print(f(numpy.ones(1, dtype='int64') * 3))
    # XXX: there is no assert, nor comment that DEBUGMODE is to do the
    # checking. What was the bug, and how is it being tested?