This article collects typical usage examples of the cholesky function from Python's theano.tensor.slinalg module. If you are wondering what cholesky does, how to call it, or what it looks like in real code, the curated examples below should help.
Below are 15 code examples of the cholesky function, sorted by popularity by default.
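Before the individual examples, here is a minimal self-contained sketch of the typical call, assuming Theano with SciPy installed (SciPy backs the CPU Cholesky op); the variable names are illustrative only:

import numpy as np
import theano
import theano.tensor as T
from theano.tensor.slinalg import cholesky

# Build a symbolic graph that factors a symmetric positive definite matrix.
A = T.dmatrix('A')
L = cholesky(A)               # lower-triangular factor by default
f = theano.function([A], L)

# M.dot(M.T) is symmetric positive definite for any non-singular M.
M = np.random.randn(4, 4)
A_val = M.dot(M.T)
L_val = f(A_val)
assert np.allclose(L_val.dot(L_val.T), A_val)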
Example 1: return_output
def return_output(self, Dif):
    # Dif is a theano.tensor matrix of pairwise differences
    Frac = Dif / self.gamma
    Cov = self.v0 * T.pow(Frac, self.alpha)
    L = sin.cholesky(T.exp(-Cov))
    eps = self.srng.normal(avg=0, std=0.001, size=(self.time, self.lsize))
    return T.dot(L, eps)
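Example 1 relies on the standard trick of coloring white noise with a Cholesky factor: if Sigma = L L^T and eps ~ N(0, I), then L.dot(eps) has covariance Sigma. A small NumPy sketch of that identity (the numbers are illustrative, not taken from the example):

import numpy as np

rng = np.random.RandomState(0)
Sigma = np.array([[1.0, 0.8],
                  [0.8, 1.0]])          # target covariance
L = np.linalg.cholesky(Sigma)           # Sigma = L @ L.T

eps = rng.randn(2, 100000)              # white noise, identity covariance
samples = L @ eps                       # colored noise with covariance Sigma

assert np.allclose(np.cov(samples), Sigma, atol=0.05)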
Example 2: blk_tridag_chol
def blk_tridag_chol(A, B):
    '''
    Compute the Cholesky decomposition of a symmetric, positive definite
    block-tridiagonal matrix.
    Inputs:
    A - [T x n x n] tensor, where each A[i,:,:] is the ith block diagonal matrix
    B - [T-1 x n x n] tensor, where each B[i,:,:] is the ith (upper) 1st block
        off-diagonal matrix
    Outputs:
    R - python list with two elements
        * R[0] - [T x n x n] tensor of block diagonal elements of the Cholesky decomposition
        * R[1] - [T-1 x n x n] tensor of (lower) 1st block off-diagonal elements of the Cholesky decomposition
    '''
    # Code for computing the Cholesky decomposition of a symmetric block tridiagonal matrix
    def compute_chol(Aip1, Bi, Li, Ci):
        Ci = T.dot(Bi.T, Tla.matrix_inverse(Li).T)
        Dii = Aip1 - T.dot(Ci, Ci.T)
        Lii = Tsla.cholesky(Dii)
        return [Lii, Ci]

    L1 = Tsla.cholesky(A[0])
    C1 = T.zeros_like(B[0])
    # this scan returns the diagonal and off-diagonal blocks of the Cholesky decomposition
    mat, updates = theano.scan(fn=compute_chol, sequences=[A[1:], B], outputs_info=[L1, C1])
    mat[0] = T.concatenate([T.shape_padleft(L1), mat[0]])
    return mat
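The recursion in compute_chol is block forward substitution: C_i = B_i^T L_i^{-T} and L_{i+1} = chol(A_{i+1} - C_i C_i^T). The NumPy sketch below (the helper dense_from_blocks, the sizes, and the conditioning trick are illustrative assumptions, not part of the example above) checks that recursion against a dense factorization of the assembled matrix:

import numpy as np
from scipy.linalg import cholesky as sp_chol

# Hypothetical helper: assemble the dense block-tridiagonal matrix from its blocks.
def dense_from_blocks(A, B):
    Tn, n, _ = A.shape
    M = np.zeros((Tn * n, Tn * n))
    for i in range(Tn):
        M[i*n:(i+1)*n, i*n:(i+1)*n] = A[i]
        if i < Tn - 1:
            M[i*n:(i+1)*n, (i+1)*n:(i+2)*n] = B[i]
            M[(i+1)*n:(i+2)*n, i*n:(i+1)*n] = B[i].T
    return M

rng = np.random.RandomState(0)
n, Tn = 2, 4
# Diagonally dominant blocks keep the assembled matrix positive definite.
A = np.array([np.eye(n) * 5 + rng.randn(n, n) * 0.1 for _ in range(Tn)])
A = (A + A.transpose(0, 2, 1)) / 2          # symmetrize the diagonal blocks
B = np.array([rng.randn(n, n) * 0.1 for _ in range(Tn - 1)])

# Run the same recursion as compute_chol above, in NumPy.
L = [sp_chol(A[0], lower=True)]
C = []
for i in range(Tn - 1):
    Ci = B[i].T.dot(np.linalg.inv(L[i]).T)
    C.append(Ci)
    L.append(sp_chol(A[i + 1] - Ci.dot(Ci.T), lower=True))

# The blocks match the corresponding blocks of the dense Cholesky factor.
L_dense = sp_chol(dense_from_blocks(A, B), lower=True)
assert np.allclose(L_dense[:n, :n], L[0])
assert np.allclose(L_dense[n:2*n, :n], C[0])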
Example 3: psd_solve_with_chol
def psd_solve_with_chol(node):
    if node.op == solve:
        A, b = node.inputs  # result is the solution of Ax = b
        if is_psd(A):
            L = cholesky(A)  # assume lower triangular factor
            x = solve_cholesky(L, b)
            return [x]
Example 4: test_cholesky_grad_indef
def test_cholesky_grad_indef():
    x = theano.tensor.matrix()
    matrix = np.array([[1, 0.2], [0.2, -2]]).astype(config.floatX)
    cholesky = GpuCholesky(lower=True)
    chol_f = theano.function([x], theano.tensor.grad(cholesky(x).sum(), [x]))
    with assert_raises(LinAlgError):
        chol_f(matrix)
Example 5: test_gpu_cholesky_opt
def test_gpu_cholesky_opt(self):
    if not imported_scipy:
        self.skipTest('SciPy is not enabled, skipping test')
    A = theano.tensor.matrix("A", dtype="float64")
    fn = theano.function([A], cholesky(A), mode=mode_with_gpu)
    assert any([isinstance(node.op, GpuCholesky)
                for node in fn.maker.fgraph.toposort()])
Example 6: psd_solve_with_chol
def psd_solve_with_chol(node):
    if node.op == solve:
        A, b = node.inputs  # result is the solution of Ax = b
        if is_psd(A):
            L = cholesky(A)
            # N.B. this can be further reduced to a yet-unwritten cho_solve Op
            # __if__ no other Op makes use of the L matrix during the
            # stabilization
            Li_b = Solve('lower_triangular')(L, b)
            x = Solve('upper_triangular')(L.T, Li_b)
            return [x]
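For reference, the two triangular solves that this rewrite inserts are numerically the same as SciPy's cho_solve; a minimal NumPy/SciPy check, independent of the Theano graph rewrite above:

import numpy as np
from scipy.linalg import cholesky as sp_chol, solve_triangular, cho_solve

rng = np.random.RandomState(0)
M = rng.randn(5, 5)
A = M.dot(M.T)                                # symmetric positive definite
b = rng.randn(5)

L = sp_chol(A, lower=True)
y = solve_triangular(L, b, lower=True)        # forward solve:  L y = b
x = solve_triangular(L.T, y, lower=False)     # back solve:     L.T x = y

assert np.allclose(A.dot(x), b)
assert np.allclose(x, cho_solve((L, True), b))  # matches SciPy's cho_solve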
Example 7: test_local_lift_cholesky
def test_local_lift_cholesky():
    if not cusolver_available:
        raise SkipTest('No cuSolver')
    A = tensor.fmatrix()
    o = slinalg.cholesky(A)
    f_cpu = theano.function([A], o, mode=mode_without_gpu)
    f_gpu = theano.function([A], o, mode=mode_with_gpu)
    assert not any(isinstance(n.op, slinalg.Cholesky)
                   for n in f_gpu.maker.fgraph.apply_nodes)
    # GpuCholesky op in this graph should be inplace (as its input is not reused by another op).
    assert any(isinstance(n.op, GpuCholesky) and n.op.inplace
               for n in f_gpu.maker.fgraph.apply_nodes)
    M_val = np.random.normal(size=(3, 3)).astype("float32")
    # A = M.dot(M.T) is positive definite for any non-singular M
    A_val = M_val.dot(M_val.T)
    utt.assert_allclose(f_cpu(A_val), f_gpu(A_val))
Example 8: setUp
def setUp(self):
    super(test_MatrixInverseCholesky, self).setUp()
    self.op_class = MatrixInverseCholesky
    self.op = MatrixInverseCholesky(lower=True)
    self.dtype = config.floatX
    self.A = theano.tensor.matrix("A", self.dtype)
    self.L = cholesky(self.A)
    self.B = theano.tensor.matrix(dtype=self.dtype)
    self.dim = 5
    self.B_cols = 2
    self.rng = numpy.random.RandomState(utt.fetch_seed())
    self.A_mat = numpy.asarray(self.rng.rand(self.dim, self.dim), dtype=self.dtype)
    self.A_mat = self.A_mat.T.dot(self.A_mat)
    self.B_mat = numpy.asarray(self.rng.rand(self.dim, self.B_cols), dtype=self.dtype)
    self.L_mat = scipy.linalg.cholesky(self.A_mat, lower=True)
Example 9: test_gpu_cholesky_not_inplace
def test_gpu_cholesky_not_inplace():
    if not cusolver_available:
        raise SkipTest('No cuSolver')
    A = tensor.fmatrix()
    A_squared = A**2
    B = slinalg.cholesky(A_squared)
    D = B + A_squared
    f_cpu = theano.function([A], D, mode=mode_without_gpu)
    f_gpu = theano.function([A], D, mode=mode_with_gpu)
    # GpuCholesky op in this graph should NOT be inplace (as its input is reused by another op)
    count_cholesky_not_inplace = len([n.op for n in f_gpu.maker.fgraph.apply_nodes
                                      if isinstance(n.op, GpuCholesky) and not n.op.inplace])
    assert count_cholesky_not_inplace == 1, count_cholesky_not_inplace
    M_val = np.random.normal(size=(3, 3)).astype("float32")
    # A = M.dot(M.T) is positive definite for any non-singular M
    A_val = M_val.dot(M_val.T)
    utt.assert_allclose(f_cpu(A_val), f_gpu(A_val))
Example 10: setUp
def setUp(self):
    super(test_SolveCholesky, self).setUp()
    self.op_class = SolveCholesky
    self.op = SolveCholesky()
    self.dtype = config.floatX
    self.A = theano.tensor.matrix(dtype=self.dtype)
    self.L = cholesky(self.A)
    self.B = theano.tensor.matrix(dtype=self.dtype)
    self.b = theano.tensor.vector(dtype=self.dtype)
    self.dim = 5
    rng = numpy.random.RandomState(utt.fetch_seed())
    self.A_mat = numpy.asarray(rng.rand(self.dim, self.dim), dtype=self.dtype)
    self.A_mat = self.A_mat.T.dot(self.A_mat)
    self.B_mat = numpy.asarray(rng.rand(self.dim, self.dim), dtype=self.dtype)
    self.b_vec = numpy.asarray(rng.rand(self.dim), dtype=self.dtype)
    self.L_mat = scipy.linalg.cholesky(self.A_mat, lower=True)
Example 11: test_cholesky_grad
def test_cholesky_grad():
    if not imported_scipy:
        raise SkipTest("Scipy needed for the Cholesky op.")
    rng = np.random.RandomState(utt.fetch_seed())
    r = rng.randn(5, 5).astype(config.floatX)
    # The dots are inside the graph since Cholesky needs symmetric positive
    # definite matrices.
    # Check the default.
    yield (lambda: utt.verify_grad(lambda r: cholesky(r.dot(r.T)),
                                   [r], 3, rng))
    # Explicit lower-triangular.
    yield (lambda: utt.verify_grad(lambda r: Cholesky(lower=True)(r.dot(r.T)),
                                   [r], 3, rng))
    # Explicit upper-triangular.
    yield (lambda: utt.verify_grad(lambda r: Cholesky(lower=False)(r.dot(r.T)),
                                   [r], 3, rng))
Example 12: test_cholesky_and_cholesky_grad_shape
def test_cholesky_and_cholesky_grad_shape():
    if not imported_scipy:
        raise SkipTest("Scipy needed for the Cholesky op.")
    rng = numpy.random.RandomState(utt.fetch_seed())
    x = tensor.matrix()
    for l in (cholesky(x), Cholesky(lower=True)(x), Cholesky(lower=False)(x)):
        f_chol = theano.function([x], l.shape)
        g = tensor.grad(l.sum(), x)
        f_cholgrad = theano.function([x], g.shape)
        topo_chol = f_chol.maker.fgraph.toposort()
        topo_cholgrad = f_cholgrad.maker.fgraph.toposort()
        if config.mode != "FAST_COMPILE":
            assert sum([node.op.__class__ == Cholesky for node in topo_chol]) == 0
            assert sum([node.op.__class__ == CholeskyGrad for node in topo_cholgrad]) == 0
        for shp in [2, 3, 5]:
            m = numpy.cov(rng.randn(shp, shp + 10)).astype(config.floatX)
            yield numpy.testing.assert_equal, f_chol(m), (shp, shp)
            yield numpy.testing.assert_equal, f_cholgrad(m), (shp, shp)
Example 13: test_cholesky
def test_cholesky():
    if not imported_scipy:
        raise SkipTest("Scipy needed for the Cholesky op.")
    rng = numpy.random.RandomState(utt.fetch_seed())
    r = rng.randn(5, 5).astype(config.floatX)
    pd = numpy.dot(r, r.T)
    x = tensor.matrix()
    chol = cholesky(x)
    # Check the default.
    ch_f = function([x], chol)
    yield check_lower_triangular, pd, ch_f
    # Explicit lower-triangular.
    chol = Cholesky(lower=True)(x)
    ch_f = function([x], chol)
    yield check_lower_triangular, pd, ch_f
    # Explicit upper-triangular.
    chol = Cholesky(lower=False)(x)
    ch_f = function([x], chol)
    yield check_upper_triangular, pd, ch_f
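As a side note on the lower flag exercised by these checks, the upper factor is just the transpose of the lower one for real symmetric positive definite input; a quick SciPy sketch (not part of the test above):

import numpy as np
from scipy.linalg import cholesky as sp_chol

rng = np.random.RandomState(0)
r = rng.randn(5, 5)
pd = r.dot(r.T)

L = sp_chol(pd, lower=True)    # lower-triangular factor, pd = L @ L.T
U = sp_chol(pd, lower=False)   # upper-triangular factor, pd = U.T @ U

assert np.allclose(L, U.T)
assert np.allclose(L.dot(L.T), pd)
assert np.allclose(U.T.dot(U), pd)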
Example 14: compute_chol
def compute_chol(Aip1, Bi, Li, Ci):
    Ci = T.dot(Bi.T, Tla.matrix_inverse(Li).T)
    Dii = Aip1 - T.dot(Ci, Ci.T)
    Lii = Tsla.cholesky(Dii)
    return [Lii, Ci]
Example 15: test_gpu_cholesky_opt
def test_gpu_cholesky_opt(self):
    A = theano.tensor.matrix("A", dtype="float32")
    fn = theano.function([A], cholesky(A), mode=mode_with_gpu.excluding('cusolver'))
    assert any([isinstance(node.op, GpuMagmaCholesky)
                for node in fn.maker.fgraph.toposort()])