This article collects typical usage examples of the Python method bayespy.nodes.GaussianARD._message_to_child. If you are wondering what GaussianARD._message_to_child does and how to use it, the curated code examples below may help. You can also look further into the containing class, bayespy.nodes.GaussianARD.
The code examples of GaussianARD._message_to_child shown below are ordered by popularity by default.
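Before working through the tests, here is a minimal standalone sketch (not taken from the examples below, and assuming bayespy and NumPy are installed) of what _message_to_child returns: the node's moments, i.e. the mean <x> and the second moment <x^2>. The expected values mirror the formula asserted in Example 2.

import numpy as np
from bayespy.nodes import GaussianARD

X = GaussianARD(2, 3)             # scalar node: prior mean 2, ARD precision 3
u0, u1 = X._message_to_child()    # the node's moments
print(u0)                         # approximately 2.0      (<x>)
print(u1)                         # approximately 4.3333   (2**2 + 1/3 = <x^2>)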
Example 1: test_initialization
# Module to import: from bayespy.nodes import GaussianARD [as alias]
# Or: from bayespy.nodes.GaussianARD import _message_to_child [as alias]
# This example also uses numpy (as np) and linalg and misc from bayespy.utils,
# which the original listing does not mention.
def test_initialization(self):
    """
    Test initialization methods of GaussianARD
    """
    X = GaussianARD(1, 2, shape=(2,), plates=(3,))
    # Prior initialization
    mu = 1 * np.ones((3, 2))
    alpha = 2 * np.ones((3, 2))
    X.initialize_from_prior()
    u = X._message_to_child()
    self.assertAllClose(u[0]*np.ones((3,2)),
                        mu)
    self.assertAllClose(u[1]*np.ones((3,2,2)),
                        linalg.outer(mu, mu, ndim=1) +
                        misc.diag(1/alpha, ndim=1))
    # Parameter initialization
    mu = np.random.randn(3, 2)
    alpha = np.random.rand(3, 2)
    X.initialize_from_parameters(mu, alpha)
    u = X._message_to_child()
    self.assertAllClose(u[0], mu)
    self.assertAllClose(u[1], linalg.outer(mu, mu, ndim=1) +
                              misc.diag(1/alpha, ndim=1))
    # Value initialization
    x = np.random.randn(3, 2)
    X.initialize_from_value(x)
    u = X._message_to_child()
    self.assertAllClose(u[0], x)
    self.assertAllClose(u[1], linalg.outer(x, x, ndim=1))
    # Random initialization
    X.initialize_from_random()
    pass
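As a follow-up to Example 1, the short sketch below (a standalone script of my own, not part of the original test class, with an arbitrary precision of 10) contrasts two of the initializers: initialize_from_parameters keeps a diag(1/alpha) uncertainty term in the second moment, whereas initialize_from_value treats the given value as exact, so that term disappears.

import numpy as np
from bayespy.nodes import GaussianARD
from bayespy.utils import linalg, misc

X = GaussianARD(1, 2, shape=(2,), plates=(3,))
x = np.random.randn(3, 2)
alpha = 10 * np.ones((3, 2))

X.initialize_from_parameters(x, alpha)   # mean x, precision alpha
u = X._message_to_child()
print(np.allclose(u[1], linalg.outer(x, x, ndim=1) + misc.diag(1/alpha, ndim=1)))  # True

X.initialize_from_value(x)               # fixed value, no uncertainty term
u = X._message_to_child()
print(np.allclose(u[1], linalg.outer(x, x, ndim=1)))                               # True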
Example 2: test_message_to_child
# Module to import: from bayespy.nodes import GaussianARD [as alias]
# Or: from bayespy.nodes.GaussianARD import _message_to_child [as alias]
# This example also uses numpy (as np) and misc from bayespy.utils,
# which the original listing does not mention.
def test_message_to_child(self):
    """
    Test moments of GaussianARD.
    """
    # Check that moments have full shape when broadcasting
    X = GaussianARD(np.zeros((2,)),
                    np.ones((3,2)),
                    shape=(4,3,2))
    (u0, u1) = X._message_to_child()
    self.assertEqual(np.shape(u0),
                     (4,3,2))
    self.assertEqual(np.shape(u1),
                     (4,3,2,4,3,2))
    # Check the formula
    X = GaussianARD(2, 3)
    (u0, u1) = X._message_to_child()
    self.assertAllClose(u0, 2)
    self.assertAllClose(u1, 2**2 + 1/3)
    # Check the formula for multidimensional arrays
    X = GaussianARD(2*np.ones((2,1,4)),
                    3*np.ones((2,3,1)),
                    ndim=3)
    (u0, u1) = X._message_to_child()
    self.assertAllClose(u0, 2*np.ones((2,3,4)))
    self.assertAllClose(u1,
                        2**2 * np.ones((2,3,4,2,3,4))
                        + 1/3 * misc.identity(2,3,4))
    # Check the formula for dim-broadcasted mu
    X = GaussianARD(2*np.ones((3,1)),
                    3*np.ones((2,3,4)),
                    ndim=3)
    (u0, u1) = X._message_to_child()
    self.assertAllClose(u0, 2*np.ones((2,3,4)))
    self.assertAllClose(u1,
                        2**2 * np.ones((2,3,4,2,3,4))
                        + 1/3 * misc.identity(2,3,4))
    # Check the formula for dim-broadcasted alpha
    X = GaussianARD(2*np.ones((2,3,4)),
                    3*np.ones((3,1)),
                    ndim=3)
    (u0, u1) = X._message_to_child()
    self.assertAllClose(u0, 2*np.ones((2,3,4)))
    self.assertAllClose(u1,
                        2**2 * np.ones((2,3,4,2,3,4))
                        + 1/3 * misc.identity(2,3,4))
    # Check the formula for dim-broadcasted mu and alpha
    X = GaussianARD(2*np.ones((3,1)),
                    3*np.ones((3,1)),
                    shape=(2,3,4))
    (u0, u1) = X._message_to_child()
    self.assertAllClose(u0, 2*np.ones((2,3,4)))
    self.assertAllClose(u1,
                        2**2 * np.ones((2,3,4,2,3,4))
                        + 1/3 * misc.identity(2,3,4))
    # Check the formula for dim-broadcasted mu with plates
    mu = GaussianARD(2*np.ones((5,1,3,4)),
                     np.ones((5,1,3,4)),
                     shape=(3,4),
                     plates=(5,1))
    X = GaussianARD(mu,
                    3*np.ones((5,2,3,4)),
                    shape=(2,3,4),
                    plates=(5,))
    (u0, u1) = X._message_to_child()
    self.assertAllClose(u0, 2*np.ones((5,2,3,4)))
    self.assertAllClose(u1,
                        2**2 * np.ones((5,2,3,4,2,3,4))
                        + 1/3 * misc.identity(2,3,4))
    # Check posterior
    X = GaussianARD(2, 3)
    Y = GaussianARD(X, 1)
    Y.observe(10)
    X.update()
    (u0, u1) = X._message_to_child()
    self.assertAllClose(u0,
                        1/(3+1) * (3*2 + 1*10))
    self.assertAllClose(u1,
                        (1/(3+1) * (3*2 + 1*10))**2 + 1/(3+1))
    pass
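The posterior check at the end of Example 2 is plain Gaussian precision-weighted averaging; the sketch below (pure Python arithmetic, no bayespy needed) works through the same numbers that the assertions encode.

prior_mean, prior_prec = 2, 3     # GaussianARD(2, 3)
y, lik_prec = 10, 1               # observed through GaussianARD(X, 1)

post_prec = prior_prec + lik_prec                              # 3 + 1 = 4
post_mean = (prior_prec*prior_mean + lik_prec*y) / post_prec   # (6 + 10) / 4 = 4.0
second_moment = post_mean**2 + 1/post_prec                     # 16 + 0.25 = 16.25

print(post_mean, second_moment)   # the asserted u0 and u1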
Example 3: test_message_to_parent
# Module to import: from bayespy.nodes import GaussianARD [as alias]
# Or: from bayespy.nodes.GaussianARD import _message_to_child [as alias]
# This example also uses numpy (as np), Gamma, Categorical and Mixture from
# bayespy.nodes, and (presumably) random from bayespy.utils, none of which the
# original listing mentions.
def test_message_to_parent(self):
    """
    Test the message to parents of Mixture node.
    """
    K = 3
    # Broadcasting the moments on the cluster axis
    Mu = GaussianARD(2, 1,
                     ndim=0,
                     plates=(K,))
    (mu, mumu) = Mu._message_to_child()
    Alpha = Gamma(3, 1,
                  plates=(K,))
    (alpha, logalpha) = Alpha._message_to_child()
    z = Categorical(np.ones(K)/K)
    X = Mixture(z, GaussianARD, Mu, Alpha)
    tau = 4
    Y = GaussianARD(X, tau)
    y = 5
    Y.observe(y)
    (x, xx) = X._message_to_child()
    m = z._message_from_children()
    self.assertAllClose(m[0] * np.ones(K),
                        random.gaussian_logpdf(xx*alpha,
                                               x*alpha*mu,
                                               mumu*alpha,
                                               logalpha,
                                               0)
                        * np.ones(K))
    m = Mu._message_from_children()
    self.assertAllClose(m[0],
                        1/K * (alpha*x) * np.ones(3))
    self.assertAllClose(m[1],
                        -0.5 * 1/K * alpha * np.ones(3))
    # Some parameters do not have cluster plate axis
    Mu = GaussianARD(2, 1,
                     ndim=0,
                     plates=(K,))
    (mu, mumu) = Mu._message_to_child()
    Alpha = Gamma(3, 1)  # Note: no cluster plate axis!
    (alpha, logalpha) = Alpha._message_to_child()
    z = Categorical(np.ones(K)/K)
    X = Mixture(z, GaussianARD, Mu, Alpha)
    tau = 4
    Y = GaussianARD(X, tau)
    y = 5
    Y.observe(y)
    (x, xx) = X._message_to_child()
    m = z._message_from_children()
    self.assertAllClose(m[0] * np.ones(K),
                        random.gaussian_logpdf(xx*alpha,
                                               x*alpha*mu,
                                               mumu*alpha,
                                               logalpha,
                                               0)
                        * np.ones(K))
    m = Mu._message_from_children()
    self.assertAllClose(m[0],
                        1/K * (alpha*x) * np.ones(3))
    self.assertAllClose(m[1],
                        -0.5 * 1/K * alpha * np.ones(3))
    # Cluster assignments do not have as many plate axes as parameters.
    M = 2
    Mu = GaussianARD(2, 1,
                     ndim=0,
                     plates=(K,M))
    (mu, mumu) = Mu._message_to_child()
    Alpha = Gamma(3, 1,
                  plates=(K,M))
    (alpha, logalpha) = Alpha._message_to_child()
    z = Categorical(np.ones(K)/K)
    X = Mixture(z, GaussianARD, Mu, Alpha, cluster_plate=-2)
    tau = 4
    Y = GaussianARD(X, tau)
    y = 5 * np.ones(M)
    Y.observe(y)
    (x, xx) = X._message_to_child()
    m = z._message_from_children()
    self.assertAllClose(m[0]*np.ones(K),
                        np.sum(random.gaussian_logpdf(xx*alpha,
                                                      x*alpha*mu,
                                                      mumu*alpha,
                                                      logalpha,
                                                      0) *
                               np.ones((K,M)),
                               axis=-1))
    m = Mu._message_from_children()
    self.assertAllClose(m[0] * np.ones((K,M)),
                        1/K * (alpha*x) * np.ones((K,M)))
    self.assertAllClose(m[1] * np.ones((K,M)),
                        -0.5 * 1/K * alpha * np.ones((K,M)))
    # Mixed distribution broadcasts g
    # This tests for a found bug. The bug caused an error.
#......... part of the code is omitted here .........
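Example 3 above and Example 4 below exercise the mixture model only through internal message-passing methods. For orientation, here is a rough sketch of how the same model would normally be assembled and updated through bayespy's public interface; the VB import and the update call follow standard bayespy usage, but the sketch is my own addition and not part of the original tests.

import numpy as np
from bayespy.nodes import GaussianARD, Gamma, Categorical, Mixture
from bayespy.inference import VB

K = 3
Mu = GaussianARD(2, 1, ndim=0, plates=(K,))   # per-cluster means
Alpha = Gamma(3, 1, plates=(K,))              # per-cluster precisions
z = Categorical(np.ones(K) / K)               # cluster assignment
X = Mixture(z, GaussianARD, Mu, Alpha)
Y = GaussianARD(X, 4)                         # noisy observation of X
Y.observe(5)

Q = VB(Y, X, z, Mu, Alpha)                    # variational Bayesian engine
Q.update(repeat=10)                           # run a few VB iterations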
Example 4: test_message_to_parent
# Module to import: from bayespy.nodes import GaussianARD [as alias]
# Or: from bayespy.nodes.GaussianARD import _message_to_child [as alias]
# This example also uses numpy (as np), Gamma, Categorical and Mixture from
# bayespy.nodes, and (presumably) random from bayespy.utils, none of which the
# original listing mentions.
def test_message_to_parent(self):
    """
    Test the message to parents of Mixture node.
    """
    K = 3
    # Broadcasting the moments on the cluster axis
    Mu = GaussianARD(2, 1,
                     ndim=0,
                     plates=(K,))
    (mu, mumu) = Mu._message_to_child()
    Alpha = Gamma(3, 1,
                  plates=(K,))
    (alpha, logalpha) = Alpha._message_to_child()
    z = Categorical(np.ones(K)/K)
    X = Mixture(z, GaussianARD, Mu, Alpha)
    tau = 4
    Y = GaussianARD(X, tau)
    y = 5
    Y.observe(y)
    (x, xx) = X._message_to_child()
    m = X._message_to_parent(0)
    self.assertAllClose(m[0],
                        random.gaussian_logpdf(xx*alpha,
                                               x*alpha*mu,
                                               mumu*alpha,
                                               logalpha,
                                               0))
    m = X._message_to_parent(1)
    self.assertAllClose(m[0],
                        1/K * (alpha*x) * np.ones(3))
    self.assertAllClose(m[1],
                        -0.5 * 1/K * alpha * np.ones(3))
    # Some parameters do not have cluster plate axis
    Mu = GaussianARD(2, 1,
                     ndim=0,
                     plates=(K,))
    (mu, mumu) = Mu._message_to_child()
    Alpha = Gamma(3, 1)  # Note: no cluster plate axis!
    (alpha, logalpha) = Alpha._message_to_child()
    z = Categorical(np.ones(K)/K)
    X = Mixture(z, GaussianARD, Mu, Alpha)
    tau = 4
    Y = GaussianARD(X, tau)
    y = 5
    Y.observe(y)
    (x, xx) = X._message_to_child()
    m = X._message_to_parent(0)
    self.assertAllClose(m[0],
                        random.gaussian_logpdf(xx*alpha,
                                               x*alpha*mu,
                                               mumu*alpha,
                                               logalpha,
                                               0))
    m = X._message_to_parent(1)
    self.assertAllClose(m[0],
                        1/K * (alpha*x) * np.ones(3))
    self.assertAllClose(m[1],
                        -0.5 * 1/K * alpha * np.ones(3))
    # Cluster assignments do not have as many plate axes as parameters.
    M = 2
    Mu = GaussianARD(2, 1,
                     ndim=0,
                     plates=(K,M))
    (mu, mumu) = Mu._message_to_child()
    Alpha = Gamma(3, 1,
                  plates=(K,M))
    (alpha, logalpha) = Alpha._message_to_child()
    z = Categorical(np.ones(K)/K)
    X = Mixture(z, GaussianARD, Mu, Alpha, cluster_plate=-2)
    tau = 4
    Y = GaussianARD(X, tau)
    y = 5 * np.ones(M)
    Y.observe(y)
    (x, xx) = X._message_to_child()
    m = X._message_to_parent(0)
    self.assertAllClose(m[0]*np.ones(K),
                        np.sum(random.gaussian_logpdf(xx*alpha,
                                                      x*alpha*mu,
                                                      mumu*alpha,
                                                      logalpha,
                                                      0) *
                               np.ones((K,M)),
                               axis=-1))
    m = X._message_to_parent(1)
    self.assertAllClose(m[0] * np.ones((K,M)),
                        1/K * (alpha*x) * np.ones((K,M)))
    self.assertAllClose(m[1] * np.ones((K,M)),
                        -0.5 * 1/K * alpha * np.ones((K,M)))
    pass
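Example 4 relates to Example 3 as follows: there the same quantities were read off with z._message_from_children() and Mu._message_from_children(), while here they are requested directly from the mixture node via X._message_to_parent(0) (the message to the cluster assignment z) and X._message_to_parent(1) (the message to Mu). The snippet below, which assumes the nodes from the last block of the test above are still in scope, checks that correspondence for Mu; the expected agreement is my own inference from the matching assertions in the two tests, not something the original page states.

import numpy as np

m_direct = X._message_to_parent(1)          # message X addresses to parent index 1 (Mu)
m_collected = Mu._message_from_children()   # everything Mu receives from its children
print(np.allclose(m_direct[0], m_collected[0]))   # expected: True
print(np.allclose(m_direct[1], m_collected[1]))   # expected: True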