This article collects typical usage examples of the Python method bayespy.nodes.Mixture._message_to_parent. If you are unsure what Mixture._message_to_parent does or how to use it, the selected code example below may help. You can also explore the containing class, bayespy.nodes.Mixture,
for more context.
One code example of Mixture._message_to_parent is shown below.
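Before the test listing, here is a minimal sketch of how Mixture is normally used through bayespy's public API, as context for what the internal _message_to_parent machinery supports. It follows the standard Gaussian-mixture construction from the bayespy documentation; the data, hyperparameters, and iteration count are illustrative assumptions, not taken from the example below.

# A minimal sketch of typical Mixture usage (illustrative values; assumes the
# standard bayespy public API: Dirichlet, Categorical, GaussianARD, Gamma, Mixture, VB).
import numpy as np
from bayespy.nodes import Dirichlet, Categorical, GaussianARD, Gamma, Mixture
from bayespy.inference import VB

K = 3                                    # number of mixture components
data = np.random.randn(100)             # illustrative 1-D observations

alpha = Dirichlet(np.ones(K))            # prior over mixing proportions
z = Categorical(alpha, plates=(100,))    # cluster assignment for each data point
mu = GaussianARD(0, 1e-5, plates=(K,))   # per-cluster means
tau = Gamma(1e-5, 1e-5, plates=(K,))     # per-cluster precisions
y = Mixture(z, GaussianARD, mu, tau)     # mixture of Gaussians

y.observe(data)
Q = VB(y, z, mu, tau, alpha)
Q.update(repeat=100)                     # _message_to_parent is used internally here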
Example 1: test_message_to_parent
# Required import: from bayespy.nodes import Mixture [as alias]
# Alternatively: from bayespy.nodes.Mixture import _message_to_parent [as alias]
# Additional imports needed to run this test:
import numpy as np
from bayespy.nodes import GaussianARD, Gamma, Categorical, Mixture
from bayespy.utils import random
# The method below belongs to a test class that provides assertAllClose
# (bayespy.utils.misc.TestCase in the bayespy test suite).
def test_message_to_parent(self):
"""
Test the messages to the parents of a Mixture node.
"""
K = 3
# Broadcasting the moments on the cluster axis
Mu = GaussianARD(2, 1,
ndim=0,
plates=(K,))
(mu, mumu) = Mu._message_to_child()
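# _message_to_child returns the node's moments; for a scalar GaussianARD
# (ndim=0) these are (<mu>, <mu^2>).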
Alpha = Gamma(3, 1,
plates=(K,))
(alpha, logalpha) = Alpha._message_to_child()
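# For a Gamma node the moments are (<alpha>, <log alpha>).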
z = Categorical(np.ones(K)/K)
X = Mixture(z, GaussianARD, Mu, Alpha)
tau = 4
Y = GaussianARD(X, tau)
y = 5
Y.observe(y)
(x, xx) = X._message_to_child()
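# Moments of the mixture output: (<x>, <x^2>).
# Parent 0 is the cluster assignment z; per the assertion below, the message
# is the expected Gaussian log-density of x under each of the K clusters.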
m = X._message_to_parent(0)
self.assertAllClose(m[0],
random.gaussian_logpdf(xx*alpha,
x*alpha*mu,
mumu*alpha,
logalpha,
0))
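# Parent 1 is Mu; per the assertions below, the message carries the
# natural-parameter pair (<alpha><x>, -<alpha>/2) for each cluster, weighted
# by the cluster responsibilities, which are uniform (1/K) here because the
# clusters are exchangeable.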
m = X._message_to_parent(1)
self.assertAllClose(m[0],
1/K * (alpha*x) * np.ones(3))
self.assertAllClose(m[1],
-0.5 * 1/K * alpha * np.ones(3))
# Some parameters do not have a cluster plate axis
Mu = GaussianARD(2, 1,
ndim=0,
plates=(K,))
(mu, mumu) = Mu._message_to_child()
Alpha = Gamma(3, 1) # Note: no cluster plate axis!
(alpha, logalpha) = Alpha._message_to_child()
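# alpha and logalpha are scalars here; they broadcast over the K clusters in
# the messages checked below.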
z = Categorical(np.ones(K)/K)
X = Mixture(z, GaussianARD, Mu, Alpha)
tau = 4
Y = GaussianARD(X, tau)
y = 5
Y.observe(y)
(x, xx) = X._message_to_child()
m = X._message_to_parent(0)
self.assertAllClose(m[0],
random.gaussian_logpdf(xx*alpha,
x*alpha*mu,
mumu*alpha,
logalpha,
0))
m = X._message_to_parent(1)
self.assertAllClose(m[0],
1/K * (alpha*x) * np.ones(3))
self.assertAllClose(m[1],
-0.5 * 1/K * alpha * np.ones(3))
# Cluster assignments do not have as many plate axes as parameters.
M = 2
Mu = GaussianARD(2, 1,
ndim=0,
plates=(K,M))
(mu, mumu) = Mu._message_to_child()
Alpha = Gamma(3, 1,
plates=(K,M))
(alpha, logalpha) = Alpha._message_to_child()
z = Categorical(np.ones(K)/K)
X = Mixture(z, GaussianARD, Mu, Alpha, cluster_plate=-2)
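# cluster_plate=-2 makes the cluster axis the second-to-last plate axis of the
# mixed parameters, so X itself has plates (M,) and the message to z sums the
# per-cluster contributions over the M axis (see np.sum(..., axis=-1) below).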
tau = 4
Y = GaussianARD(X, tau)
y = 5 * np.ones(M)
Y.observe(y)
(x, xx) = X._message_to_child()
m = X._message_to_parent(0)
self.assertAllClose(m[0]*np.ones(K),
np.sum(random.gaussian_logpdf(xx*alpha,
x*alpha*mu,
mumu*alpha,
logalpha,
0) *
np.ones((K,M)),
axis=-1))
m = X._message_to_parent(1)
self.assertAllClose(m[0] * np.ones((K,M)),
1/K * (alpha*x) * np.ones((K,M)))
self.assertAllClose(m[1] * np.ones((K,M)),
-0.5 * 1/K * alpha * np.ones((K,M)))
pass
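For reference, the quantity the assertions compare against can be read as the expected Gaussian log-density of x under each cluster's parameters, evaluated with posterior moments. The helper below is a hedged sketch of that expression; the argument order mirrors the call random.gaussian_logpdf(xx*alpha, x*alpha*mu, mumu*alpha, logalpha, 0) in the test, but it is an assumption about what bayespy.utils.random.gaussian_logpdf computes, not a copy of its source.

import numpy as np

def expected_gaussian_logpdf(yVy, yVmu, muVmu, logdet_V, D):
    """Sketch of the expected log N(y | mu, V^{-1}) used in the assertions:
    -0.5*<y V y> + <y V mu> - 0.5*<mu V mu> + 0.5*<log|V|> - 0.5*D*log(2*pi).
    In the test it is called with (xx*alpha, x*alpha*mu, mumu*alpha, logalpha, 0),
    so the constant term vanishes for D=0."""
    return -0.5*yVy + yVmu - 0.5*muVmu + 0.5*logdet_V - 0.5*D*np.log(2*np.pi)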