This article collects typical usage examples of the score_samples method of sklearn.mixture.GaussianMixture in Python. If you are wondering what GaussianMixture.score_samples does, how to call it, or what real usage looks like, the curated examples below may help. You can also read further about the class this method belongs to, sklearn.mixture.GaussianMixture.
Two code examples of GaussianMixture.score_samples are shown below, sorted by popularity by default. You can upvote the examples you like or find useful; your feedback helps the system recommend better Python code examples.
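Before the curated examples, here is a minimal self-contained sketch (not taken from the source page; the names X and gm are illustrative) of what score_samples returns: the log of the probability density of each sample under the fitted mixture.

# Minimal sketch: fit a GaussianMixture and evaluate the per-sample log-likelihood.
# Variable names (X, gm) are illustrative, not from the examples below.
import numpy as np
from sklearn.mixture import GaussianMixture

rng = np.random.RandomState(0)
X = np.vstack([rng.normal(-2, 0.5, size=(200, 1)),
               rng.normal(3, 1.0, size=(200, 1))])

gm = GaussianMixture(n_components=2, random_state=0).fit(X)
log_density = gm.score_samples(X)   # shape (n_samples,), log p(x) for each sample
density = np.exp(log_density)       # convert to probability density
print(log_density[:5], density[:5])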
Example 1: GaussianMixture1D
# Required import: from sklearn.mixture import GaussianMixture [as alias]
# Or: from sklearn.mixture.GaussianMixture import score_samples [as alias]
import numpy as np
from sklearn.mixture import GaussianMixture
class GaussianMixture1D(object):
    """
    Simple class to work with 1D mixtures of Gaussians

    Parameters
    ----------
    means : array_like
        means of component distributions (default = 0)
    sigmas : array_like
        standard deviations of component distributions (default = 1)
    weights : array_like
        weights of component distributions (default = 1)
    """
    def __init__(self, means=0, sigmas=1, weights=1):
        # Broadcast means, sigmas and weights into one (n_components, 3) array.
        data = np.array([t for t in np.broadcast(means, sigmas, weights)])
        components = data.shape[0]
        self._gmm = GaussianMixture(components, covariance_type='spherical')
        self._gmm.means_ = data[:, :1]
        self._gmm.weights_ = data[:, 2] / data[:, 2].sum()
        self._gmm.covariances_ = data[:, 1] ** 2
        self._gmm.precisions_cholesky_ = 1 / np.sqrt(self._gmm.covariances_)
        self._gmm.fit = None  # disable fit method for safety

    def sample(self, size):
        """Random sample"""
        return self._gmm.sample(size)

    def pdf(self, x):
        """Compute the probability density at x"""
        if x.ndim == 1:
            x = x[:, np.newaxis]
        logprob = self._gmm.score_samples(x)
        return np.exp(logprob)

    def pdf_individual(self, x):
        """Compute each component's contribution to the density at x"""
        if x.ndim == 1:
            x = x[:, np.newaxis]
        logprob = self._gmm.score_samples(x)
        responsibilities = self._gmm.predict_proba(x)
        return responsibilities * np.exp(logprob[:, np.newaxis])
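A brief usage sketch for the class above (the component parameters and the evaluation grid are illustrative, not part of the original example): it builds a two-component mixture and evaluates the total and per-component densities, both of which go through score_samples.

# Usage sketch for GaussianMixture1D (illustrative parameters).
x_grid = np.linspace(-5, 5, 1000)
gm1d = GaussianMixture1D(means=[-1.0, 2.0], sigmas=[0.5, 1.0], weights=[0.3, 0.7])
density = gm1d.pdf(x_grid)                   # total mixture density, via score_samples
per_component = gm1d.pdf_individual(x_grid)  # shape (1000, 2): one column per component
samples, labels = gm1d.sample(100)           # 100 random draws and their component labels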
Example 2: print
# Required import: from sklearn.mixture import GaussianMixture [as alias]
# Or: from sklearn.mixture.GaussianMixture import score_samples [as alias]
# Assumes: numpy as np, scipy as sp (with scipy.linalg imported), matplotlib as mpl,
# matplotlib.pyplot as plt; gmm is a GaussianMixture already fitted on 2D data x with
# class labels y, and expand() slightly widens an axis range.
import numpy as np
import scipy as sp
import scipy.linalg
import matplotlib as mpl
import matplotlib.pyplot as plt
from sklearn.mixture import GaussianMixture
centers = gmm.means_
covs = gmm.covariances_
print('GMM means = \n', centers)
print('GMM covariances = \n', covs)
y_hat = gmm.predict(x)
colors = '#A0FFA0', '#E080A0',
levels = 10
cm = mpl.colors.ListedColormap(colors)
x1_min, x1_max = x[:, 0].min(), x[:, 0].max()
x2_min, x2_max = x[:, 1].min(), x[:, 1].max()
x1_min, x1_max = expand(x1_min, x1_max)
x2_min, x2_max = expand(x2_min, x2_max)
x1, x2 = np.mgrid[x1_min:x1_max:500j, x2_min:x2_max:500j]
grid_test = np.stack((x1.flat, x2.flat), axis=1)
print(gmm.score_samples(grid_test))
grid_hat = -gmm.score_samples(grid_test)  # negative log-likelihood over the grid
grid_hat = grid_hat.reshape(x1.shape)
plt.figure(figsize=(7, 6), facecolor='w')
ax = plt.subplot(111)
cmesh = plt.pcolormesh(x1, x2, grid_hat, cmap=plt.cm.Spectral)
plt.colorbar(cmesh, shrink=0.9)
CS = plt.contour(x1, x2, grid_hat, levels=np.logspace(0, 2, num=levels, base=10), colors='w', linewidths=1)
plt.clabel(CS, fontsize=9, inline=True, fmt='%.1f')
plt.scatter(x[:, 0], x[:, 1], s=30, c=y, cmap=cm, marker='o', edgecolors='#202020')
for i, cc in enumerate(zip(centers, covs)):
    center, cov = cc
    # Eigen-decomposition of the covariance gives the ellipse axes for this component.
    value, vector = sp.linalg.eigh(cov)
    width, height = value[0], value[1]
    v = vector[0] / sp.linalg.norm(vector[0])
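The snippet above is truncated by the source page (it stops while computing the covariance ellipses). For reference, here is a self-contained sketch of the same core idea, plotting -score_samples over a grid as a negative log-likelihood surface; the data and variable names are illustrative and not taken from the original example.

# Self-contained sketch (illustrative data): contour map of -log p(x) from score_samples.
import numpy as np
import matplotlib.pyplot as plt
from sklearn.mixture import GaussianMixture

rng = np.random.RandomState(0)
x = np.vstack([rng.normal(0, 1, size=(150, 2)),
               rng.normal(4, 1.5, size=(150, 2))])

gmm = GaussianMixture(n_components=2, covariance_type='full', random_state=0).fit(x)

# Evaluate the negative log-likelihood on a dense grid covering the data.
x1, x2 = np.mgrid[x[:, 0].min():x[:, 0].max():200j,
                  x[:, 1].min():x[:, 1].max():200j]
grid = np.stack((x1.ravel(), x2.ravel()), axis=1)
grid_hat = -gmm.score_samples(grid).reshape(x1.shape)

plt.pcolormesh(x1, x2, grid_hat, cmap=plt.cm.Spectral)
plt.colorbar(shrink=0.9)
plt.contour(x1, x2, grid_hat, levels=np.logspace(0, 2, 10), colors='w', linewidths=1)
plt.scatter(x[:, 0], x[:, 1], s=10, edgecolors='#202020')
plt.show()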