Python nodes.Mixture Class Code Examples

This article collects typical usage examples of the Python class bayespy.nodes.Mixture. If you are wondering how the Mixture class is used in practice, or are looking for concrete examples of it, the curated class code examples below may help.


Below are 15 code examples of the Mixture class, sorted by popularity by default. You can upvote the examples you like or find useful; your votes help the system recommend better Python code examples.
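
Before diving into the examples, here is a minimal sketch of the construction pattern most of them share: a Categorical node selects the mixture component, and the remaining arguments of Mixture name the component distribution and its parameters. This sketch is not taken from the examples below; the sizes, hyperparameter values, and the random dummy data are illustrative assumptions (the node classes and the VB inference engine are part of bayespy, as used in Example 8).

import numpy as np
from bayespy.nodes import Dirichlet, Categorical, Gaussian, Wishart, Mixture
from bayespy.inference import VB

# Illustrative sizes (assumptions): N observations, D dimensions, K components
N, D, K = 100, 2, 3

alpha = Dirichlet(1e-3 * np.ones(K))               # mixing weights
Z = Categorical(alpha, plates=(N,))                # per-observation component assignments
mu = Gaussian(np.zeros(D), 1e-5 * np.identity(D),
              plates=(K,))                         # component means
Lambda = Wishart(D, 1e-5 * np.identity(D),
                 plates=(K,))                      # component precision matrices
Y = Mixture(Z, Gaussian, mu, Lambda)               # the mixture node itself

Y.observe(np.random.randn(N, D))                   # dummy data, for illustration only
Z.initialize_from_random()                         # break symmetry before inference
Q = VB(Y, mu, Lambda, Z, alpha)
Q.update(repeat=100)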

Example 1: test_mask_to_parent

    def test_mask_to_parent(self):
        """
        Test the mask handling in Mixture node
        """

        K = 3
        Z = Categorical(np.ones(K)/K,
                        plates=(4,5))
        Mu = GaussianARD(0, 1,
                         shape=(2,),
                         plates=(4,K,5))
        Alpha = Gamma(1, 1,
                      plates=(4,K,5,2))
        X = Mixture(Z, GaussianARD, Mu, Alpha, cluster_plate=-2)
        Y = GaussianARD(X, 1)
        mask = np.reshape((np.mod(np.arange(4*5), 2) == 0),
                          (4,5))
        Y.observe(np.ones((4,5,2)), 
                  mask=mask)
        self.assertArrayEqual(X._mask_to_parent(0),
                              mask)
        self.assertArrayEqual(X._mask_to_parent(1),
                              mask[:,None,:])
        self.assertArrayEqual(X._mask_to_parent(2),
                              mask[:,None,:,None])
                         
        pass
Developer ID: Sandy4321, Project: bayespy, Lines of code: 27, Source file: test_mixture.py

Example 2: test_mixture

    def test_mixture(self):
        """
        Test mixture of Bernoulli
        """
        P = Mixture([2,0,0], Bernoulli, [0.1, 0.2, 0.3])
        u = P._message_to_child()
        self.assertEqual(len(u), 1)
        self.assertAllClose(u[0], [0.3, 0.1, 0.1])
        pass
Developer ID: BayesianHuman, Project: bayespy, Lines of code: 9, Source file: test_bernoulli.py

Example 3: test_mixture

    def test_mixture(self):
        """
        Test binomial mixture
        """

        X = Mixture(2, Binomial, 10, [0.1, 0.2, 0.3, 0.4])
        u = X._message_to_child()
        self.assertAllClose(u[0], 3.0)

        pass
Developer ID: zehsilva, Project: bayespy, Lines of code: 10, Source file: test_binomial.py

Example 4: test_lowerbound

    def test_lowerbound(self):
        """
        Test log likelihood lower bound for Mixture node
        """

        # Mixed distribution broadcasts g
        # This tests for a found bug. The bug caused an error.
        Z = Categorical([0.3, 0.5, 0.2])
        X = Mixture(Z, Categorical, [[0.2,0.8], [0.1,0.9], [0.3,0.7]])
        X.lower_bound_contribution()
        
        pass
Developer ID: chagge, Project: bayespy, Lines of code: 12, Source file: test_mixture.py

Example 5: test_random

    def test_random(self):
        """
        Test random sampling of mixture node
        """

        o = 1e-20
        X = Mixture([1, 0, 2], Categorical, [ [o, o, o, 1],
                                              [o, o, 1, o],
                                              [1, o, o, o] ])
        x = X.random()
        self.assertAllClose(x, [2, 3, 0])

        pass
Developer ID: chagge, Project: bayespy, Lines of code: 13, Source file: test_mixture.py

Example 6: test_mixture

    def test_mixture(self):
        """
        Test multinomial mixture
        """

        p0 = [0.1, 0.5, 0.2, 0.2]
        p1 = [0.5, 0.1, 0.1, 0.3]
        p2 = [0.3, 0.2, 0.1, 0.4]
        X = Mixture(2, Multinomial, 10, [p0, p1, p2])
        u = X._message_to_child()
        self.assertAllClose(u[0],
                            10*np.array(p2))

        pass
Developer ID: buptpriswang, Project: bayespy, Lines of code: 14, Source file: test_multinomial.py

Example 7: test_message_to_child

    def test_message_to_child(self):
        """
        Test the message to child of Mixture node.
        """

        K = 3

        #
        # Estimate moments from parents only
        #

        # Simple case
        mu = GaussianARD([0,2,4], 1,
                         ndim=0,
                         plates=(K,))
        alpha = Gamma(1, 1,
                      plates=(K,))
        z = Categorical(np.ones(K)/K)
        X = Mixture(z, GaussianARD, mu, alpha)
        self.assertEqual(X.plates, ())
        self.assertEqual(X.dims, ( (), () ))
        u = X._message_to_child()
        self.assertAllClose(u[0],
                            2)
        self.assertAllClose(u[1],
                            2**2+1)

        # Broadcasting the moments on the cluster axis
        mu = GaussianARD(2, 1,
                         ndim=0,
                         plates=(K,))
        alpha = Gamma(1, 1,
                      plates=(K,))
        z = Categorical(np.ones(K)/K)
        X = Mixture(z, GaussianARD, mu, alpha)
        self.assertEqual(X.plates, ())
        self.assertEqual(X.dims, ( (), () ))
        u = X._message_to_child()
        self.assertAllClose(u[0],
                            2)
        self.assertAllClose(u[1],
                            2**2+1)

        #
        # Estimate moments with observed children
        #
        
        pass
Developer ID: chagge, Project: bayespy, Lines of code: 48, Source file: test_mixture.py

Example 8: test_gaussian_mixture_plot

def test_gaussian_mixture_plot():
    """
    Test the gaussian_mixture plotting function.

    The code is from http://www.bayespy.org/examples/gmm.html
    """
    np.random.seed(1)
    y0 = np.random.multivariate_normal([0, 0], [[1, 0], [0, 0.02]], size=50)
    y1 = np.random.multivariate_normal([0, 0], [[0.02, 0], [0, 1]], size=50)
    y2 = np.random.multivariate_normal([2, 2], [[1, -0.9], [-0.9, 1]], size=50)
    y3 = np.random.multivariate_normal([-2, -2], [[0.1, 0], [0, 0.1]], size=50)
    y = np.vstack([y0, y1, y2, y3])

    bpplt.pyplot.plot(y[:,0], y[:,1], 'rx')

    N = 200
    D = 2
    K = 10

    alpha = Dirichlet(1e-5*np.ones(K),
                      name='alpha')
    Z = Categorical(alpha,
                    plates=(N,),
                    name='z')

    mu = Gaussian(np.zeros(D), 1e-5*np.identity(D),
                  plates=(K,),
                  name='mu')
    Lambda = Wishart(D, 1e-5*np.identity(D),
                     plates=(K,),
                     name='Lambda')

    Y = Mixture(Z, Gaussian, mu, Lambda,
                name='Y')
    Z.initialize_from_random()

    Q = VB(Y, mu, Lambda, Z, alpha)
    Y.observe(y)
    Q.update(repeat=1000)

    bpplt.gaussian_mixture_2d(Y, scale=2)

    # Have to define these limits because in some environments they may
    # otherwise differ and thus cause an image comparison failure
    bpplt.pyplot.xlim([-3, 6])
    bpplt.pyplot.ylim([-3, 5])
Developer ID: BayesianHuman, Project: bayespy, Lines of code: 46, Source file: test_plot.py

Example 9: test_deterministic_mappings

    def test_deterministic_mappings(self):
        x = Categorical([0.8, 0.2])
        y = Mixture(
            x,
            Categorical,
            [
                [0.10, 0.90],
                [0.00, 1.00],
            ]
        )

        y.observe(0)
        x.update()
        self.assertAllClose(x.u[0], [1, 0])

        y.observe(1)
        x.update()
        p = np.array([0.8*0.9, 0.2*1.0])
        self.assertAllClose(x.u[0], p / np.sum(p))

        pass
Developer ID: bayespy, Project: bayespy, Lines of code: 21, Source file: test_mixture.py

Example 10: test_nans

    def test_nans(self):
        """
        Test multinomial mixture with extreme probabilities (NaN handling)
        """

        # The probabilities p1 cause problems
        p0 = [0.1, 0.9]
        p1 = [1.0-1e-50, 1e-50]
        Z = Categorical([1-1e-10, 1e-10])
        X = Mixture(Z, Multinomial, 10, [p0, p1])
        u = X._message_to_child()
        self.assertAllClose(u[0],
                            [1, 9])

        p0 = [0.1, 0.9]
        p1 = [1.0-1e-10, 1e-10]
        Z = Categorical([1-1e-50, 1e-50])
        X = Mixture(Z, Multinomial, 10, [p0, p1])
        u = X._message_to_child()
        self.assertAllClose(u[0],
                            [1, 9])
        
        with warnings.catch_warnings():
            warnings.simplefilter("ignore", RuntimeWarning)
            warnings.simplefilter("ignore", UserWarning)
            p0 = [0.1, 0.9]
            p1 = [1.0, 0.0]
            X = Mixture(0, Multinomial, 10, [p0, p1])
            u = X._message_to_child()
            self.assertAllClose(u[0],
                                np.nan*np.ones(2))

        
        pass
Developer ID: chagge, Project: bayespy, Lines of code: 34, Source file: test_mixture.py

Example 11: test_constant

    def test_constant(self):
        """
        Test constant categorical nodes
        """

        # Basic test
        Y = Mixture(2, Gamma, [1, 2, 3], [1, 1, 1])
        u = Y._message_to_child()
        self.assertAllClose(u[0],
                            3/1)

        # Test with one plate axis
        alpha = [[1, 2, 3],
                 [4, 5, 6]]
        Y = Mixture([2, 1], Gamma, alpha, 1)
        u = Y._message_to_child()
        self.assertAllClose(u[0],
                            [3, 5])

        # Test with two plate axes
        alpha = [ [[1, 2, 3],
                   [4, 5, 6]],
                  [[7, 8, 9],
                   [10, 11, 12]] ]
        Y = Mixture([[2, 1], [0, 2]], Gamma, alpha, 1)
        u = Y._message_to_child()
        self.assertAllClose(u[0],
                            [[3, 5],
                             [7, 12]])

        pass
Developer ID: kyloon, Project: bayespy, Lines of code: 31, Source file: test_categorical.py

Example 12: test_gradient

    def test_gradient(self):
        """
        Check the Euclidean gradient of the categorical node
        """

        Z = Categorical([[0.3, 0.5, 0.2], [0.1, 0.6, 0.3]])
        Y = Mixture(Z, Gamma, [2, 3, 4], [5, 6, 7])
        Y.observe([4.2, 0.2])
        def f(x):
            Z.set_parameters([np.reshape(x, Z.get_shape(0))])
            return Z.lower_bound_contribution() + Y.lower_bound_contribution()
        def df(x):
            Z.set_parameters([np.reshape(x, Z.get_shape(0))])
            g = Z.get_riemannian_gradient()
            return Z.get_gradient(g)[0]
        x0 = np.ravel(np.log([[2, 3, 7], [0.1, 3, 1]]))
        self.assertAllClose(
            misc.gradient(f, x0),
            np.ravel(df(x0))
        )

        pass
Developer ID: agile-innovations, Project: bayespy, Lines of code: 22, Source file: test_categorical.py

Example 13: _setup_bernoulli_mixture

def _setup_bernoulli_mixture():
    """
    Setup code for the hinton tests.

    This code is from http://www.bayespy.org/examples/bmm.html
    """
    np.random.seed(1)
    p0 = [0.1, 0.9, 0.1, 0.9, 0.1, 0.9, 0.1, 0.9, 0.1, 0.9]
    p1 = [0.1, 0.1, 0.1, 0.1, 0.1, 0.9, 0.9, 0.9, 0.9, 0.9]
    p2 = [0.9, 0.9, 0.9, 0.9, 0.9, 0.1, 0.1, 0.1, 0.1, 0.1]
    p = np.array([p0, p1, p2])

    z = random.categorical([1/3, 1/3, 1/3], size=100)
    x = random.bernoulli(p[z])
    N = 100
    D = 10
    K = 10

    R = Dirichlet(K*[1e-5],
                  name='R')
    Z = Categorical(R,
                    plates=(N,1),
                    name='Z')

    P = Beta([0.5, 0.5],
             plates=(D,K),
             name='P')

    X = Mixture(Z, Bernoulli, P)

    Q = VB(Z, R, X, P)
    P.initialize_from_random()
    X.observe(x)
    Q.update(repeat=1000)

    return (R,P,Z)
Developer ID: BayesianHuman, Project: bayespy, Lines of code: 36, Source file: test_plot.py

Example 14: test_message_to_parent

    def test_message_to_parent(self):
        """
        Test the message to parents of Mixture node.
        """

        K = 3

        # Broadcasting the moments on the cluster axis
        Mu = GaussianARD(2, 1,
                         ndim=0,
                         plates=(K,))
        (mu, mumu) = Mu._message_to_child()
        Alpha = Gamma(3, 1,
                      plates=(K,))
        (alpha, logalpha) = Alpha._message_to_child()
        z = Categorical(np.ones(K)/K)
        X = Mixture(z, GaussianARD, Mu, Alpha)
        tau = 4
        Y = GaussianARD(X, tau)
        y = 5
        Y.observe(y)
        (x, xx) = X._message_to_child()
        m = z._message_from_children()
        self.assertAllClose(m[0] * np.ones(K),
                            random.gaussian_logpdf(xx*alpha,
                                                   x*alpha*mu,
                                                   mumu*alpha,
                                                   logalpha,
                                                   0)
                            * np.ones(K))
        m = Mu._message_from_children()
        self.assertAllClose(m[0],
                            1/K * (alpha*x) * np.ones(3))
        self.assertAllClose(m[1],
                            -0.5 * 1/K * alpha * np.ones(3))

        # Some parameters do not have cluster plate axis
        Mu = GaussianARD(2, 1,
                         ndim=0,
                         plates=(K,))
        (mu, mumu) = Mu._message_to_child()
        Alpha = Gamma(3, 1) # Note: no cluster plate axis!
        (alpha, logalpha) = Alpha._message_to_child()
        z = Categorical(np.ones(K)/K)
        X = Mixture(z, GaussianARD, Mu, Alpha)
        tau = 4
        Y = GaussianARD(X, tau)
        y = 5
        Y.observe(y)
        (x, xx) = X._message_to_child()
        m = z._message_from_children()
        self.assertAllClose(m[0] * np.ones(K),
                            random.gaussian_logpdf(xx*alpha,
                                                   x*alpha*mu,
                                                   mumu*alpha,
                                                   logalpha,
                                                   0)
                            * np.ones(K))
                                                   
        m = Mu._message_from_children()
        self.assertAllClose(m[0],
                            1/K * (alpha*x) * np.ones(3))
        self.assertAllClose(m[1],
                            -0.5 * 1/K * alpha * np.ones(3))

        # Cluster assignments do not have as many plate axes as parameters.
        M = 2
        Mu = GaussianARD(2, 1,
                         ndim=0,
                         plates=(K,M))
        (mu, mumu) = Mu._message_to_child()
        Alpha = Gamma(3, 1,
                      plates=(K,M))
        (alpha, logalpha) = Alpha._message_to_child()
        z = Categorical(np.ones(K)/K)
        X = Mixture(z, GaussianARD, Mu, Alpha, cluster_plate=-2)
        tau = 4
        Y = GaussianARD(X, tau)
        y = 5 * np.ones(M)
        Y.observe(y)
        (x, xx) = X._message_to_child()
        m = z._message_from_children()
        self.assertAllClose(m[0]*np.ones(K),
                            np.sum(random.gaussian_logpdf(xx*alpha,
                                                          x*alpha*mu,
                                                          mumu*alpha,
                                                          logalpha,
                                                          0) *
                                   np.ones((K,M)),
                                   axis=-1))
                                                   
        m = Mu._message_from_children()
        self.assertAllClose(m[0] * np.ones((K,M)),
                            1/K * (alpha*x) * np.ones((K,M)))
        self.assertAllClose(m[1] * np.ones((K,M)),
                            -0.5 * 1/K * alpha * np.ones((K,M)))
        

        # Mixed distribution broadcasts g
        # This tests for a found bug. The bug caused an error.
#......... remainder of the code omitted here .........
Developer ID: agile-innovations, Project: bayespy, Lines of code: 101, Source file: test_mixture.py

Example 15: generateCPD

    def generateCPD(self, term): #, X_train, y_train, X_test, y_test, X_validation, y_validation, g_train, g_test, g_validation):
        
        clf = loadClf(self.ontology[term]['name'], self.fold, self.clfName)
        posTrain = sum(clf.y_train == POSTIVE_LABEL)
        negTrain = sum(clf.y_train == NEGATIVE_LABEL)
        totalTrain = posTrain + negTrain

        children = sorted(self.ontology[term]['children'])
        parents = sorted(self.ontology[term]['parents'])


        labels = {l : PRIOR for l in product(*((POSTIVE_LABEL,NEGATIVE_LABEL),)*(len(children)+1))}
        if children:
            childNodes = [self.ontology[child]['node'][self.fold][self.clfName] for child in children]
            for gene,y in zip(clf.g_train, clf.y_train):
                event = []
                for child in children:
                    event.append(POSTIVE_LABEL if gene in self.ontology.associations[child] else NEGATIVE_LABEL)
                event.append(POSTIVE_LABEL if gene in self.ontology.associations[term] else NEGATIVE_LABEL)
                assert (gene in self.ontology.associations[term]) == (y == POSTIVE_LABEL)
                event = tuple(event)

                labels[event] += 1
            def countBoth(event):
                return labels[event[:-1]+(POSTIVE_LABEL,)] + labels[event[:-1]+(NEGATIVE_LABEL,)]
            cprior = PRIOR * (2 ** len(children))
            
            types = [Mixture]*(len(children)-1) + [Categorical]
            mixparams = [i for s in zip(childNodes, types) for i in s]
            cpd = numpy.empty((2,)*(len(children)+1))
            for event, counted in labels.items():
                v=cpd
                for b in event[:-1]:
                    v = v[b]
                    
                hid = event[-1]
                print("Event: ", event)
                if POSTIVE_LABEL not in event[:-1]: # all children labeled "negative"
                    v[hid] = counted/countBoth(event)
                    print("Stored %d / %d" % (counted,countBoth(event)))
                else:
                    v[hid] = {POSTIVE_LABEL: 0.99, NEGATIVE_LABEL:0.01}[hid]
                    print("Stored %d : %d" % (hid, v[hid]))


            #print(term,"<-",",".join(children))
            print(cpd)
            #print(labels)

            hidden = Mixture(*mixparams, cpd)
            hidden.params = cpd
            

        else: #No children
            #hidden = DiscreteDistribution({'0': posTrain / totalTrain, '1': negTrain / totalTrain})
            params = (posTrain / totalTrain, negTrain / totalTrain)
            hidden = Categorical(params)
            hidden.params = params

        #print("Hidden node %s:" % term)
        #print(repr(hidden))
        #print([p for p in hidden.parents if isinstance(p, Stochastic)])
        #print(hidden.get_moments())

        conf = clf.conf + PRIOR
        #posTest, negTest = numpy.sum(conf, 1) 
        posTest, negTest = numpy.sum(conf, 0) 
        #print("Confusion matrix:")
        #print(conf)
       
        try:
            assert term != self.ontology.root
            pos_decisions = clf.decision_function(clf.X_test[clf.y_test==POSTIVE_LABEL])
            neg_decisions = clf.decision_function(clf.X_test[clf.y_test==NEGATIVE_LABEL])
            means = [numpy.mean(pos_decisions)], [numpy.mean(neg_decisions)]
            maxprec = 100.0
            precs = [[numpy.min((1/numpy.var(pos_decisions), maxprec))]], [[numpy.min((1/numpy.var(neg_decisions), maxprec))]]
        #else:
        except (ValueError, AssertionError):
            means = [-1.], [1.]
            precs = [[1.]], [[1.]]
        print("Gaussian params:", term, self.ontology[term]['name'], means, precs)
        observed = Mixture(hidden, Gaussian, means, precs)
        #observed = ConditionalProbabilityTable([
        #        ['0', '0', conf[0][0] / posTest], # if term != root else 1.],
        #        ['0', '1', conf[0][1] / posTest], # if term != root else 0.],
        #        ['1', '0', conf[1][0] / negTest], # if term != root else 0.],
        #        ['1', '1', conf[1][1] / negTest]], #if term != root else 1.]],
        #    [hidden.distribution])
        #print("Observed node %s - %s:" % (term, self.ontology[term]['name']))
        #print(repr(observed))
        #print([p for p in observed.parents if isinstance(p, Stochastic)])

        self.ontology[term]['node'][self.fold][self.clfName] = hidden
        #self.ontology[term]['clf'][self.fold][self.clfName] = clf, X_validation, y_validation, g_validation
        assert self.lenValidation is None or self.lenValidation == len(clf.y_validation)
        self.lenValidation = len(clf.y_validation)
        self.allobserved[term] = observed
        self.allhidden[term] = hidden
        self.extranodes.update((p for p in hidden.parents if isinstance(p, Stochastic)))
Developer ID: jachymb, Project: DiplomovaPrace, Lines of code: 100, Source file: bayesnet.py


Note: The bayespy.nodes.Mixture class examples in this article were compiled by 纯净天空 from open-source code and documentation platforms such as GitHub and MSDocs. The code snippets are selected from open-source projects contributed by various developers; copyright of the source code remains with the original authors. For distribution and use, please refer to the License of the corresponding project. Do not reproduce without permission.