

C++ CFactor::ProcessingStatisticalData method code examples

This article collects typical usage examples of the C++ method CFactor::ProcessingStatisticalData. If you are wondering what exactly CFactor::ProcessingStatisticalData does and how it is used in practice, the curated code examples below may help. You can also explore further usage examples of the enclosing class CFactor.


Four code examples of CFactor::ProcessingStatisticalData are shown below, ordered by popularity.
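Before those, here is a minimal sketch (not taken from any of the projects below) of the typical calling pattern: each factor first accumulates sufficient statistics from the evidence with UpdateStatisticsML, and ProcessingStatisticalData then converts those statistics into the factor's distribution parameters and returns that factor's contribution to the log-likelihood. The header name pnl_dll.hpp, the pnl namespace, and the helper name LearnFactorsML are assumptions made for illustration and may need adjusting to your OpenPNL build.

// Minimal usage sketch for CFactor::ProcessingStatisticalData (illustrative only).
#include "pnl_dll.hpp"   // assumed umbrella header of OpenPNL

using namespace pnl;     // assumed namespace

// Learns every factor of an already-built model from fully observed evidence
// and returns the total log-likelihood of the data.
static float LearnFactorsML( CStaticGraphicalModel *pGrModel,
                             const CEvidence **pEvidences, int nEvidences )
{
    float logLik = 0.0f;
    int nFactors = pGrModel->GetNumberOfFactors();
    for( int i = 0; i < nFactors; ++i )
    {
        CFactor *pFactor = pGrModel->GetFactor(i);
        // accumulate sufficient statistics from the evidence
        pFactor->UpdateStatisticsML( pEvidences, nEvidences );
        // convert the statistics into distribution parameters and
        // collect this factor's contribution to the log-likelihood
        logLik += pFactor->ProcessingStatisticalData( nEvidences );
    }
    return logLik;
}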

Example 1: Learn

void CMlLearningEngine::Learn()
{
    /*
     * Takes the information stored in m_pEvidences and learns the factors
     * of the graphical model, with or without prior probabilities.
     */
    float logLikTmp = 0;
    if(!m_pGrModel)
    {
        PNL_THROW( CNULLPointer, "no graphical model")
    }
    CStaticGraphicalModel *grmodel = this->GetStaticModel();
    CFactor *factor = NULL;
    int numberOfDomains = grmodel -> GetNumberOfFactors();
    
    for( int domainNodes = 0; domainNodes < numberOfDomains; domainNodes++ )
    {
        factor = grmodel->GetFactor( domainNodes );
        factor->UpdateStatisticsML( &m_Vector_pEvidences.front(),
            m_Vector_pEvidences.size() );
        PNL_CHECK_LEFT_BORDER(m_numberOfAllEvidences, 1);
        logLikTmp += factor->ProcessingStatisticalData(m_numberOfAllEvidences);
    }
    switch( grmodel -> GetModelType() )
    {
    case mtBNet:
        {
            break;
        }
    case mtMRF2:
    case mtMNet:
        {
            logLikTmp = _LearnPotentials();
            break;
        }
    default:
        {
            PNL_THROW(CBadConst, "model type" )
                break;
        }
    }
    m_critValue.push_back(logLikTmp);
}
Developer ID: JacobCWard, Project: PyPNL, Lines of code: 43, Source file: pnlMlLearningEngine.cpp

Example 2: Learn


//......... part of the code omitted here .........
                    const int *domain;
                    parameter->GetDomain(&DomainSize, &domain);
                    if (IsDomainObserved(DomainSize, domain, currentEvidNumber))
                    {
                        const CEvidence *pEvidences[] = { pCurrentEvid };
                        parameter->UpdateStatisticsML(pEvidences, 1);
                    }
                    else
                    {
                        pCurrentInfEng->MarginalNodes(domain, DomainSize, 1);
                        const CPotential * pMargPot = pCurrentInfEng->GetQueryJPD();
                        parameter->UpdateStatisticsEM(pMargPot, pCurrentEvid);
                    }
                }
                else
                {
                    const CEvidence *pEvidences[] = { pCurrentEvid };
                    parameter->UpdateStatisticsML(pEvidences, 1);
                }
            }
            itsML = itsML || !infIsNeed;
        }

        for(domainNodes = 0; domainNodes < numberOfParameters; domainNodes++ )
        {
            parameter = pGrModel->GetFactor(domainNodes);
            
            CNumericDenseMatrix<float> *matForSending;
            int matDim;
            const int *pMatRanges;
            int dataLength;
            const float *pDataForSending;

            matForSending = static_cast<CNumericDenseMatrix<float>*>
                ((parameter->GetDistribFun())->GetStatisticalMatrix(stMatTable));

            matForSending->GetRanges(&matDim, &pMatRanges);
            matForSending->GetRawData(&dataLength, &pDataForSending);
            // buffer receiving the element-wise sum of the statistics over all processes
            float *pDataRecv = new float[dataLength];

            MPI_Allreduce((void*)pDataForSending, pDataRecv, dataLength, MPI_FLOAT, MPI_SUM,
                MPI_COMM_WORLD);

            // write the reduced statistics back into this factor's statistical matrix
            CNumericDenseMatrix<float> *RecvMatrix =
                static_cast<CNumericDenseMatrix<float>*>
                (parameter->GetDistribFun()->GetStatisticalMatrix(stMatTable));
            int dataLength_new;
            float *pData_new;
            RecvMatrix->GetRawData(&dataLength_new, (const float**)(&pData_new));
            for(int t = 0; t < dataLength_new; t++)
                pData_new[t] = pDataRecv[t];
            delete [] pDataRecv;
        }
        switch (pGrModel->GetModelType())
        {
        case mtBNet:
            {
                loglikOld = loglik;
                loglik = 0.0f;
                for(domainNodes = 0; domainNodes < numberOfParameters; domainNodes++)
                {
                    parameter = pGrModel->GetFactor(domainNodes);
                    loglik += parameter->ProcessingStatisticalData(m_numberOfAllEvidences);
                }
                break;
            }
        case mtMRF2:
        case mtMNet:
            {
                loglikOld = loglik;
                loglik = _LearnPotentials();
                break;
            }
        default:
            {
                PNL_THROW(CBadConst, "model type")
                    break;
            }
        }

        stopExpression = 
            float(fabs(2 * (loglikOld - loglik) / (loglikOld + loglik)));
        exit = ((stopExpression > epsilon) && (iteration <= GetMaxIterEM())) && !itsML;
        if(exit)
        {
            ClearStatisticData();
        }

        delete pCurrentInfEng;
        pCurrentInfEng = NULL;
    }while(exit);

    if(iteration > GetMaxIterEM())
    {
        PNL_THROW(CNotConverged, "maximum number of iterations")
    }

    SetNumProcEv( GetNumEv() );
}
Developer ID: PyOpenPNL, Project: OpenPNL, Lines of code: 101, Source file: pnlParEmLearningEngine.cpp

Example 3: LearnExtraCPDs

void CEMLearningEngine::LearnExtraCPDs(int nMaxFamily, pCPDVector* additionalCPDs, floatVector* additionalLLs)
{

    CStaticGraphicalModel *pGrModel =  this->GetStaticModel();
    PNL_CHECK_IS_NULL_POINTER(pGrModel);
    PNL_CHECK_LEFT_BORDER(GetNumEv(), 1);
    
    int numberOfFactors = pGrModel->GetNumberOfFactors();
    int numberOfAddFactors = additionalCPDs->size();
    
    additionalLLs->clear();
    additionalLLs->resize(numberOfAddFactors);
    
    m_vFamilyLogLik.resize(numberOfFactors);
    float	loglik = 0.0f, ll;
    int		i, ev;
    int iteration = 0;
    const CEvidence* pEv;
    
    CFactor *factor = NULL;
    int nnodes;
    const int * domain;
    
    bool bInfIsNeed;
    CInfEngine *pInfEng = m_pInfEngine;
    
    if (IsAllObserved())
    {
        for (i = 0; i < numberOfFactors; i++)
        {
            factor = pGrModel->GetFactor(i);
            factor->UpdateStatisticsML(&m_Vector_pEvidences[GetNumberProcEv()], 
                GetNumEv() - GetNumberProcEv());
        }
        
        for( ev = 0; ev < GetNumEv() ; ev++)
        {
            pEv = m_Vector_pEvidences[ev];
            for( i = 0; i < numberOfAddFactors; i++ )
            {
                factor = static_cast<CFactor*>((*additionalCPDs)[i]);
                factor->UpdateStatisticsML( &pEv, 1 );
            }
        }
        
        switch (pGrModel->GetModelType())
        {
        case mtBNet:
            {
                for( i = 0; i<numberOfFactors; i++ )
                {
                    factor = pGrModel->GetFactor(i);
                    ll = factor->ProcessingStatisticalData( GetNumEv());
                    m_vFamilyLogLik[i] = ll;
                    loglik += ll;
                }
                
                for( i = 0; i < numberOfAddFactors; i++ )
                {
                    factor = static_cast<CFactor*>((*additionalCPDs)[i]);
                    ll = factor->ProcessingStatisticalData( GetNumEv());
                    (*additionalLLs)[i] = ll;
                }
                break;
            }
        case mtMRF2:
        case mtMNet:
            {	
                break;
            }
        default:
            {
                PNL_THROW(CBadConst, "model type" )
                    break;
            }
        }
        m_critValue.push_back(loglik);    
        
    }
    else
    {
Developer ID: JacobCWard, Project: PyPNL, Lines of code: 81, Source file: pnlEmLearningEngine.cpp

Example 4: testSetStatistics

int testSetStatistics()
{
    int ret = TRS_OK;
    float eps = 0.1f;
    
    int seed = pnlTestRandSeed();
    pnlSeed( seed );   
            
    CBNet *pBNet = pnlExCreateCondGaussArBNet();
    CModelDomain *pMD = pBNet->GetModelDomain();

    
    CGraph *pGraph = CGraph::Copy(pBNet->GetGraph());
    
    CBNet *pBNet1 = CBNet::CreateWithRandomMatrices( pGraph, pMD );

    pEvidencesVector evidences;
    int nEvidences = pnlRand( 3000, 4000);
    
    pBNet->GenerateSamples( &evidences, nEvidences );
   
    
    int i;
    for( i = 0; i < nEvidences; i++)
    {
	//evidences[i]->MakeNodeHiddenBySerialNum(0);
    }
    

    CEMLearningEngine *pLearn = CEMLearningEngine::Create(pBNet1);
    pLearn->SetData( nEvidences, &evidences.front() );
    pLearn->SetMaxIterEM();
    pLearn->Learn();

    for( i = 0; i < pBNet->GetNumberOfFactors(); i++ )
    {
	if( ! pBNet->GetFactor(i)->IsFactorsDistribFunEqual(pBNet1->GetFactor(i), eps))
	{
	    ret = TRS_FAIL;
	    pBNet->GetFactor(i)->GetDistribFun()->Dump();
	    pBNet1->GetFactor(i)->GetDistribFun()->Dump();

	}
    }
    
    CDistribFun *pDistr;
    const CMatrix<float>* pMat;
    CFactor *pCPD;
    
    pDistr = pBNet1->GetFactor(0)->GetDistribFun();
    pMat = pDistr->GetStatisticalMatrix(stMatTable);
    
    pCPD = pBNet->GetFactor(0);
    pCPD->SetStatistics(pMat, stMatTable);
    pCPD->ProcessingStatisticalData(nEvidences);
    if( ! pCPD->IsFactorsDistribFunEqual(pBNet1->GetFactor(0), 0.0001f) )
    {
	ret = TRS_FAIL;
    }
    

    pDistr = pBNet1->GetFactor(1)->GetDistribFun();
    
    int parentVal;
    pCPD = pBNet->GetFactor(1);
    
    parentVal = 0;

    pCPD->SetStatistics(pMat, stMatCoeff);

    pMat = pDistr->GetStatisticalMatrix(stMatMu, &parentVal);
    pCPD->SetStatistics(pMat, stMatMu, &parentVal);
    
    
    pMat = pDistr->GetStatisticalMatrix(stMatSigma, &parentVal);
    pCPD->SetStatistics(pMat, stMatSigma, &parentVal);
    
    parentVal = 1;
    
    pMat = pDistr->GetStatisticalMatrix(stMatMu, &parentVal);
    pCPD->SetStatistics(pMat, stMatMu, &parentVal);
    
    
    pMat = pDistr->GetStatisticalMatrix(stMatSigma, &parentVal);
    pCPD->SetStatistics(pMat, stMatSigma, &parentVal);

    pCPD->ProcessingStatisticalData(nEvidences);
    
    if( ! pCPD->IsFactorsDistribFunEqual(pBNet1->GetFactor(1), eps) )
    {
	ret = TRS_FAIL;
    }
    
    
    for( i = 0; i < nEvidences; i++)
    {
	delete evidences[i];
    }
    delete pLearn;
//.........这里部分代码省略.........
Developer ID: JacobCWard, Project: PyPNL, Lines of code: 101, Source file: ASetStatistics.cpp


Note: The CFactor::ProcessingStatisticalData method examples in this article were collected by 纯净天空 from open-source code and documentation platforms such as GitHub and MSDocs. The code snippets are taken from open-source projects contributed by their original authors, and the source code copyright belongs to those authors; refer to the corresponding project's License before distributing or using the code. Do not reproduce without permission.