本文整理汇总了C++中BayesNet::GenerateEvidences方法的典型用法代码示例。如果您正苦于以下问题:C++ BayesNet::GenerateEvidences方法的具体用法?C++ BayesNet::GenerateEvidences怎么用?C++ BayesNet::GenerateEvidences使用的例子?那么, 这里精选的方法代码示例或许可以为您提供帮助。您也可以进一步了解该方法所在类BayesNet
的用法示例。
在下文中一共展示了BayesNet::GenerateEvidences方法的2个代码示例,这些例子默认根据受欢迎程度排序。您可以为喜欢或者感觉有用的代码点赞,您的评价将有助于系统推荐出更棒的C++代码示例。
示例1: TestCondSoftMaxParamLearning
// Learns the parameters of a SimpleCondSoftMaxModel network from generated
// evidence and reports success.
//
// @param DeleteNet  when true, the learned network is freed before returning;
//                   when false it is NOT deleted -- NOTE(review): the net is
//                   leaked on this path; confirm this is a deliberate part of
//                   the test (e.g. exercising shutdown without cleanup).
// @throws pnl::CAlgorithmicException only from the (currently disabled)
//                   verification step described below.
void TestCondSoftMaxParamLearning(bool DeleteNet)
{
    BayesNet *netToLearn = SimpleCondSoftMaxModel();

    // Draw a sample of evidences from the model and learn parameters from it.
    const int nEvid = 100;
    netToLearn->GenerateEvidences(nEvid);
    netToLearn->LearnParameters();

    /* Verification step is currently disabled. It compared, per node in
       {"node0", "node1", "node2"}, GetGaussianMean() and GetGaussianCovar()
       of netToLearn against a freshly built reference SimpleCondSoftMaxModel()
       element-by-element within eps = 1e-1f, throwing
       pnl::CAlgorithmicException("Parameters learning is wrong") on any size
       or value mismatch. The unused locals `eps`, `nodes[]`, and the reference
       `net` that supported it have been removed to silence compiler warnings;
       restore them if the check is re-enabled. */

    if (DeleteNet)
    {
        delete netToLearn;
    }

    std::cout << "TestCondSoftMaxParamLearning is completed successfully" << std::endl;
}
示例2: TestCondGaussianParamLearning
// Learns the parameters of a conditional-Gaussian model (SimpleCGM2) from
// generated evidence. A second, untouched copy of the model is built to serve
// as the reference for the (currently disabled) verification step; both nets
// are freed before returning.
//
// @throws pnl::CAlgorithmicException only from the disabled verification
//         step described below.
void TestsPnlHigh::TestCondGaussianParamLearning()
{
    BayesNet *net = SimpleCGM2();          // reference copy; unused while the check below is disabled
    BayesNet *netToLearn = SimpleCGM2();

    // Draw a sample of evidences from the model and learn parameters from it.
    const int nEvidences = 5000;
    netToLearn->GenerateEvidences(nEvidences);
    netToLearn->LearnParameters();

    /* Verification step is currently disabled. It iterated over every node of
       netToLearn and, for gaussian nodes without tabular parents, compared
       GetGaussianMean(), GetGaussianCovar(), and GetGaussianWeights() of
       netToLearn against `net` element-by-element within eps = 1e-2f, throwing
       pnl::CAlgorithmicException("Parameters learning is wrong") on any size
       or value mismatch. NOTE(review): the disabled code referenced an
       undeclared `nodes` array and would not compile if re-enabled as-is;
       the unused local `eps` has been removed to silence compiler warnings. */

    delete net;
    delete netToLearn;
}