本文整理汇总了C++中BayesNet类的典型用法代码示例。如果您正苦于以下问题:C++ BayesNet类的具体用法?C++ BayesNet怎么用?C++ BayesNet使用的例子?那么, 这里精选的类代码示例或许可以为您提供帮助。
在下文中一共展示了BayesNet类的15个代码示例,这些例子默认根据受欢迎程度排序。您可以为喜欢或者感觉有用的代码点赞,您的评价将有助于系统推荐出更棒的C++代码示例。
示例1: printf
// Verifies that WCondGaussianDistribFun::GetDiscreteParentValuesIndexes maps a
// discrete-parent value token to the expected combination index:
// Tab0^State0 -> {0} and Tab0^State1 -> {1}. Throws CAlgorithmicException on
// mismatch. (Fixed: removed stray ';' after if-blocks and the function body.)
void TestsPnlHigh::TestGetDiscreteParentValuesIndexes()
{
    printf("TestGetDiscreteParentValuesIndexes\n");
    BayesNet *net = SimpleCGM1();
    // Give Cont1 one conditional Gaussian per state of its discrete parent Tab0.
    net->SetPGaussian("Cont1", "0.0", "2.5", "1.0 3.0", "Tab0^State0");
    net->SetPGaussian("Cont1", "-1.5", "0.75", "0.5 2.5", "Tab0^State1");
    // Distribution(1) is Cont1's distribution — a conditional Gaussian.
    WCondGaussianDistribFun *pCGDF = dynamic_cast<WCondGaussianDistribFun *>(net->m_pNet->m_paDistribution->Distribution(1));
    TokArr ta("Tab0^State0");
    Vector<int> dpInd = pCGDF->GetDiscreteParentValuesIndexes(ta);
    if (dpInd.size() != 1)
    {
        PNL_THROW(pnl::CAlgorithmicException, "Size of dpInd is wrong");
    }
    if (dpInd[0] != 0)
    {
        PNL_THROW(pnl::CAlgorithmicException, "GetDiscreteParentValuesIndexes works incorrectly");
    }
    TokArr ta2("Tab0^State1");
    dpInd = pCGDF->GetDiscreteParentValuesIndexes(ta2);
    if (dpInd[0] != 1)
    {
        PNL_THROW(pnl::CAlgorithmicException, "GetDiscreteParentValuesIndexes works incorrectly");
    }
    delete net;
}
示例2: CrashTestJtreeInferenceSoftMax
void CrashTestJtreeInferenceSoftMax()
{
BayesNet *net = SimpleSoftMaxModel();
net->SetProperty("Inference", "jtree");
TokArr jpd5 = net->GetJPD("node5");
std::cout<< "jpd node5:\t"<<jpd5 << "\n";
delete net;
}
示例3: TestAddArc
// Adds the arc node2 -> node5 to the seven-node model, observes every
// continuous node plus the discrete node4, then runs junction-tree inference
// and prints the JPDs of node3, node6 and node2.
// Fixed: the completion message wrongly said "TestDelArc".
void TestAddArc()
{
    BayesNet *net = SevenNodesModel();
    net->AddArc("node2", "node5");
    // net->SetPTabular("node6^True node6^False", "0.2 0.8", "node4^True");
    // net->SetPTabular("node6^True node6^False", "0.1 0.9", "node4^False");
    // all continuous nodes are observed
    net->EditEvidence("node0^0.3");
    net->EditEvidence("node1^0.2");
    net->EditEvidence("node5^0.9");
    net->EditEvidence("node4^True");
    net->SetProperty("Inference", "jtree");
    TokArr jpd3 = net->GetJPD("node3");
    std::cout<< "jpd node3:\t"<<jpd3 << "\n";
    TokArr jpd6 = net->GetJPD("node6");
    std::cout<< "jpd node6:\t"<<jpd6 << "\n";
    TokArr jpd2 = net->GetJPD("node2");
    std::cout<< "jpd node2:\t"<<jpd2 << "\n";
    delete net;
    std::cout << "TestAddArc is completed successfully" << std::endl;
}
示例4: take
// Factory: deserializes a BayesNet from a whitespace-separated text file.
// Expected layout, in order:
//   1) node count, then one value-count (size) per node,
//   2) per node: node index, parent count, parent indices,
//   3) per node: node index, entry count, CPT entries (doubles),
//   4) optional: a size followed by that many precomputed joint probabilities.
// Returns nullptr when the file cannot be opened.
// NOTE(review): this listing is truncated by the source page — the `else`
// branch and the end of the function are not visible here; do not assume the
// missing part from this excerpt.
BayesNet *BayesNet::loadFromTextFile(const string & filename) {
BayesNet* bn = nullptr;
uint nNodes; // number of nodes in network
vector<uint> nodeSizes; // number of the values each node can take (discrete variables)
vector<vector<uint>> mMapParents; // each node has a set of parent-nodes
vector<vector<double>> mapCpt; // each node has a conditional probability table (mMapCpt)
vector<double> listJointProbabilities; // use for pre-compute all joint probability over network
uint value;
double dValue;
ifstream inf(filename);
if (!inf.is_open()) {
cout << "Open file error!" << "\n";
return nullptr;
}
/// nodeSizes
inf >> nNodes;
for (uint i = 0; i < nNodes; ++i) {
inf >> value;
nodeSizes.push_back(value);
// Pre-create one (initially empty) parent list and CPT per node so that the
// index-addressed fills below cannot go out of range.
mMapParents.push_back(vector<uint>());
mapCpt.push_back(vector<double>());
}
/// mMapParents
uint nodeIndex, nParents;
for (uint i = 0; i < nNodes; ++i) {
inf >> nodeIndex >> nParents;
for (uint j = 0; j < nParents; ++j) {
inf >> value;
mMapParents[nodeIndex].push_back(value);
}
}
/// CPTs
// NOTE(review): here `nParents` is reused as the CPT entry count — presumably
// the file stores the number of table entries in that position; confirm format.
for (uint i = 0; i < nNodes; ++i) {
inf >> nodeIndex >> nParents;
for (uint j = 0; j < nParents; ++j) {
inf >> dValue;
mapCpt[nodeIndex].push_back(dValue);
}
}
bn = new BayesNet(nNodes, nodeSizes, mMapParents, mapCpt);
/// jointProbabilities
uint size = 0;
inf >> size;
if (size > 0) {
for (uint i = 0; i < size; ++i) {
inf >> dValue;
listJointProbabilities.push_back(dValue);
}
bn->setJointDistribution(listJointProbabilities);
} else { // (truncated in this listing)
示例5: SimpleCGM1
// Structure of the model (node numbers in parentheses):
//   Cont0(3)   Tab0(0)
//      |          |
//      \/         \/
//          Cont1(1)
//             |
//             \/
//          Cont2(2)        (parameters are assigned below)
BayesNet *TestsPnlHigh::SimpleCGM2()
{
    BayesNet *model = SimpleCGM1();
    // Unconditional Gaussian for Cont0; one conditional Gaussian per state of
    // the discrete parent Tab0 for Cont1; Cont2 depends on Cont1 only.
    model->SetPGaussian("Cont0", "1.5 -0.5", "1.0 0.3 0.3 2.0", TokArr(), TokArr());
    model->SetPGaussian("Cont1", "0.0", "2.5", "1.0 3.0", "Tab0^State0");
    model->SetPGaussian("Cont1", "-1.5", "0.75", "0.5 2.5", "Tab0^State1");
    model->SetPGaussian("Cont2", "0.1", "1.1", "0.0");
    return model;
}
示例6: TestCondSoftMaxParamLearning
// Generates 100 evidences from the simple conditional softmax model and learns
// its parameters. The comparison of learned Gaussian mean/covariance against a
// reference model is currently commented out; `eps` and `nodes` are kept only
// because that commented block uses them.
// @param DeleteNet  when true, the learned network is freed before returning.
// (Fixed: removed stray ';' after the if-block.)
void TestCondSoftMaxParamLearning(bool DeleteNet)
{
    // BayesNet *net = SimpleCondSoftMaxModel();
    BayesNet *netToLearn = SimpleCondSoftMaxModel();
    float eps = 1e-1f;   // tolerance used by the commented-out comparison below
    int nEvid = 100;     // number of evidences to sample for learning
    netToLearn->GenerateEvidences(nEvid);
    netToLearn->LearnParameters();
    String nodes[] = {"node0", "node1", "node2"};
    /* int i, j;
    TokArr LearnParam, Param;
    for(i = 0; i < 3; i++)
    {
    LearnParam = netToLearn->GetGaussianMean(nodes[i]);
    Param = net->GetGaussianMean(nodes[i]);
    if(LearnParam[0].fload.size() != Param[0].fload.size())
    {
    PNL_THROW(pnl::CAlgorithmicException, "Parameters learning is wrong");
    }
    for(j = 0; j < LearnParam[0].fload.size(); j++)
    {
    if( LearnParam[0].FltValue(j).fl - Param[0].FltValue(j).fl > eps)
    {
    PNL_THROW(pnl::CAlgorithmicException, "Parameters learning is wrong");
    }
    }
    LearnParam = netToLearn->GetGaussianCovar(nodes[i]);
    Param = net->GetGaussianCovar(nodes[i]);
    if(LearnParam[0].fload.size() != Param[0].fload.size())
    {
    PNL_THROW(pnl::CAlgorithmicException, "Parameters learning is wrong");
    }
    for(j = 0; j < LearnParam[0].fload.size(); j++)
    {
    if( LearnParam[0].FltValue(j).fl - Param[0].FltValue(j).fl > eps)
    {
    PNL_THROW(pnl::CAlgorithmicException, "Parameters learning is wrong");
    }
    }
    }
    */
    if (DeleteNet)
    {
        delete netToLearn;
    }
    std::cout << "TestCondSoftMaxParamLearning is completed successfully" << std::endl;
}
示例7: run
void run(unsigned int size, unsigned int select) {
BayesNet<> bn;
map<vector<int>, map<int, double>> cpt;
map<int, double> dist;
map<vector<int>, map<int, double>> sinkCPT;
for (int i = 0; i < size; i++) {
if (i == select) {
dist.insert(make_pair(0, 0.0));
dist.insert(make_pair(1, 1.0));
cpt.insert(CondProb<>::CondCase(vector<int>(), dist));
bn.add_node(i, CondProb<>(cpt));
cpt.clear();
dist.insert(make_pair(0, 0.0));
dist.insert(make_pair(1, 1.0));
vector<int> parents(size, 0);
parents[i] = 1;
sinkCPT.insert(CondProb<>::CondCase(parents, dist));
} else {
dist.insert(make_pair(0, 1.0));
dist.insert(make_pair(1, 0.0));
cpt.insert(CondProb<>::CondCase(vector<int>(), dist));
bn.add_node(i, CondProb<>(cpt));
cpt.clear();
dist.insert(make_pair(0, 1.0));
dist.insert(make_pair(1, 0.0));
vector<int> parents(size, 0);
parents[i] = 1;
sinkCPT.insert(CondProb<>::CondCase(parents, dist));
}
cpt.clear();
dist.clear();
}
set<int> parents;
for (int i = 0; i < size; i++)
parents.insert(i);
bn.add_node(size, parents, CondProb<>(sinkCPT));
map<int, int> values;
for (int i = 0; i < size; i++)
values.insert(make_pair(0, 0));
values.insert(make_pair(select, 1));
map<std::map<int, int>, double> marginal_dist = bn.marginal_dist(values, 512, SampleStrategy::GIBBS);
assertEquals(marginal_dist[values], (double)1.0);
}
示例8: TestSetDistributionSoftMax
// Verifies that the parameters set up by SimpleSoftMaxModel() are read back
// correctly: Gaussian means (0.1/0.2/0.3) and covariances (0.9/0.8/0.7) of
// node0-node2, and the softmax offset (0.1, 0.1) and weights (0.3..0.8) of
// node5. Throws CAlgorithmicException on any mismatch.
// Fixed: removed six unused locals (val0..val5) that duplicated the reads in
// the condition below, and corrected the node5 error messages, which wrongly
// said "gaussian" for softmax parameters.
void TestSetDistributionSoftMax()
{
    BayesNet *net = SimpleSoftMaxModel();
    if (net->GetGaussianMean("node0")[0].FltValue() != 0.1f)
    {
        PNL_THROW(pnl::CAlgorithmicException, "node0 : Setting or getting gaussian parameters is wrong");
    }
    if (net->GetGaussianMean("node1")[0].FltValue() != 0.2f)
    {
        PNL_THROW(pnl::CAlgorithmicException, "node1 : Setting or getting gaussian parameters is wrong");
    }
    if (net->GetGaussianMean("node2")[0].FltValue() != 0.3f)
    {
        PNL_THROW(pnl::CAlgorithmicException, "node2 : Setting or getting gaussian parameters is wrong");
    }
    if (net->GetGaussianCovar("node0")[0].FltValue() != 0.9f)
    {
        PNL_THROW(pnl::CAlgorithmicException, "node0 : Setting or getting gaussian parameters is wrong");
    }
    if (net->GetGaussianCovar("node1")[0].FltValue() != 0.8f)
    {
        PNL_THROW(pnl::CAlgorithmicException, "node1 : Setting or getting gaussian parameters is wrong");
    }
    if (net->GetGaussianCovar("node2")[0].FltValue() != 0.7f)
    {
        PNL_THROW(pnl::CAlgorithmicException, "node2 : Setting or getting gaussian parameters is wrong");
    }
    if ((net->GetSoftMaxOffset("node5")[0].FltValue(0).fl != 0.1f)||
        (net->GetSoftMaxOffset("node5")[0].FltValue(1).fl != 0.1f))
    {
        PNL_THROW(pnl::CAlgorithmicException, "node5 : Setting or getting softmax parameters is wrong");
    }
    TokArr node5 = net->GetSoftMaxWeights("node5");
    if ((node5[0].FltValue(0).fl != 0.3f)||
        (node5[0].FltValue(1).fl != 0.4f)||
        (node5[0].FltValue(2).fl != 0.5f)||
        (node5[0].FltValue(3).fl != 0.6f)||
        (node5[0].FltValue(4).fl != 0.7f)||
        (node5[0].FltValue(5).fl != 0.8f))
    {
        PNL_THROW(pnl::CAlgorithmicException, "node5 : Setting or getting softmax parameters is wrong");
    }
    delete net;
    std::cout << "TestSetDistributionSoftMax is completed successfully" << std::endl;
}
示例9: TestJtreeInference1SevenNodesModel
void TestJtreeInference1SevenNodesModel()
{
BayesNet *net = SevenNodesModel();
// all continuous nodes are observed
net->EditEvidence("node0^0.3");
net->EditEvidence("node1^0.2");
net->EditEvidence("node5^0.9");
net->EditEvidence("node4^True");
net->SetProperty("Inference", "jtree");
TokArr jpd3 = net->GetJPD("node3");
std::cout<< "jpd node3:\t"<<jpd3 << "\n";
TokArr jpd6 = net->GetJPD("node6");
std::cout<< "jpd node6:\t"<<jpd6 << "\n";
TokArr jpd2 = net->GetJPD("node2");
std::cout<< "jpd node2:\t"<<jpd2 << "\n";
delete net;
std::cout << "TestJtreeInference1SevenNodesModel is completed successfully" << std::endl;
}
示例10: canMarginalizeNode
void canMarginalizeNode() {
BayesNet<> bn;
map<vector<int>, map<int, double>> cpt;
map<int, double> dist;
dist.insert(make_pair(0, 0.7));
dist.insert(make_pair(1, 0.3));
cpt.insert(CondProb<>::CondCase(vector<int>(), dist));
bn.add_node(0, CondProb<>(cpt));
map<int, double> marginal_dist = bn.marginal_dist(0, 512);
assertTrue(marginal_dist[0] > 0.65 && marginal_dist[0] < 0.75);
assertTrue(marginal_dist[1] > 0.25 && marginal_dist[1] < 0.35);
}
示例11: canMarginalizeNetwork
void canMarginalizeNetwork() {
BayesNet<> bn;
map<vector<int>, map<int, double>> cpt;
map<int, double> dist;
dist.insert(make_pair(0, 0.7));
dist.insert(make_pair(1, 0.3));
cpt.insert(CondProb<>::CondCase(vector<int>(), dist));
bn.add_node(0, CondProb<>(cpt));
map<int, int> values;
values.insert(make_pair(0, 0));
map<std::map<int, int>, double> marginal_dist = bn.marginal_dist(values, 512, SampleStrategy::GIBBS);
assertTrue(marginal_dist[values] > 0.65 && marginal_dist[values] < 0.75);
}
示例12: BayesNet
// Builds a minimal conditional Gaussian model:
//   Cont1   Tab1
//     |       |
//     \/      \/
//        Cont2
// Cont1/Cont2 are 1-dimensional continuous nodes; Tab1 is a two-state
// discrete node. Caller owns the returned network.
BayesNet *TestsPnlHigh::CreateCondGaussianModel1()
{
    BayesNet *model = new BayesNet();
    model->AddNode(continuous^"Cont1 Cont2", "dim1");
    model->AddNode(discrete^"Tab1", "dim1 dim2");
    model->AddArc("Cont1 Tab1", "Cont2");
    return model;
}
示例13: TestJtreeInferenceSoftMax1
void TestJtreeInferenceSoftMax1()
{
BayesNet *net = SimpleSoftMaxModel();
// all continuous nodes are observed
net->EditEvidence("node0^0.3");
net->EditEvidence("node1^0.2");
net->EditEvidence("node2^0.9");
net->SetProperty("Inference", "jtree");
TokArr jpd5 = net->GetJPD("node5");
std::cout<< "jpd node5:\t"<<jpd5 << "\n";
delete net;
std::cout << "TestJtreeInferenceSoftMax1 is completed successfully" << std::endl;
}
示例14: canSample
void canSample() {
BayesNet<> bn;
map<vector<int>, map<int, double>> cpt;
map<int, double> dist;
dist.insert(make_pair(0, 1.0));
dist.insert(make_pair(1, 0.0));
cpt.insert(CondProb<>::CondCase(vector<int>(), dist));
bn.add_node(0, CondProb<>(cpt));
assertEquals(bn.sample_node(0), 0);
cpt.clear();
dist.clear();
dist.insert(make_pair(0, 0.0));
dist.insert(make_pair(1, 1.0));
cpt.insert(CondProb<>::CondCase(vector<int>(), dist));
bn.add_node(1, CondProb<>(cpt));
assertEquals(bn.sample_node(1), 1);
}
示例15: TestNodeTypes
void TestNodeTypes()
{
BayesNet *net = SimpleCondSoftMaxModel();
TokArr n0t = net->GetNodeType("node0");
TokArr n1t = net->GetNodeType("node1");
TokArr n2t = net->GetNodeType("node2");
TokArr n3t = net->GetNodeType("node3");
TokArr n5t = net->GetNodeType("node5");
TokArr n6t = net->GetNodeType("node6");
printf("\nNodes types\n");
printf("Node 0 type: %s\n", String(n0t).c_str());
printf("Node 1 type: %s\n",String(n1t).c_str());
printf("Node 2 type: %s\n",String(n2t).c_str());
printf("Node 3 type: %s\n",String(n3t).c_str());
printf("Node 5 type: %s\n",String(n5t).c_str());
printf("Node 6 type: %s\n",String(n6t).c_str());
delete net;
}