本文整理汇总了C++中NeuralNetwork::getInputNum方法的典型用法代码示例。如果您正苦于以下问题:C++ NeuralNetwork::getInputNum方法的具体用法?C++ NeuralNetwork::getInputNum怎么用?C++ NeuralNetwork::getInputNum使用的例子?那么, 这里精选的方法代码示例或许可以为您提供帮助。您也可以进一步了解该方法所在类NeuralNetwork
的用法示例。
在下文中一共展示了NeuralNetwork::getInputNum方法的2个代码示例,这些例子默认根据受欢迎程度排序。您可以为喜欢或者感觉有用的代码点赞,您的评价将有助于系统推荐出更棒的C++代码示例。
示例1: distribution
//TO FINISH
/// Crossover constructor: builds a child network whose every neuron is taken
/// from either @p father or @p mother with probability 1/2 each.
///
/// @param father           first parent network
/// @param mother           second parent network (must have the same topology)
/// @param addRandomChange  when true, mutate the child via randomiseWeight()
/// @throws std::invalid_argument if the parents differ in input count, layer
///         count, or per-layer neuron count. (The original code only printed
///         the error to cerr and then continued, indexing past the end of the
///         smaller parent's vectors — undefined behaviour.)
NeuralNetwork::NeuralNetwork(const NeuralNetwork& father, const NeuralNetwork& mother, bool addRandomChange)
{
    // checks if sizes correspond
    if(father.getInputNum() != mother.getInputNum()
       || father.getLayers().size() != mother.getLayers().size())
    {
        cerr << "error, mother and father NN of different sizes" << endl;
        cerr << "error in numbers of layers" << endl;
        cerr << father.getLayers().size() << " " << mother.getLayers().size() << endl;
        throw std::invalid_argument("NeuralNetwork crossover: parents differ in input count or layer count");
    }
    for(unsigned int i = 0; i < father.getLayers().size(); i++)
    {
        if(father.getLayers()[i].getNeurons().size() != mother.getLayers()[i].getNeurons().size())
        {
            cerr << "error, mother and father NN of different sizes" << endl;
            cerr << "error in layers " << i << endl;
            cerr << father.getLayers()[i].getNeurons().size() << " " << mother.getLayers()[i].getNeurons().size() << endl;
            throw std::invalid_argument("NeuralNetwork crossover: parents differ in neuron count of a layer");
        }
    }
    m_inputNum = father.getInputNum();
    m_outputNum = father.getOutputNum();
    m_hiddenLayerNum = father.getHiddenLayerNum();

    // One process-wide RNG, seeded once; coinFlip is the 50/50 parent choice.
    static mt19937 generator(random_device{}());
    bernoulli_distribution coinFlip(0.5);

    vector<Neuron> childNeurons;
    for(unsigned int i = 0; i < father.m_layers.size(); i++)
    {
        // Hoist the neuron vectors out of the inner loop (the original
        // re-fetched them on every iteration).
        const auto& fatherNeurons = father.m_layers[i].getNeurons();
        const auto& motherNeurons = mother.m_layers[i].getNeurons();
        childNeurons.clear();                       // clear() keeps capacity between layers
        childNeurons.reserve(fatherNeurons.size()); // one allocation per layer at most
        // size_t index — the original used a signed `int` against an
        // (apparently) unsigned neuron count.
        for(size_t j = 0; j < fatherNeurons.size(); j++)
        {
            // 1/2 chance to take the mother neuron, otherwise the father's.
            childNeurons.push_back(coinFlip(generator) ? motherNeurons[j] : fatherNeurons[j]);
        }
        m_layers.push_back(childNeurons);
    }
    if(addRandomChange)
    {
        // presumably applies a small random mutation to the weights — TODO confirm
        randomiseWeight();
    }
}
示例2: SaveNetwork
int SaveManager::SaveNetwork(const NeuralNetwork& nn, QXmlStreamWriter & writer)
{
writer.writeStartElement("NeuralNetwork");
// recuperation de quelqes informations sur le réseau de neurones
const std::vector<NeuronLayer> layers = nn.getLayers();
const unsigned int workingLayersNum=layers.size(); //nombre de couches cachées (en plus de l'input et de l'output)
const unsigned int inputNum = nn.getInputNum(); // nombre d'inputs
writer.writeStartElement("NeuronLayer");
writer.writeAttribute("id",QString::number(0)); // 0 means "input layer"
writer.writeTextElement("inputsNum", QString::number(inputNum));
writer.writeEndElement();
//for each layer
for (unsigned int layer=1; layer<=workingLayersNum; layer++)
{
const std::vector<Neuron> neurons = layers.at(layer-1).getNeurons();
unsigned int neuronsNum = neurons.size();
writer.writeStartElement("NeuronLayer");
writer.writeAttribute("id", QString::number(layer));
//for each neuron
for (unsigned int neuron=0; neuron<neuronsNum; neuron++)
{
writer.writeStartElement("Neuron");
writer.writeAttribute("id",QString::number(neuron));
const std::vector<double> weights = neurons.at(neuron).getWeights();
if(weights.size()!=0)
{
QString tempS = QString::number(weights.at(0)) + " ";
//for each weight
for(unsigned int w=1; w<weights.size(); w++)
{
tempS +=QString::number(weights.at(w)) + " " ;
}
writer.writeTextElement("weights", tempS);
}
writer.writeEndElement();
}
writer.writeEndElement();
}
writer.writeEndElement();//NeuraleNetwork
return 0;
}