当前位置: 首页>>代码示例>>C++>>正文


C++ NeuralNetwork::getNeurons方法代码示例

本文整理汇总了C++中NeuralNetwork::getNeurons方法的典型用法代码示例。如果您正苦于以下问题:C++ NeuralNetwork::getNeurons方法的具体用法?C++ NeuralNetwork::getNeurons怎么用?C++ NeuralNetwork::getNeurons使用的例子?那么恭喜您, 这里精选的方法代码示例或许可以为您提供帮助。您也可以进一步了解该方法所在NeuralNetwork的用法示例。


在下文中一共展示了NeuralNetwork::getNeurons方法的6个代码示例,这些例子默认根据受欢迎程度排序。您可以为喜欢或者感觉有用的代码点赞,您的评价将有助于系统推荐出更棒的C++代码示例。

示例1: updateNetworkConnection

void ActivationFrequencyCalculator::updateNetworkConnection() {
	
	QList<NeuralNetwork*> networks = Neuro::getNeuralNetworkManager()->getNeuralNetworks();

	for(QListIterator<NeuralNetwork*> i(networks); i.hasNext();) {
		NeuralNetwork *net = i.next();

		if(net->getControlInterface() != 0 
			&& net->getControlInterface()->getName() == mAgentName->get())
		{
			QList<Neuron*> neurons = net->getNeurons();
			for(QListIterator<Neuron*> j(neurons); j.hasNext();) {
				Neuron *neuron = j.next();
				if(neuron->getId() == mNeuronId->get()) {
					if(mObservedNeuron == neuron) {
						return;
					}
					mObservedNeuron = neuron;
					mFrequency->set(0);
					mLastActivation = neuron->getOutputActivationValue().get();
					mStepCounter = 0;
					mFoundFirstReference = false;

					return;
				}
			}
		}
	}
	mObservedNeuron = 0;
	mFrequency->set(0);
	mLastActivation = 0.0;
	mStepCounter = 0;
	mFoundFirstReference = false;

}
开发者ID:nerd-toolkit,项目名称:nerd,代码行数:35,代码来源:ActivationFrequencyCalculator.cpp

示例2: testFreeElements

void TestNeuralNetwork::testFreeElements() {
	bool destroyedNeuron1 = false;
	bool destroyedNeuron2 = false;

	NeuronAdapter *neuron1 = new NeuronAdapter("Neuron1", TransferFunctionAdapter("TF1", 0.0, 1.0), 
							ActivationFunctionAdapter("AF1"), &destroyedNeuron1);

	NeuronAdapter *neuron2 = new NeuronAdapter("Neuron2", TransferFunctionAdapter("TF1", 0.0, 1.0), 
							ActivationFunctionAdapter("AF1"), &destroyedNeuron2);

	NeuralNetwork *net = new NeuralNetwork();

	InterfaceValue *inputValue = new InterfaceValue("/", "Test1");
	InterfaceValue *outputValue = new InterfaceValue("/", "Test1");
	ControlInterfaceAdapter *controller = new ControlInterfaceAdapter();
	controller->mInputValues.append(inputValue);
	controller->mOutputValues.append(outputValue);



	//free elements(false)

	QVERIFY(net->addNeuron(neuron1) == true);
	QVERIFY(net->addNeuron(neuron2) == true);

	net->setControlInterface(controller);

	QCOMPARE(net->getNeurons().size(), 2);
	QVERIFY(net->getNeurons().contains(neuron1));
	QVERIFY(net->getNeurons().contains(neuron2));

	net->freeElements(false);

	QCOMPARE(net->getNeurons().size(), 0);
	QVERIFY(destroyedNeuron1 == false);
	QVERIFY(destroyedNeuron2 == false);

	//free elements(true)

	QVERIFY(net->addNeuron(neuron1) == true);
	QVERIFY(net->addNeuron(neuron2) == true);

	net->setControlInterface(controller);

	QCOMPARE(net->getNeurons().size(), 2);
	QVERIFY(net->getNeurons().contains(neuron1));
	QVERIFY(net->getNeurons().contains(neuron2));

	net->freeElements(true);

	QCOMPARE(net->getNeurons().size(), 0);
	QVERIFY(destroyedNeuron1 == true);
	QVERIFY(destroyedNeuron2 == true);
}
开发者ID:nerd-toolkit,项目名称:nerd,代码行数:54,代码来源:TestNeuralNetwork.cpp

示例3: testDuplicationAndEquals

// Chris
void TestNeuralNetwork::testDuplicationAndEquals() {
	TransferFunctionAdapter tfa("TFA", -0.5, 0.5);
	ActivationFunctionAdapter afa("AFA");
	SynapseFunctionAdapter sfa("SFA");

	NeuralNetwork *net = new NeuralNetwork(afa, tfa, sfa);

	ControlInterfaceAdapter controlInterface;
	net->setControlInterface(&controlInterface);

	QVERIFY(net->getControlInterface() == &controlInterface);

	Neuron *neuron1 = new Neuron("Neuron1", tfa, afa, 2001);
	Neuron *neuron2 = new Neuron("Neuron1", tfa, afa, 2002);
	Neuron *neuron3 = new Neuron("Neuron1", tfa, afa, 2003);

	neuron1->setProperty(Neuron::NEURON_TYPE_INPUT);
	neuron3->setProperty(Neuron::NEURON_TYPE_OUTPUT);

	Synapse *synapse1 = Synapse::createSynapse(neuron1, neuron2, 0.5, sfa, 3001);
	Synapse *synapse2 = Synapse::createSynapse(neuron2, neuron3, 1.5, sfa, 3002);
	Synapse *synapse3 = Synapse::createSynapse(neuron3, synapse1, 0.1, sfa, 3003);

	net->addNeuron(neuron1);
	net->addNeuron(neuron2);
	net->addNeuron(neuron3);

	QCOMPARE(net->getNeurons().size(), 3);
	QVERIFY(net->getNeurons().contains(neuron1));
	QVERIFY(net->getNeurons().contains(neuron2));
	QVERIFY(net->getNeurons().contains(neuron3));

	QCOMPARE(net->getSynapses().size(), 3);
	QVERIFY(net->getSynapses().contains(synapse1));
	QVERIFY(net->getSynapses().contains(synapse2));
	QVERIFY(net->getSynapses().contains(synapse3));
	
	
	NeuralNetwork *copy = net->createCopy();

	//control interface is NOT copied.
	QVERIFY(copy->getControlInterface() == 0);

	QCOMPARE(copy->getNeurons().size(), 3);
	QVERIFY(!copy->getNeurons().contains(neuron1));
	QVERIFY(!copy->getNeurons().contains(neuron2));
	QVERIFY(!copy->getNeurons().contains(neuron3));

	QCOMPARE(copy->getSynapses().size(), 3);
	QVERIFY(!copy->getSynapses().contains(synapse1));
	QVERIFY(!copy->getSynapses().contains(synapse2));
	QVERIFY(!copy->getSynapses().contains(synapse3));

	Neuron *cNeuron1 = NeuralNetwork::selectNeuronById(neuron1->getId(), copy->getNeurons());
	Neuron *cNeuron2 = NeuralNetwork::selectNeuronById(neuron2->getId(), copy->getNeurons());
	Neuron *cNeuron3 = NeuralNetwork::selectNeuronById(neuron3->getId(), copy->getNeurons());

	Synapse *cSynapse1 = NeuralNetwork::selectSynapseById(synapse1->getId(), copy->getSynapses());
	Synapse *cSynapse2 = NeuralNetwork::selectSynapseById(synapse2->getId(), copy->getSynapses());
	Synapse *cSynapse3 = NeuralNetwork::selectSynapseById(synapse3->getId(), copy->getSynapses());
	
	QVERIFY(cNeuron1 != 0);
	QVERIFY(cNeuron2 != 0);
	QVERIFY(cNeuron3 != 0);

	QVERIFY(cNeuron1->equals(neuron1));
	QVERIFY(cNeuron2->equals(neuron2));
	QVERIFY(cNeuron3->equals(neuron3));

	QCOMPARE(cNeuron1->getId(), (qulonglong) 2001);
	QCOMPARE(cNeuron2->getId(), (qulonglong) 2002);
	QCOMPARE(cNeuron3->getId(), (qulonglong) 2003);

	QVERIFY(net->getInputNeurons().size() == 1);
	QVERIFY(copy->getInputNeurons().size() == 1);
	QVERIFY(net->getInputNeurons().at(0) == neuron1);
	QVERIFY(copy->getInputNeurons().at(0) == cNeuron1);
	QVERIFY(net->getOutputNeurons().size() == 1);
	QVERIFY(copy->getOutputNeurons().size() == 1);
	QVERIFY(net->getOutputNeurons().at(0) == neuron3);
	QVERIFY(copy->getOutputNeurons().at(0) == cNeuron3);

	QVERIFY(cSynapse1 != 0);
	QVERIFY(cSynapse2 != 0);
	QVERIFY(cSynapse3 != 0);

	QVERIFY(cSynapse1->equals(synapse1));
	QVERIFY(cSynapse2->equals(synapse2));
	QVERIFY(cSynapse3->equals(synapse3));

	QCOMPARE(cSynapse1->getId(), (qulonglong) 3001);
	QCOMPARE(cSynapse2->getId(), (qulonglong) 3002);
	QCOMPARE(cSynapse3->getId(), (qulonglong) 3003);

	QVERIFY(cSynapse1->getSource() == cNeuron1);
	QVERIFY(cSynapse2->getSource() == cNeuron2);
	QVERIFY(cSynapse3->getSource() == cNeuron3);
	QVERIFY(cSynapse1->getTarget() == cNeuron2);
	QVERIFY(cSynapse2->getTarget() == cNeuron3);
//.........这里部分代码省略.........
开发者ID:nerd-toolkit,项目名称:nerd,代码行数:101,代码来源:TestNeuralNetwork.cpp

示例4: applyOperator

bool RemoveBiasOperator::applyOperator(Individual *individual, CommandExecutor*) {

	//This operator works on NeuralNetwork genomes only.
	NeuralNetwork *net = dynamic_cast<NeuralNetwork*>(individual->getGenome());

	if(net == 0) {
		Core::log("RemoveBiasOperator: Could not apply operator because individual did not "
				  "provide a NeuralNetwork as genome!");
		return false;
	}

	//Current generation number, stored on the individual when it is modified.
	QString generationDate = QString::number(Evolution::getEvolutionManager()
				->getCurrentGenerationValue()->get());

	//Collect the candidates: neurons with a non-zero bias whose bias is not
	//protected by any of the relevant properties.
	QList<Neuron*> candidateNeurons;
	QList<Neuron*> allNeurons = net->getNeurons();
	for(QListIterator<Neuron*> i(allNeurons); i.hasNext();) {
		Neuron *neuron = i.next();

		bool removable = neuron->getBiasValue().get() != 0.0
				&& !neuron->hasProperty(NeuralNetworkConstants::TAG_ELEMENT_PROTECTED)
				&& !neuron->hasProperty(NeuralNetworkConstants::TAG_NEURON_PROTECT_BIAS_EXISTENCE)
				&& !neuron->hasProperty(NeuralNetworkConstants::TAG_NEURON_PROTECT_BIAS);

		if(removable) {
			candidateNeurons.append(neuron);
		}
	}

	double removalChance = mRemoveProbability->get();
	int maxRemovals = mMaximalNumberOfRemovedBiases->get();

	for(int attempt = 0; attempt < maxRemovals && !candidateNeurons.empty(); ++attempt) {

		//Each attempt only removes a bias with the configured probability.
		if(Random::nextDouble() >= removalChance) {
			continue;
		}

		Neuron *chosen = candidateNeurons.value(Random::nextInt(candidateNeurons.size()));

		if(chosen == 0) {
			continue;
		}

		candidateNeurons.removeAll(chosen);
		chosen->getBiasValue().set(0.0);

		chosen->setProperty(NeuralNetworkConstants::PROP_ELEMENT_MODIFIED);

		//mark the individual as significantly modified
		individual->setProperty(EvolutionConstants::TAG_GENOME_SIGNIFICANT_CHANGE,
						generationDate);

		individual->setProperty(EvolutionConstants::TAG_GENOME_CHANGE_SUMMARY,
						individual->getProperty(EvolutionConstants::TAG_GENOME_CHANGE_SUMMARY) 
							+ ",N:" + QString::number(chosen->getId()) + ":rB"); 
	}

	return true;
}
开发者ID:nerd-toolkit,项目名称:nerd,代码行数:71,代码来源:RemoveBiasOperator.cpp

示例5: applyOperator

bool AdaptSRNParametersOperator::applyOperator(Individual *individual, CommandExecutor*) {

	NeuralNetwork *net = dynamic_cast<NeuralNetwork*>(individual->getGenome());

	if(net == 0) {
		Core::log("AdaptSRNParametersOperator: Could not apply operator because individual did not "
				  "provide a NeuralNetwork as genome!");
		return false;
	}

	double changeProbability = mChangeProbability->get();
	
	QList<Neuron*> networkNeurons = net->getNeurons();
	QList<SelfRegulatingNeuronActivationFunction*> consideredAF;
	for(QListIterator<Neuron*> i(networkNeurons); i.hasNext();) {
		//remove all neurons that can not be changed.
		Neuron *neuron = i.next();
		SelfRegulatingNeuronActivationFunction *srnaf = 
				dynamic_cast<SelfRegulatingNeuronActivationFunction*>(neuron->getActivationFunction());
		if(srnaf == 0) {
			continue;
		}
		if(neuron->hasProperty(NeuralNetworkConstants::TAG_ELEMENT_PROTECTED)) {
			continue;
		}
		if(neuron->hasProperty(NeuralNetworkConstants::TAG_NEURON_PROTECT_ACTIVATION_FUNCTION)) {
			continue;
		}
		consideredAF.append(srnaf);
	}
	
	if(mGlobalSettings->get()) {
		if(consideredAF.empty()) {
			//there are no matching activation functions to change.
			return true;
		}
		//check if mutations take place
		if(Random::nextDouble() >= changeProbability) {
			return true;
		}
		SelfRegulatingNeuronActivationFunction *srnaf = consideredAF.at(0);
		double newAlpha = mutateParameter(srnaf->getAlpha(), 0);
		double newBeta = mutateParameter(srnaf->getBeta(), 1);
		double newGamma = mutateParameter(srnaf->getGamma(), 2);
		double newDelta = mutateParameter(srnaf->getDelta(), 3);
		double newAStar = mutateParameter(srnaf->getAStar(), 4);
		
		//check if the default AF should be mutated, too.
		SelfRegulatingNeuronActivationFunction *defaultAF = 
				dynamic_cast<SelfRegulatingNeuronActivationFunction*>(net->getDefaultActivationFunction());
		if(defaultAF != 0) {
			consideredAF.append(defaultAF);
		}
		
		for(QListIterator<SelfRegulatingNeuronActivationFunction*> i(consideredAF); i.hasNext();) {
			SelfRegulatingNeuronActivationFunction *af = i.next();
			af->getAlpha()->set(newAlpha);
			af->getBeta()->set(newBeta);
			af->getGamma()->set(newGamma);
			af->getDelta()->set(newDelta);
			af->getAStar()->set(newAStar);
		}
	}
	else {
		for(QListIterator<SelfRegulatingNeuronActivationFunction*> i(consideredAF); i.hasNext();) {
			SelfRegulatingNeuronActivationFunction *af = i.next();
			
			if(Random::nextDouble() >= changeProbability) {
				continue;
			}
			
			//change each parameter separately.
			double newAlpha = mutateParameter(af->getAlpha(), 0);
			double newBeta = mutateParameter(af->getBeta(), 1);
			double newGamma = mutateParameter(af->getGamma(), 2);
			double newDelta = mutateParameter(af->getDelta(), 3);
			double newAStar = mutateParameter(af->getAStar(), 4);
		
			af->getAlpha()->set(newAlpha);
			af->getBeta()->set(newBeta);
			af->getGamma()->set(newGamma);
			af->getDelta()->set(newDelta);
			af->getAStar()->set(newAStar);
		}
	}
// 
// 	double probability = mInsertionProbability->get();
// 
// 	if(consideredNeurons.empty()) {
// 		return true;
// 	}
// 
// 	//prepare the generation date as string.
// 	QString generationDate = QString::number(Evolution::getEvolutionManager()
// 				->getCurrentGenerationValue()->get());
// 	
// 	for(int i = 0; i < maxNumberOfNewBiases; ++i) {
// 		
// 		if(Random::nextDouble() >= probability) {
// 			continue;
//.........这里部分代码省略.........
开发者ID:nerd-toolkit,项目名称:nerd,代码行数:101,代码来源:AdaptSRNParametersOperator.cpp

示例6: calculateDegreesOfFreedom

void NetworkDegreeOfFreedomCalculator::calculateDegreesOfFreedom() {

	QList<NeuralNetwork*> networks = Neuro::getNeuralNetworkManager()->getNeuralNetworks();

	if(networks.empty()) {
		mDOFAll->set(0);
		mDOFMain->set(0);
		mDOFBiasTerms->set(0);
		mDOFSynapseWeights->set(0);
		mDOFTransferFunctions->set(0);
		mDOFActivationFunctions->set(0);
		mDOFSynapseFunctions->set(0);
		return;
	}

	NeuralNetwork *network = networks.at(0);
	if(network == 0) {
		Core::log("NetworkDegreeOfFreedomCalculator: Could not find a network...");
		return;
	}

	int dofBias = 0;
	int dofWeights = 0;
	int dofTF = 0;
	int dofAF = 0;
	int dofSF = 0;
	
	
	QList<Neuron*> neurons = network->getNeurons();
	
	for(QListIterator<Neuron*> i(neurons); i.hasNext();) {
		Neuron *neuron = i.next();
		
		bool bias = true;
		bool tf = true;
		bool af = true;
		
		if(neuron->hasProperty(NeuralNetworkConstants::TAG_ELEMENT_PROTECTED)) {
			bias = false;
			tf = false;
			af = false;
		}
		else {
			QString reducedDOFs = neuron->getProperty(
					NeuralNetworkConstants::TAG_ELEMENT_REDUCED_DEGREES_OF_FREEDOM);
			if(reducedDOFs != "") {
				if(reducedDOFs.contains("B")) {
					bias = false;
				}
				if(reducedDOFs.contains("A")) {
					af = false;
				}
				if(reducedDOFs.contains("T")) {
					tf = false;
				}
			}
			//Count bias only as degree of freedom, if there is one set.
			if(neuron->getBiasValue().get() == 0.0
				|| neuron->hasProperty(NeuralNetworkConstants::TAG_NEURON_PROTECT_BIAS)) 
			{
				bias = false;
			}
		}
		if(bias) { ++dofBias; }
		if(tf) { ++dofTF; }
		if(af) { ++dofAF; }
	}
	
	QList<Synapse*> synapses = network->getSynapses();	
	for(QListIterator<Synapse*> i(synapses); i.hasNext();) {
		Synapse *synapse = i.next();
		
		bool weight = true;
		bool sf = true;
		
		if(synapse->hasProperty(NeuralNetworkConstants::TAG_ELEMENT_PROTECTED)) {
			weight = false;
			sf = false;
		}
		else {
			QString reducedDOFs = synapse->getProperty(
					NeuralNetworkConstants::TAG_ELEMENT_REDUCED_DEGREES_OF_FREEDOM);
			if(reducedDOFs != "") {
				if(reducedDOFs.contains("W")) {
					weight = false;
				}
				if(reducedDOFs.contains("S")) {
					sf = false;
				}
			}
			if(synapse->hasProperty(NeuralNetworkConstants::TAG_SYNAPSE_PROTECT_STRENGTH)) {
				weight = false;
			}
		}
		if(weight) { ++dofWeights; }
		if(sf) { ++dofSF; }
	}

	
	mDOFAll->set(dofBias + dofWeights + dofTF + dofAF + dofSF);
//.........这里部分代码省略.........
开发者ID:nerd-toolkit,项目名称:nerd,代码行数:101,代码来源:NetworkDegreeOfFreedomCalculator.cpp


注:本文中的NeuralNetwork::getNeurons方法示例由纯净天空整理自Github/MSDocs等开源代码及文档管理平台,相关代码片段筛选自各路编程大神贡献的开源项目,源码版权归原作者所有,传播和使用请参考对应项目的License;未经允许,请勿转载。