

C++ NeuralNetwork::getDefaultActivationFunction Method Code Examples

This article collects typical usage examples of the C++ method NeuralNetwork::getDefaultActivationFunction. If you have been wondering what this method does, how it is called in practice, or where to find working examples, the curated code samples below should help. You can also explore further usage examples of the containing class, NeuralNetwork.


Two code examples of the NeuralNetwork::getDefaultActivationFunction method are shown below, ordered by popularity.
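
Both examples come from the nerd toolkit and rely on the same interface. For orientation, here is a minimal sketch of the assumed declaration; the exact header in the nerd toolkit may differ, and everything in this snippet is inferred from the call sites below rather than copied from the real class:

class ActivationFunction;
class TransferFunction;

// Assumed interface sketch, not the verbatim nerd-toolkit header.
class NeuralNetwork {
public:
	// Returns the network-wide prototype activation function. Callers
	// below either dynamic_cast it to a concrete subclass (Example 1)
	// or dereference it to construct new neurons (Example 2).
	ActivationFunction* getDefaultActivationFunction();
	TransferFunction* getDefaultTransferFunction();
	// ... remaining members omitted ...
};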

Example 1: applyOperator
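
This evolution operator mutates the parameters (alpha, beta, gamma, delta, a*) of all SelfRegulatingNeuronActivationFunctions in an individual's network genome. In global mode it derives one new parameter set from the first eligible activation function and, with probability mChangeProbability, applies it to every eligible function, including the network's default activation function when that default is itself a SelfRegulatingNeuronActivationFunction. Otherwise, each eligible function is mutated independently.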

bool AdaptSRNParametersOperator::applyOperator(Individual *individual, CommandExecutor*) {

	NeuralNetwork *net = dynamic_cast<NeuralNetwork*>(individual->getGenome());

	if(net == 0) {
		Core::log("AdaptSRNParametersOperator: Could not apply operator because individual did not "
				  "provide a NeuralNetwork as genome!");
		return false;
	}

	double changeProbability = mChangeProbability->get();
	
	QList<Neuron*> networkNeurons = net->getNeurons();
	QList<SelfRegulatingNeuronActivationFunction*> consideredAF;
	for(QListIterator<Neuron*> i(networkNeurons); i.hasNext();) {
		// Skip all neurons whose activation function cannot be changed.
		Neuron *neuron = i.next();
		SelfRegulatingNeuronActivationFunction *srnaf = 
				dynamic_cast<SelfRegulatingNeuronActivationFunction*>(neuron->getActivationFunction());
		if(srnaf == 0) {
			continue;
		}
		if(neuron->hasProperty(NeuralNetworkConstants::TAG_ELEMENT_PROTECTED)) {
			continue;
		}
		if(neuron->hasProperty(NeuralNetworkConstants::TAG_NEURON_PROTECT_ACTIVATION_FUNCTION)) {
			continue;
		}
		consideredAF.append(srnaf);
	}
	
	if(mGlobalSettings->get()) {
		if(consideredAF.empty()) {
			//there are no matching activation functions to change.
			return true;
		}
		//check if mutations take place
		if(Random::nextDouble() >= changeProbability) {
			return true;
		}
		SelfRegulatingNeuronActivationFunction *srnaf = consideredAF.at(0);
		double newAlpha = mutateParameter(srnaf->getAlpha(), 0);
		double newBeta = mutateParameter(srnaf->getBeta(), 1);
		double newGamma = mutateParameter(srnaf->getGamma(), 2);
		double newDelta = mutateParameter(srnaf->getDelta(), 3);
		double newAStar = mutateParameter(srnaf->getAStar(), 4);
		
		//check if the default AF should be mutated, too.
		SelfRegulatingNeuronActivationFunction *defaultAF = 
				dynamic_cast<SelfRegulatingNeuronActivationFunction*>(net->getDefaultActivationFunction());
		if(defaultAF != 0) {
			consideredAF.append(defaultAF);
		}
		
		for(QListIterator<SelfRegulatingNeuronActivationFunction*> i(consideredAF); i.hasNext();) {
			SelfRegulatingNeuronActivationFunction *af = i.next();
			af->getAlpha()->set(newAlpha);
			af->getBeta()->set(newBeta);
			af->getGamma()->set(newGamma);
			af->getDelta()->set(newDelta);
			af->getAStar()->set(newAStar);
		}
	}
	else {
		for(QListIterator<SelfRegulatingNeuronActivationFunction*> i(consideredAF); i.hasNext();) {
			SelfRegulatingNeuronActivationFunction *af = i.next();
			
			if(Random::nextDouble() >= changeProbability) {
				continue;
			}
			
			//change each parameter separately.
			double newAlpha = mutateParameter(af->getAlpha(), 0);
			double newBeta = mutateParameter(af->getBeta(), 1);
			double newGamma = mutateParameter(af->getGamma(), 2);
			double newDelta = mutateParameter(af->getDelta(), 3);
			double newAStar = mutateParameter(af->getAStar(), 4);
		
			af->getAlpha()->set(newAlpha);
			af->getBeta()->set(newBeta);
			af->getGamma()->set(newGamma);
			af->getDelta()->set(newDelta);
			af->getAStar()->set(newAStar);
		}
	}
// 
// 	double probability = mInsertionProbability->get();
// 
// 	if(consideredNeurons.empty()) {
// 		return true;
// 	}
// 
// 	//prepare the generation date as string.
// 	QString generationDate = QString::number(Evolution::getEvolutionManager()
// 				->getCurrentGenerationValue()->get());
// 	
// 	for(int i = 0; i < maxNumberOfNewBiases; ++i) {
// 		
// 		if(Random::nextDouble() >= probability) {
// 			continue;
//......... some code omitted here .........
Developer ID: nerd-toolkit, Project: nerd, Lines of code: 101, Source file: AdaptSRNParametersOperator.cpp
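
The helper mutateParameter() is not included in this excerpt. A hypothetical sketch of what such a helper could look like is shown below, assuming the parameter getters (getAlpha() etc.) return pointers to DoubleValue objects with a get() method, and assuming an illustrative per-parameter deviation list; none of these member names are confirmed by the excerpt:

// Hypothetical sketch only; mDeviations is an assumed member holding one
// mutation strength per parameter index (0 = alpha ... 4 = a*).
double AdaptSRNParametersOperator::mutateParameter(DoubleValue *parameter, int index) {
	double value = parameter->get();
	double deviation = mDeviations.at(index);
	// Add uniform noise in [-deviation, +deviation] to the current value.
	value += (Random::nextDouble() * 2.0 - 1.0) * deviation;
	return value;
}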

Example 2: connectNetworksToInterfaces
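
This parser reads agent/network pairs from a command-line argument, locates the matching SimObjectGroup (agent) in the physics manager, and attaches a neural network to it. If no network file is given, it builds a fresh ModularNeuralNetwork whose input and output neurons are created from the network's default transfer and activation functions; the special parameter "c" loads the most recently edited network recorded in recentNetworks.txt under the config properties directory.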

void NetworkAgentControlParser::connectNetworksToInterfaces() {
	NeuralNetworkManager *nnm = Neuro::getNeuralNetworkManager();
	bool addedNetwork = false;

	QStringList params = mNetLoaderArgument->getParameters();

	int numberOfEntries = mNetLoaderArgument->getNumberOfEntries();

	for(int i = 0; i < numberOfEntries; ++i) {
		QStringList paramSet;
		if(params.size() > i) {
			paramSet = params.at(i).split(" ");
		}
		if(paramSet.size() > 2) {
			Core::log("NetworkAgentControlParser: Could not interpret command line argument. ");
			continue;
		}
		QString agentName = "";
		QString networkFile = "";
		
		if(paramSet.size() > 0) {
			if(paramSet.at(0).trimmed() == "c") {
				networkFile = "c";
			}
			else {
				agentName = paramSet.at(0);
			}
		}
		
		if(paramSet.size() > 1) {
			networkFile = paramSet.at(1);
		}
		
		if(networkFile.trimmed() == "c") {
			networkFile = "";
			//use most recent network from the editor for this agent. 
			QString filePrefix = Core::getInstance()->getConfigDirectoryPath() + "/properties";
			QFile file(filePrefix + "/recentNetworks.txt");
			if(file.open(QIODevice::ReadOnly | QIODevice::Text)) {
				QTextStream input(&file);
				if(!input.atEnd()) {
					networkFile = input.readLine();
				}
			}
			file.close();
		}
		
		PhysicsManager *pm = Physics::getPhysicsManager();

		SimObjectGroup *agent = 0;
		if(agentName == "" && !pm->getSimObjectGroups().empty()) {
			agent = pm->getSimObjectGroups().at(0);
		}
		else {
			agent = pm->getSimObjectGroup(agentName);
		}

		if(agent == 0) {
			Core::log(QString("NetworkAgentControlParser: Could not find an agent with name [")
					.append(agentName).append("]! [Skipping]"));
			continue;
		}
		
		QString errorMessage;
		QList<QString> messages;

		NeuralNetwork *net = 0;

		//check if there is already a network that can be reused.
		QList<NeuralNetwork*> networks = nnm->getNeuralNetworks();
		if(networks.size() > i) {
			net = networks.at(i);
		}

		if(net == 0) {
			if(networkFile == "") {
				//create standard network with matching number of input/output neurons.
	
				net = new ModularNeuralNetwork();
				// The agent's output values feed the network's inputs and
				// vice versa, hence the deliberate swap below.
				int numberOfInputs = agent->getOutputValues().size();
				int numberOfOutputs = agent->getInputValues().size();
	
				for(int j = 0; j < numberOfInputs; ++j) {
					Neuron *neuron = new Neuron("Neuron" + QString::number(j),
											*net->getDefaultTransferFunction(),
											*net->getDefaultActivationFunction());
					neuron->setProperty(Neuron::NEURON_TYPE_INPUT);
					net->addNeuron(neuron);
				}
				for(int j = 0; j < numberOfOutputs; ++j) {
					Neuron *neuron = new Neuron("Neuron" + QString::number(j),
											*net->getDefaultTransferFunction(),
											*net->getDefaultActivationFunction());
					neuron->setProperty(Neuron::NEURON_TYPE_OUTPUT);
					net->addNeuron(neuron);
				}
		
			}
			else {
				net = NeuralNetworkIO::createNetworkFromFile(
//......... some code omitted here .........
Developer ID: nerd-toolkit, Project: nerd, Lines of code: 101, Source file: NetworkAgentControlParser.cpp
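
Note that the Neuron constructor in Example 2 takes the default functions by reference (*net->getDefaultTransferFunction(), *net->getDefaultActivationFunction()), which suggests each neuron copies the prototypes rather than sharing them. A purely illustrative sketch of that presumed pattern follows; createCopy() and the member names are assumptions, not the confirmed nerd-toolkit implementation:

// Illustrative sketch of the presumed clone-on-construct pattern.
Neuron::Neuron(const QString &name, const TransferFunction &tf,
               const ActivationFunction &af)
	: mName(name),
	  mTransferFunction(tf.createCopy()),    // assumed clone method
	  mActivationFunction(af.createCopy())   // assumed clone method
{
}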


Note: The NeuralNetwork::getDefaultActivationFunction examples in this article were compiled by 纯净天空 from open-source code hosted on GitHub, MSDocs, and similar platforms. The snippets are taken from open-source projects contributed by their authors, and copyright remains with the original authors; consult each project's license before using or redistributing the code. Do not reproduce this article without permission.