本文整理汇总了Java中org.encog.neural.neat.NEATUtil.constructNEATTrainer方法的典型用法代码示例。如果您正苦于以下问题:Java NEATUtil.constructNEATTrainer方法的具体用法?Java NEATUtil.constructNEATTrainer怎么用?Java NEATUtil.constructNEATTrainer使用的例子?那么,这里精选的方法代码示例或许可以为您提供帮助。您也可以进一步了解该方法所在类org.encog.neural.neat.NEATUtil的用法示例。
在下文中一共展示了NEATUtil.constructNEATTrainer方法的4个代码示例,这些例子默认根据受欢迎程度排序。您可以为喜欢或者感觉有用的代码点赞,您的评价将有助于系统推荐出更棒的Java代码示例。
示例1: runEvolvedRace
import org.encog.neural.neat.NEATUtil; //导入方法依赖的package包/类
/**
 * Loads a previously evolved NEAT population from the given file, decodes its
 * best genome into a network-backed driver, and runs a single one-lap GUI race
 * on the "aalborg" road track with that driver as the sole competitor.
 *
 * @param nnFile path to the serialized NEAT population to load
 */
private void runEvolvedRace(String nnFile) {
    NEATPopulation population = loadNEAT(nnFile);
    TrainEA trainer = NEATUtil.constructNEATTrainer(population, new DriverFitnessScore());
    // Decode the fittest genome of the loaded population into a runnable network.
    NEATNetwork bestNetwork = (NEATNetwork) trainer.getCODEC().decode(population.getBestGenome());
    NeuralNetworkController controller = new EvolvedController(bestNetwork);
    DefaultDriverGenome driverGenome = new DefaultDriverGenome(controller);
    DefaultDriver competitor = new DefaultDriver();
    competitor.loadGenome(driverGenome);
    // Configure and launch the race with a GUI.
    Race race = new Race();
    race.setTrack("road", "aalborg");
    race.setTermination(Race.Termination.LAPS, 1);
    race.setStage(Controller.Stage.RACE);
    race.addCompetitor(competitor);
    race.runWithGUI();
}
示例2: main
import org.encog.neural.neat.NEATUtil; //导入方法依赖的package包/类
/**
 * Evolves a NEAT population on the XOR training set until the best error
 * drops to 0.01 or below, then prints an evaluation of the winning network.
 *
 * @param args not used
 */
public static void main(final String args[]) {
    MLDataSet trainingSet = new BasicMLDataSet(XOR_INPUT, XOR_IDEAL);
    NEATPopulation pop = new NEATPopulation(2, 1, 1000);
    pop.setInitialConnectionDensity(1.0); // not required, but speeds training
    pop.reset();
    CalculateScore score = new TrainingSetScore(trainingSet);
    // train the neural network
    final EvolutionaryAlgorithm train = NEATUtil.constructNEATTrainer(pop, score);
    // Keep iterating generations until the error target is reached.
    while (true) {
        train.iteration();
        System.out.println("Epoch #" + train.getIteration() + " Error:" + train.getError()
                + ", Species:" + pop.getSpecies().size());
        if (train.getError() <= 0.01) {
            break;
        }
    }
    NEATNetwork network = (NEATNetwork) train.getCODEC().decode(train.getBestGenome());
    // test the neural network
    System.out.println("Neural Network Results:");
    EncogUtility.evaluate(network, trainingSet);
    Encog.getInstance().shutdown();
}
示例3: resetTraining
import org.encog.neural.neat.NEATUtil; //导入方法依赖的package包/类
/**
 * Re-initializes the HyperNEAT population and trainer for the Boxes
 * experiment on an 11x11 sandwich substrate, and installs an
 * OriginalNEATSpeciation strategy with a compatibility threshold of 1.
 */
public void resetTraining() {
    Substrate substrate = SubstrateFactory.factorSandwichSubstrate(11, 11);
    BoxesScore score = new BoxesScore(11);
    this.pop = new NEATPopulation(substrate, 500);
    this.pop.setActivationCycles(4);
    this.pop.reset();
    this.train = NEATUtil.constructNEATTrainer(this.pop, score);
    OriginalNEATSpeciation speciation = new OriginalNEATSpeciation();
    speciation.setCompatibilityThreshold(1);
    // BUG FIX: the original called setSpeciation(speciation = new
    // OriginalNEATSpeciation()), which handed the trainer a brand-new,
    // unconfigured object and silently discarded the compatibility
    // threshold set above. Pass the configured instance instead.
    this.train.setSpeciation(speciation);
    // train.setThreadCount(1);
}
示例4: main
import org.encog.neural.neat.NEATUtil; //导入方法依赖的package包/类
/**
 * The main method. Evolves a NEAT population on the XOR training set until
 * the best error reaches 0.01 or below, then evaluates the winning network.
 *
 * @param args No arguments are used.
 */
public static void main(final String args[]) {
    // NOTE(review): a ~30-line commented-out backprop (ResilientPropagation)
    // version of this example was removed here; dead commented-out code
    // should live in version control history, not in the source.
    MLDataSet trainingSet = new BasicMLDataSet(XOR_INPUT, XOR_IDEAL);
    NEATPopulation pop = new NEATPopulation(2, 1, 1000);
    pop.setInitialConnectionDensity(1.0); // not required, but speeds training
    pop.reset();
    CalculateScore score = new TrainingSetScore(trainingSet);
    // train the neural network
    final EvolutionaryAlgorithm train = NEATUtil.constructNEATTrainer(pop, score);
    do {
        train.iteration();
        System.out.println("Epoch #" + train.getIteration() + " Error:" + train.getError()
                + ", Species:" + pop.getSpecies().size());
    } while (train.getError() > 0.01);
    NEATNetwork network = (NEATNetwork) train.getCODEC().decode(train.getBestGenome());
    // test the neural network
    System.out.println("Neural Network Results:");
    EncogUtility.evaluate(network, trainingSet);
    Encog.getInstance().shutdown();
}