This article collects typical usage examples of the Java class org.encog.ml.data.MLDataSet. If you are unsure what MLDataSet is for or how to use it, the curated examples below should help.
The MLDataSet class belongs to the org.encog.ml.data package. 14 code examples drawn from open-source projects are shown below, ordered by popularity.
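Before the examples, here is a minimal sketch of how an MLDataSet is typically constructed and iterated; the XOR data is chosen purely for illustration:

import org.encog.ml.data.MLDataPair;
import org.encog.ml.data.MLDataSet;
import org.encog.ml.data.basic.BasicMLDataSet;

public class MLDataSetSketch {
    public static void main(String[] args) {
        // each row of input is one sample; ideal holds the expected output
        double[][] input = { {0, 0}, {0, 1}, {1, 0}, {1, 1} };
        double[][] ideal = { {0}, {1}, {1}, {0} };
        MLDataSet set = new BasicMLDataSet(input, ideal);
        // MLDataSet is Iterable<MLDataPair>, so it works in a for-each loop
        for (MLDataPair pair : set) {
            System.out.println(pair.getInput() + " -> " + pair.getIdeal());
        }
    }
}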
Example 1: HandWritingInputDisplay

import org.encog.ml.data.MLDataSet; // import the required package/class

public HandWritingInputDisplay(MLDataSet training) {
    this.imageValues = new double[(int) training.getRecordCount()][];
    this.numberValues = new int[(int) training.getRecordCount()];
    int index = 0;
    for (MLDataPair mlDataPair : training) {
        this.imageValues[index] = mlDataPair.getInputArray();
        // find the index of the 1 in the one-hot ideal array
        int yIndex = 0;
        while (mlDataPair.getIdealArray()[yIndex] != 1) {
            yIndex++;
        }
        // convert the one-hot index to the digit it represents
        this.numberValues[index] = (yIndex + 1) % 10;
        index++;
    }
    this.currentImageIndex = 0;
    rp = new ResizeProcessor(200, 200);
    JFrame frame = DisplayUtilities.displayName(this.getCurrentImage(), "numbers");
    frame.addKeyListener(this);
}
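A note on the label decoding above: the while loop assumes the ideal vector always contains an exact 1.0 and will run past the end of the array otherwise. A more defensive variant (a sketch, not part of the original class) picks the index of the maximum component instead:

// hypothetical helper: decode a one-hot (or soft) ideal vector via argmax
private static int argmax(double[] ideal) {
    int best = 0;
    for (int i = 1; i < ideal.length; i++) {
        if (ideal[i] > ideal[best]) {
            best = i;
        }
    }
    return best;
}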
Example 2: Gradient

import org.encog.ml.data.MLDataSet; // import the required package/class

/**
 * Construct a gradient worker.
 *
 * @param theNetwork
 *            The network to train.
 * @param theTraining
 *            The training data.
 * @param flatSpot
 *            The flat-spot offsets, one entry per activation function.
 * @param ef
 *            The error function to use.
 */
public Gradient(final FlatNetwork theNetwork, final MLDataSet theTraining, final double[] flatSpot, ErrorFunction ef) {
    this.network = theNetwork;
    this.training = theTraining;
    this.flatSpot = flatSpot;
    this.errorFunction = ef;
    this.layerDelta = new double[getNetwork().getLayerOutput().length];
    this.gradients = new double[getNetwork().getWeights().length];
    this.actual = new double[getNetwork().getOutputCount()];
    this.weights = getNetwork().getWeights();
    this.layerIndex = getNetwork().getLayerIndex();
    this.layerCounts = getNetwork().getLayerCounts();
    this.weightIndex = getNetwork().getWeightIndex();
    this.layerOutput = getNetwork().getLayerOutput();
    this.layerSums = getNetwork().getLayerSums();
    this.layerFeedCounts = getNetwork().getLayerFeedCounts();
    this.pair = BasicMLDataPair.createPair(getNetwork().getInputCount(), getNetwork().getOutputCount());
}
Example 3: main

import org.encog.ml.data.MLDataSet; // import the required package/class

public static void main(final String args[]) {
    MLDataSet trainingSet = new BasicMLDataSet(XOR_INPUT, XOR_IDEAL);
    NEATPopulation pop = new NEATPopulation(2, 1, 1000);
    pop.setInitialConnectionDensity(1.0); // not required, but speeds training
    pop.reset();
    CalculateScore score = new TrainingSetScore(trainingSet);
    // train the neural network
    final EvolutionaryAlgorithm train = NEATUtil.constructNEATTrainer(pop, score);
    do {
        train.iteration();
        System.out.println("Epoch #" + train.getIteration() + " Error:" + train.getError() + ", Species:" + pop.getSpecies().size());
    } while (train.getError() > 0.01);
    NEATNetwork network = (NEATNetwork) train.getCODEC().decode(train.getBestGenome());
    // test the neural network
    System.out.println("Neural Network Results:");
    EncogUtility.evaluate(network, trainingSet);
    Encog.getInstance().shutdown();
}
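TrainingSetScore scores each candidate network by its error on the training set. If you need a different fitness criterion, Encog's CalculateScore interface can be implemented directly; here is a minimal sketch, assuming the method under evaluation is an MLRegression:

import org.encog.ml.CalculateScore;
import org.encog.ml.MLMethod;
import org.encog.ml.MLRegression;
import org.encog.ml.data.MLDataSet;
import org.encog.util.simple.EncogUtility;

public class RegressionErrorScore implements CalculateScore {
    private final MLDataSet training;

    public RegressionErrorScore(MLDataSet training) {
        this.training = training;
    }

    @Override
    public double calculateScore(MLMethod method) {
        // score by the error over the training data
        return EncogUtility.calculateRegressionError((MLRegression) method, training);
    }

    @Override
    public boolean shouldMinimize() {
        return true; // lower error is better
    }

    @Override
    public boolean requireSingleThreaded() {
        return false;
    }
}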
Example 4: main

import org.encog.ml.data.MLDataSet; // import the required package/class

/**
 * The main method.
 * @param args No arguments are used.
 */
public static void main(final String args[]) {
    // create a neural network, without using a factory
    BasicNetwork network = new BasicNetwork();
    network.addLayer(new BasicLayer(null, true, 2));
    network.addLayer(new BasicLayer(new ActivationSigmoid(), true, 3));
    network.addLayer(new BasicLayer(new ActivationSigmoid(), false, 1));
    network.getStructure().finalizeStructure();
    network.reset();
    // create training data
    MLDataSet trainingSet = new BasicMLDataSet(XOR_INPUT, XOR_IDEAL);
    // train the neural network
    final ResilientPropagation train = new ResilientPropagation(network, trainingSet);
    int epoch = 1;
    do {
        train.iteration();
        System.out.println("Epoch #" + epoch + " Error:" + train.getError());
        epoch++;
    } while (train.getError() > 0.01);
    train.finishTraining();
    // test the neural network
    System.out.println("Neural Network Results:");
    for (MLDataPair pair : trainingSet) {
        final MLData output = network.compute(pair.getInput());
        System.out.println(pair.getInput().getData(0) + "," + pair.getInput().getData(1)
                + ", actual=" + output.getData(0) + ",ideal=" + pair.getIdeal().getData(0));
    }
    Encog.getInstance().shutdown();
}
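Once a network reaches the target error, it is often persisted for later reuse. A minimal sketch using Encog's directory persistence (the file name is illustrative):

import java.io.File;
import org.encog.neural.networks.BasicNetwork;
import org.encog.persist.EncogDirectoryPersistence;

public class PersistSketch {
    // save a trained network to an .eg file and load it back
    public static BasicNetwork roundTrip(BasicNetwork network) {
        File f = new File("xor.eg"); // example path
        EncogDirectoryPersistence.saveObject(f, network);
        return (BasicNetwork) EncogDirectoryPersistence.loadObject(f);
    }
}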
Example 5: initGradient

import org.encog.ml.data.MLDataSet; // import the required package/class

private void initGradient(MLDataSet training, double[] weights) {
    BasicNetwork network = NNUtils.generateNetwork(this.inputs, this.hiddens, this.outputs);
    // use the weights from the master
    network.getFlat().setWeights(weights);
    FlatNetwork flat = network.getFlat();
    // flat-spot setup copied from Encog's Propagation
    double[] flatSpot = new double[flat.getActivationFunctions().length];
    for (int i = 0; i < flat.getActivationFunctions().length; i++) {
        flatSpot[i] = flat.getActivationFunctions()[i] instanceof ActivationSigmoid ? 0.1 : 0.0;
    }
    this.gradient = new Gradient(flat, training, flatSpot, new LinearErrorFunction());
}
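The 0.1 constant above is the classic flat-spot fix: a sigmoid's derivative approaches zero when the unit saturates, so weight updates stall. Adding a small offset to the derivative keeps the gradient alive. Conceptually, the offset is folded in as sketched below; the identifiers are illustrative, not Encog's internals:

// illustrative only: how a flat-spot offset enters a backpropagated delta
static double flatSpotDelta(org.encog.engine.network.activation.ActivationFunction act,
        double layerSum, double layerOutput, double errorSignal, double flatSpotOffset) {
    double derivative = act.derivativeFunction(layerSum, layerOutput) + flatSpotOffset;
    return derivative * errorSignal;
}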
Example 6: OurSVMTrain

import org.encog.ml.data.MLDataSet; // import the required package/class

/**
 * Construct a trainer for an SVM network.
 *
 * @param method
 *            The method to train.
 * @param training
 *            The training data for this network.
 */
public OurSVMTrain(final SVM method, final MLDataSet training) {
    super(TrainingImplementationType.Iterative);
    this.network = method;
    setTraining(training);
    this.isSetup = false;
    this.trainingDone = false;
    this.internalTrain = new SVMTrain(network, training);
}
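This class wraps Encog's own SVMTrain. For context, a minimal sketch of the underlying trainer on its own; the SVM configuration shown is one plausible choice, not taken from the example's project:

import org.encog.ml.data.MLDataSet;
import org.encog.ml.data.basic.BasicMLDataSet;
import org.encog.ml.svm.KernelType;
import org.encog.ml.svm.SVM;
import org.encog.ml.svm.SVMType;
import org.encog.ml.svm.training.SVMTrain;

public class SvmSketch {
    public static void main(String[] args) {
        double[][] input = { {0, 0}, {0, 1}, {1, 0}, {1, 1} };
        double[][] ideal = { {0}, {1}, {1}, {0} };
        MLDataSet trainingSet = new BasicMLDataSet(input, ideal);
        // epsilon-SVR with an RBF kernel is one reasonable default
        SVM svm = new SVM(2, SVMType.EpsilonSupportVectorRegression, KernelType.RadialBasisFunction);
        SVMTrain train = new SVMTrain(svm, trainingSet);
        train.iteration(); // libsvm solves in one pass, so a single iteration suffices
    }
}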
Example 7: GradientDecent

import org.encog.ml.data.MLDataSet; // import the required package/class

public GradientDecent(ReservoirNetwork reservoir, MLDataSet trainingSet, double learningRate) {
    this.reservoir = reservoir;
    this.trainingSet = trainingSet;
    this.learningRate = learningRate;
    // deltaWeights is presumably allocated at its field declaration;
    // the original sizing was left commented out:
    //this.deltaWeights = new double[reservoir.getReservior().getNeuronCount() * reservoir.getReadout().getNumberOutputs()];
    Arrays.fill(deltaWeights, 0.0);
}
Example 8: ReservoirSnapshot

import org.encog.ml.data.MLDataSet; // import the required package/class

public ReservoirSnapshot(ReservoirNetwork reservoir, MLDataSet trainingSet, double alpha) {
    this.net = reservoir;
    this.dataset = trainingSet;
    this.alpha = alpha;
    this.N_STATES = accumVoltages();
    logger.info("STATES: " + N_STATES);
    this.Y = target();
}
Example 9: getTrainingData

import org.encog.ml.data.MLDataSet; // import the required package/class

public MLDataSet getTrainingData() {
    return trainingData;
}

Example 10: setTrainingData

import org.encog.ml.data.MLDataSet; // import the required package/class

public void setTrainingData(MLDataSet trainingData) {
    this.trainingData = trainingData;
}

Example 11: getTestingData

import org.encog.ml.data.MLDataSet; // import the required package/class

public MLDataSet getTestingData() {
    return testingData;
}

Example 12: setTestingData

import org.encog.ml.data.MLDataSet; // import the required package/class

public void setTestingData(MLDataSet testingData) {
    this.testingData = testingData;
}
Example 13: openAdditional

import org.encog.ml.data.MLDataSet; // import the required package/class

@Override
public MLDataSet openAdditional() {
    throw new UnsupportedOperationException();
}
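openAdditional exists so that multi-threaded trainers can obtain an extra, concurrently usable view of the same data; throwing UnsupportedOperationException simply opts out of that. For an in-memory set, a supporting implementation can be as small as the following sketch, which mirrors what BasicMLDataSet itself does:

import java.util.List;
import org.encog.ml.data.MLDataPair;
import org.encog.ml.data.MLDataSet;
import org.encog.ml.data.basic.BasicMLDataSet;

// a minimal sketch: an in-memory set that supports openAdditional
public class SharedMemoryDataSet extends BasicMLDataSet {
    public SharedMemoryDataSet(List<MLDataPair> data) {
        super(data);
    }

    @Override
    public MLDataSet openAdditional() {
        // hand back a new view over the same backing pairs;
        // safe as long as trainers only read the data
        return new BasicMLDataSet(getData());
    }
}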
Example 14: main

import org.encog.ml.data.MLDataSet; // import the required package/class

/**
 * The main method.
 * @param args No arguments are used.
 */
public static void main(final String args[]) {
    // // create a neural network, without using a factory
    // BasicNetwork network = new BasicNetwork();
    // network.addLayer(new BasicLayer(null,true,2));
    // network.addLayer(new BasicLayer(new ActivationSigmoid(),true,3));
    // network.addLayer(new BasicLayer(new ActivationSigmoid(),false,1));
    // network.getStructure().finalizeStructure();
    // network.reset();
    //
    // // create training data
    // MLDataSet trainingSet = new BasicMLDataSet(XOR_INPUT, XOR_IDEAL);
    //
    // // train the neural network
    // final ResilientPropagation train = new ResilientPropagation(network, trainingSet);
    //
    // int epoch = 1;
    //
    // do {
    //     train.iteration();
    //     System.out.println("Epoch #" + epoch + " Error:" + train.getError());
    //     epoch++;
    // } while(train.getError() > 0.01);
    // train.finishTraining();
    //
    // // test the neural network
    // System.out.println("Neural Network Results:");
    // for(MLDataPair pair: trainingSet ) {
    //     final MLData output = network.compute(pair.getInput());
    //     System.out.println(pair.getInput().getData(0) + "," + pair.getInput().getData(1)
    //             + ", actual=" + output.getData(0) + ",ideal=" + pair.getIdeal().getData(0));
    // }
    //
    // Encog.getInstance().shutdown();
    MLDataSet trainingSet = new BasicMLDataSet(XOR_INPUT, XOR_IDEAL);
    NEATPopulation pop = new NEATPopulation(2, 1, 1000);
    pop.setInitialConnectionDensity(1.0); // not required, but speeds training
    pop.reset();
    CalculateScore score = new TrainingSetScore(trainingSet);
    // train the neural network
    final EvolutionaryAlgorithm train = NEATUtil.constructNEATTrainer(pop, score);
    do {
        train.iteration();
        System.out.println("Epoch #" + train.getIteration() + " Error:" + train.getError() + ", Species:" + pop.getSpecies().size());
    } while (train.getError() > 0.01);
    NEATNetwork network = (NEATNetwork) train.getCODEC().decode(train.getBestGenome());
    // test the neural network
    System.out.println("Neural Network Results:");
    EncogUtility.evaluate(network, trainingSet);
    Encog.getInstance().shutdown();
}