当前位置: 首页>>代码示例>>Java>>正文


Java MLDataSet类代码示例

本文整理汇总了Java中org.encog.ml.data.MLDataSet的典型用法代码示例。如果您正苦于以下问题:Java MLDataSet类的具体用法?Java MLDataSet怎么用?Java MLDataSet使用的例子?那么, 这里精选的类代码示例或许可以为您提供帮助。


MLDataSet类属于org.encog.ml.data包,在下文中一共展示了MLDataSet类的15个代码示例,这些例子默认根据受欢迎程度排序。您可以为喜欢或者感觉有用的代码点赞,您的评价将有助于系统推荐出更棒的Java代码示例。

示例1: HandWritingInputDisplay

import org.encog.ml.data.MLDataSet; //导入依赖的package包/类
/**
 * Builds the display state from a training set: caches each sample's raw
 * pixel values and decodes its ideal vector into a digit label, then opens
 * a window showing the first image and listens for key events.
 *
 * @param training the training data; each pair's ideal array is assumed to
 *                 be one-hot encoded (exactly one element equal to 1) —
 *                 otherwise the scan below runs past the array end, as in
 *                 the original implementation
 */
public HandWritingInputDisplay(MLDataSet training) {
	final int recordCount = (int) training.getRecordCount();
	this.imageValues = new double[recordCount][];
	this.numberValues = new int[recordCount];

	int row = 0;
	for (MLDataPair sample : training) {
		this.imageValues[row] = sample.getInputArray();

		// Find the hot position in the ideal vector and map it to a digit.
		int hot = 0;
		while (sample.getIdealArray()[hot] != 1) {
			hot++;
		}
		this.numberValues[row] = (hot + 1) % 10;
		row++;
	}

	this.currentImageIndex = 0;
	rp = new ResizeProcessor(200, 200);
	JFrame frame = DisplayUtilities.displayName(this.getCurrentImage(), "numbers");
	frame.addKeyListener(this);
}
 
开发者ID:openimaj,项目名称:openimaj,代码行数:19,代码来源:HandWritingInputDisplay.java

示例2: Gradient

import org.encog.ml.data.MLDataSet; //导入依赖的package包/类
/**
 * Construct a gradient worker that computes error gradients for a flat
 * network over a training data set.
 *
 * @param theNetwork
 *            The flat network whose gradients are computed.
 * @param theTraining
 *            The training data.
 * @param flatSpot
 *            Per-activation-function flat-spot correction values added to
 *            the derivative during backpropagation (one entry per
 *            activation function of the network).
 * @param ef
 *            The error function used to compute the output-layer error.
 */
public Gradient(final FlatNetwork theNetwork, final MLDataSet theTraining, final double[] flatSpot, ErrorFunction ef) {
    this.network = theNetwork;
    this.training = theTraining;
    this.flatSpot = flatSpot;
    this.errorFunction = ef;

    // Scratch buffers sized from the network topology.
    this.layerDelta = new double[getNetwork().getLayerOutput().length];
    this.gradients = new double[getNetwork().getWeights().length];
    this.actual = new double[getNetwork().getOutputCount()];

    // Cache direct references to the network's internal arrays/indices so
    // the training loop avoids repeated accessor calls.
    this.weights = getNetwork().getWeights();
    this.layerIndex = getNetwork().getLayerIndex();
    this.layerCounts = getNetwork().getLayerCounts();
    this.weightIndex = getNetwork().getWeightIndex();
    this.layerOutput = getNetwork().getLayerOutput();
    this.layerSums = getNetwork().getLayerSums();
    this.layerFeedCounts = getNetwork().getLayerFeedCounts();

    // Reusable input/ideal pair sized to the network's I/O counts.
    this.pair = BasicMLDataPair.createPair(getNetwork().getInputCount(), getNetwork().getOutputCount());
}
 
开发者ID:ShifuML,项目名称:guagua,代码行数:35,代码来源:Gradient.java

示例3: main

import org.encog.ml.data.MLDataSet; //导入依赖的package包/类
/**
 * Trains a NEAT population to solve XOR and prints the evolved network's
 * per-pattern results.
 *
 * @param args unused
 */
public static void main(final String args[]) {

    // Wrap the XOR truth table as an Encog data set.
    MLDataSet xorData = new BasicMLDataSet(XOR_INPUT, XOR_IDEAL);

    // 2 inputs, 1 output, population of up to 1000 genomes.
    NEATPopulation population = new NEATPopulation(2, 1, 1000);
    population.setInitialConnectionDensity(1.0);// not required, but speeds training
    population.reset();

    // Fitness is measured as error against the training set.
    CalculateScore fitness = new TrainingSetScore(xorData);
    final EvolutionaryAlgorithm trainer = NEATUtil.constructNEATTrainer(population, fitness);

    // Evolve until the error drops below 1%.
    do {
        trainer.iteration();
        System.out.println("Epoch #" + trainer.getIteration() + " Error:" + trainer.getError() + ", Species:" + population.getSpecies().size());
    } while (trainer.getError() > 0.01);

    // Decode the best genome into a runnable network and evaluate it.
    NEATNetwork evolved = (NEATNetwork) trainer.getCODEC().decode(trainer.getBestGenome());

    System.out.println("Neural Network Results:");
    EncogUtility.evaluate(evolved, xorData);

    Encog.getInstance().shutdown();
}
 
开发者ID:jeffheaton,项目名称:aifh,代码行数:26,代码来源:NEATXORExample.java

示例4: main

import org.encog.ml.data.MLDataSet; //导入依赖的package包/类
/**
 * The main method.
 * @param args No arguments are used.
 */
/**
 * The main method: builds a 2-3-1 sigmoid network by hand, trains it on the
 * XOR truth table with resilient propagation, then prints the trained
 * network's output for every pattern.
 *
 * @param args No arguments are used.
 */
public static void main(final String args[]) {

    // Assemble the network layer by layer rather than via a factory.
    BasicNetwork net = new BasicNetwork();
    net.addLayer(new BasicLayer(null, true, 2));                    // input + bias
    net.addLayer(new BasicLayer(new ActivationSigmoid(), true, 3)); // hidden + bias
    net.addLayer(new BasicLayer(new ActivationSigmoid(), false, 1));// output
    net.getStructure().finalizeStructure();
    net.reset();

    // XOR training patterns.
    MLDataSet xorSet = new BasicMLDataSet(XOR_INPUT, XOR_IDEAL);

    // Train with resilient propagation until error falls below 1%.
    final ResilientPropagation rprop = new ResilientPropagation(net, xorSet);

    int iteration = 1;
    do {
        rprop.iteration();
        System.out.println("Epoch #" + iteration + " Error:" + rprop.getError());
        iteration++;
    } while (rprop.getError() > 0.01);
    rprop.finishTraining();

    // Show the trained network's response to each pattern.
    System.out.println("Neural Network Results:");
    for (MLDataPair sample : xorSet) {
        final MLData result = net.compute(sample.getInput());
        System.out.println(sample.getInput().getData(0) + "," + sample.getInput().getData(1)
                + ", actual=" + result.getData(0) + ",ideal=" + sample.getIdeal().getData(0));
    }

    Encog.getInstance().shutdown();
}
 
开发者ID:neo4j-contrib,项目名称:neo4j-ml-procedures,代码行数:40,代码来源:XORHelloWorld.java

示例5: initGradient

import org.encog.ml.data.MLDataSet; //导入依赖的package包/类
/**
 * (Re)creates the local Gradient worker using weight values received from
 * the master, so each training pass starts from the globally merged weights.
 *
 * @param training the training data slice this worker computes gradients on
 * @param weights  the flattened network weights pushed down by the master
 */
private void initGradient(MLDataSet training, double[] weights) {
    BasicNetwork net = NNUtils.generateNetwork(this.inputs, this.hiddens, this.outputs);
    // use the weights from master
    net.getFlat().setWeights(weights);

    FlatNetwork flat = net.getFlat();
    // copy Propagation from encog: 0.1 flat-spot correction for sigmoid
    // activations, 0 for everything else.
    double[] flatSpotFix = new double[flat.getActivationFunctions().length];
    for (int i = 0; i < flatSpotFix.length; i++) {
        flatSpotFix[i] = (flat.getActivationFunctions()[i] instanceof ActivationSigmoid) ? 0.1 : 0.0;
    }

    this.gradient = new Gradient(flat, training, flatSpotFix, new LinearErrorFunction());
}
 
开发者ID:ShifuML,项目名称:guagua,代码行数:15,代码来源:NNWorker.java

示例6: main

import org.encog.ml.data.MLDataSet; //导入依赖的package包/类
/**
 * The main method.
 * @param args No arguments are used.
 */
/**
 * The main method: constructs a 2-3-1 sigmoid network without a factory,
 * trains it on XOR via resilient propagation, and prints per-pattern
 * actual-vs-ideal results.
 *
 * @param args No arguments are used.
 */
public static void main(final String args[]) {

	// Hand-built topology: 2 biased inputs -> 3 biased sigmoid -> 1 sigmoid.
	BasicNetwork xorNet = new BasicNetwork();
	xorNet.addLayer(new BasicLayer(null, true, 2));
	xorNet.addLayer(new BasicLayer(new ActivationSigmoid(), true, 3));
	xorNet.addLayer(new BasicLayer(new ActivationSigmoid(), false, 1));
	xorNet.getStructure().finalizeStructure();
	xorNet.reset();

	// Training patterns.
	MLDataSet patterns = new BasicMLDataSet(XOR_INPUT, XOR_IDEAL);

	// Resilient propagation trainer; loop until error drops below 1%.
	final ResilientPropagation trainer = new ResilientPropagation(xorNet, patterns);

	int epochNumber = 1;
	do {
		trainer.iteration();
		System.out.println("Epoch #" + epochNumber + " Error:" + trainer.getError());
		epochNumber++;
	} while (trainer.getError() > 0.01);
	trainer.finishTraining();

	// Evaluate each pattern against the trained network.
	System.out.println("Neural Network Results:");
	for (MLDataPair p : patterns) {
		final MLData out = xorNet.compute(p.getInput());
		System.out.println(p.getInput().getData(0) + "," + p.getInput().getData(1)
				+ ", actual=" + out.getData(0) + ",ideal=" + p.getIdeal().getData(0));
	}

	Encog.getInstance().shutdown();
}
 
开发者ID:encog,项目名称:encog-sample-java,代码行数:40,代码来源:HelloWorld.java

示例7: OurSVMTrain

import org.encog.ml.data.MLDataSet; //导入依赖的package包/类
/**
 * Construct a trainer for an SVM network.
 * 
 * @param method
 *            The method to train.
 * @param training
 *            The training data for this network.
 */
/**
 * Construct an iterative trainer for an SVM network.
 *
 * @param method
 *            The SVM to train.
 * @param training
 *            The training data for this network.
 */
public OurSVMTrain(final SVM method, final MLDataSet training) {
	super(TrainingImplementationType.Iterative);

	this.network = method;
	this.isSetup = false;
	this.trainingDone = false;
	setTraining(training);

	// Delegate the actual SVM fitting to Encog's own trainer.
	this.internalTrain = new SVMTrain(network, training);
}
 
开发者ID:taochen,项目名称:ssascaling,代码行数:18,代码来源:OurSVMTrain.java

示例8: GradientDecent

import org.encog.ml.data.MLDataSet; //导入依赖的package包/类
/**
 * Construct a gradient-descent updater for a reservoir network.
 *
 * @param reservoir    the reservoir network whose readout is trained
 * @param trainingSet  the training data
 * @param learningRate the step size applied per update
 */
public GradientDecent(ReservoirNetwork reservoir, MLDataSet trainingSet, double learningRate){
	this.reservoir = reservoir;
	this.trainingSet = trainingSet;
	this.learningRate = learningRate;
	//this.deltaWeights = new double [reservoir.getReservior().getNeuronCount() * reservoir.getReadout().getNumberOutputs()];
	// NOTE(review): the allocation above is commented out, so Arrays.fill
	// below throws NullPointerException unless deltaWeights is initialized
	// at its field declaration — verify against the class definition.
	Arrays.fill(deltaWeights, 0.0);
}
 
开发者ID:wil3,项目名称:lacus,代码行数:8,代码来源:GradientDecent.java

示例9: ReservoirSnapshot

import org.encog.ml.data.MLDataSet; //导入依赖的package包/类
/**
 * Capture a snapshot of a reservoir network driven by a training set.
 *
 * @param reservoir   the reservoir network to snapshot
 * @param trainingSet the data used to drive the reservoir
 * @param alpha       regularization/mixing coefficient stored for later use
 *                    — NOTE(review): exact semantics not visible here,
 *                    confirm against the class's usage of {@code alpha}
 */
public ReservoirSnapshot(ReservoirNetwork reservoir, MLDataSet trainingSet,double alpha){
	this.net = reservoir;
	this.dataset = trainingSet;
	this.alpha = alpha;
			
	// accumVoltages() returns the recorded state count; presumably it also
	// drives the reservoir over the data set — TODO confirm in its definition.
	this.N_STATES = accumVoltages();
	logger.info("STATES: " + N_STATES);
	// target() builds the desired-output matrix Y for readout training
	// — semantics inferred from the name; verify.
	this.Y = target();
}
 
开发者ID:wil3,项目名称:lacus,代码行数:10,代码来源:ReservoirSnapshot.java

示例10: getTrainingData

import org.encog.ml.data.MLDataSet; //导入依赖的package包/类
/**
 * Returns the training data set held by this worker.
 *
 * @return the current training data, or {@code null} if not yet set
 */
public MLDataSet getTrainingData() {
    return trainingData;
}
 
开发者ID:ShifuML,项目名称:guagua,代码行数:4,代码来源:NNWorker.java

示例11: setTrainingData

import org.encog.ml.data.MLDataSet; //导入依赖的package包/类
/**
 * Sets the training data set used by this worker.
 *
 * @param trainingData the training data to use
 */
public void setTrainingData(MLDataSet trainingData) {
    this.trainingData = trainingData;
}
 
开发者ID:ShifuML,项目名称:guagua,代码行数:4,代码来源:NNWorker.java

示例12: getTestingData

import org.encog.ml.data.MLDataSet; //导入依赖的package包/类
/**
 * Returns the testing data set held by this worker.
 *
 * @return the current testing data, or {@code null} if not yet set
 */
public MLDataSet getTestingData() {
    return testingData;
}
 
开发者ID:ShifuML,项目名称:guagua,代码行数:4,代码来源:NNWorker.java

示例13: setTestingData

import org.encog.ml.data.MLDataSet; //导入依赖的package包/类
/**
 * Sets the testing data set used by this worker.
 *
 * @param testingData the testing data to use
 */
public void setTestingData(MLDataSet testingData) {
    this.testingData = testingData;
}
 
开发者ID:ShifuML,项目名称:guagua,代码行数:4,代码来源:NNWorker.java

示例14: openAdditional

import org.encog.ml.data.MLDataSet; //导入依赖的package包/类
/**
 * Not supported by this data set implementation.
 *
 * @return never returns normally
 * @throws UnsupportedOperationException always
 */
@Override
public MLDataSet openAdditional() {
    throw new UnsupportedOperationException();
}
 
开发者ID:ShifuML,项目名称:guagua,代码行数:5,代码来源:MemoryDiskMLDataSet.java

示例15: main

import org.encog.ml.data.MLDataSet; //导入依赖的package包/类
/**
     * The main method.
     * @param args No arguments are used.
     */
    /**
     * The main method: trains a NEAT population to solve XOR and prints the
     * evolved network's per-pattern results.
     *
     * <p>An obsolete RPROP-based variant that was kept here as a large block
     * of commented-out code has been removed; only the live NEAT path
     * remains.</p>
     *
     * @param args No arguments are used.
     */
    public static void main(final String args[]) {

        // Wrap the XOR truth table as an Encog data set.
        MLDataSet trainingSet = new BasicMLDataSet(XOR_INPUT, XOR_IDEAL);

        // 2 inputs, 1 output, population of up to 1000 genomes.
        NEATPopulation pop = new NEATPopulation(2, 1, 1000);
        pop.setInitialConnectionDensity(1.0);// not required, but speeds training
        pop.reset();

        // Fitness is measured as error against the training set.
        CalculateScore score = new TrainingSetScore(trainingSet);
        // train the neural network
        final EvolutionaryAlgorithm train = NEATUtil.constructNEATTrainer(pop, score);

        // Evolve until the error drops below 1%.
        do {
            train.iteration();
            System.out.println("Epoch #" + train.getIteration() + " Error:" + train.getError() + ", Species:" + pop.getSpecies().size());
        } while (train.getError() > 0.01);

        // Decode the best genome into a runnable network.
        NEATNetwork network = (NEATNetwork) train.getCODEC().decode(train.getBestGenome());

        // test the neural network
        System.out.println("Neural Network Results:");
        EncogUtility.evaluate(network, trainingSet);

        Encog.getInstance().shutdown();

    }
 
开发者ID:robrtj,项目名称:NeuralNetworkImageCompression,代码行数:64,代码来源:XorSample.java


注:本文中的org.encog.ml.data.MLDataSet类示例由纯净天空整理自Github/MSDocs等开源代码及文档管理平台,相关代码片段筛选自各路编程大神贡献的开源项目,源码版权归原作者所有,传播和使用请参考对应项目的License;未经允许,请勿转载。