This article collects typical usage examples of the Java method org.encog.neural.networks.BasicNetwork.reset. If you have been wondering how BasicNetwork.reset is used in practice, how to call it, or where to find real examples of it, the hand-picked code samples below may help. You can also explore further usage examples of the containing class, org.encog.neural.networks.BasicNetwork.
The following section presents 13 code examples of the BasicNetwork.reset method, sorted by popularity by default. You can upvote the examples you like or find useful; your feedback helps the system recommend better Java code examples.
Example 1: createNeuralNetwork
import org.encog.neural.networks.BasicNetwork; // import the package/class this method depends on
private BasicNetwork createNeuralNetwork() {
    BasicNetwork network = new BasicNetwork();
    // input layer
    network.addLayer(new BasicLayer(null, true, inputLayerSize));
    // hidden layers
    network.addLayer(new BasicLayer(new ActivationSigmoid(), true, inputLayerSize / 6));
    network.addLayer(new BasicLayer(new ActivationSigmoid(), true, inputLayerSize / 6 / 4));
    // output layer
    network.addLayer(new BasicLayer(new ActivationSigmoid(), false, outputLayerSize));
    network.getStructure().finalizeStructure();
    network.reset(); // randomize the weights
    return network;
}
Example 2: main
import org.encog.neural.networks.BasicNetwork; // import the package/class this method depends on
/**
 * The main method.
 * @param args No arguments are used.
 */
public static void main(final String args[]) {
    // create a neural network, without using a factory
    BasicNetwork network = new BasicNetwork();
    network.addLayer(new BasicLayer(null, true, 2));
    network.addLayer(new BasicLayer(new ActivationSigmoid(), true, 3));
    network.addLayer(new BasicLayer(new ActivationSigmoid(), false, 1));
    network.getStructure().finalizeStructure();
    network.reset();

    // create training data
    MLDataSet trainingSet = new BasicMLDataSet(XOR_INPUT, XOR_IDEAL);

    // train the neural network
    final ResilientPropagation train = new ResilientPropagation(network, trainingSet);
    int epoch = 1;
    do {
        train.iteration();
        System.out.println("Epoch #" + epoch + " Error:" + train.getError());
        epoch++;
    } while (train.getError() > 0.01);
    train.finishTraining();

    // test the neural network
    System.out.println("Neural Network Results:");
    for (MLDataPair pair : trainingSet) {
        final MLData output = network.compute(pair.getInput());
        System.out.println(pair.getInput().getData(0) + "," + pair.getInput().getData(1)
                + ", actual=" + output.getData(0) + ",ideal=" + pair.getIdeal().getData(0));
    }

    Encog.getInstance().shutdown();
}
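The XOR_INPUT and XOR_IDEAL constants referenced above are defined elsewhere in the source class and are not shown on this page. As an assumed sketch, in Encog's standard XOR demo they typically look like this:
public static double XOR_INPUT[][] = { { 0.0, 0.0 }, { 1.0, 0.0 }, { 0.0, 1.0 }, { 1.0, 1.0 } };
public static double XOR_IDEAL[][] = { { 0.0 }, { 1.0 }, { 1.0 }, { 0.0 } };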
Example 3: generateAgent
import org.encog.neural.networks.BasicNetwork; // import the package/class this method depends on
public static NeuralAgent generateAgent(EnvironmentMap map)
{
    BasicNetwork network = new BasicNetwork();
    network.addLayer(new BasicLayer(NeuralConstants.INPUT_NEURON_COUNT));
    network.addLayer(new BasicLayer(60));
    network.addLayer(new BasicLayer(30));
    network.addLayer(new BasicLayer(NeuralConstants.OUTPUT_NEURON_COUNT));
    network.getStructure().finalizeStructure();
    network.reset();

    NeuralAgent agent = new NeuralAgent(network, map);
    return agent;
}
Example 4: getNetwork
import org.encog.neural.networks.BasicNetwork; // import the package/class this method depends on
private BasicNetwork getNetwork() {
    BasicNetwork network = new BasicNetwork();
    network.addLayer(new BasicLayer(new ActivationSigmoid(), true, INPUTS.length));
    network.addLayer(new BasicLayer(new ActivationSigmoid(), true, 25));
    network.addLayer(new BasicLayer(new ActivationSigmoid(), true, OUTPUTS.length));
    network.getStructure().finalizeStructure();
    network.reset();
    return network;
}
Example 5: BackPropagationNeuralNet
import org.encog.neural.networks.BasicNetwork; // import the package/class this method depends on
/** Neural network structure initialization. */
public BackPropagationNeuralNet() {
    iterations = new ArrayList<>();
    errors = new ArrayList<>();
    network = new BasicNetwork();
    network.addLayer(new BasicLayer(null, true, 4));
    network.addLayer(new BasicLayer(new ActivationSigmoid(), true, 10));
    network.addLayer(new BasicLayer(new ActivationSigmoid(), false, 1));
    network.getStructure().finalizeStructure();
    network.reset();
    // overwrite the random weights with a reproducible, seeded set
    new ConsistentRandomizer(-1, 1, 500).randomize(network);
}
Example 6: ResilientPropagationNeuralNet
import org.encog.neural.networks.BasicNetwork; // import the package/class this method depends on
/** Neural network structure initialization. */
public ResilientPropagationNeuralNet() {
    iterations = new ArrayList<>();
    errors = new ArrayList<>();
    network = new BasicNetwork();
    network.addLayer(new BasicLayer(null, true, 4));
    network.addLayer(new BasicLayer(new ActivationSigmoid(), true, 10));
    network.addLayer(new BasicLayer(new ActivationSigmoid(), false, 1));
    network.getStructure().finalizeStructure();
    network.reset();
    // overwrite the random weights with a reproducible, seeded set
    new ConsistentRandomizer(-1, 1, 500).randomize(network);
}
Example 7: generateNetwork
import org.encog.neural.networks.BasicNetwork; // import the package/class this method depends on
/**
 * Generate a basic neural network object.
 */
public static BasicNetwork generateNetwork(int in, int hidden, int out) {
    final BasicNetwork network = new BasicNetwork();
    network.addLayer(new BasicLayer(new ActivationLinear(), true, in));
    network.addLayer(new BasicLayer(new ActivationSigmoid(), true, hidden));
    network.addLayer(new BasicLayer(new ActivationSigmoid(), false, out));
    network.getStructure().finalizeStructure();
    network.reset();
    return network;
}
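As a minimal usage sketch (not part of the original project), a network built by this factory could be trained with Encog's resilient propagation; the trainingSet variable here is assumed to be an MLDataSet whose input and ideal sizes match the layer sizes passed in:
// assumed: trainingSet is an MLDataSet with 2 inputs and 1 ideal output per pair
BasicNetwork network = generateNetwork(2, 4, 1);
ResilientPropagation train = new ResilientPropagation(network, trainingSet);
do {
    train.iteration();
} while (train.getError() > 0.01);
train.finishTraining();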
Example 8: main
import org.encog.neural.networks.BasicNetwork; // import the package/class this method depends on
/**
 * The main method.
 * @param args No arguments are used.
 */
public static void main(final String args[]) {
    // create a neural network, without using a factory
    BasicNetwork network = new BasicNetwork();
    network.addLayer(new BasicLayer(null, true, 2));
    network.addLayer(new BasicLayer(new ActivationSigmoid(), true, 3));
    network.addLayer(new BasicLayer(new ActivationSigmoid(), false, 1));
    network.getStructure().finalizeStructure();
    network.reset();

    // create training data
    MLDataSet trainingSet = new BasicMLDataSet(XOR_INPUT, XOR_IDEAL);

    // train the neural network
    final ResilientPropagation train = new ResilientPropagation(network, trainingSet);
    int epoch = 1;
    do {
        train.iteration();
        System.out.println("Epoch #" + epoch + " Error:" + train.getError());
        epoch++;
    } while (train.getError() > 0.01);
    train.finishTraining();

    // test the neural network
    System.out.println("Neural Network Results:");
    for (MLDataPair pair : trainingSet) {
        final MLData output = network.compute(pair.getInput());
        System.out.println(pair.getInput().getData(0) + "," + pair.getInput().getData(1)
                + ", actual=" + output.getData(0) + ",ideal=" + pair.getIdeal().getData(0));
    }

    Encog.getInstance().shutdown();
}
Example 9: buildNetwork
import org.encog.neural.networks.BasicNetwork; // import the package/class this method depends on
private BasicNetwork buildNetwork(NeuralDataSet trainingSet, int hidden, int size, Class<?> activation)
        throws InstantiationException, IllegalAccessException {
    BasicNetwork network = new BasicNetwork();
    this.currentNoOfHiddenNode = hidden;
    // instantiate the activation function reflectively for every layer
    network.addLayer(new BasicLayer((ActivationFunction) activation.newInstance(), true, size));
    network.addLayer(new BasicLayer((ActivationFunction) activation.newInstance(), true, hidden));
    network.addLayer(new BasicLayer((ActivationFunction) activation.newInstance(), true, 1));
    network.getStructure().finalizeStructure();
    network.reset();
    return network;
}
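A hypothetical call from inside the same class, illustrating the reflective activation parameter (the hidden-layer count is a placeholder, and the checked exceptions declared above would still propagate to the caller):
// build a size-10-1 network whose layers all use ActivationTANH
BasicNetwork net = buildNetwork(trainingSet, 10, trainingSet.getInputSize(), ActivationTANH.class);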
Example 10: getNetwork
import org.encog.neural.networks.BasicNetwork; // import the package/class this method depends on
private BasicNetwork getNetwork(ExampleSet exampleSet) throws OperatorException {
    BasicNetwork network = new BasicNetwork();
    // input layer
    network.addLayer(new FeedforwardLayer(exampleSet.getAttributes().size()));
    // hidden layers
    log("No hidden layers defined. Using default hidden layers.");
    int layerSize = getParameterAsInt(PARAMETER_DEFAULT_HIDDEN_LAYER_SIZE);
    if (layerSize <= 0)
        layerSize = getDefaultLayerSize(exampleSet);
    for (int p = 0; p < getParameterAsInt(PARAMETER_DEFAULT_NUMBER_OF_HIDDEN_LAYERS); p++) {
        network.addLayer(new FeedforwardLayer(layerSize));
    }
    // output layer
    if (exampleSet.getAttributes().getLabel().isNominal()) {
        network.addLayer(new FeedforwardLayer(new ActivationSigmoid(), 1));
    } else {
        network.addLayer(new FeedforwardLayer(new ActivationLinear(), 1));
    }
    // reset with a (possibly locally seeded) random generator
    network.reset(RandomGenerator.getRandomGenerator(
            getParameterAsBoolean(RandomGenerator.PARAMETER_USE_LOCAL_RANDOM_SEED),
            getParameterAsInt(RandomGenerator.PARAMETER_LOCAL_RANDOM_SEED)));
    return network;
}
Example 11: test
import org.encog.neural.networks.BasicNetwork; // import the package/class this method depends on
public static void test(double[][] inputValues, double[][] outputValues)
{
    NeuralDataSet trainingSet = new BasicNeuralDataSet(inputValues, outputValues);
    BasicNetwork network = new BasicNetwork();
    network.addLayer(new BasicLayer(new ActivationSigmoid(), false, 4));
    network.addLayer(new BasicLayer(new ActivationSigmoid(), false, 1000));
    network.addLayer(new BasicLayer(new ActivationLinear(), false, 1));
    network.getStructure().finalizeStructure();
    network.reset();

    final Train train = new ResilientPropagation(network, trainingSet);
    int epoch = 1;
    do
    {
        train.iteration();
        System.out.println("Epoch #" + epoch + " Error:" + train.getError());
        epoch++;
    }
    while (epoch < 10000);

    System.out.println("Neural Network Results:");
    for (MLDataPair pair : trainingSet)
    {
        final MLData output = network.compute(pair.getInput());
        System.out.println(pair.getInput().getData(0) + "," + pair.getInput().getData(1) + ", actual="
                + output.getData(0) + ",ideal=" + pair.getIdeal().getData(0));
    }
}
Example 12: addLayer
import org.encog.neural.networks.BasicNetwork; // import the package/class this method depends on
public void addLayer(ActivationFunction func, int nodes) {
    if (params.size() > 0) {
        buildNetwork();
        transformData();
    } else {
        intermediateDataset = this.dataset;
    }
    network = new BasicNetwork();
    network.addLayer(new BasicLayer(new ActivationLinear(), true, intermediateDataset.getInputSize()));
    network.addLayer(new BasicLayer(func, true, nodes));
    network.addLayer(new BasicLayer(new ActivationTANH(), false, intermediateDataset.getIdealSize()));
    network.getStructure().finalizeStructure();
    network.reset();
    train(nodes);
}
Example 13: newNetwork
import org.encog.neural.networks.BasicNetwork; // import the package/class this method depends on
/**
 * Create a new artificial neural network.
 *
 * @param inputSize
 *            Size of the input layer.
 * @param hiddenSize
 *            Size of the hidden layer.
 * @param outputSize
 *            Size of the output layer.
 *
 * @return The created neural network object.
 */
public static BasicNetwork newNetwork(int inputSize, int hiddenSize, int outputSize) {
    BasicNetwork net = new BasicNetwork();
    net.addLayer(new BasicLayer(null, true, inputSize));
    net.addLayer(new BasicLayer(new ActivationSigmoid(), true, hiddenSize));
    net.addLayer(new BasicLayer(new ActivationSigmoid(), false, outputSize));
    net.getStructure().finalizeStructure();
    net.reset();
    return net;
}
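As a short usage sketch (not from the original project), a network produced by this factory can be queried with compute(); note that reset() above only randomizes the weights, so without training the output values are arbitrary:
// query a freshly created 2-3-1 network with a single input pattern
BasicNetwork net = newNetwork(2, 3, 1);
MLData input = new BasicMLData(new double[] { 1.0, 0.0 });
MLData output = net.compute(input);
System.out.println("output=" + output.getData(0));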