This article collects typical usage examples of the Java class org.encog.engine.network.activation.ActivationFunction. If you are wondering what ActivationFunction is for or how to use it, the examples selected below may help.
The ActivationFunction class belongs to the org.encog.engine.network.activation package. Five code examples are shown, ordered by popularity.
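For orientation, the core of the interface looks roughly like this (abridged; based on the Encog 3.x source, so consult the official repository for the authoritative version):

import java.io.Serializable;

public interface ActivationFunction extends Serializable, Cloneable {
    // Apply the activation in place to d[start..start+size).
    void activationFunction(double[] d, int start, int size);

    // Derivative, given the pre-activation sum b and the post-activation output a.
    double derivativeFunction(double b, double a);

    // True if derivativeFunction is implemented (required for propagation training).
    boolean hasDerivative();
}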
Example 1: processLevel
import org.encog.engine.network.activation.ActivationFunction; // import the required package/class
/**
 * Process one level of the network, accumulating gradients and
 * back-propagating the layer deltas.
 *
 * @param currentLevel
 *            The index of the level to process.
 */
private void processLevel(final int currentLevel) {
    final int fromLayerIndex = this.layerIndex[currentLevel + 1];
    final int toLayerIndex = this.layerIndex[currentLevel];
    final int fromLayerSize = this.layerCounts[currentLevel + 1];
    final int toLayerSize = this.layerFeedCounts[currentLevel];
    final int index = this.weightIndex[currentLevel];
    final ActivationFunction activation = this.getNetwork().getActivationFunctions()[currentLevel + 1];
    final double currentFlatSpot = this.flatSpot[currentLevel + 1];

    // Handle weights: for each neuron in the "from" layer, accumulate the
    // gradient of every weight feeding the "to" layer, and sum the weighted
    // deltas flowing back to this neuron.
    int yi = fromLayerIndex;
    for (int y = 0; y < fromLayerSize; y++) {
        final double output = this.layerOutput[yi];
        double sum = 0;
        int xi = toLayerIndex;
        int wi = index + y;
        for (int x = 0; x < toLayerSize; x++) {
            this.gradients[wi] += output * this.getLayerDelta()[xi];
            sum += this.weights[wi] * this.getLayerDelta()[xi];
            wi += fromLayerSize;
            xi++;
        }
        // Scale by the activation derivative; the flat-spot constant keeps the
        // derivative away from zero so that learning does not stall.
        this.getLayerDelta()[yi] = sum
                * (activation.derivativeFunction(this.layerSums[yi], this.layerOutput[yi]) + currentFlatSpot);
        yi++;
    }
}
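Note that derivativeFunction receives both the pre-activation sum (layerSums[yi]) and the post-activation output (layerOutput[yi]); depending on the function, one or the other makes the derivative cheaper to compute. A minimal standalone sketch of that call using ActivationSigmoid (the values are purely illustrative):

import org.encog.engine.network.activation.ActivationSigmoid;

public class DerivativeDemo {
    public static void main(String[] args) {
        ActivationSigmoid sigmoid = new ActivationSigmoid();
        double[] v = { 0.5 };            // pre-activation sum
        double b = v[0];
        sigmoid.activationFunction(v, 0, v.length); // v[0] is now sigmoid(0.5)
        // For sigmoid, Encog computes the derivative from the output alone: a * (1 - a).
        double d = sigmoid.derivativeFunction(b, v[0]);
        System.out.println("output=" + v[0] + " derivative=" + d);
    }
}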
Example 2: MLParams
import org.encog.engine.network.activation.ActivationFunction; // import the required package/class
public MLParams(double[] weights, ActivationFunction func, int nodes) {
    this.weights = weights;
    this.func = func;
    this.nodes = nodes;
}
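MLParams is simply a value holder for one trained layer's weights, activation function, and node count; Example 5 below shows where instances are created and collected into the params list.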
Example 3: setFunc
import org.encog.engine.network.activation.ActivationFunction; // import the required package/class
public void setFunc(ActivationFunction func) {
    this.func = func;
}
Example 4: addLayer
import org.encog.engine.network.activation.ActivationFunction; // import the required package/class
public void addLayer(ActivationFunction func, int nodes) {
    if (params.size() > 0) {
        // A previous layer exists: rebuild the stacked network and transform
        // the dataset through it, so the new layer trains on its output.
        buildNetwork();
        transformData();
    } else {
        intermediateDataset = this.dataset;
    }
    network = new BasicNetwork();
    network.addLayer(new BasicLayer(new ActivationLinear(), true, intermediateDataset.getInputSize()));
    network.addLayer(new BasicLayer(func, true, nodes));
    network.addLayer(new BasicLayer(new ActivationTANH(), false, intermediateDataset.getIdealSize()));
    network.getStructure().finalizeStructure();
    network.reset(); // randomize the weights
    train(nodes);
}
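The three BasicLayer calls above follow Encog's standard construction pattern: a pass-through linear input layer, the new hidden layer with the caller's activation function, and a TANH output layer. For reference, the same pattern in isolation (the layer sizes here are arbitrary):

import org.encog.engine.network.activation.ActivationLinear;
import org.encog.engine.network.activation.ActivationSigmoid;
import org.encog.engine.network.activation.ActivationTANH;
import org.encog.neural.networks.BasicNetwork;
import org.encog.neural.networks.layers.BasicLayer;

BasicNetwork net = new BasicNetwork();
net.addLayer(new BasicLayer(new ActivationLinear(), true, 4));  // input: 4 neurons, with bias
net.addLayer(new BasicLayer(new ActivationSigmoid(), true, 6)); // hidden: 6 neurons, with bias
net.addLayer(new BasicLayer(new ActivationTANH(), false, 1));   // output: 1 neuron, no bias
net.getStructure().finalizeStructure();
net.reset(); // randomize weights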
Example 5: train
import org.encog.engine.network.activation.ActivationFunction; // import the required package/class
public void train(int nodes) {
    Propagation propagation = new QuickPropagation(network, intermediateDataset, 0.01);
    propagation.setThreadCount(0); // 0 lets Encog choose the thread count automatically
    //for(int i = 0; i < 100; i++) {
    propagation.iteration();
    System.out.println("In deep layer: " + params.size() + " Training error " + propagation.getError());
    //}
    // Copy the input-to-hidden weights, including the bias row.
    int fromNodes = network.getInputCount() + 1; // +1 for the bias neuron
    int toNodes = network.getLayerNeuronCount(1); // the next (hidden) layer
    int numWeight = fromNodes * toNodes;
    double[] weights = new double[numWeight];
    int k = 0;
    for (int i = 0; i < fromNodes; i++) {
        for (int j = 0; j < toNodes; j++) {
            //FIXME, bug
            weights[k++] = network.getWeight(0, i, j);
        }
    }
    ActivationFunction func = network.getActivation(1);
    MLParams param = new MLParams(weights, func, nodes);
    params.add(param);
    System.out.println("Add weight: " + weights.length + "\n and the activation function: " + func.toString());
}
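A single iteration() call runs one training epoch; the commented-out loop suggests the author experimented with running more. For comparison, a conventional Encog training loop looks like this (the error target and epoch cap are illustrative assumptions, not from the source):

import org.encog.neural.networks.training.propagation.Propagation;
import org.encog.neural.networks.training.propagation.quick.QuickPropagation;

Propagation trainer = new QuickPropagation(network, intermediateDataset, 0.01);
int epoch = 0;
do {
    trainer.iteration();
    epoch++;
} while (trainer.getError() > 0.01 && epoch < 1000);
trainer.finishTraining();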