This article collects typical usage examples of the Java enum constant org.deeplearning4j.nn.api.OptimizationAlgorithm.CONJUGATE_GRADIENT. If you are wondering what OptimizationAlgorithm.CONJUGATE_GRADIENT is for, how to use it, or simply want working examples, the curated code samples below should help. You can also look into the enclosing enum, org.deeplearning4j.nn.api.OptimizationAlgorithm, for further usage examples.
Below are 3 code examples of the OptimizationAlgorithm.CONJUGATE_GRADIENT constant, sorted by popularity by default.
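Before the examples, here is a minimal, self-contained sketch of the typical usage pattern: CONJUGATE_GRADIENT, like the other OptimizationAlgorithm values, is selected network-wide via NeuralNetConfiguration.Builder.optimizationAlgo(...). The class name and the 4-in/3-out layer sizes below are illustrative choices matching the Iris examples that follow, not code from the original test class.

import org.deeplearning4j.nn.api.OptimizationAlgorithm;
import org.deeplearning4j.nn.conf.MultiLayerConfiguration;
import org.deeplearning4j.nn.conf.NeuralNetConfiguration;
import org.deeplearning4j.nn.conf.layers.DenseLayer;
import org.deeplearning4j.nn.conf.layers.OutputLayer;
import org.deeplearning4j.nn.multilayer.MultiLayerNetwork;

public class ConjugateGradientSketch {
    public static void main(String[] args) {
        //Select conjugate gradient as the optimization algorithm for the whole network
        MultiLayerConfiguration conf = new NeuralNetConfiguration.Builder()
                .optimizationAlgo(OptimizationAlgorithm.CONJUGATE_GRADIENT)
                .list()
                .layer(0, new DenseLayer.Builder().nIn(4).nOut(5).build())
                .layer(1, new OutputLayer.Builder().nIn(5).nOut(3).build())
                .build();

        MultiLayerNetwork net = new MultiLayerNetwork(conf);
        net.init();
        //net.fit(...) would then train using conjugate gradient line searches
    }
}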
Example 1: testOptimizationAlgorithmsSearchBasic
@Test
public void testOptimizationAlgorithmsSearchBasic() {
    //Basic sanity check: each optimization algorithm should fit a single Iris minibatch
    //on a ComputationGraph without throwing.
    DataSetIterator iter = new IrisDataSetIterator(1, 1);

    OptimizationAlgorithm[] oas = new OptimizationAlgorithm[] {
            OptimizationAlgorithm.STOCHASTIC_GRADIENT_DESCENT,
            OptimizationAlgorithm.LINE_GRADIENT_DESCENT,
            OptimizationAlgorithm.CONJUGATE_GRADIENT,
            OptimizationAlgorithm.LBFGS};

    for (OptimizationAlgorithm oa : oas) {
        System.out.println(oa);

        ComputationGraphConfiguration conf = new NeuralNetConfiguration.Builder()
                .optimizationAlgo(oa)
                .graphBuilder()
                .addInputs("input")
                .addLayer("first", new DenseLayer.Builder().nIn(4).nOut(5).build(), "input")
                .addLayer("output", new OutputLayer.Builder().nIn(5).nOut(3).build(), "first")
                .setOutputs("output")
                .pretrain(false).backprop(true)
                .build();

        ComputationGraph net = new ComputationGraph(conf);
        net.init();
        net.fit(iter.next());
    }
}
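Note that Example 1 exercises the ComputationGraph API rather than MultiLayerNetwork: the optimization algorithm is still set once on the NeuralNetConfiguration.Builder before switching to graphBuilder(), so the same optimizationAlgo(oa) call covers both network types. The test only verifies that a single fit(...) call on one Iris example completes without throwing.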
Example 2: testOptimizersBasicMLPBackprop
@Test
public void testOptimizersBasicMLPBackprop() {
    //Basic tests of the 'does it throw an exception' variety.
    DataSetIterator iter = new IrisDataSetIterator(5, 50);

    OptimizationAlgorithm[] toTest = {
            OptimizationAlgorithm.STOCHASTIC_GRADIENT_DESCENT,
            OptimizationAlgorithm.LINE_GRADIENT_DESCENT,
            OptimizationAlgorithm.CONJUGATE_GRADIENT,
            OptimizationAlgorithm.LBFGS
            //OptimizationAlgorithm.HESSIAN_FREE //Known to not work
    };

    for (OptimizationAlgorithm oa : toTest) {
        MultiLayerNetwork network = new MultiLayerNetwork(getMLPConfigIris(oa));
        network.init();

        iter.reset();
        network.fit(iter);
    }
}
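Examples 2 and 3 both call a getMLPConfigIris(oa) helper that is not included in this excerpt. The sketch below is a plausible reconstruction, assuming a small tanh/softmax MLP matching the 4-feature, 3-class Iris shape from Example 1; the seed, activations, and loss function are assumptions, not the original helper.

//Hypothetical reconstruction of the getMLPConfigIris helper referenced above.
//Assumes org.nd4j.linalg.activations.Activation and
//org.nd4j.linalg.lossfunctions.LossFunctions are imported.
private static MultiLayerConfiguration getMLPConfigIris(OptimizationAlgorithm oa) {
    return new NeuralNetConfiguration.Builder()
            .optimizationAlgo(oa)   //the algorithm under test
            .seed(12345)            //assumed: fixed seed for reproducibility
            .list()
            .layer(0, new DenseLayer.Builder().nIn(4).nOut(5)
                    .activation(Activation.TANH).build())
            .layer(1, new OutputLayer.Builder(LossFunctions.LossFunction.MCXENT)
                    .nIn(5).nOut(3).activation(Activation.SOFTMAX).build())
            .build();
}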
Example 3: testOptimizersMLP
@Test
public void testOptimizersMLP() {
    //Check that the score actually decreases over time
    DataSetIterator iter = new IrisDataSetIterator(150, 150);

    OptimizationAlgorithm[] toTest = {
            OptimizationAlgorithm.STOCHASTIC_GRADIENT_DESCENT,
            OptimizationAlgorithm.LINE_GRADIENT_DESCENT,
            OptimizationAlgorithm.CONJUGATE_GRADIENT,
            OptimizationAlgorithm.LBFGS
            //OptimizationAlgorithm.HESSIAN_FREE //Known to not work
    };

    DataSet ds = iter.next();
    ds.normalizeZeroMeanZeroUnitVariance();

    for (OptimizationAlgorithm oa : toTest) {
        int nIter = 10;
        MultiLayerNetwork network = new MultiLayerNetwork(getMLPConfigIris(oa));
        network.init();

        double score = network.score(ds);
        assertTrue(score != 0.0 && !Double.isNaN(score));

        if (PRINT_OPT_RESULTS)
            System.out.println("testOptimizersMLP() - " + oa);

        int nCallsToOptimizer = 30;
        double[] scores = new double[nCallsToOptimizer + 1];
        scores[0] = score;

        for (int i = 0; i < nCallsToOptimizer; i++) {
            for (int j = 0; j < nIter; j++) {
                network.fit(ds);
            }
            double scoreAfter = network.score(ds);
            scores[i + 1] = scoreAfter;
            assertTrue("Score is NaN after optimization", !Double.isNaN(scoreAfter));
            assertTrue("OA= " + oa + ", before= " + score + ", after= " + scoreAfter, scoreAfter <= score);
            score = scoreAfter;
        }

        if (PRINT_OPT_RESULTS)
            System.out.println(oa + " - " + Arrays.toString(scores));
    }
}
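A final observation on Example 3: the scoreAfter <= score assertion is only reasonable because training is full-batch. IrisDataSetIterator(150, 150) yields the entire 150-example Iris dataset as a single DataSet, which is normalized to zero mean and unit variance first, so network.score(ds) measures exactly the objective each optimizer is minimizing, and the test expects it not to increase across the 30 checkpoints.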