This article collects typical usage examples of the Java method org.deeplearning4j.nn.multilayer.MultiLayerNetwork.numParams. If you are wondering what MultiLayerNetwork.numParams does, how to call it, or where to find it used in real code, the curated method examples below should help. You can also explore the containing class, org.deeplearning4j.nn.multilayer.MultiLayerNetwork, for further usage examples.
The following 15 code examples of MultiLayerNetwork.numParams are shown, sorted by popularity by default. You can upvote the examples you like or find useful; your feedback helps the system recommend better Java code examples.
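Before the examples, a quick orientation: numParams() returns the size of the network's flattened parameter vector (the regression tests below compare it against net.params()), while the numParams(false) overload counts all parameters, including any pretrain-only ones such as a variational autoencoder's decoder weights; this is why the Spark functions below validate broadcast parameter vectors against numParams(false). Here is a minimal, self-contained sketch; it is illustrative only and not one of the 15 collected examples, and the class name, layer sizes, and loss function are arbitrary assumptions:

import org.deeplearning4j.nn.conf.MultiLayerConfiguration;
import org.deeplearning4j.nn.conf.NeuralNetConfiguration;
import org.deeplearning4j.nn.conf.layers.DenseLayer;
import org.deeplearning4j.nn.conf.layers.OutputLayer;
import org.deeplearning4j.nn.multilayer.MultiLayerNetwork;
import org.nd4j.linalg.lossfunctions.LossFunctions;

public class NumParamsSketch {
    public static void main(String[] args) {
        // A tiny two-layer MLP; the sizes here are arbitrary.
        MultiLayerConfiguration conf = new NeuralNetConfiguration.Builder()
                .list()
                .layer(0, new DenseLayer.Builder().nIn(4).nOut(3).build())
                .layer(1, new OutputLayer.Builder(LossFunctions.LossFunction.MCXENT)
                        .nIn(3).nOut(2).build())
                .build();
        MultiLayerNetwork net = new MultiLayerNetwork(conf);
        net.init();

        // (4*3 + 3) weights+biases in layer 0, (3*2 + 2) in layer 1 -> 23 in total
        int numParams = net.numParams();
        int numParamsAll = net.numParams(false); // also counts pretrain-only params (none here)
        System.out.println(numParams + " / " + numParamsAll); // 23 / 23 for this network
    }
}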
Example 1: getVaeLayer

import org.deeplearning4j.nn.multilayer.MultiLayerNetwork; // import the package/class this method depends on

@Override
public VariationalAutoencoder getVaeLayer() {
    MultiLayerNetwork network =
            new MultiLayerNetwork(MultiLayerConfiguration.fromJson((String) jsonConfig.getValue()));
    network.init();

    INDArray val = ((INDArray) params.value()).unsafeDuplication();
    if (val.length() != network.numParams(false))
        throw new IllegalStateException(
                "Network did not have same number of parameters as the broadcast set parameters");
    network.setParameters(val);

    Layer l = network.getLayer(0);
    if (!(l instanceof VariationalAutoencoder)) {
        throw new RuntimeException(
                "Cannot use VaeReconstructionProbWithKeyFunction on network that doesn't have a VAE "
                        + "layer as layer 0. Layer type: " + l.getClass());
    }
    return (VariationalAutoencoder) l;
}
Example 2: getVaeLayer

import org.deeplearning4j.nn.multilayer.MultiLayerNetwork; // import the package/class this method depends on

@Override
public VariationalAutoencoder getVaeLayer() {
    MultiLayerNetwork network =
            new MultiLayerNetwork(MultiLayerConfiguration.fromJson((String) jsonConfig.getValue()));
    network.init();

    INDArray val = ((INDArray) params.value()).unsafeDuplication();
    if (val.length() != network.numParams(false))
        throw new IllegalStateException(
                "Network did not have same number of parameters as the broadcast set parameters");
    network.setParameters(val);

    Layer l = network.getLayer(0);
    if (!(l instanceof VariationalAutoencoder)) {
        throw new RuntimeException(
                "Cannot use VaeReconstructionErrorWithKeyFunction on network that doesn't have a VAE "
                        + "layer as layer 0. Layer type: " + l.getClass());
    }
    return (VariationalAutoencoder) l;
}
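Examples 1 and 2 differ only in the function name they mention in the error message; both follow the same broadcast-validation idiom: rebuild the network from a broadcast JSON configuration, duplicate the shared parameter vector, and use numParams(false) to check its length before applying it. Distilled into a short sketch, where broadcastJson and broadcastParams are hypothetical stand-ins for the Spark broadcast values used in the real functions:

MultiLayerNetwork network = new MultiLayerNetwork(MultiLayerConfiguration.fromJson(broadcastJson));
network.init();
INDArray params = broadcastParams.unsafeDuplication(); // defensive copy of the shared broadcast value
if (params.length() != network.numParams(false)) {     // false: include pretrain-only parameters
    throw new IllegalStateException("Broadcast parameter vector has the wrong length");
}
network.setParameters(params);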
Example 3: regressionTestMLP1

import org.deeplearning4j.nn.multilayer.MultiLayerNetwork; // import the package/class this method depends on

@Test
public void regressionTestMLP1() throws Exception {
    File f = new ClassPathResource("regression_testing/050/050_ModelSerializer_Regression_MLP_1.zip")
            .getTempFileFromArchive();

    MultiLayerNetwork net = ModelSerializer.restoreMultiLayerNetwork(f, true);

    MultiLayerConfiguration conf = net.getLayerWiseConfigurations();
    assertEquals(2, conf.getConfs().size());
    assertTrue(conf.isBackprop());
    assertFalse(conf.isPretrain());

    DenseLayer l0 = (DenseLayer) conf.getConf(0).getLayer();
    assertEquals("relu", l0.getActivationFn().toString());
    assertEquals(3, l0.getNIn());
    assertEquals(4, l0.getNOut());
    assertEquals(WeightInit.XAVIER, l0.getWeightInit());
    assertEquals(new Nesterovs(0.15, 0.9), l0.getIUpdater());
    assertEquals(0.15, ((Nesterovs) l0.getIUpdater()).getLearningRate(), 1e-6);

    OutputLayer l1 = (OutputLayer) conf.getConf(1).getLayer();
    assertEquals("softmax", l1.getActivationFn().toString());
    assertTrue(l1.getLossFn() instanceof LossMCXENT);
    assertEquals(4, l1.getNIn());
    assertEquals(5, l1.getNOut());
    assertEquals(WeightInit.XAVIER, l1.getWeightInit());
    assertEquals(new Nesterovs(0.15, 0.9), l1.getIUpdater());
    assertEquals(0.9, ((Nesterovs) l1.getIUpdater()).getMomentum(), 1e-6);
    assertEquals(0.15, ((Nesterovs) l1.getIUpdater()).getLearningRate(), 1e-6);

    int numParams = net.numParams();
    assertEquals(Nd4j.linspace(1, numParams, numParams), net.params());

    int updaterSize = (int) new Nesterovs().stateSize(numParams);
    assertEquals(Nd4j.linspace(1, updaterSize, updaterSize), net.getUpdater().getStateViewArray());
}
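The closing assertions of this and the following regression tests only hold because the saved fixtures were created with their parameters and updater state set to a 1..n linspace. A hedged sketch of how such a fixture could have been produced; the fixture-generation code is not part of this listing, and the setStateViewArray call and fixtureFile variable are assumptions:

int n = net.numParams();
net.setParameters(Nd4j.linspace(1, n, n)); // parameters become 1, 2, ..., n
int updaterSize = (int) new Nesterovs().stateSize(n);
net.getUpdater().setStateViewArray(net, Nd4j.linspace(1, updaterSize, updaterSize), true);
ModelSerializer.writeModel(net, fixtureFile, true); // true: save the updater state too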
Example 4: regressionTestMLP1

import org.deeplearning4j.nn.multilayer.MultiLayerNetwork; // import the package/class this method depends on

@Test
public void regressionTestMLP1() throws Exception {
    File f = new ClassPathResource("regression_testing/071/071_ModelSerializer_Regression_MLP_1.zip")
            .getTempFileFromArchive();

    MultiLayerNetwork net = ModelSerializer.restoreMultiLayerNetwork(f, true);

    MultiLayerConfiguration conf = net.getLayerWiseConfigurations();
    assertEquals(2, conf.getConfs().size());
    assertTrue(conf.isBackprop());
    assertFalse(conf.isPretrain());

    DenseLayer l0 = (DenseLayer) conf.getConf(0).getLayer();
    assertEquals("relu", l0.getActivationFn().toString());
    assertEquals(3, l0.getNIn());
    assertEquals(4, l0.getNOut());
    assertEquals(WeightInit.XAVIER, l0.getWeightInit());
    assertEquals(new Nesterovs(0.15, 0.9), l0.getIUpdater());
    assertEquals(0.15, ((Nesterovs) l0.getIUpdater()).getLearningRate(), 1e-6);

    OutputLayer l1 = (OutputLayer) conf.getConf(1).getLayer();
    assertEquals("softmax", l1.getActivationFn().toString());
    assertTrue(l1.getLossFn() instanceof LossMCXENT);
    assertEquals(4, l1.getNIn());
    assertEquals(5, l1.getNOut());
    assertEquals(WeightInit.XAVIER, l1.getWeightInit());
    assertEquals(0.9, ((Nesterovs) l1.getIUpdater()).getMomentum(), 1e-6);
    assertEquals(0.15, ((Nesterovs) l1.getIUpdater()).getLearningRate(), 1e-6);

    int numParams = net.numParams();
    assertEquals(Nd4j.linspace(1, numParams, numParams), net.params());

    int updaterSize = (int) new Nesterovs().stateSize(numParams);
    assertEquals(Nd4j.linspace(1, updaterSize, updaterSize), net.getUpdater().getStateViewArray());
}
Example 5: regressionTestMLP1

import org.deeplearning4j.nn.multilayer.MultiLayerNetwork; // import the package/class this method depends on

@Test
public void regressionTestMLP1() throws Exception {
    File f = new ClassPathResource("regression_testing/060/060_ModelSerializer_Regression_MLP_1.zip")
            .getTempFileFromArchive();

    MultiLayerNetwork net = ModelSerializer.restoreMultiLayerNetwork(f, true);

    MultiLayerConfiguration conf = net.getLayerWiseConfigurations();
    assertEquals(2, conf.getConfs().size());
    assertTrue(conf.isBackprop());
    assertFalse(conf.isPretrain());

    DenseLayer l0 = (DenseLayer) conf.getConf(0).getLayer();
    assertEquals("relu", l0.getActivationFn().toString());
    assertEquals(3, l0.getNIn());
    assertEquals(4, l0.getNOut());
    assertEquals(WeightInit.XAVIER, l0.getWeightInit());
    assertEquals(new Nesterovs(0.15, 0.9), l0.getIUpdater());
    assertEquals(0.15, ((Nesterovs) l0.getIUpdater()).getLearningRate(), 1e-6);

    OutputLayer l1 = (OutputLayer) conf.getConf(1).getLayer();
    assertEquals("softmax", l1.getActivationFn().toString());
    assertTrue(l1.getLossFn() instanceof LossMCXENT);
    assertEquals(4, l1.getNIn());
    assertEquals(5, l1.getNOut());
    assertEquals(WeightInit.XAVIER, l1.getWeightInit());
    assertEquals(new Nesterovs(0.15, 0.9), l1.getIUpdater());
    assertEquals(0.9, ((Nesterovs) l1.getIUpdater()).getMomentum(), 1e-6);
    assertEquals(0.15, ((Nesterovs) l1.getIUpdater()).getLearningRate(), 1e-6);

    int numParams = net.numParams();
    assertEquals(Nd4j.linspace(1, numParams, numParams), net.params());

    int updaterSize = (int) new Nesterovs().stateSize(numParams);
    assertEquals(Nd4j.linspace(1, updaterSize, updaterSize), net.getUpdater().getStateViewArray());
}
Example 6: call

import org.deeplearning4j.nn.multilayer.MultiLayerNetwork; // import the package/class this method depends on

@Override
public Iterable<Tuple2<Integer, Double>> call(Iterator<DataSet> dataSetIterator) throws Exception {
    if (!dataSetIterator.hasNext()) {
        return Collections.singletonList(new Tuple2<>(0, 0.0));
    }

    DataSetIterator iter = new IteratorDataSetIterator(dataSetIterator, minibatchSize); // does batching where appropriate

    MultiLayerNetwork network = new MultiLayerNetwork(MultiLayerConfiguration.fromJson(json));
    network.init();
    // The .value() object will be shared by all executors on each machine -> OK, as params are not modified by the score function
    INDArray val = params.value().unsafeDuplication();
    if (val.length() != network.numParams(false))
        throw new IllegalStateException(
                "Network did not have same number of parameters as the broadcast set parameters");
    network.setParameters(val);

    List<Tuple2<Integer, Double>> out = new ArrayList<>();
    while (iter.hasNext()) {
        DataSet ds = iter.next();
        double score = network.score(ds, false);
        int numExamples = ds.getFeatureMatrix().size(0);
        out.add(new Tuple2<>(numExamples, score * numExamples));
    }

    Nd4j.getExecutioner().commit();
    return out;
}
Example 7: regressionTestMLP2

import org.deeplearning4j.nn.multilayer.MultiLayerNetwork; // import the package/class this method depends on

@Test
public void regressionTestMLP2() throws Exception {
    File f = new ClassPathResource("regression_testing/050/050_ModelSerializer_Regression_MLP_2.zip")
            .getTempFileFromArchive();

    MultiLayerNetwork net = ModelSerializer.restoreMultiLayerNetwork(f, true);

    MultiLayerConfiguration conf = net.getLayerWiseConfigurations();
    assertEquals(2, conf.getConfs().size());
    assertTrue(conf.isBackprop());
    assertFalse(conf.isPretrain());

    DenseLayer l0 = (DenseLayer) conf.getConf(0).getLayer();
    assertTrue(l0.getActivationFn() instanceof ActivationLReLU);
    assertEquals(3, l0.getNIn());
    assertEquals(4, l0.getNOut());
    assertEquals(WeightInit.DISTRIBUTION, l0.getWeightInit());
    assertEquals(new NormalDistribution(0.1, 1.2), l0.getDist());
    assertEquals(new RmsProp(0.15, 0.96, RmsProp.DEFAULT_RMSPROP_EPSILON), l0.getIUpdater());
    assertEquals(0.15, ((RmsProp) l0.getIUpdater()).getLearningRate(), 1e-6);
    assertEquals(new Dropout(0.6), l0.getIDropout());
    assertEquals(0.1, l0.getL1(), 1e-6);
    assertEquals(0.2, l0.getL2(), 1e-6);

    OutputLayer l1 = (OutputLayer) conf.getConf(1).getLayer();
    assertEquals("identity", l1.getActivationFn().toString());
    assertTrue(l1.getLossFn() instanceof LossMSE);
    assertEquals(4, l1.getNIn());
    assertEquals(5, l1.getNOut());
    assertEquals(WeightInit.DISTRIBUTION, l1.getWeightInit());
    assertEquals(new NormalDistribution(0.1, 1.2), l1.getDist());
    assertEquals(new RmsProp(0.15, 0.96, RmsProp.DEFAULT_RMSPROP_EPSILON), l1.getIUpdater());
    assertEquals(0.15, ((RmsProp) l1.getIUpdater()).getLearningRate(), 1e-6);
    assertEquals(new Dropout(0.6), l1.getIDropout());
    assertEquals(0.1, l1.getL1(), 1e-6);
    assertEquals(0.2, l1.getL2(), 1e-6);

    int numParams = net.numParams();
    assertEquals(Nd4j.linspace(1, numParams, numParams), net.params());

    int updaterSize = (int) new RmsProp().stateSize(numParams);
    assertEquals(Nd4j.linspace(1, updaterSize, updaterSize), net.getUpdater().getStateViewArray());
}
Example 8: regressionTestCNN1

import org.deeplearning4j.nn.multilayer.MultiLayerNetwork; // import the package/class this method depends on

@Test
public void regressionTestCNN1() throws Exception {
    File f = new ClassPathResource("regression_testing/050/050_ModelSerializer_Regression_CNN_1.zip")
            .getTempFileFromArchive();

    MultiLayerNetwork net = ModelSerializer.restoreMultiLayerNetwork(f, true);

    MultiLayerConfiguration conf = net.getLayerWiseConfigurations();
    assertEquals(3, conf.getConfs().size());
    assertTrue(conf.isBackprop());
    assertFalse(conf.isPretrain());

    ConvolutionLayer l0 = (ConvolutionLayer) conf.getConf(0).getLayer();
    assertEquals("tanh", l0.getActivationFn().toString());
    assertEquals(3, l0.getNIn());
    assertEquals(3, l0.getNOut());
    assertEquals(WeightInit.RELU, l0.getWeightInit());
    assertEquals(new RmsProp(0.15, 0.96, RmsProp.DEFAULT_RMSPROP_EPSILON), l0.getIUpdater());
    assertEquals(0.15, ((RmsProp) l0.getIUpdater()).getLearningRate(), 1e-6);
    assertArrayEquals(new int[] {2, 2}, l0.getKernelSize());
    assertArrayEquals(new int[] {1, 1}, l0.getStride());
    assertArrayEquals(new int[] {0, 0}, l0.getPadding());
    assertEquals(ConvolutionMode.Truncate, l0.getConvolutionMode()); // Pre-0.7.0: no ConvolutionMode. Want to default to truncate here if not set

    SubsamplingLayer l1 = (SubsamplingLayer) conf.getConf(1).getLayer();
    assertArrayEquals(new int[] {2, 2}, l1.getKernelSize());
    assertArrayEquals(new int[] {1, 1}, l1.getStride());
    assertArrayEquals(new int[] {0, 0}, l1.getPadding());
    assertEquals(PoolingType.MAX, l1.getPoolingType());
    assertEquals(ConvolutionMode.Truncate, l1.getConvolutionMode()); // Pre-0.7.0: no ConvolutionMode. Want to default to truncate here if not set

    OutputLayer l2 = (OutputLayer) conf.getConf(2).getLayer();
    assertEquals("sigmoid", l2.getActivationFn().toString());
    assertTrue(l2.getLossFn() instanceof LossNegativeLogLikelihood);
    assertEquals(26 * 26 * 3, l2.getNIn());
    assertEquals(5, l2.getNOut());
    assertEquals(WeightInit.RELU, l2.getWeightInit());
    assertEquals(new RmsProp(0.15, 0.96, RmsProp.DEFAULT_RMSPROP_EPSILON), l2.getIUpdater());
    assertEquals(0.15, ((RmsProp) l2.getIUpdater()).getLearningRate(), 1e-6);

    int numParams = net.numParams();
    assertEquals(Nd4j.linspace(1, numParams, numParams), net.params());

    int updaterSize = (int) new RmsProp().stateSize(numParams);
    assertEquals(Nd4j.linspace(1, updaterSize, updaterSize), net.getUpdater().getStateViewArray());
}
Example 9: regressionTestMLP1

import org.deeplearning4j.nn.multilayer.MultiLayerNetwork; // import the package/class this method depends on

@Test
public void regressionTestMLP1() throws Exception {
    File f = new ClassPathResource("regression_testing/080/080_ModelSerializer_Regression_MLP_1.zip")
            .getTempFileFromArchive();

    MultiLayerNetwork net = ModelSerializer.restoreMultiLayerNetwork(f, true);

    MultiLayerConfiguration conf = net.getLayerWiseConfigurations();
    assertEquals(2, conf.getConfs().size());
    assertTrue(conf.isBackprop());
    assertFalse(conf.isPretrain());

    DenseLayer l0 = (DenseLayer) conf.getConf(0).getLayer();
    assertTrue(l0.getActivationFn() instanceof ActivationReLU);
    assertEquals(3, l0.getNIn());
    assertEquals(4, l0.getNOut());
    assertEquals(WeightInit.XAVIER, l0.getWeightInit());
    assertTrue(l0.getIUpdater() instanceof Nesterovs);
    Nesterovs n = (Nesterovs) l0.getIUpdater();
    assertEquals(0.9, n.getMomentum(), 1e-6);
    assertEquals(0.15, n.getLearningRate(), 1e-6);

    OutputLayer l1 = (OutputLayer) conf.getConf(1).getLayer();
    assertTrue(l1.getActivationFn() instanceof ActivationSoftmax);
    assertTrue(l1.getLossFn() instanceof LossMCXENT);
    assertEquals(4, l1.getNIn());
    assertEquals(5, l1.getNOut());
    assertEquals(WeightInit.XAVIER, l1.getWeightInit());
    assertTrue(l1.getIUpdater() instanceof Nesterovs);
    assertEquals(0.9, ((Nesterovs) l1.getIUpdater()).getMomentum(), 1e-6);
    assertEquals(0.15, ((Nesterovs) l1.getIUpdater()).getLearningRate(), 1e-6);

    int numParams = net.numParams();
    assertEquals(Nd4j.linspace(1, numParams, numParams), net.params());

    int updaterSize = (int) new Nesterovs().stateSize(numParams);
    assertEquals(Nd4j.linspace(1, updaterSize, updaterSize), net.getUpdater().getStateViewArray());
}
Example 10: regressionTestMLP2

import org.deeplearning4j.nn.multilayer.MultiLayerNetwork; // import the package/class this method depends on

@Test
public void regressionTestMLP2() throws Exception {
    File f = new ClassPathResource("regression_testing/080/080_ModelSerializer_Regression_MLP_2.zip")
            .getTempFileFromArchive();

    MultiLayerNetwork net = ModelSerializer.restoreMultiLayerNetwork(f, true);

    MultiLayerConfiguration conf = net.getLayerWiseConfigurations();
    assertEquals(2, conf.getConfs().size());
    assertTrue(conf.isBackprop());
    assertFalse(conf.isPretrain());

    DenseLayer l0 = (DenseLayer) conf.getConf(0).getLayer();
    assertTrue(l0.getActivationFn() instanceof ActivationLReLU);
    assertEquals(3, l0.getNIn());
    assertEquals(4, l0.getNOut());
    assertEquals(WeightInit.DISTRIBUTION, l0.getWeightInit());
    assertEquals(new NormalDistribution(0.1, 1.2), l0.getDist());
    assertTrue(l0.getIUpdater() instanceof RmsProp);
    RmsProp r = (RmsProp) l0.getIUpdater();
    assertEquals(0.96, r.getRmsDecay(), 1e-6);
    assertEquals(0.15, r.getLearningRate(), 1e-6);
    assertEquals(new Dropout(0.6), l0.getIDropout());
    assertEquals(0.1, l0.getL1(), 1e-6);
    assertEquals(0.2, l0.getL2(), 1e-6);
    assertEquals(GradientNormalization.ClipElementWiseAbsoluteValue, l0.getGradientNormalization());
    assertEquals(1.5, l0.getGradientNormalizationThreshold(), 1e-5);

    OutputLayer l1 = (OutputLayer) conf.getConf(1).getLayer();
    assertTrue(l1.getActivationFn() instanceof ActivationIdentity);
    assertTrue(l1.getLossFn() instanceof LossMSE);
    assertEquals(4, l1.getNIn());
    assertEquals(5, l1.getNOut());
    assertEquals(WeightInit.DISTRIBUTION, l1.getWeightInit());
    assertEquals(new NormalDistribution(0.1, 1.2), l1.getDist());
    assertTrue(l1.getIUpdater() instanceof RmsProp);
    r = (RmsProp) l1.getIUpdater();
    assertEquals(0.96, r.getRmsDecay(), 1e-6);
    assertEquals(0.15, r.getLearningRate(), 1e-6);
    assertEquals(new Dropout(0.6), l1.getIDropout());
    assertEquals(0.1, l1.getL1(), 1e-6);
    assertEquals(0.2, l1.getL2(), 1e-6);
    assertEquals(GradientNormalization.ClipElementWiseAbsoluteValue, l1.getGradientNormalization());
    assertEquals(1.5, l1.getGradientNormalizationThreshold(), 1e-5);

    int numParams = net.numParams();
    assertEquals(Nd4j.linspace(1, numParams, numParams), net.params());

    int updaterSize = (int) new RmsProp().stateSize(numParams);
    assertEquals(Nd4j.linspace(1, updaterSize, updaterSize), net.getUpdater().getStateViewArray());
}
Example 11: regressionTestCNN1

import org.deeplearning4j.nn.multilayer.MultiLayerNetwork; // import the package/class this method depends on

@Test
public void regressionTestCNN1() throws Exception {
    File f = new ClassPathResource("regression_testing/080/080_ModelSerializer_Regression_CNN_1.zip")
            .getTempFileFromArchive();

    MultiLayerNetwork net = ModelSerializer.restoreMultiLayerNetwork(f, true);

    MultiLayerConfiguration conf = net.getLayerWiseConfigurations();
    assertEquals(3, conf.getConfs().size());
    assertTrue(conf.isBackprop());
    assertFalse(conf.isPretrain());

    ConvolutionLayer l0 = (ConvolutionLayer) conf.getConf(0).getLayer();
    assertTrue(l0.getActivationFn() instanceof ActivationTanH);
    assertEquals(3, l0.getNIn());
    assertEquals(3, l0.getNOut());
    assertEquals(WeightInit.RELU, l0.getWeightInit());
    assertTrue(l0.getIUpdater() instanceof RmsProp);
    RmsProp r = (RmsProp) l0.getIUpdater();
    assertEquals(0.96, r.getRmsDecay(), 1e-6);
    assertEquals(0.15, r.getLearningRate(), 1e-6);
    assertArrayEquals(new int[] {2, 2}, l0.getKernelSize());
    assertArrayEquals(new int[] {1, 1}, l0.getStride());
    assertArrayEquals(new int[] {0, 0}, l0.getPadding());
    assertEquals(ConvolutionMode.Same, l0.getConvolutionMode());

    SubsamplingLayer l1 = (SubsamplingLayer) conf.getConf(1).getLayer();
    assertArrayEquals(new int[] {2, 2}, l1.getKernelSize());
    assertArrayEquals(new int[] {1, 1}, l1.getStride());
    assertArrayEquals(new int[] {0, 0}, l1.getPadding());
    assertEquals(PoolingType.MAX, l1.getPoolingType());
    assertEquals(ConvolutionMode.Same, l1.getConvolutionMode());

    OutputLayer l2 = (OutputLayer) conf.getConf(2).getLayer();
    assertTrue(l2.getActivationFn() instanceof ActivationSigmoid);
    assertTrue(l2.getLossFn() instanceof LossNegativeLogLikelihood);
    assertEquals(26 * 26 * 3, l2.getNIn());
    assertEquals(5, l2.getNOut());
    assertEquals(WeightInit.RELU, l2.getWeightInit());
    assertTrue(l2.getIUpdater() instanceof RmsProp);
    r = (RmsProp) l2.getIUpdater();
    assertEquals(0.96, r.getRmsDecay(), 1e-6);
    assertEquals(0.15, r.getLearningRate(), 1e-6);
    assertTrue(conf.getInputPreProcess(2) instanceof CnnToFeedForwardPreProcessor);

    int numParams = net.numParams();
    assertEquals(Nd4j.linspace(1, numParams, numParams), net.params());

    int updaterSize = (int) new RmsProp().stateSize(numParams);
    assertEquals(Nd4j.linspace(1, updaterSize, updaterSize), net.getUpdater().getStateViewArray());
}
Example 12: regressionTestMLP2

import org.deeplearning4j.nn.multilayer.MultiLayerNetwork; // import the package/class this method depends on

@Test
public void regressionTestMLP2() throws Exception {
    File f = new ClassPathResource("regression_testing/071/071_ModelSerializer_Regression_MLP_2.zip")
            .getTempFileFromArchive();

    MultiLayerNetwork net = ModelSerializer.restoreMultiLayerNetwork(f, true);

    MultiLayerConfiguration conf = net.getLayerWiseConfigurations();
    assertEquals(2, conf.getConfs().size());
    assertTrue(conf.isBackprop());
    assertFalse(conf.isPretrain());

    DenseLayer l0 = (DenseLayer) conf.getConf(0).getLayer();
    assertTrue(l0.getActivationFn() instanceof ActivationLReLU);
    assertEquals(3, l0.getNIn());
    assertEquals(4, l0.getNOut());
    assertEquals(WeightInit.DISTRIBUTION, l0.getWeightInit());
    assertEquals(new NormalDistribution(0.1, 1.2), l0.getDist());
    assertEquals(new RmsProp(0.15, 0.96, RmsProp.DEFAULT_RMSPROP_EPSILON), l0.getIUpdater());
    assertEquals(0.15, ((RmsProp) l0.getIUpdater()).getLearningRate(), 1e-6);
    assertEquals(new Dropout(0.6), l0.getIDropout());
    assertEquals(0.1, l0.getL1(), 1e-6);
    assertEquals(0.2, l0.getL2(), 1e-6);
    assertEquals(GradientNormalization.ClipElementWiseAbsoluteValue, l0.getGradientNormalization());
    assertEquals(1.5, l0.getGradientNormalizationThreshold(), 1e-5);

    OutputLayer l1 = (OutputLayer) conf.getConf(1).getLayer();
    assertTrue(l1.getActivationFn() instanceof ActivationIdentity);
    assertTrue(l1.getLossFn() instanceof LossMSE);
    assertEquals(4, l1.getNIn());
    assertEquals(5, l1.getNOut());
    assertEquals(WeightInit.DISTRIBUTION, l1.getWeightInit());
    assertEquals(new NormalDistribution(0.1, 1.2), l1.getDist());
    assertEquals(new RmsProp(0.15, 0.96, RmsProp.DEFAULT_RMSPROP_EPSILON), l1.getIUpdater());
    assertEquals(0.15, ((RmsProp) l1.getIUpdater()).getLearningRate(), 1e-6);
    assertEquals(new Dropout(0.6), l1.getIDropout());
    assertEquals(0.1, l1.getL1(), 1e-6);
    assertEquals(0.2, l1.getL2(), 1e-6);
    assertEquals(GradientNormalization.ClipElementWiseAbsoluteValue, l1.getGradientNormalization());
    assertEquals(1.5, l1.getGradientNormalizationThreshold(), 1e-5);

    int numParams = net.numParams();
    assertEquals(Nd4j.linspace(1, numParams, numParams), net.params());

    int updaterSize = (int) new RmsProp().stateSize(numParams);
    assertEquals(Nd4j.linspace(1, updaterSize, updaterSize), net.getUpdater().getStateViewArray());
}
Example 13: regressionTestCNN1

import org.deeplearning4j.nn.multilayer.MultiLayerNetwork; // import the package/class this method depends on

@Test
public void regressionTestCNN1() throws Exception {
    File f = new ClassPathResource("regression_testing/071/071_ModelSerializer_Regression_CNN_1.zip")
            .getTempFileFromArchive();

    MultiLayerNetwork net = ModelSerializer.restoreMultiLayerNetwork(f, true);

    MultiLayerConfiguration conf = net.getLayerWiseConfigurations();
    assertEquals(3, conf.getConfs().size());
    assertTrue(conf.isBackprop());
    assertFalse(conf.isPretrain());

    ConvolutionLayer l0 = (ConvolutionLayer) conf.getConf(0).getLayer();
    assertEquals("tanh", l0.getActivationFn().toString());
    assertEquals(3, l0.getNIn());
    assertEquals(3, l0.getNOut());
    assertEquals(WeightInit.RELU, l0.getWeightInit());
    assertEquals(new RmsProp(0.15, 0.96, RmsProp.DEFAULT_RMSPROP_EPSILON), l0.getIUpdater());
    assertEquals(0.15, ((RmsProp) l0.getIUpdater()).getLearningRate(), 1e-6);
    assertArrayEquals(new int[] {2, 2}, l0.getKernelSize());
    assertArrayEquals(new int[] {1, 1}, l0.getStride());
    assertArrayEquals(new int[] {0, 0}, l0.getPadding());
    assertEquals(ConvolutionMode.Same, l0.getConvolutionMode());

    SubsamplingLayer l1 = (SubsamplingLayer) conf.getConf(1).getLayer();
    assertArrayEquals(new int[] {2, 2}, l1.getKernelSize());
    assertArrayEquals(new int[] {1, 1}, l1.getStride());
    assertArrayEquals(new int[] {0, 0}, l1.getPadding());
    assertEquals(PoolingType.MAX, l1.getPoolingType());
    assertEquals(ConvolutionMode.Same, l1.getConvolutionMode());

    OutputLayer l2 = (OutputLayer) conf.getConf(2).getLayer();
    assertEquals("sigmoid", l2.getActivationFn().toString());
    assertTrue(l2.getLossFn() instanceof LossNegativeLogLikelihood); //TODO
    assertEquals(26 * 26 * 3, l2.getNIn());
    assertEquals(5, l2.getNOut());
    assertEquals(WeightInit.RELU, l2.getWeightInit());
    assertEquals(new RmsProp(0.15, 0.96, RmsProp.DEFAULT_RMSPROP_EPSILON), l2.getIUpdater());
    assertEquals(0.15, ((RmsProp) l2.getIUpdater()).getLearningRate(), 1e-6);
    assertTrue(conf.getInputPreProcess(2) instanceof CnnToFeedForwardPreProcessor);

    int numParams = net.numParams();
    assertEquals(Nd4j.linspace(1, numParams, numParams), net.params());

    int updaterSize = (int) new RmsProp().stateSize(numParams);
    assertEquals(Nd4j.linspace(1, updaterSize, updaterSize), net.getUpdater().getStateViewArray());
}
Example 14: regressionTestMLP2

import org.deeplearning4j.nn.multilayer.MultiLayerNetwork; // import the package/class this method depends on

@Test
public void regressionTestMLP2() throws Exception {
    File f = new ClassPathResource("regression_testing/060/060_ModelSerializer_Regression_MLP_2.zip")
            .getTempFileFromArchive();

    MultiLayerNetwork net = ModelSerializer.restoreMultiLayerNetwork(f, true);

    MultiLayerConfiguration conf = net.getLayerWiseConfigurations();
    assertEquals(2, conf.getConfs().size());
    assertTrue(conf.isBackprop());
    assertFalse(conf.isPretrain());

    DenseLayer l0 = (DenseLayer) conf.getConf(0).getLayer();
    assertTrue(l0.getActivationFn() instanceof ActivationLReLU);
    assertEquals(3, l0.getNIn());
    assertEquals(4, l0.getNOut());
    assertEquals(WeightInit.DISTRIBUTION, l0.getWeightInit());
    assertEquals(new NormalDistribution(0.1, 1.2), l0.getDist());
    assertEquals(new RmsProp(0.15, 0.96, RmsProp.DEFAULT_RMSPROP_EPSILON), l0.getIUpdater());
    assertEquals(0.15, ((RmsProp) l0.getIUpdater()).getLearningRate(), 1e-6);
    assertEquals(new Dropout(0.6), l0.getIDropout());
    assertEquals(0.1, l0.getL1(), 1e-6);
    assertEquals(0.2, l0.getL2(), 1e-6);
    assertEquals(GradientNormalization.ClipElementWiseAbsoluteValue, l0.getGradientNormalization());
    assertEquals(1.5, l0.getGradientNormalizationThreshold(), 1e-5);

    OutputLayer l1 = (OutputLayer) conf.getConf(1).getLayer();
    assertEquals("identity", l1.getActivationFn().toString());
    assertTrue(l1.getLossFn() instanceof LossMSE);
    assertEquals(4, l1.getNIn());
    assertEquals(5, l1.getNOut());
    assertEquals(WeightInit.DISTRIBUTION, l1.getWeightInit());
    assertEquals(new NormalDistribution(0.1, 1.2), l1.getDist());
    assertEquals(new RmsProp(0.15, 0.96, RmsProp.DEFAULT_RMSPROP_EPSILON), l1.getIUpdater());
    assertEquals(0.15, ((RmsProp) l1.getIUpdater()).getLearningRate(), 1e-6);
    assertEquals(new Dropout(0.6), l1.getIDropout());
    assertEquals(0.1, l1.getL1(), 1e-6);
    assertEquals(0.2, l1.getL2(), 1e-6);
    assertEquals(GradientNormalization.ClipElementWiseAbsoluteValue, l1.getGradientNormalization());
    assertEquals(1.5, l1.getGradientNormalizationThreshold(), 1e-5);

    int numParams = net.numParams();
    assertEquals(Nd4j.linspace(1, numParams, numParams), net.params());

    int updaterSize = (int) new RmsProp().stateSize(numParams);
    assertEquals(Nd4j.linspace(1, updaterSize, updaterSize), net.getUpdater().getStateViewArray());
}
Example 15: regressionTestCNN1

import org.deeplearning4j.nn.multilayer.MultiLayerNetwork; // import the package/class this method depends on

@Test
public void regressionTestCNN1() throws Exception {
    File f = new ClassPathResource("regression_testing/060/060_ModelSerializer_Regression_CNN_1.zip")
            .getTempFileFromArchive();

    MultiLayerNetwork net = ModelSerializer.restoreMultiLayerNetwork(f, true);

    MultiLayerConfiguration conf = net.getLayerWiseConfigurations();
    assertEquals(3, conf.getConfs().size());
    assertTrue(conf.isBackprop());
    assertFalse(conf.isPretrain());

    ConvolutionLayer l0 = (ConvolutionLayer) conf.getConf(0).getLayer();
    assertEquals("tanh", l0.getActivationFn().toString());
    assertEquals(3, l0.getNIn());
    assertEquals(3, l0.getNOut());
    assertEquals(WeightInit.RELU, l0.getWeightInit());
    assertEquals(new RmsProp(0.15, 0.96, RmsProp.DEFAULT_RMSPROP_EPSILON), l0.getIUpdater());
    assertEquals(0.15, ((RmsProp) l0.getIUpdater()).getLearningRate(), 1e-6);
    assertArrayEquals(new int[] {2, 2}, l0.getKernelSize());
    assertArrayEquals(new int[] {1, 1}, l0.getStride());
    assertArrayEquals(new int[] {0, 0}, l0.getPadding());
    assertEquals(ConvolutionMode.Truncate, l0.getConvolutionMode()); // Pre-0.7.0: no ConvolutionMode. Want to default to truncate here if not set

    SubsamplingLayer l1 = (SubsamplingLayer) conf.getConf(1).getLayer();
    assertArrayEquals(new int[] {2, 2}, l1.getKernelSize());
    assertArrayEquals(new int[] {1, 1}, l1.getStride());
    assertArrayEquals(new int[] {0, 0}, l1.getPadding());
    assertEquals(PoolingType.MAX, l1.getPoolingType());
    assertEquals(ConvolutionMode.Truncate, l1.getConvolutionMode()); // Pre-0.7.0: no ConvolutionMode. Want to default to truncate here if not set

    OutputLayer l2 = (OutputLayer) conf.getConf(2).getLayer();
    assertEquals("sigmoid", l2.getActivationFn().toString());
    assertTrue(l2.getLossFn() instanceof LossNegativeLogLikelihood); //TODO
    assertEquals(26 * 26 * 3, l2.getNIn());
    assertEquals(5, l2.getNOut());
    assertEquals(WeightInit.RELU, l2.getWeightInit());
    assertEquals(new RmsProp(0.15, 0.96, RmsProp.DEFAULT_RMSPROP_EPSILON), l2.getIUpdater());
    assertEquals(0.15, ((RmsProp) l2.getIUpdater()).getLearningRate(), 1e-6);
    assertTrue(conf.getInputPreProcess(2) instanceof CnnToFeedForwardPreProcessor);

    int numParams = net.numParams();
    assertEquals(Nd4j.linspace(1, numParams, numParams), net.params());

    int updaterSize = (int) new RmsProp().stateSize(numParams);
    assertEquals(Nd4j.linspace(1, updaterSize, updaterSize), net.getUpdater().getStateViewArray());
}