This page collects typical usage examples of the Java method org.deeplearning4j.nn.multilayer.MultiLayerNetwork.getParam. If you have been wondering what MultiLayerNetwork.getParam does, how to use it, and what real-world calls look like, the curated method examples below may help. You can also explore further usage examples of the enclosing class, org.deeplearning4j.nn.multilayer.MultiLayerNetwork.
Below are 8 code examples of MultiLayerNetwork.getParam, sorted by popularity by default.
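Before the examples, here is a minimal self-contained sketch, not taken from the examples below, showing the parameter-key convention that getParam expects: keys have the form "<layerIndex>_<paramName>", e.g. "0_W" for the weights of layer 0, "0_b" for its bias, and "0_RW" for the recurrent weights of a recurrent layer. The GetParamDemo class name and the layer sizes are invented for illustration; the builder API matches what the examples below use.

import org.deeplearning4j.nn.conf.MultiLayerConfiguration;
import org.deeplearning4j.nn.conf.NeuralNetConfiguration;
import org.deeplearning4j.nn.conf.layers.DenseLayer;
import org.deeplearning4j.nn.conf.layers.OutputLayer;
import org.deeplearning4j.nn.multilayer.MultiLayerNetwork;
import org.nd4j.linalg.api.ndarray.INDArray;
import org.nd4j.linalg.lossfunctions.LossFunctions;

public class GetParamDemo {
    public static void main(String[] args) {
        // Two-layer network: parameter keys are "<layerIndex>_<paramName>".
        MultiLayerConfiguration conf = new NeuralNetConfiguration.Builder()
                .list()
                .layer(new DenseLayer.Builder().nIn(4).nOut(3).build())
                .layer(new OutputLayer.Builder()
                        .lossFunction(LossFunctions.LossFunction.MSE)
                        .nIn(3).nOut(2).build())
                .build();
        MultiLayerNetwork net = new MultiLayerNetwork(conf);
        net.init();

        INDArray w0 = net.getParam("0_W"); // layer 0 weights, shape [4, 3]
        INDArray b0 = net.getParam("0_b"); // layer 0 bias, shape [1, 3]
        INDArray w1 = net.getParam("1_W"); // layer 1 weights, shape [3, 2]
        System.out.println("0_W shape: " + java.util.Arrays.toString(w0.shape()));
    }
}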
Example 1: testRWInit
import org.deeplearning4j.nn.multilayer.MultiLayerNetwork; // import the package/class the method depends on
@Test
public void testRWInit() {
    for (boolean rwInit : new boolean[]{false, true}) {
        for (int i = 0; i < 3; i++) {
            NeuralNetConfiguration.ListBuilder b = new NeuralNetConfiguration.Builder()
                    .weightInit(new UniformDistribution(0, 1))
                    .list();
            if (rwInit) {
                switch (i) {
                    case 0:
                        b.layer(new LSTM.Builder().nIn(10).nOut(10)
                                .weightInitRecurrent(new UniformDistribution(2, 3))
                                .build());
                        break;
                    case 1:
                        b.layer(new GravesLSTM.Builder().nIn(10).nOut(10)
                                .weightInitRecurrent(new UniformDistribution(2, 3))
                                .build());
                        break;
                    case 2:
                        b.layer(new SimpleRnn.Builder().nIn(10).nOut(10)
                                .weightInitRecurrent(new UniformDistribution(2, 3)).build());
                        break;
                    default:
                        throw new RuntimeException();
                }
            } else {
                switch (i) {
                    case 0:
                        b.layer(new LSTM.Builder().nIn(10).nOut(10).build());
                        break;
                    case 1:
                        b.layer(new GravesLSTM.Builder().nIn(10).nOut(10).build());
                        break;
                    case 2:
                        b.layer(new SimpleRnn.Builder().nIn(10).nOut(10).build());
                        break;
                    default:
                        throw new RuntimeException();
                }
            }

            MultiLayerNetwork net = new MultiLayerNetwork(b.build());
            net.init();

            INDArray rw = net.getParam("0_RW"); // recurrent weights of layer 0
            double min = rw.minNumber().doubleValue();
            double max = rw.maxNumber().doubleValue();
            if (rwInit) {
                // weightInitRecurrent(U(2,3)) should override the init for RW only
                assertTrue(String.valueOf(min), min >= 2.0);
                assertTrue(String.valueOf(max), max <= 3.0);
            } else {
                // without it, RW falls back to the layer-wide U(0,1) initialization
                assertTrue(String.valueOf(min), min >= 0.0);
                assertTrue(String.valueOf(max), max <= 1.0);
            }
        }
    }
}
Example 2: testSimpleRnn
import org.deeplearning4j.nn.multilayer.MultiLayerNetwork; // import the package/class the method depends on
@Test
public void testSimpleRnn() {
    Nd4j.getRandom().setSeed(12345);

    int m = 3;          // minibatch size
    int nIn = 5;
    int layerSize = 6;
    int tsLength = 7;   // time series length
    INDArray in = Nd4j.rand(new int[]{m, nIn, tsLength});
    // in.get(all(), all(), interval(1, tsLength)).assign(0);

    MultiLayerConfiguration conf = new NeuralNetConfiguration.Builder()
            .updater(new NoOp())
            .weightInit(WeightInit.XAVIER)
            .activation(Activation.TANH)
            .list()
            .layer(new SimpleRnn.Builder().nIn(nIn).nOut(layerSize).build())
            .build();

    MultiLayerNetwork net = new MultiLayerNetwork(conf);
    net.init();

    INDArray out = net.output(in);
    INDArray w = net.getParam("0_W");   // input weights
    INDArray rw = net.getParam("0_RW"); // recurrent weights
    INDArray b = net.getParam("0_b");   // bias

    // Recompute the SimpleRnn forward pass step by step and compare
    INDArray outLast = null;
    for (int i = 0; i < tsLength; i++) {
        INDArray inCurrent = in.get(all(), all(), point(i));
        INDArray outExpCurrent = inCurrent.mmul(w);
        if (outLast != null) {
            outExpCurrent.addi(outLast.mmul(rw));
        }
        outExpCurrent.addiRowVector(b);
        Transforms.tanh(outExpCurrent, false);

        INDArray outActCurrent = out.get(all(), all(), point(i));
        assertEquals(String.valueOf(i), outExpCurrent, outActCurrent);

        outLast = outExpCurrent;
    }

    TestUtils.testModelSerialization(net);
}
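For reference, the loop in Example 2 recomputes the SimpleRnn forward pass by hand; written as a formula (my paraphrase, not part of the original test): out_t = tanh(in_t · W + out_{t-1} · RW + b), where the out_{t-1} · RW term is skipped at the first time step, equivalent to taking out_{-1} = 0.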
Example 3: testLayerRecurrentConstraints
import org.deeplearning4j.nn.multilayer.MultiLayerNetwork; // import the package/class the method depends on
@Test
public void testLayerRecurrentConstraints() throws Exception {
    LayerConstraint[] constraints = new LayerConstraint[]{
            new MaxNormConstraint(0.5, 1),
            new MinMaxNormConstraint(0.3, 0.4, 1.0, 1),
            new NonNegativeConstraint(),
            new UnitNormConstraint(1)
    };

    for (LayerConstraint lc : constraints) {
        MultiLayerConfiguration conf = new NeuralNetConfiguration.Builder()
                .updater(new Sgd(0.0))
                .weightInit(WeightInit.DISTRIBUTION).dist(new NormalDistribution(0, 5))
                .list()
                .layer(new LSTM.Builder().nIn(12).nOut(10)
                        .constrainRecurrent(lc).build())
                .layer(new OutputLayer.Builder().lossFunction(LossFunctions.LossFunction.MSE).nIn(10).nOut(8).build())
                .build();

        MultiLayerNetwork net = new MultiLayerNetwork(conf);
        net.init();

        LayerConstraint exp = lc.clone();
        assertEquals(exp.toString(), net.getLayer(0).conf().getLayer().getConstraints().get(0).toString());

        INDArray input = Nd4j.rand(3, 12);
        INDArray labels = Nd4j.rand(3, 8);
        net.fit(input, labels);

        INDArray RW0 = net.getParam("0_RW"); // the constraint was applied to the recurrent weights only
        if (lc instanceof MaxNormConstraint) {
            assertTrue(RW0.norm2(1).maxNumber().doubleValue() <= 0.5);
        } else if (lc instanceof MinMaxNormConstraint) {
            assertTrue(RW0.norm2(1).minNumber().doubleValue() >= 0.3);
            assertTrue(RW0.norm2(1).maxNumber().doubleValue() <= 0.4);
        } else if (lc instanceof NonNegativeConstraint) {
            assertTrue(RW0.minNumber().doubleValue() >= 0.0);
        } else if (lc instanceof UnitNormConstraint) {
            assertEquals(RW0.norm2(1).minNumber().doubleValue(), 1.0, 1e-6);
            assertEquals(RW0.norm2(1).maxNumber().doubleValue(), 1.0, 1e-6);
        }

        TestUtils.testModelSerialization(net);
    }
}
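A note on the assertion pattern used in Example 3 and the constraint examples that follow: the constraints were constructed over dimension 1 of the parameter array, so the tests recompute the per-row L2 norm with norm2(1) and compare it against the constraint bounds. Here is a minimal sketch of the same check outside any test harness; the NormCheckDemo class name and the matrix contents are invented for illustration.

import org.nd4j.linalg.api.ndarray.INDArray;
import org.nd4j.linalg.factory.Nd4j;

public class NormCheckDemo {
    public static void main(String[] args) {
        // 3x4 matrix; norm2(1) reduces along dimension 1, giving one L2 norm per row.
        INDArray m = Nd4j.create(new double[][]{
                {3, 4, 0, 0},   // L2 norm 5
                {1, 0, 0, 0},   // L2 norm 1
                {0, 0, 0, 2}}); // L2 norm 2
        INDArray rowNorms = m.norm2(1);                       // [5, 1, 2]
        double max = rowNorms.maxNumber().doubleValue();      // 5.0
        System.out.println(rowNorms + " max=" + max);
        // A MaxNormConstraint(0.5, 1) would rescale any row whose norm exceeds 0.5,
        // which is exactly what the assertions in the examples verify after fit().
    }
}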
Example 4: testLayerBiasConstraints
import org.deeplearning4j.nn.multilayer.MultiLayerNetwork; // import the package/class the method depends on
@Test
public void testLayerBiasConstraints() throws Exception {
    LayerConstraint[] constraints = new LayerConstraint[]{
            new MaxNormConstraint(0.5, 1),
            new MinMaxNormConstraint(0.3, 0.4, 1.0, 1),
            new NonNegativeConstraint(),
            new UnitNormConstraint(1)
    };

    for (LayerConstraint lc : constraints) {
        MultiLayerConfiguration conf = new NeuralNetConfiguration.Builder()
                .updater(new Sgd(0.0))
                .weightInit(WeightInit.DISTRIBUTION).dist(new NormalDistribution(0, 5))
                .biasInit(10.0) // start well outside the constraint bounds
                .list()
                .layer(new DenseLayer.Builder().nIn(12).nOut(10)
                        .constrainBias(lc).build())
                .layer(new OutputLayer.Builder().lossFunction(LossFunctions.LossFunction.MSE).nIn(10).nOut(8).build())
                .build();

        MultiLayerNetwork net = new MultiLayerNetwork(conf);
        net.init();

        LayerConstraint exp = lc.clone();
        assertEquals(exp.toString(), net.getLayer(0).conf().getLayer().getConstraints().get(0).toString());

        INDArray input = Nd4j.rand(3, 12);
        INDArray labels = Nd4j.rand(3, 8);
        net.fit(input, labels);

        INDArray b0 = net.getParam("0_b"); // the constraint was applied to the bias only
        if (lc instanceof MaxNormConstraint) {
            assertTrue(b0.norm2(1).maxNumber().doubleValue() <= 0.5);
        } else if (lc instanceof MinMaxNormConstraint) {
            assertTrue(b0.norm2(1).minNumber().doubleValue() >= 0.3);
            assertTrue(b0.norm2(1).maxNumber().doubleValue() <= 0.4);
        } else if (lc instanceof NonNegativeConstraint) {
            assertTrue(b0.minNumber().doubleValue() >= 0.0);
        } else if (lc instanceof UnitNormConstraint) {
            assertEquals(b0.norm2(1).minNumber().doubleValue(), 1.0, 1e-6);
            assertEquals(b0.norm2(1).maxNumber().doubleValue(), 1.0, 1e-6);
        }

        TestUtils.testModelSerialization(net);
    }
}
Example 5: testLayerWeightsConstraints
import org.deeplearning4j.nn.multilayer.MultiLayerNetwork; // import the package/class the method depends on
@Test
public void testLayerWeightsConstraints() throws Exception {
    LayerConstraint[] constraints = new LayerConstraint[]{
            new MaxNormConstraint(0.5, 1),
            new MinMaxNormConstraint(0.3, 0.4, 1.0, 1),
            new NonNegativeConstraint(),
            new UnitNormConstraint(1)
    };

    for (LayerConstraint lc : constraints) {
        MultiLayerConfiguration conf = new NeuralNetConfiguration.Builder()
                .updater(new Sgd(0.0))
                .weightInit(WeightInit.DISTRIBUTION).dist(new NormalDistribution(0, 5))
                .list()
                .layer(new DenseLayer.Builder().nIn(12).nOut(10)
                        .constrainWeights(lc).build())
                .layer(new OutputLayer.Builder().lossFunction(LossFunctions.LossFunction.MSE).nIn(10).nOut(8).build())
                .build();

        MultiLayerNetwork net = new MultiLayerNetwork(conf);
        net.init();

        LayerConstraint exp = lc.clone();
        assertEquals(exp.toString(), net.getLayer(0).conf().getLayer().getConstraints().get(0).toString());

        INDArray input = Nd4j.rand(3, 12);
        INDArray labels = Nd4j.rand(3, 8);
        net.fit(input, labels);

        INDArray w0 = net.getParam("0_W"); // the constraint was applied to the weights only
        if (lc instanceof MaxNormConstraint) {
            assertTrue(w0.norm2(1).maxNumber().doubleValue() <= 0.5);
        } else if (lc instanceof MinMaxNormConstraint) {
            assertTrue(w0.norm2(1).minNumber().doubleValue() >= 0.3);
            assertTrue(w0.norm2(1).maxNumber().doubleValue() <= 0.4);
        } else if (lc instanceof NonNegativeConstraint) {
            assertTrue(w0.minNumber().doubleValue() >= 0.0);
        } else if (lc instanceof UnitNormConstraint) {
            assertEquals(w0.norm2(1).minNumber().doubleValue(), 1.0, 1e-6);
            assertEquals(w0.norm2(1).maxNumber().doubleValue(), 1.0, 1e-6);
        }

        TestUtils.testModelSerialization(net);
    }
}
Example 6: testLayerWeightsAndBiasConstraints
import org.deeplearning4j.nn.multilayer.MultiLayerNetwork; // import the package/class the method depends on
@Test
public void testLayerWeightsAndBiasConstraints() throws Exception {
    LayerConstraint[] constraints = new LayerConstraint[]{
            new MaxNormConstraint(0.5, 1),
            new MinMaxNormConstraint(0.3, 0.4, 1.0, 1),
            new NonNegativeConstraint(),
            new UnitNormConstraint(1)
    };

    for (LayerConstraint lc : constraints) {
        MultiLayerConfiguration conf = new NeuralNetConfiguration.Builder()
                .updater(new Sgd(0.0))
                .weightInit(WeightInit.DISTRIBUTION).dist(new NormalDistribution(0, 5))
                .biasInit(0.2)
                .list()
                .layer(new DenseLayer.Builder().nIn(12).nOut(10)
                        .constrainAllParameters(lc).build())
                .layer(new OutputLayer.Builder().lossFunction(LossFunctions.LossFunction.MSE).nIn(10).nOut(8).build())
                .build();

        MultiLayerNetwork net = new MultiLayerNetwork(conf);
        net.init();

        LayerConstraint exp = lc.clone();
        assertEquals(exp.toString(), net.getLayer(0).conf().getLayer().getConstraints().get(0).toString());

        INDArray input = Nd4j.rand(3, 12);
        INDArray labels = Nd4j.rand(3, 8);
        net.fit(input, labels);

        // constrainAllParameters applies to both weights and bias
        INDArray w0 = net.getParam("0_W");
        INDArray b0 = net.getParam("0_b");
        if (lc instanceof MaxNormConstraint) {
            assertTrue(w0.norm2(1).maxNumber().doubleValue() <= 0.5);
            assertTrue(b0.norm2(1).maxNumber().doubleValue() <= 0.5);
        } else if (lc instanceof MinMaxNormConstraint) {
            assertTrue(w0.norm2(1).minNumber().doubleValue() >= 0.3);
            assertTrue(w0.norm2(1).maxNumber().doubleValue() <= 0.4);
            assertTrue(b0.norm2(1).minNumber().doubleValue() >= 0.3);
            assertTrue(b0.norm2(1).maxNumber().doubleValue() <= 0.4);
        } else if (lc instanceof NonNegativeConstraint) {
            assertTrue(w0.minNumber().doubleValue() >= 0.0);
            assertTrue(b0.minNumber().doubleValue() >= 0.0);
        } else if (lc instanceof UnitNormConstraint) {
            assertEquals(w0.norm2(1).minNumber().doubleValue(), 1.0, 1e-6);
            assertEquals(w0.norm2(1).maxNumber().doubleValue(), 1.0, 1e-6);
            assertEquals(b0.norm2(1).minNumber().doubleValue(), 1.0, 1e-6);
            assertEquals(b0.norm2(1).maxNumber().doubleValue(), 1.0, 1e-6);
        }

        TestUtils.testModelSerialization(net);
    }
}
Example 7: testLayerWeightsAndBiasSeparateConstraints
import org.deeplearning4j.nn.multilayer.MultiLayerNetwork; // import the package/class the method depends on
@Test
public void testLayerWeightsAndBiasSeparateConstraints() throws Exception {
    LayerConstraint[] constraints = new LayerConstraint[]{
            new MaxNormConstraint(0.5, 1),
            new MinMaxNormConstraint(0.3, 0.4, 1.0, 1),
            new NonNegativeConstraint(),
            new UnitNormConstraint(1)
    };

    for (LayerConstraint lc : constraints) {
        MultiLayerConfiguration conf = new NeuralNetConfiguration.Builder()
                .updater(new Sgd(0.0))
                .weightInit(WeightInit.DISTRIBUTION).dist(new NormalDistribution(0, 5))
                .biasInit(0.2)
                .list()
                .layer(new DenseLayer.Builder().nIn(12).nOut(10)
                        .constrainWeights(lc).constrainBias(lc).build())
                .layer(new OutputLayer.Builder().lossFunction(LossFunctions.LossFunction.MSE).nIn(10).nOut(8).build())
                .build();

        MultiLayerNetwork net = new MultiLayerNetwork(conf);
        net.init();

        LayerConstraint exp = lc.clone();
        assertEquals(exp.toString(), net.getLayer(0).conf().getLayer().getConstraints().get(0).toString());

        INDArray input = Nd4j.rand(3, 12);
        INDArray labels = Nd4j.rand(3, 8);
        net.fit(input, labels);

        // same constraint set separately on weights and bias
        INDArray w0 = net.getParam("0_W");
        INDArray b0 = net.getParam("0_b");
        if (lc instanceof MaxNormConstraint) {
            assertTrue(w0.norm2(1).maxNumber().doubleValue() <= 0.5);
            assertTrue(b0.norm2(1).maxNumber().doubleValue() <= 0.5);
        } else if (lc instanceof MinMaxNormConstraint) {
            assertTrue(w0.norm2(1).minNumber().doubleValue() >= 0.3);
            assertTrue(w0.norm2(1).maxNumber().doubleValue() <= 0.4);
            assertTrue(b0.norm2(1).minNumber().doubleValue() >= 0.3);
            assertTrue(b0.norm2(1).maxNumber().doubleValue() <= 0.4);
        } else if (lc instanceof NonNegativeConstraint) {
            assertTrue(w0.minNumber().doubleValue() >= 0.0);
            assertTrue(b0.minNumber().doubleValue() >= 0.0);
        } else if (lc instanceof UnitNormConstraint) {
            assertEquals(w0.norm2(1).minNumber().doubleValue(), 1.0, 1e-6);
            assertEquals(w0.norm2(1).maxNumber().doubleValue(), 1.0, 1e-6);
            assertEquals(b0.norm2(1).minNumber().doubleValue(), 1.0, 1e-6);
            assertEquals(b0.norm2(1).maxNumber().doubleValue(), 1.0, 1e-6);
        }

        TestUtils.testModelSerialization(net);
    }
}
Example 8: testModelConstraints
import org.deeplearning4j.nn.multilayer.MultiLayerNetwork; // import the package/class the method depends on
@Test
public void testModelConstraints() throws Exception {
    LayerConstraint[] constraints = new LayerConstraint[]{
            new MaxNormConstraint(0.5, 1),
            new MinMaxNormConstraint(0.3, 0.4, 1.0, 1),
            new NonNegativeConstraint(),
            new UnitNormConstraint(1)
    };

    for (LayerConstraint lc : constraints) {
        MultiLayerConfiguration conf = new NeuralNetConfiguration.Builder()
                .constrainWeights(lc) // set globally, so it applies to every layer
                .updater(new Sgd(0.0))
                .weightInit(WeightInit.DISTRIBUTION).dist(new NormalDistribution(0, 5))
                .biasInit(1)
                .list()
                .layer(new DenseLayer.Builder().nIn(12).nOut(10).build())
                .layer(new OutputLayer.Builder().lossFunction(LossFunctions.LossFunction.MSE).nIn(10).nOut(8).build())
                .build();

        MultiLayerNetwork net = new MultiLayerNetwork(conf);
        net.init();

        LayerConstraint exp = lc.clone();
        assertEquals(exp.toString(), net.getLayer(0).conf().getLayer().getConstraints().get(0).toString());
        assertEquals(exp.toString(), net.getLayer(1).conf().getLayer().getConstraints().get(0).toString());

        INDArray input = Nd4j.rand(3, 12);
        INDArray labels = Nd4j.rand(3, 8);
        net.fit(input, labels);

        INDArray w0 = net.getParam("0_W");
        INDArray w1 = net.getParam("1_W");
        if (lc instanceof MaxNormConstraint) {
            assertTrue(w0.norm2(1).maxNumber().doubleValue() <= 0.5);
            assertTrue(w1.norm2(1).maxNumber().doubleValue() <= 0.5);
        } else if (lc instanceof MinMaxNormConstraint) {
            assertTrue(w0.norm2(1).minNumber().doubleValue() >= 0.3);
            assertTrue(w0.norm2(1).maxNumber().doubleValue() <= 0.4);
            assertTrue(w1.norm2(1).minNumber().doubleValue() >= 0.3);
            assertTrue(w1.norm2(1).maxNumber().doubleValue() <= 0.4);
        } else if (lc instanceof NonNegativeConstraint) {
            assertTrue(w0.minNumber().doubleValue() >= 0.0);
        } else if (lc instanceof UnitNormConstraint) {
            assertEquals(w0.norm2(1).minNumber().doubleValue(), 1.0, 1e-6);
            assertEquals(w0.norm2(1).maxNumber().doubleValue(), 1.0, 1e-6);
            assertEquals(w1.norm2(1).minNumber().doubleValue(), 1.0, 1e-6);
            assertEquals(w1.norm2(1).maxNumber().doubleValue(), 1.0, 1e-6);
        }

        TestUtils.testModelSerialization(net);
    }
}
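A closing aside, not drawn from the collected examples: alongside getParam(String), MultiLayerNetwork also exposes paramTable(), which returns all parameters in a Map keyed by the same "<layerIndex>_<paramName>" scheme; iterating it is a convenient way to discover which keys a given network accepts. A minimal sketch, assuming net is an initialized MultiLayerNetwork as in the examples above:

// Assumes `net` is an initialized MultiLayerNetwork.
for (java.util.Map.Entry<String, org.nd4j.linalg.api.ndarray.INDArray> e : net.paramTable().entrySet()) {
    // e.g. "0_W -> shape [12, 10]", "0_b -> shape [1, 10]", ...
    System.out.println(e.getKey() + " -> shape " + java.util.Arrays.toString(e.getValue().shape()));
}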