

Java MultiLayerNetwork.params Method Code Examples

This article collects typical usage examples of the Java method org.deeplearning4j.nn.multilayer.MultiLayerNetwork.params. If you are wondering what MultiLayerNetwork.params does, how to call it, or where to find working examples of it, the curated snippets below should help. You can also explore further usage examples of the enclosing class, org.deeplearning4j.nn.multilayer.MultiLayerNetwork.


Below are 8 code examples of the MultiLayerNetwork.params method, sorted by popularity by default.
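Before diving into the examples, here is a minimal self-contained sketch of what MultiLayerNetwork.params does (a sketch assuming deeplearning4j and ND4J are on the classpath; the tiny 4-3-3 architecture is illustrative, not taken from the examples below). params() returns every weight and bias of the network flattened into a single row vector, and setParams() writes such a vector back:

import org.deeplearning4j.nn.conf.NeuralNetConfiguration;
import org.deeplearning4j.nn.conf.layers.DenseLayer;
import org.deeplearning4j.nn.conf.layers.OutputLayer;
import org.deeplearning4j.nn.multilayer.MultiLayerNetwork;
import org.nd4j.linalg.activations.Activation;
import org.nd4j.linalg.api.ndarray.INDArray;
import org.nd4j.linalg.lossfunctions.LossFunctions;

public class ParamsSketch {
    public static void main(String[] args) {
        // A tiny illustrative network: 4 inputs -> 3 hidden units -> 3 outputs.
        MultiLayerNetwork net = new MultiLayerNetwork(new NeuralNetConfiguration.Builder()
                        .list()
                        .layer(0, new DenseLayer.Builder().nIn(4).nOut(3).build())
                        .layer(1, new OutputLayer.Builder(LossFunctions.LossFunction.MCXENT)
                                        .activation(Activation.SOFTMAX).nIn(3).nOut(3).build())
                        .build());
        net.init(); // allocates and initializes the flattened parameter array

        // All weights and biases, flattened into one row vector.
        INDArray params = net.params();
        System.out.println("Total parameters: " + params.length());

        // A compatible flat vector can be written back with setParams(...).
        net.setParams(params.dup());
    }
}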

Example 1: testRunIteration

import org.deeplearning4j.nn.multilayer.MultiLayerNetwork; // import the class this method belongs to
@Test
public void testRunIteration() {

    DataSet dataSet = new IrisDataSetIterator(5,5).next();
    List<DataSet> list = dataSet.asList();
    JavaRDD<DataSet> data = sc.parallelize(list);

    SparkDl4jMultiLayer sparkNetCopy = new SparkDl4jMultiLayer(sc,getBasicConf(),new ParameterAveragingTrainingMaster(true,numExecutors(),1,5,1,0));
    MultiLayerNetwork networkCopy = sparkNetCopy.fit(data);

    INDArray expectedParams = networkCopy.params();

    SparkDl4jMultiLayer sparkNet = getBasicNetwork();
    MultiLayerNetwork network = sparkNet.fit(data);
    INDArray actualParams = network.params();

    assertEquals(expectedParams.size(1), actualParams.size(1));
}
 
Developer: PacktPublishing; Project: Deep-Learning-with-Hadoop; Lines: 19; Source: TestSparkMultiLayerParameterAveraging.java

Example 2: getFinalResult

import org.deeplearning4j.nn.multilayer.MultiLayerNetwork; // import the class this method belongs to
@Override
public ParameterAveragingTrainingResult getFinalResult(MultiLayerNetwork network) {
    INDArray updaterState = null;
    if (saveUpdater) {
        Updater u = network.getUpdater();
        if (u != null)
            updaterState = u.getStateViewArray();
    }

    Nd4j.getExecutioner().commit();

    Collection<StorageMetaData> storageMetaData = null;
    Collection<Persistable> listenerStaticInfo = null;
    Collection<Persistable> listenerUpdates = null;
    if (listenerRouterProvider != null) {
        StatsStorageRouter r = listenerRouterProvider.getRouter();
        if (r instanceof VanillaStatsStorageRouter) { //TODO this is ugly... need to find a better solution
            VanillaStatsStorageRouter ssr = (VanillaStatsStorageRouter) r;
            storageMetaData = ssr.getStorageMetaData();
            listenerStaticInfo = ssr.getStaticInfo();
            listenerUpdates = ssr.getUpdates();
        }
    }
    return new ParameterAveragingTrainingResult(network.params(), updaterState, network.score(), storageMetaData,
                    listenerStaticInfo, listenerUpdates);
}
 
Developer: deeplearning4j; Project: deeplearning4j; Lines: 27; Source: ParameterAveragingTrainingWorker.java
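Example 2 above is the worker side of parameter averaging: each worker returns its network.params() wrapped in a ParameterAveragingTrainingResult. As a hedged illustration (this is our sketch of the averaging step, not the actual TrainingMaster implementation), the master-side element-wise mean of the returned flat vectors can be computed with ND4J like this:

import java.util.List;
import org.nd4j.linalg.api.ndarray.INDArray;

// Illustrative sketch only: averaging the flat parameter vectors
// (network.params()) collected from several workers.
public class ParameterAveragingSketch {
    public static INDArray averageParams(List<INDArray> workerParams) {
        INDArray sum = workerParams.get(0).dup(); // copy, so no worker's array is mutated
        for (int i = 1; i < workerParams.size(); i++) {
            sum.addi(workerParams.get(i));        // in-place accumulation
        }
        return sum.divi(workerParams.size());     // element-wise mean
    }
}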

Example 3: testRunIteration

import org.deeplearning4j.nn.multilayer.MultiLayerNetwork; // import the class this method belongs to
@Test
public void testRunIteration() {

    DataSet dataSet = new IrisDataSetIterator(5, 5).next();
    List<DataSet> list = dataSet.asList();
    JavaRDD<DataSet> data = sc.parallelize(list);

    SparkDl4jMultiLayer sparkNetCopy = new SparkDl4jMultiLayer(sc, getBasicConf(),
                    new ParameterAveragingTrainingMaster(true, numExecutors(), 1, 5, 1, 0));
    MultiLayerNetwork networkCopy = sparkNetCopy.fit(data);

    INDArray expectedParams = networkCopy.params();

    SparkDl4jMultiLayer sparkNet = getBasicNetwork();
    MultiLayerNetwork network = sparkNet.fit(data);
    INDArray actualParams = network.params();

    assertEquals(expectedParams.size(1), actualParams.size(1));
}
 
Developer: deeplearning4j; Project: deeplearning4j; Lines: 20; Source: TestSparkMultiLayerParameterAveraging.java

Example 4: testIrisFit

import org.deeplearning4j.nn.multilayer.MultiLayerNetwork; // import the class this method belongs to
@Test
public void testIrisFit() {

    ComputationGraphConfiguration configuration = getIrisGraphConfiguration();
    ComputationGraph graph = new ComputationGraph(configuration);
    graph.init();

    MultiLayerConfiguration mlnConfig = getIrisMLNConfiguration();
    MultiLayerNetwork net = new MultiLayerNetwork(mlnConfig);
    net.init();

    Nd4j.getRandom().setSeed(12345);
    int nParams = getNumParams();
    INDArray params = Nd4j.rand(1, nParams);

    graph.setParams(params.dup());
    net.setParams(params.dup());


    DataSetIterator iris = new IrisDataSetIterator(75, 150);

    net.fit(iris);
    iris.reset();

    graph.fit(iris);

    //Check that parameters are equal for both models after fitting:
    INDArray paramsMLN = net.params();
    INDArray paramsGraph = graph.params();

    assertNotEquals(params, paramsGraph);
    assertEquals(paramsMLN, paramsGraph);
}
 
Developer: deeplearning4j; Project: deeplearning4j; Lines: 34; Source: TestComputationGraphNetwork.java

Example 5: testMLN

import org.deeplearning4j.nn.multilayer.MultiLayerNetwork; // import the class this method belongs to
@Test
public void testMLN() {
    DataSet randomData = new DataSet(Nd4j.rand(10, 4), Nd4j.rand(10, 3));

    NeuralNetConfiguration.Builder overallConf = new NeuralNetConfiguration.Builder().updater(new Sgd(0.1))
                    .optimizationAlgo(OptimizationAlgorithm.STOCHASTIC_GRADIENT_DESCENT)
                    .activation(Activation.IDENTITY);

    MultiLayerNetwork modelToFineTune = new MultiLayerNetwork(overallConf.clone().list()
                    .layer(0, new DenseLayer.Builder().nIn(4).nOut(3).build())
                    .layer(1, new DenseLayer.Builder().nIn(3).nOut(2).build())
                    .layer(2, new DenseLayer.Builder().nIn(2).nOut(3).build())
                    .layer(3, new org.deeplearning4j.nn.conf.layers.OutputLayer.Builder(
                                    LossFunctions.LossFunction.MCXENT).activation(Activation.SOFTMAX).nIn(3).nOut(3)
                                                    .build())
                    .build());

    modelToFineTune.init();
    MultiLayerNetwork modelNow = new TransferLearning.Builder(modelToFineTune).setFeatureExtractor(1).build();
    List<INDArray> ff = modelToFineTune.feedForwardToLayer(2, randomData.getFeatures(), false);
    INDArray asFrozenFeatures = ff.get(2);

    TransferLearningHelper helper = new TransferLearningHelper(modelToFineTune, 1);

    INDArray paramsLastTwoLayers =
                    Nd4j.hstack(modelToFineTune.getLayer(2).params(), modelToFineTune.getLayer(3).params());
    MultiLayerNetwork notFrozen = new MultiLayerNetwork(overallConf.clone().list()
                    .layer(0, new DenseLayer.Builder().nIn(2).nOut(3).build())
                    .layer(1, new org.deeplearning4j.nn.conf.layers.OutputLayer.Builder(
                                    LossFunctions.LossFunction.MCXENT).activation(Activation.SOFTMAX).nIn(3).nOut(3)
                                                    .build())
                    .build(), paramsLastTwoLayers);

    assertEquals(asFrozenFeatures, helper.featurize(randomData).getFeatures());
    assertEquals(randomData.getLabels(), helper.featurize(randomData).getLabels());

    for (int i = 0; i < 5; i++) {
        notFrozen.fit(new DataSet(asFrozenFeatures, randomData.getLabels()));
        helper.fitFeaturized(helper.featurize(randomData));
        modelNow.fit(randomData);
    }

    INDArray expected = Nd4j.hstack(modelToFineTune.getLayer(0).params(), modelToFineTune.getLayer(1).params(),
                    notFrozen.params());
    INDArray act = modelNow.params();
    assertEquals(expected, act);
}
 
Developer: deeplearning4j; Project: deeplearning4j; Lines: 48; Source: TransferLearningHelperTest.java

Example 6: simpleFineTune

import org.deeplearning4j.nn.multilayer.MultiLayerNetwork; // import the class this method belongs to
@Test
public void simpleFineTune() {

    long rng = 12345L;
    DataSet randomData = new DataSet(Nd4j.rand(10, 4), Nd4j.rand(10, 3));
    //original conf
    NeuralNetConfiguration.Builder confToChange =
                    new NeuralNetConfiguration.Builder().seed(rng).optimizationAlgo(OptimizationAlgorithm.LBFGS)
                                    .updater(new Nesterovs(0.01, 0.99));

    MultiLayerNetwork modelToFineTune = new MultiLayerNetwork(confToChange.list()
                    .layer(0, new DenseLayer.Builder().nIn(4).nOut(3).build())
                    .layer(1, new org.deeplearning4j.nn.conf.layers.OutputLayer.Builder(
                                    LossFunctions.LossFunction.MCXENT).activation(Activation.SOFTMAX).nIn(3).nOut(3)
                                                    .build())
                    .build());
    modelToFineTune.init();

    //model after applying changes with transfer learning
    MultiLayerNetwork modelNow =
            new TransferLearning.Builder(modelToFineTune)
                    .fineTuneConfiguration(new FineTuneConfiguration.Builder().seed(rng)
                            .optimizationAlgo(OptimizationAlgorithm.STOCHASTIC_GRADIENT_DESCENT)
                            .updater(new RmsProp(0.5)) //Intent: override both weight and bias LR, unless bias LR is manually set also
                            .l2(0.4).build())
                    .build();

    for (org.deeplearning4j.nn.api.Layer l : modelNow.getLayers()) {
        BaseLayer bl = ((BaseLayer) l.conf().getLayer());
        assertEquals(new RmsProp(0.5), bl.getIUpdater());
    }


    NeuralNetConfiguration.Builder confSet = new NeuralNetConfiguration.Builder().seed(rng)
                    .optimizationAlgo(OptimizationAlgorithm.STOCHASTIC_GRADIENT_DESCENT)
                    .updater(new RmsProp(0.5)).l2(0.4);

    MultiLayerNetwork expectedModel = new MultiLayerNetwork(confSet.list()
                    .layer(0, new DenseLayer.Builder().nIn(4).nOut(3).build())
                    .layer(1, new org.deeplearning4j.nn.conf.layers.OutputLayer.Builder(
                                    LossFunctions.LossFunction.MCXENT).activation(Activation.SOFTMAX).nIn(3).nOut(3)
                                                    .build())
                    .build());
    expectedModel.init();
    expectedModel.setParams(modelToFineTune.params().dup());

    assertEquals(expectedModel.params(), modelNow.params());

    //Check json
    MultiLayerConfiguration expectedConf = expectedModel.getLayerWiseConfigurations();
    assertEquals(expectedConf.toJson(), modelNow.getLayerWiseConfigurations().toJson());

    //Check params after fit
    modelNow.fit(randomData);
    expectedModel.fit(randomData);

    assertEquals(modelNow.score(), expectedModel.score(), 1e-6);
    INDArray pExp = expectedModel.params();
    INDArray pNow = modelNow.params();
    assertEquals(pExp, pNow);
}
 
Developer: deeplearning4j; Project: deeplearning4j; Lines: 62; Source: TransferLearningMLNTest.java

Example 7: testFrozen

import org.deeplearning4j.nn.multilayer.MultiLayerNetwork; // import the class this method belongs to
@Test
public void testFrozen() {
    DataSet randomData = new DataSet(Nd4j.rand(10, 4), Nd4j.rand(10, 3));

    NeuralNetConfiguration.Builder overallConf = new NeuralNetConfiguration.Builder().updater(new Sgd(0.1))
                    .activation(Activation.IDENTITY);

    FineTuneConfiguration finetune = new FineTuneConfiguration.Builder().updater(new Sgd(0.1)).build();

    MultiLayerNetwork modelToFineTune = new MultiLayerNetwork(overallConf.clone().list()
                    .layer(0, new DenseLayer.Builder().nIn(4).nOut(3).build())
                    .layer(1, new DenseLayer.Builder().nIn(3).nOut(2).build())
                    .layer(2, new DenseLayer.Builder().nIn(2).nOut(3).build())
                    .layer(3, new org.deeplearning4j.nn.conf.layers.OutputLayer.Builder(
                                    LossFunctions.LossFunction.MCXENT).activation(Activation.SOFTMAX).nIn(3).nOut(3)
                                                    .build())
                    .build());

    modelToFineTune.init();
    List<INDArray> ff = modelToFineTune.feedForwardToLayer(2, randomData.getFeatures(), false);
    INDArray asFrozenFeatures = ff.get(2);

    MultiLayerNetwork modelNow = new TransferLearning.Builder(modelToFineTune).fineTuneConfiguration(finetune)
                    .setFeatureExtractor(1).build();

    INDArray paramsLastTwoLayers =
                    Nd4j.hstack(modelToFineTune.getLayer(2).params(), modelToFineTune.getLayer(3).params());
    MultiLayerNetwork notFrozen = new MultiLayerNetwork(overallConf.clone().list()
                    .layer(0, new DenseLayer.Builder().nIn(2).nOut(3).build())
                    .layer(1, new org.deeplearning4j.nn.conf.layers.OutputLayer.Builder(
                                    LossFunctions.LossFunction.MCXENT).activation(Activation.SOFTMAX).nIn(3).nOut(3)
                                                    .build())
                    .build(), paramsLastTwoLayers);

    //        assertEquals(modelNow.getLayer(2).conf(), notFrozen.getLayer(0).conf());  //Equal, other than names
    //        assertEquals(modelNow.getLayer(3).conf(), notFrozen.getLayer(1).conf());  //Equal, other than names

    //Check: forward pass
    INDArray outNow = modelNow.output(randomData.getFeatures());
    INDArray outNotFrozen = notFrozen.output(asFrozenFeatures);
    assertEquals(outNow, outNotFrozen);

    for (int i = 0; i < 5; i++) {
        notFrozen.fit(new DataSet(asFrozenFeatures, randomData.getLabels()));
        modelNow.fit(randomData);
    }

    INDArray expected = Nd4j.hstack(modelToFineTune.getLayer(0).params(), modelToFineTune.getLayer(1).params(),
                    notFrozen.params());
    INDArray act = modelNow.params();
    assertEquals(expected, act);
}
 
Developer: deeplearning4j; Project: deeplearning4j; Lines: 53; Source: FrozenLayerTest.java

Example 8: testGetSetParams

import org.deeplearning4j.nn.multilayer.MultiLayerNetwork; // import the class this method belongs to
@Test
public void testGetSetParams() {

    MultiLayerNetwork net = getCNNMLNConfig(true, false);

    INDArray paramsOrig = net.params().dup();
    net.setParams(paramsOrig);

    INDArray params2 = net.params();

    assertEquals(paramsOrig, params2);
}
 
Developer: deeplearning4j; Project: deeplearning4j; Lines: 13; Source: ConvolutionLayerTest.java
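One detail that makes Example 8 meaningful: in most DL4J versions, params() returns the network's internal flattened parameter view rather than a defensive copy, so in-place modifications to the returned INDArray change the network's weights. That is why tests that want a stable snapshot call dup() first. A short sketch (our illustration; `someData` is a hypothetical DataSet, and the view behavior may vary across versions):

// Assumes `net` is an initialized MultiLayerNetwork and `someData`
// is a hypothetical DataSet used only for illustration.
INDArray view = net.params();    // typically the live flattened view
INDArray snapshot = view.dup();  // detached copy, safe to keep for later comparison
net.fit(someData);               // training updates the underlying array in place...
// ...so `view` now reflects the new weights while `snapshot` is unchanged.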


Note: The org.deeplearning4j.nn.multilayer.MultiLayerNetwork.params method examples in this article were compiled by 纯净天空 from GitHub, MSDocs, and other open-source code and documentation platforms. The snippets are drawn from open-source projects contributed by their respective developers, and copyright remains with the original authors. For distribution and use, please refer to the corresponding project's License. Do not reproduce without permission.