

Java MultiLayerNetwork.setListeners Method Code Examples

This article collects typical usage examples of the Java method org.deeplearning4j.nn.multilayer.MultiLayerNetwork.setListeners. If you are wondering how MultiLayerNetwork.setListeners is used in practice, or are looking for concrete examples of calling it, the curated code samples here may help. You can also explore further usage examples of the enclosing class, org.deeplearning4j.nn.multilayer.MultiLayerNetwork.


Below are 15 code examples of the MultiLayerNetwork.setListeners method, sorted by popularity by default.
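A minimal sketch of the typical call pattern is shown here before the individual examples (assuming an already-built MultiLayerConfiguration named conf; the listener class is the same ScoreIterationListener used in several of the examples that follow):

import org.deeplearning4j.nn.multilayer.MultiLayerNetwork;
import org.deeplearning4j.optimize.listeners.ScoreIterationListener;

MultiLayerNetwork net = new MultiLayerNetwork(conf);
net.init();
//Print the training score (loss) every 10 iterations
net.setListeners(new ScoreIterationListener(10));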

Example 1: createNetwork

import org.deeplearning4j.nn.multilayer.MultiLayerNetwork; //import the package/class this method depends on
private MultiLayerNetwork createNetwork(int numLabels) {
  MultiLayerNetwork network = null;
  switch (modelType) {
    case "LeNet":
      network = lenetModel(numLabels);
      break;
    case "AlexNet":
      network = alexnetModel(numLabels);
      break;
    case "custom":
      network = customModel(numLabels);
      break;
    default:
      throw new InvalidInputTypeException("Incorrect model provided.");
  }
  network.init();
  network.setListeners(new ScoreIterationListener(listenerFreq));
  return network;
}
 
Developer: MyRobotLab, Project: myrobotlab, Lines: 20, Source: Deeplearning4j.java

Example 2: test

import org.deeplearning4j.nn.multilayer.MultiLayerNetwork; //import the package/class this method depends on
@Test
public void test() throws IOException {

    MultiLayerConfiguration conf = new NeuralNetConfiguration.Builder().list()
                    .layer(0, new DenseLayer.Builder().nIn(10).nOut(10).build())
                    .layer(1, new OutputLayer.Builder().nIn(10).nOut(10).build()).build();

    MultiLayerNetwork net = new MultiLayerNetwork(conf);
    net.init();


    MultiLayerNetwork net2 =
                    new TransferLearning.Builder(net)
                                    .fineTuneConfiguration(
                                                    new FineTuneConfiguration.Builder().updater(new Sgd(0.01)).build())
                                    .setFeatureExtractor(0).build();

    File f = Files.createTempFile("dl4jTestTransferStatsCollection", "bin").toFile();
    f.delete();
    net2.setListeners(new StatsListener(new FileStatsStorage(f)));

    //Previously: failed on frozen layers
    net2.fit(new DataSet(Nd4j.rand(8, 10), Nd4j.rand(8, 10)));

    f.deleteOnExit();
}
 
Developer: deeplearning4j, Project: deeplearning4j, Lines: 27, Source: TestTransferStatsCollection.java
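As a follow-up to Example 2: the statistics written to the FileStatsStorage file can later be visualized by attaching that storage to the DL4J training UI, in the same way Example 8 below attaches an InMemoryStatsStorage (a sketch, assuming the deeplearning4j-ui module is on the classpath and f is the file from Example 2):

UIServer uiServer = UIServer.getInstance();
//Attach the saved stats file so its recorded training history is served by the UI
uiServer.attach(new FileStatsStorage(f));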

Example 3: testListenersViaModel

import org.deeplearning4j.nn.multilayer.MultiLayerNetwork; //import the package/class this method depends on
@Test
public void testListenersViaModel() {
    TestListener.clearCounts();

    MultiLayerConfiguration.Builder builder = new NeuralNetConfiguration.Builder().list().layer(0,
                    new OutputLayer.Builder(LossFunctions.LossFunction.MSE).nIn(10).nOut(10)
                                    .activation(Activation.TANH).build());

    MultiLayerConfiguration conf = builder.build();
    MultiLayerNetwork model = new MultiLayerNetwork(conf);
    model.init();

    StatsStorage ss = new InMemoryStatsStorage();
    model.setListeners(new TestListener(), new StatsListener(ss));

    testListenersForModel(model, null);

    assertEquals(1, ss.listSessionIDs().size());
    assertEquals(2, ss.listWorkerIDsForSession(ss.listSessionIDs().get(0)).size());
}
 
Developer: deeplearning4j, Project: deeplearning4j, Lines: 21, Source: TestListeners.java

Example 4: testIterationListener

import org.deeplearning4j.nn.multilayer.MultiLayerNetwork; //import the package/class this method depends on
@Test
public void testIterationListener() {
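    //model1 sets listeners after init(), model2 before init(); the assertions below check that each layer ends up with exactly one listener either way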
    MultiLayerNetwork model1 = new MultiLayerNetwork(getConf());
    model1.init();
    model1.setListeners(Collections.singletonList((IterationListener) new ScoreIterationListener(1)));

    MultiLayerNetwork model2 = new MultiLayerNetwork(getConf());
    model2.setListeners(Collections.singletonList((IterationListener) new ScoreIterationListener(1)));
    model2.init();

    Layer[] l1 = model1.getLayers();
    for (int i = 0; i < l1.length; i++)
        assertTrue(l1[i].getListeners() != null && l1[i].getListeners().size() == 1);

    Layer[] l2 = model2.getLayers();
    for (int i = 0; i < l2.length; i++)
        assertTrue(l2[i].getListeners() != null && l2[i].getListeners().size() == 1);
}
 
Developer: deeplearning4j, Project: deeplearning4j, Lines: 19, Source: MultiLayerNeuralNetConfigurationTest.java

Example 5: main

import org.deeplearning4j.nn.multilayer.MultiLayerNetwork; //import the package/class this method depends on
public static void main(String[] args){

        //Generate the training data
        DataSetIterator iterator = getTrainingData(batchSize,rng);

        //Create the network
        int numInput = 2;
        int numOutputs = 1;
        int nHidden = 10;
        MultiLayerNetwork net = new MultiLayerNetwork(new NeuralNetConfiguration.Builder()
                .seed(seed)
                .iterations(iterations)
                .optimizationAlgo(OptimizationAlgorithm.STOCHASTIC_GRADIENT_DESCENT)
                .learningRate(learningRate)
                .weightInit(WeightInit.XAVIER)
                .updater(Updater.NESTEROVS).momentum(0.9)
                .list()
                .layer(0, new DenseLayer.Builder().nIn(numInput).nOut(nHidden)
                        .activation(Activation.TANH)
                        .build())
                .layer(1, new OutputLayer.Builder(LossFunctions.LossFunction.MSE)
                        .activation(Activation.IDENTITY)
                        .nIn(nHidden).nOut(numOutputs).build())
                .pretrain(false).backprop(true).build()
        );
        net.init();
        net.setListeners(new ScoreIterationListener(1));


        //Train the network on the full data set
        for( int i=0; i<nEpochs; i++ ){
            iterator.reset();
            net.fit(iterator);
        }
        // Test the addition of 2 numbers (Try different numbers here)
        final INDArray input = Nd4j.create(new double[] { 0.111111, 0.3333333333333 }, new int[] { 1, 2 });
        INDArray out = net.output(input, false);
        System.out.println(out);

    }
 
Developer: IsaacChanghau, Project: NeuralNetworksLite, Lines: 41, Source: RegressionSum.java

Example 6: main

import org.deeplearning4j.nn.multilayer.MultiLayerNetwork; //import the package/class this method depends on
public static void main(final String[] args){

        //Switch these two options to do different functions with different networks
        final MathFunction fn = new SinXDivXMathFunction();
        final MultiLayerConfiguration conf = getDeepDenseLayerNetworkConfiguration();

        //Generate the training data
        final INDArray x = Nd4j.linspace(-10,10,nSamples).reshape(nSamples, 1);
        final DataSetIterator iterator = getTrainingData(x,fn,batchSize,rng);

        //Create the network
        final MultiLayerNetwork net = new MultiLayerNetwork(conf);
        net.init();
        net.setListeners(new ScoreIterationListener(1));


        //Train the network on the full data set, and record its predictions periodically for plotting
        final INDArray[] networkPredictions = new INDArray[nEpochs/ plotFrequency];
        for( int i=0; i<nEpochs; i++ ){
            iterator.reset();
            net.fit(iterator);
            if((i+1) % plotFrequency == 0) networkPredictions[i/ plotFrequency] = net.output(x, false);
        }

        //Plot the target data and the network predictions
        plot(fn,x,fn.getFunctionValues(x),networkPredictions);
    }
 
Developer: IsaacChanghau, Project: NeuralNetworksLite, Lines: 28, Source: RegressionMathFunctions.java

Example 7: build

import org.deeplearning4j.nn.multilayer.MultiLayerNetwork; //import the package/class this method depends on
public Model build() {
    model = new MultiLayerNetwork(getConfiguration());
    model.init();
    model.setListeners(
            Arrays.asList((IterationListener) new ScoreIterationListener(parameters.getListenerFrequency())));
    return this;
}
 
Developer: amrabed, Project: DL4J, Lines: 9, Source: Model.java

Example 8: testUIMultipleSessions

import org.deeplearning4j.nn.multilayer.MultiLayerNetwork; //import the package/class this method depends on
@Test
@Ignore
public void testUIMultipleSessions() throws Exception {

    for (int session = 0; session < 3; session++) {

        StatsStorage ss = new InMemoryStatsStorage();

        UIServer uiServer = UIServer.getInstance();
        uiServer.attach(ss);

        MultiLayerConfiguration conf = new NeuralNetConfiguration.Builder()
                        .optimizationAlgo(OptimizationAlgorithm.STOCHASTIC_GRADIENT_DESCENT).list()
                        .layer(0, new DenseLayer.Builder().activation(Activation.TANH).nIn(4).nOut(4).build())
                        .layer(1, new OutputLayer.Builder().lossFunction(LossFunctions.LossFunction.MCXENT)
                                        .activation(Activation.SOFTMAX).nIn(4).nOut(3).build())
                        .pretrain(false).backprop(true).build();

        MultiLayerNetwork net = new MultiLayerNetwork(conf);
        net.init();
        net.setListeners(new StatsListener(ss), new ScoreIterationListener(1));

        DataSetIterator iter = new IrisDataSetIterator(150, 150);

        for (int i = 0; i < 20; i++) {
            net.fit(iter);
            Thread.sleep(100);
        }
    }


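    //Keep the JVM alive so the UI server remains reachable for manual inspection (one reason this test is @Ignore'd)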
    Thread.sleep(1000000);
}
 
Developer: deeplearning4j, Project: deeplearning4j, Lines: 34, Source: TestPlayUI.java

Example 9: testBadTuning

import org.deeplearning4j.nn.multilayer.MultiLayerNetwork; //import the package/class this method depends on
@Test
public void testBadTuning() {
    //Test poor tuning (high LR): should terminate on MaxScoreIterationTerminationCondition

    Nd4j.getRandom().setSeed(12345);
    MultiLayerConfiguration conf = new NeuralNetConfiguration.Builder().seed(12345)
                    .optimizationAlgo(OptimizationAlgorithm.STOCHASTIC_GRADIENT_DESCENT)
                    .updater(new Sgd(1.0)) //Intentionally huge LR
                    .weightInit(WeightInit.XAVIER).list()
                    .layer(0, new OutputLayer.Builder().nIn(4).nOut(3).activation(Activation.SOFTMAX)
                                    .lossFunction(LossFunctions.LossFunction.MCXENT).build())
                    .pretrain(false).backprop(true).build();
    MultiLayerNetwork net = new MultiLayerNetwork(conf);
    net.setListeners(new ScoreIterationListener(1));

    DataSetIterator irisIter = new IrisDataSetIterator(10, 150);
    EarlyStoppingModelSaver<MultiLayerNetwork> saver = new InMemoryModelSaver<>();
    EarlyStoppingConfiguration<MultiLayerNetwork> esConf =
                    new EarlyStoppingConfiguration.Builder<MultiLayerNetwork>()
                                    .epochTerminationConditions(new MaxEpochsTerminationCondition(5000))
                                    .iterationTerminationConditions(
                                                    new MaxTimeIterationTerminationCondition(1, TimeUnit.MINUTES),
                                                    new MaxScoreIterationTerminationCondition(10)) //Initial score is ~2.5
                                    .scoreCalculator(new DataSetLossCalculator(irisIter, true)).modelSaver(saver)
                                    .build();

    IEarlyStoppingTrainer<MultiLayerNetwork> trainer =
                    new EarlyStoppingParallelTrainer<>(esConf, net, irisIter, null, 2, 2, 1);
    EarlyStoppingResult result = trainer.fit();

    assertTrue(result.getTotalEpochs() < 5);
    assertEquals(EarlyStoppingResult.TerminationReason.IterationTerminationCondition,
                    result.getTerminationReason());
    String expDetails = new MaxScoreIterationTerminationCondition(10).toString();
    assertEquals(expDetails, result.getTerminationDetails());

    assertTrue(result.getBestModelEpoch() <= 0);
    assertNotNull(result.getBestModel());
}
 
Developer: deeplearning4j, Project: deeplearning4j, Lines: 40, Source: TestParallelEarlyStopping.java

Example 10: testEarlyStoppingGetBestModel

import org.deeplearning4j.nn.multilayer.MultiLayerNetwork; //import the package/class this method depends on
@Test
public void testEarlyStoppingGetBestModel() {
    MultiLayerConfiguration conf = new NeuralNetConfiguration.Builder()
                    .optimizationAlgo(OptimizationAlgorithm.STOCHASTIC_GRADIENT_DESCENT)
                    .updater(new Sgd(0.001)).weightInit(WeightInit.XAVIER).list()
                    .layer(0, new OutputLayer.Builder().nIn(4).nOut(3)
                                    .lossFunction(LossFunctions.LossFunction.MCXENT).build())
                    .pretrain(false).backprop(true).build();
    MultiLayerNetwork net = new MultiLayerNetwork(conf);
    net.setListeners(new ScoreIterationListener(1));

    DataSetIterator irisIter = new IrisDataSetIterator(150, 150);
    MultipleEpochsIterator mIter = new MultipleEpochsIterator(10, irisIter);

    EarlyStoppingModelSaver<MultiLayerNetwork> saver = new InMemoryModelSaver<>();
    EarlyStoppingConfiguration<MultiLayerNetwork> esConf =
                    new EarlyStoppingConfiguration.Builder<MultiLayerNetwork>()
                                    .epochTerminationConditions(new MaxEpochsTerminationCondition(5))
                                    .iterationTerminationConditions(
                                                    new MaxTimeIterationTerminationCondition(1, TimeUnit.MINUTES))
                                    .scoreCalculator(new DataSetLossCalculator(irisIter, true)).modelSaver(saver)
                                    .build();

    IEarlyStoppingTrainer<MultiLayerNetwork> trainer = new EarlyStoppingTrainer(esConf, net, mIter);

    EarlyStoppingResult<MultiLayerNetwork> result = trainer.fit();
    System.out.println(result);

    MultiLayerNetwork mln = result.getBestModel();

    assertEquals(net.getnLayers(), mln.getnLayers());
    assertEquals(net.conf().getOptimizationAlgo(), mln.conf().getOptimizationAlgo());
    BaseLayer bl = (BaseLayer) net.conf().getLayer();
    assertEquals(bl.getActivationFn().toString(), ((BaseLayer) mln.conf().getLayer()).getActivationFn().toString());
    assertEquals(bl.getIUpdater(), ((BaseLayer) mln.conf().getLayer()).getIUpdater());
}
 
Developer: deeplearning4j, Project: deeplearning4j, Lines: 37, Source: TestEarlyStopping.java

Example 11: testListeners

import org.deeplearning4j.nn.multilayer.MultiLayerNetwork; //import the package/class this method depends on
@Test
public void testListeners() {
    MultiLayerConfiguration conf = new NeuralNetConfiguration.Builder()
                    .optimizationAlgo(OptimizationAlgorithm.STOCHASTIC_GRADIENT_DESCENT)
                    .updater(new Sgd(0.001)).weightInit(WeightInit.XAVIER).list()
                    .layer(0, new OutputLayer.Builder().nIn(4).nOut(3)
                                    .lossFunction(LossFunctions.LossFunction.MCXENT).build())
                    .pretrain(false).backprop(true).build();
    MultiLayerNetwork net = new MultiLayerNetwork(conf);
    net.setListeners(new ScoreIterationListener(1));

    DataSetIterator irisIter = new IrisDataSetIterator(150, 150);
    EarlyStoppingModelSaver<MultiLayerNetwork> saver = new InMemoryModelSaver<>();
    EarlyStoppingConfiguration<MultiLayerNetwork> esConf =
                    new EarlyStoppingConfiguration.Builder<MultiLayerNetwork>()
                                    .epochTerminationConditions(new MaxEpochsTerminationCondition(5))
                                    .iterationTerminationConditions(
                                                    new MaxTimeIterationTerminationCondition(1, TimeUnit.MINUTES))
                                    .scoreCalculator(new DataSetLossCalculator(irisIter, true)).modelSaver(saver)
                                    .build();

    LoggingEarlyStoppingListener listener = new LoggingEarlyStoppingListener();

    IEarlyStoppingTrainer trainer = new EarlyStoppingTrainer(esConf, net, irisIter, listener);

    trainer.fit();

    assertEquals(1, listener.onStartCallCount);
    assertEquals(5, listener.onEpochCallCount);
    assertEquals(1, listener.onCompletionCallCount);
}
 
Developer: deeplearning4j, Project: deeplearning4j, Lines: 32, Source: TestEarlyStopping.java

Example 12: testBackTrackLineHessian

import org.deeplearning4j.nn.multilayer.MultiLayerNetwork; //import the package/class this method depends on
@Test(expected = Exception.class)
public void testBackTrackLineHessian() {
    OptimizationAlgorithm optimizer = OptimizationAlgorithm.HESSIAN_FREE;
    DataSet data = irisIter.next();

    MultiLayerNetwork network = new MultiLayerNetwork(getIrisMultiLayerConfig(Activation.RELU, optimizer));
    network.init();
    IterationListener listener = new ScoreIterationListener(1);
    network.setListeners(Collections.singletonList(listener));

    for( int i=0; i<100; i++ ) {
        network.fit(data.getFeatureMatrix(), data.getLabels());
    }
}
 
Developer: deeplearning4j, Project: deeplearning4j, Lines: 15, Source: BackTrackLineSearchTest.java

Example 13: testNoImprovementNEpochsTermination

import org.deeplearning4j.nn.multilayer.MultiLayerNetwork; //import the package/class this method depends on
@Test
public void testNoImprovementNEpochsTermination() {
    //Idea: terminate training if score (test set loss) does not improve for 5 consecutive epochs
    //Simulate this by setting LR = 0.0

    Nd4j.getRandom().setSeed(12345);
    MultiLayerConfiguration conf = new NeuralNetConfiguration.Builder().seed(12345)
                    .optimizationAlgo(OptimizationAlgorithm.STOCHASTIC_GRADIENT_DESCENT)
                    .updater(new Sgd(0.0)).weightInit(WeightInit.XAVIER).list()
                    .layer(0, new OutputLayer.Builder().nIn(4).nOut(3)
                                    .lossFunction(LossFunctions.LossFunction.MCXENT).build())
                    .pretrain(false).backprop(true).build();
    MultiLayerNetwork net = new MultiLayerNetwork(conf);
    net.setListeners(new ScoreIterationListener(1));

    JavaRDD<DataSet> irisData = getIris();

    EarlyStoppingModelSaver<MultiLayerNetwork> saver = new InMemoryModelSaver<>();
    EarlyStoppingConfiguration<MultiLayerNetwork> esConf =
                    new EarlyStoppingConfiguration.Builder<MultiLayerNetwork>()
                                    .epochTerminationConditions(new MaxEpochsTerminationCondition(100),
                                                    new ScoreImprovementEpochTerminationCondition(5))
                                    .iterationTerminationConditions(new MaxScoreIterationTerminationCondition(7.5)) //Initial score is ~2.5
                                    .scoreCalculator(new SparkDataSetLossCalculator(irisData, true, sc.sc()))
                                    .modelSaver(saver).build();

    IEarlyStoppingTrainer<MultiLayerNetwork> trainer = new SparkEarlyStoppingTrainer(getContext().sc(),
                    new ParameterAveragingTrainingMaster(true, 4, 1, 150 / 10, 1, 0), esConf, net, irisData);
    EarlyStoppingResult result = trainer.fit();

    //Expect no score change due to 0 LR -> terminate after 6 total epochs
    assertTrue(result.getTotalEpochs() < 12); //Normally expect 6 epochs exactly; get a little more than that here due to rounding + order of operations
    assertEquals(EarlyStoppingResult.TerminationReason.EpochTerminationCondition, result.getTerminationReason());
    String expDetails = new ScoreImprovementEpochTerminationCondition(5).toString();
    assertEquals(expDetails, result.getTerminationDetails());
}
 
Developer: deeplearning4j, Project: deeplearning4j, Lines: 37, Source: TestEarlyStoppingSpark.java

Example 14: testCNNActivationsFrozen

import org.deeplearning4j.nn.multilayer.MultiLayerNetwork; //import the package/class this method depends on
@Test
public void testCNNActivationsFrozen() throws Exception {

    int nChannels = 1;
    int outputNum = 10;
    int batchSize = 64;
    int nEpochs = 10;
    int seed = 123;

    log.info("Load data....");
    DataSetIterator mnistTrain = new MnistDataSetIterator(batchSize, true, 12345);

    log.info("Build model....");
    MultiLayerConfiguration.Builder builder = new NeuralNetConfiguration.Builder().seed(seed)
            .l2(0.0005)
            .weightInit(WeightInit.XAVIER)
            .updater(new Nesterovs(0.01, 0.9)).list()
            .layer(0, new FrozenLayer(new ConvolutionLayer.Builder(5, 5)
                    //nIn and nOut specify depth. nIn here is the nChannels and nOut is the number of filters to be applied
                    .nIn(nChannels).stride(1, 1).nOut(20).activation(Activation.IDENTITY).build()))
            .layer(1, new FrozenLayer(new SubsamplingLayer.Builder(SubsamplingLayer.PoolingType.MAX).kernelSize(2, 2)
                    .stride(2, 2).build()))
            .layer(2, new FrozenLayer(new DenseLayer.Builder().activation(Activation.RELU).nOut(500).build()))
            .layer(3, new OutputLayer.Builder(LossFunctions.LossFunction.NEGATIVELOGLIKELIHOOD)
                    .nOut(outputNum).activation(Activation.SOFTMAX).build())
            .setInputType(InputType.convolutionalFlat(28, 28, nChannels));

    MultiLayerConfiguration conf = builder.build();
    MultiLayerNetwork model = new MultiLayerNetwork(conf);
    model.init();

    log.info("Train model....");
    model.setListeners(new ConvolutionalIterationListener(1));

    for (int i = 0; i < nEpochs; i++) {
        model.fit(mnistTrain);
    }
}
 
Developer: deeplearning4j, Project: deeplearning4j, Lines: 39, Source: ManualTests.java

Example 15: test

import org.deeplearning4j.nn.multilayer.MultiLayerNetwork; //import the package/class this method depends on
@Test
public void test() {

    IrisDataSetIterator iter = new IrisDataSetIterator(30, 150);

    MultiLayerConfiguration conf = new NeuralNetConfiguration.Builder()
                    .optimizationAlgo(OptimizationAlgorithm.STOCHASTIC_GRADIENT_DESCENT).updater(new Sgd(1e-5))
                    .list().layer(0, new DenseLayer.Builder().nIn(4).nOut(20).build())
                    .layer(1, new DenseLayer.Builder().nIn(20).nOut(30).build())
                    .layer(2, new OutputLayer.Builder(LossFunctions.LossFunction.MCXENT)
                                    .activation(Activation.SOFTMAX).nIn(30).nOut(3).build())
                    .pretrain(false).backprop(true).build();

    MultiLayerNetwork net = new MultiLayerNetwork(conf);
    net.init();

    IterationListener listener = ParamAndGradientIterationListener.builder().outputToFile(true)
                    .file(new File(System.getProperty("java.io.tmpdir") + "/paramAndGradTest.txt"))
                    .outputToConsole(true).outputToLogger(false).iterations(2).printHeader(true).printMean(false)
                    .printMinMax(false).printMeanAbsValue(true).delimiter("\t").build();
    net.setListeners(listener);

    for (int i = 0; i < 2; i++) {
        net.fit(iter);
    }


}
 
Developer: deeplearning4j, Project: deeplearning4j, Lines: 29, Source: TestParamAndGradientIterationListener.java


Note: The org.deeplearning4j.nn.multilayer.MultiLayerNetwork.setListeners examples in this article were compiled by 纯净天空 from open-source code and documentation platforms such as GitHub/MSDocs. The code snippets are drawn from open-source projects contributed by their respective developers; copyright of the source code remains with the original authors. Please consult each project's license before redistributing or using the code, and do not reproduce this article without permission.