

Java ComputationGraph.setLayerMaskArrays Method Code Examples

This article collects typical usage examples of the Java method org.deeplearning4j.nn.graph.ComputationGraph.setLayerMaskArrays. If you are asking what this method does, how to call it, or what real-world uses of ComputationGraph.setLayerMaskArrays look like, the curated code samples below should help. You can also browse further usage examples of the enclosing class, org.deeplearning4j.nn.graph.ComputationGraph.


Five code examples of the ComputationGraph.setLayerMaskArrays method are shown below, ordered by popularity by default.
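Before the examples, a minimal sketch of the basic call pattern (assuming an already-initialized ComputationGraph with a single recurrent input; the class and helper names here are illustrative and not taken from the examples below). Each feature mask has shape [miniBatchSize, timeSeriesLength], where 1.0 marks a real time step and 0.0 marks padding; passing null as the second argument leaves the labels unmasked.

import org.deeplearning4j.nn.graph.ComputationGraph;
import org.nd4j.linalg.api.ndarray.INDArray;
import org.nd4j.linalg.factory.Nd4j;

public class SetLayerMaskArraysSketch {

    // Builds a per-example feature mask for variable-length sequences and applies it to the graph.
    // realLengths[i] is the number of valid time steps for example i (realLengths[i] <= tsLength).
    static void applyInputMask(ComputationGraph graph, int miniBatch, int tsLength, int[] realLengths) {
        INDArray inMask = Nd4j.zeros(miniBatch, tsLength);
        for (int i = 0; i < miniBatch; i++) {
            for (int t = 0; t < realLengths[i]; t++) {
                inMask.putScalar(new int[] {i, t}, 1.0); // 1.0 = real time step, 0.0 = padding
            }
        }
        // One mask per graph input (in input order); null = no label masks
        graph.setLayerMaskArrays(new INDArray[] {inMask}, null);

        // ... run output()/fit()/gradient checks here while the masks are set ...

        graph.clearLayerMaskArrays(); // clear the masks once they are no longer needed
    }
}

The examples that follow use the same pattern: build one 2d mask per time-series input and call setLayerMaskArrays before running the forward and backward passes.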

Example 1: testLSTMWithLastTimeStepVertex

import org.deeplearning4j.nn.graph.ComputationGraph; // import the package/class this method depends on
@Test
public void testLSTMWithLastTimeStepVertex() {

    Nd4j.getRandom().setSeed(12345);
    ComputationGraphConfiguration conf = new NeuralNetConfiguration.Builder().seed(12345)
                    .optimizationAlgo(OptimizationAlgorithm.STOCHASTIC_GRADIENT_DESCENT)
                    .weightInit(WeightInit.DISTRIBUTION).dist(new NormalDistribution(0, 1))
                    .updater(new NoOp()).graphBuilder().addInputs("input").setOutputs("out")
                    .addLayer("lstm1", new GravesLSTM.Builder().nIn(3).nOut(4).activation(Activation.TANH).build(),
                                    "input")
                    .addVertex("lastTS", new LastTimeStepVertex("input"), "lstm1")
                    .addLayer("out", new OutputLayer.Builder().nIn(4).nOut(3).activation(Activation.SOFTMAX)
                                    .lossFunction(LossFunctions.LossFunction.MCXENT).build(), "lastTS")
                    .pretrain(false).backprop(true).build();

    ComputationGraph graph = new ComputationGraph(conf);
    graph.init();

    Random r = new Random(12345);
    INDArray input = Nd4j.rand(new int[] {3, 3, 5});
    INDArray labels = Nd4j.zeros(3, 3); //Here: labels are 2d (due to LastTimeStepVertex)
    for (int i = 0; i < 3; i++) {
        labels.putScalar(new int[] {i, r.nextInt(3)}, 1.0);
    }

    if (PRINT_RESULTS) {
        System.out.println("testLSTMWithLastTimeStepVertex()");
        for (int j = 0; j < graph.getNumLayers(); j++)
            System.out.println("Layer " + j + " # params: " + graph.getLayer(j).numParams());
    }

    //First: test with no input mask array
    boolean gradOK = GradientCheckUtil.checkGradients(graph, DEFAULT_EPS, DEFAULT_MAX_REL_ERROR,
                    DEFAULT_MIN_ABS_ERROR, PRINT_RESULTS, RETURN_ON_FIRST_FAILURE, new INDArray[] {input},
                    new INDArray[] {labels});

    String msg = "testLSTMWithLastTimeStepVertex()";
    assertTrue(msg, gradOK);

    //Second: test with input mask arrays.
    INDArray inMask = Nd4j.zeros(3, 5);
    inMask.putRow(0, Nd4j.create(new double[] {1, 1, 1, 0, 0}));
    inMask.putRow(1, Nd4j.create(new double[] {1, 1, 1, 1, 0}));
    inMask.putRow(2, Nd4j.create(new double[] {1, 1, 1, 1, 1}));
    graph.setLayerMaskArrays(new INDArray[] {inMask}, null);
    gradOK = GradientCheckUtil.checkGradients(graph, DEFAULT_EPS, DEFAULT_MAX_REL_ERROR, DEFAULT_MIN_ABS_ERROR,
                    PRINT_RESULTS, RETURN_ON_FIRST_FAILURE, new INDArray[] {input}, new INDArray[] {labels});

    assertTrue(msg, gradOK);
}
 
Developer ID: deeplearning4j, Project: deeplearning4j, Lines of code: 51, Source file: GradientCheckTestsComputationGraph.java

Example 2: testLSTMWithReverseTimeSeriesVertex

import org.deeplearning4j.nn.graph.ComputationGraph; // import the package/class this method depends on
@Test
public void testLSTMWithReverseTimeSeriesVertex() {

    Nd4j.getRandom().setSeed(12345);
    ComputationGraphConfiguration conf =
            new NeuralNetConfiguration.Builder().seed(12345)
                    .optimizationAlgo(OptimizationAlgorithm.STOCHASTIC_GRADIENT_DESCENT)
                    .weightInit(WeightInit.DISTRIBUTION).dist(new NormalDistribution(0, 1))
                    .updater(new NoOp()).graphBuilder()
                    .addInputs("input").setOutputs("out")
                    .addLayer("lstm_a",
                            new GravesLSTM.Builder().nIn(3).nOut(4)
                                    .activation(Activation.TANH).build(),
                            "input")
                    .addVertex("input_rev", new ReverseTimeSeriesVertex("input"), "input")
                    .addLayer("lstm_b",
                            new GravesLSTM.Builder().nIn(3).nOut(4)
                                    .activation(Activation.TANH).build(),
                            "input_rev")
                    .addVertex("lstm_b_rev", new ReverseTimeSeriesVertex("input"), "lstm_b")
                    .addLayer("out", new RnnOutputLayer.Builder().nIn(4 + 4).nOut(3)
                                    .activation(Activation.SOFTMAX)
                                    .lossFunction(LossFunctions.LossFunction.MCXENT).build(),
                            "lstm_a", "lstm_b_rev")
                    .pretrain(false).backprop(true).build();

    ComputationGraph graph = new ComputationGraph(conf);
    graph.init();

    Random r = new Random(12345);
    INDArray input  = Nd4j.rand(new int[] {3, 3, 5});
    INDArray labels = Nd4j.zeros(3, 3, 5);
    for (int i = 0; i < 3; i++) {
        for (int j = 0; j < 5; j++) {
            labels.putScalar(new int[] {i, r.nextInt(3), j}, 1.0);
        }
    }

    if (PRINT_RESULTS) {
        System.out.println("testLSTMWithReverseTimeSeriesVertex()");
        for (int j = 0; j < graph.getNumLayers(); j++)
            System.out.println("Layer " + j + " # params: " + graph.getLayer(j).numParams());
    }

    boolean gradOK = GradientCheckUtil.checkGradients(graph, DEFAULT_EPS, DEFAULT_MAX_REL_ERROR,
            DEFAULT_MIN_ABS_ERROR, PRINT_RESULTS, RETURN_ON_FIRST_FAILURE, new INDArray[] {input},
            new INDArray[] {labels});

    String msg = "testLSTMWithReverseTimeSeriesVertex()";
    assertTrue(msg, gradOK);

    //Second: test with input mask arrays.
    INDArray inMask = Nd4j.zeros(3, 5);
    inMask.putRow(0, Nd4j.create(new double[] {1, 1, 1, 0, 0}));
    inMask.putRow(1, Nd4j.create(new double[] {1, 1, 0, 1, 0}));
    inMask.putRow(2, Nd4j.create(new double[] {1, 1, 1, 1, 1}));
    graph.setLayerMaskArrays(new INDArray[] {inMask}, null);
    gradOK = GradientCheckUtil.checkGradients(graph, DEFAULT_EPS, DEFAULT_MAX_REL_ERROR, DEFAULT_MIN_ABS_ERROR,
            PRINT_RESULTS, RETURN_ON_FIRST_FAILURE, new INDArray[] {input}, new INDArray[] {labels});

    assertTrue(msg, gradOK);
}
 
Developer ID: deeplearning4j, Project: deeplearning4j, Lines of code: 63, Source file: GradientCheckTestsComputationGraph.java

Example 3: testBasicStackUnstackVariableLengthTS

import org.deeplearning4j.nn.graph.ComputationGraph; // import the package/class this method depends on
@Test
public void testBasicStackUnstackVariableLengthTS() {

    int layerSizes = 2;

    Nd4j.getRandom().setSeed(12345);
    ComputationGraphConfiguration conf = new NeuralNetConfiguration.Builder().seed(12345)
                    .optimizationAlgo(OptimizationAlgorithm.STOCHASTIC_GRADIENT_DESCENT)
                    .weightInit(WeightInit.DISTRIBUTION).dist(new NormalDistribution(0, 1))
                    .activation(Activation.TANH).updater(new NoOp()).graphBuilder()
                    .addInputs("in1", "in2")
                    .addLayer("d0", new GravesLSTM.Builder().nIn(layerSizes).nOut(layerSizes).build(), "in1")
                    .addLayer("d1", new GravesLSTM.Builder().nIn(layerSizes).nOut(layerSizes).build(), "in2")
                    .addVertex("stack", new StackVertex(), "d0", "d1")
                    .addLayer("d2", new GravesLSTM.Builder().nIn(layerSizes).nOut(layerSizes).build(), "stack")
                    .addVertex("u1", new UnstackVertex(0, 2), "d2").addVertex("u2", new UnstackVertex(1, 2), "d2")
                    .addLayer("p1", new GlobalPoolingLayer.Builder(PoolingType.AVG).build(), "u1")
                    .addLayer("p2", new GlobalPoolingLayer.Builder(PoolingType.AVG).build(), "u2")
                    .addLayer("out1", new OutputLayer.Builder().lossFunction(LossFunctions.LossFunction.L2)
                                    .nIn(layerSizes).nOut(layerSizes).activation(Activation.IDENTITY).build(), "p1")
                    .addLayer("out2", new OutputLayer.Builder().lossFunction(LossFunctions.LossFunction.L2)
                                    .nIn(layerSizes).nOut(2).activation(Activation.IDENTITY).build(), "p2")
                    .setOutputs("out1", "out2").pretrain(false).backprop(true).build();

    ComputationGraph graph = new ComputationGraph(conf);
    graph.init();


    Nd4j.getRandom().setSeed(12345);
    int nParams = graph.numParams();
    INDArray newParams = Nd4j.rand(1, nParams);
    graph.setParams(newParams);

    int[] mbSizes = new int[] {1, 3, 10};
    for (int minibatch : mbSizes) {

        INDArray in1 = Nd4j.rand(new int[] {minibatch, layerSizes, 4});
        INDArray in2 = Nd4j.rand(new int[] {minibatch, layerSizes, 5});
        INDArray inMask1 = Nd4j.zeros(minibatch, 4);
        inMask1.get(NDArrayIndex.all(), NDArrayIndex.interval(0, 3)).assign(1);
        INDArray inMask2 = Nd4j.zeros(minibatch, 5);
        inMask2.get(NDArrayIndex.all(), NDArrayIndex.interval(0, 4)).assign(1);

        INDArray labels1 = Nd4j.rand(new int[] {minibatch, 2});
        INDArray labels2 = Nd4j.rand(new int[] {minibatch, 2});

        String testName = "testBasicStackUnstackVariableLengthTS() - minibatch = " + minibatch;

        if (PRINT_RESULTS) {
            System.out.println(testName);
            for (int j = 0; j < graph.getNumLayers(); j++)
                System.out.println("Layer " + j + " # params: " + graph.getLayer(j).numParams());
        }

        graph.setLayerMaskArrays(new INDArray[] {inMask1, inMask2}, null);

        boolean gradOK = GradientCheckUtil.checkGradients(graph, DEFAULT_EPS, DEFAULT_MAX_REL_ERROR,
                        DEFAULT_MIN_ABS_ERROR, PRINT_RESULTS, RETURN_ON_FIRST_FAILURE, new INDArray[] {in1, in2},
                        new INDArray[] {labels1, labels2}, new INDArray[] {inMask1, inMask2}, null);

        assertTrue(testName, gradOK);
    }
}
 
Developer ID: deeplearning4j, Project: deeplearning4j, Lines of code: 64, Source file: GradientCheckTestsComputationGraph.java

Example 4: testLastTimeStepVertex

import org.deeplearning4j.nn.graph.ComputationGraph; // import the package/class this method depends on
@Test
public void testLastTimeStepVertex() {

    ComputationGraphConfiguration conf = new NeuralNetConfiguration.Builder().graphBuilder().addInputs("in")
            .addLayer("lastTS", new LastTimeStep(new SimpleRnn.Builder()
                    .nIn(5).nOut(6).build()), "in")
            .setOutputs("lastTS")
            .build();

    ComputationGraph graph = new ComputationGraph(conf);
    graph.init();

    //First: test without input mask array
    Nd4j.getRandom().setSeed(12345);
    Layer l = graph.getLayer("lastTS");
    INDArray in = Nd4j.rand(new int[]{3, 5, 6});
    INDArray outUnderlying = ((LastTimeStepLayer)l).getUnderlying().activate(in);
    INDArray expOut = outUnderlying.get(NDArrayIndex.all(), NDArrayIndex.all(), NDArrayIndex.point(5));


    //Forward pass:
    INDArray outFwd = l.activate(in);
    assertEquals(expOut, outFwd);

    //Second: test with input mask array
    INDArray inMask = Nd4j.zeros(3, 6);
    inMask.putRow(0, Nd4j.create(new double[]{1, 1, 1, 0, 0, 0}));
    inMask.putRow(1, Nd4j.create(new double[]{1, 1, 1, 1, 0, 0}));
    inMask.putRow(2, Nd4j.create(new double[]{1, 1, 1, 1, 1, 0}));
    graph.setLayerMaskArrays(new INDArray[]{inMask}, null);

    expOut = Nd4j.zeros(3, 6);
    expOut.putRow(0, outUnderlying.get(NDArrayIndex.point(0), NDArrayIndex.all(), NDArrayIndex.point(2)));
    expOut.putRow(1, outUnderlying.get(NDArrayIndex.point(1), NDArrayIndex.all(), NDArrayIndex.point(3)));
    expOut.putRow(2, outUnderlying.get(NDArrayIndex.point(2), NDArrayIndex.all(), NDArrayIndex.point(4)));

    outFwd = l.activate(in);
    assertEquals(expOut, outFwd);

    TestUtils.testModelSerialization(graph);
}
 
Developer ID: deeplearning4j, Project: deeplearning4j, Lines of code: 42, Source file: TestLastTimeStepLayer.java

Example 5: testLastTimeStepVertex

import org.deeplearning4j.nn.graph.ComputationGraph; // import the package/class this method depends on
@Test
public void testLastTimeStepVertex() {

    ComputationGraphConfiguration conf = new NeuralNetConfiguration.Builder().graphBuilder().addInputs("in")
                    .addVertex("lastTS", new LastTimeStepVertex("in"), "in")
                    .addLayer("out", new OutputLayer.Builder().nIn(1).nOut(1).build(), "lastTS").setOutputs("out")
                    .build();

    ComputationGraph graph = new ComputationGraph(conf);
    graph.init();

    //First: test without input mask array
    Nd4j.getRandom().setSeed(12345);
    INDArray in = Nd4j.rand(new int[] {3, 5, 6});
    INDArray expOut = in.get(NDArrayIndex.all(), NDArrayIndex.all(), NDArrayIndex.point(5));

    GraphVertex gv = graph.getVertex("lastTS");
    gv.setInputs(in);
    //Forward pass:
    INDArray outFwd = gv.doForward(true);
    assertEquals(expOut, outFwd);
    //Backward pass:
    gv.setEpsilon(expOut);
    Pair<Gradient, INDArray[]> pair = gv.doBackward(false);
    INDArray eps = pair.getSecond()[0];
    assertArrayEquals(in.shape(), eps.shape());
    assertEquals(Nd4j.zeros(3, 5, 5),
                    eps.get(NDArrayIndex.all(), NDArrayIndex.all(), NDArrayIndex.interval(0, 4, true)));
    assertEquals(expOut, eps.get(NDArrayIndex.all(), NDArrayIndex.all(), NDArrayIndex.point(5)));

    //Second: test with input mask array
    INDArray inMask = Nd4j.zeros(3, 6);
    inMask.putRow(0, Nd4j.create(new double[] {1, 1, 1, 0, 0, 0}));
    inMask.putRow(1, Nd4j.create(new double[] {1, 1, 1, 1, 0, 0}));
    inMask.putRow(2, Nd4j.create(new double[] {1, 1, 1, 1, 1, 0}));
    graph.setLayerMaskArrays(new INDArray[] {inMask}, null);

    expOut = Nd4j.zeros(3, 5);
    expOut.putRow(0, in.get(NDArrayIndex.point(0), NDArrayIndex.all(), NDArrayIndex.point(2)));
    expOut.putRow(1, in.get(NDArrayIndex.point(1), NDArrayIndex.all(), NDArrayIndex.point(3)));
    expOut.putRow(2, in.get(NDArrayIndex.point(2), NDArrayIndex.all(), NDArrayIndex.point(4)));

    gv.setInputs(in);
    outFwd = gv.doForward(true);
    assertEquals(expOut, outFwd);

    String json = conf.toJson();
    ComputationGraphConfiguration conf2 = ComputationGraphConfiguration.fromJson(json);
    assertEquals(conf, conf2);
}
 
Developer ID: deeplearning4j, Project: deeplearning4j, Lines of code: 51, Source file: TestGraphNodes.java


Note: The org.deeplearning4j.nn.graph.ComputationGraph.setLayerMaskArrays method examples in this article were compiled by 纯净天空 from open-source code and documentation platforms such as GitHub and MSDocs. The code snippets were selected from open-source projects contributed by their respective authors; the source code copyright remains with the original authors, and distribution or use should follow the corresponding project's license. Do not reproduce without permission.