This article collects typical usage examples of the Java class org.nd4j.linalg.activations.Activation. If you have been wondering what the Activation class does and how to use it, the curated code examples below should help.
The Activation class belongs to the org.nd4j.linalg.activations package. A total of 15 code examples of the Activation class are shown below, sorted by popularity by default.
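Before the examples, a minimal sketch of the two things most of them do with Activation: picking an enum constant and converting it to an IActivation via getActivationFunction(). This is only a sketch; the class name ActivationQuickStart and the 1x5 input are illustrative, and it assumes the standard nd4j IActivation.getActivation(INDArray, boolean) call, which for most implementations applies the function in place.
import org.nd4j.linalg.activations.Activation;
import org.nd4j.linalg.activations.IActivation;
import org.nd4j.linalg.api.ndarray.INDArray;
import org.nd4j.linalg.factory.Nd4j;

public class ActivationQuickStart {
    public static void main(String[] args) {
        // Enum constant -> IActivation implementation, as in examples 2 and 3 below.
        IActivation relu = Activation.RELU.getActivationFunction();
        // Apply the function to a random 1x5 row vector (training = true).
        INDArray out = relu.getActivation(Nd4j.rand(1, 5), true);
        System.out.println(out);
    }
}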
Example 1: getDeepDenseLayerNetworkConfiguration
import org.nd4j.linalg.activations.Activation; // import the required package/class
/** Returns the network configuration: 2 hidden DenseLayers of size 50. */
private static MultiLayerConfiguration getDeepDenseLayerNetworkConfiguration() {
final int numHiddenNodes = 50;
return new NeuralNetConfiguration.Builder()
.seed(seed)
.iterations(iterations)
.optimizationAlgo(OptimizationAlgorithm.STOCHASTIC_GRADIENT_DESCENT)
.learningRate(learningRate)
.weightInit(WeightInit.XAVIER)
.updater(Updater.NESTEROVS).momentum(0.9)
.list()
.layer(0, new DenseLayer.Builder().nIn(numInputs).nOut(numHiddenNodes)
.activation(Activation.TANH).build())
.layer(1, new DenseLayer.Builder().nIn(numHiddenNodes).nOut(numHiddenNodes)
.activation(Activation.TANH).build())
.layer(2, new OutputLayer.Builder(LossFunctions.LossFunction.MSE)
.activation(Activation.IDENTITY)
.nIn(numHiddenNodes).nOut(numOutputs).build())
.pretrain(false).backprop(true).build();
}
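Note that this excerpt assumes seed, iterations, learningRate, numInputs and numOutputs are fields of the enclosing class; they are not defined in the snippet. A hypothetical set of values that would let the method compile on its own:
// Hypothetical field values; the original example defines these elsewhere
// in its enclosing class, so treat the numbers below as placeholders.
private static final int seed = 12345;
private static final int iterations = 1;
private static final double learningRate = 0.01;
private static final int numInputs = 1;
private static final int numOutputs = 1;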
Example 2: runClf
import org.nd4j.linalg.activations.Activation; // import the required package/class
/**
* Run a dummy network with the given activation function for the first layer.
*
* @param act Activation function to test
* @throws Exception Something went wrong.
*/
public static void runClf(IActivation act) throws Exception {
Dl4jMlpClassifier clf = new Dl4jMlpClassifier();
// Layers
DenseLayer denseLayer = new DenseLayer();
denseLayer.setNOut(2);
denseLayer.setLayerName("Dense-layer");
denseLayer.setActivationFn(act);
OutputLayer outputLayer = new OutputLayer();
outputLayer.setActivationFn(Activation.SOFTMAX.getActivationFunction());
outputLayer.setLayerName("Output-layer");
clf.setNumEpochs(1);
clf.setLayers(denseLayer, outputLayer);
final Instances data = DatasetLoader.loadIris();
clf.buildClassifier(data);
clf.distributionsForInstances(data);
}
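A hedged usage sketch for runClf: since it accepts any IActivation, it can be driven in a loop over several Activation constants. The driver below is illustrative and not part of the original test:
// Hypothetical driver: exercise runClf with several built-in activation functions.
for (Activation a : new Activation[] {Activation.RELU, Activation.TANH, Activation.SIGMOID}) {
    runClf(a.getActivationFunction());
}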
Example 3: runClf
import org.nd4j.linalg.activations.Activation; // import the required package/class
private void runClf(Instances data) throws Exception {
// Layers
DenseLayer denseLayer = new DenseLayer();
denseLayer.setNOut(32);
denseLayer.setLayerName("Dense-layer");
denseLayer.setActivationFn(Activation.RELU.getActivationFunction());
OutputLayer outputLayer = new OutputLayer();
outputLayer.setActivationFn(Activation.SOFTMAX.getActivationFunction());
outputLayer.setLayerName("Output-layer");
NeuralNetConfiguration nnc = new NeuralNetConfiguration();
clf.setNumEpochs(DEFAULT_NUM_EPOCHS);
clf.setNeuralNetConfiguration(nnc);
clf.setLayers(denseLayer, outputLayer);
clf.buildClassifier(data);
clf.distributionsForInstances(data);
}
Example 4: net
import org.nd4j.linalg.activations.Activation; // import the required package/class
private static MultiLayerConfiguration net(int nIn, int nOut) {
return new NeuralNetConfiguration.Builder()
.seed(42)
.iterations(1)
.activation(Activation.RELU)
.weightInit(WeightInit.XAVIER)
.learningRate(0.1)
.regularization(true).l2(1e-4)
.list(
new DenseLayer.Builder().nIn(nIn).nOut(3).build(),
new DenseLayer.Builder().nIn(3).nOut(3).build(),
new OutputLayer.Builder(LossFunctions.LossFunction.NEGATIVELOGLIKELIHOOD)
.activation(Activation.SOFTMAX)
.nIn(3)
.nOut(nOut)
.build()
)
.build();
}
Example 5: getGraphConfCNN
import org.nd4j.linalg.activations.Activation; // import the required package/class
private static ComputationGraphConfiguration getGraphConfCNN(int seed, IUpdater updater) {
Nd4j.getRandom().setSeed(seed);
ComputationGraphConfiguration conf = new NeuralNetConfiguration.Builder()
.optimizationAlgo(OptimizationAlgorithm.STOCHASTIC_GRADIENT_DESCENT)
.weightInit(WeightInit.XAVIER).updater(updater).seed(seed).graphBuilder()
.addInputs("in")
.addLayer("0", new ConvolutionLayer.Builder().nOut(3).kernelSize(2, 2).stride(1, 1)
.padding(0, 0).activation(Activation.TANH).build(), "in")
.addLayer("1", new ConvolutionLayer.Builder().nOut(3).kernelSize(2, 2).stride(1, 1)
.padding(0, 0).activation(Activation.TANH).build(), "0")
.addLayer("2", new OutputLayer.Builder().lossFunction(LossFunctions.LossFunction.MSE).nOut(10)
.build(), "1")
.setOutputs("2").setInputTypes(InputType.convolutional(10, 10, 3)).pretrain(false)
.backprop(true).build();
return conf;
}
Contributor: deeplearning4j | Project: deeplearning4j | Lines: 17 | Source: TestCompareParameterAveragingSparkVsSingleMachine.java
Example 6: testDataSetScore
import org.nd4j.linalg.activations.Activation; // import the required package/class
@Test
public void testDataSetScore() {
Nd4j.getRandom().setSeed(12345);
MultiLayerConfiguration conf = new NeuralNetConfiguration.Builder()
.weightInit(WeightInit.XAVIER).seed(12345L).list()
.layer(0, new DenseLayer.Builder().nIn(4).nOut(3).activation(Activation.SIGMOID).build())
.layer(1, new OutputLayer.Builder(LossFunctions.LossFunction.MCXENT)
.activation(Activation.SOFTMAX).nIn(3).nOut(3).build())
.pretrain(false).backprop(true).build();
MultiLayerNetwork net = new MultiLayerNetwork(conf);
net.init();
INDArray in = Nd4j.create(new double[] {1.0, 2.0, 3.0, 4.0});
INDArray out = Nd4j.create(new double[] {1, 0, 0});
double score = net.score(new DataSet(in, out));
}
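The computed score is currently discarded. In a real test one would assert on it; a minimal, hypothetical check (assuming org.junit.Assert.assertTrue is statically imported, as in the other tests shown here):
// Hypothetical sanity check on the MCXENT score; the original test stops
// at computing it. A positive, non-NaN score means the forward pass ran.
assertTrue(score > 0.0 && !Double.isNaN(score));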
Example 7: testMultiCNNLayer
import org.nd4j.linalg.activations.Activation; // import the required package/class
@Test
public void testMultiCNNLayer() throws Exception {
MultiLayerConfiguration conf = new NeuralNetConfiguration.Builder()
.optimizationAlgo(OptimizationAlgorithm.LINE_GRADIENT_DESCENT).seed(123).list()
.layer(0, new ConvolutionLayer.Builder().nIn(1).nOut(6).weightInit(WeightInit.XAVIER)
.activation(Activation.RELU).build())
.layer(1, new LocalResponseNormalization.Builder().build())
.layer(2, new DenseLayer.Builder().nOut(2).build())
.layer(3, new OutputLayer.Builder(LossFunctions.LossFunction.MCXENT)
.weightInit(WeightInit.XAVIER).activation(Activation.SOFTMAX).nIn(2).nOut(10)
.build())
.backprop(true).pretrain(false).setInputType(InputType.convolutionalFlat(28, 28, 1)).build();
MultiLayerNetwork network = new MultiLayerNetwork(conf);
network.init();
DataSetIterator iter = new MnistDataSetIterator(2, 2);
DataSet next = iter.next();
network.fit(next);
}
Example 8: incompleteMnistLenet
import org.nd4j.linalg.activations.Activation; // import the required package/class
public MultiLayerConfiguration.Builder incompleteMnistLenet() {
MultiLayerConfiguration.Builder builder =
new NeuralNetConfiguration.Builder().seed(3)
.optimizationAlgo(OptimizationAlgorithm.CONJUGATE_GRADIENT).list()
.layer(0, new org.deeplearning4j.nn.conf.layers.ConvolutionLayer.Builder(
new int[] {5, 5}).nIn(1).nOut(20).build())
.layer(1, new org.deeplearning4j.nn.conf.layers.SubsamplingLayer.Builder(
new int[] {2, 2}, new int[] {2, 2}).build())
.layer(2, new org.deeplearning4j.nn.conf.layers.ConvolutionLayer.Builder(
new int[] {5, 5}).nIn(20).nOut(50).build())
.layer(3, new org.deeplearning4j.nn.conf.layers.SubsamplingLayer.Builder(
new int[] {2, 2}, new int[] {2, 2}).build())
.layer(4, new org.deeplearning4j.nn.conf.layers.DenseLayer.Builder().nOut(500)
.build())
.layer(5, new org.deeplearning4j.nn.conf.layers.OutputLayer.Builder(
LossFunctions.LossFunction.NEGATIVELOGLIKELIHOOD)
.activation(Activation.SOFTMAX).nOut(10)
.build());
return builder;
}
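As the method name says, the builder is incomplete: it never sets an input type, so the convolutional layer sizes cannot be inferred yet. A hypothetical completion, reusing the convolutionalFlat input type from example 7:
// Hypothetical completion of the incomplete LeNet builder: supply the
// MNIST input type so remaining layer sizes can be inferred, then build.
MultiLayerConfiguration conf = incompleteMnistLenet()
        .setInputType(InputType.convolutionalFlat(28, 28, 1))
        .build();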
Example 9: testListenersViaModel
import org.nd4j.linalg.activations.Activation; // import the required package/class
@Test
public void testListenersViaModel() {
TestListener.clearCounts();
MultiLayerConfiguration.Builder builder = new NeuralNetConfiguration.Builder().list().layer(0,
new OutputLayer.Builder(LossFunctions.LossFunction.MSE).nIn(10).nOut(10)
.activation(Activation.TANH).build());
MultiLayerConfiguration conf = builder.build();
MultiLayerNetwork model = new MultiLayerNetwork(conf);
model.init();
StatsStorage ss = new InMemoryStatsStorage();
model.setListeners(new TestListener(), new StatsListener(ss));
testListenersForModel(model, null);
assertEquals(1, ss.listSessionIDs().size());
assertEquals(2, ss.listWorkerIDsForSession(ss.listSessionIDs().get(0)).size());
}
Example 10: testComputeZ
import org.nd4j.linalg.activations.Activation; // import the required package/class
@Test
public void testComputeZ() {
MultiLayerConfiguration conf = new NeuralNetConfiguration.Builder().weightInit(WeightInit.XAVIER)
.activation(Activation.TANH).list().layer(0, new DenseLayer.Builder().nIn(10).nOut(10).build())
.layer(1, new DenseLayer.Builder().nIn(10).nOut(10).build()).build();
MultiLayerNetwork net = new MultiLayerNetwork(conf);
net.init();
INDArray in = Nd4j.rand(10, 10);
List<INDArray> preOuts = net.computeZ(in, false);
assertEquals(3, preOuts.size()); //Includes original input
assertEquals(in, preOuts.get(0));
INDArray preOut0 = net.getLayer(0).preOutput(in);
INDArray out0 = net.getLayer(0).activate(in);
assertEquals(preOut0, preOuts.get(1));
INDArray preOut1 = net.getLayer(1).preOutput(out0);
INDArray out1 = net.getLayer(1).activate(out0);
assertEquals(preOut1, preOuts.get(2));
}
Example 11: testRnnTimeStepWithPreprocessor
import org.nd4j.linalg.activations.Activation; // import the required package/class
@Test
public void testRnnTimeStepWithPreprocessor() {
MultiLayerConfiguration conf =
new NeuralNetConfiguration.Builder()
.optimizationAlgo(OptimizationAlgorithm.STOCHASTIC_GRADIENT_DESCENT)
.list()
.layer(0, new org.deeplearning4j.nn.conf.layers.GravesLSTM.Builder().nIn(10)
.nOut(10).activation(Activation.TANH).build())
.layer(1, new org.deeplearning4j.nn.conf.layers.GravesLSTM.Builder().nIn(10)
.nOut(10).activation(Activation.TANH).build())
.layer(2, new RnnOutputLayer.Builder(LossFunctions.LossFunction.MCXENT)
.activation(Activation.SOFTMAX).nIn(10).nOut(10).build())
.inputPreProcessor(0, new FeedForwardToRnnPreProcessor()).pretrain(false)
.backprop(true).build();
MultiLayerNetwork net = new MultiLayerNetwork(conf);
net.init();
INDArray in = Nd4j.rand(1, 10);
net.rnnTimeStep(in);
}
Example 12: getConf
import org.nd4j.linalg.activations.Activation; // import the required package/class
private static MultiLayerConfiguration getConf() {
MultiLayerConfiguration conf =
new NeuralNetConfiguration.Builder().seed(12345L)
.list().layer(0,
new DenseLayer.Builder().nIn(4).nOut(3)
.weightInit(WeightInit.DISTRIBUTION)
.dist(new NormalDistribution(0, 1))
.build())
.layer(1, new org.deeplearning4j.nn.conf.layers.OutputLayer.Builder(
LossFunctions.LossFunction.MCXENT)
.activation(Activation.SOFTMAX).nIn(3).nOut(3)
.weightInit(WeightInit.DISTRIBUTION)
.dist(new NormalDistribution(0, 1)).build())
.build();
return conf;
}
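A brief usage sketch (hypothetical, but following the same pattern as examples 6 and 11): wrap the configuration in a MultiLayerNetwork and initialize it before use.
// Hypothetical usage of getConf(), mirroring the other examples.
MultiLayerNetwork net = new MultiLayerNetwork(getConf());
net.init();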
Example 13: testRNG
import org.nd4j.linalg.activations.Activation; // import the required package/class
@Test
public void testRNG() {
DenseLayer layer = new DenseLayer.Builder().nIn(trainingSet.numInputs()).nOut(trainingSet.numOutcomes())
.weightInit(WeightInit.UNIFORM).activation(Activation.TANH).build();
NeuralNetConfiguration conf = new NeuralNetConfiguration.Builder().seed(123)
.optimizationAlgo(OptimizationAlgorithm.CONJUGATE_GRADIENT).layer(layer).build();
int numParams = conf.getLayer().initializer().numParams(conf);
INDArray params = Nd4j.create(1, numParams);
Layer model = conf.getLayer().instantiate(conf, null, 0, params, true);
INDArray modelWeights = model.getParam(DefaultParamInitializer.WEIGHT_KEY);
DenseLayer layer2 = new DenseLayer.Builder().nIn(trainingSet.numInputs()).nOut(trainingSet.numOutcomes())
.weightInit(WeightInit.UNIFORM).activation(Activation.TANH).build();
NeuralNetConfiguration conf2 = new NeuralNetConfiguration.Builder().seed(123)
.optimizationAlgo(OptimizationAlgorithm.CONJUGATE_GRADIENT).layer(layer2).build();
int numParams2 = conf2.getLayer().initializer().numParams(conf2);
INDArray params2 = Nd4j.create(1, numParams2);
Layer model2 = conf2.getLayer().instantiate(conf2, null, 0, params2, true);
INDArray modelWeights2 = model2.getParam(DefaultParamInitializer.WEIGHT_KEY);
assertEquals(modelWeights, modelWeights2);
}
Example 14: createNet
import org.nd4j.linalg.activations.Activation; // import the required package/class
public static ComputationGraph createNet() throws Exception {
ComputationGraphConfiguration conf = new NeuralNetConfiguration.Builder()
.graphBuilder()
.addInputs("in")
.addLayer("0", new ConvolutionLayer.Builder().nOut(3)
.kernelSize(2,2).stride(2,2).build(), "in")
.addLayer("1", new ConvolutionLayer.Builder().nOut(3)
.kernelSize(2,2).stride(2,2).build(), "0")
.addLayer("out", new OutputLayer.Builder().nOut(10)
.activation(Activation.TANH).lossFunction(LossFunctions.LossFunction.MSE)
.build(), "1")
.setOutputs("out")
.setInputTypes(InputType.convolutional(28,28,1))
.build();
ComputationGraph model = new ComputationGraph(conf);
model.init();
return model;
}
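A hypothetical usage sketch for createNet, borrowing the MnistDataSetIterator from examples 7 and 15 (the graph expects 28x28x1 convolutional input, which matches MNIST):
// Hypothetical usage: fit one small MNIST batch on the graph.
ComputationGraph graph = createNet();
DataSetIterator mnist = new MnistDataSetIterator(10, 10);
graph.fit(mnist.next());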
Example 15: testPredict
import org.nd4j.linalg.activations.Activation; // import the required package/class
@Test
public void testPredict() throws Exception {
Nd4j.getRandom().setSeed(12345);
MultiLayerConfiguration conf = new NeuralNetConfiguration.Builder()
.weightInit(WeightInit.XAVIER).seed(12345L).list()
.layer(0, new DenseLayer.Builder().nIn(784).nOut(50).activation(Activation.RELU).build())
.layer(1, new OutputLayer.Builder(LossFunctions.LossFunction.MCXENT)
.activation(Activation.SOFTMAX).nIn(50).nOut(10).build())
.pretrain(false).backprop(true).setInputType(InputType.convolutional(28, 28, 1)).build();
MultiLayerNetwork net = new MultiLayerNetwork(conf);
net.init();
DataSetIterator ds = new MnistDataSetIterator(10, 10);
net.fit(ds);
DataSetIterator testDs = new MnistDataSetIterator(1, 1);
DataSet testData = testDs.next();
testData.setLabelNames(Arrays.asList("0", "1", "2", "3", "4", "5", "6", "7", "8", "9"));
String actualLabels = testData.getLabelName(0);
List<String> prediction = net.predict(testData);
assertTrue(actualLabels != null);
assertTrue(prediction.get(0) != null);
}