This article collects typical usage examples of the Java method org.deeplearning4j.nn.conf.MultiLayerConfiguration.fromYaml. If you are wondering what MultiLayerConfiguration.fromYaml does, how to call it, or what real usage looks like, the curated code examples below should help. You can also explore other usages of the enclosing class, org.deeplearning4j.nn.conf.MultiLayerConfiguration.
Six code examples of MultiLayerConfiguration.fromYaml are shown below, ordered by popularity by default.
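Before the examples, here is a minimal, self-contained sketch of the typical round trip: a configuration is serialized with toYaml() and parsed back with MultiLayerConfiguration.fromYaml(). The class name FromYamlRoundTrip, the layer sizes, and the activation/loss choices are illustrative assumptions, not taken from the examples below.

import org.deeplearning4j.nn.conf.MultiLayerConfiguration;
import org.deeplearning4j.nn.conf.NeuralNetConfiguration;
import org.deeplearning4j.nn.conf.layers.DenseLayer;
import org.deeplearning4j.nn.conf.layers.OutputLayer;
import org.nd4j.linalg.activations.Activation;
import org.nd4j.linalg.lossfunctions.LossFunctions;

public class FromYamlRoundTrip {
    public static void main(String[] args) {
        // Build a small two-layer configuration (sizes chosen only for illustration)
        MultiLayerConfiguration conf = new NeuralNetConfiguration.Builder().list()
                .layer(0, new DenseLayer.Builder().nIn(4).nOut(3).activation(Activation.RELU).build())
                .layer(1, new OutputLayer.Builder(LossFunctions.LossFunction.MCXENT)
                        .nIn(3).nOut(2).activation(Activation.SOFTMAX).build())
                .build();

        // Serialize to YAML, then parse it back with the method documented on this page
        String yaml = conf.toYaml();
        MultiLayerConfiguration restored = MultiLayerConfiguration.fromYaml(yaml);

        // The restored configuration equals the original
        System.out.println(conf.equals(restored));
    }
}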
Example 1: loadConfigGuess
import org.deeplearning4j.nn.conf.MultiLayerConfiguration; //import the package/class the method depends on
/**
 * Load a network configuration from the given file path, guessing its format
 * (DL4J JSON/YAML or Keras JSON).
 * @param path the path of the file to "guess"
 *
 * @return the loaded configuration object
 * @throws Exception if the file cannot be read or does not match any known configuration type
 */
public static Object loadConfigGuess(String path) throws Exception {
    String input = FileUtils.readFileToString(new File(path));
    //note here that we load json BEFORE YAML. YAML
    //turns out to load just fine *accidentally*
    try {
        return MultiLayerConfiguration.fromJson(input);
    } catch (Exception e) {
        log.warn("Tried multi layer config from json", e);
        try {
            return KerasModelImport.importKerasModelConfiguration(path);
        } catch (Exception e1) {
            log.warn("Tried keras model config", e1);
            try {
                return KerasModelImport.importKerasSequentialConfiguration(path);
            } catch (Exception e2) {
                log.warn("Tried keras sequence config", e2);
                try {
                    return ComputationGraphConfiguration.fromJson(input);
                } catch (Exception e3) {
                    log.warn("Tried computation graph from json", e3);
                    try {
                        return MultiLayerConfiguration.fromYaml(input);
                    } catch (Exception e4) {
                        log.warn("Tried multi layer configuration from yaml", e4);
                        try {
                            return ComputationGraphConfiguration.fromYaml(input);
                        } catch (Exception e5) {
                            throw new ModelGuesserException("Unable to load configuration from path " + path
                                    + " (invalid config file or not a known config type)");
                        }
                    }
                }
            }
        }
    }
}
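As a usage note: the snippet above does not show its enclosing class. Judging by ModelGuesserException it is presumably a ModelGuesser-style utility, so the class name and the file path below are assumptions. A caller would inspect the runtime type of the returned Object, roughly like this:

// Assumed call site; "ModelGuesser" and the path are placeholders for illustration
Object loaded = ModelGuesser.loadConfigGuess("config.yaml");
if (loaded instanceof MultiLayerConfiguration) {
    // A DL4J multi-layer configuration (parsed from JSON or YAML)
    System.out.println(((MultiLayerConfiguration) loaded).toYaml());
} else if (loaded instanceof ComputationGraphConfiguration) {
    System.out.println("Loaded a ComputationGraphConfiguration");
} else {
    // Otherwise one of the Keras import result types
    System.out.println("Keras-imported configuration: " + loaded.getClass().getSimpleName());
}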
Example 2: testCustomActivationFn
import org.deeplearning4j.nn.conf.MultiLayerConfiguration; //import the package/class the method depends on
@Test
public void testCustomActivationFn() {
    //First: Ensure that the CustomActivation class is registered
    ObjectMapper mapper = NeuralNetConfiguration.mapper();
    AnnotatedClass ac = AnnotatedClass.construct(IActivation.class,
            mapper.getSerializationConfig().getAnnotationIntrospector(), null);
    Collection<NamedType> types = mapper.getSubtypeResolver().collectAndResolveSubtypes(ac,
            mapper.getSerializationConfig(), mapper.getSerializationConfig().getAnnotationIntrospector());
    boolean found = false;
    for (NamedType nt : types) {
        System.out.println(nt);
        if (nt.getType() == CustomActivation.class)
            found = true;
    }
    assertTrue("CustomActivation: not registered with NeuralNetConfiguration mapper", found);

    //Second: let's create a MultiLayerConfiguration with one, and check that the JSON and YAML configs actually work...
    MultiLayerConfiguration conf = new NeuralNetConfiguration.Builder().updater(new Sgd(0.1)).list()
            .layer(0, new DenseLayer.Builder().nIn(10).nOut(10).activation(new CustomActivation()).build())
            .layer(1, new OutputLayer.Builder(LossFunctions.LossFunction.MCXENT).nIn(10).nOut(10).build())
            .pretrain(false).backprop(true).build();

    String json = conf.toJson();
    String yaml = conf.toYaml();
    System.out.println(json);

    MultiLayerConfiguration confFromJson = MultiLayerConfiguration.fromJson(json);
    assertEquals(conf, confFromJson);

    MultiLayerConfiguration confFromYaml = MultiLayerConfiguration.fromYaml(yaml);
    assertEquals(conf, confFromYaml);
}
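A natural follow-up check, not part of the snippet above and assuming the getConf(int)/getLayer() accessors of the DL4J version in question, is to verify that the custom activation itself survives the round trip rather than only comparing whole configurations:

// Sketch: confirm the deserialized dense layer still carries the custom activation
DenseLayer fromJsonLayer = (DenseLayer) confFromJson.getConf(0).getLayer();
assertTrue(fromJsonLayer.getActivationFn() instanceof CustomActivation);

DenseLayer fromYamlLayer = (DenseLayer) confFromYaml.getConf(0).getLayer();
assertTrue(fromYamlLayer.getActivationFn() instanceof CustomActivation);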
Example 3: testJsonYaml
import org.deeplearning4j.nn.conf.MultiLayerConfiguration; //import the package/class the method depends on
@Test
public void testJsonYaml() {
    MultiLayerConfiguration config = new NeuralNetConfiguration.Builder().seed(12345).list()
            .layer(0, new org.deeplearning4j.nn.conf.layers.variational.VariationalAutoencoder.Builder()
                    .reconstructionDistribution(new GaussianReconstructionDistribution(Activation.IDENTITY))
                    .nIn(3).nOut(4).encoderLayerSizes(5).decoderLayerSizes(6).build())
            .layer(1, new org.deeplearning4j.nn.conf.layers.variational.VariationalAutoencoder.Builder()
                    .reconstructionDistribution(new GaussianReconstructionDistribution(Activation.TANH))
                    .nIn(7).nOut(8).encoderLayerSizes(9).decoderLayerSizes(10).build())
            .layer(2, new org.deeplearning4j.nn.conf.layers.variational.VariationalAutoencoder.Builder()
                    .reconstructionDistribution(new BernoulliReconstructionDistribution()).nIn(11)
                    .nOut(12).encoderLayerSizes(13).decoderLayerSizes(14).build())
            .layer(3, new org.deeplearning4j.nn.conf.layers.variational.VariationalAutoencoder.Builder()
                    .reconstructionDistribution(new ExponentialReconstructionDistribution(Activation.TANH))
                    .nIn(11).nOut(12).encoderLayerSizes(13).decoderLayerSizes(14).build())
            .layer(4, new org.deeplearning4j.nn.conf.layers.variational.VariationalAutoencoder.Builder()
                    .lossFunction(new ActivationTanH(), LossFunctions.LossFunction.MSE).nIn(11)
                    .nOut(12).encoderLayerSizes(13).decoderLayerSizes(14).build())
            .layer(5, new org.deeplearning4j.nn.conf.layers.variational.VariationalAutoencoder.Builder()
                    .reconstructionDistribution(new CompositeReconstructionDistribution.Builder()
                            .addDistribution(5, new GaussianReconstructionDistribution())
                            .addDistribution(5, new GaussianReconstructionDistribution(Activation.TANH))
                            .addDistribution(5, new BernoulliReconstructionDistribution())
                            .build())
                    .nIn(15).nOut(16).encoderLayerSizes(17).decoderLayerSizes(18).build())
            .layer(6, new OutputLayer.Builder().lossFunction(LossFunctions.LossFunction.MSE).nIn(18)
                    .nOut(19).activation(new ActivationTanH()).build())
            .pretrain(true).backprop(true).build();

    String asJson = config.toJson();
    String asYaml = config.toYaml();

    MultiLayerConfiguration fromJson = MultiLayerConfiguration.fromJson(asJson);
    MultiLayerConfiguration fromYaml = MultiLayerConfiguration.fromYaml(asYaml);

    assertEquals(config, fromJson);
    assertEquals(config, fromYaml);
}
Example 4: testJsonMultiLayerNetwork
import org.deeplearning4j.nn.conf.MultiLayerConfiguration; //import the package/class the method depends on
@Test
public void testJsonMultiLayerNetwork() {
    //First: Ensure that the CustomLayer class is registered
    ObjectMapper mapper = NeuralNetConfiguration.mapper();
    AnnotatedClass ac = AnnotatedClass.construct(Layer.class,
            mapper.getSerializationConfig().getAnnotationIntrospector(), null);
    Collection<NamedType> types = mapper.getSubtypeResolver().collectAndResolveSubtypes(ac,
            mapper.getSerializationConfig(), mapper.getSerializationConfig().getAnnotationIntrospector());
    boolean found = false;
    for (NamedType nt : types) {
        System.out.println(nt);
        if (nt.getType() == CustomLayer.class)
            found = true;
    }
    assertTrue("CustomLayer: not registered with NeuralNetConfiguration mapper", found);

    //Second: let's create a MultiLayerConfiguration with one, and check that the JSON and YAML configs actually work...
    MultiLayerConfiguration conf = new NeuralNetConfiguration.Builder().list()
            .layer(0, new DenseLayer.Builder().nIn(10).nOut(10).build())
            .layer(1, new CustomLayer(3.14159))
            .layer(2, new OutputLayer.Builder(LossFunctions.LossFunction.MCXENT)
                    .nIn(10).nOut(10).build())
            .pretrain(false).backprop(true).build();

    String json = conf.toJson();
    String yaml = conf.toYaml();
    System.out.println(json);

    MultiLayerConfiguration confFromJson = MultiLayerConfiguration.fromJson(json);
    assertEquals(conf, confFromJson);

    MultiLayerConfiguration confFromYaml = MultiLayerConfiguration.fromYaml(yaml);
    assertEquals(conf, confFromYaml);
}
Example 5: testCustomOutputLayerMLN
import org.deeplearning4j.nn.conf.MultiLayerConfiguration; //import the package/class the method depends on
@Test
public void testCustomOutputLayerMLN() {
    //First: Ensure that the CustomOutputLayer class is registered
    ObjectMapper mapper = NeuralNetConfiguration.mapper();
    AnnotatedClass ac = AnnotatedClass.construct(Layer.class,
            mapper.getSerializationConfig().getAnnotationIntrospector(), null);
    Collection<NamedType> types = mapper.getSubtypeResolver().collectAndResolveSubtypes(ac,
            mapper.getSerializationConfig(), mapper.getSerializationConfig().getAnnotationIntrospector());
    boolean found = false;
    for (NamedType nt : types) {
        System.out.println(nt);
        if (nt.getType() == CustomOutputLayer.class)
            found = true;
    }
    assertTrue("CustomOutputLayer: not registered with NeuralNetConfiguration mapper", found);

    //Second: let's create a MultiLayerConfiguration with one, and check that the JSON and YAML configs actually work...
    MultiLayerConfiguration conf = new NeuralNetConfiguration.Builder().seed(12345).list()
            .layer(0, new DenseLayer.Builder().nIn(10).nOut(10).build())
            .layer(1, new CustomOutputLayer.Builder(LossFunctions.LossFunction.MCXENT)
                    .nIn(10).nOut(10).build())
            .pretrain(false).backprop(true).build();

    String json = conf.toJson();
    String yaml = conf.toYaml();
    System.out.println(json);

    MultiLayerConfiguration confFromJson = MultiLayerConfiguration.fromJson(json);
    assertEquals(conf, confFromJson);

    MultiLayerConfiguration confFromYaml = MultiLayerConfiguration.fromYaml(yaml);
    assertEquals(conf, confFromYaml);

    //Third: check initialization
    Nd4j.getRandom().setSeed(12345);
    MultiLayerNetwork net = new MultiLayerNetwork(conf);
    net.init();
    assertTrue(net.getLayer(1) instanceof CustomOutputLayerImpl);

    //Fourth: compare to an equivalent standard output layer (should be identical)
    MultiLayerConfiguration conf2 = new NeuralNetConfiguration.Builder().seed(12345).weightInit(WeightInit.XAVIER)
            .list()
            .layer(0, new DenseLayer.Builder().nIn(10).nOut(10).build())
            .layer(1, new OutputLayer.Builder(LossFunctions.LossFunction.MCXENT)
                    .nIn(10).nOut(10).build())
            .pretrain(false).backprop(true).build();
    Nd4j.getRandom().setSeed(12345);
    MultiLayerNetwork net2 = new MultiLayerNetwork(conf2);
    net2.init();

    assertEquals(net2.params(), net.params());

    INDArray testFeatures = Nd4j.rand(1, 10);
    INDArray testLabels = Nd4j.zeros(1, 10);
    testLabels.putScalar(0, 3, 1.0);
    DataSet ds = new DataSet(testFeatures, testLabels);

    assertEquals(net2.output(testFeatures), net.output(testFeatures));
    assertEquals(net2.score(ds), net.score(ds), 1e-6);
}
Example 6: testCustomPreprocessor
import org.deeplearning4j.nn.conf.MultiLayerConfiguration; //import the package/class the method depends on
@Test
public void testCustomPreprocessor() {
    //First: Ensure that the MyCustomPreprocessor class is registered
    ObjectMapper mapper = NeuralNetConfiguration.mapper();
    AnnotatedClass ac = AnnotatedClass.construct(InputPreProcessor.class,
            mapper.getSerializationConfig().getAnnotationIntrospector(), null);
    Collection<NamedType> types = mapper.getSubtypeResolver().collectAndResolveSubtypes(ac,
            mapper.getSerializationConfig(), mapper.getSerializationConfig().getAnnotationIntrospector());
    boolean found = false;
    for (NamedType nt : types) {
        // System.out.println(nt);
        if (nt.getType() == MyCustomPreprocessor.class) {
            found = true;
            break;
        }
    }
    assertTrue("MyCustomPreprocessor: not registered with NeuralNetConfiguration mapper", found);

    //Second: let's create a MultiLayerConfiguration with one, and check that the JSON and YAML configs actually work...
    MultiLayerConfiguration conf = new NeuralNetConfiguration.Builder().list()
            .layer(0, new DenseLayer.Builder().nIn(10).nOut(10).build())
            .layer(1, new OutputLayer.Builder(LossFunctions.LossFunction.MCXENT).nIn(10)
                    .nOut(10).build())
            .inputPreProcessor(0, new MyCustomPreprocessor())
            .pretrain(false).backprop(true)
            .build();

    String json = conf.toJson();
    String yaml = conf.toYaml();
    System.out.println(json);

    MultiLayerConfiguration confFromJson = MultiLayerConfiguration.fromJson(json);
    assertEquals(conf, confFromJson);

    MultiLayerConfiguration confFromYaml = MultiLayerConfiguration.fromYaml(yaml);
    assertEquals(conf, confFromYaml);

    assertTrue(confFromJson.getInputPreProcess(0) instanceof MyCustomPreprocessor);
}