This article collects typical usage examples of the Java class org.datavec.api.util.ClassPathResource. If you are wondering what ClassPathResource is for, or how to use it, the curated class examples below may help.
The ClassPathResource class belongs to the org.datavec.api.util package. Fifteen code examples of the class are shown below, sorted by popularity by default. You can upvote the examples you like or find useful; your feedback helps the system recommend better Java code examples.
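Before the individual examples, here is a minimal usage sketch of the pattern they all share: ClassPathResource resolves a resource name against the classpath and exposes it as a java.io.File via getFile(), which is then wrapped in a FileSplit and handed to a record reader. The snippet mirrors Example 1 below; the class name ClassPathResourceDemo and the main method are illustrative only, and it assumes iris.dat is available on the test classpath.

import org.datavec.api.records.reader.impl.csv.CSVRecordReader;
import org.datavec.api.split.FileSplit;
import org.datavec.api.util.ClassPathResource;
import java.io.File;

public class ClassPathResourceDemo {
    public static void main(String[] args) throws Exception {
        // Resolve a file that lives on the classpath (e.g. under src/test/resources)
        File irisFile = new ClassPathResource("iris.dat").getFile();

        // Use it exactly as the examples below do: wrap it in a FileSplit and initialize a reader
        CSVRecordReader reader = new CSVRecordReader(0, ',');
        reader.initialize(new FileSplit(irisFile));

        int rows = 0;
        while (reader.hasNext()) {
            reader.next();
            rows++;
        }
        System.out.println("Read " + rows + " rows from iris.dat");
    }
}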
Example 1: test
import org.datavec.api.util.ClassPathResource; //import the required package/class
@Test
public void test() throws Exception {
    CSVRecordReader rr = new CSVRecordReader(0, ',');
    rr.initialize(new FileSplit(new ClassPathResource("iris.dat").getFile()));
    CSVRecordReader rr2 = new CSVRecordReader(0, ',');
    rr2.initialize(new FileSplit(new ClassPathResource("iris.dat").getFile()));
    RecordReader rrC = new ConcatenatingRecordReader(rr, rr2);
    int count = 0;
    while (rrC.hasNext()) {
        rrC.next();
        count++;
    }
    // Reading the same file twice through the concatenating reader doubles the record count
    assertEquals(300, count);
}
Example 2: testWord2VecPlot
import org.datavec.api.util.ClassPathResource; //import the required package/class
@Test
public void testWord2VecPlot() throws Exception {
    File inputFile = new ClassPathResource("/big/raw_sentences.txt").getFile();
    SentenceIterator iter = new BasicLineIterator(inputFile.getAbsolutePath());
    TokenizerFactory t = new DefaultTokenizerFactory();
    t.setTokenPreProcessor(new CommonPreprocessor());
    Word2Vec vec = new Word2Vec.Builder().minWordFrequency(5).iterations(2).batchSize(1000).learningRate(0.025)
            .layerSize(100).seed(42).sampling(0).negativeSample(0).windowSize(5)
            .modelUtils(new BasicModelUtils<VocabWord>()).useAdaGrad(false).iterate(iter).workers(10)
            .tokenizerFactory(t).build();
    vec.fit();
    // UiConnectionInfo connectionInfo = UiServer.getInstance().getConnectionInfo();
    // vec.getLookupTable().plotVocab(100, connectionInfo);
    // Blocks for an extremely long time and then fails; evidently a placeholder for manual inspection of the plot
    Thread.sleep(10000000000L);
    fail("Not implemented");
}
Example 3: testAsWritable
import org.datavec.api.util.ClassPathResource; //import the required package/class
@Test
public void testAsWritable() throws Exception {
    File f0 = new ClassPathResource("/testimages/class0/0.jpg").getFile();
    NativeImageLoader imageLoader = new NativeImageLoader();
    ImageWritable img = imageLoader.asWritable(f0);
    assertEquals(32, img.getFrame().imageHeight);
    assertEquals(32, img.getFrame().imageWidth);
    assertEquals(3, img.getFrame().imageChannels);
    BufferedImage img1 = makeRandomBufferedImage(0, 0, 3);
    Mat img2 = makeRandomImage(0, 0, 4);
    int w1 = 33, h1 = 77, ch1 = 1;
    NativeImageLoader loader1 = new NativeImageLoader(h1, w1, ch1);
    INDArray array1 = loader1.asMatrix(f0);
    // Loading as a matrix yields NCHW shape: [minibatch, channels, height, width]
    assertEquals(4, array1.rank());
    assertEquals(1, array1.size(0));
    assertEquals(1, array1.size(1));
    assertEquals(h1, array1.size(2));
    assertEquals(w1, array1.size(3));
}
Example 4: testReader
import org.datavec.api.util.ClassPathResource; //import the required package/class
@Test
public void testReader() throws Exception {
    TfidfVectorizer vectorizer = new TfidfVectorizer();
    Configuration conf = new Configuration();
    conf.setInt(TfidfVectorizer.MIN_WORD_FREQUENCY, 1);
    conf.setBoolean(RecordReader.APPEND_LABEL, true);
    vectorizer.initialize(conf);
    TfidfRecordReader reader = new TfidfRecordReader();
    reader.initialize(conf, new FileSplit(new ClassPathResource("labeled").getFile()));
    int count = 0;
    int[] labelAssertions = new int[3];
    while (reader.hasNext()) {
        Collection<Writable> record = reader.next();
        Iterator<Writable> recordIter = record.iterator();
        // Each record is [TF-IDF feature vector, label index]
        NDArrayWritable writable = (NDArrayWritable) recordIter.next();
        labelAssertions[count] = recordIter.next().toInt();
        count++;
    }
    assertArrayEquals(new int[] {0, 1, 2}, labelAssertions);
    assertEquals(3, reader.getLabels().size());
    assertEquals(3, count);
}
Example 5: testRecordMetaData
import org.datavec.api.util.ClassPathResource; //import the required package/class
@Test
public void testRecordMetaData() throws Exception {
    TfidfVectorizer vectorizer = new TfidfVectorizer();
    Configuration conf = new Configuration();
    conf.setInt(TfidfVectorizer.MIN_WORD_FREQUENCY, 1);
    conf.setBoolean(RecordReader.APPEND_LABEL, true);
    vectorizer.initialize(conf);
    TfidfRecordReader reader = new TfidfRecordReader();
    reader.initialize(conf, new FileSplit(new ClassPathResource("labeled").getFile()));
    while (reader.hasNext()) {
        Record record = reader.nextRecord();
        assertNotNull(record.getMetaData().getURI());
        assertEquals(record.getMetaData().getReaderClass(), TfidfRecordReader.class);
    }
}
Example 6: testReadRecordFromMetaData
import org.datavec.api.util.ClassPathResource; //import the required package/class
@Test
public void testReadRecordFromMetaData() throws Exception {
    TfidfVectorizer vectorizer = new TfidfVectorizer();
    Configuration conf = new Configuration();
    conf.setInt(TfidfVectorizer.MIN_WORD_FREQUENCY, 1);
    conf.setBoolean(RecordReader.APPEND_LABEL, true);
    vectorizer.initialize(conf);
    TfidfRecordReader reader = new TfidfRecordReader();
    reader.initialize(conf, new FileSplit(new ClassPathResource("labeled").getFile()));
    Record record = reader.nextRecord();
    // Re-load the same record from its metadata and verify that both copies match
    Record reread = reader.loadFromMetaData(record.getMetaData());
    assertEquals(record.getRecord().size(), 2);
    assertEquals(reread.getRecord().size(), 2);
    assertEquals(record.getRecord().get(0), reread.getRecord().get(0));
    assertEquals(record.getRecord().get(1), reread.getRecord().get(1));
    assertEquals(record.getMetaData(), reread.getMetaData());
}
Example 7: testCodecReader
import org.datavec.api.util.ClassPathResource; //import the required package/class
@Test
public void testCodecReader() throws Exception {
    File file = new ClassPathResource("fire_lowres.mp4").getFile();
    SequenceRecordReader reader = new CodecRecordReader();
    Configuration conf = new Configuration();
    conf.set(CodecRecordReader.RAVEL, "true");
    conf.set(CodecRecordReader.START_FRAME, "160");
    conf.set(CodecRecordReader.TOTAL_FRAMES, "500");
    conf.set(CodecRecordReader.ROWS, "80");
    conf.set(CodecRecordReader.COLUMNS, "46");
    reader.initialize(new FileSplit(file));
    reader.setConf(conf);
    assertTrue(reader.hasNext());
    List<List<Writable>> record = reader.sequenceRecord();
    // System.out.println(record.size());
    Iterator<List<Writable>> it = record.iterator();
    List<Writable> first = it.next();
    // System.out.println(first);
    // Expected size: 80x46x3
    assertEquals(1, first.size());
    assertEquals(80 * 46 * 3, ((ArrayWritable) first.iterator().next()).length());
}
Example 8: testCodecReaderMeta
import org.datavec.api.util.ClassPathResource; //import the required package/class
@Test
public void testCodecReaderMeta() throws Exception {
    File file = new ClassPathResource("fire_lowres.mp4").getFile();
    SequenceRecordReader reader = new CodecRecordReader();
    Configuration conf = new Configuration();
    conf.set(CodecRecordReader.RAVEL, "true");
    conf.set(CodecRecordReader.START_FRAME, "160");
    conf.set(CodecRecordReader.TOTAL_FRAMES, "500");
    conf.set(CodecRecordReader.ROWS, "80");
    conf.set(CodecRecordReader.COLUMNS, "46");
    reader.initialize(new FileSplit(file));
    reader.setConf(conf);
    assertTrue(reader.hasNext());
    List<List<Writable>> record = reader.sequenceRecord();
    assertEquals(500, record.size()); //500 frames
    reader.reset();
    SequenceRecord seqR = reader.nextSequence();
    assertEquals(record, seqR.getSequenceRecord());
    RecordMetaData meta = seqR.getMetaData();
    // System.out.println(meta);
    assertTrue(meta.getURI().toString().endsWith("fire_lowres.mp4"));
    // The metadata alone is enough to re-load the full sequence
    SequenceRecord fromMeta = reader.loadSequenceFromMetaData(meta);
    assertEquals(seqR, fromMeta);
}
Example 9: testNativeCodecReader
import org.datavec.api.util.ClassPathResource; //import the required package/class
@Ignore
@Test
public void testNativeCodecReader() throws Exception {
    File file = new ClassPathResource("fire_lowres.mp4").getFile();
    SequenceRecordReader reader = new NativeCodecRecordReader();
    Configuration conf = new Configuration();
    conf.set(CodecRecordReader.RAVEL, "true");
    conf.set(CodecRecordReader.START_FRAME, "160");
    conf.set(CodecRecordReader.TOTAL_FRAMES, "500");
    conf.set(CodecRecordReader.ROWS, "80");
    conf.set(CodecRecordReader.COLUMNS, "46");
    reader.initialize(new FileSplit(file));
    reader.setConf(conf);
    assertTrue(reader.hasNext());
    List<List<Writable>> record = reader.sequenceRecord();
    // System.out.println(record.size());
    Iterator<List<Writable>> it = record.iterator();
    List<Writable> first = it.next();
    // System.out.println(first);
    // Expected size: 80x46x3
    assertEquals(1, first.size());
    assertEquals(80 * 46 * 3, ((ArrayWritable) first.iterator().next()).length());
}
Example 10: testSingleImageSparkTransform
import org.datavec.api.util.ClassPathResource; //import the required package/class
@Test
public void testSingleImageSparkTransform() throws Exception {
    int seed = 12345;
    File f1 = new ClassPathResource("/testimages/class1/A.jpg").getFile();
    SingleImageRecord imgRecord = new SingleImageRecord(f1.toURI());
    ImageTransformProcess imgTransformProcess = new ImageTransformProcess.Builder().seed(seed)
            .scaleImageTransform(10).cropImageTransform(5).build();
    ImageSparkTransform imgSparkTransform = new ImageSparkTransform(imgTransformProcess);
    Base64NDArrayBody body = imgSparkTransform.toArray(imgRecord);
    INDArray fromBase64 = Nd4jBase64.fromBase64(body.getNdarray());
    System.out.println("Base 64ed array " + fromBase64);
    // A single image yields a batch dimension of 1
    assertEquals(1, fromBase64.size(0));
}
Example 11: testBatchImageSparkTransform
import org.datavec.api.util.ClassPathResource; //import the required package/class
@Test
public void testBatchImageSparkTransform() throws Exception {
    int seed = 12345;
    File f0 = new ClassPathResource("/testimages/class1/A.jpg").getFile();
    File f1 = new ClassPathResource("/testimages/class1/B.png").getFile();
    File f2 = new ClassPathResource("/testimages/class1/C.jpg").getFile();
    BatchImageRecord batch = new BatchImageRecord();
    batch.add(f0.toURI());
    batch.add(f1.toURI());
    batch.add(f2.toURI());
    ImageTransformProcess imgTransformProcess = new ImageTransformProcess.Builder().seed(seed)
            .scaleImageTransform(10).cropImageTransform(5).build();
    ImageSparkTransform imgSparkTransform = new ImageSparkTransform(imgTransformProcess);
    Base64NDArrayBody body = imgSparkTransform.toArray(batch);
    INDArray fromBase64 = Nd4jBase64.fromBase64(body.getNdarray());
    System.out.println("Base 64ed array " + fromBase64);
    // Three images in the batch -> batch dimension of 3
    assertEquals(3, fromBase64.size(0));
}
Example 12: testMultilabelRecord
import org.datavec.api.util.ClassPathResource; //import the required package/class
@Test
public void testMultilabelRecord() throws Exception {
    Configuration configWriter = new Configuration();
    configWriter.setInt(SVMLightRecordWriter.FEATURE_FIRST_COLUMN, 0);
    configWriter.setInt(SVMLightRecordWriter.FEATURE_LAST_COLUMN, 9);
    configWriter.setBoolean(SVMLightRecordWriter.MULTILABEL, true);
    Configuration configReader = new Configuration();
    configReader.setInt(SVMLightRecordReader.NUM_FEATURES, 10);
    configReader.setBoolean(SVMLightRecordReader.MULTILABEL, true);
    configReader.setInt(SVMLightRecordReader.NUM_LABELS, 4);
    configReader.setBoolean(SVMLightRecordReader.ZERO_BASED_INDEXING, false);
    File inputFile = new ClassPathResource("svmlight/multilabel.txt").getFile();
    // executeTest is a helper from the surrounding test class (not shown in this snippet)
    executeTest(configWriter, configReader, inputFile);
}
Example 13: testZeroBasedIndexing
import org.datavec.api.util.ClassPathResource; //import the required package/class
@Test
public void testZeroBasedIndexing() throws Exception {
    Configuration configWriter = new Configuration();
    configWriter.setBoolean(SVMLightRecordWriter.ZERO_BASED_INDEXING, true);
    configWriter.setInt(SVMLightRecordWriter.FEATURE_FIRST_COLUMN, 0);
    configWriter.setInt(SVMLightRecordWriter.FEATURE_LAST_COLUMN, 10);
    configWriter.setBoolean(SVMLightRecordWriter.MULTILABEL, true);
    Configuration configReader = new Configuration();
    configReader.setInt(SVMLightRecordReader.NUM_FEATURES, 11);
    configReader.setBoolean(SVMLightRecordReader.MULTILABEL, true);
    configReader.setInt(SVMLightRecordReader.NUM_LABELS, 5);
    File inputFile = new ClassPathResource("svmlight/multilabel.txt").getFile();
    executeTest(configWriter, configReader, inputFile);
}
Example 14: testMultilabelRecord
import org.datavec.api.util.ClassPathResource; //import the required package/class
@Test
public void testMultilabelRecord() throws Exception {
    Configuration configWriter = new Configuration();
    configWriter.setInt(LibSvmRecordWriter.FEATURE_FIRST_COLUMN, 0);
    configWriter.setInt(LibSvmRecordWriter.FEATURE_LAST_COLUMN, 9);
    configWriter.setBoolean(LibSvmRecordWriter.MULTILABEL, true);
    Configuration configReader = new Configuration();
    configReader.setInt(LibSvmRecordReader.NUM_FEATURES, 10);
    configReader.setBoolean(LibSvmRecordReader.MULTILABEL, true);
    configReader.setInt(LibSvmRecordReader.NUM_LABELS, 4);
    configReader.setBoolean(LibSvmRecordReader.ZERO_BASED_INDEXING, false);
    File inputFile = new ClassPathResource("svmlight/multilabel.txt").getFile();
    executeTest(configWriter, configReader, inputFile);
}
Example 15: testZeroBasedIndexing
import org.datavec.api.util.ClassPathResource; //import the required package/class
@Test
public void testZeroBasedIndexing() throws Exception {
    Configuration configWriter = new Configuration();
    configWriter.setBoolean(LibSvmRecordWriter.ZERO_BASED_INDEXING, true);
    configWriter.setInt(LibSvmRecordWriter.FEATURE_FIRST_COLUMN, 0);
    configWriter.setInt(LibSvmRecordWriter.FEATURE_LAST_COLUMN, 10);
    configWriter.setBoolean(LibSvmRecordWriter.MULTILABEL, true);
    Configuration configReader = new Configuration();
    configReader.setInt(LibSvmRecordReader.NUM_FEATURES, 11);
    configReader.setBoolean(LibSvmRecordReader.MULTILABEL, true);
    configReader.setInt(LibSvmRecordReader.NUM_LABELS, 5);
    File inputFile = new ClassPathResource("svmlight/multilabel.txt").getFile();
    executeTest(configWriter, configReader, inputFile);
}