This article collects typical usage examples of the Java class org.deeplearning4j.iterator.LabeledSentenceProvider. If you are unsure what LabeledSentenceProvider is for or how to use it, the selected code examples below should help.
The LabeledSentenceProvider class belongs to the org.deeplearning4j.iterator package. Eight code examples are shown below, sorted by popularity.
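Before the examples, a minimal self-contained sketch of the contract a LabeledSentenceProvider implements: it hands out one (sentence, label) pair at a time, which the iterator classes in the examples then turn into DataSet minibatches. The sentences, labels, and the package of the Pair class here are assumptions, not taken from the listings below.

import org.deeplearning4j.iterator.LabeledSentenceProvider;
import org.deeplearning4j.iterator.provider.CollectionLabeledSentenceProvider;
import org.nd4j.linalg.primitives.Pair; // newer releases: org.nd4j.common.primitives.Pair

import java.util.Arrays;
import java.util.List;

public class LabeledSentenceProviderSketch {
  public static void main(String[] args) {
    List<String> sentences = Arrays.asList("great movie", "terrible plot"); // toy data
    List<String> labels = Arrays.asList("Positive", "Negative");

    LabeledSentenceProvider provider = new CollectionLabeledSentenceProvider(sentences, labels);
    while (provider.hasNext()) {
      Pair<String, String> next = provider.nextSentence(); // first = sentence, second = label
      System.out.println(next.getSecond() + ": " + next.getFirst());
    }
    provider.reset(); // providers can be reset and iterated again between epochs
  }
}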
Example 1: getDataSetIterator
import org.deeplearning4j.iterator.LabeledSentenceProvider; // import the required package/class
public static DataSetIterator getDataSetIterator(String DATA_PATH, boolean isTraining, WordVectors wordVectors,
                                                 int minibatchSize, int maxSentenceLength, Random rng) {
    // Point at the train or test split of the IMDB review corpus.
    String path = FilenameUtils.concat(DATA_PATH, (isTraining ? "aclImdb/train/" : "aclImdb/test/"));
    String positiveBaseDir = FilenameUtils.concat(path, "pos");
    String negativeBaseDir = FilenameUtils.concat(path, "neg");
    File filePositive = new File(positiveBaseDir);
    File fileNegative = new File(negativeBaseDir);

    // Map each label to the list of review files belonging to it.
    Map<String, List<File>> reviewFilesMap = new HashMap<>();
    reviewFilesMap.put("Positive", Arrays.asList(filePositive.listFiles()));
    reviewFilesMap.put("Negative", Arrays.asList(fileNegative.listFiles()));

    // The provider streams (sentence, label) pairs into the CNN sentence iterator.
    LabeledSentenceProvider sentenceProvider = new FileLabeledSentenceProvider(reviewFilesMap, rng);
    return new CnnSentenceDataSetIterator.Builder()
            .sentenceProvider(sentenceProvider)
            .wordVectors(wordVectors)
            .minibatchSize(minibatchSize)
            .maxSentenceLength(maxSentenceLength)
            .useNormalizedWordVectors(false)
            .build();
}
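One possible way to call this helper (the word-vector file, data directory, and hyperparameters below are placeholders, not part of the original example):

WordVectors wordVectors = WordVectorSerializer.loadStaticModel(new File("/path/to/GoogleNews-vectors-negative300.bin"));
// DATA_PATH should be the directory that contains the extracted aclImdb/ folder.
DataSetIterator trainIter = getDataSetIterator("/path/to/imdb-data", true, wordVectors, 32, 256, new Random(12345));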
Example 2: RnnTextEmbeddingDataSetIterator
import org.deeplearning4j.iterator.LabeledSentenceProvider; // import the required package/class
/**
 * @param data             Instances with documents and labels
 * @param wordVectors      WordVectors object
 * @param tokenFact        Tokenizer factory
 * @param tpp              Token pre-processor
 * @param stopWords        Stop word object
 * @param sentenceProvider Sentence provider that supplies the labelled documents
 * @param batchSize        Size of each minibatch for training
 * @param truncateLength   If reviews exceed this length, they are truncated to it
 */
public RnnTextEmbeddingDataSetIterator(
    Instances data,
    WordVectors wordVectors,
    TokenizerFactory tokenFact,
    TokenPreProcess tpp,
    AbstractStopwords stopWords,
    LabeledSentenceProvider sentenceProvider,
    int batchSize,
    int truncateLength) {
  this.batchSize = batchSize;
  this.vectorSize = wordVectors.getWordVector(wordVectors.vocab().wordAtIndex(0)).length;
  this.data = data;
  this.wordVectors = wordVectors;
  this.truncateLength = truncateLength;
  this.tokenizerFactory = tokenFact;
  this.tokenizerFactory.setTokenPreProcessor(tpp);
  this.stopWords = stopWords;
  this.sentenceProvider = sentenceProvider;
}
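For context, a hedged sketch of how the deeplearning4j-side arguments of this constructor are commonly created. The embedding file path, batch size of 32, truncation length of 300, and the choice of DefaultTokenizerFactory/CommonPreprocessor are assumptions; data, stopWords, and sentenceProvider would come from the surrounding Weka code (see Examples 5 and 6).

public static DataSetIterator buildIterator(Instances data, AbstractStopwords stopWords,
                                            LabeledSentenceProvider sentenceProvider) {
  // Pre-trained static embeddings (placeholder path).
  WordVectors wordVectors = WordVectorSerializer.loadStaticModel(new File("/path/to/glove.6B.100d.txt"));
  // Tokenization: split into tokens, then lower-case and strip punctuation.
  TokenizerFactory tokenFact = new DefaultTokenizerFactory();
  TokenPreProcess tpp = new CommonPreprocessor();
  return new RnnTextEmbeddingDataSetIterator(data, wordVectors, tokenFact, tpp,
      stopWords, sentenceProvider, 32, 300);
}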
Example 3: getDataSetIterator
import org.deeplearning4j.iterator.LabeledSentenceProvider; // import the required package/class
/**
* Returns the actual iterator.
*
* @param data the dataset to use
* @param seed the seed for the random number generator
* @param batchSize the batch size to use
* @return the DataSetIterator
*/
@Override
public DataSetIterator getDataSetIterator(Instances data, int seed, int batchSize)
    throws InvalidInputDataException, IOException {
  validate(data);
  initWordVectors();
  final LabeledSentenceProvider prov = getSentenceProvider(data);
  return new RnnTextEmbeddingDataSetIterator(
      data,
      wordVectors,
      tokenizerFactory,
      tokenPreProcess,
      stopwords,
      prov,
      batchSize,
      truncateLength);
}
Example 4: getDataSetIterator
import org.deeplearning4j.iterator.LabeledSentenceProvider; // import the required package/class
@Override
public DataSetIterator getDataSetIterator(Instances data, int seed, int batchSize)
    throws InvalidInputDataException, IOException {
  validate(data);
  initWordVectors();
  final LabeledSentenceProvider sentenceProvider = getSentenceProvider(data);
  return new RnnTextEmbeddingDataSetIterator(
      data,
      wordVectors,
      tokenizerFactory,
      tokenPreProcess,
      stopwords,
      sentenceProvider,
      batchSize,
      truncateLength);
}
Example 5: getSentenceProvider
import org.deeplearning4j.iterator.LabeledSentenceProvider; // import the required package/class
@Override
public LabeledSentenceProvider getSentenceProvider(Instances data) {
  List<File> files = new ArrayList<>();
  List<String> labels = new ArrayList<>();
  final int clsIdx = data.classIndex();
  for (Instance inst : data) {
    labels.add(String.valueOf(inst.value(clsIdx)));
    final String path = inst.stringValue(1 - clsIdx);
    final File file = Paths.get(textsLocation.getAbsolutePath(), path).toFile();
    files.add(file);
  }
  return new FileLabeledSentenceProvider(files, labels, data.numClasses());
}
Example 6: getSentenceProvider
import org.deeplearning4j.iterator.LabeledSentenceProvider; // import the required package/class
/**
* Create a sentence provider from the given data.
*
* @param data Data
* @return Sentence provider
*/
public LabeledSentenceProvider getSentenceProvider(Instances data) {
  List<String> sentences = new ArrayList<>();
  List<String> labels = new ArrayList<>();
  final int clsIdx = data.classIndex();
  for (Instance inst : data) {
    labels.add(String.valueOf(inst.value(clsIdx)));
    sentences.add(inst.stringValue(1 - clsIdx));
  }
  return new CollectionLabeledSentenceProvider(sentences, labels, data.numClasses());
}
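The loop above assumes a two-attribute dataset: one string attribute holding the document text and one nominal class attribute. A minimal sketch of building such Instances in memory with the Weka API follows; the relation name, attribute names, and class values are made up for illustration.

ArrayList<Attribute> atts = new ArrayList<>();
atts.add(new Attribute("text", (List<String>) null));          // string attribute holding the sentence
atts.add(new Attribute("class", Arrays.asList("neg", "pos"))); // nominal class attribute
Instances data = new Instances("reviews", atts, 1);
data.setClassIndex(1);

double[] vals = new double[2];
vals[0] = data.attribute(0).addStringValue("A surprisingly good movie.");
vals[1] = data.attribute(1).indexOfValue("pos");
data.add(new DenseInstance(1.0, vals));
// data can now be passed to getSentenceProvider(data) as shown above.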
Example 7: getDataSetIterator
import org.deeplearning4j.iterator.LabeledSentenceProvider; // import the required package/class
@Override
public DataSetIterator getDataSetIterator(Instances data, int seed, int batchSize) {
  initialize();
  LabeledSentenceProvider clsp = getSentenceProvider(data);
  return new CnnSentenceDataSetIterator.Builder()
      .wordVectors(wordVectors)
      .tokenizerFactory(tokenizerFactory)
      .sentenceProvider(clsp)
      .minibatchSize(batchSize)
      .maxSentenceLength(truncateLength)
      .useNormalizedWordVectors(false)
      .sentencesAlongHeight(true)
      .stopwords(stopwords)
      .build();
}
Example 8: sentenceProvider
import org.deeplearning4j.iterator.LabeledSentenceProvider; // import the required package/class
/** Specify how the (labelled) sentences / documents should be provided */
public CnnSentenceDataSetIterator.Builder sentenceProvider(
    LabeledSentenceProvider labeledSentenceProvider) {
  this.sentenceProvider = labeledSentenceProvider;
  return this;
}