This article collects typical usage examples of the Java class org.apache.lucene.store.Directory, intended to answer questions such as: what is Directory for, and how is it used?
The Directory class lives in the org.apache.lucene.store package. Fifteen code examples are shown below, ordered roughly by popularity.
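Before the examples, here is a minimal, self-contained sketch of typical Directory usage (index one document, then search it). It targets the current FSDirectory/IndexWriter API; the index path /tmp/demo-index and the field name "body" are illustrative assumptions, not taken from any example below.

import org.apache.lucene.analysis.standard.StandardAnalyzer;
import org.apache.lucene.document.Document;
import org.apache.lucene.document.Field;
import org.apache.lucene.document.TextField;
import org.apache.lucene.index.DirectoryReader;
import org.apache.lucene.index.IndexWriter;
import org.apache.lucene.index.IndexWriterConfig;
import org.apache.lucene.index.Term;
import org.apache.lucene.search.IndexSearcher;
import org.apache.lucene.search.TermQuery;
import org.apache.lucene.search.TopDocs;
import org.apache.lucene.store.Directory;
import org.apache.lucene.store.FSDirectory;
import java.nio.file.Paths;

public class DirectoryQuickStart {
    public static void main(String[] args) throws Exception {
        try (Directory dir = FSDirectory.open(Paths.get("/tmp/demo-index"))) {
            // Write a single document to the directory.
            try (IndexWriter writer = new IndexWriter(dir, new IndexWriterConfig(new StandardAnalyzer()))) {
                Document doc = new Document();
                doc.add(new TextField("body", "hello lucene directory", Field.Store.YES));
                writer.addDocument(doc);
            }
            // Reopen the same directory for searching.
            try (DirectoryReader reader = DirectoryReader.open(dir)) {
                IndexSearcher searcher = new IndexSearcher(reader);
                TopDocs hits = searcher.search(new TermQuery(new Term("body", "lucene")), 10);
                System.out.println("hits: " + hits.totalHits);
            }
        }
    }
}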
Example 1: testSimpleNumericOps
import org.apache.lucene.store.Directory; // import the required package/class
public void testSimpleNumericOps() throws Exception {
    Directory dir = new RAMDirectory();
    IndexWriter indexWriter = new IndexWriter(dir, new IndexWriterConfig(Lucene.STANDARD_ANALYZER));

    Document document = new Document();
    document.add(new TextField("_id", "1", Field.Store.YES));
    document.add(new LegacyIntField("test", 2, LegacyIntField.TYPE_STORED));
    indexWriter.addDocument(document);

    IndexReader reader = DirectoryReader.open(indexWriter);
    IndexSearcher searcher = new IndexSearcher(reader);
    TopDocs topDocs = searcher.search(new TermQuery(new Term("_id", "1")), 1);
    Document doc = searcher.doc(topDocs.scoreDocs[0].doc);
    IndexableField f = doc.getField("test");
    assertThat(f.stringValue(), equalTo("2"));

    BytesRefBuilder bytes = new BytesRefBuilder();
    LegacyNumericUtils.intToPrefixCoded(2, 0, bytes);
    topDocs = searcher.search(new TermQuery(new Term("test", bytes.get())), 1);
    doc = searcher.doc(topDocs.scoreDocs[0].doc);
    f = doc.getField("test");
    assertThat(f.stringValue(), equalTo("2"));

    indexWriter.close();
}
Example 2: testCache
import org.apache.lucene.store.Directory; // import the required package/class
/** Test that version map cache works, is evicted on close, etc */
public void testCache() throws Exception {
    int size = Versions.lookupStates.size();

    Directory dir = newDirectory();
    IndexWriter writer = new IndexWriter(dir, new IndexWriterConfig(Lucene.STANDARD_ANALYZER));
    Document doc = new Document();
    doc.add(new Field(UidFieldMapper.NAME, "6", UidFieldMapper.Defaults.FIELD_TYPE));
    doc.add(new NumericDocValuesField(VersionFieldMapper.NAME, 87));
    writer.addDocument(doc);
    DirectoryReader reader = DirectoryReader.open(writer);

    // should increase cache size by 1
    assertEquals(87, Versions.loadVersion(reader, new Term(UidFieldMapper.NAME, "6")));
    assertEquals(size + 1, Versions.lookupStates.size());

    // should be a cache hit
    assertEquals(87, Versions.loadVersion(reader, new Term(UidFieldMapper.NAME, "6")));
    assertEquals(size + 1, Versions.lookupStates.size());

    reader.close();
    writer.close();
    // core should be evicted from the map
    assertEquals(size, Versions.lookupStates.size());
    dir.close();
}
Example 3: testStatsDirWrapper
import org.apache.lucene.store.Directory; // import the required package/class
public void testStatsDirWrapper() throws IOException {
    Directory dir = newDirectory();
    Directory target = newDirectory();
    RecoveryState.Index indexStats = new RecoveryState.Index();
    StoreRecovery.StatsDirectoryWrapper wrapper = new StoreRecovery.StatsDirectoryWrapper(target, indexStats);

    try (IndexOutput output = dir.createOutput("foo.bar", IOContext.DEFAULT)) {
        CodecUtil.writeHeader(output, "foo", 0);
        int numBytes = randomIntBetween(100, 20000);
        for (int i = 0; i < numBytes; i++) {
            output.writeByte((byte) i);
        }
        CodecUtil.writeFooter(output);
    }

    wrapper.copyFrom(dir, "foo.bar", "bar.foo", IOContext.DEFAULT);
    assertNotNull(indexStats.getFileDetails("bar.foo"));
    assertNull(indexStats.getFileDetails("foo.bar"));
    assertEquals(dir.fileLength("foo.bar"), indexStats.getFileDetails("bar.foo").length());
    assertEquals(dir.fileLength("foo.bar"), indexStats.getFileDetails("bar.foo").recovered());
    assertFalse(indexStats.getFileDetails("bar.foo").reused());
    IOUtils.close(dir, target);
}
Example 4: testCheckIntegrity
import org.apache.lucene.store.Directory; // import the required package/class
public void testCheckIntegrity() throws IOException {
    Directory dir = newDirectory();
    long luceneFileLength = 0;

    try (IndexOutput output = dir.createOutput("lucene_checksum.bin", IOContext.DEFAULT)) {
        int iters = scaledRandomIntBetween(10, 100);
        for (int i = 0; i < iters; i++) {
            BytesRef bytesRef = new BytesRef(TestUtil.randomRealisticUnicodeString(random(), 10, 1024));
            output.writeBytes(bytesRef.bytes, bytesRef.offset, bytesRef.length);
            luceneFileLength += bytesRef.length;
        }
        CodecUtil.writeFooter(output);
        luceneFileLength += CodecUtil.footerLength();
    }

    final long luceneChecksum;
    try (IndexInput indexInput = dir.openInput("lucene_checksum.bin", IOContext.DEFAULT)) {
        assertEquals(luceneFileLength, indexInput.length());
        luceneChecksum = CodecUtil.retrieveChecksum(indexInput);
    }

    dir.close();
}
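The checksum written by CodecUtil.writeFooter in Example 4 can later be verified by re-reading the whole file. A minimal, self-contained sketch of that verification step; the index path and file name are illustrative assumptions:

import org.apache.lucene.codecs.CodecUtil;
import org.apache.lucene.store.Directory;
import org.apache.lucene.store.FSDirectory;
import org.apache.lucene.store.IOContext;
import org.apache.lucene.store.IndexInput;
import java.nio.file.Paths;

public class VerifyChecksum {
    public static void main(String[] args) throws Exception {
        try (Directory dir = FSDirectory.open(Paths.get("/tmp/index"));
             IndexInput in = dir.openInput("lucene_checksum.bin", IOContext.DEFAULT)) {
            // Reads the entire file and validates the footer checksum,
            // throwing CorruptIndexException on a mismatch.
            long checksum = CodecUtil.checksumEntireFile(in);
            System.out.println("checksum OK: " + checksum);
        }
    }
}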
Example 5: setPreload
import org.apache.lucene.store.Directory; // import the required package/class
private static Directory setPreload(Directory directory, Path location, LockFactory lockFactory,
                                    Set<String> preLoadExtensions) throws IOException {
    if (preLoadExtensions.isEmpty() == false
            && directory instanceof MMapDirectory
            && ((MMapDirectory) directory).getPreload() == false) {
        if (preLoadExtensions.contains("*")) {
            ((MMapDirectory) directory).setPreload(true);
            return directory;
        }
        MMapDirectory primary = new MMapDirectory(location, lockFactory);
        primary.setPreload(true);
        return new FileSwitchDirectory(preLoadExtensions, primary, directory, true) {
            @Override
            public String[] listAll() throws IOException {
                // avoid listing twice
                return primary.listAll();
            }
        };
    }
    return directory;
}
Example 6: init
import org.apache.lucene.store.Directory; // import the required package/class
public void init(String db, String uri, String lucene) {
    Dataset ds = TDBFactory.createDataset(db);
    // Lucene configuration
    try {
        Directory luceneDir = FSDirectory.open(new File(lucene));
        EntityDefinition entDef = new EntityDefinition("comment", "text", RDFS.comment);
        // Set uid in order to remove index entries automatically
        entDef.setUidField("uid");
        StandardAnalyzer stAn = new StandardAnalyzer(Version.LUCENE_4_9);
        dataset = TextDatasetFactory.createLucene(ds, luceneDir, entDef, stAn);
    } catch (IOException e) {
        // TODO Auto-generated catch block
        e.printStackTrace();
    }
    baseURI = uri;
    servers = new ArrayList<>();
    tdQueue = new PriorityQueue<ThingDescription>();
    loadTDQueue();
}
Example 7: main
import org.apache.lucene.store.Directory; // import the required package/class
public static void main(String[] args) {
    try {
        Directory directory = FSDirectory.getDirectory("demo index", false);
        IndexReader reader = IndexReader.open(directory);

        // Term term = new Term("path", "pizza");
        // int deleted = reader.delete(term);
        // System.out.println("deleted " + deleted +
        //         " documents containing " + term);

        for (int i = 0; i < reader.maxDoc(); i++) {
            reader.delete(i);
        }

        reader.close();
        directory.close();
    } catch (Exception e) {
        System.out.println(" caught a " + e.getClass() +
                "\n with message: " + e.getMessage());
    }
}
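Example 7 relies on the long-removed Lucene 1.x/2.x API (FSDirectory.getDirectory, IndexReader.delete). On current Lucene versions, deleting every document in an index is a single IndexWriter call; a minimal sketch, with an illustrative index path:

import org.apache.lucene.analysis.standard.StandardAnalyzer;
import org.apache.lucene.index.IndexWriter;
import org.apache.lucene.index.IndexWriterConfig;
import org.apache.lucene.store.Directory;
import org.apache.lucene.store.FSDirectory;
import java.nio.file.Paths;

public class DeleteAllDocs {
    public static void main(String[] args) throws Exception {
        try (Directory dir = FSDirectory.open(Paths.get("demo index"));
             IndexWriter writer = new IndexWriter(dir, new IndexWriterConfig(new StandardAnalyzer()))) {
            writer.deleteAll();  // marks every document as deleted
            writer.commit();     // makes the deletion durable
        }
    }
}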
Example 8: handleMergeException
import org.apache.lucene.store.Directory; // import the required package/class
@Override
protected void handleMergeException(final Directory dir, final Throwable exc) {
    logger.error("failed to merge", exc);
    if (config().getMergeSchedulerConfig().isNotifyOnMergeFailure()) {
        engineConfig.getThreadPool().generic().execute(new AbstractRunnable() {
            @Override
            public void onFailure(Throwable t) {
                logger.debug("merge failure action rejected", t);
            }

            @Override
            protected void doRun() throws Exception {
                MergePolicy.MergeException e = new MergePolicy.MergeException(exc, dir);
                failEngine("merge failed", e);
            }
        });
    }
}
Example 9: process
import org.apache.lucene.store.Directory; // import the required package/class
@Override
public void process(ProcessingContext<Corpus> ctx, Corpus corpus) throws ModuleException {
    try (KeywordAnalyzer kwa = new KeywordAnalyzer()) {
        IndexWriterConfig writerConfig = new IndexWriterConfig(Version.LUCENE_36, kwa);
        writerConfig.setOpenMode(append ? OpenMode.CREATE_OR_APPEND : OpenMode.CREATE);
        try (Directory dir = FSDirectory.open(indexDir)) {
            try (IndexWriter writer = new IndexWriter(dir, writerConfig)) {
                AlvisDBIndexerResolvedObjects resObj = getResolvedObjects();
                Logger logger = getLogger(ctx);
                EvaluationContext evalCtx = new EvaluationContext(logger);
                for (ADBElements.Resolved ent : resObj.elements) {
                    ent.indexElements(logger, writer, evalCtx, corpus);
                }
            }
        } catch (IOException e) {
            rethrow(e);
        }
    }
}
Example 10: testMissingShard
import org.apache.lucene.store.Directory; // import the required package/class
public void testMissingShard() throws IOException {
    try (Directory dir = newDirectory();
         RandomIndexWriter w = new RandomIndexWriter(random(), dir)) {
        w.addDocument(new Document());
        try (IndexReader reader = w.getReader()) {
            ShardCoreKeyMap map = new ShardCoreKeyMap();
            for (LeafReaderContext ctx : reader.leaves()) {
                try {
                    map.add(ctx.reader());
                    fail();
                } catch (IllegalArgumentException expected) {
                    // ok
                }
            }
        }
    }
}
Example 11: newDocValuesProducer
import org.apache.lucene.store.Directory; // import the required package/class
private RefCount<DocValuesProducer> newDocValuesProducer(SegmentCommitInfo si, IOContext context, Directory dir,
        DocValuesFormat dvFormat, final Long gen, FieldInfos infos, int termsIndexDivisor) throws IOException {
    Directory dvDir = dir;
    String segmentSuffix = "";
    if (gen.longValue() != -1) {
        dvDir = si.info.dir; // gen'd files are written outside CFS, so use SegInfo directory
        segmentSuffix = Long.toString(gen.longValue(), Character.MAX_RADIX);
    }

    // set SegmentReadState to list only the fields that are relevant to that gen
    SegmentReadState srs = new SegmentReadState(dvDir, si.info, infos, context, termsIndexDivisor, segmentSuffix);
    return new RefCount<DocValuesProducer>(dvFormat.fieldsProducer(srs)) {
        @SuppressWarnings("synthetic-access")
        @Override
        protected void release() throws IOException {
            object.close();
            synchronized (SegmentDocValues.this) {
                genDVProducers.remove(gen);
            }
        }
    };
}
Example 12: indexOneDoc
import org.apache.lucene.store.Directory; // import the required package/class
private IndexReader indexOneDoc(Directory dir, String field, String value, Analyzer analyzer) throws IOException {
    IndexWriterConfig iwc = newIndexWriterConfig(analyzer);
    iwc.setMergePolicy(newLogMergePolicy());
    RandomIndexWriter iw = new RandomIndexWriter(random(), dir, iwc);

    FieldType ft = new FieldType(TextField.TYPE_STORED);
    ft.setIndexOptions(IndexOptions.DOCS_AND_FREQS_AND_POSITIONS_AND_OFFSETS);
    Field textField = new Field(field, "", ft);
    Document doc = new Document();
    doc.add(textField);

    textField.setStringValue(value);
    iw.addDocument(doc);
    IndexReader ir = iw.getReader();
    iw.close();
    return ir;
}
Example 13: testMultiPhrasePrefixQuery
import org.apache.lucene.store.Directory; // import the required package/class
public void testMultiPhrasePrefixQuery() throws Exception {
    Analyzer analyzer = new StandardAnalyzer();
    Directory dir = newDirectory();
    String value = "The quick brown fox.";
    IndexReader ir = indexOneDoc(dir, "text", value, analyzer);

    MultiPhrasePrefixQuery query = new MultiPhrasePrefixQuery();
    query.add(new Term("text", "quick"));
    query.add(new Term("text", "brown"));
    query.add(new Term("text", "fo"));

    IndexSearcher searcher = newSearcher(ir);
    TopDocs topDocs = searcher.search(query, 10, Sort.INDEXORDER);
    assertThat(topDocs.totalHits, equalTo(1));
    int docId = topDocs.scoreDocs[0].doc;

    CustomPassageFormatter passageFormatter = new CustomPassageFormatter("<b>", "</b>", new DefaultEncoder());
    CustomUnifiedHighlighter highlighter = new CustomUnifiedHighlighter(searcher, analyzer,
            passageFormatter, null, value, false);
    Snippet[] snippets = highlighter.highlightField("text", query, docId, 5);
    assertThat(snippets.length, equalTo(1));
    assertThat(snippets[0].getText(), equalTo("The <b>quick</b> <b>brown</b> <b>fox</b>."));

    ir.close();
    dir.close();
}
Example 14: writeLuceneIndexForProject
import org.apache.lucene.store.Directory; // import the required package/class
/**
 * Stores features from a specified feature file in the specified project's Lucene index.
 * Sample query: featureId:rs44022* AND (variationType:del OR variationType:ins)
 *
 * @param featureFileId the FeatureFile whose features should be saved
 * @param projectId     the project for which to write the index
 * @param entries       a list of FeatureIndexEntry objects to write to the index
 * @throws IOException if the index cannot be written
 */
public void writeLuceneIndexForProject(final Long featureFileId, final long projectId,
                                       final List<? extends FeatureIndexEntry> entries) throws IOException {
    try (
        StandardAnalyzer analyzer = new StandardAnalyzer();
        Directory index = fileManager.createIndexForProject(projectId);
        IndexWriter writer = new IndexWriter(index, new IndexWriterConfig(analyzer).setOpenMode(
            IndexWriterConfig.OpenMode.CREATE_OR_APPEND))
    ) {
        FacetsConfig facetsConfig = new FacetsConfig();
        facetsConfig.setIndexFieldName(FeatureIndexFields.CHR_ID.getFieldName(),
                FeatureIndexFields.FACET_CHR_ID.getFieldName());

        for (FeatureIndexEntry entry : entries) {
            Document document = new Document();
            addCommonDocumentFields(document, entry, featureFileId);
            if (entry instanceof VcfIndexEntry) {
                addVcfDocumentFields(document, entry);
            }
            writer.addDocument(facetsConfig.build(document));
        }
    }
}
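The javadoc above gives a sample query string (featureId:rs44022* AND (variationType:del OR variationType:ins)). As a minimal sketch of how such a query could be parsed and run against the project index: the Directory parameter stands in for whatever the project's fileManager returns, and the field names are taken from the javadoc, so treat the details as assumptions rather than the project's actual search code.

import org.apache.lucene.analysis.standard.StandardAnalyzer;
import org.apache.lucene.index.DirectoryReader;
import org.apache.lucene.index.IndexReader;
import org.apache.lucene.queryparser.classic.QueryParser;
import org.apache.lucene.search.IndexSearcher;
import org.apache.lucene.search.Query;
import org.apache.lucene.search.TopDocs;
import org.apache.lucene.store.Directory;

public final class FeatureIndexQueryExample {
    public static TopDocs searchFeatures(Directory index) throws Exception {
        try (IndexReader reader = DirectoryReader.open(index)) {
            IndexSearcher searcher = new IndexSearcher(reader);
            // "featureId" is the default field when no field prefix is given in the query string.
            QueryParser parser = new QueryParser("featureId", new StandardAnalyzer());
            Query query = parser.parse("featureId:rs44022* AND (variationType:del OR variationType:ins)");
            return searcher.search(query, 100);
        }
    }
}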
Example 15: deleteFromIndexByFileId
import org.apache.lucene.store.Directory; // import the required package/class
/**
 * Deletes features that belong to the specified feature files from the project's index.
 *
 * @param projectId the project whose index entries should be deleted
 * @param fileIds   a list of pairs of feature type to file ID whose entries should be deleted;
 *                  to delete gene file entries, pass FeatureType.GENE
 */
public void deleteFromIndexByFileId(final long projectId, List<Pair<FeatureType, Long>> fileIds) {
    if (fileIds == null || fileIds.isEmpty() || !fileManager.indexForProjectExists(projectId)) {
        return;
    }

    try (
        StandardAnalyzer analyzer = new StandardAnalyzer();
        Directory index = fileManager.getIndexForProject(projectId);
        IndexWriter writer = new IndexWriter(index, new IndexWriterConfig(analyzer).setOpenMode(
            IndexWriterConfig.OpenMode.CREATE_OR_APPEND))
    ) {
        if (fileManager.indexForProjectExists(projectId)) {
            for (Pair<FeatureType, Long> id : fileIds) {
                deleteDocumentByTypeAndId(id.getKey(), id.getValue(), writer);
            }
        }
    } catch (IOException e) {
        LOGGER.error("Exception while deleting from index:", e);
    }
}
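The helper deleteDocumentByTypeAndId is not shown in Example 15. Purely as an illustration of the pattern (the field names FEATURE_TYPE and FILE_ID are hypothetical, not taken from the original code), a per-file delete with IndexWriter usually reduces to a deleteDocuments call over a query that matches both values:

import org.apache.lucene.document.LongPoint;
import org.apache.lucene.index.IndexWriter;
import org.apache.lucene.index.Term;
import org.apache.lucene.search.BooleanClause;
import org.apache.lucene.search.BooleanQuery;
import org.apache.lucene.search.Query;
import org.apache.lucene.search.TermQuery;
import java.io.IOException;

final class DeleteByFileSketch {
    // Hypothetical field names; the real index layout is defined elsewhere in the project.
    static void deleteByTypeAndFileId(String featureType, long fileId, IndexWriter writer) throws IOException {
        Query byType = new TermQuery(new Term("FEATURE_TYPE", featureType));
        Query byFileId = LongPoint.newExactQuery("FILE_ID", fileId);
        Query both = new BooleanQuery.Builder()
                .add(byType, BooleanClause.Occur.MUST)
                .add(byFileId, BooleanClause.Occur.MUST)
                .build();
        writer.deleteDocuments(both); // deletions become visible after the next commit/refresh
    }
}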