本文整理汇总了Java中org.apache.lucene.search.SearcherManager类的典型用法代码示例。如果您正苦于以下问题:Java SearcherManager类的具体用法?Java SearcherManager怎么用?Java SearcherManager使用的例子?那么恭喜您, 这里精选的类代码示例或许可以为您提供帮助。
SearcherManager类属于org.apache.lucene.search包,在下文中一共展示了SearcherManager类的15个代码示例,这些例子默认根据受欢迎程度排序。您可以为喜欢或者感觉有用的代码点赞,您的评价将有助于系统推荐出更棒的Java代码示例。
示例1: wrapSearcher
import org.apache.lucene.search.SearcherManager; //导入依赖的package包/类
public Engine.Searcher wrapSearcher(String source, Engine.Searcher engineSearcher, IndexSearcher searcher, SearcherManager manager) {
    final AssertingIndexSearcher wrapped = newSearcher(source, searcher, manager);
    wrapped.setSimilarity(searcher.getSimilarity(true));
    // Hand the ORIGINAL searcher to AssertingSearcher so that is the instance
    // released later on. If the index reader was wrapped above, the wrapped
    // version must never be handed back to the manager on release, or the
    // reader would be closed too early — getting this wrong makes failures
    // show up all over the place.
    final AssertingSearcher result = new AssertingSearcher(wrapped, engineSearcher, shardId, logger) {
        @Override
        public void close() {
            try {
                // Deregister from leak tracking before closing the underlying searcher.
                searcherCloseable.remove(this);
            } finally {
                super.close();
            }
        }
    };
    searcherCloseable.add(result, engineSearcher.source());
    return result;
}
示例2: readLastCommittedSegmentInfos
import org.apache.lucene.search.SearcherManager; //导入依赖的package包/类
/**
* Read the last segments info from the commit pointed to by the searcher manager
*/
/**
 * Reads the {@link SegmentInfos} of the commit the given searcher manager currently
 * points at, falling back to the {@link Store}'s on-disk segments file when the
 * commit cannot be read.
 */
protected static SegmentInfos readLastCommittedSegmentInfos(final SearcherManager sm, final Store store) throws IOException {
    final IndexSearcher searcher = sm.acquire();
    try {
        final DirectoryReader reader = (DirectoryReader) searcher.getIndexReader();
        return Lucene.readSegmentInfos(reader.getIndexCommit());
    } catch (IOException commitFailure) {
        // Reading from the commit failed; fall back to the store's copy.
        try {
            return store.readLastCommittedSegmentsInfo();
        } catch (IOException storeFailure) {
            // Keep the original failure attached for diagnostics.
            storeFailure.addSuppressed(commitFailure);
            throw storeFailure;
        }
    } finally {
        // Always return the acquired reference to the manager.
        sm.release(searcher);
    }
}
示例3: readLastCommittedSegmentInfos
import org.apache.lucene.search.SearcherManager; //导入依赖的package包/类
/**
* Read the last segments info from the commit pointed to by the searcher manager
*/
/**
 * Read the last segments info from the commit pointed to by the searcher manager.
 * If that read fails, fall back to reading the segments file directly from the store.
 */
protected static SegmentInfos readLastCommittedSegmentInfos(final SearcherManager sm, final Store store) throws IOException {
    IndexSearcher searcher = sm.acquire();
    try {
        IndexCommit commit = ((DirectoryReader) searcher.getIndexReader()).getIndexCommit();
        return Lucene.readSegmentInfos(commit);
    } catch (IOException primary) {
        try {
            return store.readLastCommittedSegmentsInfo();
        } catch (IOException fallback) {
            // Surface the fallback failure, but keep the primary one attached.
            fallback.addSuppressed(primary);
            throw fallback;
        }
    } finally {
        sm.release(searcher);
    }
}
示例4: on
import org.apache.lucene.search.SearcherManager; //导入依赖的package包/类
/**
 * Closes and discards the cached {@link SearcherManager} of a project when that
 * project entity is removed, after the removing transaction commits.
 */
@Transactional
@Listen
public void on(EntityRemoved event) {
    if (event.getEntity() instanceof Project) {
        dao.doAfterCommit(new Runnable() {
            @Override
            public void run() {
                // Guard the shared map against concurrent open/close of managers.
                synchronized (searcherManagers) {
                    Long projectId = event.getEntity().getId();
                    SearcherManager searcherManager = searcherManagers.remove(projectId);
                    if (searcherManager != null) {
                        try {
                            searcherManager.close();
                        } catch (IOException e) {
                            // FIX: propagate() wraps-and-throws, but its return value was
                            // being discarded; the idiomatic 'throw Throwables.propagate(e)'
                            // makes the non-returning intent explicit to reader and compiler.
                            throw Throwables.propagate(e);
                        }
                    }
                }
            }
        });
    }
}
示例5: LuceneService
import org.apache.lucene.search.SearcherManager; //导入依赖的package包/类
/**
 * Creates the service.
 *
 * <p>Required collaborators are validated BEFORE any field is assigned, so the
 * constructor fails fast on a bad argument instead of validating already-assigned
 * state. NOTE(review): {@code analyzer} is intentionally left unvalidated to match
 * the original contract — confirm whether a null analyzer is ever legal here.
 */
public LuceneService(final SearcherManager searcherManager,
        final Analyzer analyzer,
        final LuceneQueryTransformer queryTransformer,
        final LuceneDocumentTransformer<QR> documentTransformer,
        final SortTypeFactory sortTypeFactory,
        final Integer maxSearchResults) {
    Validate.notNull(searcherManager);
    Validate.notNull(queryTransformer);
    Validate.notNull(documentTransformer);
    Validate.notNull(sortTypeFactory);
    Validate.notNull(maxSearchResults);
    this.searcherManager = searcherManager;
    this.analyzer = analyzer;
    this.queryTransformer = queryTransformer;
    this.documentTransformer = documentTransformer;
    this.sortTypeFactory = sortTypeFactory;
    this.maxSearchResults = maxSearchResults;
    this.fieldDocSerializer = new StandardFieldDocSerializer();
}
示例6: findWave
import org.apache.lucene.search.SearcherManager; //导入依赖的package包/类
/**
 * Looks up the digest of a single wave by its serialized wave id.
 *
 * @return the parsed digest, or {@code null} if no matching document exists or
 *         the search failed (the failure is logged, not rethrown)
 */
@Override
public Digest findWave(WaveId waveId, ParticipantId viewer) {
    TermQuery query = new TermQuery(new Term(IndexCondition.Field.WAVE_ID.toString(), waveId.serialise()));
    SearcherManager searcherManager = nrtManager.getSearcherManager(true);
    IndexSearcher indexSearcher = searcherManager.acquire();
    try {
        TopDocs hints = indexSearcher.search(query, 1);
        if (hints.totalHits != 0) {
            ScoreDoc hint = hints.scoreDocs[0];
            return parseDigest(indexSearcher.doc(hint.doc), null);
        }
    } catch (IOException ex) {
        LOG.log(Level.SEVERE, "Search wave " + waveId.serialise() + " failed", ex);
    } finally {
        // FIX: the acquired searcher was never released, leaking a reader
        // reference (and the segments it pins) on every call. Per the
        // SearcherManager contract, every acquire() must be paired with release().
        try {
            searcherManager.release(indexSearcher);
        } catch (IOException ex) {
            LOG.log(Level.SEVERE, "Failed to release searcher", ex);
        }
    }
    return null;
}
示例7: AnalyzingInfixSuggester
import org.apache.lucene.search.SearcherManager; //导入依赖的package包/类
/** Create a new instance, loading from a previously built
* AnalyzingInfixSuggester directory, if it exists. This directory must be
* private to the infix suggester (i.e., not an external
* Lucene index). Note that {@link #close}
* will also close the provided directory.
*
* @param minPrefixChars Minimum number of leading characters
* before PrefixQuery is used (default 4).
* Prefixes shorter than this are indexed as character
* ngrams (increasing index size but making lookups
* faster).
*/
/**
 * Creates a new instance over {@code dir}, reusing a previously built
 * AnalyzingInfixSuggester index if one already exists there. The directory must be
 * private to this suggester (i.e., not an external Lucene index); note that
 * {@link #close} will also close the provided directory.
 *
 * @param minPrefixChars minimum number of leading characters before PrefixQuery is
 *        used (default 4); shorter prefixes are indexed as character ngrams, which
 *        increases index size but makes lookups faster
 * @throws IllegalArgumentException if {@code minPrefixChars} is negative
 */
public AnalyzingInfixSuggester(Version matchVersion, Directory dir, Analyzer indexAnalyzer, Analyzer queryAnalyzer, int minPrefixChars) throws IOException {
    if (minPrefixChars < 0) {
        throw new IllegalArgumentException("minPrefixChars must be >= 0; got: " + minPrefixChars);
    }
    this.matchVersion = matchVersion;
    this.dir = dir;
    this.indexAnalyzer = indexAnalyzer;
    this.queryAnalyzer = queryAnalyzer;
    this.minPrefixChars = minPrefixChars;
    // If an index was built previously, reopen it in APPEND mode; otherwise the
    // writer and searcher manager are left unset until the index is first built.
    if (DirectoryReader.indexExists(dir)) {
        writer = new IndexWriter(dir,
                getIndexWriterConfig(matchVersion, getGramAnalyzer(), IndexWriterConfig.OpenMode.APPEND));
        searcherMgr = new SearcherManager(writer, true, null);
    }
}
示例8: refreshIfNeeded
import org.apache.lucene.search.SearcherManager; //导入依赖的package包/类
/**
 * Refreshes the paired searcher + taxonomy reader, keeping the two consistent.
 * Returns {@code null} when the index is unchanged (ReferenceManager's signal
 * that no refresh is needed).
 */
@Override
protected SearcherAndTaxonomy refreshIfNeeded(SearcherAndTaxonomy ref) throws IOException {
    // Must re-open searcher first, otherwise we may get a
    // new reader that references ords not yet known to the
    // taxonomy reader:
    final IndexReader r = ref.searcher.getIndexReader();
    final IndexReader newReader = DirectoryReader.openIfChanged((DirectoryReader) r);
    if (newReader == null) {
        // Index unchanged — nothing to refresh.
        return null;
    } else {
        DirectoryTaxonomyReader tr = TaxonomyReader.openIfChanged(ref.taxonomyReader);
        if (tr == null) {
            // Taxonomy unchanged: reuse the old reader, taking an extra reference
            // because the new SearcherAndTaxonomy pair will release it independently.
            ref.taxonomyReader.incRef();
            tr = ref.taxonomyReader;
        } else if (taxoWriter != null && taxoWriter.getTaxonomyEpoch() != taxoEpoch) {
            // A replaced taxonomy invalidates all ordinals; refreshing would silently
            // mix old and new ords, so close what we opened and fail loudly instead.
            IOUtils.close(newReader, tr);
            throw new IllegalStateException("DirectoryTaxonomyWriter.replaceTaxonomy was called, which is not allowed when using SearcherTaxonomyManager");
        }
        return new SearcherAndTaxonomy(SearcherManager.getSearcher(searcherFactory, newReader), tr);
    }
}
示例9: LuceneFiler
import org.apache.lucene.search.SearcherManager; //导入依赖的package包/类
/**
 * Wraps the delegate filer with a near-real-time Lucene index rooted at the
 * configured path, and starts the controlled reopen thread that keeps searchers
 * within the configured staleness bounds.
 */
public LuceneFiler(@Nonnull Filer delegate, @Nonnull Config config) throws IOException {
    super(delegate);
    String indexPath = config.getString("index.path");
    maxAge = config.getTime("index.maxAge", "-1");
    double mergeMb = config.getDouble("index.maxMergeMb", 4);
    double cacheMb = config.getDouble("index.maxCacheMb", 64);
    long targetMaxStale = config.getTime("index.targetMaxStale", "5s");
    long targetMinStale = config.getTime("index.targetMinStale", "1s");
    // NRT caching keeps small, recently flushed segments in RAM to cut disk churn.
    Directory baseDir = FSDirectory.open(new File(indexPath).toPath());
    NRTCachingDirectory nrtDir = new NRTCachingDirectory(baseDir, mergeMb, cacheMb);
    IndexWriterConfig iwc = new IndexWriterConfig(null);
    iwc.setOpenMode(OpenMode.CREATE_OR_APPEND);
    writer = new TrackingIndexWriter(new IndexWriter(nrtDir, iwc));
    manager = new SearcherManager(writer.getIndexWriter(), true, new SearcherFactory());
    thread = new ControlledRealTimeReopenThread<>(writer, manager, targetMaxStale, targetMinStale);
    thread.start();
}
示例10: setDAG
import org.apache.lucene.search.SearcherManager; //导入依赖的package包/类
/**
 * Attaches this module to the DAG and opens (or creates) the Lucene index that
 * backs it, setting up the writer, query parser, and searcher manager.
 */
@Override
public void setDAG(DirectedAcyclicGraph directedAcyclicGraph) {
    super.setDAG(directedAcyclicGraph);
    // Connect to the Lucene DB.
    try {
        Analyzer keywordAnalyzer = new KeywordAnalyzer();
        IndexWriterConfig writerConfig = new IndexWriterConfig(keywordAnalyzer);
        writerConfig.setOpenMode(OpenMode.CREATE_OR_APPEND);
        Path indexPath = DAGModule.moduleFile(directedAcyclicGraph.rootDir_,
                INDEX_FOLDER).toPath();
        Directory indexDir = FSDirectory.open(indexPath);
        writer_ = new IndexWriter(indexDir, writerConfig);
        // Searching side: parser over the lowercase field, manager kept in
        // sync with the writer.
        parser_ = new QueryParser(LOWERCASE_FIELD, keywordAnalyzer);
        manager_ = new SearcherManager(writer_, true, new SearcherFactory());
    } catch (Exception e) {
        // NOTE(review): failures are only printed, leaving the module half-initialized;
        // behavior preserved from the original — confirm this is intentional.
        e.printStackTrace();
    }
}
示例11: refreshIfNeeded
import org.apache.lucene.search.SearcherManager; //导入依赖的package包/类
/**
 * Refreshes the paired searcher + taxonomy reader, keeping the two consistent.
 * Returns {@code null} when the index is unchanged (ReferenceManager's signal
 * that no refresh is needed).
 */
@Override
protected SearcherAndTaxonomy refreshIfNeeded(SearcherAndTaxonomy ref) throws IOException {
    // Must re-open searcher first, otherwise we may get a
    // new reader that references ords not yet known to the
    // taxonomy reader:
    final IndexReader r = ref.searcher.getIndexReader();
    final IndexReader newReader = DirectoryReader.openIfChanged((DirectoryReader) r);
    if (newReader == null) {
        // Index unchanged — nothing to refresh.
        return null;
    } else {
        DirectoryTaxonomyReader tr = TaxonomyReader.openIfChanged(ref.taxonomyReader);
        if (tr == null) {
            // Taxonomy unchanged: reuse the old reader, taking an extra reference
            // because the new SearcherAndTaxonomy pair will release it independently.
            ref.taxonomyReader.incRef();
            tr = ref.taxonomyReader;
        } else if (taxoWriter != null && taxoWriter.getTaxonomyEpoch() != taxoEpoch) {
            // FIX: guard against a null taxoWriter before dereferencing it — the
            // manager can be constructed from readers only, with no writer, and
            // the epoch check only applies when a writer exists.
            IOUtils.close(newReader, tr);
            throw new IllegalStateException("DirectoryTaxonomyWriter.replaceTaxonomy was called, which is not allowed when using SearcherTaxonomyManager");
        }
        return new SearcherAndTaxonomy(SearcherManager.getSearcher(searcherFactory, newReader), tr);
    }
}
示例12: newSearcher
import org.apache.lucene.search.SearcherManager; //导入依赖的package包/类
/**
 * Builds an {@link AssertingIndexSearcher} over the given searcher's reader,
 * optionally wrapping the reader when the mock context requests it. The asserting
 * searcher performs basic query checks and verifies weights are normalized only once.
 */
public AssertingIndexSearcher newSearcher(String source, IndexSearcher searcher, SearcherManager manager) throws EngineException {
    final IndexReader original = searcher.getIndexReader();
    assert original != null;
    IndexReader readerToUse = original;
    if (mockContext.wrapReader && original instanceof DirectoryReader) {
        readerToUse = wrapReader((DirectoryReader) original);
    }
    final AssertingIndexSearcher asserting = new AssertingIndexSearcher(mockContext.random, readerToUse);
    asserting.setSimilarity(searcher.getSimilarity(true));
    asserting.setQueryCache(filterCache);
    asserting.setQueryCachingPolicy(filterCachingPolicy);
    return asserting;
}
示例13: ShadowEngine
import org.apache.lucene.search.SearcherManager; //导入依赖的package包/类
/**
 * Creates a shadow engine: a read-only engine that opens index files written by a
 * primary elsewhere, via a SearcherManager over a DirectoryReader (no IndexWriter).
 * Waits up to the configured retry time for the index to appear before failing.
 *
 * @throws IllegalArgumentException if refresh listeners are configured — a shadow
 *         engine never refreshes on its own, so listeners could never fire
 * @throws EngineCreationFailureException if the index reader cannot be opened
 */
public ShadowEngine(EngineConfig engineConfig)  {
    super(engineConfig);
    if (engineConfig.getRefreshListeners() != null) {
        throw new IllegalArgumentException("ShadowEngine doesn't support RefreshListeners");
    }
    SearcherFactory searcherFactory = new EngineSearcherFactory(engineConfig);
    // How long to wait for the primary to create the index before giving up.
    final long nonexistentRetryTime = engineConfig.getIndexSettings().getSettings()
            .getAsTime(NONEXISTENT_INDEX_RETRY_WAIT, DEFAULT_NONEXISTENT_INDEX_RETRY_WAIT)
            .getMillis();
    try {
        DirectoryReader reader = null;
        // Take a store reference for the engine's lifetime; released below only
        // when construction fails (on success, ownership passes to the engine).
        store.incRef();
        boolean success = false;
        try {
            if (Lucene.waitForIndex(store.directory(), nonexistentRetryTime)) {
                reader = ElasticsearchDirectoryReader.wrap(DirectoryReader.open(store.directory()), shardId);
                this.searcherManager = new SearcherManager(reader, searcherFactory);
                this.lastCommittedSegmentInfos = readLastCommittedSegmentInfos(searcherManager, store);
                success = true;
            } else {
                throw new IllegalStateException("failed to open a shadow engine after" +
                                                nonexistentRetryTime + "ms, " +
                                                "directory is not an index");
            }
        } catch (Exception e) {
            logger.warn("failed to create new reader", e);
            throw e;
        } finally {
            if (success == false) {
                // Undo partial construction: close whatever reader was opened
                // (suppressing secondary failures) and drop our store reference.
                IOUtils.closeWhileHandlingException(reader);
                store.decRef();
            }
        }
    } catch (IOException ex) {
        throw new EngineCreationFailureException(shardId, "failed to open index reader", ex);
    }
    logger.trace("created new ShadowEngine");
}
示例14: ShadowEngine
import org.apache.lucene.search.SearcherManager; //导入依赖的package包/类
/**
 * Creates a shadow engine: a read-only engine that opens index files written by a
 * primary elsewhere, via a SearcherManager over a DirectoryReader (no IndexWriter).
 * Waits up to the configured retry time for the index to appear before failing.
 *
 * @throws EngineCreationFailureException if the index reader cannot be opened
 */
public ShadowEngine(EngineConfig engineConfig)  {
    super(engineConfig);
    SearcherFactory searcherFactory = new EngineSearcherFactory(engineConfig);
    // How long to wait for the primary to create the index before giving up.
    final long nonexistentRetryTime = engineConfig.getIndexSettings()
            .getAsTime(NONEXISTENT_INDEX_RETRY_WAIT, DEFAULT_NONEXISTENT_INDEX_RETRY_WAIT)
            .getMillis();
    try {
        DirectoryReader reader = null;
        // Take a store reference for the engine's lifetime; released below only
        // when construction fails (on success, ownership passes to the engine).
        store.incRef();
        boolean success = false;
        try {
            if (Lucene.waitForIndex(store.directory(), nonexistentRetryTime)) {
                reader = ElasticsearchDirectoryReader.wrap(DirectoryReader.open(store.directory()), shardId);
                this.searcherManager = new SearcherManager(reader, searcherFactory);
                this.lastCommittedSegmentInfos = readLastCommittedSegmentInfos(searcherManager, store);
                success = true;
            } else {
                throw new IllegalStateException("failed to open a shadow engine after" +
                                                nonexistentRetryTime + "ms, " +
                                                "directory is not an index");
            }
        // NOTE(review): Throwable (not Exception) is caught and rethrown here so the
        // cleanup in 'finally' also runs for Errors — confirm this is intentional.
        } catch (Throwable e) {
            logger.warn("failed to create new reader", e);
            throw e;
        } finally {
            if (success == false) {
                // Undo partial construction: close whatever reader was opened
                // (suppressing secondary failures) and drop our store reference.
                IOUtils.closeWhileHandlingException(reader);
                store.decRef();
            }
        }
    } catch (IOException ex) {
        throw new EngineCreationFailureException(shardId, "failed to open index reader", ex);
    }
    logger.trace("created new ShadowEngine");
}
示例15: LuceneRetrievalService
import org.apache.lucene.search.SearcherManager; //导入依赖的package包/类
/**
 * Creates the retrieval service, validating required collaborators and storing
 * them. As in the original, {@code analyzer} is not null-checked — NOTE(review):
 * confirm whether a null analyzer is ever intentional.
 */
public LuceneRetrievalService(final SearcherManager searcherManager,
        final Analyzer analyzer,
        final LuceneQueryTransformer queryTransformer,
        final LuceneDocumentTransformer<E> documentTransformer,
        final SortTypeFactory sortTypeFactory) {
    Validate.notNull(searcherManager);
    Validate.notNull(queryTransformer);
    Validate.notNull(documentTransformer);
    Validate.notNull(sortTypeFactory);
    this.searcherManager = searcherManager;
    this.analyzer = analyzer;
    this.queryTransformer = queryTransformer;
    this.documentTransformer = documentTransformer;
    this.sortTypeFactory = sortTypeFactory;
}