This article collects typical usage examples of the Java class org.apache.lucene.util.InfoStream. If you are wondering what InfoStream does, how to use it, or what real-world usage looks like, the curated examples below should help.
The InfoStream class belongs to the org.apache.lucene.util package. A total of 15 code examples are shown below, ordered by popularity by default.
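Before the examples, here is a minimal sketch of what implementing InfoStream looks like: it is an abstract class with three methods to override, message(), isEnabled(), and close(). The class name StdOutInfoStream below is illustrative only; for this particular purpose Lucene already ships org.apache.lucene.util.PrintStreamInfoStream.

import java.io.IOException;
import org.apache.lucene.util.InfoStream;

// Minimal sketch: an InfoStream that forwards every message to System.out.
public class StdOutInfoStream extends InfoStream {

    @Override
    public void message(String component, String message) {
        // component identifies the caller, e.g. "IW", "MP", "DWPT"
        System.out.println(component + ": " + message);
    }

    @Override
    public boolean isEnabled(String component) {
        // returning false suppresses all messages for that component
        return true;
    }

    @Override
    public void close() throws IOException {
        // nothing to release when writing to System.out
    }
}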
Example 1: getIndexWriterConfig
import org.apache.lucene.util.InfoStream; // import the required package/class

private IndexWriterConfig getIndexWriterConfig(boolean create) {
    final IndexWriterConfig iwc = new IndexWriterConfig(engineConfig.getAnalyzer());
    iwc.setCommitOnClose(false); // we by default don't commit on close
    iwc.setOpenMode(create ? IndexWriterConfig.OpenMode.CREATE : IndexWriterConfig.OpenMode.APPEND);
    iwc.setIndexDeletionPolicy(deletionPolicy);
    // with tests.verbose, lucene sets this up: plumb to align with filesystem stream
    boolean verbose = false;
    try {
        verbose = Boolean.parseBoolean(System.getProperty("tests.verbose"));
    } catch (Exception ignore) {
    }
    iwc.setInfoStream(verbose ? InfoStream.getDefault() : new LoggerInfoStream(logger));
    iwc.setMergeScheduler(mergeScheduler);
    MergePolicy mergePolicy = config().getMergePolicy();
    // Give us the opportunity to upgrade old segments while performing
    // background merges
    mergePolicy = new ElasticsearchMergePolicy(mergePolicy);
    iwc.setMergePolicy(mergePolicy);
    iwc.setSimilarity(engineConfig.getSimilarity());
    iwc.setRAMBufferSizeMB(engineConfig.getIndexingBufferSize().getMbFrac());
    iwc.setCodec(engineConfig.getCodec());
    iwc.setUseCompoundFile(true); // always use compound on flush - reduces # of file-handles on refresh
    return iwc;
}
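For comparison, the same InfoStream wiring outside Elasticsearch is just a call to IndexWriterConfig.setInfoStream(). A minimal sketch, assuming Lucene 5.x-style constructors; the class name, index path, and analyzer choice are illustrative:

import java.io.IOException;
import java.nio.file.Paths;
import org.apache.lucene.analysis.standard.StandardAnalyzer;
import org.apache.lucene.index.IndexWriter;
import org.apache.lucene.index.IndexWriterConfig;
import org.apache.lucene.store.Directory;
import org.apache.lucene.store.FSDirectory;
import org.apache.lucene.util.PrintStreamInfoStream;

public class VerboseIndexingDemo { // illustrative class name
    public static void main(String[] args) throws IOException {
        Directory dir = FSDirectory.open(Paths.get("/tmp/demo-index")); // hypothetical path
        IndexWriterConfig iwc = new IndexWriterConfig(new StandardAnalyzer());
        // Route IndexWriter's internal logging to stdout; the default InfoStream discards it.
        iwc.setInfoStream(new PrintStreamInfoStream(System.out));
        try (IndexWriter writer = new IndexWriter(dir, iwc)) {
            // add documents here; "IW", "DW", "MP" messages appear on stdout
        }
        dir.close();
    }
}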
Example 2: SegmentMerger
import org.apache.lucene.util.InfoStream; // import the required package/class

SegmentMerger(List<AtomicReader> readers, SegmentInfo segmentInfo, InfoStream infoStream, Directory dir, int termIndexInterval,
              MergeState.CheckAbort checkAbort, FieldInfos.FieldNumbers fieldNumbers, IOContext context, boolean validate) throws IOException {
    // validate incoming readers
    if (validate) {
        for (AtomicReader reader : readers) {
            reader.checkIntegrity();
        }
    }
    mergeState = new MergeState(readers, segmentInfo, infoStream, checkAbort);
    directory = dir;
    this.termIndexInterval = termIndexInterval;
    this.codec = segmentInfo.getCodec();
    this.context = context;
    this.fieldInfosBuilder = new FieldInfos.Builder(fieldNumbers);
    mergeState.segmentInfo.setDocCount(setDocMaps());
}
Example 3: IndexReplicationHandler
import org.apache.lucene.util.InfoStream; // import the required package/class

/**
 * Constructor with the given index directory and callback to notify when the
 * indexes were updated.
 */
public IndexReplicationHandler(Directory indexDir, Callable<Boolean> callback) throws IOException {
    this.callback = callback;
    this.indexDir = indexDir;
    currentRevisionFiles = null;
    currentVersion = null;
    if (DirectoryReader.indexExists(indexDir)) {
        final List<IndexCommit> commits = DirectoryReader.listCommits(indexDir);
        final IndexCommit commit = commits.get(commits.size() - 1);
        currentRevisionFiles = IndexRevision.revisionFiles(commit);
        currentVersion = IndexRevision.revisionVersion(commit);
        final InfoStream infoStream = InfoStream.getDefault();
        if (infoStream.isEnabled(INFO_STREAM_COMPONENT)) {
            infoStream.message(INFO_STREAM_COMPONENT, "constructor(): currentVersion=" + currentVersion
                + " currentRevisionFiles=" + currentRevisionFiles);
            infoStream.message(INFO_STREAM_COMPONENT, "constructor(): commit=" + commit);
        }
    }
}
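The handler above reads InfoStream.getDefault(), which is a no-output stream unless it has been replaced. A minimal sketch of enabling that logging before constructing the handler (not part of the original example; indexDir and callback are assumed to exist):

import org.apache.lucene.util.InfoStream;
import org.apache.lucene.util.PrintStreamInfoStream;

// Replace the process-wide default so that components which call
// InfoStream.getDefault() -- such as IndexReplicationHandler -- log to stdout.
InfoStream.setDefault(new PrintStreamInfoStream(System.out));

// then construct the handler as usual:
IndexReplicationHandler handler = new IndexReplicationHandler(indexDir, callback);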
Example 4: testNoSegmentsDotGenInflation
import org.apache.lucene.util.InfoStream; // import the required package/class

public void testNoSegmentsDotGenInflation() throws IOException {
    Directory dir = newMockDirectory();
    // empty commit
    new IndexWriter(dir, new IndexWriterConfig(TEST_VERSION_CURRENT, null)).close();
    SegmentInfos sis = new SegmentInfos();
    sis.read(dir);
    assertEquals(1, sis.getGeneration());
    // no inflation
    IndexFileDeleter.inflateGens(sis, Arrays.asList(dir.listAll()), InfoStream.getDefault());
    assertEquals(1, sis.getGeneration());
    dir.close();
}
Example 5: testSegmentsInflation
import org.apache.lucene.util.InfoStream; // import the required package/class

public void testSegmentsInflation() throws IOException {
    MockDirectoryWrapper dir = newMockDirectory();
    dir.setCheckIndexOnClose(false); // TODO: allow falling back more than one commit
    // empty commit
    new IndexWriter(dir, new IndexWriterConfig(TEST_VERSION_CURRENT, null)).close();
    SegmentInfos sis = new SegmentInfos();
    sis.read(dir);
    assertEquals(1, sis.getGeneration());
    // add trash commit
    dir.createOutput(IndexFileNames.SEGMENTS + "_2", IOContext.DEFAULT).close();
    // ensure inflation
    IndexFileDeleter.inflateGens(sis, Arrays.asList(dir.listAll()), InfoStream.getDefault());
    assertEquals(2, sis.getGeneration());
    // add another trash commit
    dir.createOutput(IndexFileNames.SEGMENTS + "_4", IOContext.DEFAULT).close();
    IndexFileDeleter.inflateGens(sis, Arrays.asList(dir.listAll()), InfoStream.getDefault());
    assertEquals(4, sis.getGeneration());
    dir.close();
}
Example 6: testTrashyFile
import org.apache.lucene.util.InfoStream; // import the required package/class

public void testTrashyFile() throws IOException {
    MockDirectoryWrapper dir = newMockDirectory();
    dir.setCheckIndexOnClose(false); // TODO: maybe handle such trash better elsewhere...
    // empty commit
    new IndexWriter(dir, new IndexWriterConfig(TEST_VERSION_CURRENT, null)).close();
    SegmentInfos sis = new SegmentInfos();
    sis.read(dir);
    assertEquals(1, sis.getGeneration());
    // add trash file
    dir.createOutput(IndexFileNames.SEGMENTS + "_", IOContext.DEFAULT).close();
    // no inflation
    IndexFileDeleter.inflateGens(sis, Arrays.asList(dir.listAll()), InfoStream.getDefault());
    assertEquals(1, sis.getGeneration());
    dir.close();
}
Example 7: testTrashyGenFile
import org.apache.lucene.util.InfoStream; // import the required package/class

public void testTrashyGenFile() throws IOException {
    MockDirectoryWrapper dir = newMockDirectory();
    // initial commit
    IndexWriter iw = new IndexWriter(dir, new IndexWriterConfig(TEST_VERSION_CURRENT, null));
    iw.addDocument(new Document());
    iw.commit();
    iw.close();
    // no deletes: start at 1
    SegmentInfos sis = new SegmentInfos();
    sis.read(dir);
    assertEquals(1, sis.info(0).getNextDelGen());
    // add trash file
    dir.createOutput("_1_A", IOContext.DEFAULT).close();
    // no inflation
    IndexFileDeleter.inflateGens(sis, Arrays.asList(dir.listAll()), InfoStream.getDefault());
    assertEquals(1, sis.info(0).getNextDelGen());
    dir.close();
}
Example 8: write
import org.apache.lucene.util.InfoStream; // import the required package/class

private void write(final FieldInfos fieldInfos, final Directory dir, final FieldData[] fields, boolean allowPreFlex) throws Throwable {
    final int termIndexInterval = TestUtil.nextInt(random(), 13, 27);
    final Codec codec = Codec.getDefault();
    final SegmentInfo si = new SegmentInfo(dir, Version.LATEST, SEGMENT, 10000, false, codec, null);
    final SegmentWriteState state = new SegmentWriteState(InfoStream.getDefault(), dir, si, fieldInfos, termIndexInterval, null, newIOContext(random()));
    final FieldsConsumer consumer = codec.postingsFormat().fieldsConsumer(state);
    Arrays.sort(fields);
    for (final FieldData field : fields) {
        if (!allowPreFlex && codec instanceof Lucene3xCodec) {
            // code below expects unicode sort order
            continue;
        }
        field.write(consumer);
    }
    consumer.close();
}
Example 9: write
import org.apache.lucene.util.InfoStream; // import the required package/class

private void write(final FieldInfos fieldInfos, final Directory dir, final FieldData[] fields, boolean allowPreFlex) throws Throwable {
    final int termIndexInterval = _TestUtil.nextInt(random(), 13, 27);
    final Codec codec = Codec.getDefault();
    final SegmentInfo si = new SegmentInfo(dir, Constants.LUCENE_MAIN_VERSION, SEGMENT, 10000, false, codec, null, null);
    final SegmentWriteState state = new SegmentWriteState(InfoStream.getDefault(), dir, si, fieldInfos, termIndexInterval, null, newIOContext(random()));
    final FieldsConsumer consumer = codec.postingsFormat().fieldsConsumer(state);
    Arrays.sort(fields);
    for (final FieldData field : fields) {
        if (!allowPreFlex && codec instanceof Lucene3xCodec) {
            // code below expects unicode sort order
            continue;
        }
        field.write(consumer);
    }
    consumer.close();
}
Example 10: write
import org.apache.lucene.util.InfoStream; // import the required package/class

private void write(final FieldInfos fieldInfos, final Directory dir, final FieldData[] fields, boolean allowPreFlex) throws Throwable {
    final int termIndexInterval = _TestUtil.nextInt(random(), 13, 27);
    final Codec codec = Codec.getDefault();
    final SegmentInfo si = new SegmentInfo(dir, Constants.LUCENE_MAIN_VERSION, SEGMENT, 10000, false, codec, null);
    final SegmentWriteState state = new SegmentWriteState(InfoStream.getDefault(), dir, si, fieldInfos, termIndexInterval, null, newIOContext(random()));
    final FieldsConsumer consumer = codec.postingsFormat().fieldsConsumer(state);
    Arrays.sort(fields);
    for (final FieldData field : fields) {
        if (!allowPreFlex && codec instanceof Lucene3xCodec) {
            // code below expects unicode sort order
            continue;
        }
        field.write(consumer);
    }
    consumer.close();
}
Example 11: SegmentWriteState
import org.apache.lucene.util.InfoStream; // import the required package/class

/**
 * Constructor which takes segment suffix.
 *
 * @see #SegmentWriteState(InfoStream, Directory, SegmentInfo, FieldInfos, int,
 *      BufferedUpdates, IOContext)
 */
public SegmentWriteState(InfoStream infoStream, Directory directory, SegmentInfo segmentInfo, FieldInfos fieldInfos,
                         int termIndexInterval, BufferedUpdates segUpdates, IOContext context, String segmentSuffix) {
    this.infoStream = infoStream;
    this.segUpdates = segUpdates;
    this.directory = directory;
    this.segmentInfo = segmentInfo;
    this.fieldInfos = fieldInfos;
    this.termIndexInterval = termIndexInterval;
    assert assertSegmentSuffix(segmentSuffix);
    this.segmentSuffix = segmentSuffix;
    this.context = context;
}
Example 12: IndexUpgrader
import org.apache.lucene.util.InfoStream; // import the required package/class

/** Creates index upgrader on the given directory, using an {@link IndexWriter} using the given
 *  {@code matchVersion}. You have the possibility to upgrade indexes with multiple commit points by removing
 *  all older ones. If {@code infoStream} is not {@code null}, all logging output will be sent to this stream. */
public IndexUpgrader(Directory dir, Version matchVersion, InfoStream infoStream, boolean deletePriorCommits) {
    this(dir, new IndexWriterConfig(matchVersion, null), deletePriorCommits);
    if (null != infoStream) {
        this.iwc.setInfoStream(infoStream);
    }
}
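A hedged usage sketch for this constructor: upgrading an old index in place while sending progress output to stdout. Assumes Lucene 4.x-style APIs (FSDirectory.open(File), Version.LATEST); the index path is illustrative.

import java.io.File;
import org.apache.lucene.index.IndexUpgrader;
import org.apache.lucene.store.Directory;
import org.apache.lucene.store.FSDirectory;
import org.apache.lucene.util.PrintStreamInfoStream;
import org.apache.lucene.util.Version;

Directory dir = FSDirectory.open(new File("/path/to/old-index")); // hypothetical path
IndexUpgrader upgrader = new IndexUpgrader(dir, Version.LATEST,
        new PrintStreamInfoStream(System.out), true /* deletePriorCommits */);
upgrader.upgrade(); // rewrites all segments written by older Lucene versions
dir.close();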
Example 13: upgrade
import org.apache.lucene.util.InfoStream; // import the required package/class

/** Perform the upgrade. */
public void upgrade() throws IOException {
    if (!DirectoryReader.indexExists(dir)) {
        throw new IndexNotFoundException(dir.toString());
    }
    if (!deletePriorCommits) {
        final Collection<IndexCommit> commits = DirectoryReader.listCommits(dir);
        if (commits.size() > 1) {
            throw new IllegalArgumentException("This tool was invoked to not delete prior commit points, but the following commits were found: " + commits);
        }
    }
    iwc.setMergePolicy(new UpgradeIndexMergePolicy(iwc.getMergePolicy()));
    iwc.setIndexDeletionPolicy(new KeepOnlyLastCommitDeletionPolicy());
    final IndexWriter w = new IndexWriter(dir, iwc);
    try {
        InfoStream infoStream = iwc.getInfoStream();
        if (infoStream.isEnabled("IndexUpgrader")) {
            infoStream.message("IndexUpgrader", "Upgrading all pre-" + Version.LATEST + " segments of index directory '" + dir + "' to version " + Version.LATEST + "...");
        }
        w.forceMerge(1);
        if (infoStream.isEnabled("IndexUpgrader")) {
            infoStream.message("IndexUpgrader", "All segments upgraded to version " + Version.LATEST);
        }
    } finally {
        w.close();
    }
}
Example 14: MergeState
import org.apache.lucene.util.InfoStream; // import the required package/class

/** Sole constructor. */
MergeState(List<AtomicReader> readers, SegmentInfo segmentInfo, InfoStream infoStream, CheckAbort checkAbort) {
    this.readers = readers;
    this.segmentInfo = segmentInfo;
    this.infoStream = infoStream;
    this.checkAbort = checkAbort;
}
Example 15: DocumentsWriterPerThread
import org.apache.lucene.util.InfoStream; // import the required package/class

public DocumentsWriterPerThread(String segmentName, Directory directory, LiveIndexWriterConfig indexWriterConfig, InfoStream infoStream, DocumentsWriterDeleteQueue deleteQueue,
                                FieldInfos.Builder fieldInfos, AtomicLong pendingNumDocs) throws IOException {
    this.directoryOrig = directory;
    this.directory = new TrackingDirectoryWrapper(directory);
    this.fieldInfos = fieldInfos;
    this.indexWriterConfig = indexWriterConfig;
    this.infoStream = infoStream;
    this.codec = indexWriterConfig.getCodec();
    this.docState = new DocState(this, infoStream);
    this.docState.similarity = indexWriterConfig.getSimilarity();
    this.pendingNumDocs = pendingNumDocs;
    bytesUsed = Counter.newCounter();
    byteBlockAllocator = new DirectTrackingAllocator(bytesUsed);
    pendingUpdates = new BufferedUpdates();
    intBlockAllocator = new IntBlockAllocator(bytesUsed);
    this.deleteQueue = deleteQueue;
    assert numDocsInRAM == 0 : "num docs " + numDocsInRAM;
    pendingUpdates.clear();
    deleteSlice = deleteQueue.newSlice();
    segmentInfo = new SegmentInfo(directoryOrig, Version.LATEST, segmentName, -1, false, codec, null);
    assert numDocsInRAM == 0;
    if (INFO_VERBOSE && infoStream.isEnabled("DWPT")) {
        infoStream.message("DWPT", Thread.currentThread().getName() + " init seg=" + segmentName + " delQueue=" + deleteQueue);
    }
    // this should be the last call in the ctor
    // it really sucks that we need to pull this within the ctor and pass this ref to the chain!
    consumer = indexWriterConfig.getIndexingChain().getChain(this);
}