本文整理汇总了Java中org.apache.lucene.index.LogMergePolicy.setUseCompoundFile方法的典型用法代码示例。如果您正苦于以下问题:Java LogMergePolicy.setUseCompoundFile方法的具体用法?Java LogMergePolicy.setUseCompoundFile怎么用?Java LogMergePolicy.setUseCompoundFile使用的例子?那么恭喜您, 这里精选的方法代码示例或许可以为您提供帮助。您也可以进一步了解该方法所在类org.apache.lucene.index.LogMergePolicy
的用法示例。
在下文中一共展示了LogMergePolicy.setUseCompoundFile方法的2个代码示例,这些例子默认根据受欢迎程度排序。您可以为喜欢或者感觉有用的代码点赞,您的评价将有助于系统推荐出更棒的Java代码示例。
示例1: reduceOpenFiles
import org.apache.lucene.index.LogMergePolicy; //导入方法依赖的package包/类
/**
 * Best-effort tuning of an {@code IndexWriter} so that the number of
 * simultaneously open files stays small: caps merge width, forces compound
 * files, and limits concurrent merge threads.
 */
public static void reduceOpenFiles(IndexWriter w) {
    // Cap merge width and enable compound files for either policy flavor.
    MergePolicy policy = w.getConfig().getMergePolicy();
    if (policy instanceof LogMergePolicy) {
        LogMergePolicy logPolicy = (LogMergePolicy) policy;
        logPolicy.setMergeFactor(Math.min(5, logPolicy.getMergeFactor()));
        logPolicy.setUseCompoundFile(true);
    } else if (policy instanceof TieredMergePolicy) {
        TieredMergePolicy tieredPolicy = (TieredMergePolicy) policy;
        tieredPolicy.setMaxMergeAtOnce(Math.min(5, tieredPolicy.getMaxMergeAtOnce()));
        tieredPolicy.setSegmentsPerTier(Math.min(5, tieredPolicy.getSegmentsPerTier()));
        tieredPolicy.setUseCompoundFile(true);
    }
    // Also bound how many merges may run or be queued at once.
    MergeScheduler scheduler = w.getConfig().getMergeScheduler();
    if (scheduler instanceof ConcurrentMergeScheduler) {
        ConcurrentMergeScheduler cms = (ConcurrentMergeScheduler) scheduler;
        cms.setMaxThreadCount(2);
        cms.setMaxMergeCount(3);
    }
}
示例2: beforeClass
import org.apache.lucene.index.LogMergePolicy; //导入方法依赖的package包/类
/**
 * One-time fixture setup: we will manually instantiate preflex-rw here,
 * build a single-segment index, and construct a TermInfosReaderIndex
 * directly from the segment's on-disk term-index file.
 */
@BeforeClass
public static void beforeClass() throws Exception {
// Enable writing in the legacy "preflex" (3.x) format so PreFlexRWCodec can be used.
LuceneTestCase.PREFLEX_IMPERSONATION_IS_ACTIVE = true;
IndexWriterConfig config = newIndexWriterConfig(TEST_VERSION_CURRENT,
new MockAnalyzer(random(), MockTokenizer.KEYWORD, false));
termIndexInterval = config.getTermIndexInterval();
// Random divisor in [1, 10] used when sub-sampling the term index below.
indexDivisor = _TestUtil.nextInt(random(), 1, 10);
NUMBER_OF_DOCUMENTS = atLeast(100);
// Scale the field count so the index contains enough distinct terms relative
// to the interval/divisor — presumably to exercise the term index; TODO confirm.
NUMBER_OF_FIELDS = atLeast(Math.max(10, 3*termIndexInterval*indexDivisor/NUMBER_OF_DOCUMENTS));
directory = newDirectory();
config.setCodec(new PreFlexRWCodec());
LogMergePolicy mp = newLogMergePolicy();
// turn off compound file, this test will open some index files directly.
mp.setUseCompoundFile(false);
config.setMergePolicy(mp);
populate(directory, config);
// The index must end up as exactly one segment; capture its name, then release the reader.
DirectoryReader r0 = IndexReader.open(directory);
SegmentReader r = LuceneTestCase.getOnlySegmentReader(r0);
String segment = r.getSegmentName();
r.close();
// Read the segment's FieldInfos directly through the preflex codec.
FieldInfosReader infosReader = new PreFlexRWCodec().fieldInfosFormat().getFieldInfosReader();
FieldInfos fieldInfos = infosReader.read(directory, segment, IOContext.READONCE);
// Open the segment's term-index file (extension from Lucene3xPostingsFormat.TERMS_INDEX_EXTENSION).
String segmentFileName = IndexFileNames.segmentFileName(segment, "", Lucene3xPostingsFormat.TERMS_INDEX_EXTENSION);
long tiiFileLength = directory.fileLength(segmentFileName);
IndexInput input = directory.openInput(segmentFileName, newIOContext(random()));
// Enumerator over the main terms dictionary file (last arg false = not the index file).
termEnum = new SegmentTermEnum(directory.openInput(IndexFileNames.segmentFileName(segment, "", Lucene3xPostingsFormat.TERMS_EXTENSION), newIOContext(random())), fieldInfos, false);
int totalIndexInterval = termEnum.indexInterval * indexDivisor;
// Build the in-memory term index from the index-file enumerator (last arg true),
// then close the enumerator and the underlying input.
SegmentTermEnum indexEnum = new SegmentTermEnum(input, fieldInfos, true);
index = new TermInfosReaderIndex(indexEnum, indexDivisor, tiiFileLength, totalIndexInterval);
indexEnum.close();
input.close();
// Reader stays open for the tests; closed elsewhere (afterClass, presumably — not visible here).
reader = IndexReader.open(directory);
// Draw 1000 random terms from the index for the tests to look up.
sampleTerms = sample(random(),reader,1000);
}