This page collects typical usage examples of the C# IndexWriter.ForceMerge method. If you are wondering what IndexWriter.ForceMerge does, how to call it, or what real-world code that uses it looks like, the curated examples below should help. You can also explore the containing class, IndexWriter, for further usage examples.
The following shows 8 code examples of IndexWriter.ForceMerge, sorted by popularity by default. You can upvote the examples you like or find useful; your feedback helps the system recommend better C# code examples.
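Before the examples, here is a minimal, self-contained sketch of the typical call pattern: add a few documents, commit, then call ForceMerge(1) to merge the index down to a single segment before disposing the writer. The directory type, analyzer, and field names below are illustrative assumptions rather than code taken from the examples on this page, and the version constant and some enum names differ between Lucene.Net 4.8 pre-release snapshots (used by the examples) and the 4.8.0 release API assumed here.

using System;
using Lucene.Net.Analysis.Standard;
using Lucene.Net.Documents;
using Lucene.Net.Index;
using Lucene.Net.Store;
using Lucene.Net.Util;

// Minimal sketch (assumed Lucene.Net 4.8.0-style API): index a few documents,
// then merge the index down to a single segment with ForceMerge(1).
public static class ForceMergeSketch
{
    public static void Main()
    {
        using (Directory dir = new RAMDirectory())
        {
            var conf = new IndexWriterConfig(LuceneVersion.LUCENE_48, new StandardAnalyzer(LuceneVersion.LUCENE_48));
            using (var writer = new IndexWriter(dir, conf))
            {
                for (int i = 0; i < 10; i++)
                {
                    var doc = new Document();
                    doc.Add(new StringField("id", i.ToString(), Field.Store.YES));
                    writer.AddDocument(doc);
                }
                writer.Commit();
                // Merge down to at most one segment; this call blocks until the merge completes.
                writer.ForceMerge(1);
            }
        }
    }
}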
Example 1: TearDown
public override void TearDown()
{
    Iw.Dispose();
    TestUtil.CheckIndex(Dir); // for some extra coverage, checkIndex before we forceMerge
    Iwc.SetOpenMode(IndexWriterConfig.OpenMode_e.APPEND);
    IndexWriter iw = new IndexWriter(Dir, (IndexWriterConfig)Iwc.Clone());
    iw.ForceMerge(1);
    iw.Dispose();
    Dir.Dispose(); // just force a checkindex for now
    base.TearDown();
}
Example 2: TestCustomMergeScheduler
public void TestCustomMergeScheduler()
{
    // we don't really need to execute anything, just to make sure the custom MS
    // compiles. But ensure that it can be used as well, e.g., no other hidden
    // dependencies or something. Therefore, don't use any random API!
    Directory dir = new RAMDirectory();
    IndexWriterConfig conf = new IndexWriterConfig(TEST_VERSION_CURRENT, null);
    conf.SetMergeScheduler(new ReportingMergeScheduler());
    IndexWriter writer = new IndexWriter(dir, conf);
    writer.AddDocument(new Document());
    writer.Commit(); // trigger flush
    writer.AddDocument(new Document());
    writer.Commit(); // trigger flush
    writer.ForceMerge(1);
    writer.Dispose();
    dir.Dispose();
}
Example 3: TestCollationKeySort
// Test using various international locales with accented characters (which
// sort differently depending on locale)
//
// Copied (and slightly modified) from
// Lucene.Net.Search.TestSort.testInternationalSort()
//
// TODO: this test is really fragile. there are already 3 different cases,
// depending upon unicode version.
public virtual void TestCollationKeySort(Analyzer usAnalyzer, Analyzer franceAnalyzer, Analyzer swedenAnalyzer, Analyzer denmarkAnalyzer, string usResult, string frResult, string svResult, string dkResult)
{
    Directory indexStore = NewDirectory();
    IndexWriter writer = new IndexWriter(indexStore, new IndexWriterConfig(TEST_VERSION_CURRENT, new MockAnalyzer(Random(), MockTokenizer.WHITESPACE, false)));

    // document data:
    // the tracer field is used to determine which document was hit
    string[][] sortData = new string[][]
    {
        new string[] { "A", "x", "p\u00EAche", "p\u00EAche", "p\u00EAche", "p\u00EAche" },
        new string[] { "B", "y", "HAT", "HAT", "HAT", "HAT" },
        new string[] { "C", "x", "p\u00E9ch\u00E9", "p\u00E9ch\u00E9", "p\u00E9ch\u00E9", "p\u00E9ch\u00E9" },
        new string[] { "D", "y", "HUT", "HUT", "HUT", "HUT" },
        new string[] { "E", "x", "peach", "peach", "peach", "peach" },
        new string[] { "F", "y", "H\u00C5T", "H\u00C5T", "H\u00C5T", "H\u00C5T" },
        new string[] { "G", "x", "sin", "sin", "sin", "sin" },
        new string[] { "H", "y", "H\u00D8T", "H\u00D8T", "H\u00D8T", "H\u00D8T" },
        new string[] { "I", "x", "s\u00EDn", "s\u00EDn", "s\u00EDn", "s\u00EDn" },
        new string[] { "J", "y", "HOT", "HOT", "HOT", "HOT" }
    };

    FieldType customType = new FieldType();
    customType.Stored = true;

    for (int i = 0; i < sortData.Length; ++i)
    {
        Document doc = new Document();
        doc.Add(new Field("tracer", sortData[i][0], customType));
        doc.Add(new TextField("contents", sortData[i][1], Field.Store.NO));
        if (sortData[i][2] != null)
        {
            doc.Add(new TextField("US", usAnalyzer.TokenStream("US", new StringReader(sortData[i][2]))));
        }
        if (sortData[i][3] != null)
        {
            doc.Add(new TextField("France", franceAnalyzer.TokenStream("France", new StringReader(sortData[i][3]))));
        }
        if (sortData[i][4] != null)
        {
            doc.Add(new TextField("Sweden", swedenAnalyzer.TokenStream("Sweden", new StringReader(sortData[i][4]))));
        }
        if (sortData[i][5] != null)
        {
            doc.Add(new TextField("Denmark", denmarkAnalyzer.TokenStream("Denmark", new StringReader(sortData[i][5]))));
        }
        writer.AddDocument(doc);
    }

    writer.ForceMerge(1);
    writer.Dispose();

    IndexReader reader = DirectoryReader.Open(indexStore);
    IndexSearcher searcher = new IndexSearcher(reader);
    Sort sort = new Sort();
    Query queryX = new TermQuery(new Term("contents", "x"));
    Query queryY = new TermQuery(new Term("contents", "y"));

    sort.SetSort(new SortField("US", SortField.Type_e.STRING));
    AssertMatches(searcher, queryY, sort, usResult);

    sort.SetSort(new SortField("France", SortField.Type_e.STRING));
    AssertMatches(searcher, queryX, sort, frResult);

    sort.SetSort(new SortField("Sweden", SortField.Type_e.STRING));
    AssertMatches(searcher, queryY, sort, svResult);

    sort.SetSort(new SortField("Denmark", SortField.Type_e.STRING));
    AssertMatches(searcher, queryY, sort, dkResult);

    reader.Dispose();
    indexStore.Dispose();
}
Example 4: TestMultiValuedField
public virtual void TestMultiValuedField()
{
    Directory indexStore = NewDirectory();
    IndexWriter writer = new IndexWriter(indexStore, NewIndexWriterConfig(TEST_VERSION_CURRENT, new MockAnalyzer(Random())));
    for (int i = 0; i < 5; i++)
    {
        Document doc = new Document();
        doc.Add(new StringField("string", "a" + i, Field.Store.NO));
        doc.Add(new StringField("string", "b" + i, Field.Store.NO));
        writer.AddDocument(doc);
    }
    writer.ForceMerge(1); // enforce one segment to have a higher unique term count in all cases
    writer.Dispose();

    Sort sort = new Sort(new SortField("string", SortField.Type_e.STRING), SortField.FIELD_DOC);
    // this should not throw AIOOBE or RuntimeEx
    IndexReader reader = DirectoryReader.Open(indexStore);
    IndexSearcher searcher = NewSearcher(reader);
    searcher.Search(new MatchAllDocsQuery(), null, 500, sort);
    reader.Dispose();
    indexStore.Dispose();
}
Example 5: CreateRandomTerms
public virtual void CreateRandomTerms(int nDocs, int nTerms, double power, Directory dir)
{
    int[] freq = new int[nTerms];
    Terms = new Term[nTerms];
    for (int i = 0; i < nTerms; i++)
    {
        int f = (nTerms + 1) - i; // make first terms less frequent
        freq[i] = (int)Math.Ceiling(Math.Pow(f, power));
        Terms[i] = new Term("f", char.ToString((char)('A' + i)));
    }

    IndexWriter iw = new IndexWriter(dir, NewIndexWriterConfig(TEST_VERSION_CURRENT, new MockAnalyzer(Random())).SetOpenMode(OpenMode.CREATE));
    for (int i = 0; i < nDocs; i++)
    {
        Document d = new Document();
        for (int j = 0; j < nTerms; j++)
        {
            if (Random().Next(freq[j]) == 0)
            {
                d.Add(NewStringField("f", Terms[j].Text(), Field.Store.NO));
                //System.out.println(d);
            }
        }
        iw.AddDocument(d);
    }
    iw.ForceMerge(1);
    iw.Dispose();
}
Example 6: TestSparseIndex
public virtual void TestSparseIndex()
{
    Directory dir = NewDirectory();
    IndexWriter writer = new IndexWriter(dir, NewIndexWriterConfig(TEST_VERSION_CURRENT, new MockAnalyzer(Random())));
    for (int d = -20; d <= 20; d++)
    {
        Document doc = new Document();
        doc.Add(NewStringField("id", Convert.ToString(d), Field.Store.NO));
        doc.Add(NewStringField("body", "body", Field.Store.NO));
        writer.AddDocument(doc);
    }
    writer.ForceMerge(1);
    writer.DeleteDocuments(new Term("id", "0"));
    writer.Dispose();

    IndexReader reader = DirectoryReader.Open(dir);
    IndexSearcher search = NewSearcher(reader);
    Assert.IsTrue(reader.HasDeletions);

    ScoreDoc[] result;
    Query q = new TermQuery(new Term("body", "body"));

    result = search.Search(q, FieldCacheRangeFilter.NewByteRange("id", (sbyte?)-20, (sbyte?)20, T, T), 100).ScoreDocs;
    Assert.AreEqual(40, result.Length, "find all");
    result = search.Search(q, FieldCacheRangeFilter.NewByteRange("id", (sbyte?)0, (sbyte?)20, T, T), 100).ScoreDocs;
    Assert.AreEqual(20, result.Length, "find all");
    result = search.Search(q, FieldCacheRangeFilter.NewByteRange("id", (sbyte?)-20, (sbyte?)0, T, T), 100).ScoreDocs;
    Assert.AreEqual(20, result.Length, "find all");
    result = search.Search(q, FieldCacheRangeFilter.NewByteRange("id", (sbyte?)10, (sbyte?)20, T, T), 100).ScoreDocs;
    Assert.AreEqual(11, result.Length, "find all");
    result = search.Search(q, FieldCacheRangeFilter.NewByteRange("id", (sbyte?)-20, (sbyte?)-10, T, T), 100).ScoreDocs;
    Assert.AreEqual(11, result.Length, "find all");

    reader.Dispose();
    dir.Dispose();
}
Example 7: TestRAMDirectorySize
public virtual void TestRAMDirectorySize()
{
    Directory dir = NewFSDirectory(IndexDir);
    MockDirectoryWrapper ramDir = new MockDirectoryWrapper(Random(), new RAMDirectory(dir, NewIOContext(Random())));
    dir.Dispose();

    IndexWriter writer = new IndexWriter(ramDir, (new IndexWriterConfig(TEST_VERSION_CURRENT, new MockAnalyzer(Random()))).SetOpenMode(IndexWriterConfig.OpenMode_e.APPEND));
    writer.ForceMerge(1);
    Assert.AreEqual(ramDir.SizeInBytes(), ramDir.RecomputedSizeInBytes);

    ThreadClass[] threads = new ThreadClass[NumThreads];
    for (int i = 0; i < NumThreads; i++)
    {
        int num = i;
        threads[i] = new ThreadAnonymousInnerClassHelper(this, writer, num);
    }
    for (int i = 0; i < NumThreads; i++)
    {
        threads[i].Start();
    }
    for (int i = 0; i < NumThreads; i++)
    {
        threads[i].Join();
    }

    writer.ForceMerge(1);
    Assert.AreEqual(ramDir.SizeInBytes(), ramDir.RecomputedSizeInBytes);
    writer.Dispose();
}
Example 8: MakeEmptyIndex
private static IndexReader MakeEmptyIndex(Random random, int numDocs)
{
    Debug.Assert(numDocs > 0);
    Directory d = new MockDirectoryWrapper(random, new RAMDirectory());
    IndexWriter w = new IndexWriter(d, new IndexWriterConfig(LuceneTestCase.TEST_VERSION_CURRENT, new MockAnalyzer(random)));
    for (int i = 0; i < numDocs; i++)
    {
        w.AddDocument(new Document());
    }
    w.ForceMerge(1);
    w.Commit();
    w.Dispose();
    DirectoryReader reader = DirectoryReader.Open(d);
    return new AllDeletedFilterReader(LuceneTestCase.GetOnlySegmentReader(reader));
}