本文整理汇总了C#中RandomIndexWriter.AddIndexes方法的典型用法代码示例。如果您正苦于以下问题:C# RandomIndexWriter.AddIndexes方法的具体用法?C# RandomIndexWriter.AddIndexes怎么用?C# RandomIndexWriter.AddIndexes使用的例子?那么,这里精选的方法代码示例或许可以为您提供帮助。您也可以进一步了解该方法所在类 RandomIndexWriter 的用法示例。
在下文中一共展示了RandomIndexWriter.AddIndexes方法的7个代码示例,这些例子默认根据受欢迎程度排序。您可以为喜欢或者感觉有用的代码点赞,您的评价将有助于系统推荐出更棒的C#代码示例。
示例1: TestAddEmpty
public virtual void TestAddEmpty()
{
    // Adding a composite reader with no sub-readers must not create any
    // segments: after AddIndexes, every remaining leaf still has documents.
    Directory dir = NewDirectory();
    RandomIndexWriter writer = new RandomIndexWriter(Random(), dir);
    writer.AddIndexes(new MultiReader());
    writer.Dispose();

    DirectoryReader reader = DirectoryReader.Open(dir);
    foreach (AtomicReaderContext leaf in reader.Leaves)
    {
        Assert.IsTrue(leaf.Reader.MaxDoc > 0, "empty segments should be dropped by addIndexes");
    }
    reader.Dispose();
    dir.Dispose();
}
示例2: TestAddIndexes
public virtual void TestAddIndexes()
{
    // Build two single-document source indexes, each with a stored id
    // and a numeric DocValues field.
    Directory dirA = NewDirectory();
    RandomIndexWriter writer = new RandomIndexWriter(Random(), dirA, Similarity, TimeZone);
    Document document = new Document();
    document.Add(NewStringField("id", "1", Field.Store.YES));
    document.Add(new NumericDocValuesField("dv", 1));
    writer.AddDocument(document);
    IndexReader readerA = writer.Reader;
    writer.Dispose();

    Directory dirB = NewDirectory();
    writer = new RandomIndexWriter(Random(), dirB, Similarity, TimeZone);
    document = new Document();
    document.Add(NewStringField("id", "2", Field.Store.YES));
    document.Add(new NumericDocValuesField("dv", 2));
    writer.AddDocument(document);
    IndexReader readerB = writer.Reader;
    writer.Dispose();

    // Merge both sources into a third index via AddIndexes(IndexReader...).
    Directory dirC = NewDirectory();
    writer = new RandomIndexWriter(Random(), dirC, Similarity, TimeZone);
    writer.AddIndexes(SlowCompositeReaderWrapper.Wrap(readerA), SlowCompositeReaderWrapper.Wrap(readerB));
    readerA.Dispose();
    dirA.Dispose();
    readerB.Dispose();
    dirB.Dispose();

    writer.ForceMerge(1);
    DirectoryReader merged = writer.Reader;
    writer.Dispose();

    // The single merged segment must hold both docs and keep the "dv" field.
    AtomicReader segment = GetOnlySegmentReader(merged);
    Assert.AreEqual(2, segment.NumDocs);
    NumericDocValues mergedValues = segment.GetNumericDocValues("dv");
    Assert.IsNotNull(mergedValues);
    merged.Dispose();
    dirC.Dispose();
}
示例3: TestLocksBlock
/// <summary>
/// Verifies that AddIndexes(Directory...) fails with a
/// <see cref="LockObtainFailedException"/> when the source directory's
/// write lock is still held by another open writer (w1 here).
/// </summary>
public virtual void TestLocksBlock()
{
    Directory src = NewDirectory();
    RandomIndexWriter w1 = new RandomIndexWriter(Random(), src, Similarity, TimeZone);
    w1.AddDocument(new Document());
    w1.Commit();

    Directory dest = NewDirectory();
    IndexWriterConfig iwc = NewIndexWriterConfig(TEST_VERSION_CURRENT, new MockAnalyzer(Random()));
    iwc.SetWriteLockTimeout(1); // fail fast instead of blocking on the lock
    RandomIndexWriter w2 = new RandomIndexWriter(Random(), dest, iwc);
    try
    {
        w2.AddIndexes(src);
        Assert.Fail("did not hit expected exception");
    }
    catch (LockObtainFailedException) // FIX: variable `lofe` was never used (CS0168)
    {
        // expected
    }
    IOUtils.Close(w1, w2, src, dest);
}
示例4: TestFakeAllDeleted
// Feeds AddIndexes a reader whose only segment is wrapped so that every
// document appears deleted; such fully-deleted segments should be dropped.
public virtual void TestFakeAllDeleted()
{
Directory src = NewDirectory(), dest = NewDirectory();
RandomIndexWriter w = new RandomIndexWriter(Random(), src, Similarity, TimeZone);
w.AddDocument(new Document());
// Wrap the single source segment so all of its documents read as deleted.
IndexReader allDeletedReader = new AllDeletedFilterReader((AtomicReader)w.Reader.Leaves[0].Reader);
w.Dispose();
w = new RandomIndexWriter(Random(), dest, Similarity, TimeZone);
w.AddIndexes(allDeletedReader);
w.Dispose();
// NOTE(review): this opens `src`, yet the assertion message is about
// segments produced by AddIndexes, which were written to `dest` — confirm
// against the upstream Lucene test whether `dest` was intended here.
DirectoryReader dr = DirectoryReader.Open(src);
foreach (AtomicReaderContext ctx in dr.Leaves)
{
Assert.IsTrue(ctx.Reader.MaxDoc > 0, "empty segments should be dropped by addIndexes");
}
dr.Dispose();
allDeletedReader.Dispose();
src.Dispose();
dest.Dispose();
}
示例5: TestFieldNamesChanged
public virtual void TestFieldNamesChanged()
{
    // The two source indexes use different field names (f1 vs f2); after
    // AddIndexes, each document must still expose its original field.
    Directory dirA = NewDirectory();
    RandomIndexWriter writer = new RandomIndexWriter(Random(), dirA, Similarity, TimeZone);
    Document document = new Document();
    document.Add(NewStringField("f1", "doc1 field1", Field.Store.YES));
    document.Add(NewStringField("id", "1", Field.Store.YES));
    writer.AddDocument(document);
    IndexReader readerA = writer.Reader;
    writer.Dispose();

    Directory dirB = NewDirectory();
    writer = new RandomIndexWriter(Random(), dirB, Similarity, TimeZone);
    document = new Document();
    document.Add(NewStringField("f2", "doc2 field2", Field.Store.YES));
    document.Add(NewStringField("id", "2", Field.Store.YES));
    writer.AddDocument(document);
    IndexReader readerB = writer.Reader;
    writer.Dispose();

    Directory dirC = NewDirectory();
    writer = new RandomIndexWriter(Random(), dirC, Similarity, TimeZone);
    writer.AddIndexes(readerA, readerB);
    readerA.Dispose();
    dirA.Dispose();
    readerB.Dispose();
    dirB.Dispose();

    IndexReader combined = writer.Reader;
    writer.Dispose();

    Assert.AreEqual(2, combined.NumDocs);
    for (int docID = 0; docID < 2; docID++)
    {
        Document stored = combined.Document(docID);
        if (stored.Get("id").Equals("1"))
        {
            Assert.AreEqual("doc1 field1", stored.Get("f1"));
        }
        else
        {
            Assert.AreEqual("doc2 field2", stored.Get("f2"));
        }
    }
    combined.Dispose();
    dirC.Dispose();
}
示例6: BeforeClass
public static void BeforeClass()
{
    // Build the small baseline index, one document per entry in DocFields.
    Directory = NewDirectory();
    RandomIndexWriter writer = new RandomIndexWriter(Random(), Directory, NewIndexWriterConfig(TEST_VERSION_CURRENT, new MockAnalyzer(Random())).SetMergePolicy(NewLogMergePolicy()));
    for (int i = 0; i < DocFields.Length; i++)
    {
        Document smallDoc = new Document();
        smallDoc.Add(NewTextField(field, DocFields[i], Field.Store.NO));
        writer.AddDocument(smallDoc);
    }
    writer.Dispose();

    LittleReader = DirectoryReader.Open(Directory);
    Searcher = NewSearcher(LittleReader);
    // Intentionally the baseline similarity, because results are compared
    // against bigSearcher (which uses a random one).
    Searcher.Similarity = new DefaultSimilarity();

    // Make the big index by repeatedly doubling a RAM copy of the small one
    // via AddIndexes until it holds at least 3000 documents.
    Dir2 = new MockDirectoryWrapper(Random(), new RAMDirectory(Directory, IOContext.DEFAULT));
    MulFactor = 1;
    int totalDocs = 0;
    if (VERBOSE)
    {
        Console.WriteLine("\nTEST: now copy index...");
    }
    do
    {
        if (VERBOSE)
        {
            Console.WriteLine("\nTEST: cycle...");
        }
        Directory snapshot = new MockDirectoryWrapper(Random(), new RAMDirectory(Dir2, IOContext.DEFAULT));
        RandomIndexWriter copier = new RandomIndexWriter(Random(), Dir2);
        copier.AddIndexes(snapshot);
        totalDocs = copier.MaxDoc();
        copier.Dispose();
        MulFactor *= 2;
    } while (totalDocs < 3000);

    // Append extra documents in a second field so "field2" queries match.
    RandomIndexWriter extraWriter = new RandomIndexWriter(Random(), Dir2, NewIndexWriterConfig(TEST_VERSION_CURRENT, new MockAnalyzer(Random())).SetMaxBufferedDocs(TestUtil.NextInt(Random(), 50, 1000)));
    Document extraDoc = new Document();
    extraDoc.Add(NewTextField("field2", "xxx", Field.Store.NO));
    for (int i = 0; i < NUM_EXTRA_DOCS / 2; i++)
    {
        extraWriter.AddDocument(extraDoc);
    }
    extraDoc = new Document();
    extraDoc.Add(NewTextField("field2", "big bad bug", Field.Store.NO));
    for (int i = 0; i < NUM_EXTRA_DOCS / 2; i++)
    {
        extraWriter.AddDocument(extraDoc);
    }
    Reader = extraWriter.Reader;
    BigSearcher = NewSearcher(Reader);
    extraWriter.Dispose();
}
示例7: TestMergeStability
/// <summary>
/// The purpose of this test is to make sure that bulk merge doesn't accumulate useless data over runs.
/// </summary>
// [Test] // LUCENENET NOTE: For now, we are overriding this test in every subclass to pull it into the right context for the subclass
public virtual void TestMergeStability()
{
    // First pass: build an index with compound files disabled and force-merge
    // it down to a single segment.
    // do not use newMergePolicy that might return a MockMergePolicy that ignores the no-CFS ratio
    Directory firstDir = NewDirectory();
    MergePolicy firstPolicy = NewTieredMergePolicy();
    firstPolicy.NoCFSRatio = 0;
    IndexWriterConfig firstConfig = (new IndexWriterConfig(TEST_VERSION_CURRENT, new MockAnalyzer(Random()))).SetUseCompoundFile(false).SetMergePolicy(firstPolicy);
    using (var firstWriter = new RandomIndexWriter(Random(), firstDir, firstConfig))
    {
        int numDocs = AtLeast(500);
        for (int i = 0; i < numDocs; ++i)
        {
            Document doc = new Document();
            AddRandomFields(doc);
            firstWriter.AddDocument(doc);
        }
        firstWriter.ForceMerge(1);
        firstWriter.Commit();
    }
    IndexReader reader = DirectoryReader.Open(firstDir);

    // Second pass: bulk-merge the first index into a fresh directory.
    Directory secondDir = NewDirectory();
    MergePolicy secondPolicy = NewTieredMergePolicy();
    secondPolicy.NoCFSRatio = 0;
    IndexWriterConfig secondConfig = (new IndexWriterConfig(TEST_VERSION_CURRENT, new MockAnalyzer(Random()))).SetUseCompoundFile(false).SetMergePolicy(secondPolicy);
    using (var secondWriter = new RandomIndexWriter(Random(), secondDir, secondConfig))
    {
        secondWriter.AddIndexes(reader);
        secondWriter.Commit();
    }

    // Per-extension byte usage must match: bulk merge added no useless data.
    assertEquals(BytesUsedByExtension(firstDir), BytesUsedByExtension(secondDir));
    reader.Dispose();
    firstDir.Dispose();
    secondDir.Dispose();
}