This article collects typical usage examples of the C# method Lucene.Net.Index.IndexWriter.UpdateNumericDocValue. If you are wondering what IndexWriter.UpdateNumericDocValue does, how to call it, or how it is used in practice, the curated examples below should help. You can also explore further usage examples of its containing class, Lucene.Net.Index.IndexWriter.
The following 15 code examples of IndexWriter.UpdateNumericDocValue are shown, sorted by popularity by default. You can upvote the examples you like or find useful; your feedback helps the system recommend better C# code examples.
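Before the test-derived examples, here is a minimal sketch of the basic call pattern: index a document that carries a NumericDocValuesField, commit, update that field's value for every document matching a term via UpdateNumericDocValue, and read the new value back. This sketch is not one of the examples below; the class name UpdateNumericDocValueSketch, the field names "id"/"val", and the use of RAMDirectory, StandardAnalyzer, and LuceneVersion.LUCENE_48 are illustrative assumptions and may need adjusting to your Lucene.Net 4.8 build.
// Minimal sketch (assumptions noted above): update a numeric doc-values field
// for all documents matching a term, then read the updated value back.
using System;
using Lucene.Net.Analysis.Standard;
using Lucene.Net.Documents;
using Lucene.Net.Index;
using Lucene.Net.Store;
using Lucene.Net.Util;

public static class UpdateNumericDocValueSketch
{
    public static void Run()
    {
        using (Directory dir = new RAMDirectory())
        {
            var conf = new IndexWriterConfig(LuceneVersion.LUCENE_48,
                new StandardAnalyzer(LuceneVersion.LUCENE_48));
            using (var writer = new IndexWriter(dir, conf))
            {
                var doc = new Document();
                doc.Add(new StringField("id", "doc-0", Field.Store.NO));
                doc.Add(new NumericDocValuesField("val", 1L));
                writer.AddDocument(doc);
                writer.Commit();

                // Rewrite the "val" doc-values entry of every document whose
                // "id" term equals "doc-0", without re-indexing the document.
                writer.UpdateNumericDocValue(new Term("id", "doc-0"), "val", 42L);
            } // disposing the writer applies the pending update and commits

            using (DirectoryReader reader = DirectoryReader.Open(dir))
            {
                AtomicReader r = (AtomicReader)reader.Leaves[0].Reader;
                NumericDocValues vals = r.GetNumericDocValues("val");
                Console.WriteLine(vals.Get(0)); // prints 42
            }
        }
    }
}
As the examples below also show, an update only becomes visible to a reader opened after it has been applied, for example after Commit() or Dispose(), or through an NRT reader opened from the writer via DirectoryReader.Open(writer, true).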
Example 1: TestUpdatesAreFlushed
public virtual void TestUpdatesAreFlushed()
{
    Directory dir = NewDirectory();
    IndexWriter writer = new IndexWriter(dir, (IndexWriterConfig)NewIndexWriterConfig(TEST_VERSION_CURRENT, new MockAnalyzer(Random(), MockTokenizer.WHITESPACE, false)).SetRAMBufferSizeMB(0.00000001));
    writer.AddDocument(Doc(0)); // val=1
    writer.AddDocument(Doc(1)); // val=2
    writer.AddDocument(Doc(3)); // val=4
    writer.Commit();
    Assert.AreEqual(1, writer.FlushDeletesCount);
    writer.UpdateNumericDocValue(new Term("id", "doc-0"), "val", 5L);
    Assert.AreEqual(2, writer.FlushDeletesCount);
    writer.UpdateNumericDocValue(new Term("id", "doc-1"), "val", 6L);
    Assert.AreEqual(3, writer.FlushDeletesCount);
    writer.UpdateNumericDocValue(new Term("id", "doc-2"), "val", 7L);
    Assert.AreEqual(4, writer.FlushDeletesCount);
    writer.Config.SetRAMBufferSizeMB(1000d);
    writer.UpdateNumericDocValue(new Term("id", "doc-2"), "val", 7L);
    Assert.AreEqual(4, writer.FlushDeletesCount);
    writer.Dispose();
    dir.Dispose();
}
Example 2: TestDocumentWithNoValue
public virtual void TestDocumentWithNoValue()
{
    Directory dir = NewDirectory();
    IndexWriterConfig conf = NewIndexWriterConfig(TEST_VERSION_CURRENT, new MockAnalyzer(Random()));
    IndexWriter writer = new IndexWriter(dir, conf);
    for (int i = 0; i < 2; i++)
    {
        Document doc = new Document();
        doc.Add(new StringField("dvUpdateKey", "dv", Store.NO));
        if (i == 0) // index only one document with value
        {
            doc.Add(new NumericDocValuesField("ndv", 5));
        }
        writer.AddDocument(doc);
    }
    writer.Commit();
    // update all docs' ndv field
    writer.UpdateNumericDocValue(new Term("dvUpdateKey", "dv"), "ndv", 17L);
    writer.Dispose();
    DirectoryReader reader = DirectoryReader.Open(dir);
    AtomicReader r = (AtomicReader)reader.Leaves[0].Reader;
    NumericDocValues ndv = r.GetNumericDocValues("ndv");
    for (int i = 0; i < r.MaxDoc; i++)
    {
        Assert.AreEqual(17, ndv.Get(i));
    }
    reader.Dispose();
    dir.Dispose();
}
Example 3: TestUpdateDocumentByMultipleTerms
public virtual void TestUpdateDocumentByMultipleTerms()
{
    // make sure the order of updates is respected, even when multiple terms affect same document
    Directory dir = NewDirectory();
    IndexWriterConfig conf = NewIndexWriterConfig(TEST_VERSION_CURRENT, new MockAnalyzer(Random()));
    IndexWriter writer = new IndexWriter(dir, conf);
    Document doc = new Document();
    doc.Add(new StringField("k1", "v1", Store.NO));
    doc.Add(new StringField("k2", "v2", Store.NO));
    doc.Add(new NumericDocValuesField("ndv", 5));
    writer.AddDocument(doc); // flushed document
    writer.Commit();
    writer.AddDocument(doc); // in-memory document
    writer.UpdateNumericDocValue(new Term("k1", "v1"), "ndv", 17L);
    writer.UpdateNumericDocValue(new Term("k2", "v2"), "ndv", 3L);
    writer.Dispose();
    DirectoryReader reader = DirectoryReader.Open(dir);
    AtomicReader r = SlowCompositeReaderWrapper.Wrap(reader);
    NumericDocValues ndv = r.GetNumericDocValues("ndv");
    for (int i = 0; i < r.MaxDoc; i++)
    {
        Assert.AreEqual(3, ndv.Get(i));
    }
    reader.Dispose();
    dir.Dispose();
}
Example 4: TestUpdateAndDeleteSameDocument
public virtual void TestUpdateAndDeleteSameDocument()
{
    // update and delete same document in same commit session
    Directory dir = NewDirectory();
    IndexWriterConfig conf = NewIndexWriterConfig(TEST_VERSION_CURRENT, new MockAnalyzer(Random()));
    conf.SetMaxBufferedDocs(10); // control segment flushing
    IndexWriter writer = new IndexWriter(dir, conf);
    writer.AddDocument(Doc(0));
    writer.AddDocument(Doc(1));
    if (Random().NextBoolean())
    {
        writer.Commit();
    }
    writer.DeleteDocuments(new Term("id", "doc-0"));
    writer.UpdateNumericDocValue(new Term("id", "doc-0"), "val", 17L);
    DirectoryReader reader;
    if (Random().NextBoolean()) // not NRT
    {
        writer.Dispose();
        reader = DirectoryReader.Open(dir);
    }
    else // NRT
    {
        reader = DirectoryReader.Open(writer, true);
        writer.Dispose();
    }
    AtomicReader r = (AtomicReader)reader.Leaves[0].Reader;
    Assert.IsFalse(r.LiveDocs.Get(0));
    Assert.AreEqual(1, r.GetNumericDocValues("val").Get(0)); // deletes are currently applied first
    reader.Dispose();
    dir.Dispose();
}
Example 5: TestUnsetValue
public virtual void TestUnsetValue()
{
    AssumeTrue("codec does not support docsWithField", DefaultCodecSupportsDocsWithField());
    Directory dir = NewDirectory();
    IndexWriterConfig conf = NewIndexWriterConfig(TEST_VERSION_CURRENT, new MockAnalyzer(Random()));
    IndexWriter writer = new IndexWriter(dir, conf);
    for (int i = 0; i < 2; i++)
    {
        Document doc = new Document();
        doc.Add(new StringField("id", "doc" + i, Store.NO));
        doc.Add(new NumericDocValuesField("ndv", 5));
        writer.AddDocument(doc);
    }
    writer.Commit();
    // unset the value of 'doc0'
    writer.UpdateNumericDocValue(new Term("id", "doc0"), "ndv", null);
    writer.Dispose();
    DirectoryReader reader = DirectoryReader.Open(dir);
    AtomicReader r = (AtomicReader)reader.Leaves[0].Reader;
    NumericDocValues ndv = r.GetNumericDocValues("ndv");
    for (int i = 0; i < r.MaxDoc; i++)
    {
        if (i == 0)
        {
            Assert.AreEqual(0, ndv.Get(i));
        }
        else
        {
            Assert.AreEqual(5, ndv.Get(i));
        }
    }
    Bits docsWithField = r.GetDocsWithField("ndv");
    Assert.IsFalse(docsWithField.Get(0));
    Assert.IsTrue(docsWithField.Get(1));
    reader.Dispose();
    dir.Dispose();
}
Example 6: TestTonsOfUpdates
public virtual void TestTonsOfUpdates()
{
    // LUCENE-5248: make sure that when there are many updates, we don't use too much RAM
    Directory dir = NewDirectory();
    Random random = Random();
    IndexWriterConfig conf = NewIndexWriterConfig(TEST_VERSION_CURRENT, new MockAnalyzer(random));
    conf.SetRAMBufferSizeMB(IndexWriterConfig.DEFAULT_RAM_BUFFER_SIZE_MB);
    conf.SetMaxBufferedDocs(IndexWriterConfig.DISABLE_AUTO_FLUSH); // don't flush by doc
    IndexWriter writer = new IndexWriter(dir, conf);
    // test data: lots of documents (few 10Ks) and lots of update terms (few hundreds)
    int numDocs = AtLeast(20000);
    int numNumericFields = AtLeast(5);
    int numTerms = TestUtil.NextInt(random, 10, 100); // terms should affect many docs
    HashSet<string> updateTerms = new HashSet<string>();
    while (updateTerms.Count < numTerms)
    {
        updateTerms.Add(TestUtil.RandomSimpleString(random));
    }
    // System.out.println("numDocs=" + numDocs + " numNumericFields=" + numNumericFields + " numTerms=" + numTerms);
    // build a large index with many NDV fields and update terms
    for (int i = 0; i < numDocs; i++)
    {
        Document doc = new Document();
        int numUpdateTerms = TestUtil.NextInt(random, 1, numTerms / 10);
        for (int j = 0; j < numUpdateTerms; j++)
        {
            doc.Add(new StringField("upd", RandomInts.RandomFrom(random, updateTerms), Store.NO));
        }
        for (int j = 0; j < numNumericFields; j++)
        {
            long val = random.Next();
            doc.Add(new NumericDocValuesField("f" + j, val));
            doc.Add(new NumericDocValuesField("cf" + j, val * 2));
        }
        writer.AddDocument(doc);
    }
    writer.Commit(); // commit so there's something to apply to
    // set to flush every 2048 bytes (approximately every 12 updates), so we get
    // many flushes during numeric updates
    writer.Config.SetRAMBufferSizeMB(2048.0 / 1024 / 1024);
    int numUpdates = AtLeast(100);
    // System.out.println("numUpdates=" + numUpdates);
    for (int i = 0; i < numUpdates; i++)
    {
        int field = random.Next(numNumericFields);
        Term updateTerm = new Term("upd", RandomInts.RandomFrom(random, updateTerms));
        long value = random.Next();
        writer.UpdateNumericDocValue(updateTerm, "f" + field, value);
        writer.UpdateNumericDocValue(updateTerm, "cf" + field, value * 2);
    }
    writer.Dispose();
    DirectoryReader reader = DirectoryReader.Open(dir);
    foreach (AtomicReaderContext context in reader.Leaves)
    {
        for (int i = 0; i < numNumericFields; i++)
        {
            AtomicReader r = context.AtomicReader;
            NumericDocValues f = r.GetNumericDocValues("f" + i);
            NumericDocValues cf = r.GetNumericDocValues("cf" + i);
            for (int j = 0; j < r.MaxDoc; j++)
            {
                Assert.AreEqual(cf.Get(j), f.Get(j) * 2, "reader=" + r + ", field=f" + i + ", doc=" + j);
            }
        }
    }
    reader.Dispose();
    dir.Dispose();
}
Example 7: TestSegmentMerges
public virtual void TestSegmentMerges()
{
    Directory dir = NewDirectory();
    Random random = Random();
    IndexWriterConfig conf = NewIndexWriterConfig(TEST_VERSION_CURRENT, new MockAnalyzer(random));
    IndexWriter writer = new IndexWriter(dir, (IndexWriterConfig)conf.Clone());
    int docid = 0;
    int numRounds = AtLeast(10);
    for (int rnd = 0; rnd < numRounds; rnd++)
    {
        Document doc = new Document();
        doc.Add(new StringField("key", "doc", Store.NO));
        doc.Add(new NumericDocValuesField("ndv", -1));
        int numDocs = AtLeast(30);
        for (int i = 0; i < numDocs; i++)
        {
            doc.RemoveField("id");
            doc.Add(new StringField("id", Convert.ToString(docid++), Store.NO));
            writer.AddDocument(doc);
        }
        long value = rnd + 1;
        writer.UpdateNumericDocValue(new Term("key", "doc"), "ndv", value);
        if (random.NextDouble() < 0.2) // randomly delete some docs
        {
            writer.DeleteDocuments(new Term("id", Convert.ToString(random.Next(docid))));
        }
        // randomly commit or reopen-IW (or nothing), before forceMerge
        if (random.NextDouble() < 0.4)
        {
            writer.Commit();
        }
        else if (random.NextDouble() < 0.1)
        {
            writer.Dispose();
            writer = new IndexWriter(dir, (IndexWriterConfig)conf.Clone());
        }
        // add another document with the current value, to be sure forceMerge has
        // something to merge (for instance, it could be that CMS finished merging
        // all segments down to 1 before the delete was applied, so when
        // forceMerge is called, the index will be with one segment and deletes
        // and some MPs might not merge it, thereby invalidating test's
        // assumption that the reader has no deletes).
        doc = new Document();
        doc.Add(new StringField("id", Convert.ToString(docid++), Store.NO));
        doc.Add(new StringField("key", "doc", Store.NO));
        doc.Add(new NumericDocValuesField("ndv", value));
        writer.AddDocument(doc);
        writer.ForceMerge(1, true);
        DirectoryReader reader;
        if (random.NextBoolean())
        {
            writer.Commit();
            reader = DirectoryReader.Open(dir);
        }
        else
        {
            reader = DirectoryReader.Open(writer, true);
        }
        Assert.AreEqual(1, reader.Leaves.Count);
        AtomicReader r = (AtomicReader)reader.Leaves[0].Reader;
        Assert.IsNull(r.LiveDocs, "index should have no deletes after forceMerge");
        NumericDocValues ndv = r.GetNumericDocValues("ndv");
        Assert.IsNotNull(ndv);
        for (int i = 0; i < r.MaxDoc; i++)
        {
            Assert.AreEqual(value, ndv.Get(i));
        }
        reader.Dispose();
    }
    writer.Dispose();
    dir.Dispose();
}
Example 8: TestMultipleNumericDocValues
public virtual void TestMultipleNumericDocValues()
{
    Directory dir = NewDirectory();
    IndexWriterConfig conf = NewIndexWriterConfig(TEST_VERSION_CURRENT, new MockAnalyzer(Random()));
    conf.SetMaxBufferedDocs(10); // prevent merges
    IndexWriter writer = new IndexWriter(dir, conf);
    for (int i = 0; i < 2; i++)
    {
        Document doc = new Document();
        doc.Add(new StringField("dvUpdateKey", "dv", Store.NO));
        doc.Add(new NumericDocValuesField("ndv1", i));
        doc.Add(new NumericDocValuesField("ndv2", i));
        writer.AddDocument(doc);
    }
    writer.Commit();
    // update all docs' ndv1 field
    writer.UpdateNumericDocValue(new Term("dvUpdateKey", "dv"), "ndv1", 17L);
    writer.Dispose();
    DirectoryReader reader = DirectoryReader.Open(dir);
    AtomicReader r = (AtomicReader)reader.Leaves[0].Reader;
    NumericDocValues ndv1 = r.GetNumericDocValues("ndv1");
    NumericDocValues ndv2 = r.GetNumericDocValues("ndv2");
    for (int i = 0; i < r.MaxDoc; i++)
    {
        Assert.AreEqual(17, ndv1.Get(i));
        Assert.AreEqual(i, ndv2.Get(i));
    }
    reader.Dispose();
    dir.Dispose();
}
Example 9: TestUpdateSegmentWithNoDocValues
public virtual void TestUpdateSegmentWithNoDocValues()
{
    Directory dir = NewDirectory();
    IndexWriterConfig conf = NewIndexWriterConfig(TEST_VERSION_CURRENT, new MockAnalyzer(Random()));
    // prevent merges, otherwise by the time updates are applied
    // (writer.Dispose()), the segments might have merged and that update becomes
    // legit.
    conf.SetMergePolicy(NoMergePolicy.COMPOUND_FILES);
    IndexWriter writer = new IndexWriter(dir, conf);
    // first segment with NDV
    Document doc = new Document();
    doc.Add(new StringField("id", "doc0", Store.NO));
    doc.Add(new NumericDocValuesField("ndv", 3));
    writer.AddDocument(doc);
    doc = new Document();
    doc.Add(new StringField("id", "doc4", Store.NO)); // document without 'ndv' field
    writer.AddDocument(doc);
    writer.Commit();
    // second segment with no NDV
    doc = new Document();
    doc.Add(new StringField("id", "doc1", Store.NO));
    writer.AddDocument(doc);
    doc = new Document();
    doc.Add(new StringField("id", "doc2", Store.NO)); // document that isn't updated
    writer.AddDocument(doc);
    writer.Commit();
    // update document in the first segment - should not affect docsWithField of
    // the document without NDV field
    writer.UpdateNumericDocValue(new Term("id", "doc0"), "ndv", 5L);
    // update document in the second segment - field should be added and we should
    // be able to handle the other document correctly (e.g. no NPE)
    writer.UpdateNumericDocValue(new Term("id", "doc1"), "ndv", 5L);
    writer.Dispose();
    DirectoryReader reader = DirectoryReader.Open(dir);
    foreach (AtomicReaderContext context in reader.Leaves)
    {
        AtomicReader r = context.AtomicReader;
        NumericDocValues ndv = r.GetNumericDocValues("ndv");
        Bits docsWithField = r.GetDocsWithField("ndv");
        Assert.IsNotNull(docsWithField);
        Assert.IsTrue(docsWithField.Get(0));
        Assert.AreEqual(5L, ndv.Get(0));
        Assert.IsFalse(docsWithField.Get(1));
        Assert.AreEqual(0L, ndv.Get(1));
    }
    reader.Dispose();
    dir.Dispose();
}
Example 10: TestUpdatesAndDeletes
public virtual void TestUpdatesAndDeletes()
{
    // create an index with a segment with only deletes, a segment with both
    // deletes and updates and a segment with only updates
    Directory dir = NewDirectory();
    IndexWriterConfig conf = NewIndexWriterConfig(TEST_VERSION_CURRENT, new MockAnalyzer(Random()));
    conf.SetMaxBufferedDocs(10); // control segment flushing
    conf.SetMergePolicy(NoMergePolicy.COMPOUND_FILES); // prevent merges for this test
    IndexWriter writer = new IndexWriter(dir, conf);
    for (int i = 0; i < 6; i++)
    {
        writer.AddDocument(Doc(i));
        if (i % 2 == 1)
        {
            writer.Commit(); // create 2-docs segments
        }
    }
    // delete doc-1 and doc-2
    writer.DeleteDocuments(new Term("id", "doc-1"), new Term("id", "doc-2")); // 1st and 2nd segments
    // update docs 3 and 5
    writer.UpdateNumericDocValue(new Term("id", "doc-3"), "val", 17L);
    writer.UpdateNumericDocValue(new Term("id", "doc-5"), "val", 17L);
    DirectoryReader reader;
    if (Random().NextBoolean()) // not NRT
    {
        writer.Dispose();
        reader = DirectoryReader.Open(dir);
    }
    else // NRT
    {
        reader = DirectoryReader.Open(writer, true);
        writer.Dispose();
    }
    AtomicReader slow = SlowCompositeReaderWrapper.Wrap(reader);
    Bits liveDocs = slow.LiveDocs;
    bool[] expectedLiveDocs = new bool[] { true, false, false, true, true, true };
    for (int i = 0; i < expectedLiveDocs.Length; i++)
    {
        Assert.AreEqual(expectedLiveDocs[i], liveDocs.Get(i));
    }
    long[] expectedValues = new long[] { 1, 2, 3, 17, 5, 17 };
    NumericDocValues ndv = slow.GetNumericDocValues("val");
    for (int i = 0; i < expectedValues.Length; i++)
    {
        Assert.AreEqual(expectedValues[i], ndv.Get(i));
    }
    reader.Dispose();
    dir.Dispose();
}
Example 11: TestUpdateSameDocMultipleTimes
public virtual void TestUpdateSameDocMultipleTimes()
{
    Directory dir = NewDirectory();
    IndexWriterConfig conf = NewIndexWriterConfig(TEST_VERSION_CURRENT, new MockAnalyzer(Random()));
    IndexWriter writer = new IndexWriter(dir, conf);
    Document doc = new Document();
    doc.Add(new StringField("key", "doc", Store.NO));
    doc.Add(new NumericDocValuesField("ndv", 5));
    writer.AddDocument(doc); // flushed document
    writer.Commit();
    writer.AddDocument(doc); // in-memory document
    writer.UpdateNumericDocValue(new Term("key", "doc"), "ndv", 17L); // update existing field
    writer.UpdateNumericDocValue(new Term("key", "doc"), "ndv", 3L); // update existing field 2nd time in this commit
    writer.Dispose();
    DirectoryReader reader = DirectoryReader.Open(dir);
    AtomicReader r = SlowCompositeReaderWrapper.Wrap(reader);
    NumericDocValues ndv = r.GetNumericDocValues("ndv");
    for (int i = 0; i < r.MaxDoc; i++)
    {
        Assert.AreEqual(3, ndv.Get(i));
    }
    reader.Dispose();
    dir.Dispose();
}
Example 12: TestUpdateOldSegments
public virtual void TestUpdateOldSegments()
{
    Codec[] oldCodecs = new Codec[] { new Lucene40RWCodec(), new Lucene41RWCodec(), new Lucene42RWCodec(), new Lucene45RWCodec() };
    Directory dir = NewDirectory();
    bool oldValue = OLD_FORMAT_IMPERSONATION_IS_ACTIVE;
    // create a segment with an old Codec
    IndexWriterConfig conf = NewIndexWriterConfig(TEST_VERSION_CURRENT, new MockAnalyzer(Random()));
    conf.SetCodec(oldCodecs[Random().Next(oldCodecs.Length)]);
    OLD_FORMAT_IMPERSONATION_IS_ACTIVE = true;
    IndexWriter writer = new IndexWriter(dir, conf);
    Document doc = new Document();
    doc.Add(new StringField("id", "doc", Store.NO));
    doc.Add(new NumericDocValuesField("f", 5));
    writer.AddDocument(doc);
    writer.Dispose();
    conf = NewIndexWriterConfig(TEST_VERSION_CURRENT, new MockAnalyzer(Random()));
    writer = new IndexWriter(dir, conf);
    writer.UpdateNumericDocValue(new Term("id", "doc"), "f", 4L);
    OLD_FORMAT_IMPERSONATION_IS_ACTIVE = false;
    try
    {
        writer.Dispose();
        Assert.Fail("should not have succeeded to update a segment written with an old Codec");
    }
    catch (System.NotSupportedException e)
    {
        writer.Rollback();
    }
    finally
    {
        OLD_FORMAT_IMPERSONATION_IS_ACTIVE = oldValue;
    }
    dir.Dispose();
}
Example 13: TestUpdateNumericDVFieldWithSameNameAsPostingField
public virtual void TestUpdateNumericDVFieldWithSameNameAsPostingField()
{
    // this used to fail because FieldInfos.Builder neglected to update
    // globalFieldMaps.docValueTypes map
    Directory dir = NewDirectory();
    IndexWriterConfig conf = NewIndexWriterConfig(TEST_VERSION_CURRENT, new MockAnalyzer(Random()));
    IndexWriter writer = new IndexWriter(dir, conf);
    Document doc = new Document();
    doc.Add(new StringField("f", "mock-value", Store.NO));
    doc.Add(new NumericDocValuesField("f", 5));
    writer.AddDocument(doc);
    writer.Commit();
    writer.UpdateNumericDocValue(new Term("f", "mock-value"), "f", 17L);
    writer.Dispose();
    DirectoryReader r = DirectoryReader.Open(dir);
    NumericDocValues ndv = ((AtomicReader)r.Leaves[0].Reader).GetNumericDocValues("f");
    Assert.AreEqual(17, ndv.Get(0));
    r.Dispose();
    dir.Dispose();
}
Example 14: TestUpdateNonNumericDocValuesField
public virtual void TestUpdateNonNumericDocValuesField()
{
    // we don't support adding new fields or updating existing non-numeric-dv
    // fields through numeric updates
    Directory dir = NewDirectory();
    IndexWriterConfig conf = NewIndexWriterConfig(TEST_VERSION_CURRENT, new MockAnalyzer(Random()));
    IndexWriter writer = new IndexWriter(dir, conf);
    Document doc = new Document();
    doc.Add(new StringField("key", "doc", Store.NO));
    doc.Add(new StringField("foo", "bar", Store.NO));
    writer.AddDocument(doc); // flushed document
    writer.Commit();
    writer.AddDocument(doc); // in-memory document
    try
    {
        writer.UpdateNumericDocValue(new Term("key", "doc"), "ndv", 17L);
        Assert.Fail("should not have allowed creating new fields through update");
    }
    catch (System.ArgumentException e)
    {
        // ok
    }
    try
    {
        writer.UpdateNumericDocValue(new Term("key", "doc"), "foo", 17L);
        Assert.Fail("should not have allowed updating an existing field to numeric-dv");
    }
    catch (System.ArgumentException e)
    {
        // ok
    }
    writer.Dispose();
    dir.Dispose();
}
Example 15: TestUpdateFewSegments
public virtual void TestUpdateFewSegments()
{
    Directory dir = NewDirectory();
    IndexWriterConfig conf = NewIndexWriterConfig(TEST_VERSION_CURRENT, new MockAnalyzer(Random()));
    conf.SetMaxBufferedDocs(2); // generate few segments
    conf.SetMergePolicy(NoMergePolicy.COMPOUND_FILES); // prevent merges for this test
    IndexWriter writer = new IndexWriter(dir, conf);
    int numDocs = 10;
    long[] expectedValues = new long[numDocs];
    for (int i = 0; i < numDocs; i++)
    {
        writer.AddDocument(Doc(i));
        expectedValues[i] = i + 1;
    }
    writer.Commit();
    // update few docs
    for (int i = 0; i < numDocs; i++)
    {
        if (Random().NextDouble() < 0.4)
        {
            long value = (i + 1) * 2;
            writer.UpdateNumericDocValue(new Term("id", "doc-" + i), "val", value);
            expectedValues[i] = value;
        }
    }
    DirectoryReader reader;
    if (Random().NextBoolean()) // not NRT
    {
        writer.Dispose();
        reader = DirectoryReader.Open(dir);
    }
    else // NRT
    {
        reader = DirectoryReader.Open(writer, true);
        writer.Dispose();
    }
    foreach (AtomicReaderContext context in reader.Leaves)
    {
        AtomicReader r = context.AtomicReader;
        NumericDocValues ndv = r.GetNumericDocValues("val");
        Assert.IsNotNull(ndv);
        for (int i = 0; i < r.MaxDoc; i++)
        {
            long expected = expectedValues[i + context.DocBase];
            long actual = ndv.Get(i);
            Assert.AreEqual(expected, actual);
        }
    }
    reader.Dispose();
    dir.Dispose();
}