本文整理汇总了C#中Lucene.Net.Index.SegmentReader.Dispose方法的典型用法代码示例。如果您正苦于以下问题:C# SegmentReader.Dispose方法的具体用法?C# SegmentReader.Dispose怎么用?C# SegmentReader.Dispose使用的例子?那么恭喜您, 这里精选的方法代码示例或许可以为您提供帮助。您也可以进一步了解该方法所在类Lucene.Net.Index.SegmentReader的用法示例。
在下文中一共展示了SegmentReader.Dispose方法的10个代码示例,这些例子默认根据受欢迎程度排序。您可以为喜欢或者感觉有用的代码点赞,您的评价将有助于系统推荐出更棒的C#代码示例。
示例1: TestTermDocs
/// <summary>
/// Verifies that a term's postings can be read back from the segment:
/// seeks the terms enum of TEXT_FIELD_2 to "field" and, if a document is
/// returned, checks it is doc 0 with a term frequency of 3.
/// </summary>
public virtual void TestTermDocs(int indexDivisor)
{
    // After adding the document, we should be able to read it back in.
    SegmentReader reader = new SegmentReader(Info, indexDivisor, NewIOContext(Random()));
    Assert.IsTrue(reader != null);
    Assert.AreEqual(indexDivisor, reader.TermInfosIndexDivisor);

    TermsEnum termsIter = reader.Fields.Terms(DocHelper.TEXT_FIELD_2_KEY).Iterator(null);
    termsIter.SeekCeil(new BytesRef("field"));
    DocsEnum postings = TestUtil.Docs(Random(), termsIter, reader.LiveDocs, null, DocsEnum.FLAG_FREQS);

    if (postings.NextDoc() != DocIdSetIterator.NO_MORE_DOCS)
    {
        // The lone indexed document must be doc 0, with "field" occurring 3 times.
        Assert.IsTrue(postings.DocID() == 0);
        Assert.IsTrue(postings.Freq() == 3);
    }
    reader.Dispose();
}
示例2: TestBadSeek
/// <summary>
/// Requesting postings for a term that was never indexed must yield a
/// null DocsEnum — both for a field that exists ("textField2") and for a
/// field that does not ("junk").
/// </summary>
public virtual void TestBadSeek(int indexDivisor)
{
    string[] probeFields = { "textField2", "junk" };
    foreach (string probeField in probeFields)
    {
        // After adding the document, we should be able to read it back in.
        SegmentReader reader = new SegmentReader(Info, indexDivisor, NewIOContext(Random()));
        Assert.IsTrue(reader != null);
        DocsEnum postings = TestUtil.Docs(Random(), reader, probeField, new BytesRef("bad"), reader.LiveDocs, null, 0);
        Assert.IsNull(postings);
        reader.Dispose();
    }
}
示例3: DoCheckIndex
// NOTE(review): truncated excerpt of a DoCheckIndex implementation — the
// method's beginning and end were omitted by the source page, so this
// fragment does not compile on its own.
//......... portion of code omitted .........
{
// LUCENENET NOTE: Some tests rely on the error type being in
// the message. We can't get the error type with StackTrace, we
// need ToString() for that.
infoStream.WriteLine(t.ToString());
//infoStream.WriteLine(t.StackTrace);
}
// Could not open the segments: flag it on the result and bail out early.
result.CantOpenSegments = true;
return result;
}
// Read the segments-file format version; failure is reported but not thrown.
int format = 0;
try
{
format = input.ReadInt();
}
catch (Exception t)
{
Msg(infoStream, "ERROR: could not read segment file version in directory");
if (infoStream != null)
{
// LUCENENET NOTE: Some tests rely on the error type being in
// the message. We can't get the error type with StackTrace, we
// need ToString() for that.
infoStream.WriteLine(t.ToString());
//infoStream.WriteLine(t.StackTrace);
}
result.MissingSegmentVersion = true;
return result;
}
finally
{
// The input is disposed on both the success and the failure path.
if (input != null)
{
input.Dispose();
}
}
string sFormat = "";
bool skip = false;
// Copy the basic segment statistics onto the status object.
result.SegmentsFileName = segmentsFileName;
result.NumSegments = numSegments;
result.UserData = sis.UserData;
// Build a human-readable userData suffix (empty when there is none).
string userDataString;
if (sis.UserData.Count > 0)
{
userDataString = " userData=" + sis.UserData;
}
else
{
userDataString = "";
}
// Describe the version range of the segments, when any version was found.
string versionString = null;
if (oldSegs != null)
{
if (foundNonNullVersion)
{
versionString = "versions=[" + oldSegs + " .. " + newest + "]";
}
else
{
versionString = "version=" + oldSegs;
}
}
else
示例4: PrintSegment
/// <summary>
/// Dumps a segment to <paramref name="@out"/>: every stored document,
/// followed by each field's terms with their doc frequency and, per
/// matching document, the term frequency and all positions.
/// </summary>
private void PrintSegment(StreamWriter @out, SegmentCommitInfo si)
{
    SegmentReader segmentReader = new SegmentReader(si, DirectoryReader.DEFAULT_TERMS_INDEX_DIVISOR, NewIOContext(Random()));

    for (int docIndex = 0; docIndex < segmentReader.NumDocs; docIndex++)
    {
        @out.WriteLine(segmentReader.Document(docIndex));
    }

    Fields segmentFields = segmentReader.Fields;
    foreach (string fieldName in segmentFields)
    {
        Terms fieldTerms = segmentFields.Terms(fieldName);
        Assert.IsNotNull(fieldTerms);
        TermsEnum termsIter = fieldTerms.Iterator(null);
        while (termsIter.Next() != null)
        {
            @out.Write(" term=" + fieldName + ":" + termsIter.Term());
            @out.WriteLine(" DF=" + termsIter.DocFreq());

            DocsAndPositionsEnum postings = termsIter.DocsAndPositions(segmentReader.LiveDocs, null);
            while (postings.NextDoc() != DocIdSetIterator.NO_MORE_DOCS)
            {
                @out.Write(" doc=" + postings.DocID());
                @out.Write(" TF=" + postings.Freq());
                // First position is written bare; the rest comma-separated.
                @out.Write(" pos=");
                @out.Write(postings.NextPosition());
                for (int posIndex = 1; posIndex < postings.Freq(); posIndex++)
                {
                    @out.Write("," + postings.NextPosition());
                }
                @out.WriteLine("");
            }
        }
    }
    segmentReader.Dispose();
}
示例5: TestMerge
/// <summary>
/// Merges Reader1 and Reader2 into a fresh segment and verifies the
/// result: document count, stored-field counts, postings for
/// TEXT_FIELD_2, the number of fields carrying term vectors, the term
/// vector contents of doc 0, and the norms.
/// </summary>
public virtual void TestMerge()
{
    Codec codec = Codec.Default;
    SegmentInfo mergeTarget = new SegmentInfo(MergedDir, Constants.LUCENE_MAIN_VERSION, MergedSegment, -1, false, codec, null);
    SegmentMerger segmentMerger = new SegmentMerger(Arrays.AsList<AtomicReader>(Reader1, Reader2), mergeTarget, InfoStream.Default, MergedDir, IndexWriterConfig.DEFAULT_TERM_INDEX_INTERVAL, MergeState.CheckAbort.NONE, new FieldInfos.FieldNumbers(), NewIOContext(Random()), true);
    MergeState mergeState = segmentMerger.Merge();
    int mergedDocCount = mergeState.SegmentInfo.DocCount;
    Assert.AreEqual(2, mergedDocCount);

    // Should be able to open a new SegmentReader against the new directory.
    SegmentReader mergedReader = new SegmentReader(new SegmentCommitInfo(new SegmentInfo(MergedDir, Constants.LUCENE_MAIN_VERSION, MergedSegment, mergedDocCount, false, codec, null), 0, -1L, -1L), DirectoryReader.DEFAULT_TERMS_INDEX_DIVISOR, NewIOContext(Random()));
    Assert.IsTrue(mergedReader != null);
    Assert.AreEqual(2, mergedReader.NumDocs);

    Document firstDoc = mergedReader.Document(0);
    Assert.IsTrue(firstDoc != null);
    // There are 2 unstored fields on the document.
    Assert.AreEqual(DocHelper.NumFields(Doc1) - DocHelper.Unstored.Count, DocHelper.NumFields(firstDoc));
    Document secondDoc = mergedReader.Document(1);
    Assert.IsTrue(secondDoc != null);
    Assert.AreEqual(DocHelper.NumFields(Doc2) - DocHelper.Unstored.Count, DocHelper.NumFields(secondDoc));

    DocsEnum postings = TestUtil.Docs(Random(), mergedReader, DocHelper.TEXT_FIELD_2_KEY, new BytesRef("field"), MultiFields.GetLiveDocs(mergedReader), null, 0);
    Assert.IsTrue(postings != null);
    Assert.IsTrue(postings.NextDoc() != DocIdSetIterator.NO_MORE_DOCS);

    // Count how many merged fields kept their term vectors.
    int vectorFieldCount = 0;
    foreach (FieldInfo fieldInfo in mergedReader.FieldInfos)
    {
        if (fieldInfo.HasVectors())
        {
            vectorFieldCount++;
        }
    }
    Assert.AreEqual(3, vectorFieldCount, "We do not have 3 fields that were indexed with term vector");

    Terms vector = mergedReader.GetTermVectors(0).Terms(DocHelper.TEXT_FIELD_2_KEY);
    Assert.IsNotNull(vector);
    Assert.AreEqual(3, vector.Size());
    TermsEnum vectorTerms = vector.Iterator(null);
    int termIndex = 0;
    while (vectorTerms.Next() != null)
    {
        string term = vectorTerms.Term().Utf8ToString();
        int freq = (int)vectorTerms.TotalTermFreq();
        // Every vector term must appear in the source text with the expected frequency.
        Assert.IsTrue(DocHelper.FIELD_2_TEXT.IndexOf(term) != -1);
        Assert.IsTrue(DocHelper.FIELD_2_FREQS[termIndex] == freq);
        termIndex++;
    }
    TestSegmentReader.CheckNorms(mergedReader);
    mergedReader.Dispose();
}
示例6: Merge
/// <summary>
/// Merges two committed segments into a new segment named
/// <paramref name="merged"/>, optionally packing the result into a
/// compound file, and returns the commit info for the merged segment.
/// </summary>
private SegmentCommitInfo Merge(Directory dir, SegmentCommitInfo si1, SegmentCommitInfo si2, string merged, bool useCompoundFile)
{
    IOContext ioContext = NewIOContext(Random());
    SegmentReader leftReader = new SegmentReader(si1, DirectoryReader.DEFAULT_TERMS_INDEX_DIVISOR, ioContext);
    SegmentReader rightReader = new SegmentReader(si2, DirectoryReader.DEFAULT_TERMS_INDEX_DIVISOR, ioContext);

    Codec codec = Codec.Default;
    // Track files the merge creates so they can be recorded on the new SegmentInfo.
    TrackingDirectoryWrapper trackingDir = new TrackingDirectoryWrapper(si1.Info.Dir);
    SegmentInfo mergeTarget = new SegmentInfo(si1.Info.Dir, Constants.LUCENE_MAIN_VERSION, merged, -1, false, codec, null);
    SegmentMerger segmentMerger = new SegmentMerger(Arrays.AsList<AtomicReader>(leftReader, rightReader), mergeTarget, InfoStream.Default, trackingDir, IndexWriterConfig.DEFAULT_TERM_INDEX_INTERVAL, MergeState.CheckAbort.NONE, new FieldInfos.FieldNumbers(), ioContext, true);
    segmentMerger.Merge();
    leftReader.Dispose();
    rightReader.Dispose();

    SegmentInfo mergedInfo = new SegmentInfo(si1.Info.Dir, Constants.LUCENE_MAIN_VERSION, merged, si1.Info.DocCount + si2.Info.DocCount, false, codec, null);
    mergedInfo.Files = new HashSet<string>(trackingDir.CreatedFiles);

    if (useCompoundFile)
    {
        // Fold the merged files into a CFS and delete the now-redundant loose files.
        ICollection<string> filesToDelete = IndexWriter.CreateCompoundFile(InfoStream.Default, dir, MergeState.CheckAbort.NONE, mergedInfo, NewIOContext(Random()));
        mergedInfo.UseCompoundFile = true;
        foreach (string obsoleteFile in filesToDelete)
        {
            si1.Info.Dir.DeleteFile(obsoleteFile);
        }
    }
    return new SegmentCommitInfo(mergedInfo, 0, -1L, -1L);
}
示例7: TestAddDocument
/// <summary>
/// Indexes a single DocHelper document, reopens its segment directly,
/// and verifies stored-field round-tripping, term-vector flags, and
/// that norms are absent exactly for indexed fields that omit them.
/// </summary>
public virtual void TestAddDocument()
{
    Document testDoc = new Document();
    DocHelper.SetupDoc(testDoc);
    IndexWriter writer = new IndexWriter(Dir, NewIndexWriterConfig(TEST_VERSION_CURRENT, new MockAnalyzer(Random())));
    writer.AddDocument(testDoc);
    writer.Commit();
    SegmentCommitInfo info = writer.NewestSegment();
    writer.Dispose();

    // After adding the document, we should be able to read it back in.
    SegmentReader reader = new SegmentReader(info, DirectoryReader.DEFAULT_TERMS_INDEX_DIVISOR, NewIOContext(Random()));
    Assert.IsTrue(reader != null);
    Document storedDoc = reader.Document(0);
    Assert.IsTrue(storedDoc != null);

    // textField2 is stored with term vectors enabled.
    IndexableField[] storedFields = storedDoc.GetFields("textField2");
    Assert.IsTrue(storedFields != null && storedFields.Length == 1);
    Assert.AreEqual(DocHelper.FIELD_2_TEXT, storedFields[0].StringValue);
    Assert.IsTrue(storedFields[0].FieldType().StoreTermVectors);

    // textField1 is stored without term vectors.
    storedFields = storedDoc.GetFields("textField1");
    Assert.IsTrue(storedFields != null && storedFields.Length == 1);
    Assert.AreEqual(DocHelper.FIELD_1_TEXT, storedFields[0].StringValue);
    Assert.IsFalse(storedFields[0].FieldType().StoreTermVectors);

    storedFields = storedDoc.GetFields("keyField");
    Assert.IsTrue(storedFields != null && storedFields.Length == 1);
    Assert.AreEqual(DocHelper.KEYWORD_TEXT, storedFields[0].StringValue);

    storedFields = storedDoc.GetFields(DocHelper.NO_NORMS_KEY);
    Assert.IsTrue(storedFields != null && storedFields.Length == 1);
    Assert.AreEqual(DocHelper.NO_NORMS_TEXT, storedFields[0].StringValue);

    storedFields = storedDoc.GetFields(DocHelper.TEXT_FIELD_3_KEY);
    Assert.IsTrue(storedFields != null && storedFields.Length == 1);
    Assert.AreEqual(DocHelper.FIELD_3_TEXT, storedFields[0].StringValue);

    // Test that the norms are not present in the segment if omitNorms is true.
    foreach (FieldInfo fi in reader.FieldInfos)
    {
        if (fi.Indexed)
        {
            Assert.IsTrue(fi.OmitsNorms() == (reader.GetNormValues(fi.Name) == null));
        }
    }
    reader.Dispose();
}
示例8: TestTokenReuse
/// <summary>
/// Indexes "a 5 a a" through a token-reusing analyzer and verifies that
/// the positions, frequency, and payloads of "a" survive token reuse:
/// three occurrences at positions 0 (with payload), 6 and 7 (without).
/// </summary>
public virtual void TestTokenReuse()
{
    Analyzer analyzer = new AnalyzerAnonymousInnerClassHelper2(this);
    IndexWriter writer = new IndexWriter(Dir, NewIndexWriterConfig(TEST_VERSION_CURRENT, analyzer));

    Document doc = new Document();
    doc.Add(NewTextField("f1", "a 5 a a", Field.Store.YES));
    writer.AddDocument(doc);
    writer.Commit();
    SegmentCommitInfo info = writer.NewestSegment();
    writer.Dispose();

    SegmentReader reader = new SegmentReader(info, DirectoryReader.DEFAULT_TERMS_INDEX_DIVISOR, NewIOContext(Random()));
    DocsAndPositionsEnum positions = MultiFields.GetTermPositionsEnum(reader, reader.LiveDocs, "f1", new BytesRef("a"));
    Assert.IsTrue(positions.NextDoc() != DocIdSetIterator.NO_MORE_DOCS);
    Assert.AreEqual(3, positions.Freq());
    Assert.AreEqual(0, positions.NextPosition());
    Assert.IsNotNull(positions.Payload);
    Assert.AreEqual(6, positions.NextPosition());
    Assert.IsNull(positions.Payload);
    Assert.AreEqual(7, positions.NextPosition());
    Assert.IsNull(positions.Payload);
    reader.Dispose();
}
示例9: TestPreAnalyzedField
/// <summary>
/// Indexes a field backed by a pre-built TokenStream and verifies the
/// recorded frequency and positions of each of its three terms.
/// </summary>
public virtual void TestPreAnalyzedField()
{
    IndexWriter writer = new IndexWriter(Dir, NewIndexWriterConfig(TEST_VERSION_CURRENT, new MockAnalyzer(Random())));
    Document doc = new Document();
    doc.Add(new TextField("preanalyzed", new TokenStreamAnonymousInnerClassHelper(this)));
    writer.AddDocument(doc);
    writer.Commit();
    SegmentCommitInfo info = writer.NewestSegment();
    writer.Dispose();

    SegmentReader reader = new SegmentReader(info, DirectoryReader.DEFAULT_TERMS_INDEX_DIVISOR, NewIOContext(Random()));

    // term1: one occurrence, at position 0.
    DocsAndPositionsEnum postings = reader.TermPositionsEnum(new Term("preanalyzed", "term1"));
    Assert.IsTrue(postings.NextDoc() != DocIdSetIterator.NO_MORE_DOCS);
    Assert.AreEqual(1, postings.Freq());
    Assert.AreEqual(0, postings.NextPosition());

    // term2: two occurrences, at positions 1 and 3.
    postings = reader.TermPositionsEnum(new Term("preanalyzed", "term2"));
    Assert.IsTrue(postings.NextDoc() != DocIdSetIterator.NO_MORE_DOCS);
    Assert.AreEqual(2, postings.Freq());
    Assert.AreEqual(1, postings.NextPosition());
    Assert.AreEqual(3, postings.NextPosition());

    // term3: one occurrence, at position 2.
    postings = reader.TermPositionsEnum(new Term("preanalyzed", "term3"));
    Assert.IsTrue(postings.NextDoc() != DocIdSetIterator.NO_MORE_DOCS);
    Assert.AreEqual(1, postings.Freq());
    Assert.AreEqual(2, postings.NextPosition());
    reader.Dispose();
}
示例10: TestPositionIncrementGap
/// <summary>
/// Verifies that two values of the same field are separated by the
/// analyzer's position increment gap: "repeated" lands at position 0 in
/// the first value and at 502 in the second (gap presumably 500, set by
/// AnalyzerAnonymousInnerClassHelper — verify there).
/// </summary>
public virtual void TestPositionIncrementGap()
{
    Analyzer analyzer = new AnalyzerAnonymousInnerClassHelper(this);
    IndexWriter writer = new IndexWriter(Dir, NewIndexWriterConfig(TEST_VERSION_CURRENT, analyzer));

    Document doc = new Document();
    doc.Add(NewTextField("repeated", "repeated one", Field.Store.YES));
    doc.Add(NewTextField("repeated", "repeated two", Field.Store.YES));
    writer.AddDocument(doc);
    writer.Commit();
    SegmentCommitInfo info = writer.NewestSegment();
    writer.Dispose();

    SegmentReader reader = new SegmentReader(info, DirectoryReader.DEFAULT_TERMS_INDEX_DIVISOR, NewIOContext(Random()));
    DocsAndPositionsEnum positions = MultiFields.GetTermPositionsEnum(reader, MultiFields.GetLiveDocs(reader), "repeated", new BytesRef("repeated"));
    Assert.IsTrue(positions.NextDoc() != DocIdSetIterator.NO_MORE_DOCS);
    Assert.AreEqual(2, positions.Freq());
    Assert.AreEqual(0, positions.NextPosition());
    Assert.AreEqual(502, positions.NextPosition());
    reader.Dispose();
}