This article collects typical usage examples of the C# method Lucene.Net.Index.SegmentInfo.GetDocStoreOffset. If you have been wondering what SegmentInfo.GetDocStoreOffset is for in C#, or how calling it looks in practice, the selected code examples below may help. You can also read further about the containing class, Lucene.Net.Index.SegmentInfo.
Three code examples of the SegmentInfo.GetDocStoreOffset method are shown below, sorted by popularity by default. You can upvote the examples you like or find useful; your ratings help the system recommend better C# code examples.
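All three examples rely on the same convention: GetDocStoreOffset() returns -1 when the segment keeps its stored fields and term vectors in its own files, and a non-negative offset when the segment shares a doc store with other segments, in which case GetDocStoreSegment() names the shared files and the offset marks where this segment's documents begin. The following minimal sketch illustrates that check in isolation; it assumes a SegmentInfo instance si and a segment name segment are already in scope, and it is an illustrative fragment rather than code from the Lucene.Net source.

// Illustrative sketch (hypothetical variables si and segment): choose which
// segment name to read stored fields from, based on GetDocStoreOffset().
System.String storesSegment;
int docStoreOffset = si.GetDocStoreOffset();
if (docStoreOffset != -1)
{
    // Shared doc store: read from the doc-store segment, starting at docStoreOffset.
    storesSegment = si.GetDocStoreSegment();
}
else
{
    // No shared doc store: this segment's own files hold its documents.
    storesSegment = segment;
}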
Example 1: ReopenSegment
internal virtual SegmentReader ReopenSegment(SegmentInfo si)
{
    lock (this)
    {
        bool deletionsUpToDate = (this.si.HasDeletions() == si.HasDeletions()) && (!si.HasDeletions() || this.si.GetDelFileName().Equals(si.GetDelFileName()));
        bool normsUpToDate = true;
        bool[] fieldNormsChanged = new bool[fieldInfos.Size()];
        if (normsUpToDate)
        {
            for (int i = 0; i < fieldInfos.Size(); i++)
            {
                if (!this.si.GetNormFileName(i).Equals(si.GetNormFileName(i)))
                {
                    normsUpToDate = false;
                    fieldNormsChanged[i] = true;
                }
            }
        }
        if (normsUpToDate && deletionsUpToDate)
        {
            return this;
        }
        // clone reader
        SegmentReader clone;
        if (readOnly)
            clone = new ReadOnlySegmentReader();
        else
            clone = new SegmentReader();
        bool success = false;
        try
        {
            clone.readOnly = readOnly;
            clone.directory = directory;
            clone.si = si;
            clone.segment = segment;
            clone.readBufferSize = readBufferSize;
            clone.cfsReader = cfsReader;
            clone.storeCFSReader = storeCFSReader;
            clone.fieldInfos = fieldInfos;
            clone.tis = tis;
            clone.freqStream = freqStream;
            clone.proxStream = proxStream;
            clone.termVectorsReaderOrig = termVectorsReaderOrig;
            // we have to open a new FieldsReader, because it is not thread-safe
            // and can thus not be shared among multiple SegmentReaders
            // TODO: Change this in case FieldsReader becomes thread-safe in the future
            System.String fieldsSegment;
            Directory storeDir = Directory();
            if (si.GetDocStoreOffset() != -1)
            {
                fieldsSegment = si.GetDocStoreSegment();
                if (storeCFSReader != null)
                {
                    storeDir = storeCFSReader;
                }
            }
            else
            {
                fieldsSegment = segment;
                if (cfsReader != null)
                {
                    storeDir = cfsReader;
                }
            }
            if (fieldsReader != null)
            {
                clone.fieldsReader = new FieldsReader(storeDir, fieldsSegment, fieldInfos, readBufferSize, si.GetDocStoreOffset(), si.docCount);
            }
            if (!deletionsUpToDate)
            {
                // load deleted docs
                clone.deletedDocs = null;
                clone.LoadDeletedDocs();
            }
            else
            {
                clone.deletedDocs = this.deletedDocs;
            }
            clone.norms = new System.Collections.Hashtable();
            if (!normsUpToDate)
            {
                // load norms
                for (int i = 0; i < fieldNormsChanged.Length; i++)
                {
                    // copy unchanged norms to the cloned reader and incRef those norms
                    if (!fieldNormsChanged[i])
                    {
                        System.String curField = fieldInfos.FieldInfo(i).name;
                        Norm norm = (Norm) this.norms[curField];
//......... remainder of this example omitted .........
Example 2: OpenDocStores
internal void OpenDocStores(SegmentInfo si)
{
    lock (this)
    {
        System.Diagnostics.Debug.Assert(si.name.Equals(segment));
        if (fieldsReaderOrig == null)
        {
            Directory storeDir;
            if (si.GetDocStoreOffset() != -1)
            {
                if (si.GetDocStoreIsCompoundFile())
                {
                    System.Diagnostics.Debug.Assert(storeCFSReader == null);
                    storeCFSReader = new CompoundFileReader(dir, si.GetDocStoreSegment() + "." + IndexFileNames.COMPOUND_FILE_STORE_EXTENSION, readBufferSize);
                    storeDir = storeCFSReader;
                    System.Diagnostics.Debug.Assert(storeDir != null);
                }
                else
                {
                    storeDir = dir;
                    System.Diagnostics.Debug.Assert(storeDir != null);
                }
            }
            else if (si.GetUseCompoundFile())
            {
                // In some cases, we were originally opened when CFS
                // was not used, but then we are asked to open doc
                // stores after the segment has switched to CFS
                if (cfsReader == null)
                {
                    cfsReader = new CompoundFileReader(dir, segment + "." + IndexFileNames.COMPOUND_FILE_EXTENSION, readBufferSize);
                }
                storeDir = cfsReader;
                System.Diagnostics.Debug.Assert(storeDir != null);
            }
            else
            {
                storeDir = dir;
                System.Diagnostics.Debug.Assert(storeDir != null);
            }
            System.String storesSegment;
            if (si.GetDocStoreOffset() != -1)
            {
                storesSegment = si.GetDocStoreSegment();
            }
            else
            {
                storesSegment = segment;
            }
            fieldsReaderOrig = new FieldsReader(storeDir, storesSegment, fieldInfos, readBufferSize, si.GetDocStoreOffset(), si.docCount);
            // Verify two sources of "maxDoc" agree:
            if (si.GetDocStoreOffset() == -1 && fieldsReaderOrig.Size() != si.docCount)
            {
                throw new CorruptIndexException("doc counts differ for segment " + segment + ": fieldsReader shows " + fieldsReaderOrig.Size() + " but segmentInfo shows " + si.docCount);
            }
            if (fieldInfos.HasVectors())
            {
                // open term vector files only as needed
                termVectorsReaderOrig = new TermVectorsReader(storeDir, storesSegment, fieldInfos, readBufferSize, si.GetDocStoreOffset(), si.docCount);
            }
        }
    }
}
Example 3: Initialize
private void Initialize(SegmentInfo si, int readBufferSize, bool doOpenStores)
{
    segment = si.name;
    this.si = si;
    this.readBufferSize = readBufferSize;
    bool success = false;
    try
    {
        // Use compound file directory for some files, if it exists
        Directory cfsDir = Directory();
        if (si.GetUseCompoundFile())
        {
            cfsReader = new CompoundFileReader(Directory(), segment + "." + IndexFileNames.COMPOUND_FILE_EXTENSION, readBufferSize);
            cfsDir = cfsReader;
        }
        Directory storeDir;
        if (doOpenStores)
        {
            if (si.GetDocStoreOffset() != -1)
            {
                if (si.GetDocStoreIsCompoundFile())
                {
                    storeCFSReader = new CompoundFileReader(Directory(), si.GetDocStoreSegment() + "." + IndexFileNames.COMPOUND_FILE_STORE_EXTENSION, readBufferSize);
                    storeDir = storeCFSReader;
                }
                else
                {
                    storeDir = Directory();
                }
            }
            else
            {
                storeDir = cfsDir;
            }
        }
        else
            storeDir = null;
        fieldInfos = new FieldInfos(cfsDir, segment + ".fnm");
        bool anyProx = false;
        int numFields = fieldInfos.Size();
        for (int i = 0; !anyProx && i < numFields; i++)
            if (!fieldInfos.FieldInfo(i).omitTf)
                anyProx = true;
        System.String fieldsSegment;
        if (si.GetDocStoreOffset() != -1)
            fieldsSegment = si.GetDocStoreSegment();
        else
            fieldsSegment = segment;
        if (doOpenStores)
        {
            fieldsReader = new FieldsReader(storeDir, fieldsSegment, fieldInfos, readBufferSize, si.GetDocStoreOffset(), si.docCount);
            // Verify two sources of "maxDoc" agree:
            if (si.GetDocStoreOffset() == -1 && fieldsReader.Size() != si.docCount)
            {
                throw new CorruptIndexException("doc counts differ for segment " + si.name + ": fieldsReader shows " + fieldsReader.Size() + " but segmentInfo shows " + si.docCount);
            }
        }
        tis = new TermInfosReader(cfsDir, segment, fieldInfos, readBufferSize);
        LoadDeletedDocs();
        // make sure that all index files have been read or are kept open
        // so that if an index update removes them we'll still have them
        freqStream = cfsDir.OpenInput(segment + ".frq", readBufferSize);
        if (anyProx)
            proxStream = cfsDir.OpenInput(segment + ".prx", readBufferSize);
        OpenNorms(cfsDir, readBufferSize);
        if (doOpenStores && fieldInfos.HasVectors())
        {
            // open term vector files only as needed
            System.String vectorsSegment;
            if (si.GetDocStoreOffset() != -1)
                vectorsSegment = si.GetDocStoreSegment();
            else
                vectorsSegment = segment;
            termVectorsReaderOrig = new TermVectorsReader(storeDir, vectorsSegment, fieldInfos, readBufferSize, si.GetDocStoreOffset(), si.docCount);
        }
        success = true;
    }
    finally
    {
        // With lock-less commits, it's entirely possible (and
        // fine) to hit a FileNotFound exception above. In
        // this case, we want to explicitly close any subset
        // of things that were opened so that we don't have to
        // wait for a GC to do so.
        if (!success)
//......... remainder of this example omitted .........