This article collects typical usage examples of the C# class Lucene.Net.Index.SegmentInfo. If you have been wondering what SegmentInfo is for or how to use it, the curated examples below should help.
The SegmentInfo class belongs to the Lucene.Net.Index namespace. Fifteen code examples are shown below, ordered by popularity.
Example 1: SegmentReadState
/// <summary>
/// Create a <c>SegmentReadState</c>. </summary>
public SegmentReadState(Directory dir, SegmentInfo info, FieldInfos fieldInfos, IOContext context, int termsIndexDivisor, string segmentSuffix)
{
this.Directory = dir;
this.SegmentInfo = info;
this.FieldInfos = fieldInfos;
this.Context = context;
this.TermsIndexDivisor = termsIndexDivisor;
this.SegmentSuffix = segmentSuffix;
}
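For orientation, here is a minimal sketch of how a codec component might consume the state; the class name MyTermsReader and the "tmx" extension are invented for illustration and are not part of Lucene.Net.
using Lucene.Net.Index;

internal sealed class MyTermsReader
{
    private readonly string fileName;
    private readonly int docCount;

    public MyTermsReader(SegmentReadState state)
    {
        // The state bundles everything a format needs to locate its files:
        // the segment, a per-format suffix, field infos, and the IO context.
        docCount = state.SegmentInfo.DocCount;
        fileName = IndexFileNames.SegmentFileName(
            state.SegmentInfo.Name, state.SegmentSuffix, "tmx");
    }
}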
Example 2: CompressingTermVectorsReader
/// <summary>
/// Sole constructor. </summary>
public CompressingTermVectorsReader(Directory d, SegmentInfo si, string segmentSuffix, FieldInfos fn, IOContext context, string formatName, CompressionMode compressionMode)
{
this.compressionMode = compressionMode;
string segment = si.Name;
bool success = false;
fieldInfos = fn;
numDocs = si.DocCount;
ChecksumIndexInput indexStream = null;
try
{
// Load the index into memory
string indexStreamFN = IndexFileNames.SegmentFileName(segment, segmentSuffix, CompressingTermVectorsWriter.VECTORS_INDEX_EXTENSION);
indexStream = d.OpenChecksumInput(indexStreamFN, context);
string codecNameIdx = formatName + CompressingTermVectorsWriter.CODEC_SFX_IDX;
version = CodecUtil.CheckHeader(indexStream, codecNameIdx, CompressingTermVectorsWriter.VERSION_START, CompressingTermVectorsWriter.VERSION_CURRENT);
Debug.Assert(CodecUtil.HeaderLength(codecNameIdx) == indexStream.FilePointer);
indexReader = new CompressingStoredFieldsIndexReader(indexStream, si);
if (version >= CompressingTermVectorsWriter.VERSION_CHECKSUM)
{
indexStream.ReadVLong(); // the end of the data file
CodecUtil.CheckFooter(indexStream);
}
else
{
CodecUtil.CheckEOF(indexStream);
}
indexStream.Dispose();
indexStream = null;
// Open the data file and read metadata
string vectorsStreamFN = IndexFileNames.SegmentFileName(segment, segmentSuffix, CompressingTermVectorsWriter.VECTORS_EXTENSION);
vectorsStream = d.OpenInput(vectorsStreamFN, context);
string codecNameDat = formatName + CompressingTermVectorsWriter.CODEC_SFX_DAT;
int version2 = CodecUtil.CheckHeader(vectorsStream, codecNameDat, CompressingTermVectorsWriter.VERSION_START, CompressingTermVectorsWriter.VERSION_CURRENT);
if (version != version2)
{
throw new Exception("Version mismatch between stored fields index and data: " + version + " != " + version2);
}
Debug.Assert(CodecUtil.HeaderLength(codecNameDat) == vectorsStream.FilePointer);
packedIntsVersion = vectorsStream.ReadVInt();
chunkSize = vectorsStream.ReadVInt();
decompressor = compressionMode.NewDecompressor();
this.reader = new BlockPackedReaderIterator(vectorsStream, packedIntsVersion, CompressingTermVectorsWriter.BLOCK_SIZE, 0);
success = true;
}
finally
{
if (!success)
{
IOUtils.CloseWhileHandlingException(this, indexStream);
}
}
}
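The constructor above uses Lucene's "success flag" idiom: resources opened inside the try block are kept only if construction fully succeeds, and the finally block otherwise closes them without masking the original exception. Here is a minimal, self-contained sketch of the same idiom in plain C# (the class and file names are illustrative):
using System;
using System.IO;

public sealed class TwoFileReader : IDisposable
{
    private readonly FileStream index;
    private readonly FileStream data;

    public TwoFileReader(string indexPath, string dataPath)
    {
        bool success = false;
        FileStream idx = null, dat = null;
        try
        {
            idx = File.OpenRead(indexPath);
            dat = File.OpenRead(dataPath); // may throw; idx must then be closed
            index = idx;
            data = dat;
            success = true;
        }
        finally
        {
            if (!success)
            {
                // swallow secondary Dispose failures so the primary exception wins
                try { idx?.Dispose(); } catch { }
                try { dat?.Dispose(); } catch { }
            }
        }
    }

    public void Dispose()
    {
        index.Dispose();
        data.Dispose();
    }
}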
Example 3: CompressingStoredFieldsWriter
private int NumBufferedDocs; // docBase + numBufferedDocs == current doc ID
/// <summary>
/// Sole constructor. </summary>
public CompressingStoredFieldsWriter(Directory directory, SegmentInfo si, string segmentSuffix, IOContext context, string formatName, CompressionMode compressionMode, int chunkSize)
{
Debug.Assert(directory != null);
this.Directory = directory;
this.Segment = si.Name;
this.SegmentSuffix = segmentSuffix;
this.CompressionMode = compressionMode;
this.Compressor = compressionMode.NewCompressor();
this.ChunkSize = chunkSize;
this.DocBase = 0;
this.BufferedDocs = new GrowableByteArrayDataOutput(chunkSize);
this.NumStoredFields = new int[16];
this.EndOffsets = new int[16];
this.NumBufferedDocs = 0;
bool success = false;
IndexOutput indexStream = directory.CreateOutput(IndexFileNames.SegmentFileName(Segment, segmentSuffix, Lucene40StoredFieldsWriter.FIELDS_INDEX_EXTENSION), context);
try
{
FieldsStream = directory.CreateOutput(IndexFileNames.SegmentFileName(Segment, segmentSuffix, Lucene40StoredFieldsWriter.FIELDS_EXTENSION), context);
string codecNameIdx = formatName + CODEC_SFX_IDX;
string codecNameDat = formatName + CODEC_SFX_DAT;
CodecUtil.WriteHeader(indexStream, codecNameIdx, VERSION_CURRENT);
CodecUtil.WriteHeader(FieldsStream, codecNameDat, VERSION_CURRENT);
Debug.Assert(CodecUtil.HeaderLength(codecNameDat) == FieldsStream.FilePointer);
Debug.Assert(CodecUtil.HeaderLength(codecNameIdx) == indexStream.FilePointer);
IndexWriter = new CompressingStoredFieldsIndexWriter(indexStream);
indexStream = null;
FieldsStream.WriteVInt(chunkSize);
FieldsStream.WriteVInt(PackedInts.VERSION_CURRENT);
success = true;
}
finally
{
if (!success)
{
IOUtils.CloseWhileHandlingException(indexStream);
Abort();
}
}
}
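The fields initialized above set up chunked, compressed buffering: documents accumulate in BufferedDocs until roughly ChunkSize bytes are pending, then the whole chunk is compressed and written at once. A rough, self-contained sketch of that policy, using DeflateStream in place of Lucene's Compressor (all names here are illustrative):
using System.IO;
using System.IO.Compression;

public sealed class ChunkedWriter
{
    private readonly Stream output;
    private readonly MemoryStream buffered = new MemoryStream();
    private readonly int chunkSize;
    private int numBufferedDocs;

    public ChunkedWriter(Stream output, int chunkSize)
    {
        this.output = output;
        this.chunkSize = chunkSize;
    }

    public void AddDocument(byte[] storedFields)
    {
        buffered.Write(storedFields, 0, storedFields.Length);
        numBufferedDocs++;
        if (buffered.Length >= chunkSize)
            FlushChunk(); // compress many small docs together for a better ratio
    }

    private void FlushChunk()
    {
        using (var deflate = new DeflateStream(output, CompressionLevel.Fastest, leaveOpen: true))
        {
            buffered.WriteTo(deflate); // compress the whole chunk at once
        }
        buffered.SetLength(0);
        numBufferedDocs = 0;
    }
}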
Example 4: SegmentMerger
// Note: just as in the codec APIs, Directory 'dir' is NOT the same as segmentInfo.dir!
public SegmentMerger(IList<AtomicReader> readers, SegmentInfo segmentInfo, InfoStream infoStream, Directory dir, int termIndexInterval, MergeState.CheckAbort checkAbort, FieldInfos.FieldNumbers fieldNumbers, IOContext context, bool validate)
{
// validate incoming readers
if (validate)
{
foreach (AtomicReader reader in readers)
{
reader.CheckIntegrity();
}
}
MergeState = new MergeState(readers, segmentInfo, infoStream, checkAbort);
Directory = dir;
this.TermIndexInterval = termIndexInterval;
this.Codec = segmentInfo.Codec;
this.Context = context;
this.FieldInfosBuilder = new FieldInfos.Builder(fieldNumbers);
MergeState.SegmentInfo.DocCount = SetDocMaps();
}
Example 5: Read
public void Read(Directory directory)
{
IndexInput input = directory.OpenInput(IndexFileNames.SEGMENTS);
try
{
int format = input.ReadInt();
if (format < 0)
{
// file contains explicit format info
// check that it is a format we can understand
if (format < FORMAT)
throw new System.IO.IOException("Unknown format version: " + format);
version = input.ReadLong(); // read version
counter = input.ReadInt(); // read counter
}
else
{
// file is in old format without explicit format info
counter = format;
}
for (int i = input.ReadInt(); i > 0; i--)
{
// read segmentInfos
SegmentInfo si = new SegmentInfo(input.ReadString(), input.ReadInt(), directory);
Add(si);
}
if (format >= 0)
{
// in old format the version number may be at the end of the file
if (input.GetFilePointer() >= input.Length())
// old file format without a version number: stamp with the current time
// in milliseconds (Ticks are 100 ns units; 621355968000000000 is the
// tick count at the Unix epoch, 1970-01-01T00:00:00Z)
version = (System.DateTime.Now.Ticks - 621355968000000000) / 10000;
else
version = input.ReadLong(); // read version
}
}
finally
{
input.Close();
}
}
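The sign check on the first int is the interesting trick here: new-format files start with a negative format tag, while old files start directly with a non-negative counter, so the sign alone disambiguates the two layouts. A self-contained sketch of the same scheme (the constant, field layout, and little-endian BinaryReader are simplifications, not Lucene's actual encoding):
using System.IO;

public static class VersionedFile
{
    private const int Format = -1; // most recent format; more negative = newer

    public static (long version, int counter) Read(BinaryReader input)
    {
        int first = input.ReadInt32();
        if (first < 0)
        {
            // new format: negative tag, then explicit version and counter
            if (first < Format)
                throw new IOException("Unknown format version: " + first);
            return (input.ReadInt64(), input.ReadInt32());
        }
        // old format: the first int *is* the counter and there is no version
        return (0L, first);
    }
}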
Example 6: Reset
/// <summary> Copy everything from src SegmentInfo into our instance.</summary>
internal void Reset(SegmentInfo src)
{
name = src.name;
docCount = src.docCount;
dir = src.dir;
preLockless = src.preLockless;
delGen = src.delGen;
if (src.normGen == null)
{
normGen = null;
}
else
{
normGen = new long[src.normGen.Length];
Array.Copy(src.normGen, 0, normGen, 0, src.normGen.Length);
}
isCompoundFile = src.isCompoundFile;
hasSingleNormFile = src.hasSingleNormFile;
}
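Reset copies normGen element by element instead of assigning the reference, because a plain assignment would alias the source array and later mutations would leak between the two SegmentInfo instances. A tiny standalone demonstration of the difference:
using System;

class DeepCopyDemo
{
    static void Main()
    {
        long[] src = { 1, 2, 3 };
        long[] aliased = src;                    // both variables share one array
        long[] copy = new long[src.Length];
        Array.Copy(src, 0, copy, 0, src.Length); // independent snapshot
        src[0] = 99;
        Console.WriteLine(aliased[0]); // 99: the mutation is visible through the alias
        Console.WriteLine(copy[0]);    // 1:  the copy is unaffected
    }
}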
Example 7: Lucene41PostingsReader
// public static boolean DEBUG = false;
/// <summary>
/// Sole constructor. </summary>
public Lucene41PostingsReader(Directory dir, FieldInfos fieldInfos, SegmentInfo segmentInfo, IOContext ioContext, string segmentSuffix)
{
bool success = false;
IndexInput docIn = null;
IndexInput posIn = null;
IndexInput payIn = null;
try
{
docIn = dir.OpenInput(IndexFileNames.SegmentFileName(segmentInfo.Name, segmentSuffix, Lucene41PostingsFormat.DOC_EXTENSION), ioContext);
Version = CodecUtil.CheckHeader(docIn, Lucene41PostingsWriter.DOC_CODEC, Lucene41PostingsWriter.VERSION_START, Lucene41PostingsWriter.VERSION_CURRENT);
forUtil = new ForUtil(docIn);
if (fieldInfos.HasProx())
{
posIn = dir.OpenInput(IndexFileNames.SegmentFileName(segmentInfo.Name, segmentSuffix, Lucene41PostingsFormat.POS_EXTENSION), ioContext);
CodecUtil.CheckHeader(posIn, Lucene41PostingsWriter.POS_CODEC, Version, Version);
if (fieldInfos.HasPayloads() || fieldInfos.HasOffsets())
{
payIn = dir.OpenInput(IndexFileNames.SegmentFileName(segmentInfo.Name, segmentSuffix, Lucene41PostingsFormat.PAY_EXTENSION), ioContext);
CodecUtil.CheckHeader(payIn, Lucene41PostingsWriter.PAY_CODEC, Version, Version);
}
}
this.DocIn = docIn;
this.PosIn = posIn;
this.PayIn = payIn;
success = true;
}
finally
{
if (!success)
{
IOUtils.CloseWhileHandlingException(docIn, posIn, payIn);
}
}
}
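Note how the reader opens the .pos and .pay inputs only when the field schema allows them; an absent file is simply a null member, and the cleanup code tolerates nulls. The same shape in plain C#, with the success-flag error handling from Example 2 omitted for brevity (paths and names are illustrative):
using System;
using System.IO;

public sealed class PostingsFiles : IDisposable
{
    public FileStream Doc;
    public FileStream Pos; // null when no field stores positions
    public FileStream Pay; // null when no field stores payloads/offsets

    public PostingsFiles(string dir, bool hasProx, bool hasPayloadsOrOffsets)
    {
        Doc = File.OpenRead(Path.Combine(dir, "seg.doc"));
        if (hasProx)
        {
            Pos = File.OpenRead(Path.Combine(dir, "seg.pos"));
            if (hasPayloadsOrOffsets)
                Pay = File.OpenRead(Path.Combine(dir, "seg.pay"));
        }
    }

    public void Dispose()
    {
        Doc?.Dispose();
        Pos?.Dispose();
        Pay?.Dispose();
    }
}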
Example 8: SegmentCommitInfo
/// <summary>
/// Sole constructor.
/// </summary>
/// <param name="info">
/// <seealso cref="SegmentInfo"/> that we wrap </param>
/// <param name="delCount">
/// number of deleted documents in this segment </param>
/// <param name="delGen">
/// deletion generation number (used to name deletion files) </param>
/// <param name="fieldInfosGen">
/// FieldInfos generation number (used to name field-infos files)
/// </param>
public SegmentCommitInfo(SegmentInfo info, int delCount, long delGen, long fieldInfosGen)
{
this.Info = info;
this.DelCount_Renamed = delCount;
this.DelGen_Renamed = delGen;
if (delGen == -1)
{
NextWriteDelGen = 1;
}
else
{
NextWriteDelGen = delGen + 1;
}
this.FieldInfosGen_Renamed = fieldInfosGen;
if (fieldInfosGen == -1)
{
NextWriteFieldInfosGen = 1;
}
else
{
NextWriteFieldInfosGen = fieldInfosGen + 1;
}
}
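The same generation rule is applied twice above: -1 means no file of this kind has been written yet, in which case the next write gets generation 1; otherwise it gets current + 1. Isolated as a hypothetical helper (not a Lucene.Net API):
internal static class Generations
{
    // -1 encodes "no deletes/field-infos file written yet" for the segment
    public static long NextWriteGen(long currentGen)
    {
        return currentGen == -1 ? 1 : currentGen + 1;
    }
    // NextWriteGen(-1) == 1; NextWriteGen(3) == 4
}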
Example 9: Initialize
private void Initialize(SegmentInfo si)
{
segment = si.name;
this.si = si;
bool success = false;
try
{
// Use compound file directory for some files, if it exists
Directory cfsDir = Directory();
if (si.GetUseCompoundFile())
{
cfsReader = new CompoundFileReader(Directory(), segment + ".cfs");
cfsDir = cfsReader;
}
// No compound file exists - use the multi-file format
fieldInfos = new FieldInfos(cfsDir, segment + ".fnm");
fieldsReader = new FieldsReader(cfsDir, segment, fieldInfos);
// Verify two sources of "maxDoc" agree:
if (fieldsReader.Size() != si.docCount)
{
throw new System.SystemException("doc counts differ for segment " + si.name + ": fieldsReader shows " + fieldsReader.Size() + " but segmentInfo shows " + si.docCount);
}
tis = new TermInfosReader(cfsDir, segment, fieldInfos);
// NOTE: the bitvector is stored using the regular directory, not cfs
if (HasDeletions(si))
{
deletedDocs = new BitVector(Directory(), si.GetDelFileName());
// Verify # deletes does not exceed maxDoc for this segment:
if (deletedDocs.Count() > MaxDoc())
{
throw new System.SystemException("number of deletes (" + deletedDocs.Count() + ") exceeds max doc (" + MaxDoc() + ") for segment " + si.name);
}
}
// make sure that all index files have been read or are kept open
// so that if an index update removes them we'll still have them
freqStream = cfsDir.OpenInput(segment + ".frq");
proxStream = cfsDir.OpenInput(segment + ".prx");
OpenNorms(cfsDir);
if (fieldInfos.HasVectors())
{
// open term vector files only as needed
termVectorsReaderOrig = new TermVectorsReader(cfsDir, segment, fieldInfos);
}
success = true;
}
finally
{
// With lock-less commits, it's entirely possible (and
// fine) to hit a FileNotFound exception above. In
// this case, we want to explicitly close any subset
// of things that were opened so that we don't have to
// wait for a GC to do so.
if (!success)
{
DoClose();
}
}
}
Example 10: Get
public static SegmentReader Get(Directory dir, SegmentInfo si, SegmentInfos sis, bool closeDir, bool ownDir)
{
SegmentReader instance;
try
{
instance = (SegmentReader) System.Activator.CreateInstance(IMPL);
}
catch (System.Exception e)
{
throw new System.SystemException("cannot load SegmentReader class: " + e, e);
}
instance.Init(dir, sis, closeDir, ownDir);
instance.Initialize(si);
return instance;
}
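Get instantiates a configurable SegmentReader implementation through reflection, so the concrete class can be swapped without changing callers. The pattern in isolation, as a hypothetical generic helper (impl plays the role of IMPL, a Type chosen by configuration):
using System;

internal static class ReaderFactory
{
    public static T Create<T>(Type impl) where T : class
    {
        try
        {
            return (T)Activator.CreateInstance(impl);
        }
        catch (Exception e)
        {
            // wrap with context, preserving the original exception as the cause
            throw new InvalidOperationException(
                "cannot load " + typeof(T).Name + " class: " + e, e);
        }
    }
}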
Example 11: Get
public static SegmentReader Get(SegmentInfo si)
{
return Get(false, si.dir, si, BufferedIndexInput.BUFFER_SIZE, true, IndexReader.DEFAULT_TERMS_INDEX_DIVISOR);
}
Example 12: HasSeparateNorms
internal static bool HasSeparateNorms(SegmentInfo si)
{
return si.HasSeparateNorms();
}
Example 13: TestStoredFields
/// <summary> Test stored fields for a segment.</summary>
private Status.StoredFieldStatus TestStoredFields(SegmentInfo info, SegmentReader reader, System.Globalization.NumberFormatInfo format)
{
var status = new Status.StoredFieldStatus();
try
{
if (infoStream != null)
{
infoStream.Write(" test: stored fields.......");
}
// Scan stored fields for all documents
for (int j = 0; j < info.docCount; ++j)
{
if (!reader.IsDeleted(j))
{
status.docCount++;
Document doc = reader.Document(j);
status.totFields += doc.GetFields().Count;
}
}
// Validate docCount
if (status.docCount != reader.NumDocs())
{
throw new System.SystemException("docCount=" + status.docCount + " but saw " + status.docCount + " undeleted docs");
}
Msg(string.Format(format, "OK [{0:d} total field count; avg {1:f} fields per doc]", new object[] { status.totFields, (((float) status.totFields) / status.docCount) }));
}
catch (System.Exception e)
{
Msg("ERROR [" + System.Convert.ToString(e.Message) + "]");
status.error = e;
if (infoStream != null)
{
infoStream.WriteLine(e.StackTrace);
}
}
return status;
}
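Note the error-handling shape: an inconsistency throws inside the try, but the catch converts any failure into status.error, so CheckIndex can keep testing the rest of the segment and report all problems at once. A stripped-down, self-contained version of that contract (names are ours, not Lucene.Net's):
using System;

internal static class Checks
{
    internal sealed class StepStatus
    {
        public int ItemsChecked;
        public Exception Error; // null means the step passed
    }

    public static StepStatus RunStep(Action step, int items)
    {
        var status = new StepStatus();
        try
        {
            step();
            status.ItemsChecked = items;
        }
        catch (Exception e)
        {
            status.Error = e; // recorded for the report, not rethrown
        }
        return status;
    }
}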
Example 14: ReWrite
internal void ReWrite(SegmentInfo si)
{
// NOTE: norms are re-written in regular directory, not cfs
System.String oldFileName = si.GetNormFileName(this.number);
if (oldFileName != null && !oldFileName.EndsWith("." + IndexFileNames.NORMS_EXTENSION))
{
// Mark this file for deletion. Note that we don't
// actually try to delete it until the new segments files is
// successfully written:
Enclosing_Instance.deleter.AddPendingFile(oldFileName);
}
si.AdvanceNormGen(this.number);
IndexOutput out_Renamed = Enclosing_Instance.Directory().CreateOutput(si.GetNormFileName(this.number));
try
{
out_Renamed.WriteBytes(bytes, Enclosing_Instance.MaxDoc());
}
finally
{
out_Renamed.Close();
}
this.dirty = false;
}
Example 15: Flush
/// <summary>Flush all pending docs to a new segment </summary>
internal int Flush(bool closeDocStore)
{
lock (this)
{
System.Diagnostics.Debug.Assert(AllThreadsIdle());
System.Diagnostics.Debug.Assert(numDocsInRAM > 0);
System.Diagnostics.Debug.Assert(nextDocID == numDocsInRAM);
System.Diagnostics.Debug.Assert(waitQueue.numWaiting == 0);
System.Diagnostics.Debug.Assert(waitQueue.waitingBytes == 0);
InitFlushState(false);
docStoreOffset = numDocsInStore;
if (infoStream != null)
Message("flush postings as segment " + flushState.segmentName + " numDocs=" + numDocsInRAM);
bool success = false;
try
{
if (closeDocStore)
{
System.Diagnostics.Debug.Assert(flushState.docStoreSegmentName != null);
System.Diagnostics.Debug.Assert(flushState.docStoreSegmentName.Equals(flushState.segmentName));
CloseDocStore();
flushState.numDocsInStore = 0;
}
System.Collections.Hashtable threads = new System.Collections.Hashtable();
for (int i = 0; i < threadStates.Length; i++)
threads[threadStates[i].consumer] = threadStates[i].consumer;
consumer.Flush(threads, flushState);
if (infoStream != null)
{
SegmentInfo si = new SegmentInfo(flushState.segmentName, flushState.numDocs, directory);
long newSegmentSize = si.SizeInBytes();
// the ratio argument is already multiplied by 100 below, so format it as a
// plain fixed-point number; the '%' custom specifier would scale it again
System.String message = System.String.Format(nf, " oldRAMSize={0:d} newFlushedSize={1:d} docs/MB={2:f} new/old={3:f}%",
new System.Object[] { numBytesUsed, newSegmentSize, (numDocsInRAM / (newSegmentSize / 1024.0 / 1024.0)), (100.0 * newSegmentSize / numBytesUsed) });
Message(message);
}
flushedDocCount += flushState.numDocs;
DoAfterFlush();
success = true;
}
finally
{
if (!success)
{
Abort();
}
}
System.Diagnostics.Debug.Assert(waitQueue.waitingBytes == 0);
return flushState.numDocs;
}
}
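The flush path leans on Debug.Assert for its invariants (all threads idle, dense doc IDs, an empty wait queue). Such checks compile away in Release builds, so they document and enforce the contract during development at no production cost. A minimal illustration, with names of our own choosing rather than Lucene.Net's:
using System.Diagnostics;

internal static class FlushSketch
{
    public static int FlushBuffered(int numDocsInRam, int nextDocId)
    {
        Debug.Assert(numDocsInRam > 0, "flush called with no buffered docs");
        Debug.Assert(nextDocId == numDocsInRam, "doc IDs must be dense before flushing");
        // ... write the segment files here ...
        return numDocsInRam; // number of docs flushed
    }
}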