本文整理汇总了C#中Lucene.Net.Store.IndexInput.Length方法的典型用法代码示例。如果您正苦于以下问题:C# Lucene.Net.Store.IndexInput.Length方法的具体用法?C# Lucene.Net.Store.IndexInput.Length怎么用?C# Lucene.Net.Store.IndexInput.Length使用的例子?那么,这里精选的方法代码示例或许可以为您提供帮助。您也可以进一步了解该方法所在类Lucene.Net.Store.IndexInput的用法示例。
在下文中一共展示了Lucene.Net.Store.IndexInput.Length方法的11个代码示例,这些例子默认根据受欢迎程度排序。您可以为喜欢或者感觉有用的代码点赞,您的评价将有助于系统推荐出更棒的C#代码示例。
示例1: TermVectorsReader
// Opens the per-segment term vector files (.tvx index, .tvd documents,
// .tvf fields) and validates each file's format byte.  When docStoreOffset
// is -1 the segment owns its vectors and the doc count is derived from the
// .tvx file length; otherwise this reader exposes a slice of a shared doc
// store starting at docStoreOffset.  On any failure every stream opened so
// far is closed before the exception propagates.
internal TermVectorsReader(Directory d, System.String segment, FieldInfos fieldInfos, int readBufferSize, int docStoreOffset, int size)
{
bool success = false;
try
{
// The .tvx file may legitimately be missing (no vectors stored for this
// segment); in that case all three streams stay null.
if (d.FileExists(segment + "." + IndexFileNames.VECTORS_INDEX_EXTENSION))
{
tvx = d.OpenInput(segment + "." + IndexFileNames.VECTORS_INDEX_EXTENSION, readBufferSize);
CheckValidFormat(tvx);
tvd = d.OpenInput(segment + "." + IndexFileNames.VECTORS_DOCUMENTS_EXTENSION, readBufferSize);
tvdFormat = CheckValidFormat(tvd);
tvf = d.OpenInput(segment + "." + IndexFileNames.VECTORS_FIELDS_EXTENSION, readBufferSize);
tvfFormat = CheckValidFormat(tvf);
if (- 1 == docStoreOffset)
{
this.docStoreOffset = 0;
// size = number of 8-byte entries in .tvx (length >> 3) — presumably
// one entry per document; confirm against the term vectors writer.
this.size = (int) (tvx.Length() >> 3);
}
else
{
this.docStoreOffset = docStoreOffset;
this.size = size;
// Verify the file is long enough to hold all of our
// docs
System.Diagnostics.Debug.Assert(((int) (tvx.Length() / 8)) >= size + docStoreOffset);
}
}
this.fieldInfos = fieldInfos;
success = true;
}
finally
{
// With lock-less commits, it's entirely possible (and
// fine) to hit a FileNotFound exception above. In
// this case, we want to explicitly close any subset
// of things that were opened so that we don't have to
// wait for a GC to do so.
if (!success)
{
Close();
}
}
}
示例2: FieldsReader
/*internal*/
// Opens the stored-fields data (.fdt) and index (.fdx) streams for a
// segment.  size is derived as the .fdx length divided by 8 — presumably
// one 8-byte pointer per document; confirm against FieldsWriter.
public FieldsReader(Directory d, System.String segment, FieldInfos fn)
{
fieldInfos = fn;
fieldsStream = d.OpenInput(segment + ".fdt");
indexStream = d.OpenInput(segment + ".fdx");
size = (int) (indexStream.Length() / 8);
}
示例3: TermVectorsReader
/*internal*/
// Opens the term vector files for a segment when present.  If the .tvx
// index file does not exist the reader is left empty (streams null, size
// unset).  The format byte of all three files is validated.
public TermVectorsReader(Directory d, System.String segment, FieldInfos fieldInfos)
{
if (d.FileExists(segment + TermVectorsWriter.TVX_EXTENSION))
{
tvx = d.OpenInput(segment + TermVectorsWriter.TVX_EXTENSION);
CheckValidFormat(tvx);
tvd = d.OpenInput(segment + TermVectorsWriter.TVD_EXTENSION);
tvdFormat = CheckValidFormat(tvd);
tvf = d.OpenInput(segment + TermVectorsWriter.TVF_EXTENSION);
tvfFormat = CheckValidFormat(tvf);
// NOTE(review): the cast binds before the division here — (int)Length() / 8 —
// unlike the other readers, which divide first.  Harmless until the file
// length exceeds int range; verify intent.
size = (int) tvx.Length() / 8;
}
this.fieldInfos = fieldInfos;
}
示例4: Lucene40StoredFieldsReader
/// <summary>
/// Sole constructor.  Opens the stored-fields data (.fdt) and index (.fdx)
/// files for the segment, validates both codec headers, and derives the
/// document count from the index file length (8 bytes per entry after the
/// header).  Throws CorruptIndexException if that count disagrees with the
/// segment info.  On failure, everything opened so far is disposed.
/// </summary>
public Lucene40StoredFieldsReader(Directory d, SegmentInfo si, FieldInfos fn, IOContext context)
{
string segment = si.Name;
bool success = false;
FieldInfos = fn;
try
{
FieldsStream = d.OpenInput(IndexFileNames.SegmentFileName(segment, "", Lucene40StoredFieldsWriter.FIELDS_EXTENSION), context);
string indexStreamFN = IndexFileNames.SegmentFileName(segment, "", Lucene40StoredFieldsWriter.FIELDS_INDEX_EXTENSION);
IndexStream = d.OpenInput(indexStreamFN, context);
CodecUtil.CheckHeader(IndexStream, Lucene40StoredFieldsWriter.CODEC_NAME_IDX, Lucene40StoredFieldsWriter.VERSION_START, Lucene40StoredFieldsWriter.VERSION_CURRENT);
CodecUtil.CheckHeader(FieldsStream, Lucene40StoredFieldsWriter.CODEC_NAME_DAT, Lucene40StoredFieldsWriter.VERSION_START, Lucene40StoredFieldsWriter.VERSION_CURRENT);
// CheckHeader should leave both streams positioned right after the header.
Debug.Assert(Lucene40StoredFieldsWriter.HEADER_LENGTH_DAT == FieldsStream.FilePointer);
Debug.Assert(Lucene40StoredFieldsWriter.HEADER_LENGTH_IDX == IndexStream.FilePointer);
// Payload after the header: one 8-byte entry per document (hence >> 3).
long indexSize = IndexStream.Length() - Lucene40StoredFieldsWriter.HEADER_LENGTH_IDX;
this.Size_Renamed = (int)(indexSize >> 3);
// Verify two sources of "maxDoc" agree:
if (this.Size_Renamed != si.DocCount)
{
throw new CorruptIndexException("doc counts differ for segment " + segment + ": fieldsReader shows " + this.Size_Renamed + " but segmentInfo shows " + si.DocCount);
}
NumTotalDocs = (int)(indexSize >> 3);
success = true;
}
finally
{
// With lock-less commits, it's entirely possible (and
// fine) to hit a FileNotFound exception above. In
// this case, we want to explicitly close any subset
// of things that were opened so that we don't have to
// wait for a GC to do so.
if (!success)
{
try
{
Dispose();
} // ensure we throw our original exception
catch (Exception)
{
// Deliberately swallowed: a secondary failure from Dispose must not
// mask the exception that caused the cleanup in the first place.
}
}
}
}
示例5: FieldsReader
// Opens the stored-fields data (.fdt) and index (.fdx) streams, detects the
// on-disk format version, and derives the document count from the index
// length (minus the format header, 8 bytes per entry).  A docStoreOffset
// other than -1 means this segment reads a slice of a shared fields file.
// Cloneable master streams are kept so per-thread clones can be handed out.
// On any failure everything opened so far is disposed.
internal FieldsReader(Directory d, System.String segment, FieldInfos fn, int readBufferSize, int docStoreOffset, int size)
{
bool success = false;
isOriginal = true;
try
{
fieldInfos = fn;
cloneableFieldsStream = d.OpenInput(segment + "." + IndexFileNames.FIELDS_EXTENSION, readBufferSize);
cloneableIndexStream = d.OpenInput(segment + "." + IndexFileNames.FIELDS_INDEX_EXTENSION, readBufferSize);
// First version of fdx did not include a format
// header, but, the first int will always be 0 in that
// case
int firstInt = cloneableIndexStream.ReadInt();
// NOTE(review): this ternary is redundant — both branches yield firstInt.
format = firstInt == 0 ? 0 : firstInt;
if (format > FieldsWriter.FORMAT_CURRENT)
throw new CorruptIndexException("Incompatible format version: " + format + " expected " + FieldsWriter.FORMAT_CURRENT + " or lower");
// Newer formats spend 4 bytes on the header read above; the pre-format
// layout has no header, so those 4 bytes are real data.
formatSize = format > FieldsWriter.FORMAT ? 4 : 0;
if (format < FieldsWriter.FORMAT_VERSION_UTF8_LENGTH_IN_BYTES)
cloneableFieldsStream.SetModifiedUTF8StringsMode();
fieldsStream = (IndexInput) cloneableFieldsStream.Clone();
long indexSize = cloneableIndexStream.Length() - formatSize;
if (docStoreOffset != - 1)
{
// We read only a slice out of this shared fields file
this.docStoreOffset = docStoreOffset;
this.size = size;
// Verify the file is long enough to hold all of our
// docs
System.Diagnostics.Debug.Assert(((int)(indexSize / 8)) >= size + this.docStoreOffset, "indexSize=" + indexSize + " size=" + size + " docStoreOffset=" + docStoreOffset);
}
else
{
this.docStoreOffset = 0;
this.size = (int) (indexSize >> 3);
}
indexStream = (IndexInput) cloneableIndexStream.Clone();
numTotalDocs = (int) (indexSize >> 3);
success = true;
}
finally
{
// With lock-less commits, it's entirely possible (and
// fine) to hit a FileNotFound exception above. In
// this case, we want to explicitly close any subset
// of things that were opened so that we don't have to
// wait for a GC to do so.
if (!success)
{
Dispose();
}
}
}
示例6: Read
// Reads a FieldInfos table from the given stream.  The pre-format layout
// (FORMAT_PRE) stores the entry count directly as the first vint; newer
// files store a negative format id first, followed by the count.  Each
// entry is an interned field name plus one byte of bit-packed flags.
// Throws CorruptIndexException for an unrecognized format or if any bytes
// remain unread at the end (fileName is used only for the error message).
private void Read(IndexInput input, System.String fileName)
{
    int firstInt = input.ReadVInt();
    if (firstInt < 0)
    {
        // This is a real format
        format = firstInt;
    }
    else
    {
        // Pre-format files start directly with the (non-negative) size.
        format = FORMAT_PRE;
    }

    // FIX: use the short-circuit '&&' rather than the non-short-circuit '&';
    // for bool operands the result is identical, but '&&' is the idiomatic form.
    if (format != FORMAT_PRE && format != FORMAT_START)
    {
        throw new CorruptIndexException("unrecognized format " + format + " in file \"" + fileName + "\"");
    }

    int size;
    if (format == FORMAT_PRE)
    {
        size = firstInt;
    }
    else
    {
        size = input.ReadVInt(); //read in the size
    }

    for (int i = 0; i < size; i++)
    {
        System.String name = StringHelper.Intern(input.ReadString());
        // Unpack the per-field option flags from the single bits byte.
        byte bits = input.ReadByte();
        bool isIndexed = (bits & IS_INDEXED) != 0;
        bool storeTermVector = (bits & STORE_TERMVECTOR) != 0;
        bool storePositionsWithTermVector = (bits & STORE_POSITIONS_WITH_TERMVECTOR) != 0;
        bool storeOffsetWithTermVector = (bits & STORE_OFFSET_WITH_TERMVECTOR) != 0;
        bool omitNorms = (bits & OMIT_NORMS) != 0;
        bool storePayloads = (bits & STORE_PAYLOADS) != 0;
        bool omitTermFreqAndPositions = (bits & OMIT_TERM_FREQ_AND_POSITIONS) != 0;
        AddInternal(name, isIndexed, storeTermVector, storePositionsWithTermVector, storeOffsetWithTermVector, omitNorms, storePayloads, omitTermFreqAndPositions);
    }

    // Trailing bytes mean the file is corrupt (or from an unknown newer layout).
    if (input.GetFilePointer() != input.Length())
    {
        throw new CorruptIndexException("did not read all bytes from file \"" + fileName + "\": read " + input.GetFilePointer() + " vs size " + input.Length());
    }
}
示例7: AssertSameSeekBehavior
// Drives AssertSameStreams at a fixed set of interesting seek targets:
// the start, the middle, just before the end, the end itself, and one
// past the end.  Both streams are probed at identical offsets.
private void AssertSameSeekBehavior(System.String msg, IndexInput expected, IndexInput actual)
{
    long length = expected.Length();

    System.String[] labels = new System.String[]
    {
        ", seek(0)", ", seek(mid)", ", seek(end-2)", ", seek(end-1)", ", seek(end)", ", seek(end+1)"
    };
    long[] targets = new long[]
    {
        0, length / 2L, length - 2, length - 1, length, length + 1
    };

    for (int i = 0; i < targets.Length; i++)
    {
        AssertSameStreams(msg + labels[i], expected, actual, targets[i]);
    }
}
示例8: AssertSameStreams
// Seeks both streams to the same offset and compares their remaining
// contents.  Out-of-range targets (negative, or at/past the end) are
// silently ignored — only valid offsets are compared.
private void AssertSameStreams(System.String msg, IndexInput expected, IndexInput actual, long seekTo)
{
    if (seekTo < 0 || seekTo >= expected.Length())
    {
        return;
    }

    expected.Seek(seekTo);
    actual.Seek(seekTo);
    AssertSameStreams(msg + ", seek(mid)", expected, actual);
}
示例9: CompoundFileReader
// Opens a compound file and reads its table of contents: a vint entry
// count followed by (offset, id) pairs.  Only offsets are stored on disk;
// each entry's length is reconstructed as the distance to the next entry's
// offset, and the final entry extends to the end of the file.  On failure
// the underlying stream is closed (secondary IO errors are swallowed so
// the original exception propagates).
public CompoundFileReader(Directory dir, System.String name)
{
directory = dir;
fileName = name;
bool success = false;
try
{
stream = dir.OpenInput(name);
// read the directory and init files
int count = stream.ReadVInt();
FileEntry entry = null;
for (int i = 0; i < count; i++)
{
long offset = stream.ReadLong();
System.String id = stream.ReadString();
if (entry != null)
{
// set length of the previous entry
entry.length = offset - entry.offset;
}
entry = new FileEntry();
entry.offset = offset;
entries[id] = entry;
}
// set the length of the final entry
if (entry != null)
{
entry.length = stream.Length() - entry.offset;
}
success = true;
}
finally
{
if (!success && (stream != null))
{
try
{
stream.Close();
}
catch (System.IO.IOException)
{
// Deliberately ignored: closing is best-effort cleanup and must not
// mask the exception that triggered it.
}
}
}
}
示例10: TermVectorsReader
// Opens the per-segment term vector files (.tvx/.tvd/.tvf), asserts all
// three carry the same format version, and derives the total doc count
// from the .tvx length: 16 bytes per entry for FORMAT_VERSION2 and newer,
// 8 bytes for older formats (after the FORMAT_SIZE header).  Missing files
// are tolerated (format = 0) because a segment whose flushed docs all hit
// non-aborting exceptions may report hasVectors yet have no vector files.
internal TermVectorsReader(Directory d, System.String segment, FieldInfos fieldInfos, int readBufferSize, int docStoreOffset, int size)
{
bool success = false;
try
{
if (d.FileExists(segment + "." + IndexFileNames.VECTORS_INDEX_EXTENSION))
{
tvx = d.OpenInput(segment + "." + IndexFileNames.VECTORS_INDEX_EXTENSION, readBufferSize);
format = CheckValidFormat(tvx);
tvd = d.OpenInput(segment + "." + IndexFileNames.VECTORS_DOCUMENTS_EXTENSION, readBufferSize);
int tvdFormat = CheckValidFormat(tvd);
tvf = d.OpenInput(segment + "." + IndexFileNames.VECTORS_FIELDS_EXTENSION, readBufferSize);
int tvfFormat = CheckValidFormat(tvf);
// All three files must have been written by the same format version.
System.Diagnostics.Debug.Assert(format == tvdFormat);
System.Diagnostics.Debug.Assert(format == tvfFormat);
if (format >= FORMAT_VERSION2)
{
// 16-byte entries: payload after the header must be a multiple of 16.
System.Diagnostics.Debug.Assert((tvx.Length() - FORMAT_SIZE) % 16 == 0);
numTotalDocs = (int)(tvx.Length() >> 4);
}
else
{
// Older formats use 8-byte entries.
System.Diagnostics.Debug.Assert((tvx.Length() - FORMAT_SIZE) % 8 == 0);
numTotalDocs = (int)(tvx.Length() >> 3);
}
if (-1 == docStoreOffset)
{
// This segment owns its vectors: expose all docs starting at 0.
this.docStoreOffset = 0;
this.size = numTotalDocs;
System.Diagnostics.Debug.Assert(size == 0 || numTotalDocs == size);
}
else
{
this.docStoreOffset = docStoreOffset;
this.size = size;
// Verify the file is long enough to hold all of our
// docs
System.Diagnostics.Debug.Assert(numTotalDocs >= size + docStoreOffset, "numTotalDocs=" + numTotalDocs + " size=" + size + " docStoreOffset=" + docStoreOffset);
}
}
else
{
// If all documents flushed in a segment had hit
// non-aborting exceptions, it's possible that
// FieldInfos.hasVectors returns true yet the term
// vector files don't exist.
format = 0;
}
this.fieldInfos = fieldInfos;
success = true;
}
finally
{
// With lock-less commits, it's entirely possible (and
// fine) to hit a FileNotFound exception above. In
// this case, we want to explicitly close any subset
// of things that were opened so that we don't have to
// wait for a GC to do so.
if (!success)
{
Close();
}
}
}
示例11: FieldsReader
// Opens the stored-fields data (.fdt) and index (.fdx) streams and derives
// doc counts from the .fdx length (8 bytes per entry; no format header in
// this older layout).  A docStoreOffset other than -1 means this segment
// reads a slice of a shared fields file.  On failure everything opened so
// far is closed before the exception propagates.
internal FieldsReader(Directory d, System.String segment, FieldInfos fn, int readBufferSize, int docStoreOffset, int size)
{
bool success = false;
try
{
fieldInfos = fn;
cloneableFieldsStream = d.OpenInput(segment + ".fdt", readBufferSize);
// Keep a cloneable master stream; hand out a clone for actual reads.
fieldsStream = (IndexInput) cloneableFieldsStream.Clone();
indexStream = d.OpenInput(segment + ".fdx", readBufferSize);
if (docStoreOffset != - 1)
{
// We read only a slice out of this shared fields file
this.docStoreOffset = docStoreOffset;
this.size = size;
// Verify the file is long enough to hold all of our
// docs
System.Diagnostics.Debug.Assert(((int)(indexStream.Length() / 8)) >= size + this.docStoreOffset);
}
else
{
this.docStoreOffset = 0;
this.size = (int) (indexStream.Length() >> 3);
}
numTotalDocs = (int) (indexStream.Length() >> 3);
success = true;
}
finally
{
// With lock-less commits, it's entirely possible (and
// fine) to hit a FileNotFound exception above. In
// this case, we want to explicitly close any subset
// of things that were opened so that we don't have to
// wait for a GC to do so.
if (!success)
{
Close();
}
}
}