本文整理汇总了C#中Directory.OpenInput方法的典型用法代码示例。如果您正苦于以下问题:C# Directory.OpenInput方法的具体用法?C# Directory.OpenInput怎么用?C# Directory.OpenInput使用的例子?那么恭喜您,这里精选的方法代码示例或许可以为您提供帮助。您也可以进一步了解该方法所在类Directory的用法示例。
在下文中一共展示了Directory.OpenInput方法的15个代码示例,这些例子默认根据受欢迎程度排序。您可以为喜欢或者感觉有用的代码点赞,您的评价将有助于系统推荐出更棒的C#代码示例。
示例1: SepPostingsReader
/// <summary>
/// Sole constructor: opens the "separate file" postings streams for a segment.
/// The doc stream (via <paramref name="intFactory"/>) and the skip stream are
/// always opened; the freq stream only when some field stores frequencies, and
/// the pos/payload streams only when some field stores positions. If any open
/// fails, whatever was already opened is released via Dispose() before the
/// exception propagates.
/// </summary>
public SepPostingsReader(Directory dir, FieldInfos fieldInfos, SegmentInfo segmentInfo, IOContext context,
    IntStreamFactory intFactory, string segmentSuffix)
{
    var success = false;
    try
    {
        var docFileName = IndexFileNames.SegmentFileName(segmentInfo.Name, segmentSuffix,
            SepPostingsWriter.DOC_EXTENSION);
        _docIn = intFactory.OpenInput(dir, docFileName, context);

        _skipIn =
            dir.OpenInput(
                IndexFileNames.SegmentFileName(segmentInfo.Name, segmentSuffix, SepPostingsWriter.SKIP_EXTENSION),
                context);

        if (fieldInfos.HasFreq())
        {
            _freqIn = intFactory.OpenInput(dir,
                IndexFileNames.SegmentFileName(segmentInfo.Name, segmentSuffix, SepPostingsWriter.FREQ_EXTENSION),
                context);
        }
        else
        {
            // No field stores term frequencies, so there is no freq stream to open.
            _freqIn = null;
        }

        if (fieldInfos.HasProx())
        {
            _posIn = intFactory.OpenInput(dir,
                IndexFileNames.SegmentFileName(segmentInfo.Name, segmentSuffix, SepPostingsWriter.POS_EXTENSION),
                context);
            _payloadIn =
                dir.OpenInput(
                    IndexFileNames.SegmentFileName(segmentInfo.Name, segmentSuffix,
                        SepPostingsWriter.PAYLOAD_EXTENSION), context);
        }
        else
        {
            // No field stores positions, so neither pos nor payload streams exist.
            _posIn = null;
            _payloadIn = null;
        }
        success = true;
    }
    finally
    {
        // On failure, close any inputs opened so far instead of waiting for GC;
        // the original exception still propagates.
        if (!success)
        {
            Dispose();
        }
    }
}
示例2: VariableGapTermsIndexReader
/// <summary>
/// Sole constructor: opens the variable-gap terms index file and reads its
/// field directory (field number -> index start offset) into _fields.
/// indexDivisor == -1 means "do not load the terms index"; otherwise it must
/// be positive (asserted below).
/// </summary>
public VariableGapTermsIndexReader(Directory dir, FieldInfos fieldInfos, String segment, int indexDivisor,
    String segmentSuffix, IOContext context)
{
    _input =
        dir.OpenInput(
            IndexFileNames.SegmentFileName(segment, segmentSuffix,
                VariableGapTermsIndexWriter.TERMS_INDEX_EXTENSION), new IOContext(context, true));
    var success = false;
    Debug.Assert(indexDivisor == -1 || indexDivisor > 0);
    try
    {
        _version = ReadHeader(_input);
        _indexDivisor = indexDivisor;

        // Newer index versions carry a checksum footer; verify the whole file up front.
        if (_version >= VariableGapTermsIndexWriter.VERSION_CHECKSUM)
            CodecUtil.ChecksumEntireFile(_input);

        SeekDir(_input, _dirOffset);

        // Read directory
        var numFields = _input.ReadVInt();
        if (numFields < 0)
        {
            throw new CorruptIndexException("invalid numFields: " + numFields + " (resource=" + _input + ")");
        }

        for (var i = 0; i < numFields; i++)
        {
            var field = _input.ReadVInt();
            var indexStart = _input.ReadVLong();
            var fieldInfo = fieldInfos.FieldInfo(field);
            try
            {
                _fields.Add(fieldInfo, new FieldIndexData(indexStart, this));
            }
            catch (ArgumentException)
            {
                // Dictionary.Add throws on a duplicate key -> the directory is corrupt.
                throw new CorruptIndexException(String.Format("Duplicate Field: {0}, Resource: {1}",
                    fieldInfo.Name, _input));
            }
        }
        success = true;
    }
    finally
    {
        // When indexDivisor > 0 the input is disposed on BOTH success and failure —
        // presumably the index data was already consumed eagerly by FieldIndexData,
        // so the file is no longer needed after construction; TODO confirm.
        if (indexDivisor > 0)
        {
            _input.Dispose();
            _input = null;
            if (success)
            {
                _indexLoaded = true;
            }
        }
    }
}
示例3: Lucene40StoredFieldsReader
/// <summary>
/// Sole constructor. Opens the stored-fields data and index files, validates
/// both codec headers, and derives the segment's document count from the
/// index file size (8 bytes per document). Throws CorruptIndexException when
/// that count disagrees with the SegmentInfo's DocCount.
/// </summary>
public Lucene40StoredFieldsReader(Directory d, SegmentInfo si, FieldInfos fn, IOContext context)
{
    string segment = si.Name;
    bool success = false;
    FieldInfos = fn;
    try
    {
        FieldsStream = d.OpenInput(IndexFileNames.SegmentFileName(segment, "", Lucene40StoredFieldsWriter.FIELDS_EXTENSION), context);
        string indexStreamFN = IndexFileNames.SegmentFileName(segment, "", Lucene40StoredFieldsWriter.FIELDS_INDEX_EXTENSION);
        IndexStream = d.OpenInput(indexStreamFN, context);

        CodecUtil.CheckHeader(IndexStream, Lucene40StoredFieldsWriter.CODEC_NAME_IDX, Lucene40StoredFieldsWriter.VERSION_START, Lucene40StoredFieldsWriter.VERSION_CURRENT);
        CodecUtil.CheckHeader(FieldsStream, Lucene40StoredFieldsWriter.CODEC_NAME_DAT, Lucene40StoredFieldsWriter.VERSION_START, Lucene40StoredFieldsWriter.VERSION_CURRENT);
        Debug.Assert(Lucene40StoredFieldsWriter.HEADER_LENGTH_DAT == FieldsStream.FilePointer);
        Debug.Assert(Lucene40StoredFieldsWriter.HEADER_LENGTH_IDX == IndexStream.FilePointer);

        // Each document occupies one 8-byte entry in the index file, hence the >> 3.
        long indexSize = IndexStream.Length() - Lucene40StoredFieldsWriter.HEADER_LENGTH_IDX;
        this.Size_Renamed = (int)(indexSize >> 3);
        // Verify two sources of "maxDoc" agree:
        if (this.Size_Renamed != si.DocCount)
        {
            throw new CorruptIndexException("doc counts differ for segment " + segment + ": fieldsReader shows " + this.Size_Renamed + " but segmentInfo shows " + si.DocCount);
        }
        NumTotalDocs = (int)(indexSize >> 3);
        success = true;
    }
    finally
    {
        // With lock-less commits, it's entirely possible (and
        // fine) to hit a FileNotFound exception above. In
        // this case, we want to explicitly close any subset
        // of things that were opened so that we don't have to
        // wait for a GC to do so.
        if (!success)
        {
            try
            {
                Dispose();
            }
            catch (Exception)
            {
                // Swallow secondary failures: ensure we throw our original exception.
            }
        }
    }
}
示例4: Read
/// <summary>
/// Reads a single Lucene 4.0 segment-info (.si) file and materializes it as
/// a SegmentInfo. The input is closed on every path: disposed normally on
/// success, and via CloseWhileHandlingException on failure so the original
/// exception is not masked.
/// </summary>
public override SegmentInfo Read(Directory dir, string segment, IOContext context)
{
    var siFileName = IndexFileNames.SegmentFileName(segment, "", Lucene40SegmentInfoFormat.SI_EXTENSION);
    var siInput = dir.OpenInput(siFileName, context);
    var completed = false;
    try
    {
        CodecUtil.CheckHeader(siInput, Lucene40SegmentInfoFormat.CODEC_NAME, Lucene40SegmentInfoFormat.VERSION_START, Lucene40SegmentInfoFormat.VERSION_CURRENT);

        var version = siInput.ReadString();
        var documentCount = siInput.ReadInt();
        if (documentCount < 0)
        {
            throw new CorruptIndexException("invalid docCount: " + documentCount + " (resource=" + siInput + ")");
        }

        var isCompoundFile = siInput.ReadByte() == SegmentInfo.YES;
        var diagnostics = siInput.ReadStringStringMap();
        siInput.ReadStringStringMap(); // read deprecated attributes
        var files = siInput.ReadStringSet();
        CodecUtil.CheckEOF(siInput);

        var segmentInfo = new SegmentInfo(dir, version, segment, documentCount, isCompoundFile, null, diagnostics);
        segmentInfo.Files = files;
        completed = true;
        return segmentInfo;
    }
    finally
    {
        if (completed)
        {
            siInput.Dispose();
        }
        else
        {
            IOUtils.CloseWhileHandlingException(siInput);
        }
    }
}
示例5: Lucene40PostingsReader
// private String segment;

/// <summary>
/// Sole constructor. Opens the frequencies stream (header-checked against
/// FRQ_CODEC) and, only when some field indexes positions, the proximity
/// stream (header-checked against PRX_CODEC). On failure both locals are
/// closed while preserving the original exception.
/// </summary>
public Lucene40PostingsReader(Directory dir, FieldInfos fieldInfos, SegmentInfo segmentInfo, IOContext ioContext, string segmentSuffix)
{
    bool success = false;
    // Opened into locals first so the finally block can close them on failure
    // before they are published to the instance fields.
    IndexInput freqIn = null;
    IndexInput proxIn = null;
    try
    {
        freqIn = dir.OpenInput(IndexFileNames.SegmentFileName(segmentInfo.Name, segmentSuffix, Lucene40PostingsFormat.FREQ_EXTENSION), ioContext);
        CodecUtil.CheckHeader(freqIn, FRQ_CODEC, VERSION_START, VERSION_CURRENT);
        // TODO: hasProx should (somehow!) become codec private,
        // but it's tricky because 1) FIS.hasProx is global (it
        // could be all fields that have prox are written by a
        // different codec), 2) the field may have had prox in
        // the past but all docs w/ that field were deleted.
        // Really we'd need to init prxOut lazily on write, and
        // then somewhere record that we actually wrote it so we
        // know whether to open on read:
        if (fieldInfos.HasProx())
        {
            proxIn = dir.OpenInput(IndexFileNames.SegmentFileName(segmentInfo.Name, segmentSuffix, Lucene40PostingsFormat.PROX_EXTENSION), ioContext);
            CodecUtil.CheckHeader(proxIn, PRX_CODEC, VERSION_START, VERSION_CURRENT);
        }
        else
        {
            proxIn = null;
        }
        // Publish only after both opens and header checks succeeded.
        this.FreqIn = freqIn;
        this.ProxIn = proxIn;
        success = true;
    }
    finally
    {
        if (!success)
        {
            // Close whatever was opened without masking the original exception.
            IOUtils.CloseWhileHandlingException(freqIn, proxIn);
        }
    }
}
示例6: SimpleTextTermVectorsReader
/// <summary>
/// Sole constructor: opens the plain-text term-vectors file for the segment
/// and then reads its index for DocCount documents. If the open fails, the
/// reader is disposed and the original exception propagates.
/// </summary>
public SimpleTextTermVectorsReader(Directory directory, SegmentInfo si, IOContext context)
{
    var opened = false;
    try
    {
        var vectorsFileName = IndexFileNames.SegmentFileName(si.Name, "", SimpleTextTermVectorsWriter.VECTORS_EXTENSION);
        _input = directory.OpenInput(vectorsFileName, context);
        opened = true;
    }
    finally
    {
        if (!opened)
        {
            try
            {
                Dispose();
            }
            catch (Exception)
            {
                // ensure we throw our original exception
            }
        }
    }
    ReadIndex(si.DocCount);
}
示例7: Read
/// <summary>
/// Reads an "upgraded" 3.x segment-info file into a SegmentInfo. The input
/// is always closed: disposed normally on success, and via
/// CloseWhileHandlingException on failure so the original exception wins.
/// </summary>
public override SegmentInfo Read(Directory directory, string segmentName, IOContext context)
{
    // NOTE: this is NOT how 3.x is really written...
    var upgradedSiFileName = IndexFileNames.SegmentFileName(segmentName, "", Lucene3xSegmentInfoFormat.UPGRADED_SI_EXTENSION);
    var input = directory.OpenInput(upgradedSiFileName, context);
    var completed = false;
    try
    {
        var segmentInfo = ReadUpgradedSegmentInfo(segmentName, directory, input);
        completed = true;
        return segmentInfo;
    }
    finally
    {
        if (completed)
        {
            input.Dispose();
        }
        else
        {
            IOUtils.CloseWhileHandlingException(input);
        }
    }
}
示例8: BlockTermsReader
/// <summary>
/// Sole constructor: opens the block terms dictionary file, lets the postings
/// reader initialize from it, then reads the per-field directory (term count,
/// terms start pointer, and aggregate stats), validating each stat against
/// the segment's DocCount. On failure the input is disposed before the
/// exception propagates.
/// </summary>
public BlockTermsReader(TermsIndexReaderBase indexReader, Directory dir, FieldInfos fieldInfos, SegmentInfo info,
    PostingsReaderBase postingsReader, IOContext context,
    String segmentSuffix)
{
    _postingsReader = postingsReader;

    _input =
        dir.OpenInput(
            IndexFileNames.SegmentFileName(info.Name, segmentSuffix, BlockTermsWriter.TERMS_EXTENSION),
            context);

    var success = false;
    try
    {
        _version = ReadHeader(_input);

        // Have PostingsReader init itself
        postingsReader.Init(_input);

        // Read per-field details
        SeekDir(_input, _dirOffset);

        int numFields = _input.ReadVInt();
        if (numFields < 0)
        {
            throw new CorruptIndexException(String.Format("Invalid number of fields: {0}, Resource: {1}",
                numFields, _input));
        }

        for (var i = 0; i < numFields; i++)
        {
            var field = _input.ReadVInt();
            var numTerms = _input.ReadVLong();
            Debug.Assert(numTerms >= 0);
            var termsStartPointer = _input.ReadVLong();
            var fieldInfo = fieldInfos.FieldInfo(field);
            // DOCS_ONLY fields do not record a total term frequency; -1 is the sentinel.
            var sumTotalTermFreq = fieldInfo.FieldIndexOptions == FieldInfo.IndexOptions.DOCS_ONLY
                ? -1
                : _input.ReadVLong();
            var sumDocFreq = _input.ReadVLong();
            var docCount = _input.ReadVInt();
            // Per-term metadata longs were only written from VERSION_META_ARRAY on.
            var longsSize = _version >= BlockTermsWriter.VERSION_META_ARRAY ? _input.ReadVInt() : 0;

            if (docCount < 0 || docCount > info.DocCount)
            {
                // #docs with field must be <= #docs
                throw new CorruptIndexException(
                    String.Format("Invalid DocCount: {0}, MaxDoc: {1}, Resource: {2}", docCount, info.DocCount,
                        _input));
            }
            if (sumDocFreq < docCount)
            {
                // #postings must be >= #docs with field
                throw new CorruptIndexException(
                    String.Format("Invalid sumDocFreq: {0}, DocCount: {1}, Resource: {2}", sumDocFreq, docCount,
                        _input));
            }
            if (sumTotalTermFreq != -1 && sumTotalTermFreq < sumDocFreq)
            {
                // #positions must be >= #postings
                throw new CorruptIndexException(
                    String.Format("Invalid sumTotalTermFreq: {0}, sumDocFreq: {1}, Resource: {2}",
                        sumTotalTermFreq, sumDocFreq, _input));
            }

            try
            {
                _fields.Add(fieldInfo.Name,
                    new FieldReader(fieldInfo, this, numTerms, termsStartPointer, sumTotalTermFreq, sumDocFreq,
                        docCount,
                        longsSize));
            }
            catch (ArgumentException)
            {
                // Dictionary.Add throws on a duplicate key -> the directory is corrupt.
                throw new CorruptIndexException(String.Format("Duplicate fields: {0}, Resource: {1}",
                    fieldInfo.Name, _input));
            }
        }
        success = true;
    }
    finally
    {
        // On failure, release the terms input so we don't leak the file handle.
        if (!success)
        {
            _input.Dispose();
        }
    }
    _indexReader = indexReader;
}
示例9: BeforeClass
/// <summary>
/// Test setup: writes a single-segment index with the pre-4.0 (PreFlexRW)
/// codec, then opens that segment's terms-index and terms files directly to
/// build the SegmentTermEnum / TermInfosReaderIndex fixtures that the tests
/// sample against. Compound files are disabled because the test opens index
/// files by name.
/// </summary>
public void BeforeClass()
{
    // NOTE: turn off compound file, this test will open some index files directly.
    OLD_FORMAT_IMPERSONATION_IS_ACTIVE = true;
    IndexWriterConfig config = NewIndexWriterConfig(TEST_VERSION_CURRENT, new MockAnalyzer(Random(), MockTokenizer.KEYWORD, false)).SetUseCompoundFile(false);
    TermIndexInterval = config.TermIndexInterval;
    IndexDivisor = TestUtil.NextInt(Random(), 1, 10);
    NUMBER_OF_DOCUMENTS = AtLeast(100);
    // Presumably sized so the terms index spans multiple divisor steps — confirm
    // against the upstream Lucene test this was ported from.
    NUMBER_OF_FIELDS = AtLeast(Math.Max(10, 3 * TermIndexInterval * IndexDivisor / NUMBER_OF_DOCUMENTS));
    Directory = NewDirectory();
    config.SetCodec(new PreFlexRWCodec(OLD_FORMAT_IMPERSONATION_IS_ACTIVE));
    LogMergePolicy mp = NewLogMergePolicy();
    // NOTE: turn off compound file, this test will open some index files directly.
    mp.NoCFSRatio = 0.0;
    config.SetMergePolicy(mp);
    Populate(Directory, config);

    // Grab the name of the single segment that Populate produced.
    DirectoryReader r0 = IndexReader.Open(Directory);
    SegmentReader r = LuceneTestCase.GetOnlySegmentReader(r0);
    string segment = r.SegmentName;
    r.Dispose();

    FieldInfosReader infosReader = (new PreFlexRWCodec(OLD_FORMAT_IMPERSONATION_IS_ACTIVE)).FieldInfosFormat().FieldInfosReader;
    FieldInfos fieldInfos = infosReader.Read(Directory, segment, "", IOContext.READONCE);
    string segmentFileName = IndexFileNames.SegmentFileName(segment, "", Lucene3xPostingsFormat.TERMS_INDEX_EXTENSION);
    long tiiFileLength = Directory.FileLength(segmentFileName);
    IndexInput input = Directory.OpenInput(segmentFileName, NewIOContext(Random()));
    TermEnum = new SegmentTermEnum(Directory.OpenInput(IndexFileNames.SegmentFileName(segment, "", Lucene3xPostingsFormat.TERMS_EXTENSION), NewIOContext(Random())), fieldInfos, false);
    int totalIndexInterval = TermEnum.IndexInterval * IndexDivisor;

    // Build the in-memory terms index from the terms-index file, then release
    // the enum and its underlying input.
    SegmentTermEnum indexEnum = new SegmentTermEnum(input, fieldInfos, true);
    Index = new TermInfosReaderIndex(indexEnum, IndexDivisor, tiiFileLength, totalIndexInterval);
    indexEnum.Dispose();
    input.Dispose();

    Reader = IndexReader.Open(Directory);
    SampleTerms = Sample(Random(), Reader, 1000);
}
示例10: Lucene3xStoredFieldsReader
/// <summary>
/// Sole constructor. Opens the 3.x stored-fields data and index files —
/// possibly inside a shared doc-store compound file — and validates the
/// format version. When a doc-store offset is present this reader exposes
/// only a slice of the shared files; otherwise the document count is derived
/// from the index file size (8 bytes per doc) and cross-checked against the
/// SegmentInfo. On failure, anything already opened is disposed so the
/// original exception propagates without leaking file handles.
/// </summary>
public Lucene3xStoredFieldsReader(Directory d, SegmentInfo si, FieldInfos fn, IOContext context)
{
    string segment = Lucene3xSegmentInfoFormat.GetDocStoreSegment(si);
    int docStoreOffset = Lucene3xSegmentInfoFormat.GetDocStoreOffset(si);
    int size = si.DocCount;
    bool success = false;
    FieldInfos = fn;
    try
    {
        if (docStoreOffset != -1 && Lucene3xSegmentInfoFormat.GetDocStoreIsCompoundFile(si))
        {
            // The shared doc store lives in a compound file; route all opens through it.
            d = StoreCFSReader = new CompoundFileDirectory(si.Dir, IndexFileNames.SegmentFileName(segment, "", Lucene3xCodec.COMPOUND_FILE_STORE_EXTENSION), context, false);
        }
        else
        {
            StoreCFSReader = null;
        }
        FieldsStream = d.OpenInput(IndexFileNames.SegmentFileName(segment, "", FIELDS_EXTENSION), context);
        string indexStreamFN = IndexFileNames.SegmentFileName(segment, "", FIELDS_INDEX_EXTENSION);
        IndexStream = d.OpenInput(indexStreamFN, context);

        Format = IndexStream.ReadInt();
        if (Format < FORMAT_MINIMUM)
        {
            throw new IndexFormatTooOldException(IndexStream, Format, FORMAT_MINIMUM, FORMAT_CURRENT);
        }
        if (Format > FORMAT_CURRENT)
        {
            throw new IndexFormatTooNewException(IndexStream, Format, FORMAT_MINIMUM, FORMAT_CURRENT);
        }

        long indexSize = IndexStream.Length() - FORMAT_SIZE;
        if (docStoreOffset != -1)
        {
            // We read only a slice out of this shared fields file
            this.DocStoreOffset = docStoreOffset;
            this.Size = size;
            // Verify the file is long enough to hold all of our
            // docs
            Debug.Assert(((int)(indexSize / 8)) >= size + this.DocStoreOffset, "indexSize=" + indexSize + " size=" + size + " docStoreOffset=" + docStoreOffset);
        }
        else
        {
            this.DocStoreOffset = 0;
            this.Size = (int)(indexSize >> 3);
            // Verify two sources of "maxDoc" agree:
            if (this.Size != si.DocCount)
            {
                throw new CorruptIndexException("doc counts differ for segment " + segment + ": fieldsReader shows " + this.Size + " but segmentInfo shows " + si.DocCount);
            }
        }
        NumTotalDocs = (int)(indexSize >> 3);
        success = true;
    }
    finally
    {
        // With lock-less commits, it's entirely possible (and
        // fine) to hit a FileNotFound exception above. In
        // this case, we want to explicitly close any subset
        // of things that were opened so that we don't have to
        // wait for a GC to do so.
        if (!success)
        {
            try
            {
                Dispose();
            }
            catch (Exception) // was `catch (Exception t)`: unused variable (CS0168)
            {
                // Swallow secondary failures to keep our original exception.
            }
        }
    }
}
示例11: CheckCodeVersion
/// <summary>
/// Verifies that the code version which wrote the segment is supported:
/// reads the format integer from the fields-index file and throws
/// IndexFormatTooOldException / IndexFormatTooNewException when it falls
/// outside [FORMAT_MINIMUM, FORMAT_CURRENT]. The stream is always disposed.
/// </summary>
public static void CheckCodeVersion(Directory dir, string segment)
{
    var idxFileName = IndexFileNames.SegmentFileName(segment, "", FIELDS_INDEX_EXTENSION);
    var idxInput = dir.OpenInput(idxFileName, IOContext.DEFAULT);
    try
    {
        var writtenFormat = idxInput.ReadInt();
        if (writtenFormat < FORMAT_MINIMUM)
        {
            throw new IndexFormatTooOldException(idxInput, writtenFormat, FORMAT_MINIMUM, FORMAT_CURRENT);
        }
        if (writtenFormat > FORMAT_CURRENT)
        {
            throw new IndexFormatTooNewException(idxInput, writtenFormat, FORMAT_MINIMUM, FORMAT_CURRENT);
        }
    }
    finally
    {
        idxInput.Dispose();
    }
}
示例12: TermInfosReader
/// <summary>
/// Sole constructor. Opens the pre-4.0 terms dictionary and, unless
/// indexDivisor == -1, builds the in-memory terms index from the terms-index
/// file (keeping only every indexDivisor'th entry via TotalIndexInterval).
/// Throws ArgumentException for an indexDivisor that is neither -1 nor
/// positive. On failure everything opened so far is released via Dispose().
/// </summary>
internal TermInfosReader(Directory dir, string seg, FieldInfos fis, IOContext context, int indexDivisor)
{
    bool success = false;
    if (indexDivisor < 1 && indexDivisor != -1)
    {
        throw new System.ArgumentException("indexDivisor must be -1 (don't load terms index) or greater than 0: got " + indexDivisor);
    }
    try
    {
        Directory = dir;
        Segment = seg;
        FieldInfos = fis;
        OrigEnum = new SegmentTermEnum(Directory.OpenInput(IndexFileNames.SegmentFileName(Segment, "", Lucene3xPostingsFormat.TERMS_EXTENSION), context), FieldInfos, false);
        Size_Renamed = OrigEnum.Size;
        if (indexDivisor != -1)
        {
            // Load terms index
            TotalIndexInterval = OrigEnum.IndexInterval * indexDivisor;
            string indexFileName = IndexFileNames.SegmentFileName(Segment, "", Lucene3xPostingsFormat.TERMS_INDEX_EXTENSION);
            SegmentTermEnum indexEnum = new SegmentTermEnum(Directory.OpenInput(indexFileName, context), FieldInfos, true);
            try
            {
                Index = new TermInfosReaderIndex(indexEnum, indexDivisor, dir.FileLength(indexFileName), TotalIndexInterval);
                IndexLength = Index.Length();
            }
            finally
            {
                // The index enum (and its underlying file) is only needed while
                // building the in-memory index above.
                indexEnum.Dispose();
            }
        }
        else
        {
            // Do not load terms index:
            TotalIndexInterval = -1;
            Index = null;
            IndexLength = -1;
        }
        success = true;
    }
    finally
    {
        // With lock-less commits, it's entirely possible (and
        // fine) to hit a FileNotFound exception above. In
        // this case, we want to explicitly close any subset
        // of things that were opened so that we don't have to
        // wait for a GC to do so.
        if (!success)
        {
            Dispose();
        }
    }
}
示例13: FixedGapTermsIndexReader
public FixedGapTermsIndexReader(Directory dir, FieldInfos fieldInfos, String segment, int indexDivisor,
IComparer<BytesRef> termComp, String segmentSuffix, IOContext context)
{
_termComp = termComp;
Debug.Assert(indexDivisor == -1 || indexDivisor > 0);
_input =
dir.OpenInput(
IndexFileNames.SegmentFileName(segment, segmentSuffix,
FixedGapTermsIndexWriter.TERMS_INDEX_EXTENSION),
context);
var success = false;
try
{
_version = ReadHeader(_input);
if (_version >= FixedGapTermsIndexWriter.VERSION_CHECKSUM)
CodecUtil.ChecksumEntireFile(_input);
indexInterval = _input.ReadInt();
if (indexInterval < 1)
{
throw new CorruptIndexException(String.Format("Invalid indexInterval: {0}, Resource: {1}",
indexInterval, _input));
}
_indexDivisor = indexDivisor;
if (indexDivisor < 0)
{
_totalIndexInterval = indexInterval;
}
else
{
// In case terms index gets loaded, later, on demand
_totalIndexInterval = indexInterval*indexDivisor;
}
Debug.Assert(_totalIndexInterval > 0);
SeekDir(_input, _dirOffset);
// Read directory
int numFields = _input.ReadVInt();
if (numFields < 0)
throw new CorruptIndexException(String.Format("Invalid numFields: {0}, Resource: {1}", numFields,
_input));
for (int i = 0; i < numFields; i++)
{
int field = _input.ReadVInt();
int numIndexTerms = _input.ReadVInt();
if (numIndexTerms < 0)
throw new CorruptIndexException(String.Format("Invalid numIndexTerms: {0}, Resource: {1}",
numIndexTerms,
_input));
long termsStart = _input.ReadVLong();
long indexStart = _input.ReadVLong();
long packedIndexStart = _input.ReadVLong();
long packedOffsetsStart = _input.ReadVLong();
if (packedIndexStart < indexStart)
throw new CorruptIndexException(
String.Format(
"Invalid packedIndexStart: {0}, IndexStart: {1}, NumIndexTerms: {2}, Resource: {3}",
packedIndexStart,
indexStart, numIndexTerms, _input));
FieldInfo fieldInfo = fieldInfos.FieldInfo(field);
try
{
_fields.Add(fieldInfo,
new FieldIndexData(numIndexTerms, indexStart, termsStart, packedIndexStart,
packedOffsetsStart, this));
}
catch (ArgumentException)
{
throw new CorruptIndexException(String.Format("Duplicate field: {0}, Resource {1}",
fieldInfo.Name,
_input));
}
}
success = true;
}
finally
{
if (!success)
{
IOUtils.CloseWhileHandlingException(_input);
}
//.........这里部分代码省略.........
示例14: Read
/// <summary>
/// Reads a pre-4.0 field-infos file into a FieldInfos instance. Field
/// numbers are implicit (the position in the file) and per-field flags are
/// unpacked from a single bits byte. The input is disposed on success and
/// closed-while-handling-exception on failure.
/// </summary>
public override FieldInfos Read(Directory directory, string segmentName, string segmentSuffix, IOContext iocontext)
{
    string fileName = IndexFileNames.SegmentFileName(segmentName, "", FIELD_INFOS_EXTENSION);
    IndexInput input = directory.OpenInput(fileName, iocontext);
    bool success = false;
    try
    {
        // NOTE(review): the comparisons below look inverted relative to the 4.x
        // readers — presumably pre-4.0 format versions are negative and decrease
        // over time, so format > FORMAT_MINIMUM means "too old"; confirm against
        // the FORMAT_* constants.
        int format = input.ReadVInt();
        if (format > FORMAT_MINIMUM)
        {
            throw new IndexFormatTooOldException(input, format, FORMAT_MINIMUM, FORMAT_CURRENT);
        }
        if (format < FORMAT_CURRENT)
        {
            throw new IndexFormatTooNewException(input, format, FORMAT_MINIMUM, FORMAT_CURRENT);
        }

        int size = input.ReadVInt(); //read in the size
        FieldInfo[] infos = new FieldInfo[size];
        for (int i = 0; i < size; i++)
        {
            string name = input.ReadString();
            // This format does not store field numbers; the ordinal is the number.
            int fieldNumber = i;
            // Per-field flags are packed into one byte.
            byte bits = input.ReadByte();
            bool isIndexed = (bits & IS_INDEXED) != 0;
            bool storeTermVector = (bits & STORE_TERMVECTOR) != 0;
            bool omitNorms = (bits & OMIT_NORMS) != 0;
            bool storePayloads = (bits & STORE_PAYLOADS) != 0;
            FieldInfo.IndexOptions indexOptions;
            if (!isIndexed)
            {
                indexOptions = default(FieldInfo.IndexOptions);
            }
            else if ((bits & OMIT_TERM_FREQ_AND_POSITIONS) != 0)
            {
                indexOptions = FieldInfo.IndexOptions.DOCS_ONLY;
            }
            else if ((bits & OMIT_POSITIONS) != 0)
            {
                // OMIT_POSITIONS is only valid from FORMAT_OMIT_POSITIONS onward.
                if (format <= FORMAT_OMIT_POSITIONS)
                {
                    indexOptions = FieldInfo.IndexOptions.DOCS_AND_FREQS;
                }
                else
                {
                    throw new CorruptIndexException("Corrupt fieldinfos, OMIT_POSITIONS set but format=" + format + " (resource: " + input + ")");
                }
            }
            else
            {
                indexOptions = FieldInfo.IndexOptions.DOCS_AND_FREQS_AND_POSITIONS;
            }
            // LUCENE-3027: past indices were able to write
            // storePayloads=true when omitTFAP is also true,
            // which is invalid. We correct that, here:
            if (indexOptions != FieldInfo.IndexOptions.DOCS_AND_FREQS_AND_POSITIONS)
            {
                storePayloads = false;
            }
            // LUCENE TO-DO
            infos[i] = new FieldInfo(name, isIndexed, fieldNumber, storeTermVector, omitNorms, storePayloads, indexOptions, null, isIndexed && !omitNorms ? FieldInfo.DocValuesType_e.NUMERIC : default(FieldInfo.DocValuesType_e), CollectionsHelper.EmptyMap<string, string>());
        }

        // Detect trailing garbage / truncated reads.
        if (input.FilePointer != input.Length())
        {
            throw new CorruptIndexException("did not read all bytes from file \"" + fileName + "\": read " + input.FilePointer + " vs size " + input.Length() + " (resource: " + input + ")");
        }
        FieldInfos fieldInfos = new FieldInfos(infos);
        success = true;
        return fieldInfos;
    }
    finally
    {
        if (success)
        {
            input.Dispose();
        }
        else
        {
            // Close without masking the original exception.
            IOUtils.CloseWhileHandlingException(input);
        }
    }
}
示例15: Read
/// <summary>
/// Reads a Lucene 4.2 field-infos file into a FieldInfos instance. Index
/// options are unpacked from a per-field bits byte, and the doc-values and
/// norms types are packed as the low and high nibbles of a single byte. The
/// input is disposed on success and closed-while-handling-exception on
/// failure.
/// </summary>
public override FieldInfos Read(Directory directory, string segmentName, string segmentSuffix, IOContext iocontext)
{
    string fileName = IndexFileNames.SegmentFileName(segmentName, "", Lucene42FieldInfosFormat.EXTENSION);
    IndexInput input = directory.OpenInput(fileName, iocontext);
    bool success = false;
    try
    {
        CodecUtil.CheckHeader(input, Lucene42FieldInfosFormat.CODEC_NAME, Lucene42FieldInfosFormat.FORMAT_START, Lucene42FieldInfosFormat.FORMAT_CURRENT);
        int size = input.ReadVInt(); //read in the size
        FieldInfo[] infos = new FieldInfo[size];
        for (int i = 0; i < size; i++)
        {
            string name = input.ReadString();
            int fieldNumber = input.ReadVInt();
            // Per-field flags are packed into one byte.
            byte bits = input.ReadByte();
            bool isIndexed = (bits & Lucene42FieldInfosFormat.IS_INDEXED) != 0;
            bool storeTermVector = (bits & Lucene42FieldInfosFormat.STORE_TERMVECTOR) != 0;
            bool omitNorms = (bits & Lucene42FieldInfosFormat.OMIT_NORMS) != 0;
            bool storePayloads = (bits & Lucene42FieldInfosFormat.STORE_PAYLOADS) != 0;
            FieldInfo.IndexOptions indexOptions;
            if (!isIndexed)
            {
                indexOptions = default(FieldInfo.IndexOptions);
            }
            else if ((bits & Lucene42FieldInfosFormat.OMIT_TERM_FREQ_AND_POSITIONS) != 0)
            {
                indexOptions = FieldInfo.IndexOptions.DOCS_ONLY;
            }
            else if ((bits & Lucene42FieldInfosFormat.OMIT_POSITIONS) != 0)
            {
                indexOptions = FieldInfo.IndexOptions.DOCS_AND_FREQS;
            }
            else if ((bits & Lucene42FieldInfosFormat.STORE_OFFSETS_IN_POSTINGS) != 0)
            {
                indexOptions = FieldInfo.IndexOptions.DOCS_AND_FREQS_AND_POSITIONS_AND_OFFSETS;
            }
            else
            {
                indexOptions = FieldInfo.IndexOptions.DOCS_AND_FREQS_AND_POSITIONS;
            }
            // DV Types are packed in one byte: low nibble = doc values, high nibble = norms.
            byte val = input.ReadByte();
            FieldInfo.DocValuesType_e docValuesType = GetDocValuesType(input, (sbyte)(val & 0x0F));
            FieldInfo.DocValuesType_e normsType = GetDocValuesType(input, (sbyte)(((int)((uint)val >> 4)) & 0x0F));
            IDictionary<string, string> attributes = input.ReadStringStringMap();
            infos[i] = new FieldInfo(name, isIndexed, fieldNumber, storeTermVector, omitNorms, storePayloads, indexOptions, docValuesType, normsType, CollectionsHelper.UnmodifiableMap(attributes));
        }
        CodecUtil.CheckEOF(input);
        FieldInfos fieldInfos = new FieldInfos(infos);
        success = true;
        return fieldInfos;
    }
    finally
    {
        if (success)
        {
            input.Dispose();
        }
        else
        {
            // Close without masking the original exception.
            IOUtils.CloseWhileHandlingException(input);
        }
    }
}