本文整理汇总了C#中Lucene.Net.Index.FieldInfos.HasVectors方法的典型用法代码示例。如果您正苦于以下问题:C# FieldInfos.HasVectors方法的具体用法?C# FieldInfos.HasVectors怎么用?C# FieldInfos.HasVectors使用的例子?那么恭喜您,这里精选的方法代码示例或许可以为您提供帮助。您也可以进一步了解该方法所在类Lucene.Net.Index.FieldInfos的用法示例。
在下文中一共展示了FieldInfos.HasVectors方法的3个代码示例,这些例子默认根据受欢迎程度排序。您可以为喜欢或者感觉有用的代码点赞,您的评价将有助于系统推荐出更棒的C#代码示例。
示例1: Initialize
/// <summary>
/// Opens all per-segment index files for the given segment, preferring the
/// compound (.cfs) container when one exists on disk.
/// </summary>
/// <param name="si">Segment descriptor whose files are opened.</param>
private void Initialize(SegmentInfo si)
{
    segment = si.name;

    // Read through the compound file when the segment was written in
    // compound format; otherwise read the individual files directly.
    Directory dir = Directory();
    if (Directory().FileExists(segment + ".cfs"))
    {
        cfsReader = new CompoundFileReader(Directory(), segment + ".cfs");
        dir = cfsReader;
    }

    fieldInfos = new FieldInfos(dir, segment + ".fnm");
    fieldsReader = new FieldsReader(dir, segment, fieldInfos);
    tis = new TermInfosReader(dir, segment, fieldInfos);

    // NOTE: the deletions bitvector is stored in the regular directory,
    // never inside the compound file.
    if (HasDeletions(si))
    {
        deletedDocs = new BitVector(Directory(), segment + ".del");
    }

    // Keep the postings streams open so that if an index update removes
    // the underlying files we still hold handles to them.
    freqStream = dir.OpenInput(segment + ".frq");
    proxStream = dir.OpenInput(segment + ".prx");
    OpenNorms(dir);

    if (fieldInfos.HasVectors())
    {
        // Term vector readers are cloned from this original only as needed.
        termVectorsReaderOrig = new TermVectorsReader(dir, segment, fieldInfos);
    }
}
示例2: Initialize
/// <summary>
/// Opens all per-segment index files for the given segment, validating the
/// doc count and deletion count against the SegmentInfo, and explicitly
/// closing any partially-opened resources if anything fails.
/// </summary>
/// <param name="si">Segment descriptor whose files are opened.</param>
/// <exception cref="System.SystemException">
/// Thrown when the stored-fields doc count disagrees with the SegmentInfo,
/// or when the number of deletions exceeds maxDoc for this segment.
/// </exception>
private void Initialize(SegmentInfo si)
{
segment = si.name;
this.si = si;
bool success = false;
try
{
// Use compound file directory for some files, if it exists
Directory cfsDir = Directory();
if (si.GetUseCompoundFile())
{
cfsReader = new CompoundFileReader(Directory(), segment + ".cfs");
cfsDir = cfsReader;
}
// cfsDir now points at either the compound reader or the plain
// directory; field infos, stored fields and the term dictionary
// are all read through it.
fieldInfos = new FieldInfos(cfsDir, segment + ".fnm");
fieldsReader = new FieldsReader(cfsDir, segment, fieldInfos);
// Verify two sources of "maxDoc" agree:
if (fieldsReader.Size() != si.docCount)
{
throw new System.SystemException("doc counts differ for segment " + si.name + ": fieldsReader shows " + fieldsReader.Size() + " but segmentInfo shows " + si.docCount);
}
tis = new TermInfosReader(cfsDir, segment, fieldInfos);
// NOTE: the bitvector is stored using the regular directory, not cfs
if (HasDeletions(si))
{
deletedDocs = new BitVector(Directory(), si.GetDelFileName());
// Verify # deletes does not exceed maxDoc for this segment:
if (deletedDocs.Count() > MaxDoc())
{
throw new System.SystemException("number of deletes (" + deletedDocs.Count() + ") exceeds max doc (" + MaxDoc() + ") for segment " + si.name);
}
}
// make sure that all index files have been read or are kept open
// so that if an index update removes them we'll still have them
freqStream = cfsDir.OpenInput(segment + ".frq");
proxStream = cfsDir.OpenInput(segment + ".prx");
OpenNorms(cfsDir);
if (fieldInfos.HasVectors())
{
// open term vector files only as needed
termVectorsReaderOrig = new TermVectorsReader(cfsDir, segment, fieldInfos);
}
success = true;
}
finally
{
// With lock-less commits, it's entirely possible (and
// fine) to hit a FileNotFound exception above. In
// this case, we want to explicitly close any subset
// of things that were opened so that we don't have to
// wait for a GC to do so.
if (!success)
{
DoClose();
}
}
}
示例3: Initialize
private void Initialize(SegmentInfo si, int readBufferSize, bool doOpenStores)
{
segment = si.name;
this.si = si;
this.readBufferSize = readBufferSize;
bool success = false;
try
{
// Use compound file directory for some files, if it exists
Directory cfsDir = Directory();
if (si.GetUseCompoundFile())
{
cfsReader = new CompoundFileReader(Directory(), segment + "." + IndexFileNames.COMPOUND_FILE_EXTENSION, readBufferSize);
cfsDir = cfsReader;
}
Directory storeDir;
if (doOpenStores)
{
if (si.GetDocStoreOffset() != - 1)
{
if (si.GetDocStoreIsCompoundFile())
{
storeCFSReader = new CompoundFileReader(Directory(), si.GetDocStoreSegment() + "." + IndexFileNames.COMPOUND_FILE_STORE_EXTENSION, readBufferSize);
storeDir = storeCFSReader;
}
else
{
storeDir = Directory();
}
}
else
{
storeDir = cfsDir;
}
}
else
storeDir = null;
fieldInfos = new FieldInfos(cfsDir, segment + ".fnm");
bool anyProx = false;
int numFields = fieldInfos.Size();
for (int i = 0; !anyProx && i < numFields; i++)
if (!fieldInfos.FieldInfo(i).omitTf)
anyProx = true;
System.String fieldsSegment;
if (si.GetDocStoreOffset() != - 1)
fieldsSegment = si.GetDocStoreSegment();
else
fieldsSegment = segment;
if (doOpenStores)
{
fieldsReader = new FieldsReader(storeDir, fieldsSegment, fieldInfos, readBufferSize, si.GetDocStoreOffset(), si.docCount);
// Verify two sources of "maxDoc" agree:
if (si.GetDocStoreOffset() == - 1 && fieldsReader.Size() != si.docCount)
{
throw new CorruptIndexException("doc counts differ for segment " + si.name + ": fieldsReader shows " + fieldsReader.Size() + " but segmentInfo shows " + si.docCount);
}
}
tis = new TermInfosReader(cfsDir, segment, fieldInfos, readBufferSize);
LoadDeletedDocs();
// make sure that all index files have been read or are kept open
// so that if an index update removes them we'll still have them
freqStream = cfsDir.OpenInput(segment + ".frq", readBufferSize);
if (anyProx)
proxStream = cfsDir.OpenInput(segment + ".prx", readBufferSize);
OpenNorms(cfsDir, readBufferSize);
if (doOpenStores && fieldInfos.HasVectors())
{
// open term vector files only as needed
System.String vectorsSegment;
if (si.GetDocStoreOffset() != - 1)
vectorsSegment = si.GetDocStoreSegment();
else
vectorsSegment = segment;
termVectorsReaderOrig = new TermVectorsReader(storeDir, vectorsSegment, fieldInfos, readBufferSize, si.GetDocStoreOffset(), si.docCount);
}
success = true;
}
finally
{
// With lock-less commits, it's entirely possible (and
// fine) to hit a FileNotFound exception above. In
// this case, we want to explicitly close any subset
// of things that were opened so that we don't have to
// wait for a GC to do so.
if (!success)
//.........这里部分代码省略.........