本文整理汇总了C#中Lucene.Net.Index.DocumentsWriter.SegmentFileName方法的典型用法代码示例。如果您正苦于以下问题:C# DocumentsWriter.SegmentFileName方法的具体用法?C# DocumentsWriter.SegmentFileName怎么用?C# DocumentsWriter.SegmentFileName使用的例子?那么恭喜您, 这里精选的方法代码示例或许可以为您提供帮助。您也可以进一步了解该方法所在类Lucene.Net.Index.DocumentsWriter
的用法示例。
在下文中一共展示了DocumentsWriter.SegmentFileName方法的1个代码示例,这些例子默认根据受欢迎程度排序。您可以为喜欢或者感觉有用的代码点赞,您的评价将有助于系统推荐出更棒的C#代码示例。
示例1: flush
// TODO: would be nice to factor out more of this, eg the
// FreqProxFieldMergeState, and code to visit all Fields
// under the same FieldInfo together, up into TermsHash*.
// Other writers would presumably share a lot of this...

/// <summary>
/// Flushes the in-RAM postings gathered by every indexing thread into the
/// on-disk postings files of the current segment: the term dictionary (via
/// <c>TermInfosWriter</c>), the frequency file (FREQ_EXTENSION) and, when any
/// field indexes positions, the proximity file (PROX_EXTENSION).
/// </summary>
/// <param name="threadsAndFields">Map whose keys are FreqProxTermsWriterPerThread
/// instances and whose values are the collections of FreqProxTermsWriterPerField
/// owned by that thread.</param>
/// <param name="state">Per-flush state: target directory, segment name, number of
/// buffered docs, and the set of file names produced by this flush.</param>
internal override void flush(IDictionary<object, object> threadsAndFields, DocumentsWriter.FlushState state)
{
// Gather all FieldData's that have postings, across all
// ThreadStates
List<object> allFields = new List<object>();
IEnumerator<KeyValuePair<object, object>> it = threadsAndFields.GetEnumerator();
while (it.MoveNext())
{
KeyValuePair<object, object> entry = (KeyValuePair<object, object>)it.Current;
ICollection<object> fields = (ICollection<object>)entry.Value;
IEnumerator<object> fieldsIt = fields.GetEnumerator();
while (fieldsIt.MoveNext())
{
FreqProxTermsWriterPerField perField = (FreqProxTermsWriterPerField)fieldsIt.Current;
// Only per-field writers that actually buffered postings take part in the flush.
if (perField.termsHashPerField.numPostings > 0)
allFields.Add(perField);
}
}
// Sort by field name
allFields.Sort();
int numAllFields = allFields.Count;
TermInfosWriter termsOut = new TermInfosWriter(state.directory,
state.segmentName,
fieldInfos,
state.docWriter.writer.GetTermIndexInterval());
IndexOutput freqOut = state.directory.CreateOutput(state.SegmentFileName(IndexFileNames.FREQ_EXTENSION));
IndexOutput proxOut;
// The prox file is only created when at least one field stores positions.
if (fieldInfos.HasProx())
proxOut = state.directory.CreateOutput(state.SegmentFileName(IndexFileNames.PROX_EXTENSION));
else
proxOut = null;
DefaultSkipListWriter skipListWriter = new DefaultSkipListWriter(termsOut.skipInterval,
termsOut.maxSkipLevels,
state.numDocsInRAM, freqOut, proxOut);
// Walk the sorted list; each outer iteration handles one run of entries that
// share the same field name (the same field as seen by different threads).
int start = 0;
while (start < numAllFields)
{
FieldInfo fieldInfo = ((FreqProxTermsWriterPerField)allFields[start]).fieldInfo;
string fieldName = fieldInfo.name;
// Advance 'end' past every per-thread entry for this field.
int end = start + 1;
while (end < numAllFields && ((FreqProxTermsWriterPerField)allFields[end]).fieldInfo.name.Equals(fieldName))
end++;
FreqProxTermsWriterPerField[] fields = new FreqProxTermsWriterPerField[end - start];
for (int i = start; i < end; i++)
{
fields[i - start] = (FreqProxTermsWriterPerField)allFields[i];
// Aggregate the storePayload as seen by the same
// field across multiple threads
fieldInfo.storePayloads |= fields[i - start].hasPayloads;
}
// If this field has postings then add them to the
// segment
AppendPostings(state, fields, termsOut, freqOut, proxOut, skipListWriter);
// Postings for this field are on disk now; release the per-field
// buffers so the RAM can be reused for the next flush cycle.
for (int i = 0; i < fields.Length; i++)
{
TermsHashPerField perField = fields[i].termsHashPerField;
int numPostings = perField.numPostings;
perField.reset();
perField.shrinkHash(numPostings);
fields[i].reset();
}
start = end;
}
// Second pass over the threads: reset each per-thread terms hash
// (the 'true' argument is passed through to its reset; see
// TermsHashPerThread for its meaning) before indexing resumes.
it = threadsAndFields.GetEnumerator();
while (it.MoveNext())
{
KeyValuePair<object, object> entry = (KeyValuePair<object, object>)it.Current;
FreqProxTermsWriterPerThread perThread = (FreqProxTermsWriterPerThread)entry.Key;
perThread.termsHashPerThread.reset(true);
}
freqOut.Close();
if (proxOut != null)
{
// flushedFiles is used as a set here: key == value == the flushed file name.
state.flushedFiles[state.SegmentFileName(IndexFileNames.PROX_EXTENSION)] = state.SegmentFileName(IndexFileNames.PROX_EXTENSION);
proxOut.Close();
}
// NOTE(review): the freq file is never added to state.flushedFiles in the
// visible code — presumably that happens in the omitted tail of this
// method (the source is truncated below); confirm against the full file.
termsOut.Close();
//.........这里部分代码省略.........