This article collects typical usage examples of the C# method Lucene.Net.Index.SegmentInfo.SetUseCompoundFile. If you are unsure how to call SegmentInfo.SetUseCompoundFile, or want to see how it is used in practice, the selected examples below may help; you can also look further into the containing class, Lucene.Net.Index.SegmentInfo.
Three code examples of the SegmentInfo.SetUseCompoundFile method are shown below, ordered by popularity by default.
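Before the full examples, here is a minimal sketch of the pattern they all share: build the compound (.cfs) file first, and only then flip the segment's compound-file flag. This is not the complete IndexWriter logic; merger, mergedName, docCount and directory are placeholders standing in for the surrounding state shown in the examples below (Lucene.Net 2.x-era API).
// Minimal sketch, assuming the Lucene.Net 2.x-era API used in the examples below.
// 'merger' is a SegmentMerger that has already merged the source readers;
// 'mergedName', 'docCount' and 'directory' come from the enclosing IndexWriter.
SegmentInfo info = new SegmentInfo(mergedName, docCount, directory, false, true);
// Write the segment's files into a single <name>.cfs compound file...
merger.CreateCompoundFile(mergedName + ".cfs");
// ...and only after that succeeds, mark the segment as using the compound format.
info.SetUseCompoundFile(true);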
Example 1: AddIndexes
//......... part of the code omitted here .........
{
if (segmentInfos.Count == 1)
{
// add existing index, if any
sReader = SegmentReader.Get(segmentInfos.Info(0));
merger.Add(sReader);
}
}
for (int i = 0; i < readers.Length; i++) // add new indexes
merger.Add(readers[i]);
bool success = false;
StartTransaction();
try
{
int docCount = merger.Merge(); // merge 'em
if (sReader != null)
{
sReader.Close();
sReader = null;
}
lock (this)
{
segmentInfos.RemoveRange(0, segmentInfos.Count); // pop old infos & add new
info = new SegmentInfo(mergedName, docCount, directory, false, true, -1, null, false);
segmentInfos.Add(info);
}
success = true;
}
finally
{
if (!success)
{
if (infoStream != null)
Message("hit exception in addIndexes during merge");
RollbackTransaction();
}
else
{
CommitTransaction();
}
}
}
finally
{
if (sReader != null)
{
sReader.Close();
}
}
if (mergePolicy is LogMergePolicy && GetUseCompoundFile())
{
bool success = false;
StartTransaction();
try
{
merger.CreateCompoundFile(mergedName + ".cfs");
lock (this)
{
info.SetUseCompoundFile(true);
}
success = true;
}
finally
{
if (!success)
{
if (infoStream != null)
Message("hit exception building compound file in addIndexes during merge");
RollbackTransaction();
}
else
{
CommitTransaction();
}
}
}
}
catch (OutOfMemoryException oom)
{
hitOOM = true;
throw oom;
}
finally
{
docWriter.ResumeAllThreads();
}
}
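This example wraps the compound-file step in a StartTransaction/CommitTransaction pair: SetUseCompoundFile(true) is only reached after CreateCompoundFile has succeeded, so if building the .cfs file throws, RollbackTransaction leaves the segment in its original, non-compound form. The same pattern appears in the two examples below.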
Example 2: AddIndexes
//......... part of the code omitted here .........
SetDiagnostics(info, "addIndexes(IndexReader[])");
segmentInfos.Add(info);
}
// Notify DocumentsWriter that the flushed count just increased
docWriter.UpdateFlushedDocCount(docCount);
success = true;
}
finally
{
if (sReader != null)
{
readerPool.Release(sReader);
}
}
}
finally
{
if (!success)
{
if (infoStream != null)
Message("hit exception in addIndexes during merge");
RollbackTransaction();
}
else
{
CommitTransaction();
}
}
if (mergePolicy is LogMergePolicy && GetUseCompoundFile())
{
System.Collections.Generic.IList<string> files = null;
lock (this)
{
// Must incRef our files so that if another thread
// is running merge/optimize, it doesn't delete our
// segment's files before we have a chance to
// finish making the compound file.
if (segmentInfos.Contains(info))
{
files = info.Files();
deleter.IncRef(files);
}
}
if (files != null)
{
success = false;
StartTransaction(false);
try
{
merger.CreateCompoundFile(mergedName + ".cfs");
lock (this)
{
info.SetUseCompoundFile(true);
}
success = true;
}
finally
{
lock (this)
{
deleter.DecRef(files);
}
if (!success)
{
if (infoStream != null)
Message("hit exception building compound file in addIndexes during merge");
RollbackTransaction();
}
else
{
CommitTransaction();
}
}
}
}
}
catch (System.OutOfMemoryException oom)
{
HandleOOM(oom, "addIndexes(IndexReader[])");
}
finally
{
if (docWriter != null)
{
docWriter.ResumeAllThreads();
}
}
}
Example 3: AddIndexes
/// <summary> Merges the provided indexes into this index.
/// <p>
/// After this completes, the index is optimized.
/// </p>
/// <p>
/// The provided IndexReaders are not closed.
/// </p>
///
/// <p>
/// See {@link #AddIndexes(Directory[])} for details on transactional
/// semantics, temporary free space required in the Directory, and non-CFS
/// segments on an Exception.
/// </p>
/// </summary>
public virtual void AddIndexes(IndexReader[] readers)
{
lock (this)
{
Optimize(); // start with zero or 1 seg
System.String mergedName = NewSegmentName();
SegmentMerger merger = new SegmentMerger(this, mergedName);
System.Collections.ArrayList segmentsToDelete = System.Collections.ArrayList.Synchronized(new System.Collections.ArrayList(10));
IndexReader sReader = null;
if (segmentInfos.Count == 1)
{
// add existing index, if any
sReader = SegmentReader.Get(segmentInfos.Info(0));
merger.Add(sReader);
segmentsToDelete.Add(sReader); // queue segment for deletion
}
for (int i = 0; i < readers.Length; i++) // add new indexes
merger.Add(readers[i]);
SegmentInfo info;
System.String segmentsInfosFileName = segmentInfos.GetCurrentSegmentFileName();
bool success = false;
StartTransaction();
try
{
int docCount = merger.Merge(); // merge 'em
segmentInfos.RemoveRange(0, segmentInfos.Count); // pop old infos & add new
info = new SegmentInfo(mergedName, docCount, directory, false, true);
segmentInfos.Add(info);
commitPending = true;
if (sReader != null)
sReader.Close();
success = true;
}
finally
{
if (!success)
{
RollbackTransaction();
}
else
{
CommitTransaction();
}
}
deleter.DeleteFile(segmentsInfosFileName); // delete old segments_N file
deleter.DeleteSegments(segmentsToDelete); // delete now-unused segments
if (useCompoundFile)
{
success = false;
segmentsInfosFileName = segmentInfos.GetCurrentSegmentFileName();
System.Collections.ArrayList filesToDelete;
StartTransaction();
try
{
filesToDelete = merger.CreateCompoundFile(mergedName + ".cfs");
info.SetUseCompoundFile(true);
commitPending = true;
success = true;
}
finally
{
if (!success)
{
//......... part of the code omitted here .........
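For context, here is a hedged caller-side sketch of how the method in Example 3 is typically invoked; writer, readerA and readerB are illustrative names (an already-open IndexWriter over the destination directory and IndexReaders over the source indexes), not part of the example above.
// Hedged usage sketch (Lucene.Net 2.x-era API).
// AddIndexes merges the source readers into this index and then optimizes it;
// per the doc comment above, the provided readers are NOT closed by the call.
writer.AddIndexes(new IndexReader[] { readerA, readerB });
readerA.Close();
readerB.Close();
writer.Close();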