This article collects typical usage examples of the C# method Lucene.Net.Index.IndexWriter.AddIndexesNoOptimize. If you are wondering what IndexWriter.AddIndexesNoOptimize does, how to call it, or what real-world usage looks like, the curated code examples below may help. You can also explore further usage examples of the declaring class, Lucene.Net.Index.IndexWriter.
The sections below present 12 code examples of the IndexWriter.AddIndexesNoOptimize method, sorted by popularity by default. You can upvote the examples you find useful; your ratings help the system recommend better C# code examples.
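Before the individual examples, here is a minimal usage sketch, assuming the Lucene.Net 2.9/3.0-era API that the examples below target. AddIndexesNoOptimize copies the segments of one or more source Directory instances into the index managed by the writer without performing a full optimize, so Optimize() is called separately when a fully merged index is wanted. The index paths, the method name MergeIndexes, and the analyzer choice are illustrative placeholders, not taken from any of the examples.

using Lucene.Net.Analysis.Standard;
using Lucene.Net.Index;
using Lucene.Net.Store;

public static void MergeIndexes()
{
    // Hypothetical index locations; replace with real paths.
    Directory source1 = FSDirectory.Open(new System.IO.DirectoryInfo(@"C:\indexes\source1"));
    Directory source2 = FSDirectory.Open(new System.IO.DirectoryInfo(@"C:\indexes\source2"));
    Directory merged = FSDirectory.Open(new System.IO.DirectoryInfo(@"C:\indexes\merged"));
    IndexWriter writer = new IndexWriter(merged,
        new StandardAnalyzer(Lucene.Net.Util.Version.LUCENE_CURRENT),
        true, IndexWriter.MaxFieldLength.LIMITED);
    try
    {
        // Copies the segments of both source indexes into the destination index.
        writer.AddIndexesNoOptimize(new[] { source1, source2 });
        // Optional: merge down to a single segment, as several examples below do.
        writer.Optimize();
    }
    finally
    {
        writer.Close();
        source1.Close();
        source2.Close();
        merged.Close();
    }
}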
Example 1: TestLucene
public virtual void TestLucene()
{
    int num = 100;
    Directory indexA = new MockRAMDirectory();
    Directory indexB = new MockRAMDirectory();
    FillIndex(indexA, 0, num);
    Assert.IsFalse(VerifyIndex(indexA, 0), "Index a is invalid");
    FillIndex(indexB, num, num);
    Assert.IsFalse(VerifyIndex(indexB, num), "Index b is invalid");
    Directory merged = new MockRAMDirectory();
    IndexWriter writer = new IndexWriter(merged, new StandardAnalyzer(Util.Version.LUCENE_CURRENT), true, IndexWriter.MaxFieldLength.LIMITED);
    writer.MergeFactor = 2;
    writer.AddIndexesNoOptimize(new []{indexA, indexB});
    writer.Optimize();
    writer.Close();
    var fail = VerifyIndex(merged, 0);
    merged.Close();
    Assert.IsFalse(fail, "The merged index is invalid");
}
Example 2: TestNorms
public virtual void TestNorms()
{
    // tmp dir
    System.String tempDir = System.IO.Path.GetTempPath();
    if (tempDir == null)
    {
        throw new System.IO.IOException("java.io.tmpdir undefined, cannot run test");
    }
    // test with a single index: index1
    System.IO.DirectoryInfo indexDir1 = new System.IO.DirectoryInfo(System.IO.Path.Combine(tempDir, "lucenetestindex1"));
    Directory dir1 = FSDirectory.Open(indexDir1);
    IndexWriter.Unlock(dir1);
    norms = new System.Collections.ArrayList();
    modifiedNorms = new System.Collections.ArrayList();
    CreateIndex(dir1);
    DoTestNorms(dir1);
    // test with a single index: index2
    System.Collections.ArrayList norms1 = norms;
    System.Collections.ArrayList modifiedNorms1 = modifiedNorms;
    int numDocNorms1 = numDocNorms;
    norms = new System.Collections.ArrayList();
    modifiedNorms = new System.Collections.ArrayList();
    numDocNorms = 0;
    System.IO.DirectoryInfo indexDir2 = new System.IO.DirectoryInfo(System.IO.Path.Combine(tempDir, "lucenetestindex2"));
    Directory dir2 = FSDirectory.Open(indexDir2);
    CreateIndex(dir2);
    DoTestNorms(dir2);
    // add index1 and index2 to a third index: index3
    System.IO.DirectoryInfo indexDir3 = new System.IO.DirectoryInfo(System.IO.Path.Combine(tempDir, "lucenetestindex3"));
    Directory dir3 = FSDirectory.Open(indexDir3);
    CreateIndex(dir3);
    IndexWriter iw = new IndexWriter(dir3, anlzr, false, IndexWriter.MaxFieldLength.LIMITED);
    iw.SetMaxBufferedDocs(5);
    iw.MergeFactor = 3;
    iw.AddIndexesNoOptimize(new Directory[]{dir1, dir2});
    iw.Optimize();
    iw.Close();
    norms1.AddRange(norms);
    norms = norms1;
    modifiedNorms1.AddRange(modifiedNorms);
    modifiedNorms = modifiedNorms1;
    numDocNorms += numDocNorms1;
    // test with index3
    VerifyIndex(dir3);
    DoTestNorms(dir3);
    // now with optimize
    iw = new IndexWriter(dir3, anlzr, false, IndexWriter.MaxFieldLength.LIMITED);
    iw.SetMaxBufferedDocs(5);
    iw.MergeFactor = 3;
    iw.Optimize();
    iw.Close();
    VerifyIndex(dir3);
    dir1.Close();
    dir2.Close();
    dir3.Close();
}
Example 3: TestAddIndexOnDiskFull
//.........part of the code omitted here.........
System.Console.Out.WriteLine("\ncycle: " + testName);
dir.SetMaxSizeInBytes(thisDiskFree);
dir.SetRandomIOExceptionRate(rate, diskFree);
try
{
    if (0 == method)
    {
        writer2.AddIndexes(dirs);
    }
    else if (1 == method)
    {
        IndexReader[] readers = new IndexReader[dirs.Length];
        for (int i = 0; i < dirs.Length; i++)
        {
            readers[i] = IndexReader.Open(dirs[i]);
        }
        try
        {
            writer2.AddIndexes(readers);
        }
        finally
        {
            for (int i = 0; i < dirs.Length; i++)
            {
                readers[i].Close();
            }
        }
    }
    else
    {
        writer2.AddIndexesNoOptimize(dirs);
    }
    success = true;
    if (debug)
    {
        System.Console.Out.WriteLine(" success!");
    }
    if (0 == x)
    {
        done = true;
    }
}
catch (System.IO.IOException e)
{
    success = false;
    err = e;
    if (debug)
    {
        System.Console.Out.WriteLine(" hit IOException: " + e);
        System.Console.Out.WriteLine(e.StackTrace);
    }
    if (1 == x)
    {
        System.Console.Out.WriteLine(e.StackTrace);
        Assert.Fail(methodName + " hit IOException after disk space was freed up");
    }
}
// Make sure all threads from
// ConcurrentMergeScheduler are done
Example 4: TestAddIndexes2
public virtual void TestAddIndexes2()
{
    bool optimize = false;
    Directory dir1 = new MockRAMDirectory();
    IndexWriter writer = new IndexWriter(dir1, new WhitespaceAnalyzer(), IndexWriter.MaxFieldLength.LIMITED);
    writer.SetInfoStream(infoStream);
    // create a 2nd index
    Directory dir2 = new MockRAMDirectory();
    IndexWriter writer2 = new IndexWriter(dir2, new WhitespaceAnalyzer(), IndexWriter.MaxFieldLength.LIMITED);
    writer2.SetInfoStream(infoStream);
    CreateIndexNoClose(!optimize, "index2", writer2);
    writer2.Close();
    writer.AddIndexesNoOptimize(new Directory[]{dir2});
    writer.AddIndexesNoOptimize(new Directory[]{dir2});
    writer.AddIndexesNoOptimize(new Directory[]{dir2});
    writer.AddIndexesNoOptimize(new Directory[]{dir2});
    writer.AddIndexesNoOptimize(new Directory[]{dir2});
    IndexReader r1 = writer.GetReader();
    Assert.AreEqual(500, r1.MaxDoc);
    r1.Close();
    writer.Close();
    dir1.Close();
}
Example 5: TestAddIndexes
public virtual void TestAddIndexes()
{
    bool optimize = false;
    Directory dir1 = new MockRAMDirectory();
    IndexWriter writer = new IndexWriter(dir1, new WhitespaceAnalyzer(), IndexWriter.MaxFieldLength.LIMITED);
    writer.SetInfoStream(infoStream);
    // create the index
    CreateIndexNoClose(!optimize, "index1", writer);
    writer.Flush(false, true, true);
    // create a 2nd index
    Directory dir2 = new MockRAMDirectory();
    IndexWriter writer2 = new IndexWriter(dir2, new WhitespaceAnalyzer(), IndexWriter.MaxFieldLength.LIMITED);
    writer2.SetInfoStream(infoStream);
    CreateIndexNoClose(!optimize, "index2", writer2);
    writer2.Close();
    IndexReader r0 = writer.GetReader();
    Assert.IsTrue(r0.IsCurrent());
    writer.AddIndexesNoOptimize(new Directory[]{dir2});
    Assert.IsFalse(r0.IsCurrent());
    r0.Close();
    IndexReader r1 = writer.GetReader();
    Assert.IsTrue(r1.IsCurrent());
    writer.Commit();
    Assert.IsFalse(r1.IsCurrent());
    Assert.AreEqual(200, r1.MaxDoc);
    int index2df = r1.DocFreq(new Term("indexname", "index2"));
    Assert.AreEqual(100, index2df);
    // verify the docs are from different indexes
    Document doc5 = r1.Document(5);
    Assert.AreEqual("index1", doc5.Get("indexname"));
    Document doc150 = r1.Document(150);
    Assert.AreEqual("index2", doc150.Get("indexname"));
    r1.Close();
    writer.Close();
    dir1.Close();
}
Example 6: TestHangOnClose
public virtual void TestHangOnClose()
{
    Directory dir = new MockRAMDirectory();
    IndexWriter writer = new IndexWriter(dir, new WhitespaceAnalyzer(), true, IndexWriter.MaxFieldLength.LIMITED);
    writer.SetMergePolicy(new LogByteSizeMergePolicy(writer));
    writer.SetMaxBufferedDocs(5);
    writer.UseCompoundFile = false;
    writer.MergeFactor = 100;
    Document doc = new Document();
    doc.Add(new Field("content", "aaa bbb ccc ddd eee fff ggg hhh iii", Field.Store.YES, Field.Index.ANALYZED, Field.TermVector.WITH_POSITIONS_OFFSETS));
    for (int i = 0; i < 60; i++)
        writer.AddDocument(doc);
    writer.SetMaxBufferedDocs(200);
    Document doc2 = new Document();
    doc2.Add(new Field("content", "aaa bbb ccc ddd eee fff ggg hhh iii", Field.Store.YES, Field.Index.NO));
    doc2.Add(new Field("content", "aaa bbb ccc ddd eee fff ggg hhh iii", Field.Store.YES, Field.Index.NO));
    doc2.Add(new Field("content", "aaa bbb ccc ddd eee fff ggg hhh iii", Field.Store.YES, Field.Index.NO));
    doc2.Add(new Field("content", "aaa bbb ccc ddd eee fff ggg hhh iii", Field.Store.YES, Field.Index.NO));
    for (int i = 0; i < 10; i++)
        writer.AddDocument(doc2);
    writer.Close();
    Directory dir2 = new MockRAMDirectory();
    writer = new IndexWriter(dir2, new WhitespaceAnalyzer(), true, IndexWriter.MaxFieldLength.LIMITED);
    LogByteSizeMergePolicy lmp = new LogByteSizeMergePolicy(writer);
    lmp.MinMergeMB = 0.0001;
    writer.SetMergePolicy(lmp);
    writer.MergeFactor = 4;
    writer.UseCompoundFile = false;
    writer.SetMergeScheduler(new SerialMergeScheduler());
    writer.AddIndexesNoOptimize(new Directory[]{dir});
    writer.Close();
    dir.Close();
    dir2.Close();
}
Example 7: TestDuringAddIndexes_LuceneNet
public virtual void TestDuringAddIndexes_LuceneNet()
{
    MockRAMDirectory dir1 = new MockRAMDirectory();
    IndexWriter writer = new IndexWriter(dir1, new WhitespaceAnalyzer(), IndexWriter.MaxFieldLength.LIMITED);
    writer.SetInfoStream(infoStream);
    writer.SetMergeFactor(2);
    // create the index
    CreateIndexNoClose(false, "test", writer);
    writer.Commit();
    Directory[] dirs = new Directory[10];
    for (int i = 0; i < 10; i++)
    {
        dirs[i] = new MockRAMDirectory(dir1);
    }
    IndexReader r = writer.GetReader();
    int NUM_THREAD = 5;
    float SECONDS = 3;
    long endTime = (long)((DateTime.Now.Ticks / TimeSpan.TicksPerMillisecond) + 1000.0 * SECONDS);
    System.Collections.IList excs = (System.Collections.IList)System.Collections.ArrayList.Synchronized(new System.Collections.ArrayList(new System.Collections.ArrayList()));
    System.Threading.Thread[] threads = new System.Threading.Thread[NUM_THREAD];
    for (int i = 0; i < NUM_THREAD; i++)
    {
        threads[i] = new System.Threading.Thread(() =>
        {
            while ((DateTime.Now.Ticks / TimeSpan.TicksPerMillisecond) < endTime)
            {
                try
                {
                    writer.AddIndexesNoOptimize(dirs);
                }
                catch (System.Exception t)
                {
                    excs.Add(t);
                    throw new System.SystemException("", t);
                }
            }
        });
        threads[i].IsBackground = true;
        threads[i].Start();
    }
    int lastCount = 0;
    while ((DateTime.Now.Ticks / TimeSpan.TicksPerMillisecond) < endTime)
    {
        using (IndexReader r2 = writer.GetReader())
        {
            Query q = new TermQuery(new Term("indexname", "test"));
            int count = new IndexSearcher(r2).Search(q, 10).TotalHits;
            Assert.IsTrue(count >= lastCount);
            lastCount = count;
        }
    }
    for (int i = 0; i < NUM_THREAD; i++)
    {
        threads[i].Join();
    }
    Assert.AreEqual(0, excs.Count);
    r.Close();
    Assert.AreEqual(0, dir1.GetOpenDeletedFiles().Count);
    writer.Close();
    _TestUtil.CheckIndex(dir1);
    dir1.Close();
}
Example 8: TestDuringAddIndexes
public virtual void TestDuringAddIndexes()
{
    MockRAMDirectory dir1 = new MockRAMDirectory();
    IndexWriter writer = new IndexWriter(dir1, new WhitespaceAnalyzer(), IndexWriter.MaxFieldLength.LIMITED);
    writer.SetInfoStream(infoStream);
    writer.SetMergeFactor(2);
    // create the index
    CreateIndexNoClose(false, "test", writer);
    writer.Commit();
    Directory[] dirs = new Directory[10];
    for (int i = 0; i < 10; i++)
    {
        dirs[i] = new MockRAMDirectory(dir1);
    }
    IndexReader r = writer.GetReader();
    int NUM_THREAD = 5;
    float SECONDS = 3;
    long endTime = (long) ((DateTime.Now.Ticks / TimeSpan.TicksPerMillisecond) + 1000.0 * SECONDS);
    System.Collections.IList excs = (System.Collections.IList) System.Collections.ArrayList.Synchronized(new System.Collections.ArrayList(new System.Collections.ArrayList()));
    System.Threading.Thread[] threads = new System.Threading.Thread[NUM_THREAD];
    for (int i = 0; i < NUM_THREAD; i++)
    {
        threads[i] = new System.Threading.Thread(() =>
        {
            while ((DateTime.Now.Ticks / TimeSpan.TicksPerMillisecond) < endTime)
            {
                try
                {
                    writer.AddIndexesNoOptimize(dirs);
                }
                catch (System.Exception t)
                {
                    excs.Add(t);
                    throw new System.SystemException("", t);
                }
            }
        });
        threads[i].IsBackground = true;
        threads[i].Start();
    }
    int lastCount = 0;
    while ((DateTime.Now.Ticks / TimeSpan.TicksPerMillisecond) < endTime)
    {
        IndexReader r2 = r.Reopen();
        if (r2 != r)
        {
            r.Close();
            r = r2;
        }
        Query q = new TermQuery(new Term("indexname", "test"));
        int count = new IndexSearcher(r).Search(q, 10).TotalHits;
        Assert.IsTrue(count >= lastCount);
        lastCount = count;
    }
    for (int i = 0; i < NUM_THREAD; i++)
    {
        threads[i].Join();
    }
    Assert.AreEqual(0, excs.Count);
    r.Close();
    try
    {
        Assert.AreEqual(0, dir1.GetOpenDeletedFiles().Count);
    }
    catch
    {
        // DIGY:
        // I think this is expected behaviour.
        // There aren't any pending files to be deleted after "writer.Close()".
        // But since lucene.java's test case is designed that way,
        // and I might be wrong, I will add a warning.
        Assert.Inconclusive("Is this really a bug?", 0, dir1.GetOpenDeletedFiles().Count);
    }
    writer.Close();
    _TestUtil.CheckIndex(dir1);
    dir1.Close();
}
Example 9: TestHangOnClose
public void TestHangOnClose()
{
    Directory dir = new MockRAMDirectory();
    IndexWriter writer = new IndexWriter(dir, false, new WhitespaceAnalyzer(), true);
    writer.SetMergePolicy(new LogByteSizeMergePolicy());
    writer.SetMaxBufferedDocs(5);
    writer.SetUseCompoundFile(false);
    writer.SetMergeFactor(100);
    Document doc = new Document();
    doc.Add(new Field("content", "aaa bbb ccc ddd eee fff ggg hhh iii", Field.Store.YES, Field.Index.NO));
    for (int i = 0; i < 60; i++)
        writer.AddDocument(doc);
    writer.SetMaxBufferedDocs(200);
    Document doc2 = new Document();
    doc2.Add(new Field("content", "aaa bbb ccc ddd eee fff ggg hhh iii", Field.Store.YES, Field.Index.NO));
    doc2.Add(new Field("content", "aaa bbb ccc ddd eee fff ggg hhh iii", Field.Store.YES, Field.Index.NO));
    doc2.Add(new Field("content", "aaa bbb ccc ddd eee fff ggg hhh iii", Field.Store.YES, Field.Index.NO));
    doc2.Add(new Field("content", "aaa bbb ccc ddd eee fff ggg hhh iii", Field.Store.YES, Field.Index.NO));
    for (int i = 0; i < 10; i++)
        writer.AddDocument(doc2);
    writer.Close();
    Directory dir2 = new MockRAMDirectory();
    writer = new IndexWriter(dir2, false, new WhitespaceAnalyzer(), true);
    LogByteSizeMergePolicy lmp = new LogByteSizeMergePolicy();
    lmp.SetMinMergeMB(0.0001);
    writer.SetMergePolicy(lmp);
    writer.SetMergeFactor(4);
    writer.SetUseCompoundFile(false);
    writer.SetMergeScheduler(new SerialMergeScheduler());
    writer.AddIndexesNoOptimize(new Directory[] { dir });
    writer.Close();
    dir.Close();
    dir2.Close();
}
Example 10: SetUp
public override void SetUp()
{
    base.SetUp();
    RAMDirectory directory = new RAMDirectory();
    IndexWriter writer = new IndexWriter(directory, new WhitespaceAnalyzer(), true, IndexWriter.MaxFieldLength.LIMITED);
    for (int i = 0; i < docFields.Length; i++)
    {
        Document document = new Document();
        document.Add(new Field(field, docFields[i], Field.Store.NO, Field.Index.ANALYZED));
        writer.AddDocument(document);
    }
    writer.Close();
    searcher = new IndexSearcher(directory, true);
    // Make big index
    dir2 = new MockRAMDirectory(directory);
    // First multiply small test index:
    mulFactor = 1;
    int docCount = 0;
    do
    {
        Directory copy = new RAMDirectory(dir2);
        IndexWriter indexWriter = new IndexWriter(dir2, new WhitespaceAnalyzer(), IndexWriter.MaxFieldLength.UNLIMITED);
        indexWriter.AddIndexesNoOptimize(new[] {copy});
        docCount = indexWriter.MaxDoc();
        indexWriter.Close();
        mulFactor *= 2;
    } while (docCount < 3000);
    IndexWriter w = new IndexWriter(dir2, new WhitespaceAnalyzer(), IndexWriter.MaxFieldLength.UNLIMITED);
    Document doc = new Document();
    doc.Add(new Field("field2", "xxx", Field.Store.NO, Field.Index.ANALYZED));
    for (int i = 0; i < NUM_EXTRA_DOCS / 2; i++)
    {
        w.AddDocument(doc);
    }
    doc = new Document();
    doc.Add(new Field("field2", "big bad bug", Field.Store.NO, Field.Index.ANALYZED));
    for (int i = 0; i < NUM_EXTRA_DOCS / 2; i++)
    {
        w.AddDocument(doc);
    }
    // optimize to 1 segment
    w.Optimize();
    reader = w.GetReader();
    w.Close();
    bigSearcher = new IndexSearcher(reader);
}
Example 11: MoveIndexFiles
private static void MoveIndexFiles(string impDomain, string impUser, string impPass, string destIndexPath, string tempIndexPath, Analyzer analyzer)
{
    Directory[] readers = new Directory[1];
    IndexWriter writer = null;
    Directory finalIndexDir = FSDirectory.Open(new System.IO.DirectoryInfo(destIndexPath));
    System.IO.DirectoryInfo tempIndexDir = new System.IO.DirectoryInfo(tempIndexPath);
    try
    {
        if (IndexWriter.IsLocked(finalIndexDir)) IndexWriter.Unlock(finalIndexDir);
        // re-generate the index
        writer = new IndexWriter(finalIndexDir, analyzer, true, new IndexWriter.MaxFieldLength(2500000));
        readers[0] = FSDirectory.Open(tempIndexDir);
        writer.AddIndexesNoOptimize(readers);
        // optimize and close
        if (writer != null)
        {
            try
            {
                writer.Optimize();
            }
            catch { }
        }
        if (writer != null)
        {
            try
            {
                writer.Close();
            }
            catch { }
        }
    }
    catch (Exception)
    {
        if (writer != null)
        {
            writer.Optimize();
            writer.Commit();
            writer.Close();
        }
        // rethrow with "throw;" rather than "throw ex;" so the original stack trace is preserved
        throw;
    }
}
Example 12: CreateIndexAsync
//.........part of the code omitted here.........
            int bytesUsed = 0;
            int charsUsed = 0;
            bool completed = false;
            // Convert the text to UTF8
            utf8.Convert(blockBuf, 0, (int)loadedLength, charBuf, 0, charBuf.Length, i == totalBlocks - 1, out bytesUsed, out charsUsed, out completed);
            if (!completed)
            {
                throw new Exception(Properties.Resources.UTFDecoderError);
            }
            // Construct a current string
            sb.Length = 0;
            if (charCarryOver.Length > 0)
            {
                sb.Append(charCarryOver);
            }
            sb.Append(charBuf, 0, charsUsed);
            int carryOverLength = charCarryOver.Length;
            int charsMatched = IndexString(sb.ToString(), beginnings[i], ends[i], carryOverLength, i == totalBlocks - 1);
            // There's a Wiki topic carryover, let's store the characters which need to be carried over
            if (charsMatched > 0)
            {
                charCarryOver = new char[charsMatched];
                sb.CopyTo(charsUsed + carryOverLength - charsMatched, charCarryOver, 0, charsMatched);
            }
            else
            {
                charCarryOver = new char[0];
            }
            #endregion
        }
        // Wait till all the threads finish
        while (activeThreads != 0)
        {
            ReportProgress(0, IndexingProgress.State.Running, String.Format(Properties.Resources.WaitingForTokenizers, activeThreads));
            Thread.Sleep(TimeSpan.FromSeconds(5));
        }
        ReportProgress(0, IndexingProgress.State.Running, Properties.Resources.FlushingDocumentsToDisk);
        Lucene.Net.Store.Directory dir = memoryIndexer.GetDirectory();
        memoryIndexer.Close();
        indexer.AddIndexesNoOptimize(new Lucene.Net.Store.Directory[] { dir });
        memoryIndexer = null;
        ReportProgress(0, IndexingProgress.State.Running, Properties.Resources.OptimizingIndex);
        indexer.Optimize();
        indexExists = true;
    }
    catch (Exception ex)
    {
        ReportProgress(0, IndexingProgress.State.Failure, ex.ToString());
        failed = true;
    }
    // Try to release some memory
    if (indexer != null)
    {
        indexer.Close();
        indexer = null;
    }
    if (failed ||
        abortIndexing)
    {
        Directory.Delete(indexPath, true);
        indexExists = false;
    }
    else
    {
        if (indexExists)
        {
            FSDirectory idxDir = FSDirectory.Open(new DirectoryInfo(indexPath));
            searcher = new IndexSearcher(idxDir, true);
        }
    }
    ReportProgress(0, IndexingProgress.State.Finished, String.Empty);
}