This article collects typical usage examples of the C# method Lucene.Net.Index.IndexWriter.AddIndexes. If you are wondering how to call IndexWriter.AddIndexes in C# and what it looks like in practice, the hand-picked code examples below may help. You can also explore the containing class, Lucene.Net.Index.IndexWriter, in more detail.
This page shows 15 code examples of IndexWriter.AddIndexes, sorted by popularity by default. You can upvote the examples you like or find useful; your feedback helps the system recommend better C# examples.
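Before the examples, here is the basic shape of an AddIndexes call for reference: open the destination index with an IndexWriter and pass it the source Directory instances (or IndexReaders); the segments are copied into the destination without re-analyzing the documents. This is only a minimal sketch assuming the Lucene.Net 4.8 API (IndexWriterConfig, LuceneVersion); the AddIndexesSketch class, the Merge method, and the path parameters are hypothetical, and the calls mirror those used in Example 2 below.

using Lucene.Net.Analysis.Standard;
using Lucene.Net.Index;
using Lucene.Net.Store;
using Lucene.Net.Util;

public static class AddIndexesSketch // hypothetical helper, not from the examples below
{
    // Merges every index found at sourcePaths into the index at targetPath.
    public static void Merge(string targetPath, string[] sourcePaths)
    {
        const LuceneVersion version = LuceneVersion.LUCENE_48;
        using (Directory target = FSDirectory.Open(new System.IO.DirectoryInfo(targetPath)))
        using (IndexWriter writer = new IndexWriter(target,
            new IndexWriterConfig(version, new StandardAnalyzer(version))))
        {
            Directory[] sources = new Directory[sourcePaths.Length];
            for (int i = 0; i < sourcePaths.Length; i++)
            {
                sources[i] = FSDirectory.Open(new System.IO.DirectoryInfo(sourcePaths[i]));
            }

            // Copy the source segments into the target index.
            writer.AddIndexes(sources);
            writer.Commit();

            foreach (Directory source in sources)
            {
                source.Dispose();
            }
        }
    }
}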
Example 1: TestLucene
public virtual void TestLucene()
{
    int num = 100;
    Directory indexA = new MockRAMDirectory();
    Directory indexB = new MockRAMDirectory();
    FillIndex(indexA, 0, num);
    bool fail = VerifyIndex(indexA, 0);
    if (fail)
    {
        Assert.Fail("Index a is invalid");
    }
    FillIndex(indexB, num, num);
    fail = VerifyIndex(indexB, num);
    if (fail)
    {
        Assert.Fail("Index b is invalid");
    }
    Directory merged = new MockRAMDirectory();
    IndexWriter writer = new IndexWriter(merged, new StandardAnalyzer(), true, IndexWriter.MaxFieldLength.LIMITED);
    writer.SetMergeFactor(2);
    writer.AddIndexes(new Directory[] { indexA, indexB });
    writer.Close();
    fail = VerifyIndex(merged, 0);
    merged.Close();
    Assert.IsFalse(fail, "The merged index is invalid");
}
Example 2: Main
public static void Main(string[] args)
{
    if (args.Length < 3)
    {
        Console.Error.WriteLine("Usage: IndexMergeTool <mergedIndex> <index1> <index2> [index3] ...");
        Environment.Exit(1);
    }
    FSDirectory mergedIndex = FSDirectory.Open(new System.IO.DirectoryInfo(args[0]));
#pragma warning disable 612, 618
    using (IndexWriter writer = new IndexWriter(mergedIndex, new IndexWriterConfig(LuceneVersion.LUCENE_CURRENT, null)
        .SetOpenMode(IndexWriterConfig.OpenMode_e.CREATE)))
#pragma warning restore 612, 618
    {
        Directory[] indexes = new Directory[args.Length - 1];
        for (int i = 1; i < args.Length; i++)
        {
            indexes[i - 1] = FSDirectory.Open(new System.IO.DirectoryInfo(args[i]));
        }
        Console.WriteLine("Merging...");
        writer.AddIndexes(indexes);
        Console.WriteLine("Full merge...");
        writer.ForceMerge(1);
    }
    Console.WriteLine("Done.");
}
Example 3: TestLucene
public virtual void TestLucene()
{
    int num = 100;
    Directory indexA = NewDirectory();
    Directory indexB = NewDirectory();
    FillIndex(Random(), indexA, 0, num);
    bool fail = VerifyIndex(indexA, 0);
    if (fail)
    {
        Assert.Fail("Index a is invalid");
    }
    FillIndex(Random(), indexB, num, num);
    fail = VerifyIndex(indexB, num);
    if (fail)
    {
        Assert.Fail("Index b is invalid");
    }
    Directory merged = NewDirectory();
    IndexWriter writer = new IndexWriter(merged, NewIndexWriterConfig(TEST_VERSION_CURRENT, new MockAnalyzer(Random())).SetMergePolicy(NewLogMergePolicy(2)));
    writer.AddIndexes(indexA, indexB);
    writer.ForceMerge(1);
    writer.Dispose();
    fail = VerifyIndex(merged, 0);
    Assert.IsFalse(fail, "The merged index is invalid");
    indexA.Dispose();
    indexB.Dispose();
    merged.Dispose();
}
Example 4: hb
public void hb()
{
    Directory d = FSDirectory.GetDirectory(this.textBox1.Text, false);
    Directory directory = FSDirectory.GetDirectory(this.textBox2.Text, false);
    IndexWriter writer = new IndexWriter(d, new KTDictSegAnalyzer(), false);
    writer.AddIndexes(new Directory[] { directory });
    writer.Close();
}
Example 5: TestFilterIndexReader
public virtual void TestFilterIndexReader()
{
    Directory directory = NewDirectory();
    IndexWriter writer = new IndexWriter(directory, NewIndexWriterConfig(TEST_VERSION_CURRENT, new MockAnalyzer(Random())));
    Document d1 = new Document();
    d1.Add(NewTextField("default", "one two", Field.Store.YES));
    writer.AddDocument(d1);
    Document d2 = new Document();
    d2.Add(NewTextField("default", "one three", Field.Store.YES));
    writer.AddDocument(d2);
    Document d3 = new Document();
    d3.Add(NewTextField("default", "two four", Field.Store.YES));
    writer.AddDocument(d3);
    writer.Dispose();
    Directory target = NewDirectory();
    // We mess with the postings so this can fail:
    ((BaseDirectoryWrapper)target).CrossCheckTermVectorsOnClose = false;
    writer = new IndexWriter(target, NewIndexWriterConfig(TEST_VERSION_CURRENT, new MockAnalyzer(Random())));
    IndexReader reader = new TestReader(DirectoryReader.Open(directory));
    writer.AddIndexes(reader);
    writer.Dispose();
    reader.Dispose();
    reader = DirectoryReader.Open(target);
    TermsEnum terms = MultiFields.GetTerms(reader, "default").Iterator(null);
    while (terms.Next() != null)
    {
        Assert.IsTrue(terms.Term().Utf8ToString().IndexOf('e') != -1);
    }
    Assert.AreEqual(TermsEnum.SeekStatus.FOUND, terms.SeekCeil(new BytesRef("one")));
    DocsAndPositionsEnum positions = terms.DocsAndPositions(MultiFields.GetLiveDocs(reader), null);
    while (positions.NextDoc() != DocIdSetIterator.NO_MORE_DOCS)
    {
        Assert.IsTrue((positions.DocID() % 2) == 1);
    }
    reader.Dispose();
    directory.Dispose();
    target.Dispose();
}
Example 6: TestEmptyIndex
public virtual void TestEmptyIndex()
{
    Directory rd1 = NewDirectory();
    IndexWriter iw = new IndexWriter(rd1, NewIndexWriterConfig(TEST_VERSION_CURRENT, new MockAnalyzer(Random())));
    iw.Dispose();
    // create a copy:
    Directory rd2 = NewDirectory(rd1);
    Directory rdOut = NewDirectory();
    IndexWriter iwOut = new IndexWriter(rdOut, NewIndexWriterConfig(TEST_VERSION_CURRENT, new MockAnalyzer(Random())));
    ParallelAtomicReader apr = new ParallelAtomicReader(SlowCompositeReaderWrapper.Wrap(DirectoryReader.Open(rd1)), SlowCompositeReaderWrapper.Wrap(DirectoryReader.Open(rd2)));
    // When unpatched, Lucene crashes here with a NoSuchElementException (caused by ParallelTermEnum)
    iwOut.AddIndexes(apr);
    iwOut.ForceMerge(1);
    // 2nd try with a readerless parallel reader
    iwOut.AddIndexes(new ParallelAtomicReader());
    iwOut.ForceMerge(1);
    ParallelCompositeReader cpr = new ParallelCompositeReader(DirectoryReader.Open(rd1), DirectoryReader.Open(rd2));
    // When unpatched, Lucene crashes here with a NoSuchElementException (caused by ParallelTermEnum)
    iwOut.AddIndexes(cpr);
    iwOut.ForceMerge(1);
    // 2nd try with a readerless parallel reader
    iwOut.AddIndexes(new ParallelCompositeReader());
    iwOut.ForceMerge(1);
    iwOut.Dispose();
    rdOut.Dispose();
    rd1.Dispose();
    rd2.Dispose();
}
Example 7: button1_Click
private void button1_Click(object sender, EventArgs e)
{
    //FilterData.PrepareCharMap();
    int total = this.databaseDataSet.trans.Count;
    int counter = 1;
    string fsPath = indexpath;
    if (!System.IO.Directory.Exists(fsPath)) System.IO.Directory.CreateDirectory(fsPath);
    if (IndexReader.IndexExists(fsPath)) return;
    RAMDirectory dir = new RAMDirectory();
    IndexWriter ramWriter = new IndexWriter(dir, new DiacriticAnalyzer(FilterData.stopWords), true);
    IndexWriter fsWriter = new IndexWriter(fsPath, new DiacriticAnalyzer(FilterData.stopWords), !IndexReader.IndexExists(fsPath));
    ramWriter.SetUseCompoundFile(false);
    fsWriter.SetUseCompoundFile(false);
    foreach (DataRow row in this.databaseDataSet.trans.Rows)
    {
        Document doc = new Document();
        string pid = row[this.databaseDataSet.trans.pidColumn].ToString();
        string sid = row[this.databaseDataSet.trans.sidColumn].ToString();
        string ayatno = row[this.databaseDataSet.trans.ayatnoColumn].ToString();
        string arabic = row[this.databaseDataSet.trans.ayat_arabicColumn].ToString();
        string urdu = row[this.databaseDataSet.trans.ayat_urduColumn].ToString();
        string english = row[this.databaseDataSet.trans.ayat_descColumn].ToString();
        doc.Add(Field.Keyword("pid", pid));
        doc.Add(Field.Keyword("sid", sid));
        doc.Add(Field.Keyword("ayatno", ayatno));
        doc.Add(Field.Text("ayat_desc", english));
        doc.Add(Field.Text("ayat_arabic", arabic));
        doc.Add(Field.Text("ayat_urdu", urdu));
        doc.Add(Field.Text("contents", arabic + Environment.NewLine + urdu + Environment.NewLine + english));
        ramWriter.AddDocument(doc);
        int percent = counter * 100 / total;
        this.progressBar1.Value = percent;
        label1.Text = percent.ToString() + "%";
        counter++;
        Application.DoEvents();
    }
    ramWriter.Optimize();
    fsWriter.AddIndexes(new Lucene.Net.Store.Directory[] { dir });
    ramWriter.Close();
    fsWriter.Close();
    MessageBox.Show("Done Indexing!");
}
Example 8: TestAddIndexes
public virtual void TestAddIndexes()
{
    Directory dir1 = NewDirectory();
    Directory dir2 = NewDirectory();
    IndexWriter writer = new IndexWriter(dir1, NewIndexWriterConfig(TEST_VERSION_CURRENT, new MockAnalyzer(Random())).SetMergePolicy(NoMergePolicy.COMPOUND_FILES));
    Document d1 = new Document();
    d1.Add(new TextField("f1", "first field", Field.Store.YES));
    d1.Add(new TextField("f2", "second field", Field.Store.YES));
    writer.AddDocument(d1);
    writer.Dispose();
    writer = new IndexWriter(dir2, NewIndexWriterConfig(TEST_VERSION_CURRENT, new MockAnalyzer(Random())).SetMergePolicy(NoMergePolicy.COMPOUND_FILES));
    Document d2 = new Document();
    FieldType customType2 = new FieldType(TextField.TYPE_STORED);
    customType2.StoreTermVectors = true;
    d2.Add(new TextField("f2", "second field", Field.Store.YES));
    d2.Add(new Field("f1", "first field", customType2));
    d2.Add(new TextField("f3", "third field", Field.Store.YES));
    d2.Add(new TextField("f4", "fourth field", Field.Store.YES));
    writer.AddDocument(d2);
    writer.Dispose();
    writer = new IndexWriter(dir1, NewIndexWriterConfig(TEST_VERSION_CURRENT, new MockAnalyzer(Random())).SetMergePolicy(NoMergePolicy.COMPOUND_FILES));
    writer.AddIndexes(dir2);
    writer.Dispose();
    SegmentInfos sis = new SegmentInfos();
    sis.Read(dir1);
    Assert.AreEqual(2, sis.Size());
    FieldInfos fis1 = SegmentReader.ReadFieldInfos(sis.Info(0));
    FieldInfos fis2 = SegmentReader.ReadFieldInfos(sis.Info(1));
    Assert.AreEqual("f1", fis1.FieldInfo(0).Name);
    Assert.AreEqual("f2", fis1.FieldInfo(1).Name);
    // make sure the ordering of the "external" segment is preserved
    Assert.AreEqual("f2", fis2.FieldInfo(0).Name);
    Assert.AreEqual("f1", fis2.FieldInfo(1).Name);
    Assert.AreEqual("f3", fis2.FieldInfo(2).Name);
    Assert.AreEqual("f4", fis2.FieldInfo(3).Name);
    dir1.Dispose();
    dir2.Dispose();
}
Example 9: Syncronize
public static void Syncronize(DirectoryInfo sessionDirectory, IDocumentFileNameProvider fileNameProvider)
{
    using (Mutex)
    {
        using (var fsDirectory = FSDirectory.Open(fileNameProvider.GetLuceneDirectory()))
        {
            using (var writer = new IndexWriter(fsDirectory, Analyser, IndexWriter.MaxFieldLength.UNLIMITED))
            {
                using (var readerDirectory = FSDirectory.Open(sessionDirectory))
                {
                    using (var reader = IndexReader.Open(readerDirectory, true))
                    {
                        writer.AddIndexes(reader);
                    }
                }
            }
        }
    }
}
Example 10: Act
public override void Act()
{
    var indexDirectory = Path.Combine(CoreConfiguration.DataDirectory, "MergedIndexes");
    var directoryInfo = new DirectoryInfo(indexDirectory);
    if (directoryInfo.Exists)
    {
        directoryInfo.Delete(true);
        directoryInfo.Refresh();
    }
    var mergedDirectory = new SimpleFSDirectory(directoryInfo);
    var mergedIndex = new IndexWriter(mergedDirectory, new SimpleAnalyzer(), true,
        IndexWriter.MaxFieldLength.UNLIMITED);
    var directoryFactory = AutofacContainer.Resolve<IDirectoryFactory>();
    mergedIndex.AddIndexes(directoryFactory.GetAllDirectories().Select(d => IndexReader.Open(d, true)).ToArray());
    mergedIndex.Commit();
}
Example 11: TestNorms_Renamed
public virtual void TestNorms_Renamed()
{
    // tmp dir
    System.String tempDir = System.IO.Path.GetTempPath();
    if (tempDir == null)
    {
        throw new System.IO.IOException("java.io.tmpdir undefined, cannot run test");
    }
    // test with a single index: index1
    System.IO.FileInfo indexDir1 = new System.IO.FileInfo(System.IO.Path.Combine(tempDir, "lucenetestindex1"));
    Directory dir1 = FSDirectory.Open(indexDir1);
    norms = new System.Collections.ArrayList();
    modifiedNorms = new System.Collections.ArrayList();
    CreateIndex(dir1);
    DoTestNorms(dir1);
    // test with a single index: index2
    System.Collections.ArrayList norms1 = norms;
    System.Collections.ArrayList modifiedNorms1 = modifiedNorms;
    int numDocNorms1 = numDocNorms;
    norms = new System.Collections.ArrayList();
    modifiedNorms = new System.Collections.ArrayList();
    numDocNorms = 0;
    System.IO.FileInfo indexDir2 = new System.IO.FileInfo(System.IO.Path.Combine(tempDir, "lucenetestindex2"));
    Directory dir2 = FSDirectory.Open(indexDir2);
    CreateIndex(dir2);
    DoTestNorms(dir2);
    // add index1 and index2 to a third index: index3
    System.IO.FileInfo indexDir3 = new System.IO.FileInfo(System.IO.Path.Combine(tempDir, "lucenetestindex3"));
    Directory dir3 = FSDirectory.Open(indexDir3);
    CreateIndex(dir3);
    IndexWriter iw = new IndexWriter(dir3, anlzr, false, IndexWriter.MaxFieldLength.LIMITED);
    iw.SetMaxBufferedDocs(5);
    iw.SetMergeFactor(3);
    iw.AddIndexes(new Directory[] { dir1, dir2 });
    iw.Close();
    norms1.AddRange(norms);
    norms = norms1;
    modifiedNorms1.AddRange(modifiedNorms);
    modifiedNorms = modifiedNorms1;
    numDocNorms += numDocNorms1;
    // test with index3
    VerifyIndex(dir3);
    DoTestNorms(dir3);
    // now with optimize
    iw = new IndexWriter(dir3, anlzr, false, IndexWriter.MaxFieldLength.LIMITED);
    iw.SetMaxBufferedDocs(5);
    iw.SetMergeFactor(3);
    iw.Optimize();
    iw.Close();
    VerifyIndex(dir3);
    dir1.Close();
    dir2.Close();
    dir3.Close();
}
Example 12: TestAddIndexes
public virtual void TestAddIndexes()
{
    Directory dir1 = NewDirectory();
    IndexWriterConfig conf = NewIndexWriterConfig(TEST_VERSION_CURRENT, new MockAnalyzer(Random()));
    IndexWriter writer = new IndexWriter(dir1, conf);
    int numDocs = AtLeast(50);
    int numTerms = TestUtil.NextInt(Random(), 1, numDocs / 5);
    HashSet<string> randomTerms = new HashSet<string>();
    while (randomTerms.Count < numTerms)
    {
        randomTerms.Add(TestUtil.RandomSimpleString(Random()));
    }
    // create first index
    for (int i = 0; i < numDocs; i++)
    {
        Document doc = new Document();
        doc.Add(new StringField("id", RandomInts.RandomFrom(Random(), randomTerms), Store.NO));
        doc.Add(new NumericDocValuesField("ndv", 4L));
        doc.Add(new NumericDocValuesField("control", 8L));
        writer.AddDocument(doc);
    }
    if (Random().NextBoolean())
    {
        writer.Commit();
    }
    // update some docs to a random value
    long value = Random().Next();
    Term term = new Term("id", RandomInts.RandomFrom(Random(), randomTerms));
    writer.UpdateNumericDocValue(term, "ndv", value);
    writer.UpdateNumericDocValue(term, "control", value * 2);
    writer.Dispose();
    Directory dir2 = NewDirectory();
    conf = NewIndexWriterConfig(TEST_VERSION_CURRENT, new MockAnalyzer(Random()));
    writer = new IndexWriter(dir2, conf);
    if (Random().NextBoolean())
    {
        writer.AddIndexes(dir1);
    }
    else
    {
        DirectoryReader reader = DirectoryReader.Open(dir1);
        writer.AddIndexes(reader);
        reader.Dispose();
    }
    writer.Dispose();
    DirectoryReader reader_ = DirectoryReader.Open(dir2);
    foreach (AtomicReaderContext context in reader_.Leaves)
    {
        AtomicReader r = context.AtomicReader;
        NumericDocValues ndv = r.GetNumericDocValues("ndv");
        NumericDocValues control = r.GetNumericDocValues("control");
        for (int i = 0; i < r.MaxDoc; i++)
        {
            Assert.AreEqual(ndv.Get(i) * 2, control.Get(i));
        }
    }
    reader_.Dispose();
    IOUtils.Close(dir1, dir2);
}
Example 13: TestAddIndexes2
public virtual void TestAddIndexes2()
{
    bool doFullMerge = false;
    Directory dir1 = GetAssertNoDeletesDirectory(NewDirectory());
    IndexWriter writer = new IndexWriter(dir1, NewIndexWriterConfig(TEST_VERSION_CURRENT, new MockAnalyzer(Random())));
    // create a 2nd index
    Directory dir2 = NewDirectory();
    IndexWriter writer2 = new IndexWriter(dir2, NewIndexWriterConfig(TEST_VERSION_CURRENT, new MockAnalyzer(Random())));
    CreateIndexNoClose(!doFullMerge, "index2", writer2);
    writer2.Dispose();
    writer.AddIndexes(dir2);
    writer.AddIndexes(dir2);
    writer.AddIndexes(dir2);
    writer.AddIndexes(dir2);
    writer.AddIndexes(dir2);
    IndexReader r1 = writer.Reader;
    Assert.AreEqual(500, r1.MaxDoc());
    r1.Dispose();
    writer.Dispose();
    dir1.Dispose();
    dir2.Dispose();
}
Example 14: TestHangOnClose
public virtual void TestHangOnClose()
{
    Directory dir = NewDirectory();
    LogByteSizeMergePolicy lmp = new LogByteSizeMergePolicy();
    lmp.NoCFSRatio = 0.0;
    lmp.MergeFactor = 100;
    IndexWriter writer = new IndexWriter(dir, NewIndexWriterConfig(TEST_VERSION_CURRENT, new MockAnalyzer(Random())).SetMaxBufferedDocs(5).SetMergePolicy(lmp));
    Document doc = new Document();
    FieldType customType = new FieldType(TextField.TYPE_STORED);
    customType.StoreTermVectors = true;
    customType.StoreTermVectorPositions = true;
    customType.StoreTermVectorOffsets = true;
    doc.Add(NewField("content", "aaa bbb ccc ddd eee fff ggg hhh iii", customType));
    for (int i = 0; i < 60; i++)
    {
        writer.AddDocument(doc);
    }
    Document doc2 = new Document();
    FieldType customType2 = new FieldType();
    customType2.Stored = true;
    doc2.Add(NewField("content", "aaa bbb ccc ddd eee fff ggg hhh iii", customType2));
    doc2.Add(NewField("content", "aaa bbb ccc ddd eee fff ggg hhh iii", customType2));
    doc2.Add(NewField("content", "aaa bbb ccc ddd eee fff ggg hhh iii", customType2));
    doc2.Add(NewField("content", "aaa bbb ccc ddd eee fff ggg hhh iii", customType2));
    for (int i = 0; i < 10; i++)
    {
        writer.AddDocument(doc2);
    }
    writer.Dispose();
    Directory dir2 = NewDirectory();
    lmp = new LogByteSizeMergePolicy();
    lmp.MinMergeMB = 0.0001;
    lmp.NoCFSRatio = 0.0;
    lmp.MergeFactor = 4;
    writer = new IndexWriter(dir2, NewIndexWriterConfig(TEST_VERSION_CURRENT, new MockAnalyzer(Random())).SetMergeScheduler(new SerialMergeScheduler()).SetMergePolicy(lmp));
    writer.AddIndexes(dir);
    writer.Dispose();
    dir.Dispose();
    dir2.Dispose();
}
Example 15: TestMoreMerges
public virtual void TestMoreMerges()
{
    // main directory
    Directory dir = NewDirectory();
    // auxiliary directory
    Directory aux = NewDirectory();
    Directory aux2 = NewDirectory();
    SetUpDirs(dir, aux, true);
    IndexWriter writer = NewWriter(aux2, NewIndexWriterConfig(TEST_VERSION_CURRENT, new MockAnalyzer(Random())).SetOpenMode(OpenMode_e.CREATE).SetMaxBufferedDocs(100).SetMergePolicy(NewLogMergePolicy(10)));
    writer.AddIndexes(aux);
    Assert.AreEqual(30, writer.MaxDoc);
    Assert.AreEqual(3, writer.SegmentCount);
    writer.Dispose();
    IndexWriterConfig dontMergeConfig = (new IndexWriterConfig(TEST_VERSION_CURRENT, new MockAnalyzer(Random()))).SetMergePolicy(NoMergePolicy.COMPOUND_FILES);
    writer = new IndexWriter(aux, dontMergeConfig);
    for (int i = 0; i < 27; i++)
    {
        writer.DeleteDocuments(new Term("id", "" + i));
    }
    writer.Dispose();
    IndexReader reader = DirectoryReader.Open(aux);
    Assert.AreEqual(3, reader.NumDocs);
    reader.Dispose();
    dontMergeConfig = (new IndexWriterConfig(TEST_VERSION_CURRENT, new MockAnalyzer(Random()))).SetMergePolicy(NoMergePolicy.COMPOUND_FILES);
    writer = new IndexWriter(aux2, dontMergeConfig);
    for (int i = 0; i < 8; i++)
    {
        writer.DeleteDocuments(new Term("id", "" + i));
    }
    writer.Dispose();
    reader = DirectoryReader.Open(aux2);
    Assert.AreEqual(22, reader.NumDocs);
    reader.Dispose();
    writer = NewWriter(dir, NewIndexWriterConfig(TEST_VERSION_CURRENT, new MockAnalyzer(Random())).SetOpenMode(OpenMode_e.APPEND).SetMaxBufferedDocs(6).SetMergePolicy(NewLogMergePolicy(4)));
    writer.AddIndexes(aux, aux2);
    Assert.AreEqual(1040, writer.MaxDoc);
    Assert.AreEqual(1000, writer.GetDocCount(0));
    writer.Dispose();
    dir.Dispose();
    aux.Dispose();
    aux2.Dispose();
}