

C# IndexWriter.ForceMerge Method Code Examples

This article collects typical usage examples of the C# Lucene.Net.Index.IndexWriter.ForceMerge method. If you are wondering how IndexWriter.ForceMerge is used in practice, what it is for, or what real calls look like, the hand-picked examples below may help. You can also explore further usage examples of the containing class, Lucene.Net.Index.IndexWriter.


The sections below present 15 code examples of the IndexWriter.ForceMerge method, sorted by popularity by default. You can upvote the examples you like or find useful; your feedback helps the site recommend better C# code examples.
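Before the examples, here is a minimal, self-contained sketch of a typical ForceMerge call: add documents with periodic commits (which tends to leave several segments on disk), then merge everything down to a single segment before closing the writer. This sketch is not taken from the examples below; it assumes a Lucene.Net 4.8-style API (LuceneVersion.LUCENE_48, StandardAnalyzer, RAMDirectory), and exact member names can vary slightly between releases.

using Lucene.Net.Analysis.Standard;
using Lucene.Net.Documents;
using Lucene.Net.Index;
using Lucene.Net.Store;
using Lucene.Net.Util;

public static class ForceMergeSketch
{
    public static void Run()
    {
        // Assumption: Lucene.Net 4.8-style API; type and enum names may differ between releases.
        using (Directory dir = new RAMDirectory())
        using (IndexWriter writer = new IndexWriter(dir,
            new IndexWriterConfig(LuceneVersion.LUCENE_48, new StandardAnalyzer(LuceneVersion.LUCENE_48))))
        {
            for (int i = 0; i < 1000; i++)
            {
                Document doc = new Document();
                doc.Add(new StringField("id", i.ToString(), Field.Store.YES));
                writer.AddDocument(doc);
                if (i % 250 == 249)
                {
                    writer.Commit(); // each commit can leave another segment behind
                }
            }

            writer.ForceMerge(1); // merge all segments into one; blocks until the merge finishes
            writer.Commit();
        }
    }
}

ForceMerge is an expensive, I/O-heavy operation and is usually reserved for indexes that will no longer change; as the examples below show, it is most often called in tests to guarantee a single-segment index before making assertions.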

Example 1: TestDateCompression

        public virtual void TestDateCompression()
        {
            Directory dir = new RAMDirectory();
            IndexWriterConfig iwc = new IndexWriterConfig(TEST_VERSION_CURRENT, new MockAnalyzer(Random()));
            IndexWriter iwriter = new IndexWriter(dir, iwc);

            const long @base = 13; // prime
            long day = 1000L * 60 * 60 * 24;

            Document doc = new Document();
            NumericDocValuesField dvf = new NumericDocValuesField("dv", 0);
            doc.Add(dvf);
            for (int i = 0; i < 300; ++i)
            {
                dvf.LongValue = @base + Random().Next(1000) * day;
                iwriter.AddDocument(doc);
            }
            iwriter.ForceMerge(1);
            long size1 = DirSize(dir);
            for (int i = 0; i < 50; ++i)
            {
                dvf.LongValue = @base + Random().Next(1000) * day;
                iwriter.AddDocument(doc);
            }
            iwriter.ForceMerge(1);
            long size2 = DirSize(dir);
            // make sure the new longs cost less than if they had only been packed
            Assert.IsTrue(size2 < size1 + (PackedInts.BitsRequired(day) * 50) / 8);
        }
Developer ID: Cefa68000, Project: lucenenet, Lines of code: 29, Source: BaseCompressingDocValuesFormatTestCase.cs

Example 2: Main

        public static void Main(string[] args)
        {
            if (args.Length < 3)
            {
                Console.Error.WriteLine("Usage: IndexMergeTool <mergedIndex> <index1> <index2> [index3] ...");
                Environment.Exit(1);
            }
            FSDirectory mergedIndex = FSDirectory.Open(new System.IO.DirectoryInfo(args[0]));

#pragma warning disable 612, 618
            using (IndexWriter writer = new IndexWriter(mergedIndex, new IndexWriterConfig(LuceneVersion.LUCENE_CURRENT, null)
               .SetOpenMode(IndexWriterConfig.OpenMode_e.CREATE)))
#pragma warning restore 612, 618
            {

                Directory[] indexes = new Directory[args.Length - 1];
                for (int i = 1; i < args.Length; i++)
                {
                    indexes[i - 1] = FSDirectory.Open(new System.IO.DirectoryInfo(args[i]));
                }

                Console.WriteLine("Merging...");
                writer.AddIndexes(indexes);

                Console.WriteLine("Full merge...");
                writer.ForceMerge(1);
            }
            Console.WriteLine("Done.");
        }
Developer ID: ChristopherHaws, Project: lucenenet, Lines of code: 29, Source: IndexMergeTool.cs

Example 3: TestAllSegmentsLarge

        public virtual void TestAllSegmentsLarge()
        {
            Directory dir = new RAMDirectory();

            IndexWriterConfig conf = NewWriterConfig();
            IndexWriter writer = new IndexWriter(dir, conf);

            AddDocs(writer, 3);
            AddDocs(writer, 3);
            AddDocs(writer, 3);

            writer.Dispose();

            conf = NewWriterConfig();
            LogMergePolicy lmp = new LogDocMergePolicy();
            lmp.MaxMergeDocs = 2;
            conf.SetMergePolicy(lmp);

            writer = new IndexWriter(dir, conf);
            writer.ForceMerge(1);
            writer.Dispose();

            SegmentInfos sis = new SegmentInfos();
            sis.Read(dir);
            Assert.AreEqual(3, sis.Size());
        }
Developer ID: Cefa68000, Project: lucenenet, Lines of code: 26, Source: TestSizeBoundedForceMerge.cs

Example 4: TestTermEnum

        public virtual void TestTermEnum()
        {
            IndexWriter writer = null;

            writer = new IndexWriter(Dir, NewIndexWriterConfig(TEST_VERSION_CURRENT, new MockAnalyzer(Random())));

            // add 100 documents with term: aaa
            // add 100 documents with terms: aaa bbb
            // Therefore, term 'aaa' has a document frequency of 200 and term 'bbb' of 100
            for (int i = 0; i < 100; i++)
            {
                AddDoc(writer, "aaa");
                AddDoc(writer, "aaa bbb");
            }

            writer.Dispose();

            // verify document frequency of terms in a multi-segment index
            VerifyDocFreq();

            // merge segments
            writer = new IndexWriter(Dir, NewIndexWriterConfig(TEST_VERSION_CURRENT, new MockAnalyzer(Random())).SetOpenMode(OpenMode_e.APPEND));
            writer.ForceMerge(1);
            writer.Dispose();

            // verify document frequency of terms in a single segment index
            VerifyDocFreq();
        }
Developer ID: joyanta, Project: lucene.net, Lines of code: 28, Source: TestSegmentTermEnum.cs

Example 5: TestForceMergeNotNeeded

 public virtual void TestForceMergeNotNeeded()
 {
     Directory dir = NewDirectory();
     AtomicBoolean mayMerge = new AtomicBoolean(true);
     MergeScheduler mergeScheduler = new SerialMergeSchedulerAnonymousInnerClassHelper(this, mayMerge);
     IndexWriter writer = new IndexWriter(dir, NewIndexWriterConfig(TEST_VERSION_CURRENT, new MockAnalyzer(Random())).SetMergeScheduler(mergeScheduler).SetMergePolicy(MergePolicy()));
     writer.Config.MergePolicy.NoCFSRatio = Random().NextBoolean() ? 0 : 1;
     int numSegments = TestUtil.NextInt(Random(), 2, 20);
     for (int i = 0; i < numSegments; ++i)
     {
         int numDocs = TestUtil.NextInt(Random(), 1, 5);
         for (int j = 0; j < numDocs; ++j)
         {
             writer.AddDocument(new Document());
         }
         writer.Reader.Dispose();
     }
     for (int i = 5; i >= 0; --i)
     {
         int segmentCount = writer.SegmentCount;
         int maxNumSegments = i == 0 ? 1 : TestUtil.NextInt(Random(), 1, 10);
         mayMerge.Set(segmentCount > maxNumSegments);
         writer.ForceMerge(maxNumSegments);
     }
     writer.Dispose();
     dir.Dispose();
 }
Developer ID: WakeflyCBass, Project: lucenenet, Lines of code: 27, Source: BaseMergePolicyTestCase.cs

Example 6: TestLucene

        public virtual void TestLucene()
        {
            int num = 100;

            Directory indexA = NewDirectory();
            Directory indexB = NewDirectory();

            FillIndex(Random(), indexA, 0, num);
            bool fail = VerifyIndex(indexA, 0);
            if (fail)
            {
                Assert.Fail("Index a is invalid");
            }

            FillIndex(Random(), indexB, num, num);
            fail = VerifyIndex(indexB, num);
            if (fail)
            {
                Assert.Fail("Index b is invalid");
            }

            Directory merged = NewDirectory();

            IndexWriter writer = new IndexWriter(merged, NewIndexWriterConfig(TEST_VERSION_CURRENT, new MockAnalyzer(Random())).SetMergePolicy(NewLogMergePolicy(2)));
            writer.AddIndexes(indexA, indexB);
            writer.ForceMerge(1);
            writer.Dispose();

            fail = VerifyIndex(merged, 0);

            Assert.IsFalse(fail, "The merged index is invalid");
            indexA.Dispose();
            indexB.Dispose();
            merged.Dispose();
        }
Developer ID: joyanta, Project: lucene.net, Lines of code: 35, Source: TestIndexWriterMerging.cs

Example 7: SetUp

        public override void SetUp()
        {
            base.SetUp();
            store = NewDirectory();
            IndexWriter writer = new IndexWriter(store, NewIndexWriterConfig(TEST_VERSION_CURRENT, new MockAnalyzer(Random(), MockTokenizer.WHITESPACE, false)));

            Document doc;

            doc = new Document();
            doc.Add(NewTextField("aaa", "foo", Field.Store.YES));
            writer.AddDocument(doc);

            doc = new Document();
            doc.Add(NewTextField("aaa", "foo", Field.Store.YES));
            writer.AddDocument(doc);

            doc = new Document();
            doc.Add(NewTextField("contents", "Tom", Field.Store.YES));
            writer.AddDocument(doc);

            doc = new Document();
            doc.Add(NewTextField("contents", "Jerry", Field.Store.YES));
            writer.AddDocument(doc);

            doc = new Document();
            doc.Add(NewTextField("zzz", "bar", Field.Store.YES));
            writer.AddDocument(doc);

            writer.ForceMerge(1);
            writer.Dispose();
        }
Developer ID: ChristopherHaws, Project: lucenenet, Lines of code: 31, Source: TestLuceneDictionary.cs

Example 8: TestFixedBinary

        public virtual void TestFixedBinary()
        {
            BaseDirectoryWrapper dir = NewFSDirectory(CreateTempDir("2BFixedBinary"));
            if (dir is MockDirectoryWrapper)
            {
                ((MockDirectoryWrapper)dir).Throttling = MockDirectoryWrapper.Throttling_e.NEVER;
            }

            IndexWriter w = new IndexWriter(dir, new IndexWriterConfig(TEST_VERSION_CURRENT, new MockAnalyzer(Random()))
               .SetMaxBufferedDocs(IndexWriterConfig.DISABLE_AUTO_FLUSH).SetRAMBufferSizeMB(256.0).SetMergeScheduler(new ConcurrentMergeScheduler()).SetMergePolicy(NewLogMergePolicy(false, 10)).SetOpenMode(IndexWriterConfig.OpenMode_e.CREATE));

            Document doc = new Document();
            var bytes = new byte[4];
            BytesRef data = new BytesRef(bytes);
            BinaryDocValuesField dvField = new BinaryDocValuesField("dv", data);
            doc.Add(dvField);

            for (int i = 0; i < int.MaxValue; i++)
            {
                bytes[0] = (byte)(i >> 24);
                bytes[1] = (byte)(i >> 16);
                bytes[2] = (byte)(i >> 8);
                bytes[3] = (byte)i;
                w.AddDocument(doc);
                if (i % 100000 == 0)
                {
                    Console.WriteLine("indexed: " + i);
                    Console.Out.Flush();
                }
            }

            w.ForceMerge(1);
            w.Dispose();

            Console.WriteLine("verifying...");
            Console.Out.Flush();

            DirectoryReader r = DirectoryReader.Open(dir);
            int expectedValue = 0;
            foreach (AtomicReaderContext context in r.Leaves)
            {
                AtomicReader reader = context.AtomicReader;
                BytesRef scratch = new BytesRef();
                BinaryDocValues dv = reader.GetBinaryDocValues("dv");
                for (int i = 0; i < reader.MaxDoc; i++)
                {
                    bytes[0] = (byte)(expectedValue >> 24);
                    bytes[1] = (byte)(expectedValue >> 16);
                    bytes[2] = (byte)(expectedValue >> 8);
                    bytes[3] = (byte)expectedValue;
                    dv.Get(i, scratch);
                    Assert.AreEqual(data, scratch);
                    expectedValue++;
                }
            }

            r.Dispose();
            dir.Dispose();
        }
Developer ID: WakeflyCBass, Project: lucenenet, Lines of code: 59, Source: Test2BBinaryDocValues.cs

Example 9: TestOmitNorms_Mem

        public virtual void TestOmitNorms_Mem()
        {
            Directory ram = NewDirectory();
            Analyzer analyzer = new MockAnalyzer(Random());
            IndexWriter writer = new IndexWriter(ram, NewIndexWriterConfig(TEST_VERSION_CURRENT, analyzer));
            Document d = new Document();

            // this field will have norms
            Field f1 = NewTextField("f1", "this field has norms", Field.Store.NO);
            d.Add(f1);

            // this field will NOT have norms
            FieldType customType = new FieldType(TextField.TYPE_NOT_STORED);
            customType.OmitNorms = true;
            Field f2 = NewField("f2", "this field has NO norms in all docs", customType);
            d.Add(f2);

            writer.AddDocument(d);
            writer.ForceMerge(1);
            // now add another document which has term freq for field f2 and not for f1, and verify that the SegmentMerger
            // keeps things constant
            d = new Document();

            // Reverse
            d.Add(NewField("f1", "this field has norms", customType));

            d.Add(NewTextField("f2", "this field has NO norms in all docs", Field.Store.NO));

            writer.AddDocument(d);

            // force merge
            writer.ForceMerge(1);
            // flush
            writer.Dispose();

            SegmentReader reader = GetOnlySegmentReader(DirectoryReader.Open(ram));
            FieldInfos fi = reader.FieldInfos;
            Assert.IsTrue(fi.FieldInfo("f1").OmitsNorms(), "OmitNorms field bit should be set.");
            Assert.IsTrue(fi.FieldInfo("f2").OmitsNorms(), "OmitNorms field bit should be set.");

            reader.Dispose();
            ram.Dispose();
        }
Developer ID: joyanta, Project: lucene.net, Lines of code: 43, Source: TestOmitNorms.cs

Example 10: BeforeClass

 public static void BeforeClass()
 {
     Dir = NewFSDirectory(CreateTempDir("2Bdocs"));
     IndexWriter iw = new IndexWriter(Dir, new IndexWriterConfig(TEST_VERSION_CURRENT, null));
     Document doc = new Document();
     for (int i = 0; i < 262144; i++)
     {
         iw.AddDocument(doc);
     }
     iw.ForceMerge(1);
     iw.Dispose();
 }
Developer ID: joyanta, Project: lucene.net, Lines of code: 12, Source: Test2BDocs.cs

Example 11: Test

        public virtual void Test()
        {
            Directory dir = NewDirectory();
            IndexWriter w = new IndexWriter(dir, NewIndexWriterConfig(TEST_VERSION_CURRENT, new MockAnalyzer(Random())).SetMergePolicy(NewLogMergePolicy()));

            IList<long?> numbers = new List<long?>();
            IList<BytesRef> binary = new List<BytesRef>();
            IList<BytesRef> sorted = new List<BytesRef>();
            int numDocs = AtLeast(100);
            for (int i = 0; i < numDocs; i++)
            {
                Document d = new Document();
                long number = Random().NextLong();
                d.Add(new NumericDocValuesField("number", number));
                BytesRef bytes = new BytesRef(TestUtil.RandomRealisticUnicodeString(Random()));
                d.Add(new BinaryDocValuesField("bytes", bytes));
                binary.Add(bytes);
                bytes = new BytesRef(TestUtil.RandomRealisticUnicodeString(Random()));
                d.Add(new SortedDocValuesField("sorted", bytes));
                sorted.Add(bytes);
                w.AddDocument(d);
                numbers.Add(number);
            }

            w.ForceMerge(1);
            IndexReader r = w.Reader;
            w.Dispose();

            Assert.AreEqual(1, r.Leaves.Count);
            AtomicReader ar = (AtomicReader)r.Leaves[0].Reader;

            int numThreads = TestUtil.NextInt(Random(), 2, 5);
            IList<ThreadClass> threads = new List<ThreadClass>();
            CountDownLatch startingGun = new CountDownLatch(1);
            for (int t = 0; t < numThreads; t++)
            {
                Random threadRandom = new Random(Random().Next());
                ThreadClass thread = new ThreadAnonymousInnerClassHelper(this, numbers, binary, sorted, numDocs, ar, startingGun, threadRandom);
                thread.Start();
                threads.Add(thread);
            }

            startingGun.countDown();

            foreach (ThreadClass thread in threads)
            {
                thread.Join();
            }

            r.Dispose();
            dir.Dispose();
        }
Developer ID: paulirwin, Project: lucene.net, Lines of code: 52, Source: TestDocValuesWithThreads.cs

Example 12: TestUniqueValuesCompression

        public virtual void TestUniqueValuesCompression()
        {
            Directory dir = new RAMDirectory();
            IndexWriterConfig iwc = new IndexWriterConfig(TEST_VERSION_CURRENT, new MockAnalyzer(Random()));
            IndexWriter iwriter = new IndexWriter(dir, iwc);

            int uniqueValueCount = TestUtil.NextInt(Random(), 1, 256);
            IList<long> values = new List<long>();

            Document doc = new Document();
            NumericDocValuesField dvf = new NumericDocValuesField("dv", 0);
            doc.Add(dvf);
            for (int i = 0; i < 300; ++i)
            {
                long value;
                if (values.Count < uniqueValueCount)
                {
                    value = Random().NextLong();
                    values.Add(value);
                }
                else
                {
                    value = RandomInts.RandomFrom(Random(), values);
                }
                dvf.LongValue = value;
                iwriter.AddDocument(doc);
            }
            iwriter.ForceMerge(1);
            long size1 = DirSize(dir);
            for (int i = 0; i < 20; ++i)
            {
                dvf.LongValue = RandomInts.RandomFrom(Random(), values);
                iwriter.AddDocument(doc);
            }
            iwriter.ForceMerge(1);
            long size2 = DirSize(dir);
            // make sure the new longs did not cost 8 bytes each
            Assert.IsTrue(size2 < size1 + 8 * 20);
        }
Developer ID: joyanta, Project: lucene.net, Lines of code: 39, Source: BaseCompressingDocValuesFormatTestCase.cs

Example 13: TestEmptyIndex

        public virtual void TestEmptyIndex()
        {
            Directory rd1 = NewDirectory();
            IndexWriter iw = new IndexWriter(rd1, NewIndexWriterConfig(TEST_VERSION_CURRENT, new MockAnalyzer(Random())));
            iw.Dispose();
            // create a copy:
            Directory rd2 = NewDirectory(rd1);

            Directory rdOut = NewDirectory();

            IndexWriter iwOut = new IndexWriter(rdOut, NewIndexWriterConfig(TEST_VERSION_CURRENT, new MockAnalyzer(Random())));

            ParallelAtomicReader apr = new ParallelAtomicReader(SlowCompositeReaderWrapper.Wrap(DirectoryReader.Open(rd1)), SlowCompositeReaderWrapper.Wrap(DirectoryReader.Open(rd2)));

            // When unpatched, Lucene crashes here with a NoSuchElementException (caused by ParallelTermEnum)
            iwOut.AddIndexes(apr);
            iwOut.ForceMerge(1);

            // 2nd try with a readerless parallel reader
            iwOut.AddIndexes(new ParallelAtomicReader());
            iwOut.ForceMerge(1);

            ParallelCompositeReader cpr = new ParallelCompositeReader(DirectoryReader.Open(rd1), DirectoryReader.Open(rd2));

            // When unpatched, Lucene crashes here with a NoSuchElementException (caused by ParallelTermEnum)
            iwOut.AddIndexes(cpr);
            iwOut.ForceMerge(1);

            // 2nd try with a readerless parallel reader
            iwOut.AddIndexes(new ParallelCompositeReader());
            iwOut.ForceMerge(1);

            iwOut.Dispose();
            rdOut.Dispose();
            rd1.Dispose();
            rd2.Dispose();
        }
Developer ID: Cefa68000, Project: lucenenet, Lines of code: 37, Source: TestParallelReaderEmptyIndex.cs

Example 14: TestNumerics

        public virtual void TestNumerics([ValueSource(typeof(ConcurrentMergeSchedulers), "Values")]IConcurrentMergeScheduler scheduler)
        {
            BaseDirectoryWrapper dir = NewFSDirectory(CreateTempDir("2BNumerics"));
            if (dir is MockDirectoryWrapper)
            {
                ((MockDirectoryWrapper)dir).Throttling = MockDirectoryWrapper.Throttling_e.NEVER;
            }

            IndexWriter w = new IndexWriter(dir, new IndexWriterConfig(TEST_VERSION_CURRENT, new MockAnalyzer(Random()))
               .SetMaxBufferedDocs(IndexWriterConfig.DISABLE_AUTO_FLUSH).SetRAMBufferSizeMB(256.0).SetMergeScheduler(scheduler).SetMergePolicy(NewLogMergePolicy(false, 10)).SetOpenMode(IndexWriterConfig.OpenMode_e.CREATE));

            Document doc = new Document();
            NumericDocValuesField dvField = new NumericDocValuesField("dv", 0);
            doc.Add(dvField);

            for (int i = 0; i < int.MaxValue; i++)
            {
                dvField.LongValue = i;
                w.AddDocument(doc);
                if (i % 100000 == 0)
                {
                    Console.WriteLine("indexed: " + i);
                    Console.Out.Flush();
                }
            }

            w.ForceMerge(1);
            w.Dispose();

            Console.WriteLine("verifying...");
            Console.Out.Flush();

            DirectoryReader r = DirectoryReader.Open(dir);
            long expectedValue = 0;
            foreach (AtomicReaderContext context in r.Leaves)
            {
                AtomicReader reader = context.AtomicReader;
                NumericDocValues dv = reader.GetNumericDocValues("dv");
                for (int i = 0; i < reader.MaxDoc; i++)
                {
                    Assert.AreEqual(expectedValue, dv.Get(i));
                    expectedValue++;
                }
            }

            r.Dispose();
            dir.Dispose();
        }
Developer ID: ChristopherHaws, Project: lucenenet, Lines of code: 48, Source: Test2BNumericDocValues.cs

Example 15: TestSimpleCase

        public virtual void TestSimpleCase()
        {
            string[] keywords = new string[] { "1", "2" };
            string[] unindexed = new string[] { "Netherlands", "Italy" };
            string[] unstored = new string[] { "Amsterdam has lots of bridges", "Venice has lots of canals" };
            string[] text = new string[] { "Amsterdam", "Venice" };

            Directory dir = NewDirectory();
            IndexWriter modifier = new IndexWriter(dir, NewIndexWriterConfig(TEST_VERSION_CURRENT, new MockAnalyzer(Random(), MockTokenizer.WHITESPACE, false)).SetMaxBufferedDeleteTerms(1));

            FieldType custom1 = new FieldType();
            custom1.Stored = true;
            for (int i = 0; i < keywords.Length; i++)
            {
                Document doc = new Document();
                doc.Add(NewStringField("id", keywords[i], Field.Store.YES));
                doc.Add(NewField("country", unindexed[i], custom1));
                doc.Add(NewTextField("contents", unstored[i], Field.Store.NO));
                doc.Add(NewTextField("city", text[i], Field.Store.YES));
                modifier.AddDocument(doc);
            }
            modifier.ForceMerge(1);
            modifier.Commit();

            Term term = new Term("city", "Amsterdam");
            int hitCount = GetHitCount(dir, term);
            Assert.AreEqual(1, hitCount);
            if (VERBOSE)
            {
                Console.WriteLine("\nTEST: now delete by term=" + term);
            }
            modifier.DeleteDocuments(term);
            modifier.Commit();

            if (VERBOSE)
            {
                Console.WriteLine("\nTEST: now getHitCount");
            }
            hitCount = GetHitCount(dir, term);
            Assert.AreEqual(0, hitCount);

            modifier.Dispose();
            dir.Dispose();
        }
Developer ID: paulirwin, Project: lucene.net, Lines of code: 44, Source: TestIndexWriterDelete.cs


Note: The Lucene.Net.Index.IndexWriter.ForceMerge method examples in this article were compiled by 纯净天空 from open-source code and documentation platforms such as GitHub and MSDocs. The snippets were selected from open-source projects contributed by many developers, and copyright in the source code remains with the original authors. Please consult each project's license before distributing or reusing the code; do not reproduce this article without permission.