

C# RandomIndexWriter.ForceMerge Method Code Examples

This page collects typical usage examples of the C# RandomIndexWriter.ForceMerge method. If you are unsure how to call RandomIndexWriter.ForceMerge, what it is used for, or what real-world usage looks like, the curated examples below should help. You can also explore further usage examples of the RandomIndexWriter class itself.


The following presents 15 code examples of the RandomIndexWriter.ForceMerge method, drawn from open-source projects and sorted by popularity by default.
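Before the examples, here is a minimal sketch of the pattern they all share: add a handful of documents through a RandomIndexWriter, call ForceMerge(1) to collapse the index into a single segment, then open a reader against the merged index. This sketch is illustrative and not taken from any of the projects below; it assumes the Lucene.NET test-framework helpers used throughout the examples (NewDirectory(), Random(), NewStringField() from LuceneTestCase), and the exact RandomIndexWriter constructor overloads vary between Lucene.NET versions.

 private void IndexAndMergeSketch()
 {
     Directory dir = NewDirectory();
     RandomIndexWriter writer = new RandomIndexWriter(Random(), dir);
     for (int i = 0; i < 10; i++)
     {
         Document doc = new Document();
         doc.Add(NewStringField("id", Convert.ToString(i), Field.Store.YES));
         writer.AddDocument(doc);
     }
     // Merge all segments down to one so the reader sees a single segment.
     writer.ForceMerge(1);
     DirectoryReader reader = writer.Reader;
     // ... run assertions against the single-segment reader here ...
     reader.Dispose();
     writer.Dispose();
     dir.Dispose();
 }

Calling ForceMerge(1) before opening the reader is what allows many of the tests below to assume exactly one leaf (segment) in reader.Leaves.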

Example 1: SetUp

 public override void SetUp()
 {
     base.SetUp();
     dir = NewDirectory();
     IndexWriterConfig iwc = NewIndexWriterConfig(TEST_VERSION_CURRENT, new MockAnalyzer(Random()));
     iwc.SetMergePolicy(NewLogMergePolicy());
     var iw = new RandomIndexWriter(Random(), dir, iwc);
     var doc = new Document
     {
         NewStringField("id", "1", Field.Store.YES),
         NewTextField("body", "some contents and more contents", Field.Store.NO),
         new NumericDocValuesField("popularity", 5)
     };
     iw.AddDocument(doc);
     doc = new Document
     {
         NewStringField("id", "2", Field.Store.YES),
         NewTextField("body", "another document with different contents", Field.Store
             .NO),
         new NumericDocValuesField("popularity", 20)
     };
     iw.AddDocument(doc);
     doc = new Document
     {
         NewStringField("id", "3", Field.Store.YES),
         NewTextField("body", "crappy contents", Field.Store.NO),
         new NumericDocValuesField("popularity", 2)
     };
     iw.AddDocument(doc);
     iw.ForceMerge(1);
     reader = iw.Reader;
     iw.Dispose();
 }
Developer: WakeflyCBass, Project: lucenenet, Lines: 34, Source: TestExpressionValueSource.cs

Example 2: BeforeClass

        public void BeforeClass()
        {
            Dir = NewDirectory();
            Sdir1 = NewDirectory();
            Sdir2 = NewDirectory();
            RandomIndexWriter writer = new RandomIndexWriter(Random(), Dir, new MockAnalyzer(Random()), Similarity, TimeZone);
            RandomIndexWriter swriter1 = new RandomIndexWriter(Random(), Sdir1, new MockAnalyzer(Random()), Similarity, TimeZone);
            RandomIndexWriter swriter2 = new RandomIndexWriter(Random(), Sdir2, new MockAnalyzer(Random()), Similarity, TimeZone);

            for (int i = 0; i < 10; i++)
            {
                Document doc = new Document();
                doc.Add(NewStringField("data", Convert.ToString(i), Field.Store.NO));
                writer.AddDocument(doc);
                ((i % 2 == 0) ? swriter1 : swriter2).AddDocument(doc);
            }
            writer.ForceMerge(1);
            swriter1.ForceMerge(1);
            swriter2.ForceMerge(1);
            writer.Dispose();
            swriter1.Dispose();
            swriter2.Dispose();

            Reader = DirectoryReader.Open(Dir);
            Searcher = NewSearcher(Reader);

            MultiReader = new MultiReader(new IndexReader[] { DirectoryReader.Open(Sdir1), DirectoryReader.Open(Sdir2) }, true);
            MultiSearcher = NewSearcher(MultiReader);

            MultiReaderDupls = new MultiReader(new IndexReader[] { DirectoryReader.Open(Sdir1), DirectoryReader.Open(Dir) }, true);
            MultiSearcherDupls = NewSearcher(MultiReaderDupls);
        }
Developer: ChristopherHaws, Project: lucenenet, Lines: 32, Source: TestMultiTermQueryRewrites.cs

Example 3: TestFieldNotPresent

        public void TestFieldNotPresent()
        {
            Directory dir = NewDirectory();
            RandomIndexWriter w = new RandomIndexWriter(Random(), dir, Similarity, TimeZone);
            int num = AtLeast(3);
            int skip = Random().Next(num);
            var terms = new List<Term>();
            for (int i = 0; i < num; i++)
            {
                terms.Add(new Term("field" + i, "content1"));
                Document doc = new Document();
                if (skip == i)
                {
                    continue;
                }
                doc.Add(NewStringField("field" + i, "content1", Field.Store.YES));
                w.AddDocument(doc);
            }

            w.ForceMerge(1);
            IndexReader reader = w.Reader;
            w.Dispose();
            assertEquals(1, reader.Leaves.size());

            AtomicReaderContext context = reader.Leaves.First();
            TermsFilter tf = new TermsFilter(terms);

            FixedBitSet bits = (FixedBitSet)tf.GetDocIdSet(context, context.AtomicReader.LiveDocs);
            assertEquals("Must be num fields - 1 since we skip only one field", num - 1, bits.Cardinality());
            reader.Dispose();
            dir.Dispose();
        }
Developer: ChristopherHaws, Project: lucenenet, Lines: 32, Source: TermsFilterTest.cs

Example 4: TestSumDocFreq_Mem

        public virtual void TestSumDocFreq_Mem()
        {
            int numDocs = AtLeast(500);

            Directory dir = NewDirectory();
            RandomIndexWriter writer = new RandomIndexWriter(Random(), dir, Similarity, TimeZone);

            Document doc = new Document();
            Field id = NewStringField("id", "", Field.Store.NO);
            Field field1 = NewTextField("foo", "", Field.Store.NO);
            Field field2 = NewTextField("bar", "", Field.Store.NO);
            doc.Add(id);
            doc.Add(field1);
            doc.Add(field2);
            for (int i = 0; i < numDocs; i++)
            {
                id.StringValue = "" + i;
                char ch1 = (char)TestUtil.NextInt(Random(), 'a', 'z');
                char ch2 = (char)TestUtil.NextInt(Random(), 'a', 'z');
                field1.StringValue = "" + ch1 + " " + ch2;
                ch1 = (char)TestUtil.NextInt(Random(), 'a', 'z');
                ch2 = (char)TestUtil.NextInt(Random(), 'a', 'z');
                field2.StringValue = "" + ch1 + " " + ch2;
                writer.AddDocument(doc);
            }

            IndexReader ir = writer.Reader;

            AssertSumDocFreq(ir);
            ir.Dispose();

            int numDeletions = AtLeast(20);
            for (int i = 0; i < numDeletions; i++)
            {
                writer.DeleteDocuments(new Term("id", "" + Random().Next(numDocs)));
            }
            writer.ForceMerge(1);
            writer.Dispose();

            ir = DirectoryReader.Open(dir);
            AssertSumDocFreq(ir);
            ir.Dispose();
            dir.Dispose();
        }
Developer: ChristopherHaws, Project: lucenenet, Lines: 44, Source: TestSumDocFreq.cs

Example 5: TestSimple

 public virtual void TestSimple()
 {
     Directory dir = NewDirectory();
     RandomIndexWriter iw = new RandomIndexWriter(Random(), dir);
     int numDocs = AtLeast(100);
     for (int i = 0; i < numDocs; i++)
     {
         iw.AddDocument(Doc());
     }
     IndexReader ir = iw.Reader;
     VerifyCount(ir);
     ir.Dispose();
     iw.ForceMerge(1);
     ir = iw.Reader;
     VerifyCount(ir);
     ir.Dispose();
     iw.Dispose();
     dir.Dispose();
 }
Developer: Cefa68000, Project: lucenenet, Lines: 19, Source: TestDocCount.cs

Example 6: TestAcrossFields

        public virtual void TestAcrossFields()
        {
            Directory dir = NewDirectory();
            RandomIndexWriter writer = new RandomIndexWriter(Random(), dir, new MockAnalyzer(Random(), MockTokenizer.WHITESPACE, true));
            Document doc = new Document();
            doc.Add(new TextField("hasMaybepayload", "here we go", Field.Store.YES));
            writer.AddDocument(doc);
            writer.Dispose();

            writer = new RandomIndexWriter(Random(), dir, new MockAnalyzer(Random(), MockTokenizer.WHITESPACE, true));
            doc = new Document();
            doc.Add(new TextField("hasMaybepayload2", "here we go", Field.Store.YES));
            writer.AddDocument(doc);
            writer.AddDocument(doc);
            writer.ForceMerge(1);
            writer.Dispose();

            dir.Dispose();
        }
Developer: WakeflyCBass, Project: lucenenet, Lines: 19, Source: TestPayloads.cs

Example 7: TestBinary

        public virtual void TestBinary()
        {
            Directory dir = NewDirectory();
            Document doc = new Document();
            BytesRef @ref = new BytesRef();
            Field field = new BinaryDocValuesField("bytes", @ref);
            doc.Add(field);

            IndexWriterConfig iwc = NewIndexWriterConfig(Random(), TEST_VERSION_CURRENT, null);
            iwc.SetMergePolicy(NewLogMergePolicy());
            RandomIndexWriter iw = new RandomIndexWriter(Random(), dir, iwc);

            int numDocs = AtLeast(500);
            for (int i = 0; i < numDocs; i++)
            {
                @ref.CopyChars(TestUtil.RandomUnicodeString(Random()));
                iw.AddDocument(doc);
                if (Random().Next(17) == 0)
                {
                    iw.Commit();
                }
            }
            DirectoryReader ir = iw.Reader;
            iw.ForceMerge(1);
            DirectoryReader ir2 = iw.Reader;
            AtomicReader merged = GetOnlySegmentReader(ir2);
            iw.Dispose();

            BinaryDocValues multi = MultiDocValues.GetBinaryValues(ir, "bytes");
            BinaryDocValues single = merged.GetBinaryDocValues("bytes");
            BytesRef actual = new BytesRef();
            BytesRef expected = new BytesRef();
            for (int i = 0; i < numDocs; i++)
            {
                single.Get(i, expected);
                multi.Get(i, actual);
                Assert.AreEqual(expected, actual);
            }
            ir.Dispose();
            ir2.Dispose();
            dir.Dispose();
        }
Developer: WakeflyCBass, Project: lucenenet, Lines: 42, Source: TestMultiDocValues.cs

Example 8: TestAddIndexes

        public virtual void TestAddIndexes()
        {
            Directory d1 = NewDirectory();
            RandomIndexWriter w = new RandomIndexWriter(Random(), d1, Similarity, TimeZone);
            Document doc = new Document();
            doc.Add(NewStringField("id", "1", Field.Store.YES));
            doc.Add(new NumericDocValuesField("dv", 1));
            w.AddDocument(doc);
            IndexReader r1 = w.Reader;
            w.Dispose();

            Directory d2 = NewDirectory();
            w = new RandomIndexWriter(Random(), d2, Similarity, TimeZone);
            doc = new Document();
            doc.Add(NewStringField("id", "2", Field.Store.YES));
            doc.Add(new NumericDocValuesField("dv", 2));
            w.AddDocument(doc);
            IndexReader r2 = w.Reader;
            w.Dispose();

            Directory d3 = NewDirectory();
            w = new RandomIndexWriter(Random(), d3, Similarity, TimeZone);
            w.AddIndexes(SlowCompositeReaderWrapper.Wrap(r1), SlowCompositeReaderWrapper.Wrap(r2));
            r1.Dispose();
            d1.Dispose();
            r2.Dispose();
            d2.Dispose();

            w.ForceMerge(1);
            DirectoryReader r3 = w.Reader;
            w.Dispose();
            AtomicReader sr = GetOnlySegmentReader(r3);
            Assert.AreEqual(2, sr.NumDocs);
            NumericDocValues docValues = sr.GetNumericDocValues("dv");
            Assert.IsNotNull(docValues);
            r3.Dispose();
            d3.Dispose();
        }
Developer: ChristopherHaws, Project: lucenenet, Lines: 38, Source: TestDocValuesIndexing.cs

Example 9: TestNumerics

        public virtual void TestNumerics()
        {
            Directory dir = NewDirectory();
            Document doc = new Document();
            Field field = new NumericDocValuesField("numbers", 0);
            doc.Add(field);

            IndexWriterConfig iwc = NewIndexWriterConfig(Random(), TEST_VERSION_CURRENT, null);
            iwc.SetMergePolicy(NewLogMergePolicy());
            RandomIndexWriter iw = new RandomIndexWriter(Random(), dir, iwc);

            int numDocs = AtLeast(500);
            for (int i = 0; i < numDocs; i++)
            {
                field.LongValue = Random().NextLong();
                iw.AddDocument(doc);
                if (Random().Next(17) == 0)
                {
                    iw.Commit();
                }
            }
            DirectoryReader ir = iw.Reader;
            iw.ForceMerge(1);
            DirectoryReader ir2 = iw.Reader;
            AtomicReader merged = GetOnlySegmentReader(ir2);
            iw.Dispose();

            NumericDocValues multi = MultiDocValues.GetNumericValues(ir, "numbers");
            NumericDocValues single = merged.GetNumericDocValues("numbers");
            for (int i = 0; i < numDocs; i++)
            {
                Assert.AreEqual(single.Get(i), multi.Get(i));
            }
            ir.Dispose();
            ir2.Dispose();
            dir.Dispose();
        }
Developer: ChristopherHaws, Project: lucenenet, Lines: 37, Source: TestMultiDocValues.cs

Example 10: Populate

 private void Populate(Directory directory, IndexWriterConfig config)
 {
     RandomIndexWriter writer = new RandomIndexWriter(Random(), directory, config);
     for (int i = 0; i < NUMBER_OF_DOCUMENTS; i++)
     {
         Document document = new Document();
         for (int f = 0; f < NUMBER_OF_FIELDS; f++)
         {
             document.Add(NewStringField("field" + f, Text, Field.Store.NO));
         }
         writer.AddDocument(document);
     }
     writer.ForceMerge(1);
     writer.Dispose();
 }
Developer: ChristopherHaws, Project: lucenenet, Lines: 15, Source: TestTermInfosReaderIndex.cs

Example 11: TestIntersectStartTerm

        public virtual void TestIntersectStartTerm()
        {
            Directory dir = NewDirectory();
            IndexWriterConfig iwc = NewIndexWriterConfig(TEST_VERSION_CURRENT, new MockAnalyzer(Random()));
            iwc.SetMergePolicy(new LogDocMergePolicy());

            RandomIndexWriter w = new RandomIndexWriter(Random(), dir, iwc);
            Document doc = new Document();
            doc.Add(NewStringField("field", "abc", Field.Store.NO));
            w.AddDocument(doc);

            doc = new Document();
            doc.Add(NewStringField("field", "abd", Field.Store.NO));
            w.AddDocument(doc);

            doc = new Document();
            doc.Add(NewStringField("field", "acd", Field.Store.NO));
            w.AddDocument(doc);

            doc = new Document();
            doc.Add(NewStringField("field", "bcd", Field.Store.NO));
            w.AddDocument(doc);

            w.ForceMerge(1);
            DirectoryReader r = w.Reader;
            w.Dispose();
            AtomicReader sub = GetOnlySegmentReader(r);
            Terms terms = sub.Fields.Terms("field");

            Automaton automaton = (new RegExp(".*d", RegExp.NONE)).ToAutomaton();
            CompiledAutomaton ca = new CompiledAutomaton(automaton, false, false);
            TermsEnum te;

            // should seek to startTerm
            te = terms.Intersect(ca, new BytesRef("aad"));
            Assert.AreEqual("abd", te.Next().Utf8ToString());
            Assert.AreEqual(1, te.Docs(null, null, DocsEnum.FLAG_NONE).NextDoc());
            Assert.AreEqual("acd", te.Next().Utf8ToString());
            Assert.AreEqual(2, te.Docs(null, null, DocsEnum.FLAG_NONE).NextDoc());
            Assert.AreEqual("bcd", te.Next().Utf8ToString());
            Assert.AreEqual(3, te.Docs(null, null, DocsEnum.FLAG_NONE).NextDoc());
            Assert.IsNull(te.Next());

            // should fail to find ceil label on second arc, rewind
            te = terms.Intersect(ca, new BytesRef("add"));
            Assert.AreEqual("bcd", te.Next().Utf8ToString());
            Assert.AreEqual(3, te.Docs(null, null, DocsEnum.FLAG_NONE).NextDoc());
            Assert.IsNull(te.Next());

            // should reach end
            te = terms.Intersect(ca, new BytesRef("bcd"));
            Assert.IsNull(te.Next());
            te = terms.Intersect(ca, new BytesRef("ddd"));
            Assert.IsNull(te.Next());

            r.Dispose();
            dir.Dispose();
        }
Developer: Cefa68000, Project: lucenenet, Lines: 58, Source: TestTermsEnum.cs

Example 12: TestAdvanceSingleParentNoChild

        public void TestAdvanceSingleParentNoChild()
        {
            Directory dir = NewDirectory();
            RandomIndexWriter w = new RandomIndexWriter(Random(), dir, NewIndexWriterConfig(TEST_VERSION_CURRENT, new MockAnalyzer(Random())).SetMergePolicy(new LogDocMergePolicy()));
            Document parentDoc = new Document();
            parentDoc.Add(NewStringField("parent", "1", Field.Store.NO));
            parentDoc.Add(NewStringField("isparent", "yes", Field.Store.NO));
            w.AddDocuments(Arrays.AsList(parentDoc));

            // Add another doc so scorer is not null
            parentDoc = new Document();
            parentDoc.Add(NewStringField("parent", "2", Field.Store.NO));
            parentDoc.Add(NewStringField("isparent", "yes", Field.Store.NO));
            Document childDoc = new Document();
            childDoc.Add(NewStringField("child", "2", Field.Store.NO));
            w.AddDocuments(Arrays.AsList(childDoc, parentDoc));

            // Need single seg:
            w.ForceMerge(1);
            IndexReader r = w.Reader;
            w.Dispose();
            IndexSearcher s = NewSearcher(r);
            Query tq = new TermQuery(new Term("child", "2"));
            Filter parentFilter = new FixedBitSetCachingWrapperFilter(new QueryWrapperFilter(new TermQuery(new Term("isparent", "yes"))));

            ToParentBlockJoinQuery q = new ToParentBlockJoinQuery(tq, parentFilter, ScoreMode.Avg);
            Weight weight = s.CreateNormalizedWeight(q);
            DocIdSetIterator disi = weight.Scorer(s.IndexReader.Leaves.First(), null);
            assertEquals(2, disi.Advance(0));
            r.Dispose();
            dir.Dispose();
        }
Developer: apache, Project: lucenenet, Lines: 32, Source: TestBlockJoin.cs

Example 13: Test

        public virtual void Test()
        {
            IList<string> postingsList = new List<string>();
            int numTerms = AtLeast(300);
            int maxTermsPerDoc = TestUtil.NextInt(Random(), 10, 20);
            bool isSimpleText = "SimpleText".Equals(TestUtil.GetPostingsFormat("field"));

            IndexWriterConfig iwc = NewIndexWriterConfig(Random(), TEST_VERSION_CURRENT, new MockAnalyzer(Random()));

            if ((isSimpleText || iwc.MergePolicy is MockRandomMergePolicy) && (TEST_NIGHTLY || RANDOM_MULTIPLIER > 1))
            {
                // Otherwise test can take way too long (> 2 hours)
                numTerms /= 2;
            }
            if (VERBOSE)
            {
                Console.WriteLine("maxTermsPerDoc=" + maxTermsPerDoc);
                Console.WriteLine("numTerms=" + numTerms);
            }
            for (int i = 0; i < numTerms; i++)
            {
                string term = Convert.ToString(i);
                for (int j = 0; j < i; j++)
                {
                    postingsList.Add(term);
                }
            }

            postingsList = CollectionsHelper.Shuffle(postingsList);

            ConcurrentQueue<string> postings = new ConcurrentQueue<string>(postingsList);

            Directory dir = NewFSDirectory(CreateTempDir(GetFullMethodName()));

            RandomIndexWriter iw = new RandomIndexWriter(Random(), dir, iwc);

            int threadCount = TestUtil.NextInt(Random(), 1, 5);
            if (VERBOSE)
            {
                Console.WriteLine("config: " + iw.w.Config);
                Console.WriteLine("threadCount=" + threadCount);
            }

            Field prototype = NewTextField("field", "", Field.Store.NO);
            FieldType fieldType = new FieldType((FieldType)prototype.FieldType);
            if (Random().NextBoolean())
            {
                fieldType.OmitNorms = true;
            }
            int options = Random().Next(3);
            if (options == 0)
            {
                fieldType.IndexOptions = FieldInfo.IndexOptions.DOCS_AND_FREQS; // we don't actually need positions
                fieldType.StoreTermVectors = true; // but enforce term vectors when we do this so we check SOMETHING
            }
            else if (options == 1 && !DoesntSupportOffsets.Contains(TestUtil.GetPostingsFormat("field")))
            {
                fieldType.IndexOptions = FieldInfo.IndexOptions.DOCS_AND_FREQS_AND_POSITIONS_AND_OFFSETS;
            }
            // else just positions

            ThreadClass[] threads = new ThreadClass[threadCount];
            CountdownEvent startingGun = new CountdownEvent(1);

            for (int threadID = 0; threadID < threadCount; threadID++)
            {
                Random threadRandom = new Random(Random().Next());
                Document document = new Document();
                Field field = new Field("field", "", fieldType);
                document.Add(field);
                threads[threadID] = new ThreadAnonymousInnerClassHelper(this, numTerms, maxTermsPerDoc, postings, iw, startingGun, threadRandom, document, field);
                threads[threadID].Start();
            }
            startingGun.Signal();
            foreach (ThreadClass t in threads)
            {
                t.Join();
            }

            iw.ForceMerge(1);
            DirectoryReader ir = iw.Reader;
            Assert.AreEqual(1, ir.Leaves.Count);
            AtomicReader air = (AtomicReader)ir.Leaves[0].Reader;
            Terms terms = air.Terms("field");
            // numTerms-1 because there cannot be a term 0 with 0 postings:
            Assert.AreEqual(numTerms - 1, terms.Size());
            TermsEnum termsEnum = terms.Iterator(null);
            BytesRef termBR;
            while ((termBR = termsEnum.Next()) != null)
            {
                int value = Convert.ToInt32(termBR.Utf8ToString());
                Assert.AreEqual(value, termsEnum.TotalTermFreq());
                // don't really need to check more than this, as CheckIndex
                // will verify that totalTermFreq == total number of positions seen
                // from a docsAndPositionsEnum.
            }
            ir.Dispose();
            iw.Dispose();
            dir.Dispose();
        }
Developer: ChristopherHaws, Project: lucenenet, Lines: 100, Source: TestBagOfPositions.cs

Example 14: TestTermUTF16SortOrder

        public virtual void TestTermUTF16SortOrder()
        {
            Random rnd = Random();
            Directory dir = NewDirectory();
            RandomIndexWriter writer = new RandomIndexWriter(rnd, dir, Similarity, TimeZone);
            Document d = new Document();
            // Single segment
            Field f = NewStringField("f", "", Field.Store.NO);
            d.Add(f);
            char[] chars = new char[2];
            HashSet<string> allTerms = new HashSet<string>();

            int num = AtLeast(200);
            for (int i = 0; i < num; i++)
            {
                string s;
                if (rnd.NextBoolean())
                {
                    // Single char
                    if (rnd.NextBoolean())
                    {
                        // Above surrogates
                        chars[0] = (char)GetInt(rnd, 1 + UnicodeUtil.UNI_SUR_LOW_END, 0xffff);
                    }
                    else
                    {
                        // Below surrogates
                        chars[0] = (char)GetInt(rnd, 0, UnicodeUtil.UNI_SUR_HIGH_START - 1);
                    }
                    s = new string(chars, 0, 1);
                }
                else
                {
                    // Surrogate pair
                    chars[0] = (char)GetInt(rnd, UnicodeUtil.UNI_SUR_HIGH_START, UnicodeUtil.UNI_SUR_HIGH_END);
                    Assert.IsTrue(((int)chars[0]) >= UnicodeUtil.UNI_SUR_HIGH_START && ((int)chars[0]) <= UnicodeUtil.UNI_SUR_HIGH_END);
                    chars[1] = (char)GetInt(rnd, UnicodeUtil.UNI_SUR_LOW_START, UnicodeUtil.UNI_SUR_LOW_END);
                    s = new string(chars, 0, 2);
                }
                allTerms.Add(s);
                f.StringValue = s;

                writer.AddDocument(d);

                if ((1 + i) % 42 == 0)
                {
                    writer.Commit();
                }
            }

            IndexReader r = writer.Reader;

            // Test each sub-segment
            foreach (AtomicReaderContext ctx in r.Leaves)
            {
                CheckTermsOrder(ctx.Reader, allTerms, false);
            }
            CheckTermsOrder(r, allTerms, true);

            // Test multi segment
            r.Dispose();

            writer.ForceMerge(1);

            // Test single segment
            r = writer.Reader;
            CheckTermsOrder(r, allTerms, true);
            r.Dispose();

            writer.Dispose();
            dir.Dispose();
        }
Developer: ChristopherHaws, Project: lucenenet, Lines: 72, Source: TestIndexWriterUnicode.cs

Example 15: BeforeClass

        public static void BeforeClass()
        {
            Dir = NewDirectory();
            RandomIndexWriter iw = new RandomIndexWriter(Random(), Dir);
            int numDocs = AtLeast(300);
            for (int i = 0; i < numDocs; i++)
            {
                Document doc = new Document();

                AddSome(doc, AlwaysTerms);

                if (Random().Next(100) < 90)
                {
                    AddSome(doc, CommonTerms);
                }
                if (Random().Next(100) < 50)
                {
                    AddSome(doc, MediumTerms);
                }
                if (Random().Next(100) < 10)
                {
                    AddSome(doc, RareTerms);
                }
                iw.AddDocument(doc);
            }
            iw.ForceMerge(1);
            iw.Dispose();
            r = DirectoryReader.Open(Dir);
            atomicReader = GetOnlySegmentReader(r);
            Searcher = new IndexSearcher(atomicReader);
            Searcher.Similarity = new DefaultSimilarityAnonymousInnerClassHelper();
        }
Developer: joyanta, Project: lucene.net, Lines: 32, Source: TestMinShouldMatch2.cs


Note: The RandomIndexWriter.ForceMerge examples on this page were compiled by 纯净天空 from open-source code and documentation platforms such as GitHub and MSDocs. The snippets are selected from open-source projects contributed by their authors, and copyright remains with the original authors. Follow the corresponding project's license when redistributing or using the code; do not reproduce without permission.