

C# IndexSearcher.Search Method Code Examples

This article collects typical usage examples of the IndexSearcher.Search method in C#. If you are wondering how to call IndexSearcher.Search, what its arguments mean, or what real-world usages look like, the curated examples below should help. You can also explore further usage examples of the containing IndexSearcher class.


The sections below show 15 code examples of the IndexSearcher.Search method, sorted by popularity by default. You can upvote the examples you find useful; your feedback helps the system recommend better C# samples.
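Before the numbered examples, here is a minimal, self-contained sketch of the typical call pattern: open a directory, build a searcher, parse a query, call Search, and read the returned TopDocs. It assumes the Lucene.Net 3.0.x API; the index path "index", the query string, and the field names "contents" and "filename" are placeholders chosen for illustration, not values taken from any example below.

using System;
using Lucene.Net.Analysis.Standard;
using Lucene.Net.Documents;
using Lucene.Net.QueryParsers;
using Lucene.Net.Search;
using Lucene.Net.Store;
using Version = Lucene.Net.Util.Version;

class QuickSearch
{
    static void Main()
    {
        // Open an existing index; "index" is a placeholder path.
        using (var dir = FSDirectory.Open(new System.IO.DirectoryInfo("index")))
        using (var searcher = new IndexSearcher(dir, true)) // read-only searcher
        {
            // Parse the user's query against the "contents" field.
            var parser = new QueryParser(Version.LUCENE_30, "contents",
                                         new StandardAnalyzer(Version.LUCENE_30));
            Query query = parser.Parse("lucene");

            // Search returns the top N scored documents as a TopDocs object.
            TopDocs hits = searcher.Search(query, 10);
            Console.WriteLine("Found " + hits.TotalHits + " matching document(s)");

            foreach (ScoreDoc sd in hits.ScoreDocs)
            {
                Document doc = searcher.Doc(sd.Doc); // load the stored fields of this hit
                Console.WriteLine(doc.Get("filename"));
            }
        }
    }
}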

Example 1: search

  // The query string q typically comes from the command-line arguments, e.g. String q = args[1];

  public static void search(String indexDir, String q) {
    Directory dir = FSDirectory.Open(new System.IO.DirectoryInfo(indexDir)); // open the index directory
    IndexSearcher searcher = new IndexSearcher(dir, true);                   // read-only searcher
    QueryParser parser = new QueryParser("contents",
                                         new StandardAnalyzer(Version.LUCENE_CURRENT));
    Query query = parser.Parse(q);                                // parse the user query
    Lucene.Net.Search.TopDocs hits = searcher.Search(query, 10);  // retrieve the top 10 hits
    System.Console.WriteLine("Found " +
                             hits.TotalHits +
                             " document(s) that matched query '" + q + "':");
    for (int i = 0; i < hits.ScoreDocs.Length; i++) {
      ScoreDoc scoreDoc = hits.ScoreDocs[i];
      Document doc = searcher.Doc(scoreDoc.Doc);     // load the stored fields of this hit
      System.Console.WriteLine(doc.Get("filename"));
    }
    searcher.Close();                // release the searcher
  }
Developer ID: pinopisello, Project: l4ia, Lines: 19, Source file: Searcher.cs

Example 2: TestRandomSearchPerformance

 public virtual void TestRandomSearchPerformance()
 {
     IndexSearcher searcher = new IndexSearcher(Reader);
     foreach (Term t in SampleTerms)
     {
         TermQuery query = new TermQuery(t);
         TopDocs topDocs = searcher.Search(query, 10);
         Assert.IsTrue(topDocs.TotalHits > 0);
     }
 }
Developer ID: ChristopherHaws, Project: lucenenet, Lines: 10, Source file: TestTermInfosReaderIndex.cs

Example 3: Search

        /// <summary>
        ///     Searches the data source using the specified criteria. The criteria are parsed by the
        ///     query builder specified by <typeparamref name="QueryBuilderType" />.
        /// </summary>
        /// <param name="scope">Name of the application.</param>
        /// <param name="criteria">The criteria.</param>
        /// <returns></returns>
        /// <exception cref="VirtoCommerce.Search.Providers.Lucene.LuceneSearchException"></exception>
        public virtual ISearchResults Search(string scope, ISearchCriteria criteria)
        {
            TopDocs docs = null;

            var folderName = this.GetFolderName(scope, criteria.DocumentType);

            var dir = FSDirectory.Open(new DirectoryInfo(this.GetDirectoryPath(folderName)));
            var searcher = new IndexSearcher(dir);

            var q = (Query)this.QueryBuilder.BuildQuery(criteria);

            Debug.WriteLine("Search Lucene Query:{0}", (object)q.ToString());

            try
            {
                var numDocs = criteria.StartingRecord + criteria.RecordsToRetrieve;

                if (criteria.Sort != null)
                {
                    var fields = criteria.Sort.GetSort();

                    docs = searcher.Search(
                        q,
                        null,
                        numDocs,
                        new Sort(
                            fields.Select(field => new SortField(field.FieldName, field.DataType, field.IsDescending))
                                  .ToArray()));
                }
                else
                {
                    docs = searcher.Search(q, numDocs);
                }
            }
            catch (Exception ex)
            {
                throw new LuceneSearchException("Search exception", ex);
            }

            var results = new LuceneSearchResults(searcher, searcher.IndexReader, docs, criteria, q);

            // Cleanup here
            searcher.IndexReader.Dispose();
            searcher.Dispose();
            return results.Results;
        }
Developer ID: karpinskiy, Project: vc-community, Lines: 56, Source file: LuceneSearchProvider.cs

Example 4: RunQuery

 private int RunQuery(IndexSearcher s, Query q)
 {
     s.Search(q, 10);
     int hitCount = s.Search(q, null, 10, new Sort(new SortField("title", SortField.Type_e.STRING))).TotalHits;
     if (DefaultCodecSupportsDocValues())
     {
         Sort dvSort = new Sort(new SortField("title", SortField.Type_e.STRING));
         int hitCount2 = s.Search(q, null, 10, dvSort).TotalHits;
         Assert.AreEqual(hitCount, hitCount2);
     }
     return hitCount;
 }
Developer ID: ChristopherHaws, Project: lucenenet, Lines: 12, Source file: ThreadedIndexingAndSearchingTestCase.cs

Example 5: GenerateHighlights

        private static void GenerateHighlights(IList<Document> documents, IndexWriter writer, SearchCriteria criteria)
        {
            var documentHightlightMap = documents.ToDictionary(c => c._id.ToString());

            var reader = DirectoryReader.Open(writer, true, true);
            var queryParser = new HighlighterQueryParser(writer.GetAnalyzer());
            queryParser.SetMultiTermRewriteMethod(MultiTermQuery.SCORING_BOOLEAN_REWRITE);

            var query = queryParser.Parse(criteria.Query)
                                   .Rewrite(reader);

            var highlighter = CreateHighlighter();
            var fieldQuery = highlighter.GetFieldQuery(query);

            var searcher = new IndexSearcher(reader);
            var topFieldDocs = searcher.Search(query, documents.Count, Sort.RELEVANCE);
            var scoreDocs = topFieldDocs.ScoreDocs;

            foreach (var sd in scoreDocs)
            {
                var bestFragments = highlighter.GetBestFragments(fieldQuery, reader, sd.Doc, Schema.StandardField.FULL_TEXT, FRAGMENT_SIZE, FRAGMENT_COUNT);
                var document = searcher.Doc(sd.Doc);
                var docId = document.Get(Schema.StandardField.ID);

                if (documentHightlightMap.ContainsKey(docId) && bestFragments.Length > 0)
                {
                    var dictionary = documentHightlightMap[docId].AsDictionary();
                    var highlight = String.Join($"{Environment.NewLine} ... {Environment.NewLine}", bestFragments);
                    dictionary[HIGHLIGHT_FIELD_NAME] = highlight;
                }
            }
        }
Developer ID: cris-almodovar, Project: expando-db, Lines: 32, Source file: LuceneHighlighter.cs

Example 6: TestTransitionAPI

        public virtual void TestTransitionAPI()
        {
            Directory dir = NewDirectory();
            RandomIndexWriter w = new RandomIndexWriter(Random(), dir, Similarity, TimeZone);

            Documents.Document doc = new Documents.Document();
            doc.Add(new Field("stored", "abc", Field.Store.YES, Field.Index.NO));
            doc.Add(new Field("stored_indexed", "abc xyz", Field.Store.YES, Field.Index.NOT_ANALYZED));
            doc.Add(new Field("stored_tokenized", "abc xyz", Field.Store.YES, Field.Index.ANALYZED));
            doc.Add(new Field("indexed", "abc xyz", Field.Store.NO, Field.Index.NOT_ANALYZED));
            doc.Add(new Field("tokenized", "abc xyz", Field.Store.NO, Field.Index.ANALYZED));
            doc.Add(new Field("tokenized_reader", new StringReader("abc xyz")));
            doc.Add(new Field("tokenized_tokenstream", w.w.Analyzer.TokenStream("tokenized_tokenstream", new StringReader("abc xyz"))));
            doc.Add(new Field("binary", new byte[10]));
            doc.Add(new Field("tv", "abc xyz", Field.Store.NO, Field.Index.ANALYZED, Field.TermVector.YES));
            doc.Add(new Field("tv_pos", "abc xyz", Field.Store.NO, Field.Index.ANALYZED, Field.TermVector.WITH_POSITIONS));
            doc.Add(new Field("tv_off", "abc xyz", Field.Store.NO, Field.Index.ANALYZED, Field.TermVector.WITH_OFFSETS));
            doc.Add(new Field("tv_pos_off", "abc xyz", Field.Store.NO, Field.Index.ANALYZED, Field.TermVector.WITH_POSITIONS_OFFSETS));
            w.AddDocument(doc);
            IndexReader r = w.Reader;
            w.Dispose();

            doc = r.Document(0);
            // 4 stored fields
            Assert.AreEqual(4, doc.Fields.Count);
            Assert.AreEqual("abc", doc.Get("stored"));
            Assert.AreEqual("abc xyz", doc.Get("stored_indexed"));
            Assert.AreEqual("abc xyz", doc.Get("stored_tokenized"));
            BytesRef br = doc.GetBinaryValue("binary");
            Assert.IsNotNull(br);
            Assert.AreEqual(10, br.Length);

            IndexSearcher s = new IndexSearcher(r);
            Assert.AreEqual(1, s.Search(new TermQuery(new Term("stored_indexed", "abc xyz")), 1).TotalHits);
            Assert.AreEqual(1, s.Search(new TermQuery(new Term("stored_tokenized", "abc")), 1).TotalHits);
            Assert.AreEqual(1, s.Search(new TermQuery(new Term("stored_tokenized", "xyz")), 1).TotalHits);
            Assert.AreEqual(1, s.Search(new TermQuery(new Term("indexed", "abc xyz")), 1).TotalHits);
            Assert.AreEqual(1, s.Search(new TermQuery(new Term("tokenized", "abc")), 1).TotalHits);
            Assert.AreEqual(1, s.Search(new TermQuery(new Term("tokenized", "xyz")), 1).TotalHits);
            Assert.AreEqual(1, s.Search(new TermQuery(new Term("tokenized_reader", "abc")), 1).TotalHits);
            Assert.AreEqual(1, s.Search(new TermQuery(new Term("tokenized_reader", "xyz")), 1).TotalHits);
            Assert.AreEqual(1, s.Search(new TermQuery(new Term("tokenized_tokenstream", "abc")), 1).TotalHits);
            Assert.AreEqual(1, s.Search(new TermQuery(new Term("tokenized_tokenstream", "xyz")), 1).TotalHits);

            foreach (string field in new string[] { "tv", "tv_pos", "tv_off", "tv_pos_off" })
            {
                Fields tvFields = r.GetTermVectors(0);
                Terms tvs = tvFields.Terms(field);
                Assert.IsNotNull(tvs);
                Assert.AreEqual(2, tvs.Size());
                TermsEnum tvsEnum = tvs.Iterator(null);
                Assert.AreEqual(new BytesRef("abc"), tvsEnum.Next());
                DocsAndPositionsEnum dpEnum = tvsEnum.DocsAndPositions(null, null);
                if (field.Equals("tv"))
                {
                    Assert.IsNull(dpEnum);
                }
                else
                {
                    Assert.IsNotNull(dpEnum);
                }
                Assert.AreEqual(new BytesRef("xyz"), tvsEnum.Next());
                Assert.IsNull(tvsEnum.Next());
            }

            r.Dispose();
            dir.Dispose();
        }
Developer ID: ChristopherHaws, Project: lucenenet, Lines: 68, Source file: TestDocument.cs

Example 7: TestConstantScoreMultiTermQuery

        public void TestConstantScoreMultiTermQuery()
        {

            numHighlights = 0;

            query = new WildcardQuery(new Term(FIELD_NAME, "ken*"));
            ((WildcardQuery) query).RewriteMethod = MultiTermQuery.CONSTANT_SCORE_FILTER_REWRITE;
            searcher = new IndexSearcher(ramDir, true);
            // can't rewrite ConstantScore if you want to highlight it -
            // it rewrites to ConstantScoreQuery which cannot be highlighted
            // query = unReWrittenQuery.Rewrite(reader);
            Console.WriteLine("Searching for: " + query.ToString(FIELD_NAME));
            hits = searcher.Search(query, null, 1000);

            for (int i = 0; i < hits.TotalHits; i++)
            {
                String text = searcher.Doc(hits.ScoreDocs[i].Doc).Get(HighlighterTest.FIELD_NAME);
                int maxNumFragmentsRequired = 2;
                String fragmentSeparator = "...";
                QueryScorer scorer = null;
                TokenStream tokenStream = null;

                tokenStream = analyzer.TokenStream(FIELD_NAME, new StringReader(text));

                scorer = new QueryScorer(query, FIELD_NAME);

                Highlighter highlighter = new Highlighter(this, scorer);

                highlighter.TextFragmenter = new SimpleFragmenter(20);

                String result = highlighter.GetBestFragments(tokenStream, text, maxNumFragmentsRequired,
                                                             fragmentSeparator);
                Console.WriteLine("\t" + result);
            }
            Assert.IsTrue(numHighlights == 5, "Failed to find correct number of highlights " + numHighlights + " found");

            // try null field

            hits = searcher.Search(query, null, 1000);

            numHighlights = 0;

            for (int i = 0; i < hits.TotalHits; i++)
            {
                String text = searcher.Doc(hits.ScoreDocs[i].Doc).Get(HighlighterTest.FIELD_NAME);
                int maxNumFragmentsRequired = 2;
                String fragmentSeparator = "...";
                QueryScorer scorer = null;
                TokenStream tokenStream = null;

                tokenStream = analyzer.TokenStream(HighlighterTest.FIELD_NAME, new StringReader(text));

                scorer = new QueryScorer(query, null);

                Highlighter highlighter = new Highlighter(this, scorer);

                highlighter.TextFragmenter = new SimpleFragmenter(20);

                String result = highlighter.GetBestFragments(tokenStream, text, maxNumFragmentsRequired,
                                                             fragmentSeparator);
                Console.WriteLine("\t" + result);
            }
            Assert.IsTrue(numHighlights == 5, "Failed to find correct number of highlights " + numHighlights + " found");

            // try default field

            hits = searcher.Search(query, null, 1000);

            numHighlights = 0;

            for (int i = 0; i < hits.TotalHits; i++)
            {
                String text = searcher.Doc(hits.ScoreDocs[i].Doc).Get(HighlighterTest.FIELD_NAME);
                int maxNumFragmentsRequired = 2;
                String fragmentSeparator = "...";
                QueryScorer scorer = null;
                TokenStream tokenStream = null;

                tokenStream = analyzer.TokenStream(HighlighterTest.FIELD_NAME, new StringReader(text));

                scorer = new QueryScorer(query, "random_field", HighlighterTest.FIELD_NAME);

                Highlighter highlighter = new Highlighter(this, scorer);

                highlighter.TextFragmenter = new SimpleFragmenter(20);

                String result = highlighter.GetBestFragments(tokenStream, text, maxNumFragmentsRequired,
                                                             fragmentSeparator);
                Console.WriteLine("\t" + result);
            }
            Assert.IsTrue(numHighlights == 5, "Failed to find correct number of highlights " + numHighlights + " found");
        }
Developer ID: hanabi1224, Project: lucene.net, Lines: 92, Source file: HighlighterTest.cs

Example 8: TestFarsiRangeFilterCollating

        public virtual void TestFarsiRangeFilterCollating(Analyzer analyzer, BytesRef firstBeg, BytesRef firstEnd, BytesRef secondBeg, BytesRef secondEnd)
        {
            Directory dir = NewDirectory();
            IndexWriter writer = new IndexWriter(dir, new IndexWriterConfig(TEST_VERSION_CURRENT, analyzer));
            Document doc = new Document();
            doc.Add(new TextField("content", "\u0633\u0627\u0628", Field.Store.YES));
            doc.Add(new StringField("body", "body", Field.Store.YES));
            writer.AddDocument(doc);
            writer.Dispose();
            IndexReader reader = DirectoryReader.Open(dir);
            IndexSearcher searcher = new IndexSearcher(reader);
            Query query = new TermQuery(new Term("body", "body"));

            // Unicode order would include U+0633 in [ U+062F - U+0698 ], but Farsi
            // orders the U+0698 character before the U+0633 character, so the single
            // index Term below should NOT be returned by a TermRangeFilter with a Farsi
            // Collator (or an Arabic one for the case when Farsi searcher not
            // supported).
            ScoreDoc[] result = searcher.Search(query, new TermRangeFilter("content", firstBeg, firstEnd, true, true), 1).ScoreDocs;
            Assert.AreEqual(0, result.Length, "The index Term should not be included.");

            result = searcher.Search(query, new TermRangeFilter("content", secondBeg, secondEnd, true, true), 1).ScoreDocs;
            Assert.AreEqual(1, result.Length, "The index Term should be included.");

            reader.Dispose();
            dir.Dispose();
        }
Developer ID: joyanta, Project: lucene.net, Lines: 27, Source file: CollationTestBase.cs

Example 9: TestRegexQuery

        public void TestRegexQuery()
        {
            const int maxNumFragmentsRequired = 2;

            query = new RegexQuery(new Term(FIELD_NAME, "ken.*"));
            searcher = new IndexSearcher(ramDir, true);
            hits = searcher.Search(query, 100);

            var scorer = new QueryScorer(query, FIELD_NAME);
            var highlighter = new Highlighter(this, scorer);

            for (int i = 0; i < hits.TotalHits; i++)
            {
                String text = searcher.Doc(hits.ScoreDocs[i].Doc).Get(FIELD_NAME);
                TokenStream tokenStream = analyzer.TokenStream(FIELD_NAME, new StringReader(text));

                highlighter.TextFragmenter = new SimpleFragmenter(40);

                String result = highlighter.GetBestFragments(tokenStream, text, maxNumFragmentsRequired,
                                                             "...");
                Console.WriteLine("\t" + result);
            }

            Assert.IsTrue(numHighlights == 5, "Failed to find correct number of highlights " + numHighlights + " found");
        }
Developer ID: hanabi1224, Project: lucene.net, Lines: 25, Source file: HighlighterTest.cs

Example 10: TestNumericRangeQuery

        public void TestNumericRangeQuery()
        {
            // doesn't currently highlight, but make sure it doesn't cause exception either
            query = NumericRangeQuery.NewIntRange(NUMERIC_FIELD_NAME, 2, 6, true, true);
            searcher = new IndexSearcher(ramDir, true);
            hits = searcher.Search(query, 100);
            int maxNumFragmentsRequired = 2;

            QueryScorer scorer = new QueryScorer(query, FIELD_NAME);
            Highlighter highlighter = new Highlighter(this, scorer);

            for (int i = 0; i < hits.TotalHits; i++)
            {
                String text = searcher.Doc(hits.ScoreDocs[i].Doc).Get(NUMERIC_FIELD_NAME);
                TokenStream tokenStream = analyzer.TokenStream(FIELD_NAME, new StringReader(text));

                highlighter.TextFragmenter = new SimpleFragmenter(40);

                String result = highlighter.GetBestFragments(tokenStream, text, maxNumFragmentsRequired,
                                                             "...");
                //Console.WriteLine("\t" + result);
            }


        }
Developer ID: hanabi1224, Project: lucene.net, Lines: 25, Source file: HighlighterTest.cs

Example 11: DoSearching

 public void DoSearching(Query unReWrittenQuery)
 {
     searcher = new IndexSearcher(ramDir, true);
     // for any multi-term queries to work (prefix, wildcard, range,fuzzy etc)
     // you must use a rewritten query!
     query = unReWrittenQuery.Rewrite(reader);
     Console.WriteLine("Searching for: " + query.ToString(FIELD_NAME));
     hits = searcher.Search(query, null, 1000);
 }
Developer ID: hanabi1224, Project: lucene.net, Lines: 9, Source file: HighlighterTest.cs

Example 12: SearchIndex

        private void SearchIndex()
        {
            String q = "t_text1:random";
            QueryParser parser = new QueryParser(TEST_VERSION, "t_text1", a);
            Query query = parser.Parse(q);
            IndexSearcher searcher = new IndexSearcher(dir, true);
            // This scorer can return negative idf -> null fragment
            IScorer scorer = new QueryTermScorer(query, searcher.IndexReader, "t_text1");
            // This scorer doesn't use idf (patch version)
            //Scorer scorer = new QueryTermScorer( query, "t_text1" );
            Highlighter h = new Highlighter(scorer);

            TopDocs hits = searcher.Search(query, null, 10);
            for (int i = 0; i < hits.TotalHits; i++)
            {
                Document doc = searcher.Doc(hits.ScoreDocs[i].Doc);
                String result = h.GetBestFragment(a, "t_text1", doc.Get("t_text1"));
                Console.WriteLine("result:" + result);
                Assert.AreEqual(result, "more <B>random</B> words for second field");
            }
            searcher.Close();
        }
Developer ID: hanabi1224, Project: lucene.net, Lines: 22, Source file: HighlighterTest.cs

Example 13: TestUnRewrittenQuery

        public void TestUnRewrittenQuery()
        {
            var helper = new TestHighlightRunner();
            helper.TestAction = () =>
                                    {
                                        numHighlights = 0;
                                        // test to show how rewritten query can still be used
                                        searcher = new IndexSearcher(ramDir, true);
                                        Analyzer analyzer = new StandardAnalyzer(TEST_VERSION);

                                        QueryParser parser = new QueryParser(TEST_VERSION, FIELD_NAME, analyzer);
                                        Query query = parser.Parse("JF? or Kenned*");
                                        Console.WriteLine("Searching with primitive query");
                                        // forget to set this and...
                                        // query=query.Rewrite(reader);
                                        TopDocs hits = searcher.Search(query, null, 1000);

                                        // create an instance of the highlighter with the tags used to surround
                                        // highlighted text
                                        // QueryHighlightExtractor highlighter = new
                                        // QueryHighlightExtractor(this,
                                        // query, new StandardAnalyzer(TEST_VERSION));

                                        int maxNumFragmentsRequired = 3;

                                        for (int i = 0; i < hits.TotalHits; i++)
                                        {
                                            String text = searcher.Doc(hits.ScoreDocs[i].Doc).Get(FIELD_NAME);
                                            TokenStream tokenStream = analyzer.TokenStream(FIELD_NAME,
                                                                                           new StringReader(text));
                                            Highlighter highlighter = helper.GetHighlighter(query, FIELD_NAME,
                                                                                            tokenStream,
                                                                                            this, false);

                                            highlighter.TextFragmenter = new SimpleFragmenter(40);

                                            String highlightedText = highlighter.GetBestFragments(tokenStream, text,
                                                                                                  maxNumFragmentsRequired,
                                                                                                  "...");

                                            Console.WriteLine(highlightedText);
                                        }
                                        // We expect to have zero highlights if the query is multi-terms and is
                                        // not
                                        // rewritten!
                                        Assert.IsTrue(numHighlights == 0,
                                                      "Failed to find correct number of highlights " + numHighlights +
                                                      " found");
                                    };

            helper.Start();
        }
Developer ID: hanabi1224, Project: lucene.net, Lines: 52, Source file: HighlighterTest.cs

Example 14: TestHugeLabel

        public virtual void TestHugeLabel()
        {
            Directory indexDir = NewDirectory(), taxoDir = NewDirectory();
            IndexWriter indexWriter = new IndexWriter(indexDir, NewIndexWriterConfig(TEST_VERSION_CURRENT, new MockAnalyzer(Random())));
            DirectoryTaxonomyWriter taxoWriter = new DirectoryTaxonomyWriter(taxoDir, OpenMode.CREATE, new Cl2oTaxonomyWriterCache(2, 1f, 1));
            FacetsConfig config = new FacetsConfig();

            // Add one huge label:
            string bigs = null;
            int ordinal = -1;

            int len = FacetLabel.MAX_CATEGORY_PATH_LENGTH - 4; // for the dimension and separator
            bigs = TestUtil.RandomSimpleString(Random(), len, len);
            FacetField ff = new FacetField("dim", bigs);
            FacetLabel cp = new FacetLabel("dim", bigs);
            ordinal = taxoWriter.AddCategory(cp);
            Document doc = new Document();
            doc.Add(ff);
            indexWriter.AddDocument(config.Build(taxoWriter, doc));

            // Add tiny ones to cause a re-hash
            for (int i = 0; i < 3; i++)
            {
                string s = TestUtil.RandomSimpleString(Random(), 1, 10);
                taxoWriter.AddCategory(new FacetLabel("dim", s));
                doc = new Document();
                doc.Add(new FacetField("dim", s));
                indexWriter.AddDocument(config.Build(taxoWriter, doc));
            }

            // when too large components were allowed to be added, this resulted in a new added category
            Assert.AreEqual(ordinal, taxoWriter.AddCategory(cp));

            IOUtils.Close(indexWriter, taxoWriter);

            DirectoryReader indexReader = DirectoryReader.Open(indexDir);
            var taxoReader = new DirectoryTaxonomyReader(taxoDir);
            IndexSearcher searcher = new IndexSearcher(indexReader);
            DrillDownQuery ddq = new DrillDownQuery(new FacetsConfig());
            ddq.Add("dim", bigs);
            Assert.AreEqual(1, searcher.Search(ddq, 10).TotalHits);

            IOUtils.Close(indexReader, taxoReader, indexDir, taxoDir);
        }
Developer ID: ChristopherHaws, Project: lucenenet, Lines: 44, Source file: TestDirectoryTaxonomyWriter.cs

Example 15: TestQueryScorerHits

        public void TestQueryScorerHits()
        {
            Analyzer analyzer = new SimpleAnalyzer();
            QueryParser qp = new QueryParser(TEST_VERSION, FIELD_NAME, analyzer);
            query = qp.Parse("\"very long\"");
            searcher = new IndexSearcher(ramDir, true);
            TopDocs hits = searcher.Search(query, 10);

            QueryScorer scorer = new QueryScorer(query, FIELD_NAME);
            Highlighter highlighter = new Highlighter(scorer);


            for (int i = 0; i < hits.ScoreDocs.Length; i++)
            {
                Document doc = searcher.Doc(hits.ScoreDocs[i].Doc);
                String storedField = doc.Get(FIELD_NAME);

                TokenStream stream = TokenSources.GetAnyTokenStream(searcher.IndexReader, hits.ScoreDocs[i].Doc,
                                                                    FIELD_NAME, doc, analyzer);

                IFragmenter fragmenter = new SimpleSpanFragmenter(scorer);

                highlighter.TextFragmenter = fragmenter;

                String fragment = highlighter.GetBestFragment(stream, storedField);

                Console.WriteLine(fragment);
            }
        }
Developer ID: hanabi1224, Project: lucene.net, Lines: 29, Source file: HighlighterTest.cs


Note: The IndexSearcher.Search examples in this article were compiled by 纯净天空 from open-source code and documentation platforms such as GitHub and MSDocs. The snippets were selected from open-source projects contributed by various developers; copyright in the source code remains with the original authors. Please consult the corresponding project's license before using or redistributing the code, and do not reproduce this article without permission.