This article collects typical usage examples of the C# method Lucene.Net.Search.IndexSearcher.Close. If you are wondering what IndexSearcher.Close does in C#, how to call it, or what it looks like in real code, the curated examples below should help. You can also explore further usage examples of the containing class, Lucene.Net.Search.IndexSearcher.
The following presents 15 code examples of IndexSearcher.Close, sorted by popularity by default. You can upvote the examples you like or find useful; your ratings help the system recommend better C# examples.
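Before the individual examples, here is a minimal sketch of the pattern most of them share: open an IndexSearcher over a Directory, run a query, read the matching documents, and call Close() when finished. It assumes the older Lucene.Net 2.9-era API used throughout this page; the index path, the "title" field, and the query string are placeholder assumptions for illustration only.

// Minimal open-search-close sketch (assumed index path, field name, and query).
using System.IO;
using Lucene.Net.Analysis.Standard;
using Lucene.Net.Documents;
using Lucene.Net.QueryParsers;
using Lucene.Net.Search;
using Lucene.Net.Store;
using Version = Lucene.Net.Util.Version;

public static void CloseExample()
{
    Directory directory = FSDirectory.Open(new DirectoryInfo("C:\\LuceneIndex")); // assumed path
    var searcher = new IndexSearcher(directory, true); // read-only searcher
    var analyzer = new StandardAnalyzer(Version.LUCENE_29);
    var parser = new QueryParser(Version.LUCENE_29, "title", analyzer); // "title" is a placeholder field
    Query query = parser.Parse("ipod*");
    TopDocs topDocs = searcher.Search(query, 10); // at most 10 hits
    foreach (ScoreDoc scoreDoc in topDocs.ScoreDocs)
    {
        Document doc = searcher.Doc(scoreDoc.doc); // lowercase .doc field, as in the 2.9-era examples below
        System.Console.WriteLine(doc.Get("title"));
    }
    searcher.Close(); // release the searcher once the results are materialized
    directory.Close(); // release the directory
}

Several of the examples also call Dispose() or wrap the searcher in a using block (see example 5); in later Lucene.Net releases disposing supersedes Close(), but the snippets on this page target the older API where Close() is called explicitly.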
Example 1: searchLucene
public Data searchLucene(Data data)
{
    Search_gl search = new Search_gl();
    List<string> item = new List<string>();
    Lucene.Net.Store.Directory directory = FSDirectory.Open(new DirectoryInfo(Environment.CurrentDirectory + "\\LuceneIndex"));
    var analyzer = new StandardAnalyzer(Version.LUCENE_29);
    IndexReader reader = IndexReader.Open(directory, true);
    IndexSearcher searcher = new IndexSearcher(reader);
    //QueryParser queryParser = new QueryParser(Version.LUCENE_29, "summary", analyzer); // search on a single field
    MultiFieldQueryParser parser = new MultiFieldQueryParser(new string[] { "name", "summary" }, analyzer); // search across multiple fields
    Query query = parser.Parse((data.getString("search")) + "*"); // can't search blank text with a wildcard as the first character
    TopScoreDocCollector collector = TopScoreDocCollector.create(1000, true);
    searcher.Search(query, collector);
    ScoreDoc[] hits = collector.TopDocs().ScoreDocs;
    int count = hits.Length;
    for (int i = 0; i < count; i++)
    {
        int docId = hits[i].doc;
        float score = hits[i].score;
        Document doc = searcher.Doc(docId);
        string id = doc.Get("id");
        item.Add(id);
    }
    Data list = search.search(data, item.ToArray());
    reader.Close();
    searcher.Close();
    return list;
}
Example 2: TestQuery
public virtual void TestQuery()
{
    RAMDirectory dir = new RAMDirectory();
    IndexWriter iw = new IndexWriter(dir, new StandardAnalyzer(), true);
    AddDoc("one", iw);
    AddDoc("two", iw);
    AddDoc("three four", iw);
    iw.Close();
    IndexSearcher is_Renamed = new IndexSearcher(dir);
    Hits hits = is_Renamed.Search(new MatchAllDocsQuery());
    Assert.AreEqual(3, hits.Length());
    // some artificial queries to trigger the use of skipTo():
    BooleanQuery bq = new BooleanQuery();
    bq.Add(new MatchAllDocsQuery(), BooleanClause.Occur.MUST);
    bq.Add(new MatchAllDocsQuery(), BooleanClause.Occur.MUST);
    hits = is_Renamed.Search(bq);
    Assert.AreEqual(3, hits.Length());
    bq = new BooleanQuery();
    bq.Add(new MatchAllDocsQuery(), BooleanClause.Occur.MUST);
    bq.Add(new TermQuery(new Term("key", "three")), BooleanClause.Occur.MUST);
    hits = is_Renamed.Search(bq);
    Assert.AreEqual(1, hits.Length());
    // delete a document:
    is_Renamed.GetIndexReader().DeleteDocument(0);
    hits = is_Renamed.Search(new MatchAllDocsQuery());
    Assert.AreEqual(2, hits.Length());
    is_Renamed.Close();
}
Example 3: Initialize_Indexes_All_Nodes
public void Initialize_Indexes_All_Nodes()
{
    string elementIdForTestingSearch = _deepNodeFinder.GetNodesForIndexing()[0].Id;
    int expectedNumNodes = _deepNodeFinder.GetNodesForIndexing().Length;
    Assert.AreEqual("usfr-pte_NetCashFlowsProvidedUsedOperatingActivitiesDirectAbstract", elementIdForTestingSearch,
        "TEST SANITY: element id for test search");
    Assert.AreEqual(1595, expectedNumNodes, "TEST SANITY: Number of nodes found in the test taxonomy");
    IndexReader indexReader = IndexReader.Open(_indexMgr.LuceneDirectory_ForTesting);
    Assert.AreEqual(expectedNumNodes, indexReader.NumDocs(),
        "An incorrect number of documents were found in the Lucene directory after initialization");
    IndexSearcher searcher = new IndexSearcher(_indexMgr.LuceneDirectory_ForTesting);
    try
    {
        Hits results =
            searcher.Search(new TermQuery(new Term(LuceneNodeIndexer.ELEMENTID_FOR_DELETING_FIELD, elementIdForTestingSearch)));
        Assert.AreEqual(1, results.Length(), "Search results should only have 1 hit");
        Assert.AreEqual(elementIdForTestingSearch, results.Doc(0).Get(LuceneNodeIndexer.ELEMENTID_FIELD),
            "Search results yielded the wrong element!");
    }
    finally
    {
        searcher.Close();
    }
}
Example 4: TestSorting
public virtual void TestSorting()
{
    Directory directory = new MockRAMDirectory();
    IndexWriter writer = new IndexWriter(directory, new WhitespaceAnalyzer(), true, IndexWriter.MaxFieldLength.LIMITED);
    writer.SetMaxBufferedDocs(2);
    writer.SetMergeFactor(1000);
    writer.AddDocument(Adoc(new System.String[] { "id", "a", "title", "ipod", "str_s", "a" }));
    writer.AddDocument(Adoc(new System.String[] { "id", "b", "title", "ipod ipod", "str_s", "b" }));
    writer.AddDocument(Adoc(new System.String[] { "id", "c", "title", "ipod ipod ipod", "str_s", "c" }));
    writer.AddDocument(Adoc(new System.String[] { "id", "x", "title", "boosted", "str_s", "x" }));
    writer.AddDocument(Adoc(new System.String[] { "id", "y", "title", "boosted boosted", "str_s", "y" }));
    writer.AddDocument(Adoc(new System.String[] { "id", "z", "title", "boosted boosted boosted", "str_s", "z" }));
    IndexReader r = writer.GetReader();
    writer.Close();
    IndexSearcher searcher = new IndexSearcher(r);
    RunTest(searcher, true);
    RunTest(searcher, false);
    searcher.Close();
    r.Close();
    directory.Close();
}
Example 5: _search
// main search method
private static IEnumerable<SampleData> _search(string searchQuery, string searchField = "") {
    // validation
    if (string.IsNullOrEmpty(searchQuery.Replace("*", "").Replace("?", ""))) return new List<SampleData>();
    // set up lucene searcher
    using (var searcher = new IndexSearcher(_directory, false)) {
        var hits_limit = 1000;
        var analyzer = new StandardAnalyzer(Version.LUCENE_29);
        // search by single field
        if (!string.IsNullOrEmpty(searchField)) {
            var parser = new QueryParser(Version.LUCENE_29, searchField, analyzer);
            var query = parseQuery(searchQuery, parser);
            var hits = searcher.Search(query, hits_limit).ScoreDocs;
            var results = _mapLuceneToDataList(hits, searcher);
            analyzer.Close();
            searcher.Close();
            searcher.Dispose();
            return results;
        }
        // search by multiple fields (ordered by RELEVANCE)
        else {
            var parser = new MultiFieldQueryParser
                (Version.LUCENE_29, new[] { "Id", "Name", "Description" }, analyzer);
            var query = parseQuery(searchQuery, parser);
            var hits = searcher.Search(query, null, hits_limit, Sort.INDEXORDER).ScoreDocs;
            var results = _mapLuceneToDataList(hits, searcher);
            analyzer.Close();
            searcher.Close();
            searcher.Dispose();
            return results;
        }
    }
}
Example 6: MultiSearchBIMXchange
public static LuceneResult MultiSearchBIMXchange(Dictionary<string, string> terms, int pageSize, int pageNumber)
{
    var directory = FSDirectory.Open(new DirectoryInfo("LuceneIndex"));
    var booleanQuery = new BooleanQuery();
    foreach (var term in terms)
    {
        var query = new TermQuery(new Term(term.Key, term.Value));
        booleanQuery.Add(query, BooleanClause.Occur.MUST);
    }
    var searcher = new IndexSearcher(directory, true);
    var topDocs = searcher.Search(booleanQuery, 10);
    var docs = new List<Document>();
    var start = (pageNumber - 1) * pageSize;
    // bound by ScoreDocs.Length as well: only 10 hits were requested above, while TotalHits may be larger
    for (var i = start; i < start + pageSize && i < topDocs.ScoreDocs.Length; i++)
    {
        var scoreDoc = topDocs.ScoreDocs[i];
        var docId = scoreDoc.doc;
        var doc = searcher.Doc(docId);
        docs.Add(doc);
    }
    searcher.Close();
    directory.Close();
    var result = new LuceneResult { Results = docs, TotalCount = topDocs.TotalHits };
    return result;
}
Example 7: GetSearchResult
public override List<ISearchEntity> GetSearchResult(out int MatchCount)
{
    Analyzer analyzer = new StandardAnalyzer();
    IndexSearcher searcher = new IndexSearcher(searchInfo.ConfigElement.IndexDirectory);
    MultiFieldQueryParser parserName = new MultiFieldQueryParser(new string[] { "title", "content", "keywords" }, analyzer);
    Query queryName = parserName.Parse(searchInfo.QueryString);
    Hits hits = searcher.Search(queryName);
    List<ISearchEntity> ResultList = new List<ISearchEntity>();
    for (int i = 0; i < hits.Length(); i++)
    {
        Document doc = hits.Doc(i);
        ResultList.Add((ISearchEntity)new NewsModel()
        {
            EntityIdentity = Convert.ToInt32(doc.Get("newsid")),
            Title = Convert.ToString(doc.Get("title")),
            Content = Convert.ToString(doc.Get("content")),
            Keywords = doc.Get("keywords")
        });
    }
    searcher.Close();
    MatchCount = hits.Length();
    return ResultList;
}
Example 8: Find
public SearchResults Find(string terms)
{
    Directory directory = FSDirectory.GetDirectory("./index", false);
    // Now search the index:
    var isearcher = new IndexSearcher(directory);
    // Parse a simple query that searches for "text":
    //Query query = QueryParser.Parse("text", "fieldname", analyzer);
    var qp = new QueryParser("description", _analyzer);
    Query query = qp.Parse(terms);
    Hits hits = isearcher.Search(query);
    var sr = new SearchResults();
    // Iterate through the results:
    for (int i = 0; i < hits.Length(); i++)
    {
        Document hitDoc = hits.Doc(i);
        sr.Add(new Result() { Name = hitDoc.Get("name"), Description = hitDoc.Get("description") });
    }
    isearcher.Close();
    directory.Close();
    return sr;
}
Example 9: btnExecuteSearch_Click
private void btnExecuteSearch_Click(object sender, EventArgs e)
{
    Directory indexDirectory = FSDirectory.Open(new System.IO.DirectoryInfo(tempPath));
    IndexSearcher searcher = new IndexSearcher(indexDirectory, true); // read-only=true
    // TODO: QueryParser support for Hebrew terms (most concerning issue is with acronyms - mid-word quotes)
    QueryParser qp = new QueryParser("content", analyzer);
    qp.SetDefaultOperator(QueryParser.Operator.AND);
    Query query = qp.Parse(txbSearchQuery.Text);
    ScoreDoc[] hits = searcher.Search(query, null, 1000).scoreDocs;
    // Iterate through the results:
    BindingList<SearchResult> l = new BindingList<SearchResult>();
    for (int i = 0; i < hits.Length; i++)
    {
        Document hitDoc = searcher.Doc(hits[i].doc);
        SearchResult sr = new SearchResult(hitDoc.GetField("title").StringValue(),
            hitDoc.GetField("path").StringValue(), hits[i].score);
        l.Add(sr);
    }
    searcher.Close();
    indexDirectory.Close();
    dgvResults.DataSource = l;
}
Example 10: HelloWorldTest
public void HelloWorldTest()
{
    Directory directory = new RAMDirectory();
    Analyzer analyzer = new StandardAnalyzer(Version.LUCENE_29);
    IndexWriter writer = new IndexWriter(directory,
        analyzer,
        IndexWriter.MaxFieldLength.UNLIMITED);
    Document doc = new Document();
    doc.Add(new Field("id", "1", Field.Store.YES, Field.Index.NO));
    doc.Add(new Field("postBody", "sample test", Field.Store.YES, Field.Index.ANALYZED));
    writer.AddDocument(doc);
    writer.Optimize();
    writer.Commit();
    writer.Close();
    QueryParser parser = new QueryParser(Version.LUCENE_29, "postBody", analyzer);
    Query query = parser.Parse("sample test");
    //Setup searcher
    IndexSearcher searcher = new IndexSearcher(directory, true);
    //Do the search
    var hits = searcher.Search(query, null, 10);
    // iterate over the returned ScoreDocs (TotalHits may exceed the 10 hits requested above)
    for (int i = 0; i < hits.ScoreDocs.Length; i++)
    {
        var doc1 = hits.ScoreDocs[i];
    }
    searcher.Close();
    directory.Close();
}
Example 11: TestBooleanQuerySerialization
public void TestBooleanQuerySerialization()
{
    Lucene.Net.Search.BooleanQuery lucQuery = new Lucene.Net.Search.BooleanQuery();
    lucQuery.Add(new Lucene.Net.Search.TermQuery(new Lucene.Net.Index.Term("field", "x")), Occur.MUST);
    System.Runtime.Serialization.Formatters.Binary.BinaryFormatter bf = new System.Runtime.Serialization.Formatters.Binary.BinaryFormatter();
    System.IO.MemoryStream ms = new System.IO.MemoryStream();
    bf.Serialize(ms, lucQuery);
    ms.Seek(0, System.IO.SeekOrigin.Begin);
    Lucene.Net.Search.BooleanQuery lucQuery2 = (Lucene.Net.Search.BooleanQuery)bf.Deserialize(ms);
    ms.Close();
    Assert.AreEqual(lucQuery, lucQuery2, "Error in serialization");
    Lucene.Net.Search.IndexSearcher searcher = new Lucene.Net.Search.IndexSearcher(dir, true);
    int hitCount = searcher.Search(lucQuery, 20).TotalHits;
    searcher.Close();
    searcher = new Lucene.Net.Search.IndexSearcher(dir, true);
    int hitCount2 = searcher.Search(lucQuery2, 20).TotalHits;
    Assert.AreEqual(hitCount, hitCount2, "Error in serialization - different hit counts");
}
Example 12: SearchBIMXchange
public static LuceneResult SearchBIMXchange(string field, string key, int pageSize, int pageNumber)
{
    const string luceneIndexPath = "C:\\LuceneIndex";
    var directory = FSDirectory.Open(new DirectoryInfo(luceneIndexPath));
    var analyzer = new StandardAnalyzer(Version.LUCENE_29);
    var parser = new QueryParser(Version.LUCENE_29, field, analyzer);
    var query = parser.Parse(String.Format("{0}*", key));
    var searcher = new IndexSearcher(directory, true);
    var topDocs = searcher.Search(query, 1000000);
    var docs = new List<Document>();
    var start = (pageNumber - 1) * pageSize;
    for (var i = start; i < start + pageSize && i < topDocs.TotalHits; i++)
    {
        var scoreDoc = topDocs.ScoreDocs[i];
        var docId = scoreDoc.doc;
        var doc = searcher.Doc(docId);
        docs.Add(doc);
    }
    searcher.Close();
    directory.Close();
    var result = new LuceneResult { Results = docs, TotalCount = topDocs.TotalHits };
    return result;
}
Example 13: getHitCount
protected int getHitCount(String fieldName, String searchString)
{
    IndexSearcher searcher = new IndexSearcher(directory, true); //4
    Term t = new Term(fieldName, searchString);
    Query query = new TermQuery(t); //5
    int hitCount = TestUtil.hitCount(searcher, query); //6
    searcher.Close();
    return hitCount;
}
Example 14: Page_Load
protected void Page_Load(object sender, EventArgs e)
{
    //if (Session["KeyWords"] == null ? false : true)
    //{
    //    Response.Redirect("Search.aspx");
    //}
    String text = Session["KeyWords"].ToString();
    ChineseAnalyzer analyzer = new ChineseAnalyzer();
    TokenStream ts = analyzer.TokenStream("ItemName", new System.IO.StringReader(text));
    Lucene.Net.Analysis.Token token;
    try
    {
        int n = 0;
        while ((token = ts.Next()) != null)
        {
            this.lbMsg.Text += (n++) + "->" + token.TermText() + " " + token.StartOffset() + " " + token.EndOffset() + " " + token.Type() + "<br>";
            // Response.Write((n++) + "->" + token.TermText() + " " + token.StartOffset() + " "
            //     + token.EndOffset() + " " + token.Type() + "<br>");
        }
    }
    catch
    {
        this.lbMsg.Text = "wrong";
    }
    // Analyzer analyzer = new StandardAnalyzer();
    Directory directory = FSDirectory.GetDirectory(Server.MapPath("/indexFile/"), false);
    IndexSearcher isearcher = new IndexSearcher(directory);
    Query query;
    query = QueryParser.Parse(Session["KeyWords"].ToString(), "ItemName", analyzer);
    //query = QueryParser.Parse("2", "nid", analyzer);
    Hits hits = isearcher.Search(query);
    this.lbMsg.Text += "<font color=red>共找到" + hits.Length() + "条记录</font><br>";
    //Response.Write("<font color=red>共找到" + hits.Length() + "条记录</font><br>");
    for (int i = 0; i < hits.Length(); i++)
    {
        Document hitDoc = hits.Doc(i);
        this.lbMsg.Text += "编号:" + hitDoc.Get("ItemID").ToString() + "<br>"
            + "分类:" + hitDoc.Get("CategoryName").ToString() + "<br>"
            + "专题:" + hitDoc.Get("ProductName").ToString() + "<br>"
            + "标题:<a href=" + hitDoc.Get("visiturl").ToString() + ">" + hitDoc.Get("ItemName").ToString() + "</a><br>";
        //Response.Write("编号:" + hitDoc.Get("ItemID").ToString() + "<br>");
        //Response.Write("分类:" + hitDoc.Get("CategoryName").ToString() + "<br>");
        //Response.Write("标题:<a href=" + hitDoc.Get("visiturl").ToString() + ">" + hitDoc.Get("ItemName").ToString() + "</a><br>");
        //Response.Write("专题:" + hitDoc.Get("ProductName").ToString() + "<br>");
    }
    isearcher.Close();
    directory.Close();
}
Example 15: SearchIndex
public virtual ActionResult SearchIndex(string term)
{
    //Setup indexer
    Directory directory = FSDirectory.GetDirectory("LuceneIndex", true);
    Analyzer analyzer = new StandardAnalyzer();
    IndexWriter writer = new IndexWriter(directory, analyzer, true);
    IndexReader red = IndexReader.Open(directory);
    int totDocs = red.MaxDoc();
    red.Close();
    foreach (var ticket in _ticketRepo.GetTicketsByProject(CurrentProject, 0, 1000).Items)
    {
        AddListingToIndex(ticket, writer);
    }
    writer.Optimize();
    //Close the writer
    writer.Close();
    //Setup searcher
    IndexSearcher searcher = new IndexSearcher(directory);
    MultiFieldQueryParser parser = new MultiFieldQueryParser(
        new string[] { "summary", "keyName" },
        analyzer);
    Query query = parser.Parse(term);
    Hits hits = searcher.Search(query);
    var tickets = new List<Ticket>();
    for (int i = 0; i < hits.Length(); i++)
    {
        Document doc = hits.Doc(i);
        int id = 0;
        if (int.TryParse(doc.Get("id"), out id))
        {
            tickets.Add(_ticketRepo.GetTicketById(id));
        }
    }
    //Clean up everything
    searcher.Close();
    directory.Close();
    return View(new SearchIndexModel()
    {
        Tickets = tickets
    });
}