本文整理汇总了C#中IndexSearcher类的典型用法代码示例。如果您正苦于以下问题:C# IndexSearcher类的具体用法?C# IndexSearcher怎么用?C# IndexSearcher使用的例子?那么恭喜您, 这里精选的类代码示例或许可以为您提供帮助。
IndexSearcher类属于命名空间,在下文中一共展示了IndexSearcher类的15个代码示例,这些例子默认根据受欢迎程度排序。您可以为喜欢或者感觉有用的代码点赞,您的评价将有助于系统推荐出更棒的C#代码示例。
示例1: search
/// <summary>
/// Opens the Lucene index at <paramref name="indexDir"/>, runs the query
/// string <paramref name="q"/> against the "contents" field, and prints the
/// "filename" field of each of the top 10 hits to the console.
/// </summary>
/// <param name="indexDir">Filesystem path of the Lucene index directory.</param>
/// <param name="q">Raw query string parsed with a StandardAnalyzer.</param>
public static void search(String indexDir, String q)
{
    // FSDirectory.Open takes the index *directory*, not a file.
    Directory dir = FSDirectory.Open(new System.IO.DirectoryInfo(indexDir));
    // true = open the searcher read-only.
    IndexSearcher searcher = new IndexSearcher(dir, true);
    QueryParser parser = new QueryParser("contents", new StandardAnalyzer(Version.LUCENE_CURRENT));
    Query query = parser.Parse(q);
    Lucene.Net.Search.TopDocs hits = searcher.Search(query, 10);
    System.Console.WriteLine("Found " + hits.TotalHits + " document(s) that matched query '" + q + "':");
    for (int i = 0; i < hits.ScoreDocs.Length; i++)
    {
        ScoreDoc scoreDoc = hits.ScoreDocs[i];
        Document doc = searcher.Doc(scoreDoc.Doc);
        System.Console.WriteLine(doc.Get("filename"));
    }
    // Release the underlying reader/directory handles.
    searcher.Close();
}
示例2: FullSearch
//.........这里部分代码省略.........
var custom = SearchHelper.GetCustom(currentSearchString);
if (custom.Length > 0)
{
var customSearch = custom.Split('|');
if (customSearch.Length > 0)
{
try
{
refinements.Add(customSearch[0], customSearch[1]);
}
catch (Exception exc)
{
Log.Error("Could not parse the custom search query", exc);
}
}
}
var search = SearchHelper.GetField(currentSearchString);
if (search.Length > 0)
{
var customSearch = search;
refinements.Add(customSearch, SearchHelper.GetText(currentSearchString));
}
var fileTypes = SearchHelper.GetFileTypes(currentSearchString);
if (fileTypes.Length > 0)
{
refinements.Add("extension", SearchHelper.GetFileTypes(currentSearchString));
}
var s = SearchHelper.GetSite(currentSearchString);
if (s.Length > 0)
{
SiteContext siteContext = SiteContextFactory.GetSiteContext(SiteManager.GetSite(s).Name);
var db = Context.ContentDatabase ?? Context.Database;
var startItemId = db.GetItem(siteContext.StartPath);
locationSearch = startItemId.ID.ToString();
}
var culture = CultureInfo.CreateSpecificCulture("en-US");
var startFlag = true;
var endFlag = true;
if (SearchHelper.GetStartDate(currentSearchString).Any())
{
if (!DateTime.TryParse(SearchHelper.GetStartDate(currentSearchString), culture, DateTimeStyles.None, out startDate))
{
startDate = DateTime.Now;
}
startFlag = false;
}
if (SearchHelper.GetEndDate(currentSearchString).Any())
{
if (!DateTime.TryParse(SearchHelper.GetEndDate(currentSearchString), culture, DateTimeStyles.None, out endDate))
{
endDate = DateTime.Now.AddDays(1);
}
endFlag = false;
}
using (var searcher = new IndexSearcher(indexName))
{
var location = IdHelper.ParseId(SearchHelper.GetLocation(currentSearchString, locationSearch));
var locationIdFromItem = itm != null ? itm.ID.ToGuid().ToEnumerable() : null;
var rangeSearch = new DateRangeSearchParam
{
ID = SearchHelper.GetID(currentSearchString).IsEmpty() ? SearchHelper.GetRecent(currentSearchString) : SearchHelper.GetID(currentSearchString),
ShowAllVersions = false,
FullTextQuery = SearchHelper.GetText(currentSearchString),
Refinements = refinements,
RelatedIds = references.Any() ? IdHelper.ParseId(references) : null,
SortDirection = sortDirection,
TemplateIds = SearchHelper.GetTemplates(currentSearchString),
LocationIds = !location.Any() ? locationIdFromItem : location,
Language = languages,
SortByField = sortField,
PageNumber = pageNumber,
PageSize = pageSize,
Author = author == string.Empty ? string.Empty : author,
};
if (!startFlag || !endFlag)
{
rangeSearch.Ranges = new List<DateRangeSearchParam.DateRangeField>
{
new DateRangeSearchParam.DateRangeField(SearchFieldIDs.CreatedDate, startDate, endDate)
{
InclusiveStart = true, InclusiveEnd = true
}
};
}
var returnResult = searcher.GetItems(rangeSearch);
hitCount = returnResult.Key;
return returnResult.Value;
}
}
示例3: CreateWeight
/// <summary>
/// Builds the composite weight for this drill-down query: the base query's
/// weight plus, for every drill-down dimension, either the dimension's
/// pre-built filter (when one exists) or a weight created from the rewritten
/// dimension query.
/// </summary>
public override Weight CreateWeight(IndexSearcher searcher)
{
    Weight rootWeight = baseQuery.CreateWeight(searcher);
    object[] dimEntries = new object[drillDownQueries.Length];
    for (int dim = 0; dim < drillDownQueries.Length; dim++)
    {
        Query dimQuery = drillDownQueries[dim];
        Filter dimFilter = DrillDownQuery.GetFilter(dimQuery);
        // TODO: would be nice if we could say "we will do no scoring" here....
        // Prefer the cached filter; otherwise rewrite the query and weight it.
        dimEntries[dim] = (object)dimFilter
            ?? searcher.Rewrite(dimQuery).CreateWeight(searcher);
    }
    return new WeightAnonymousInnerClassHelper(this, rootWeight, dimEntries);
}
示例4: TestFarsiRangeFilterCollating
/// <summary>
/// Indexes a single document containing the Farsi term U+0633 U+0627 U+0628
/// and checks that a collating TermRangeFilter excludes it for the first
/// range and includes it for the second.
/// </summary>
public virtual void TestFarsiRangeFilterCollating(Analyzer analyzer, BytesRef firstBeg, BytesRef firstEnd, BytesRef secondBeg, BytesRef secondEnd)
{
    Directory indexDir = NewDirectory();
    IndexWriter indexWriter = new IndexWriter(indexDir, new IndexWriterConfig(TEST_VERSION_CURRENT, analyzer));

    // One document: a Farsi content term plus a constant body term to match on.
    Document farsiDoc = new Document();
    farsiDoc.Add(new TextField("content", "\u0633\u0627\u0628", Field.Store.YES));
    farsiDoc.Add(new StringField("body", "body", Field.Store.YES));
    indexWriter.AddDocument(farsiDoc);
    indexWriter.Dispose();

    IndexReader indexReader = DirectoryReader.Open(indexDir);
    IndexSearcher indexSearcher = new IndexSearcher(indexReader);
    Query bodyQuery = new TermQuery(new Term("body", "body"));

    // Unicode order would include U+0633 in [ U+062F - U+0698 ], but Farsi
    // orders the U+0698 character before the U+0633 character, so the single
    // index Term below should NOT be returned by a TermRangeFilter with a Farsi
    // Collator (or an Arabic one for the case when Farsi searcher not
    // supported).
    ScoreDoc[] firstHits = indexSearcher.Search(bodyQuery, new TermRangeFilter("content", firstBeg, firstEnd, true, true), 1).ScoreDocs;
    Assert.AreEqual(0, firstHits.Length, "The index Term should not be included.");

    ScoreDoc[] secondHits = indexSearcher.Search(bodyQuery, new TermRangeFilter("content", secondBeg, secondEnd, true, true), 1).ScoreDocs;
    Assert.AreEqual(1, secondHits.Length, "The index Term should be included.");

    indexReader.Dispose();
    indexDir.Dispose();
}
示例5: AssertMatches
// Make sure the documents returned by the search match the expected list
// Copied from TestSort.java
private void AssertMatches(IndexSearcher searcher, Query query, Sort sort, string expectedResult)
{
ScoreDoc[] result = searcher.Search(query, null, 1000, sort).ScoreDocs;
StringBuilder buff = new StringBuilder(10);
int n = result.Length;
for (int i = 0; i < n; ++i)
{
Document doc = searcher.Doc(result[i].Doc);
IndexableField[] v = doc.GetFields("tracer");
for (int j = 0; j < v.Length; ++j)
{
buff.Append(v[j].StringValue);
}
}
Assert.AreEqual(expectedResult, buff.ToString());
}
示例6: TestCollationKeySort
// Test using various international locales with accented characters (which
// sort differently depending on locale)
//
// Copied (and slightly modified) from
// Lucene.Net.Search.TestSort.testInternationalSort()
//
// TODO: this test is really fragile. there are already 3 different cases,
// depending upon unicode version.
/// <summary>
/// Indexes ten documents whose sortable fields are analyzed with four
/// locale-specific analyzers (US, France, Sweden, Denmark), then sorts by
/// each locale field in turn and asserts the concatenated "tracer" values
/// match the expected per-locale order.
/// </summary>
/// <param name="usResult">Expected tracer order when sorting by the "US" field.</param>
/// <param name="frResult">Expected tracer order when sorting by the "France" field.</param>
/// <param name="svResult">Expected tracer order when sorting by the "Sweden" field.</param>
/// <param name="dkResult">Expected tracer order when sorting by the "Denmark" field.</param>
public virtual void TestCollationKeySort(Analyzer usAnalyzer, Analyzer franceAnalyzer, Analyzer swedenAnalyzer, Analyzer denmarkAnalyzer, string usResult, string frResult, string svResult, string dkResult)
{
Directory indexStore = NewDirectory();
IndexWriter writer = new IndexWriter(indexStore, new IndexWriterConfig(TEST_VERSION_CURRENT, new MockAnalyzer(Random(), MockTokenizer.WHITESPACE, false)));
// document data:
// the tracer field is used to determine which document was hit
// Columns: tracer, contents (x/y selector), then US/France/Sweden/Denmark
// sort values containing accented characters that collate differently.
string[][] sortData = new string[][] { new string[] { "A", "x", "p\u00EAche", "p\u00EAche", "p\u00EAche", "p\u00EAche" }, new string[] { "B", "y", "HAT", "HAT", "HAT", "HAT" }, new string[] { "C", "x", "p\u00E9ch\u00E9", "p\u00E9ch\u00E9", "p\u00E9ch\u00E9", "p\u00E9ch\u00E9" }, new string[] { "D", "y", "HUT", "HUT", "HUT", "HUT" }, new string[] { "E", "x", "peach", "peach", "peach", "peach" }, new string[] { "F", "y", "H\u00C5T", "H\u00C5T", "H\u00C5T", "H\u00C5T" }, new string[] { "G", "x", "sin", "sin", "sin", "sin" }, new string[] { "H", "y", "H\u00D8T", "H\u00D8T", "H\u00D8T", "H\u00D8T" }, new string[] { "I", "x", "s\u00EDn", "s\u00EDn", "s\u00EDn", "s\u00EDn" }, new string[] { "J", "y", "HOT", "HOT", "HOT", "HOT" } };
// Stored-only field type for the tracer so it can be read back from hits.
FieldType customType = new FieldType();
customType.Stored = true;
for (int i = 0; i < sortData.Length; ++i)
{
Document doc = new Document();
doc.Add(new Field("tracer", sortData[i][0], customType));
doc.Add(new TextField("contents", sortData[i][1], Field.Store.NO));
// Each locale field is analyzed with its own analyzer so the indexed
// terms carry locale-specific collation keys.
if (sortData[i][2] != null)
{
doc.Add(new TextField("US", usAnalyzer.TokenStream("US", new StringReader(sortData[i][2]))));
}
if (sortData[i][3] != null)
{
doc.Add(new TextField("France", franceAnalyzer.TokenStream("France", new StringReader(sortData[i][3]))));
}
if (sortData[i][4] != null)
{
doc.Add(new TextField("Sweden", swedenAnalyzer.TokenStream("Sweden", new StringReader(sortData[i][4]))));
}
if (sortData[i][5] != null)
{
doc.Add(new TextField("Denmark", denmarkAnalyzer.TokenStream("Denmark", new StringReader(sortData[i][5]))));
}
writer.AddDocument(doc);
}
// Single segment so sort order is deterministic across codecs.
writer.ForceMerge(1);
writer.Dispose();
IndexReader reader = DirectoryReader.Open(indexStore);
IndexSearcher searcher = new IndexSearcher(reader);
Sort sort = new Sort();
Query queryX = new TermQuery(new Term("contents", "x"));
Query queryY = new TermQuery(new Term("contents", "y"));
// Sort by each locale field and verify the expected tracer order.
sort.SetSort(new SortField("US", SortField.Type_e.STRING));
AssertMatches(searcher, queryY, sort, usResult);
sort.SetSort(new SortField("France", SortField.Type_e.STRING));
AssertMatches(searcher, queryX, sort, frResult);
sort.SetSort(new SortField("Sweden", SortField.Type_e.STRING));
AssertMatches(searcher, queryY, sort, svResult);
sort.SetSort(new SortField("Denmark", SortField.Type_e.STRING));
AssertMatches(searcher, queryY, sort, dkResult);
reader.Dispose();
indexStore.Dispose();
}
示例7: RunQuery
/// <summary>
/// Runs the query unsorted and then sorted by "title"; when the default codec
/// supports doc values, re-runs the sorted search and asserts the hit counts
/// agree. Returns the sorted search's total hit count.
/// </summary>
private int RunQuery(IndexSearcher s, Query q)
{
    // Plain search first (result intentionally discarded).
    s.Search(q, 10);
    Sort titleSort = new Sort(new SortField("title", SortField.Type_e.STRING));
    int hitCount = s.Search(q, null, 10, titleSort).TotalHits;
    if (DefaultCodecSupportsDocValues())
    {
        // The doc-values-backed sort must report the same number of hits.
        Sort dvTitleSort = new Sort(new SortField("title", SortField.Type_e.STRING));
        Assert.AreEqual(hitCount, s.Search(q, null, 10, dvTitleSort).TotalHits);
    }
    return hitCount;
}
示例8: GenerateHighlights
/// <summary>
/// Re-runs the search query over a near-real-time reader and attaches the
/// best highlight fragments (joined with " ... ") to each matching document
/// in <paramref name="documents"/> under HIGHLIGHT_FIELD_NAME.
/// </summary>
/// <param name="documents">Documents to annotate; matched by their string id.</param>
/// <param name="writer">Writer whose live index is read (NRT reader).</param>
/// <param name="criteria">Carries the raw query text to highlight.</param>
private static void GenerateHighlights(IList<Document> documents, IndexWriter writer, SearchCriteria criteria)
{
    var documentHighlightMap = documents.ToDictionary(c => c._id.ToString());
    // The NRT reader was previously leaked; dispose it so index files are released.
    using (var reader = DirectoryReader.Open(writer, true, true))
    {
        var queryParser = new HighlighterQueryParser(writer.GetAnalyzer());
        queryParser.SetMultiTermRewriteMethod(MultiTermQuery.SCORING_BOOLEAN_REWRITE);
        var query = queryParser.Parse(criteria.Query)
                .Rewrite(reader);
        var highlighter = CreateHighlighter();
        var fieldQuery = highlighter.GetFieldQuery(query);
        var searcher = new IndexSearcher(reader);
        var topFieldDocs = searcher.Search(query, documents.Count, Sort.RELEVANCE);
        foreach (var sd in topFieldDocs.ScoreDocs)
        {
            var bestFragments = highlighter.GetBestFragments(fieldQuery, reader, sd.Doc, Schema.StandardField.FULL_TEXT, FRAGMENT_SIZE, FRAGMENT_COUNT);
            var document = searcher.Doc(sd.Doc);
            var docId = document.Get(Schema.StandardField.ID);
            // Single TryGetValue lookup instead of ContainsKey + indexer.
            Document target;
            if (bestFragments.Length > 0 && documentHighlightMap.TryGetValue(docId, out target))
            {
                var dictionary = target.AsDictionary();
                var highlight = String.Join($"{Environment.NewLine} ... {Environment.NewLine}", bestFragments);
                dictionary[HIGHLIGHT_FIELD_NAME] = highlight;
            }
        }
    }
}
示例9: GetFacets
/// <summary>
/// Runs every enabled facet configured under Constants.FacetFolder against
/// the current search query and returns one result list per facet.
/// </summary>
/// <param name="_searchQuery">The parsed search refinements for this request.</param>
/// <returns>A list of facet result lists, one entry per enabled facet item.</returns>
public static List<List<FacetReturn>> GetFacets(List<SearchStringModel> _searchQuery)
{
    var ret = new List<List<FacetReturn>>();
    var facets = Context.ContentDatabase.GetItem(Constants.FacetFolder).Children;
    foreach (Item facet in facets)
    {
        if (facet.Fields["Enabled"].Value != "1")
        {
            continue;
        }
        // Instantiate the configured facet type and cast it exactly once
        // (the original cast it twice: once for the check, once for the call).
        var facetImpl = Activator.CreateInstance(Type.GetType(facet.Fields["Type"].Value)) as IFacet;
        if (facetImpl.IsNotNull())
        {
            var locationOverride = GetLocationOverride(_searchQuery);
            var indexName = BucketManager.GetContextIndex(Context.ContentDatabase.GetItem(locationOverride));
            using (var searcher = new IndexSearcher(indexName))
            using (var context = new SortableIndexSearchContext(searcher.Index))
            {
                var query = SearchHelper.GetBaseQuery(_searchQuery, locationOverride);
                var queryBase = searcher.ContructQuery(query);
                // Bit set of documents matching the base query, used to scope the facet.
                var searchBitArray = new QueryFilter(queryBase).Bits(context.Searcher.GetIndexReader());
                var res = facetImpl.Filter(queryBase, _searchQuery, locationOverride, searchBitArray);
                ret.Add(res);
            }
        }
    }
    return ret;
}
示例10: Search
/// <summary>
/// An extension of Item that allows you to launch a Search from an item
/// </summary>
/// <returns>List of Results of Type IEnumerable List of SitecoreItem (which implements IItem)</returns>
/// <param name="startLocationItem">The start location of the search</param>
/// <param name="hitCount">This will output the hitCount of the search</param>
/// <param name="currentSearchString">The raw JSON Parse query</param>
/// <param name="indexName">Force query to run on a particular index</param>
/// <param name="sortField">Sort query by field (must be in index)</param>
/// <param name="sortDirection">Sort in either "asc" or "desc"</param>
/// <example>BucketManager.Search(Sitecore.Context.Item, SearchModel)</example>
public static IEnumerable<SitecoreItem> Search(Item startLocationItem, out int hitCount, List<SearchStringModel> currentSearchString, string indexName = "itembuckets_buckets", string sortField = "", string sortDirection = "")
{
    var refinements = new SafeDictionary<string>();
    var searchStringModels = SearchHelper.GetTags(currentSearchString);
    if (searchStringModels.Count > 0)
    {
        foreach (var ss in searchStringModels)
        {
            var query = ss.Value;
            if (query.Contains("tagid="))
            {
                // "…|tagid=<id>" — strip the prefix to get the raw tag item id.
                query = query.Split('|')[1].Replace("tagid=", string.Empty);
            }
            var db = Context.ContentDatabase ?? Context.Database;
            var tagItem = db.GetItem(query);
            // Guard: an unknown/stale tag id previously caused a NullReferenceException.
            if (tagItem != null)
            {
                refinements.Add("_tags", tagItem.ID.ToString());
            }
        }
    }
    using (var searcher = new IndexSearcher(indexName))
    {
        var keyValuePair = searcher.GetItems(new DateRangeSearchParam { FullTextQuery = SearchHelper.GetText(currentSearchString),
            RelatedIds = null,
            SortDirection = sortDirection,
            TemplateIds = SearchHelper.GetTemplates(currentSearchString),
            LocationIds = startLocationItem.ID.ToGuid().ToEnumerable(),
            SortByField = sortField, Refinements = refinements});
        hitCount = keyValuePair.Key;
        return keyValuePair.Value;
    }
}
示例11: SmokeTestSearcher
/// <summary>
/// Quick sanity check on a searcher: two single-term queries plus one
/// two-term phrase query against the "body" field.
/// </summary>
protected internal virtual void SmokeTestSearcher(IndexSearcher s)
{
    RunQuery(s, new TermQuery(new Term("body", "united")));
    RunQuery(s, new TermQuery(new Term("titleTokenized", "states")));
    PhraseQuery phrase = new PhraseQuery();
    foreach (string word in new[] { "united", "states" })
    {
        phrase.Add(new Term("body", word));
    }
    RunQuery(s, phrase);
}
示例12: TestRandomSearchPerformance
/// <summary>
/// Runs a term query for every sampled term and asserts each one matches at
/// least one document.
/// </summary>
public virtual void TestRandomSearchPerformance()
{
    var searcher = new IndexSearcher(Reader);
    foreach (Term sampleTerm in SampleTerms)
    {
        var hits = searcher.Search(new TermQuery(sampleTerm), 10);
        Assert.IsTrue(hits.TotalHits > 0);
    }
}
示例13: SearchWrapper
// Wraps an IndexSearcher acquired from the given SearcherManager.
// NOTE(review): acquire() hands out a reference-counted searcher — presumably
// this wrapper releases it back to the manager when it is disposed; confirm
// against the rest of this type.
internal SearchWrapper(SearcherManager searcherManager)
{
_searcherManager = searcherManager;
_indexSearcher = (IndexSearcher) _searcherManager.acquire();
}
示例14: SetUp
/// <summary>
/// Builds a randomized keyword index: at least 200 random unicode terms are
/// indexed into "field", collected into a sorted term set, and turned into a
/// string-union automaton for the regexp tests in this fixture.
/// </summary>
public override void SetUp()
{
    base.SetUp();
    // Random regexps are awful — which is good for testing — but the preflex
    // (Lucene3x) codec is very slow on them, so run fewer iterations there.
    NumIterations = Codec.Default.Name.Equals("Lucene3x") ? 10 * RANDOM_MULTIPLIER : AtLeast(50);
    Dir = NewDirectory();
    var writerConfig = (IndexWriterConfig)NewIndexWriterConfig(TEST_VERSION_CURRENT, new MockAnalyzer(Random(), MockTokenizer.KEYWORD, false)).SetMaxBufferedDocs(TestUtil.NextInt(Random(), 50, 1000));
    RandomIndexWriter indexWriter = new RandomIndexWriter(Random(), Dir, writerConfig);
    // Reuse one document/field instance, mutating the field value per doc.
    Document template = new Document();
    Field keywordField = NewStringField("field", "", Field.Store.YES);
    template.Add(keywordField);
    Terms = new SortedSet<BytesRef>();
    int docCount = AtLeast(200);
    for (int docIndex = 0; docIndex < docCount; docIndex++)
    {
        string randomTerm = TestUtil.RandomUnicodeString(Random());
        keywordField.StringValue = randomTerm;
        Terms.Add(new BytesRef(randomTerm));
        indexWriter.AddDocument(template);
    }
    TermsAutomaton = BasicAutomata.MakeStringUnion(Terms);
    // Grab the NRT reader before disposing the writer.
    Reader = indexWriter.Reader;
    Searcher = NewSearcher(Reader);
    indexWriter.Dispose();
}
示例15: SetUp
/// <summary>
/// Per-test setup: builds a 1000-document payload test index via
/// PayloadHelper and keeps the resulting searcher and its reader for the
/// tests in this fixture.
/// </summary>
public override void SetUp()
{
base.SetUp();
PayloadHelper helper = new PayloadHelper();
// helper.SetUp creates/populates the index and returns a searcher over it.
Searcher_Renamed = helper.SetUp(Random(), Similarity, 1000);
IndexReader = Searcher_Renamed.IndexReader;
}