This article collects typical usage examples of the C# method Lucene.Net.Search.Query.ToString. If you have been wondering how Query.ToString is used in practice, or what real-world examples of C# Query.ToString look like, the curated code examples below may help. You can also explore further usage examples of the containing class, Lucene.Net.Search.Query.
The following shows 15 code examples of the Query.ToString method, sorted by popularity by default. You can upvote the examples you like or find useful; your feedback helps the system recommend better C# code examples.
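Before the collected examples, here is a minimal sketch of what the two overloads of Query.ToString return. It is illustrative only: the class, field, and term names are assumptions rather than code taken from any example below, and it assumes a Lucene.Net 4.x-style API in which Occur is a top-level enum (older versions use BooleanClause.Occur instead).

using System;
using Lucene.Net.Index;
using Lucene.Net.Search;

// Minimal, self-contained sketch (illustrative names) of the two Query.ToString overloads.
public static class QueryToStringSketch
{
    public static void Main()
    {
        // A BooleanQuery over a hypothetical "title" field.
        BooleanQuery query = new BooleanQuery();
        query.Add(new TermQuery(new Term("title", "lucene")), Occur.MUST);
        query.Add(new TermQuery(new Term("title", "net")), Occur.SHOULD);

        // No default field: every clause is printed with its field prefix,
        // e.g. "+title:lucene title:net".
        Console.WriteLine(query.ToString());

        // With a default field, clauses on that field omit the prefix,
        // e.g. "+lucene net".
        Console.WriteLine(query.ToString("title"));
    }
}

The examples below rely on exactly this behavior: ToString(defaultFieldName) gives a readable, field-relative rendering of the query that is embedded in test-failure messages, debug output, and JSON responses.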
Example 1: CheckHits_
public static void CheckHits_(Query query, System.String defaultFieldName, Searcher searcher, int[] results, TestCase testCase)
{
Hits hits = searcher.Search(query);
System.Collections.Hashtable correct = new System.Collections.Hashtable();
for (int i = 0; i < results.Length; i++)
{
correct.Add((System.Int32) results[i], null);
}
System.Collections.Hashtable actual = new System.Collections.Hashtable();
for (int i = 0; i < hits.Length(); i++)
{
actual.Add((System.Int32) hits.Id(i), null);
}
//Assert.AreEqual(correct, actual, query.ToString(defaultFieldName));
if (correct.Count != 0)
{
System.Collections.IDictionaryEnumerator iter = correct.GetEnumerator();
bool status = false;
while (iter.MoveNext())
{
status = actual.ContainsKey(iter.Key);
if (status == false)
break;
}
Assert.IsTrue(status, query.ToString(defaultFieldName));
}
}
Example 2: Compile
//====================================================================================== INodeQueryCompiler Members
public string Compile(NodeQuery query, out NodeQueryParameter[] parameters)
{
_nodeQuery = query;
CompiledQuery = TreeWalker(query);
parameters = new NodeQueryParameter[0];
return CompiledQuery.ToString();
}
Example 3: CountHits
private void CountHits(Analyzer analyzer, string[] docs, Query q, int expected)
{
Directory d = GetDirectory(analyzer, docs);
IndexReader r = DirectoryReader.Open(d);
IndexSearcher s = new IndexSearcher(r);
TotalHitCountCollector c = new TotalHitCountCollector();
s.Search(q, c);
Assert.AreEqual(expected, c.TotalHits, q.ToString());
r.Dispose();
d.Dispose();
}
Example 4: CheckEqual
public static void CheckEqual(Query query, ScoreDoc[] hits1, ScoreDoc[] hits2)
{
const float scoreTolerance = 1.0e-6f;
if (hits1.Length != hits2.Length)
{
Assert.Fail("Unequal lengths: hits1=" + hits1.Length + ",hits2=" + hits2.Length);
}
for (int i = 0; i < hits1.Length; i++)
{
if (hits1[i].Doc != hits2[i].Doc)
{
Assert.Fail("Hit " + i + " docnumbers don't match\n" + Hits2str(hits1, hits2, 0, 0) + "for query:" + query.ToString());
}
if ((hits1[i].Doc != hits2[i].Doc) || Math.Abs(hits1[i].Score - hits2[i].Score) > scoreTolerance)
{
Assert.Fail("Hit " + i + ", doc nrs " + hits1[i].Doc + " and " + hits2[i].Doc + "\nunequal : " + hits1[i].Score + "\n and: " + hits2[i].Score + "\nfor query:" + query.ToString());
}
}
}
Example 5: CheckNoMatchExplanations
public static float EXPLAIN_SCORE_TOLERANCE_DELTA = 0.00025f; // {{See: LUCENENET-288}} Intentional diversion from Java Lucene per above comment
/// <summary> Tests that all documents up to maxDoc which are *not* in the
/// expected result set have an explanation which indicates no match
/// (i.e., an Explanation value of 0.0f)
/// </summary>
public static void CheckNoMatchExplanations(Query q, System.String defaultFieldName, Searcher searcher, int[] results)
{
System.String d = q.ToString(defaultFieldName);
System.Collections.Hashtable ignore = new System.Collections.Hashtable();
for (int i = 0; i < results.Length; i++)
{
SupportClass.CollectionsHelper.AddIfNotContains(ignore, (System.Int32) results[i]);
}
int maxDoc = searcher.MaxDoc();
for (int doc = 0; doc < maxDoc; doc++)
{
if (ignore.Contains((System.Int32) doc))
continue;
Explanation exp = searcher.Explain(q, doc);
Assert.IsNotNull(exp, "Explanation of [[" + d + "]] for #" + doc + " is null");
Assert.AreEqual(0.0f, exp.GetValue(), 0.0f, "Explanation of [[" + d + "]] for #" + doc + " doesn't indicate non-match: " + exp.ToString());
}
}
Example 6: CheckNoMatchExplanations
/// <summary>
/// Tests that all documents up to maxDoc which are *not* in the
/// expected result set have an explanation which indicates that
/// the document does not match
/// </summary>
public static void CheckNoMatchExplanations(Query q, string defaultFieldName, IndexSearcher searcher, int[] results)
{
string d = q.ToString(defaultFieldName);
SortedSet<int?> ignore = new SortedSet<int?>();
for (int i = 0; i < results.Length; i++)
{
ignore.Add(Convert.ToInt32(results[i]));
}
int maxDoc = searcher.IndexReader.MaxDoc();
for (int doc = 0; doc < maxDoc; doc++)
{
if (ignore.Contains(Convert.ToInt32(doc)))
{
continue;
}
Explanation exp = searcher.Explain(q, doc);
Assert.IsNotNull(exp, "Explanation of [[" + d + "]] for #" + doc + " is null");
Assert.IsFalse(exp.IsMatch, "Explanation of [[" + d + "]] for #" + doc + " doesn't indicate non-match: " + exp.ToString());
}
}
Example 7: DoCheckHits
/// <summary>
/// Tests that a query matches an expected set of documents using Hits.
///
/// <p>
/// Note that when using the Hits API, documents will only be returned
/// if they have a positive normalized score.
/// </p> </summary>
/// <param name="query"> the query to test </param>
/// <param name="searcher"> the searcher to test the query against </param>
/// <param name="defaultFieldName"> used for displaing the query in assertion messages </param>
/// <param name="results"> a list of documentIds that must match the query </param>
/// <seealso cref="CheckHitCollector"/>
public static void DoCheckHits(Random random, Query query, string defaultFieldName, IndexSearcher searcher, int[] results)
{
ScoreDoc[] hits = searcher.Search(query, 1000).ScoreDocs;
SortedSet<int?> correct = new SortedSet<int?>();
for (int i = 0; i < results.Length; i++)
{
correct.Add(Convert.ToInt32(results[i]));
}
SortedSet<int?> actual = new SortedSet<int?>();
for (int i = 0; i < hits.Length; i++)
{
actual.Add(Convert.ToInt32(hits[i].Doc));
}
Assert.AreEqual(correct, actual, query.ToString(defaultFieldName));
QueryUtils.Check(random, query, searcher, LuceneTestCase.Rarely(random));
}
Example 8: ExplanationAsserter
public ExplanationAsserter(Query q, string defaultFieldName, IndexSearcher s, bool deep)
{
this.q = q;
this.s = s;
this.d = q.ToString(defaultFieldName);
this.Deep = deep;
}
Example 9: SearchEx
public static List<SearchRecord> SearchEx(out Query query,out Dictionary<string,List<int>> statistics)
{
List<SearchRecord> recordList = new List<SearchRecord>();
query = GetQuery();
statistics = new Dictionary<string,List<int>>();
try
{
if (searchIndexList.Count > 0)
{
foreach (IndexSet indexSet in searchIndexList)
{
if (indexSet.Type == IndexTypeEnum.Increment)
continue;
Query theQuery = GetQuery(indexSet);
Source source = indexDict[indexSet];
Dictionary<string, IndexField> fpDict = source.FieldDict;
//IndexSearcher searcher = new IndexSearcher(indexSet.Path);
IndexSearcher presearcher = new IndexSearcher(indexSet.Path);
ParallelMultiSearcher searcher = new ParallelMultiSearcher(new IndexSearcher[] { presearcher });
#if DEBUG
System.Console.WriteLine(theQuery.ToString()); // log the per-index query actually being executed
#endif
TopDocs topDocs = searcher.Search(theQuery.Weight(searcher), null, searchSet.MaxMatches);
ScoreDoc[] scoreDocs = topDocs.scoreDocs;
List<int> posList = new List<int>();
for (int i = 0; i < scoreDocs.Length; i++)
{
Document doc = searcher.Doc(scoreDocs[i].doc);
float score = scoreDocs[i].score;
if (score < searchSet.MinScore)
continue;
Field[] fields = new Field[doc.GetFields().Count];
doc.GetFields().CopyTo(fields, 0);
List<SearchField> sfList = new List<SearchField>();
foreach (Field field in fields)
{
if (fpDict.ContainsKey(field.Name()))
sfList.Add(new SearchField(field, fpDict[field.Name()]));
else
sfList.Add(new SearchField(field));
}
recordList.Add(new SearchRecord(indexSet, sfList, indexDict[indexSet].PrimaryKey, score));
posList.Add(recordList.Count - 1);
}
try
{
statistics.Add(indexSet.Caption, posList);
}
catch (Exception)
{
int i = 2;
while (statistics.ContainsKey(indexSet.Caption + i.ToString()))
i++;
statistics.Add(indexSet.Caption + i.ToString(), posList);
}
}
}
else
{
foreach (IndexSet indexSet in indexFieldsDict.Keys)
{
if (indexSet.Type == IndexTypeEnum.Increment)
continue;
Query theQuery = GetQuery(indexSet);
Source source = indexDict[indexSet];
Dictionary<string, IndexField> fpDict = source.FieldDict;
//IndexSearcher searcher = new IndexSearcher(indexSet.Path);
IndexSearcher presearcher = new IndexSearcher(indexSet.Path);
ParallelMultiSearcher searcher = new ParallelMultiSearcher(new IndexSearcher[] { presearcher });
#if DEBUG
System.Console.WriteLine(theQuery.ToString());
#endif
TopDocs topDocs = searcher.Search(theQuery.Weight(searcher), null, searchSet.MaxMatches);
ScoreDoc[] scoreDocs = topDocs.scoreDocs;
List<int> posList = new List<int>();
for (int i = 0; i < scoreDocs.Length; i++)
{
Document doc = searcher.Doc(scoreDocs[i].doc);
float score = scoreDocs[i].score;
if (score < searchSet.MinScore)
continue;
Field[] fields = new Field[doc.GetFields().Count];
doc.GetFields().CopyTo(fields, 0);
List<SearchField> sfList = new List<SearchField>();
foreach (Field field in fields)
{
if (fpDict.ContainsKey(field.Name()))
sfList.Add(new SearchField(field, fpDict[field.Name()]));
else
sfList.Add(new SearchField(field));
}
recordList.Add(new SearchRecord(indexSet, sfList, indexDict[indexSet].PrimaryKey, score));
posList.Add(recordList.Count - 1);
}
try
{
statistics.Add(indexSet.Caption, posList);
}
catch (Exception)
{
//......... part of the code is omitted here .........
Example 10: CheckHitCollector
/// <summary>
/// Tests that a query matches an expected set of documents using a
/// HitCollector.
///
/// <p>
/// Note that when using the HitCollector API, documents will be collected
/// if they "match" regardless of what their score is.
/// </p> </summary>
/// <param name="query"> the query to test </param>
/// <param name="searcher"> the searcher to test the query against </param>
/// <param name="defaultFieldName"> used for displaying the query in assertion messages </param>
/// <param name="results"> a list of documentIds that must match the query </param>
/// <seealso cref="CheckHits"/>
public static void CheckHitCollector(Random random, Query query, string defaultFieldName, IndexSearcher searcher, int[] results)
{
QueryUtils.Check(random, query, searcher);
Trace.TraceInformation("Checked");
SortedSet<int?> correct = new SortedSet<int?>();
for (int i = 0; i < results.Length; i++)
{
correct.Add(Convert.ToInt32(results[i]));
}
SortedSet<int?> actual = new SortedSet<int?>();
Collector c = new SetCollector(actual);
searcher.Search(query, c);
Assert.AreEqual(correct, actual, "Simple: " + query.ToString(defaultFieldName));
for (int i = -1; i < 2; i++)
{
actual.Clear();
IndexSearcher s = QueryUtils.WrapUnderlyingReader(random, searcher, i);
s.Search(query, c);
Assert.AreEqual(correct, actual, "Wrap Reader " + i + ": " + query.ToString(defaultFieldName));
}
}
Example 11: CheckEqual
public static void CheckEqual(Query query, Hits hits1, Hits hits2)
{
float scoreTolerance = 1.0e-6f;
if (hits1.Length() != hits2.Length())
{
Assert.Fail("Unequal lengths: hits1=" + hits1.Length() + ",hits2=" + hits2.Length());
}
for (int i = 0; i < hits1.Length(); i++)
{
if (hits1.Id(i) != hits2.Id(i))
{
Assert.Fail("Hit " + i + " docnumbers don't match\n" + Hits2str(hits1, hits2, 0, 0) + "for query:" + query.ToString());
}
if ((hits1.Id(i) != hits2.Id(i)) || System.Math.Abs(hits1.Score(i) - hits2.Score(i)) > scoreTolerance)
{
Assert.Fail("Hit " + i + ", doc nrs " + hits1.Id(i) + " and " + hits2.Id(i) + "\nunequal : " + hits1.Score(i) + "\n and: " + hits2.Score(i) + "\nfor query:" + query.ToString());
}
}
}
Example 12: MakeResults
private static string MakeResults(IndexSearcher searcher, TopDocs topDocs, int skip, int take, bool includeExplanation, Query query, long elapsed, IDictionary<string, int> rankings, PackageSearcherManager manager)
{
// note the use of a StringBuilder because we have the response data already formatted as JSON in the fields in the index
StringBuilder strBldr = new StringBuilder();
string timestamp;
if (!searcher.IndexReader.CommitUserData.TryGetValue("commit-time-stamp", out timestamp))
{
timestamp = null;
}
strBldr.AppendFormat("{{\"totalHits\":{0},\"timeTakenInMs\":{1},\"index\":\"{2}\"", topDocs.TotalHits, elapsed, manager.IndexName);
if (!String.IsNullOrEmpty(timestamp))
{
strBldr.AppendFormat(",\"indexTimestamp\":\"{0}\"", timestamp);
}
if (includeExplanation)
{
// JsonConvert.Serialize does escaping and quoting.
strBldr.AppendFormat(",\"executedQuery\":{0}", Newtonsoft.Json.JsonConvert.SerializeObject(query.ToString()));
}
strBldr.Append(",\"data\":[");
bool hasResult = false;
for (int i = skip; i < topDocs.ScoreDocs.Length; i++)
{
ScoreDoc scoreDoc = topDocs.ScoreDocs[i];
Document doc = searcher.Doc(scoreDoc.Doc);
string data = doc.Get("Data");
string id = doc.Get("Id");
NuGet.Versioning.NuGetVersion ngVersion = new Versioning.NuGetVersion(doc.Get("Version"));
if (!String.IsNullOrEmpty(id) && ngVersion != null)
{
Tuple<int,int> countRecord = manager.GetDownloadCount(id,ngVersion.ToNormalizedString());
if (countRecord != null)
{
// Patch the data in to the JSON
JObject parsed = JObject.Parse(data);
parsed["DownloadCount"] = countRecord.Item1;
parsed["PackageRegistration"]["DownloadCount"] = countRecord.Item2;
data = parsed.ToString(Formatting.None);
}
}
if (includeExplanation)
{
data = AddExplanation(searcher, data, query, scoreDoc, rankings);
}
strBldr.Append(data);
strBldr.Append(",");
hasResult = true;
}
if (hasResult)
{
strBldr.Remove(strBldr.Length - 1, 1);
}
strBldr.Append("]}");
string result = strBldr.ToString();
return result;
}
Example 13: HighLightSearch
public static List<SearchRecord> HighLightSearch(out Query query, out Dictionary<string,List<int>> statistics)
{
List<SearchRecord> recordList = new List<SearchRecord>();
query = GetQuery();
statistics = new Dictionary<string,List<int>>();
try
{
if (searchIndexList.Count > 0)
{
foreach (IndexSet indexSet in searchIndexList)
{
if (indexSet.Type == IndexTypeEnum.Increment)
continue;
Query theQuery = GetQuery(indexSet);
Source source = indexDict[indexSet];
Dictionary<string, IndexField> fpDict = source.FieldDict;
//IndexSearcher searcher = new IndexSearcher(indexSet.Path);
IndexSearcher presearcher = new IndexSearcher(indexSet.Path);
ParallelMultiSearcher searcher = new ParallelMultiSearcher(new IndexSearcher[] { presearcher });
#if DEBUG
System.Console.WriteLine(query.ToString());
#endif
Highlighter highlighter = new Highlighter(new QueryScorer(theQuery));
highlighter.SetTextFragmenter(new SimpleFragmenter(SupportClass.FRAGMENT_SIZE));
TopDocs topDocs = searcher.Search(theQuery.Weight(searcher), null, searchSet.MaxMatches);
ScoreDoc[] scoreDocs = topDocs.scoreDocs;
List<int> posList = new List<int>();
for (int i = 0; i < scoreDocs.Length; i++)
{
float score = scoreDocs[i].score;
if (score < searchSet.MinScore)
continue;
Document doc = searcher.Doc(scoreDocs[i].doc);
Field[] fields = new Field[doc.GetFields().Count];
doc.GetFields().CopyTo(fields, 0);
List<SearchField> sfList = new List<SearchField>();
foreach (Field field in fields)
{
string key = field.Name();
string value = field.StringValue();
string output = SupportClass.String.DropHTML(value);
TokenStream tokenStream = analyzer.TokenStream(key, new System.IO.StringReader(output));
string result = "";
result = highlighter.GetBestFragment(tokenStream, output);
if (result != null && string.IsNullOrEmpty(result.Trim()) == false)
{
if (fpDict.ContainsKey(key))
sfList.Add(new SearchField(key, fpDict[key].Caption, value, result, field.GetBoost(), fpDict[key].IsTitle, true, fpDict[key].Order));
else
sfList.Add(new SearchField(key, key, value, result, field.GetBoost(), false, false, 0));
}
else
{
if (fpDict.ContainsKey(key))
sfList.Add(new SearchField(key, fpDict[key].Caption, value, value, field.GetBoost(), fpDict[key].IsTitle, true, fpDict[key].Order));
else
sfList.Add(new SearchField(key, key, value, result, field.GetBoost(), false, false, 0));
}
}
recordList.Add(new SearchRecord(indexSet, sfList, indexDict[indexSet].PrimaryKey, score));
posList.Add(recordList.Count - 1);
}
try
{
statistics.Add(indexSet.Caption, posList);
}
catch (Exception)
{
int i = 2;
while (statistics.ContainsKey(indexSet.Caption + i.ToString()))
i++;
statistics.Add(indexSet.Caption + i.ToString(), posList);
}
}
}
else
{
foreach (IndexSet indexSet in indexFieldsDict.Keys)
{
if (indexSet.Type == IndexTypeEnum.Increment)
continue;
Query theQuery = GetQuery(indexSet);
Source source = indexDict[indexSet];
Dictionary<string, IndexField> fpDict = source.FieldDict;
//IndexSearcher searcher = new IndexSearcher(indexSet.Path);
IndexSearcher presearcher = new IndexSearcher(indexSet.Path);
ParallelMultiSearcher searcher = new ParallelMultiSearcher(new IndexSearcher[] { presearcher });
#if DEBUG
System.Console.WriteLine(query.ToString());
#endif
Highlighter highlighter = new Highlighter(new QueryScorer(theQuery));
highlighter.SetTextFragmenter(new SimpleFragmenter(SupportClass.FRAGMENT_SIZE));
TopDocs topDocs = searcher.Search(theQuery.Weight(searcher), null, searchSet.MaxMatches);
ScoreDoc[] scoreDocs = topDocs.scoreDocs;
List<int> posList=new List<int>();
for (int i = 0; i < scoreDocs.Length; i++)
{
float score = scoreDocs[i].score;
if (score < searchSet.MinScore)
continue;
//......... part of the code is omitted here .........
Example 14: ExactFastSearch
public static List<SearchRecord> ExactFastSearch(out Query query)
{
List<SearchRecord> docList = new List<SearchRecord>();
query = null;
try
{
List<IndexReader> readerList = new List<IndexReader>();
foreach (IndexSet indexSet in searchIndexList)
{
if (indexSet.Type == IndexTypeEnum.Increment)
continue;
readerList.Add(IndexReader.Open(indexSet.Path));
}
MultiReader multiReader = new MultiReader(readerList.ToArray());
IndexSearcher searcher = new IndexSearcher(multiReader);
query = GetQuery();
#if DEBUG
System.Console.WriteLine(query.ToString());
#endif
TopDocs topDocs = searcher.Search(query.Weight(searcher), null, searchSet.MaxMatches);
ScoreDoc[] scoreDocs = topDocs.scoreDocs;
for (int i = 0; i < scoreDocs.Length; i++)
{
Document doc = searcher.Doc(scoreDocs[i].doc);
float score = scoreDocs[i].score;
if (score < searchSet.MinScore)
continue;
docList.Add(doc);
}
}
catch (Exception e)
{
SupportClass.FileUtil.WriteToLog(SupportClass.LogPath, e.StackTrace.ToString());
}
return docList;
}
Example 15: AssertSubsetOfSameScores
private void AssertSubsetOfSameScores(Query q, TopDocs top1, TopDocs top2)
{
// The constrained query
// should be a subset to the unconstrained query.
if (top2.TotalHits > top1.TotalHits)
{
Assert.Fail("Constrained results not a subset:\n" + CheckHits.TopdocsString(top1, 0, 0) + CheckHits.TopdocsString(top2, 0, 0) + "for query:" + q.ToString());
}
for (int hit = 0; hit < top2.TotalHits; hit++)
{
int id = top2.ScoreDocs[hit].Doc;
float score = top2.ScoreDocs[hit].Score;
bool found = false;
// find this doc in other hits
for (int other = 0; other < top1.TotalHits; other++)
{
if (top1.ScoreDocs[other].Doc == id)
{
found = true;
float otherScore = top1.ScoreDocs[other].Score;
// check if scores match
Assert.AreEqual(score, otherScore, CheckHits.ExplainToleranceDelta(score, otherScore), "Doc " + id + " scores don't match\n" + CheckHits.TopdocsString(top1, 0, 0) + CheckHits.TopdocsString(top2, 0, 0) + "for query:" + q.ToString());
}
}
// check if subset
if (!found)
{
Assert.Fail("Doc " + id + " not found\n" + CheckHits.TopdocsString(top1, 0, 0) + CheckHits.TopdocsString(top2, 0, 0) + "for query:" + q.ToString());
}
}
}