This article collects typical usage examples of the C# Lucene.Net.Search.Searcher.Explain method. If you are wondering what Searcher.Explain does, how to call it, or what it looks like in practice, the hand-picked code examples below should help. You can also read further about the containing class, Lucene.Net.Search.Searcher.
Two code examples of the Searcher.Explain method are shown below, ordered by popularity by default.
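Before the examples, here is a minimal, self-contained sketch of a typical Searcher.Explain call. It is illustrative only: the directory, field name, document text, and query term are invented, and the member casing (ScoreDocs, Doc) follows the style of Example 2 below; older Lucene.Net releases expose some of these members as lowercase fields or Get* methods instead.

// A minimal sketch, not a drop-in test: index contents and query term are assumptions.
using Lucene.Net.Analysis;
using Lucene.Net.Documents;
using Lucene.Net.Index;
using Lucene.Net.Search;
using Lucene.Net.Store;

class ExplainDemo
{
    static void Main()
    {
        Directory dir = new RAMDirectory();

        // Index a single document so there is something to explain.
        IndexWriter writer = new IndexWriter(dir, new SimpleAnalyzer(), true, IndexWriter.MaxFieldLength.UNLIMITED);
        Document d = new Document();
        d.Add(new Field("body", "lucene explains its scoring", Field.Store.YES, Field.Index.ANALYZED));
        writer.AddDocument(d);
        writer.Close();

        Searcher searcher = new IndexSearcher(dir, true);
        Query query = new TermQuery(new Term("body", "lucene"));
        TopDocs hits = searcher.Search(query, 10);

        foreach (ScoreDoc sd in hits.ScoreDocs)
        {
            // Explain describes how the score of this document was computed;
            // for non-matching documents the explanation value is 0.0f.
            Explanation exp = searcher.Explain(query, sd.Doc);
            System.Console.WriteLine(exp.ToString());
        }
        // In a real application, also dispose/close the searcher and directory
        // (the exact member name depends on the Lucene.Net version).
    }
}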
Example 1: CheckNoMatchExplanations
public static float EXPLAIN_SCORE_TOLERANCE_DELTA = 0.00025f; // {{See: LUCENENET-288}} Intentional diversion from Java Lucene per above comment

/// <summary> Tests that all documents up to maxDoc which are *not* in the
/// expected result set have an explanation which indicates no match
/// (i.e. an Explanation value of 0.0f).
/// </summary>
public static void CheckNoMatchExplanations(Query q, System.String defaultFieldName, Searcher searcher, int[] results)
{
    System.String d = q.ToString(defaultFieldName);
    System.Collections.Hashtable ignore = new System.Collections.Hashtable();
    for (int i = 0; i < results.Length; i++)
    {
        SupportClass.CollectionsHelper.AddIfNotContains(ignore, (System.Int32) results[i]);
    }
    int maxDoc = searcher.MaxDoc();
    for (int doc = 0; doc < maxDoc; doc++)
    {
        if (ignore.Contains((System.Int32) doc))
            continue;

        Explanation exp = searcher.Explain(q, doc);
        Assert.IsNotNull(exp, "Explanation of [[" + d + "]] for #" + doc + " is null");
        Assert.AreEqual(0.0f, exp.GetValue(), 0.0f, "Explanation of [[" + d + "]] for #" + doc + " doesn't indicate non-match: " + exp.ToString());
    }
}
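A hypothetical call might look like this; the query, field name, and matching document ids are assumptions for illustration, not taken from the original test:

// Hypothetical usage: every document whose id is NOT in expectedMatches
// must get an Explanation value of 0.0f for this query.
Query q = new TermQuery(new Term("field", "term"));   // assumed field/term
int[] expectedMatches = { 0, 3, 7 };                  // assumed matching doc ids
CheckNoMatchExplanations(q, "field", searcher, expectedMatches);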
Example 2: assertHits
/// <summary> Checks to see if the hits are what we expected.</summary>
/// <param name="query">the query to execute</param>
/// <param name="description">the description of the search</param>
/// <param name="expectedIds">the expected document ids of the hits</param>
/// <param name="expectedScores">the expected scores of the hits</param>
/// <throws> IOException </throws>
protected internal static void assertHits(Searcher s, Query query, System.String description, System.String[] expectedIds, float[] expectedScores)
{
    QueryUtils.Check(query, s);
    float tolerance = 1e-5f;
    // Hits hits = searcher.search(query);
    // hits normalizes and throws things off if one score is greater than 1.0
    TopDocs topdocs = s.Search(query, null, 10000);
    /***
    // display the hits
    System.out.println(hits.length() + " hits for search: \"" + description + '\"');
    for (int i = 0; i < hits.length(); i++) {
        System.out.println("  " + FIELD_ID + ':' + hits.doc(i).get(FIELD_ID) + " (score:" + hits.score(i) + ')');
    }
    *****/

    // did we get the hits we expected
    Assert.AreEqual(expectedIds.Length, topdocs.TotalHits);
    for (int i = 0; i < topdocs.TotalHits; i++)
    {
        //System.out.println(i + " exp: " + expectedIds[i]);
        //System.out.println(i + " field: " + hits.doc(i).get(FIELD_ID));
        int id = topdocs.ScoreDocs[i].Doc;
        float score = topdocs.ScoreDocs[i].Score;
        Document doc = s.Doc(id);
        Assert.AreEqual(expectedIds[i], doc.Get(FIELD_ID));

        bool scoreEq = System.Math.Abs(expectedScores[i] - score) < tolerance;
        if (!scoreEq)
        {
            System.Console.Out.WriteLine(i + " warning, expected score: " + expectedScores[i] + ", actual " + score);
            System.Console.Out.WriteLine(s.Explain(query, id));
        }
        Assert.AreEqual(expectedScores[i], score, tolerance);
        Assert.AreEqual(s.Explain(query, id).Value, score, tolerance);
    }
}
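As a hedged illustration of how this helper might be driven: the ids, scores, field name, and description below are invented, and FIELD_ID refers to whatever id field the surrounding test class defines.

// Hypothetical usage: ids and scores must match the top hits in order,
// and each hit's Explain() value must agree with its reported score.
System.String[] ids = { "1", "3", "2" };   // assumed FIELD_ID values
float[] scores = { 1.0f, 0.75f, 0.5f };    // assumed scores
assertHits(searcher, new TermQuery(new Term("body", "lucene")), "term search", ids, scores);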