本文整理汇总了C#中Lucene.Net.Search.Weight类的典型用法代码示例。如果您正苦于以下问题:C# Weight类的具体用法?C# Weight怎么用?C# Weight使用的例子?那么恭喜您, 这里精选的类代码示例或许可以为您提供帮助。
Weight类属于Lucene.Net.Search命名空间,在下文中一共展示了Weight类的15个代码示例,这些例子默认根据受欢迎程度排序。您可以为喜欢或者感觉有用的代码点赞,您的评价将有助于系统推荐出更棒的C#代码示例。
示例1: MatchAllScorer
// Scorer for MatchAllDocsQuery: enumerates every document via a null
// TermDocs enumeration and assigns each the weight's constant score.
internal MatchAllScorer(MatchAllDocsQuery enclosingInstance, IndexReader reader, Similarity similarity, Weight w, byte[] norms):base(similarity)
{
InitBlock(enclosingInstance);
// TermDocs(null) iterates all documents in the reader — TODO confirm it skips deletions.
this.termDocs = reader.TermDocs(null);
// Constant score taken once from the weight; identical for every doc.
score = w.Value;
this.norms = norms;
}
示例2: Hits
// Builds a Hits view over the results of query q on searcher s, optionally
// restricted by filter f. The query is compiled into a Weight up front so
// later GetMoreDocs calls can reuse it.
internal Hits(Searcher s, Query q, Filter f)
{
weight = q.Weight(s);
searcher = s;
filter = f;
GetMoreDocs(50); // retrieve 100 initially (GetMoreDocs presumably fetches 2x the requested minimum — TODO confirm)
}
示例3: ExactPhraseScorer
// Scorer for exact phrase matches; builds one ChunkState per phrase term.
// Bails out early (NoDocs) if any non-first term matches no documents.
internal ExactPhraseScorer(Weight weight, PhraseQuery.PostingsAndFreq[] postings, Similarity.SimScorer docScorer)
    : base(weight)
{
    this.DocScorer = docScorer;
    EndMinus1 = postings.Length - 1;
    ChunkStates = new ChunkState[postings.Length];
    // min(cost): the cheapest postings list bounds the conjunction's cost.
    Cost_Renamed = postings[0].Postings.Cost();

    for (int term = 0; term < postings.Length; term++)
    {
        // Coarse optimization: advance(target) is fairly costly, so if the
        // relative freq of this term is not much (> 1/5th) rarer than the
        // rarest term, just use .nextDoc() when ANDing. This buys ~15%
        // gain for phrases whose rarest two terms are close in frequency.
        bool preferAdvance = postings[term].DocFreq > 5 * postings[0].DocFreq;
        ChunkStates[term] = new ChunkState(postings[term].Postings, -postings[term].Position, preferAdvance);

        if (term > 0 && postings[term].Postings.NextDoc() == DocIdSetIterator.NO_MORE_DOCS)
        {
            // This term occurs in no documents: the phrase can never match.
            NoDocs = true;
            return;
        }
    }
}
示例4: PhraseScorer
private float freq; // phrase frequency in current doc as computed by phraseFreq().
// Links the given TermPositions into a list of PhrasePositions and prepares
// an empty PhraseQueue. Each stored position is offset-adjusted
// (pp.pos = tp.pos - offset) so an exact phrase match is exactly the state
// where every PhrasePositions reports the same position.
internal PhraseScorer(Weight weight, TermPositions[] tps, int[] offsets, Similarity similarity, byte[] norms):base(similarity)
{
    this.norms = norms;
    this.weight = weight;
    this.value_Renamed = weight.Value;

    // Append one PhrasePositions node per term to the singly linked list
    // rooted at 'first'.
    for (int idx = 0; idx < tps.Length; idx++)
    {
        PhrasePositions node = new PhrasePositions(tps[idx], offsets[idx]);
        if (last == null)
        {
            first = node; // list was empty: node becomes the head
        }
        else
        {
            last.next = node; // append at the tail
        }
        last = node;
    }

    pq = new PhraseQueue(tps.Length); // construct empty pq, sized to the term count
    first.doc = - 1;
}
示例5: DisjunctionScorer
// Base constructor for disjunction (OR) scorers: records the sub-scorers
// and heapifies them.
protected internal DisjunctionScorer(Weight weight, Scorer[] subScorers)
: base(weight)
{
this.SubScorers = subScorers;
this.NumScorers = subScorers.Length;
// Arrange SubScorers into a heap — presumably keyed by current doc id; TODO confirm.
Heapify();
}
示例6: SearchWithFilter
// Collects documents that are both matched by 'weight' and accepted by
// 'filter'. Uses a leapfrog walk: whichever iterator is behind advances
// to the other's doc id until both agree on the same document.
private void SearchWithFilter(IndexReader reader, Weight weight, Filter filter, Collector collector)
{
DocIdSet docIdSet = filter.GetDocIdSet(reader);
if (docIdSet == null)
return;
Scorer scorer = weight.Scorer(reader, true, false);
if (scorer == null)
return;
// NOTE(review): return value discarded — presumably just reads the scorer's
// initial position; confirm whether this call is actually required.
scorer.DocID();
DocIdSetIterator docIdSetIterator = docIdSet.Iterator();
if (docIdSetIterator == null)
return;
int target = docIdSetIterator.NextDoc();
int num = scorer.Advance(target);
collector.SetScorer(scorer);
while (true)
{
// Leapfrog: advance the lagging iterator until both sit on the same doc
// (or both reach NO_MORE_DOCS, which also makes them equal).
while (num != target)
{
if (num > target)
target = docIdSetIterator.Advance(num);
else
num = scorer.Advance(target);
}
// num == target here: either a real match or end-of-iteration.
if (num != DocIdSetIterator.NO_MORE_DOCS && !((GroupCollector)collector).GroupLimitReached)
{
collector.Collect(num);
target = docIdSetIterator.NextDoc();
num = scorer.Advance(target);
}
else
break;
}
}
示例7: MatchAllScorer
// Scorer for MatchAllDocsQuery over the whole reader: walks doc ids from 0
// up to MaxDoc-1, giving every document the weight's constant score.
internal MatchAllScorer(MatchAllDocsQuery enclosingInstance, IndexReader reader, Similarity similarity, Weight w):base(similarity)
{
InitBlock(enclosingInstance);
this.reader = reader;
// Start one before the first doc so the first advance lands on 0 — TODO confirm.
id = - 1;
maxId = reader.MaxDoc() - 1;
// Constant score shared by every document.
score = w.GetValue();
}
示例8: MatchAllScorer
// 4.x-style MatchAllDocsQuery scorer: iterates all docs up to MaxDoc with a
// fixed score, consulting 'liveDocs' to skip deleted documents.
internal MatchAllScorer(MatchAllDocsQuery outerInstance, IndexReader reader, Bits liveDocs, Weight w, float score)
: base(w)
{
this.OuterInstance = outerInstance;
// liveDocs marks non-deleted docs; presumably null means no deletions — TODO confirm.
this.LiveDocs = liveDocs;
this.Score_Renamed = score;
MaxDoc = reader.MaxDoc;
}
示例9: Hits
public /*internal*/ bool debugCheckedForDeletions = false; // for test purposes.
// Builds a Hits view over query q on searcher s (optionally filtered by f),
// recording the deletion count and initial result length so later calls can
// detect that the underlying index changed.
internal Hits(Searcher s, Query q, Filter f)
{
weight = q.Weight(s);
searcher = s;
filter = f;
// Snapshot of the deleted-doc count at construction time — TODO confirm its use.
nDeletions = CountDeletions(s);
GetMoreDocs(50); // retrieve 100 initially (GetMoreDocs presumably fetches 2x the minimum — TODO confirm)
lengthAtStart = length;
}
示例10: TermScorer
/// <summary> Construct a <code>TermScorer</code>.</summary>
/// <param name="weight">The weight of the <code>Term</code> in the query.</param>
/// <param name="td">An iterator over the documents matching the <code>Term</code>.</param>
/// <param name="similarity">The <code>Similarity</code> implementation to be used for score
/// computations.</param>
/// <param name="norms">The field norms of the document fields for the <code>Term</code>.</param>
public /*internal*/ TermScorer(Weight weight, TermDocs td, Similarity similarity, byte[] norms):base(similarity)
{
    this.weight = weight;
    this.termDocs = td;
    this.norms = norms;
    this.weightValue = weight.GetValue();

    // Precompute tf(freq) * weight for small frequencies so the hot scoring
    // path can use a table lookup instead of recomputing.
    for (int freq = 0; freq < SCORE_CACHE_SIZE; freq++)
    {
        scoreCache[freq] = GetSimilarity().Tf(freq) * weightValue;
    }
}
示例11: Search
/// <summary> A search implementation which spans a new thread for each
/// Searchable, waits for each search to complete and merge
/// the results back together.
/// </summary>
public override TopDocs Search(Weight weight, Filter filter, int nDocs)
{
HitQueue hq = new HitQueue(nDocs, false);
int totalHits = 0;
MultiSearcherThread[] msta = new MultiSearcherThread[searchables.Length];
for (int i = 0; i < searchables.Length; i++)
{
// search each searchable
// Assume not too many searchables and cost of creating a thread is by far inferior to a search
msta[i] = new MultiSearcherThread(searchables[i], weight, filter, nDocs, hq, i, starts, "MultiSearcher thread #" + (i + 1));
msta[i].Start();
}
// Join every thread; hits accumulate into the shared HitQueue inside the threads.
for (int i = 0; i < searchables.Length; i++)
{
try
{
msta[i].Join();
}
catch (System.Threading.ThreadInterruptedException ie)
{
// In 3.0 we will change this to throw
// InterruptedException instead
SupportClass.ThreadClass.Current().Interrupt();
throw new System.SystemException(ie.Message, ie);
}
System.IO.IOException ioe = msta[i].GetIOException();
if (ioe == null)
{
totalHits += msta[i].Hits();
}
else
{
// if one search produced an IOException, rethrow it
throw ioe;
}
}
// Drain the priority queue into the result array; Pop() yields the worst
// remaining hit, so fill from the end to get best-first ordering.
ScoreDoc[] scoreDocs = new ScoreDoc[hq.Size()];
for (int i = hq.Size() - 1; i >= 0; i--)
// put docs in array
scoreDocs[i] = (ScoreDoc) hq.Pop();
// After draining, scoreDocs[0] holds the top hit — TODO confirm the queue ordering.
float maxScore = (totalHits == 0)?System.Single.NegativeInfinity:scoreDocs[0].score;
return new TopDocs(totalHits, scoreDocs, maxScore);
}
示例12: Search
// Runs the weight against every sub-reader, feeding matches to the
// collector; when a filter is supplied, delegates to the filtered path.
public override void Search(Weight weight, Filter filter, Collector collector)
{
    if (filter != null)
    {
        // Filtered path: leapfrog each sub-reader against the filter.
        for (int i = 0; i < this.subReaders.Length; ++i)
        {
            collector.SetNextReader(this.subReaders[i], this.docStarts[i]);
            this.SearchWithFilter(this.subReaders[i], weight, filter, collector);
        }
        return;
    }

    // Unfiltered path: score each sub-reader directly.
    for (int i = 0; i < this.subReaders.Length; ++i)
    {
        collector.SetNextReader(this.subReaders[i], this.docStarts[i]);
        Scorer scorer = weight.Scorer(this.subReaders[i], !collector.AcceptsDocsOutOfOrder, true);
        if (scorer != null)
        {
            this.SearchWithScorer(this.subReaders[i], weight, scorer, collector);
        }
    }
}
示例13: PhraseScorer
// Links the given TermPositions into a singly linked list of PhrasePositions
// (each adjusted by its phrase position) and prepares an empty PhraseQueue.
internal PhraseScorer(Weight weight, TermPositions[] tps, int[] positions, Similarity similarity, byte[] norms) : base(similarity)
{
    this.norms = norms;
    this.weight = weight;
    this.value_Renamed = weight.GetValue();

    // Append one PhrasePositions node per term to the linked list.
    for (int n = 0; n < tps.Length; n++)
    {
        PhrasePositions node = new PhrasePositions(tps[n], positions[n]);
        if (last == null)
        {
            first = node; // first node becomes the head
        }
        else
        {
            last.next = node; // append at the tail
        }
        last = node;
    }

    pq = new PhraseQueue(tps.Length); // construct empty pq
}
示例14: CreateSpatialFilterAndWeight
// Builds spatialFilter (and, when distance-sorting is requested,
// spatialWeight) for a point-radius geo query, chaining any pre-existing
// filter in front of the spatial one.
//
// Fixes over the previous version: the unused 'circleCells' computation
// (strategy.GetGrid().GetWorldNode().GetSubCells(circle)) is removed, and
// the search circle is built once instead of twice with identical arguments.
public void CreateSpatialFilterAndWeight(PointRadiusCriterion geoFilter, Filter currentFilter, Weight currentWeight)
{
    var spatialContext = SpatialContext.GEO;
    var geohashTree = new GeohashPrefixTree(spatialContext, 10);
    var strategy = new RecursivePrefixTreeStrategy(geohashTree, geoFilter.FieldName);
    var point = spatialContext.MakePoint(geoFilter.Longitude, geoFilter.Latitude);
    // Search circle: radius converted from kilometres to degrees.
    var circle = spatialContext.MakeCircle(point,
        DistanceUtils.Dist2Degrees(geoFilter.RadiusKm, DistanceUtils.EARTH_MEAN_RADIUS_KM));
    var spatialArgs = new SpatialArgs(SpatialOperation.Intersects, circle);
    var luceneFilters = new List<Filter>();
    if (currentFilter != null)
        luceneFilters.Add(currentFilter);
    luceneFilters.Add(strategy.MakeFilter(spatialArgs));
    if (geoFilter.Sort != PointRadiusCriterion.SortOption.None)
    {
        var valueSource = strategy.MakeDistanceValueSource(point);
        var funcQ = new FunctionQuery(valueSource);
        // this is a bit odd... but boosting the score negatively orders results
        if (geoFilter.Sort == PointRadiusCriterion.SortOption.Ascending)
        {
            funcQ.Boost = -1;
        }
        spatialWeight = funcQ.CreateWeight(this);
        spatialWeight.GetSumOfSquaredWeights();
        luceneFilters.Add(new QueryWrapperFilter(currentWeight.Query));
    }
    // ChainedFilter logic constant 1 — presumably AND; TODO confirm against ChainedFilter's constants.
    spatialFilter = new ChainedFilter(luceneFilters.ToArray(), 1);
}
示例15: Search
/// <summary> A search implementation which executes each
/// <see cref="Searchable"/> in its own thread and waits for each search to complete
/// and merge the results back together.
/// </summary>
public override TopDocs Search(Weight weight, Filter filter, int nDocs)
{
    HitQueue hq = new HitQueue(nDocs, false);
    object lockObj = new object();

    // Run one search per searchable in parallel; each call pushes its hits
    // into the shared queue under the lock and returns its own TopDocs.
    TopDocs[] perSearchable = new TopDocs[searchables.Length];
    Parallel.For(0, searchables.Length, i =>
    {
        perSearchable[i] = MultiSearcherCallableNoSort(ThreadLock.MonitorLock, lockObj, searchables[i], weight, filter, nDocs, hq, i, starts);
    });

    // Merge hit counts and the best score across all partial results.
    int totalHits = 0;
    float maxScore = float.NegativeInfinity;
    for (int n = 0; n < perSearchable.Length; n++)
    {
        totalHits += perSearchable[n].TotalHits;
        maxScore = Math.Max(maxScore, perSearchable[n].MaxScore);
    }

    // Drain the queue into an array, best hits first.
    ScoreDoc[] scoreDocs = new ScoreDoc[hq.Size()];
    for (int n = hq.Size() - 1; n >= 0; n--)
    {
        scoreDocs[n] = hq.Pop();
    }
    return new TopDocs(totalHits, scoreDocs, maxScore);
}