本文整理汇总了C#中Lucene.Net.Analysis.Token.GetPositionIncrement方法的典型用法代码示例。如果您正苦于以下问题:C# Token.GetPositionIncrement方法的具体用法?C# Token.GetPositionIncrement怎么用?C# Token.GetPositionIncrement使用的例子?那么恭喜您,这里精选的方法代码示例或许可以为您提供帮助。您也可以进一步了解该方法所在类 Lucene.Net.Analysis.Token 的用法示例。
在下文中一共展示了Token.GetPositionIncrement方法的4个代码示例,这些例子默认根据受欢迎程度排序。您可以为喜欢或者感觉有用的代码点赞,您的评价将有助于系统推荐出更棒的C#代码示例。
示例1: GetTokenPositioner
/// <summary>
/// Decides where a token goes in the two-dimensional synonym grid.
/// A position increment of 0 means the token is stacked on the previous
/// one (a synonym), so it starts a new row; any other increment starts
/// a new column.
/// </summary>
public override TokenPositioner GetTokenPositioner(Token token)
{
    if (token.GetPositionIncrement() == 0)
    {
        return TokenPositioner.NewRow;
    }
    return TokenPositioner.NewColumn;
}
开发者ID:kstenson,项目名称:NHibernate.Search,代码行数:7,代码来源:TwoDimensionalNonWeightedSynonymTokenSettingsCodec.cs
示例2: SetCurrentToken
/// <summary>
/// Mirrors every attribute of <paramref name="token"/> into this stream's
/// attribute instances: term text, position increment, flags, start/end
/// offsets, type and payload. A null token leaves the attributes untouched.
/// </summary>
private void SetCurrentToken(Token token)
{
    if (token == null)
    {
        return;
    }

    // Reset all attributes before copying so no stale state survives.
    ClearAttributes();

    _termAtt.SetTermBuffer(token.TermBuffer(), 0, token.TermLength());
    _posIncrAtt.SetPositionIncrement(token.GetPositionIncrement());
    _flagsAtt.SetFlags(token.GetFlags());
    _offsetAtt.SetOffset(token.StartOffset(), token.EndOffset());
    _typeAtt.SetType(token.Type());
    _payloadAtt.SetPayload(token.GetPayload());
}
示例3: Next
/// <summary>
/// Returns the next input Token whose term text is not a stop word,
/// or null when the input stream is exhausted.
/// When <c>enablePositionIncrements</c> is set, the positions consumed by
/// skipped stop words are folded into the returned token's position
/// increment so phrase/position queries still see the gap.
/// </summary>
public override Token Next(Token result)
{
    // Positions consumed by stop words since the last emitted token.
    int skippedPositions = 0;
    while ((result = input.Next(result)) != null)
    {
        // FIX: use the public TermLength() accessor instead of the
        // Lucene-internal lowercase termLength field — consistent with
        // TermBuffer() here and TermLength() elsewhere in this file, and
        // compiles outside the Lucene.Net assembly.
        if (!stopWords.Contains(result.TermBuffer(), 0, result.TermLength()))
        {
            if (enablePositionIncrements)
            {
                result.SetPositionIncrement(result.GetPositionIncrement() + skippedPositions);
            }
            return result;
        }
        skippedPositions += result.GetPositionIncrement();
    }
    // reached EOS -- return null
    return null;
}
示例4: TestPerformance
/// <summary>
/// Benchmark/consistency test: for each stream size in tokCount, first
/// verifies that a ModuloSinkTokenizer fed through a TeeTokenFilter captures
/// exactly the same tokens as a direct ModuloTokenFilter pass, then for each
/// modulo in modCounts compares the wall-clock cost (and the cumulative
/// position increments) of analyzing two fields separately versus one
/// analysis pass plus replaying the sink.
/// </summary>
public virtual void TestPerformance()
{
// Stream sizes to benchmark and sink sampling rates (keep every Nth token).
int[] tokCount = new int[]{100, 500, 1000, 2000, 5000, 10000};
int[] modCounts = new int[]{1, 2, 5, 10, 20, 50, 100, 200, 500};
for (int k = 0; k < tokCount.Length; k++)
{
System.Text.StringBuilder buffer = new System.Text.StringBuilder();
System.Console.Out.WriteLine("-----Tokens: " + tokCount[k] + "-----");
// Build a test document of tokCount[k] uppercase English number words.
for (int i = 0; i < tokCount[k]; i++)
{
buffer.Append(English.IntToEnglish(i).ToUpper()).Append(' ');
}
//make sure we produce the same tokens
ModuloSinkTokenizer sink = new ModuloSinkTokenizer(this, tokCount[k], 100);
Token next = new Token();
TokenStream result = new TeeTokenFilter(new StandardFilter(new StandardTokenizer(new System.IO.StringReader(buffer.ToString()))), sink);
// Drain the tee'd stream so the sink records its share of the tokens.
while ((next = result.Next(next)) != null)
{
}
// Re-tokenize directly through a ModuloTokenFilter with the same modulo (100).
result = new ModuloTokenFilter(this, new StandardFilter(new StandardTokenizer(new System.IO.StringReader(buffer.ToString()))), 100);
next = new Token();
System.Collections.IList tmp = new System.Collections.ArrayList();
while ((next = result.Next(next)) != null)
{
// Clone: Next() may reuse the same Token instance on every call.
tmp.Add(next.Clone());
}
System.Collections.IList sinkList = sink.GetTokens();
Assert.IsTrue(tmp.Count == sinkList.Count, "tmp Size: " + tmp.Count + " is not: " + sinkList.Count);
// Token-for-token comparison of the sink's capture vs. the direct pass.
for (int i = 0; i < tmp.Count; i++)
{
Token tfTok = (Token) tmp[i];
Token sinkTok = (Token) sinkList[i];
Assert.IsTrue(tfTok.TermText().Equals(sinkTok.TermText()) == true, tfTok.TermText() + " is not equal to " + sinkTok.TermText() + " at token: " + i);
}
//simulate two fields, each being analyzed once, for 20 documents
for (int j = 0; j < modCounts.Length; j++)
{
int tfPos = 0;
// 621355968000000000 ticks = offset of the Unix epoch from DateTime's
// 0001-01-01 origin; dividing 100ns ticks by 10000 yields milliseconds.
long start = (System.DateTime.Now.Ticks - 621355968000000000) / 10000;
for (int i = 0; i < 20; i++)
{
// Field 1: plain StandardFilter pass over the whole document.
next = new Token();
result = new StandardFilter(new StandardTokenizer(new System.IO.StringReader(buffer.ToString())));
while ((next = result.Next(next)) != null)
{
tfPos += next.GetPositionIncrement();
}
// Field 2: independent re-analysis through the modulo filter.
next = new Token();
result = new ModuloTokenFilter(this, new StandardFilter(new StandardTokenizer(new System.IO.StringReader(buffer.ToString()))), modCounts[j]);
while ((next = result.Next(next)) != null)
{
tfPos += next.GetPositionIncrement();
}
}
long finish = (System.DateTime.Now.Ticks - 621355968000000000) / 10000;
System.Console.Out.WriteLine("ModCount: " + modCounts[j] + " Two fields took " + (finish - start) + " ms");
int sinkPos = 0;
//simulate one field with one sink
start = (System.DateTime.Now.Ticks - 621355968000000000) / 10000;
for (int i = 0; i < 20; i++)
{
// Single tee'd pass: the sink captures every modCounts[j]-th token.
sink = new ModuloSinkTokenizer(this, tokCount[k], modCounts[j]);
next = new Token();
result = new TeeTokenFilter(new StandardFilter(new StandardTokenizer(new System.IO.StringReader(buffer.ToString()))), sink);
while ((next = result.Next(next)) != null)
{
sinkPos += next.GetPositionIncrement();
}
//System.out.println("Modulo--------");
// Replay the captured tokens from the sink instead of re-analyzing.
result = sink;
while ((next = result.Next(next)) != null)
{
sinkPos += next.GetPositionIncrement();
}
}
finish = (System.DateTime.Now.Ticks - 621355968000000000) / 10000;
System.Console.Out.WriteLine("ModCount: " + modCounts[j] + " Tee fields took " + (finish - start) + " ms");
// Both approaches must have walked the same total positions.
Assert.IsTrue(sinkPos == tfPos, sinkPos + " does not equal: " + tfPos);
}
System.Console.Out.WriteLine("- End Tokens: " + tokCount[k] + "-----");
}
}