This article collects typical usage examples of the Token.TermBuffer method in C#. If you are wondering what Token.TermBuffer does, how to call it, or what real-world usage looks like, the curated method examples below may help. You can also explore further usage examples of the containing class, Token.
Four code examples of the Token.TermBuffer method are shown below, sorted by popularity by default. You can upvote the examples you like or find useful; your feedback helps the system recommend better C# code examples.
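These snippets are Lucene.Net analysis code, judging by the attribute and compound-word-filter usage below: Token.TermBuffer() returns the backing char[] that holds the term text, and TermLength() reports how many of its characters are valid (the array itself may be longer). Here is a minimal read sketch, assuming the Lucene.Net 2.9/3.0-era Token(text, startOffset, endOffset) constructor:

using Lucene.Net.Analysis;

// Minimal sketch (not one of the examples below): reading the term text out of a Token.
// The three-argument constructor (term text plus start/end offsets) is an assumption here.
Token token = new Token("lucene", 0, 6);

char[] buffer = token.TermBuffer();            // backing array; may be longer than the term
int length = token.TermLength();               // number of valid characters in the buffer

string term = new string(buffer, 0, length);   // "lucene"

Passing the buffer together with the length, as every example below does, copies the term without allocating an intermediate string.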
Example 1: setToken
private void setToken(Token token)
{
    // Reset the stream's attribute state, then copy each field of the Token
    // into the corresponding attribute.
    ClearAttributes();
    termAtt.SetTermBuffer(token.TermBuffer(), 0, token.TermLength());
    flagsAtt.Flags = token.Flags;
    typeAtt.Type = token.Type;
    offsetAtt.SetOffset(token.StartOffset, token.EndOffset);
    posIncAtt.PositionIncrement = token.PositionIncrement;
    payloadAtt.Payload = token.Payload;
}
Example 2: SetCurrentToken
private void SetCurrentToken(Token token)
{
    if (token == null) return;

    // Same pattern as Example 1: clear the attribute state, then transfer the
    // Token's term buffer, position increment, flags, offsets, type and payload.
    ClearAttributes();
    _termAtt.SetTermBuffer(token.TermBuffer(), 0, token.TermLength());
    _posIncrAtt.PositionIncrement = token.PositionIncrement;
    _flagsAtt.Flags = token.Flags;
    _offsetAtt.SetOffset(token.StartOffset, token.EndOffset);
    _typeAtt.Type = token.Type;
    _payloadAtt.Payload = token.Payload;
}
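Examples 1 and 2 are the usual bridge from the older Token-based API to the attribute-based TokenStream API: fields such as termAtt and _offsetAtt are normally obtained once from the stream via AddAttribute<T>(). Below is a hedged, self-contained sketch of the same copy against a plain AttributeSource; the attribute interface names, namespaces and sample token are assumptions based on Lucene.Net 3.0-style code, not taken from the projects above.

using System;
using Lucene.Net.Analysis;
using Lucene.Net.Analysis.Tokenattributes;
using Lucene.Net.Util;

// Sketch only: a standalone AttributeSource stands in for the TokenStream the
// real code lives in. AddAttribute<T>() returns (or creates) the shared instance.
var source = new AttributeSource();
ITermAttribute termAtt = source.AddAttribute<ITermAttribute>();
IOffsetAttribute offsetAtt = source.AddAttribute<IOffsetAttribute>();

var token = new Token("compound", 0, 8);

// The same two calls the examples use to transfer the term and its offsets.
termAtt.SetTermBuffer(token.TermBuffer(), 0, token.TermLength());
offsetAtt.SetOffset(token.StartOffset, token.EndOffset);

Console.WriteLine(new string(termAtt.TermBuffer(), 0, termAtt.TermLength()));   // "compound"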
Example 3: CreateToken
protected Token CreateToken(int offset, int length, Token prototype)
{
    // Slice a sub-token out of the prototype's term buffer: the new token starts
    // 'offset' characters into the original term and is 'length' characters long.
    int newStart = prototype.StartOffset + offset;
    Token t = prototype.Clone(prototype.TermBuffer(), offset, length, newStart, newStart + length);

    // Stack the sub-token on the same position as the token it was carved from.
    t.PositionIncrement = 0;
    return t;
}
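To make Example 3 concrete, here is a hedged stand-alone trace of the same Clone call; the word, offsets and slice are illustrative, not from the original project:

using System;
using Lucene.Net.Analysis;

// Illustrative values only: "Rindfleisch" occupies offsets 10..21 in the input.
Token prototype = new Token("Rindfleisch", 10, 21);

int offset = 4, length = 7;                                    // the "fleisch" slice
int newStart = prototype.StartOffset + offset;                 // 10 + 4 = 14
Token sub = prototype.Clone(prototype.TermBuffer(), offset, length,
                            newStart, newStart + length);      // offsets 14..21
sub.PositionIncrement = 0;                                     // same position as "Rindfleisch"

Console.WriteLine(new string(sub.TermBuffer(), 0, sub.TermLength()));   // "fleisch"

Setting PositionIncrement to 0 is what lets the sub-token share a position with the compound it came from, so queries matching either form line up on the same slot.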
Example 4: DecomposeInternal
protected override void DecomposeInternal(Token token)
{
    // Only words longer than minWordSize get processed.
    if (token.TermLength() < this.minWordSize)
    {
        return;
    }

    char[] lowerCaseTermBuffer = MakeLowerCaseCopy(token.TermBuffer());

    // Slide a start position over the term and test every candidate subword length
    // between minSubwordSize and maxSubwordSize against the dictionary.
    for (int i = 0; i < token.TermLength() - this.minSubwordSize; ++i)
    {
        Token longestMatchToken = null;
        for (int j = this.minSubwordSize - 1; j < this.maxSubwordSize; ++j)
        {
            if (i + j > token.TermLength())
            {
                break;
            }
            if (dictionary.Contains(lowerCaseTermBuffer, i, j))
            {
                if (this.onlyLongestMatch)
                {
                    // Remember only the longest dictionary hit starting at position i.
                    if (longestMatchToken != null)
                    {
                        if (longestMatchToken.TermLength() < j)
                        {
                            longestMatchToken = CreateToken(i, j, token);
                        }
                    }
                    else
                    {
                        longestMatchToken = CreateToken(i, j, token);
                    }
                }
                else
                {
                    // Emit every dictionary hit as its own sub-token.
                    tokens.AddLast(CreateToken(i, j, token));
                }
            }
        }
        if (this.onlyLongestMatch && longestMatchToken != null)
        {
            tokens.AddLast(longestMatchToken);
        }
    }
}
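Example 4 is the decomposition step of a compound-word token filter: it lower-cases the term once, slides a start position across it, checks every candidate length between minSubwordSize and maxSubwordSize against a dictionary, and queues either every hit or only the longest hit per start position. Below is a hedged, library-free re-trace of that matching loop, with the loop bounds copied from the example and an illustrative two-word HashSet standing in for Lucene's dictionary set.

using System;
using System.Collections.Generic;

// Sketch only: the sizes, the dictionary and the sample word are illustrative.
int minSubwordSize = 3, maxSubwordSize = 8;
var dictionary = new HashSet<string> { "rind", "fleisch" };

string term = "Rindfleisch";
char[] lower = term.ToLowerInvariant().ToCharArray();

var subwords = new List<string>();
for (int i = 0; i < term.Length - minSubwordSize; ++i)
{
    for (int j = minSubwordSize - 1; j < maxSubwordSize; ++j)
    {
        if (i + j > term.Length)
        {
            break;
        }
        // Match case-insensitively, but keep the original casing in the output.
        if (dictionary.Contains(new string(lower, i, j)))
        {
            subwords.Add(term.Substring(i, j));
        }
    }
}

Console.WriteLine(string.Join(", ", subwords));   // Rind, fleisch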