本文整理汇总了C#中antlr.getLine方法的典型用法代码示例。如果您正苦于以下问题:C# antlr.getLine方法的具体用法?C# antlr.getLine怎么用?C# antlr.getLine使用的例子?那么恭喜您, 这里精选的方法代码示例或许可以为您提供帮助。您也可以进一步了解该方法所在类antlr
的用法示例。
在下文中一共展示了antlr.getLine方法的10个代码示例,这些例子默认根据受欢迎程度排序。您可以为喜欢或者感觉有用的代码点赞,您的评价将有助于系统推荐出更棒的C#代码示例。
示例1: ToLexicalInfo
/// <summary>
/// Converts an antlr token into a <see cref="LexicalInfo"/> covering the
/// token's full text span on its line.
/// </summary>
/// <param name="token">Source token; line/column come straight from antlr.</param>
/// <returns>LexicalInfo with the token's filename, line, start and end column.</returns>
protected LexicalInfo ToLexicalInfo(antlr.IToken token)
{
    int line = token.getLine();
    int startColumn = token.getColumn();
    // getText() can be null for imaginary/EOF tokens — treat as zero-length
    // (same guard the ToEndSourceLocation/SafeGetLength helpers use).
    string text = token.getText() ?? "";
    int endColumn = startColumn + text.Length;
    string filename = token.getFilename();
    return new LexicalInfo(filename, line, startColumn, endColumn);
}
示例2: OnParserError
/// <summary>
/// Dispatches a recognition error to the specialized no-viable-alternative
/// handler when applicable, otherwise to the generic parser-error handler.
/// </summary>
void OnParserError(antlr.RecognitionException error)
{
    LexicalInfo location = new LexicalInfo(error.getFilename(), error.getLine(), error.getColumn());
    antlr.NoViableAltException noViableAlt = error as antlr.NoViableAltException;
    if (noViableAlt == null)
    {
        GenericParserError(location, error);
    }
    else
    {
        ParserError(location, noViableAlt);
    }
}
示例3: ToSourceLocation
/// <summary>
/// Maps a token's starting position (line, column) to a <see cref="SourceLocation"/>.
/// </summary>
public static SourceLocation ToSourceLocation(antlr.IToken token)
{
    int line = token.getLine();
    int column = token.getColumn();
    return new SourceLocation(line, column);
}
示例4: ToLexicalInfo
/// <summary>
/// Maps a token's filename and starting position to a <see cref="LexicalInfo"/>.
/// </summary>
public static LexicalInfo ToLexicalInfo(antlr.IToken token)
{
    string filename = token.getFilename();
    int line = token.getLine();
    int column = token.getColumn();
    return new LexicalInfo(filename, line, column);
}
示例5: ToEndSourceLocation
/// <summary>
/// Maps a token to the <see cref="SourceLocation"/> of its last character.
/// </summary>
/// <param name="token">Source token; a null/empty text yields the column before the start.</param>
public static SourceLocation ToEndSourceLocation(antlr.IToken token)
{
    // getText() may return null for imaginary/EOF tokens; guard against NRE
    // (mirrors the null handling in the other ToEndSourceLocation overload).
    string text = token.getText() ?? "";
    return new SourceLocation(token.getLine(), token.getColumn() + text.Length - 1);
}
示例6: ToEndSourceLocation
/// <summary>
/// Maps a token to the <see cref="SourceLocation"/> of its last character,
/// treating a null token text as empty.
/// </summary>
public static SourceLocation ToEndSourceLocation(antlr.IToken token)
{
    string tokenText = token.getText();
    int length = (tokenText == null) ? 0 : tokenText.Length;
    int endColumn = token.getColumn() + length - 1;
    return new SourceLocation(token.getLine(), endColumn);
}
示例7: OnParserError
/// <summary>
/// Routes a recognition error: no-viable-alternative errors go to the
/// dedicated handler, everything else is recorded as a generic parser
/// error on the compiler context.
/// </summary>
void OnParserError(antlr.RecognitionException error)
{
    var data = new LexicalInfo(error.getFilename(), error.getLine(), error.getColumn());
    var nvae = error as antlr.NoViableAltException;
    if (nvae == null)
    {
        _context.Errors.Add(CompilerErrorFactory.GenericParserError(data, error));
    }
    else
    {
        ParserError(data, nvae);
    }
}
示例8: CreateToken
/// <summary>
/// Creates a new BooToken positioned immediately after the prototype
/// token's text on the same line.
/// </summary>
/// <param name="prototype">Token whose filename/line/column anchor the new token.</param>
/// <param name="newTokenType">antlr token type of the created token.</param>
/// <param name="newTokenText">Text carried by the created token.</param>
antlr.IToken CreateToken(antlr.IToken prototype, int newTokenType, string newTokenText)
{
    string filename = prototype.getFilename();
    int line = prototype.getLine();
    // SafeGetLength handles a null prototype text.
    int column = prototype.getColumn() + SafeGetLength(prototype.getText());
    return new BooToken(newTokenType, newTokenText, filename, line, column);
}
示例9: FlushBuffer
// Flushes the accumulated whitespace buffer for the given token and emits
// the synthetic indentation tokens (INDENT / DEDENT / end-of-statement)
// implied by the indentation of the buffer's last line — offside-rule
// style lexing. Side effects: enqueues tokens, pushes/pops _indentStack,
// and may latch _expectedIndent. Throws TokenStreamRecognitionException
// on mixed indentation characters.
void FlushBuffer(antlr.IToken token)
{
// Nothing buffered: nothing to flush.
if (0 == _buffer.Length) return;
string text = _buffer.ToString();
string[] lines = text.Split(NewLineCharArray);
// Only a buffer containing at least one newline can change indentation.
if (lines.Length > 1)
{
// The whitespace after the last newline is the new line's indentation.
string lastLine = lines[lines.Length-1];
// Protect against mixed indentation issues
if (String.Empty != lastLine) {
if (null == _expectedIndent) {
// First indented line seen fixes the expected indent character
// (e.g. a single tab or a single space) for the rest of the stream.
_expectedIndent = lastLine.Substring(0, 1);
}
// If removing every occurrence of the expected indent character leaves
// anything behind, the line mixes indentation characters.
if (String.Empty != lastLine.Replace(_expectedIndent, String.Empty))
{
string literal = _expectedIndent == "\t"
? "tabs"
: _expectedIndent == "\f"
? "form feeds" // The lexer allows them :p
: "spaces";
throw new TokenStreamRecognitionException(
new RecognitionException(
"Mixed indentation, expected the use of " + literal,
token.getFilename(),
token.getLine(),
// Point exactly to the first invalid char
lastLine.Length - lastLine.TrimStart(_expectedIndent[0]).Length + 1
)
);
}
}
// Deeper than the current level: open one new indentation block.
if (lastLine.Length > CurrentIndentLevel)
{
EnqueueIndent(token);
_indentStack.Push(lastLine.Length);
}
// Shallower: close blocks until the stack matches the new level.
else if (lastLine.Length < CurrentIndentLevel)
{
EnqueueEOS(token);
do
{
EnqueueDedent();
_indentStack.Pop();
}
while (lastLine.Length < CurrentIndentLevel);
}
// Same level: just terminate the statement.
else
{
EnqueueEOS(token);
}
}
}
示例10: CalculateEndpoint
/// <summary>
/// Computes the end position of <paramref name="token"/> in buffer
/// coordinates, records the whitespace gap since the previous token's
/// endpoint, and appends a <c>MappedToken</c> for this token's span.
/// </summary>
/// <param name="token">Token being mapped (antlr lines/columns appear 1-based; TODO confirm).</param>
/// <param name="endLine">End line of the previously processed token (0-based).</param>
/// <param name="endIndex">End index of the previously processed token.</param>
/// <param name="delimiterLength">Delimiter length (e.g. quotes) added before and after the token text.</param>
/// <returns>The (endLine, endIndex) pair for this token, fed back in on the next call.</returns>
/// <exception cref="ArgumentException">If tokens arrive out of mapping order.</exception>
private Tuple<int, int> CalculateEndpoint(antlr.IToken token, int endLine, int endIndex, int delimiterLength)
{
    // NOTE(review): positionMap presumably translates (line, column) to
    // display/buffer indices — verify against its builder.
    var startIndex = positionMap[token.getLine() - 1][token.getColumn() - 1];
    var startLine = token.getLine() - 1;
    // Record the whitespace span between the previous endpoint and this token's start.
    if (startLine > endLine || startLine == endLine && startIndex > endIndex)
    {
        whitespaces.Add(new TextSpan { iStartLine = endLine, iStartIndex = endIndex, iEndLine = startLine, iEndIndex = startIndex });
    }
    // Walk the token text line by line; only the first line starts at
    // startIndex (+ opening delimiter), continuation lines start at 0.
    endLine = startLine - 1;
    endIndex = 0;
    var runningIndex = startIndex + delimiterLength;
    foreach (var part in token.getText().Split(new[] { "\r\n" }, StringSplitOptions.None))
    {
        endLine++;
        endIndex = runningIndex + part.Length;
        runningIndex = 0;
    }
    // Account for the closing delimiter.
    endIndex += delimiterLength;
    //endIndex = positionMap[endLine][endIndex];
    var cluster = new MappedToken(
        startLine * lineSize + startIndex,
        endIndex - startIndex);
    // Tokens must be mapped in strictly increasing index order.
    // (Use the Count property, as on the line above, instead of LINQ's Count().)
    if (tokenMap.Count > 0
        && tokenMap[tokenMap.Count - 1].Index >= cluster.Index)
        throw new ArgumentException("Token Mapping order");
    tokenMap.Add(cluster);
    return new Tuple<int, int>(endLine, endIndex);
}