This article collects typical usage examples of the C# Lexer.GetTokens method. If you are wondering how to use Lexer.GetTokens in C#, or are looking for concrete examples of it in real code, the curated snippets here may help. You can also browse further usage examples of the containing class, Lexer.
Three code examples of the Lexer.GetTokens method are shown below, sorted by popularity by default.
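Before the examples, here is a minimal, self-contained sketch of the pattern all three snippets rely on: a lexer accumulates tokens while scanning source text, and GetTokens() hands the finished list to whoever parses or prints it. The Token and Lexer shapes below are hypothetical illustrations, not the classes used in the projects that follow.

// A minimal sketch of the GetTokens pattern; the Token/Lexer types here are
// hypothetical and only illustrate the call site.
using System;
using System.Collections.Generic;
using System.Linq;

public sealed record Token(string StringRepresentation, int LineNumber);

public sealed class Lexer
{
    private readonly List<Token> tokens = new();

    // Naive whitespace tokenizer, just enough to populate the token list.
    public void Tokenize(string source)
    {
        var lineNumber = 1;
        foreach (var line in source.Split('\n'))
        {
            foreach (var word in line.Split(' ', StringSplitOptions.RemoveEmptyEntries))
                tokens.Add(new Token(word, lineNumber));
            lineNumber++;
        }
    }

    // The method this page is about: expose the accumulated token list.
    public IReadOnlyList<Token> GetTokens() => tokens;
}

public static class Demo
{
    public static void Main()
    {
        var lexer = new Lexer();
        lexer.Tokenize("let x = 1\nprint x");
        Console.WriteLine(string.Join(", ", lexer.GetTokens().Select(t => t.StringRepresentation)));
        // Output: let, x, =, 1, print, x
    }
}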
Example 1: GenerateAst
/// <summary>
/// Generates the abstract syntax tree based on the token list of the lexer
/// </summary>
/// <param name="rdp">Parser that will generate the AST</param>
/// <param name="lexer">Lexer containing the token list</param>
/// <returns>The root node of the generated abstract syntax tree</returns>
private static Node GenerateAst(RecursiveDescentParser rdp, Lexer.Lexer lexer)
{
    // Hand the lexer's token list to the parser and build the tree.
    rdp.SetTokens(lexer.GetTokens());
    var parentNode = rdp.ParseTokens();
    // Every token must be consumed; leftovers indicate a syntax error.
    if (rdp.HasTokensRemaining)
    {
        throw new UnexpectedTokenException(
            $"Reached end of parse with tokens remaining! {rdp.NextToken.StringRepresentation} on line {rdp.NextToken.LineNumber}");
    }
    return parentNode;
}
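A call site for GenerateAst might look like the sketch below; the parameterless constructors and the step that fills the lexer's token list are assumptions, since the example only shows the hand-off from lexer to parser.

// Hypothetical call site; constructor signatures and the tokenizing step are
// assumptions not shown in the example above.
var lexer = new Lexer.Lexer();            // assumed parameterless constructor
// ... populate the lexer's token list here, e.g. as in Example 3 below ...
var rdp = new RecursiveDescentParser();   // assumed parameterless constructor
Node ast = GenerateAst(rdp, lexer);       // throws UnexpectedTokenException if tokens remain after parsing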
Example 2: Compile
internal static DetailLogger Compile(string filePath, Encoding encoding, out LayeKit kit)
{
    filePath = Path.GetFullPath(filePath);
    if (!File.Exists(filePath))
        throw new ArgumentException("File does not exist.", nameof(filePath));
    // Reuse a previously compiled kit if this file has already been processed.
    if (compiledKits.ContainsKey(filePath))
    {
        kit = compiledKits[filePath];
        return null;
    }
    var log = new DetailLogger();
    // Lex the source file into a token stream.
    var lexer = new Lexer(log);
    var tokens = lexer.GetTokens(filePath, encoding);
    if (log.ErrorCount != 0)
        throw new CompilerException(log);
    // Parse the tokens into an abstract syntax tree.
    var parser = new Parser(log);
    var ast = parser.GenerateAST(tokens);
    if (log.ErrorCount != 0)
        throw new CompilerException(log);
    // Walk the AST to compile the kit.
    var compiler = new KitCompiler(log, Path.GetFileName(filePath));
    ast.Visit(compiler);
    if (log.ErrorCount != 0)
        throw new CompilerException(log);
    var proto = compiler.GetPrototype();
    kit = new LayeKit(Directory.GetParent(filePath).FullName, proto);
    compiledKits[filePath] = kit;
    return log;
}
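Calling this Compile method might look like the following; the file path is a placeholder, and nothing about DetailLogger beyond the ErrorCount property used above is assumed.

// Hypothetical caller; "scripts/main.laye" is a placeholder path.
LayeKit kit;
DetailLogger log = Compile("scripts/main.laye", Encoding.UTF8, out kit);
// A null return means the kit came from the compiledKits cache; otherwise the
// logger holds any non-fatal diagnostics (errors throw CompilerException instead).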
Example 3: ParseTokens
private static void ParseTokens(string sourceString, Lexer.Lexer lexer)
{
    // We are treating comments as preprocessor commands, so strip them here.
    sourceString = lexer.StripComments(sourceString);
    // We are now ready to parse the tokens from the text.
    var lineCount = 1; // used to track the line number, 1-indexed
    // Iterate through all lines of the code and extract tokens from them.
    foreach (var line in sourceString.Split('\n').Select(line => line.Trim()))
    {
        var subLine = line;
        while (subLine.Length != 0)
        {
            // This call pops off a token and returns the remaining string;
            // the token that was removed is added to the lexer's token collection.
            subLine = lexer.ParseToken(subLine, lineCount);
        }
        // A line has been parsed, so move on to the next line number.
        lineCount++;
    }
    // After tokens are extracted, print the list of tokens.
    foreach (var token in lexer.GetTokens())
    {
        Console.Write($"[{token.StringRepresentation}], ");
    }
}
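A driver for this helper might look like the sketch below; the Lexer.Lexer constructor and the source string are assumptions, and the printed tokens depend entirely on how ParseToken splits the input.

// Hypothetical driver; the constructor and source literal are placeholders.
var lexer = new Lexer.Lexer();
ParseTokens("x = 1 + 2\nprint x", lexer);
// The loop above prints each extracted token as "[token], " on one line;
// the same list remains available afterwards via lexer.GetTokens().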