This article collects typical code examples of the Tokenizer.NextToken method in C#. If you are wondering what Tokenizer.NextToken does, how to call it, or where to find working usage, the curated examples below may help. You can also explore further usage examples of the containing class, Tokenizer.
The sections below present 15 code examples of Tokenizer.NextToken, sorted by popularity by default.
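Before diving into the examples, it may help to see the typical consumption pattern. The sketch below is illustrative only: it assumes a Tokenizer whose NextToken() returns Token values exposing Kind and Value properties and a TokenKind.Eof sentinel, as the NUnit-style examples below suggest; those member names are assumptions, not a guaranteed API.

// Minimal sketch of draining a tokenizer (assumed Token.Kind / Token.Value members).
var tokenizer = new Tokenizer("count == 42");
for (var token = tokenizer.NextToken(); token.Kind != TokenKind.Eof; token = tokenizer.NextToken())
{
    Console.WriteLine($"{token.Kind}: {token.Value}");
}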
Example 1: BlankStringReturnsEof
public void BlankStringReturnsEof()
{
    var tokenizer = new Tokenizer("    ");
    Assert.That(tokenizer.NextToken(), Is.EqualTo(new Token(TokenKind.Eof)), "First Call");
    Assert.That(tokenizer.NextToken(), Is.EqualTo(new Token(TokenKind.Eof)), "Second Call");
    Assert.That(tokenizer.NextToken(), Is.EqualTo(new Token(TokenKind.Eof)), "Third Call");
}
Example 2: StringWithDoubleQuotes
public void StringWithDoubleQuotes()
{
    var tokenizer = new Tokenizer("\"string at start\" \"may contain ' char\" \"string at end\"");
    Assert.That(tokenizer.NextToken(), Is.EqualTo(new Token(TokenKind.String, "string at start")));
    Assert.That(tokenizer.NextToken(), Is.EqualTo(new Token(TokenKind.String, "may contain ' char")));
    Assert.That(tokenizer.NextToken(), Is.EqualTo(new Token(TokenKind.String, "string at end")));
    Assert.That(tokenizer.NextToken(), Is.EqualTo(new Token(TokenKind.Eof)));
}
Example 3: StringWithSingleQuotes
public void StringWithSingleQuotes()
{
    var tokenizer = new Tokenizer("'string at start' 'may contain \" char' 'string at end'");
    Assert.That(tokenizer.NextToken(), Is.EqualTo(new Token(TokenKind.String, "string at start")));
    Assert.That(tokenizer.NextToken(), Is.EqualTo(new Token(TokenKind.String, "may contain \" char")));
    Assert.That(tokenizer.NextToken(), Is.EqualTo(new Token(TokenKind.String, "string at end")));
    Assert.That(tokenizer.NextToken(), Is.EqualTo(new Token(TokenKind.Eof)));
}
Example 4: WordsInUnicode
public void WordsInUnicode()
{
    var tokenizer = new Tokenizer("method == Здравствуйте");
    Assert.That(tokenizer.NextToken(), Is.EqualTo(new Token(TokenKind.Word, "method")));
    Assert.That(tokenizer.NextToken(), Is.EqualTo(new Token(TokenKind.Symbol, "==")));
    Assert.That(tokenizer.NextToken(), Is.EqualTo(new Token(TokenKind.Word, "Здравствуйте")));
    Assert.That(tokenizer.NextToken(), Is.EqualTo(new Token(TokenKind.Eof)));
}
Example 5: IdentifierTokens
public void IdentifierTokens()
{
    var tokenizer = new Tokenizer(" Identifiers x abc123 a1x ");
    Assert.That(tokenizer.NextToken(), Is.EqualTo(new Token(TokenKind.Word, "Identifiers")));
    Assert.That(tokenizer.NextToken(), Is.EqualTo(new Token(TokenKind.Word, "x")));
    Assert.That(tokenizer.NextToken(), Is.EqualTo(new Token(TokenKind.Word, "abc123")));
    Assert.That(tokenizer.NextToken(), Is.EqualTo(new Token(TokenKind.Word, "a1x")));
    Assert.That(tokenizer.NextToken(), Is.EqualTo(new Token(TokenKind.Eof)));
}
Example 6: LineCommentEof
public void LineCommentEof() {
    using (TestStringReader reader = new TestStringReader("0 -- /* don't /*** 'do ***/ this */ 0")) {
        Tokenizer tokenizer = new Tokenizer(reader, grammar);
        Token token;
        Assert.Equal(ParseMessage.TokenRead, tokenizer.NextToken(out token));
        Assert.Equal(SymbolKind.Terminal, token.Symbol.Kind);
        Assert.Equal(ParseMessage.TokenRead, tokenizer.NextToken(out token));
        Assert.Equal(SymbolKind.Noise, token.Symbol.Kind);
        Assert.Equal(ParseMessage.BlockRead, tokenizer.NextToken(out token));
        Assert.Equal(ParseMessage.TokenRead, tokenizer.NextToken(out token));
        Assert.Equal(SymbolKind.End, token.Symbol.Kind);
    }
}
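Example 6 above (and Examples 7, 11, and 12 below) uses a different Tokenizer variant whose NextToken(out Token) overload returns a ParseMessage describing what was read. The following is a hedged sketch of how such a stream is usually drained, assuming only the ParseMessage and SymbolKind values that appear in these tests; it is not taken from the library's documentation.

// Illustrative drain loop for the out-parameter overload (assumed semantics only):
// keep reading until the End symbol arrives, stopping early on a lexical error.
Token token;
ParseMessage message;
do
{
    message = tokenizer.NextToken(out token);
    if (message == ParseMessage.LexicalError)
        break; // Example 11 below shows the shape of the error token in this case.
} while (token.Symbol.Kind != SymbolKind.End);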
Example 7: BlockCommentWithUnclosedString
public void BlockCommentWithUnclosedString() {
    using (TestStringReader reader = new TestStringReader("/* don't */ 'do this'")) {
        Tokenizer tokenizer = new Tokenizer(reader, grammar);
        Token token;
        Assert.Equal(ParseMessage.BlockRead, tokenizer.NextToken(out token));
        Assert.Equal(ParseMessage.TokenRead, tokenizer.NextToken(out token));
        Assert.Equal(SymbolKind.Noise, token.Symbol.Kind);
        Assert.Equal(ParseMessage.TokenRead, tokenizer.NextToken(out token));
        Assert.Equal(SymbolKind.Terminal, token.Symbol.Kind);
        Assert.Equal(ParseMessage.TokenRead, tokenizer.NextToken(out token));
        Assert.Equal(SymbolKind.End, token.Symbol.Kind);
    }
}
Example 8: ClosingBrace
private void ClosingBrace(Tokenizer.Tokenizer tokenizer)
{
    AbstractToken token = tokenizer.NextToken();
    if (token.GetType() == typeof(ClosingBrace))
    {
        return;
    }
    throw new ParseException("Expected type ClosingBrace but found: " + token.GetType() + " after " + tokenizer.GetPreviousCharacters(25));
}
Example 9: TokenizerOutput
private void TokenizerOutput()
{
    string testString = File.ReadAllText(Environment.CurrentDirectory + @"\ExampleTextFiles\SampleInputFileForTokenization.txt");
    if (string.IsNullOrEmpty(testString))
        return;
    using (var t = new StringReader(testString))
    {
        var tokenizer = new Tokenizer(new ParseReader(t));
        var currentToken = tokenizer.NextToken();
        // Print every token until the "$" end marker is reached.
        while (currentToken.GetText() != "$")
        {
            Console.WriteLine(currentToken.GetType() + " " + currentToken.GetText());
            currentToken = tokenizer.NextToken();
        }
    }
    Console.ReadKey();
}
Example 10: Comma
private void Comma(Tokenizer.Tokenizer tokenizer, Boolean optional = false)
{
    AbstractToken token = tokenizer.Peek();
    if (token.GetType() == typeof(Comma))
    {
        tokenizer.NextToken();
        return;
    }
    if (optional)
    {
        return;
    }
    throw new ParseException("Expected type Comma but found: " + token.GetType() + " after " + tokenizer.GetPreviousCharacters(25));
}
Example 11: CheckLexicalErrorOnEnd
public void CheckLexicalErrorOnEnd() {
    using (TestStringReader reader = new TestStringReader("'")) {
        Tokenizer tokenizer = new Tokenizer(reader, grammar);
        Token token;
        Assert.Equal(ParseMessage.LexicalError, tokenizer.NextToken(out token));
        Assert.Equal(SymbolKind.Error, token.Symbol.Kind);
        Assert.Equal("'", token.Text);
        Assert.Equal(ParseMessage.TokenRead, tokenizer.NextToken(out token));
        Assert.Equal(SymbolKind.End, token.Symbol.Kind);
    }
}
Example 12: EndOfDataWithUnfinishedTerminal
public void EndOfDataWithUnfinishedTerminal() {
    using (TestStringReader reader = new TestStringReader("0 'zero")) {
        Tokenizer tokenizer = new Tokenizer(reader, grammar);
        Token token;
        Assert.Equal(ParseMessage.TokenRead, tokenizer.NextToken(out token));
        Assert.Equal(SymbolKind.Terminal, token.Symbol.Kind);
        Assert.Equal(ParseMessage.TokenRead, tokenizer.NextToken(out token));
        Assert.Equal(SymbolKind.Noise, token.Symbol.Kind);
        Assert.Equal(ParseMessage.LexicalError, tokenizer.NextToken(out token));
    }
}
Example 13: split
/// <summary>
/// Splits the given composite string into an array of components using the given delimiter.
/// </summary>
public static System.String[] split(System.String composite, System.String delim)
{
    System.Collections.ArrayList components = new System.Collections.ArrayList();
    // Defend against null inputs.
    if (composite == null)
        composite = "";
    if (delim == null)
        delim = "";
    Tokenizer tok = new Tokenizer(composite, delim, false);
    bool previousTokenWasDelim = true;
    while (tok.HasMoreTokens)
    {
        System.String thisTok = tok.NextToken();
        if (thisTok.Equals(delim))
        {
            // Two delimiters in a row mean an empty component, stored as null.
            if (previousTokenWasDelim)
                components.Add(null);
            previousTokenWasDelim = true;
        }
        else
        {
            components.Add(thisTok);
            previousTokenWasDelim = false;
        }
    }
    System.String[] ret = new System.String[components.Count];
    for (int i = 0; i < components.Count; i++)
    {
        ret[i] = ((System.String) components[i]);
    }
    return ret;
}
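A hypothetical call (sample values invented) might look like the following; whether adjacent delimiters really produce null entries depends on the Tokenizer emitting delimiter tokens, which the previousTokenWasDelim bookkeeping above assumes.

// Hypothetical usage of split; the input string and delimiter are made up.
string[] fields = split("PID|12345||Smith", "|");
foreach (string field in fields)
    Console.WriteLine(field ?? "<null>");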
Example 14: ParseOperatorExpression
/// <summary>
/// Handles infix operators. Should not be called from anywhere other than ParseExpression.
/// Does not currently handle unary or right-associative operators.
/// </summary>
/// <param name="lhs">Value of the left-hand side argument to the operator.</param>
/// <param name="tokens">Token stream.</param>
/// <param name="minPrecedence">The precedence of any infix operator of which this is the rhs.</param>
/// <returns>Value of the parsed expression.</returns>
private object ParseOperatorExpression(object lhs, Tokenizer tokens, int minPrecedence)
{
    while (!tokens.EndOfTokens && IsBinaryOperator(tokens.PeekToken()) && Precedence(tokens.PeekToken()) >= minPrecedence)
    {
        string op = tokens.NextToken();
        object rhs = this.ParsePrimary(tokens);
        while (!tokens.EndOfTokens && IsBinaryOperator(tokens.PeekToken())
               && Precedence(tokens.PeekToken()) > Precedence(op))
        {
            rhs = this.ParseOperatorExpression(rhs, tokens, Precedence(tokens.PeekToken()));
        }
        lhs = ApplyOperator(op, lhs, rhs);
    }
    return lhs;
}
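The XML comment notes that this method is only meant to be called from ParseExpression, which is not shown on this page. A plausible driver, sketched here as an assumption rather than the original author's code, would parse the first operand and then hand it to ParseOperatorExpression with a minimum precedence of zero:

// Assumed top-level driver (not the original source): parse a primary
// expression, then fold in any infix operators via ParseOperatorExpression.
private object ParseExpression(Tokenizer tokens)
{
    object lhs = this.ParsePrimary(tokens);
    return this.ParseOperatorExpression(lhs, tokens, 0);
}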
Example 15: SymbolTokens_DoubleChar
public void SymbolTokens_DoubleChar()
{
    var tokenizer = new Tokenizer("==&&||!==~!~");
    Assert.That(tokenizer.NextToken(), Is.EqualTo(new Token(TokenKind.Symbol, "==")));
    Assert.That(tokenizer.NextToken(), Is.EqualTo(new Token(TokenKind.Symbol, "&&")));
    Assert.That(tokenizer.NextToken(), Is.EqualTo(new Token(TokenKind.Symbol, "||")));
    Assert.That(tokenizer.NextToken(), Is.EqualTo(new Token(TokenKind.Symbol, "!=")));
    Assert.That(tokenizer.NextToken(), Is.EqualTo(new Token(TokenKind.Symbol, "=~")));
    Assert.That(tokenizer.NextToken(), Is.EqualTo(new Token(TokenKind.Symbol, "!~")));
    Assert.That(tokenizer.NextToken(), Is.EqualTo(new Token(TokenKind.Eof)));
}