This article collects typical usage examples of the Tokenizer.AddTokenMatcher method in C#. If you are wondering how Tokenizer.AddTokenMatcher is used in practice, the hand-picked code examples below should help; you can also look further into its containing class, Tokenizer.
Shown below are 8 code examples of the Tokenizer.AddTokenMatcher method, sorted by popularity by default.
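Before the examples, here is a minimal sketch of the common pattern they all share, assuming the Tokenizer, Token, IntegerLiteralMatcher, and WhiteSpaceMatcher types that appear in the examples below: register one matcher per token class, then call Tokenize to get the token stream.

    Tokenizer tokenizer = new Tokenizer();

    // Matchers are registered one per token class; registration order matters
    // when several matchers could accept the same input (see Example 6).
    tokenizer.AddTokenMatcher(new IntegerLiteralMatcher());
    tokenizer.AddTokenMatcher(new WhiteSpaceMatcher());

    // Tokenize returns every recognized token, whitespace tokens included,
    // and throws UnknownTokenException on input no matcher accepts.
    Token[] tokens = tokenizer.Tokenize("1 2 3");   // "1", " ", "2", " ", "3"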
Example 1: BadTokenPosition
Verifies that UnknownTokenException reports the line, column, and text of the offending token. (Assert.Fail calls are added so the test cannot silently pass if no exception is thrown; the whitespace in the second input is restored to three spaces so that "X" actually sits at column 4, matching the assertion.)

public void BadTokenPosition()
{
    Tokenizer tokenizer = new Tokenizer();

    tokenizer.AddTokenMatcher(new IntegerLiteralMatcher());
    tokenizer.AddTokenMatcher(new WhiteSpaceMatcher());

    try
    {
        tokenizer.Tokenize("5 A");

        Assert.Fail();
    }
    catch (UnknownTokenException ex)
    {
        // "A" is the third character on the first line
        Assert.AreEqual(3, ex.Position.Column);
        Assert.AreEqual(1, ex.Position.Line);
        Assert.AreEqual("A", ex.Token);
    }

    try
    {
        tokenizer.Tokenize("5 4\r\n2\r\n   X\r\n5");

        Assert.Fail();
    }
    catch (UnknownTokenException ex)
    {
        // "X" is the fourth character on the third line
        Assert.AreEqual(4, ex.Position.Column);
        Assert.AreEqual(3, ex.Position.Line);
        Assert.AreEqual("X", ex.Token);
    }
}
Example 2: NumericLiterals
Checks that integer and decimal literals, including type suffixes (m, ul, f, l), are each matched as a single token; tokens[1] is the whitespace token in every case.

public void NumericLiterals()
{
    Tokenizer tokenizer = new Tokenizer();

    tokenizer.AddTokenMatcher(new IntegerLiteralMatcher());
    tokenizer.AddTokenMatcher(new DecimalLiteralMatcher());
    tokenizer.AddTokenMatcher(new WhiteSpaceMatcher());

    Token[] tokens;

    tokens = tokenizer.Tokenize("10 10.0");

    Assert.AreEqual(3, tokens.Length);
    Assert.AreEqual("10", tokens[0].Text);
    Assert.AreEqual("10.0", tokens[2].Text);

    tokens = tokenizer.Tokenize("10m 10ul");

    Assert.AreEqual(3, tokens.Length);
    Assert.AreEqual("10m", tokens[0].Text);
    Assert.AreEqual("10ul", tokens[2].Text);

    tokens = tokenizer.Tokenize("10f 10l");

    Assert.AreEqual(3, tokens.Length);
    Assert.AreEqual("10f", tokens[0].Text);
    Assert.AreEqual("10l", tokens[2].Text);
}
Example 3: BadToken
Feeds input containing a character no registered matcher accepts. Unlike Example 4, the exception is not caught here, so the original test presumably relies on an expected-exception attribute on the method.

public void BadToken()
{
    Tokenizer tokenizer = new Tokenizer();

    tokenizer.AddTokenMatcher(new IntegerLiteralMatcher());
    tokenizer.AddTokenMatcher(new WhiteSpaceMatcher());

    // "A" is not matched by any registered matcher, so this throws
    tokenizer.Tokenize("5 A");
}
Example 4: BadToken
The same scenario as Example 3, but the expected exception is asserted explicitly.

public void BadToken()
{
    try
    {
        Tokenizer tokenizer = new Tokenizer();

        tokenizer.AddTokenMatcher(new IntegerLiteralMatcher());
        tokenizer.AddTokenMatcher(new WhiteSpaceMatcher());

        tokenizer.Tokenize("5 A");

        Assert.Fail();
    }
    catch (UnknownTokenException)
    {
        // expected: "A" is not matched by any registered matcher
    }
}
Example 5: TestStringLiteral
Verifies that quoted string literals are matched as single tokens, including literals containing an escaped quote.

public void TestStringLiteral()
{
    Tokenizer tokenizer = new Tokenizer();

    tokenizer.AddTokenMatcher(new StringLiteralMatcher());
    tokenizer.AddTokenMatcher(new WhiteSpaceMatcher());
    tokenizer.AddTokenMatcher(new CharMatcher('+'));

    Token[] tokens = tokenizer.Tokenize("\"test1\" + \"test2\"");

    Assert.AreEqual(5, tokens.Length);
    Assert.AreEqual("\"test1\"", tokens[0].Text);
    Assert.AreEqual("\"test2\"", tokens[4].Text);

    // An escaped quote (\") inside the literal does not terminate it
    tokens = tokenizer.Tokenize("\"test1\" + \"test\\\"2\"");

    Assert.AreEqual(5, tokens.Length);
    Assert.AreEqual("\"test1\"", tokens[0].Text);
    Assert.AreEqual("\"test\\\"2\"", tokens[4].Text);
}
Example 6: TestFallback
Checks that when a longer match fails partway through, the tokenizer falls back to shorter matches: "(test)" is taken whole by the StringMatcher, while "(x)" falls back to the single-character matchers, giving 4 tokens in total.

public void TestFallback()
{
    ITokenMatcher matcher1 = new CharMatcher('(');
    ITokenMatcher matcher2 = new CharMatcher(')');
    ITokenMatcher matcher3 = new StringMatcher("(test)");
    ITokenMatcher matcher4 = new AnyCharMatcher("abcdefghijklmnopqrstuvwxyz");

    Tokenizer tokenizer = new Tokenizer();

    tokenizer.AddTokenMatcher(matcher1);
    tokenizer.AddTokenMatcher(matcher2);
    tokenizer.AddTokenMatcher(matcher3);
    tokenizer.AddTokenMatcher(matcher4);

    // "(test)" + "(" + "x" + ")" = 4 tokens
    Token[] tokens = tokenizer.Tokenize("(test)(x)");

    Assert.AreEqual(4, tokens.Length);
}
Example 7: StartsAndEndsWithToken
Matches delimited regions such as HTML comments. The third constructor argument appears to be a quote character that protects an embedded end marker, as the second input demonstrates.

public void StartsAndEndsWithToken()
{
    Tokenizer tokenizer = new Tokenizer();

    tokenizer.AddTokenMatcher(new StartsAndEndsWithMatcher("<!--", "-->", '"'));
    tokenizer.AddTokenMatcher(new WhiteSpaceMatcher());

    Token[] tokens;

    tokens = tokenizer.Tokenize("<!--test--> <!-- test 2 -->");

    Assert.AreEqual(3, tokens.Length);

    // The quoted "-->" inside the first comment does not end the token
    tokens = tokenizer.Tokenize("<!--test \"-->\"--> <!-- test 2 -->");

    Assert.AreEqual(3, tokens.Length);
}
Example 8: TestAnySequence
Builds an identifier matcher from a CompositeMatcher: a leading letter or underscore, followed by any sequence of letters, digits, or underscores. "Math.Max" therefore splits into an identifier, a dot, and another identifier.

public void TestAnySequence()
{
    ITokenMatcher matcher = new CompositeMatcher(
        new AnyCharMatcher("abcdefghijklmnopqrstuvwxyzABCDEFGHIJKLMNOPQRSTUVWXYZ_"),
        new SequenceOfAnyCharMatcher("abcdefghijklmnopqrstuvwxyzABCDEFGHIJKLMNOPQRSTUVWXYZ_0123456789")
        );

    // Declared but not used in this example
    ITokenMatcher alphaMatcher = new SequenceOfAnyCharMatcher("abcdefghijklmnopqrstuvwxyzABCDEFGHIJKLMNOPQRSTUVWXYZ_0123456789");

    Tokenizer tokenizer = new Tokenizer();

    tokenizer.AddTokenMatcher(matcher);
    tokenizer.AddTokenMatcher(new WhiteSpaceMatcher());
    tokenizer.AddTokenMatcher(new CharMatcher('.'));

    Token[] tokens = tokenizer.Tokenize("Math.Max");

    Assert.AreEqual(3, tokens.Length);
    Assert.AreEqual("Math", tokens[0].Text);
    Assert.AreEqual(".", tokens[1].Text);
    Assert.AreEqual("Max", tokens[2].Text);
}