本文整理汇总了C#中Tokenizer.Initialize方法的典型用法代码示例。如果您正苦于以下问题:C# Tokenizer.Initialize方法的具体用法?C# Tokenizer.Initialize怎么用?C# Tokenizer.Initialize使用的例子?那么恭喜您,这里精选的方法代码示例或许可以为您提供帮助。您也可以进一步了解该方法所在类Tokenizer的用法示例。
在下文中一共展示了Tokenizer.Initialize方法的13个代码示例,这些例子默认根据受欢迎程度排序。您可以为喜欢或者感觉有用的代码点赞,您的评价将有助于系统推荐出更棒的C#代码示例。
示例1: FirstTest
// Tokenizes the Lua 5.2.1 test-suite file "literals.lua" and dumps every
// non-trivia token (symbol, span start/end, lexeme) to a text file for
// manual inspection.
public void FirstTest()
{
    var luaFile = TestUtils.GetTestPath(@"lua-5.2.1-tests\literals.lua");
    var engine = Lua.CreateEngine();
    var context = Lua.GetLuaContext(engine);
    var unit = context.CreateFileUnit(luaFile);
    // Dispose the reader when done; the original leaked it.
    using (var reader = TestUtils.OpenReaderOrIgnoreTest(unit.GetReader))
    {
        Console.WriteLine("Reading data from {0}", new Uri(luaFile));
        var tokenizer = new Tokenizer(ErrorSink.Default, new LuaCompilerOptions() { SkipFirstLine = true });
        tokenizer.Initialize(null, reader, unit, SourceLocation.MinValue);
        // Write to the per-user temp directory rather than a hard-coded
        // C:\tmp path, which fails on machines where that folder is absent.
        var fname = Path.Combine(Path.GetTempPath(), "tokenizer.txt");
        using (var fout = File.CreateText(fname))
        {
            foreach (var token in tokenizer.EnumerateTokens().TakeWhile(t => t.Symbol != Symbol.Eof))
            {
                // Skip trivia so the dump contains only meaningful tokens.
                if (token.Symbol == Symbol.Whitespace)
                    continue;
                if (token.Symbol == Symbol.EndOfLine)
                    continue;
                fout.Write("{0,-12}", token.Symbol);
                fout.Write("{0,-10}", token.Span.Start);
                fout.Write("{0,-10}", token.Span.End);
                fout.Write("{0}", token.Lexeme);
                fout.WriteLine();
            }
        }
        Console.WriteLine("Written results to {0}", new Uri(fname));
    }
}
示例2: ParserErrorReportTests
// Parses the given Lua file and asserts (via TestUtils.AssertSyntaxError)
// that the expected syntax error is reported. useLua52 toggles Lua 5.2
// language features in the compiler options.
public void ParserErrorReportTests(string luaFile, bool useLua52)
{
    var options = new LuaCompilerOptions()
    {
        SkipFirstLine = true,
        UseLua52Features = useLua52,
    };
    var engine = Lua.CreateEngine();
    var context = Lua.GetLuaContext(engine);
    var sourceUnit = context.CreateFileUnit(luaFile);
    // Dispose the reader when parsing completes; the original leaked it.
    using (var reader = TestUtils.OpenReaderOrIgnoreTest(sourceUnit.GetReader))
    {
        var tokenizer = new Tokenizer(ErrorSink.Default, options);
        tokenizer.Initialize(null, reader, sourceUnit, SourceLocation.MinValue);
        var parser = new Parser(tokenizer, tokenizer.ErrorSink, options);
        TestUtils.AssertSyntaxError(() =>
        {
            var ast = parser.Parse();
            Assert.That(ast, Is.Not.Null);
        });
    }
}
示例3: GeneratorTest
// Runs the full front-end pipeline (tokenize, parse, generate) over the
// given source unit and asserts both the AST and the generated expression
// are produced, under TestUtils.AssertSyntaxError supervision.
public void GeneratorTest(SourceUnit sourceUnit, bool useLua52)
{
    var options = new LuaCompilerOptions()
    {
        SkipFirstLine = true,
        UseLua52Features = useLua52,
    };
    // Dispose the reader once compilation finishes (or fails); the
    // original leaked it.
    using (var reader = TestUtils.OpenReaderOrIgnoreTest(sourceUnit.GetReader))
    {
        TestUtils.AssertSyntaxError(() =>
        {
            var tokenizer = new Tokenizer(ErrorSink.Default, options);
            tokenizer.Initialize(null, reader, sourceUnit, SourceLocation.MinValue);
            var parser = new Parser(tokenizer, tokenizer.ErrorSink, options);
            var ast = parser.Parse();
            Assert.That(ast, Is.Not.Null);
            var codeContext = new CodeContext((LuaContext)sourceUnit.LanguageContext);
            var gen = new Generator(codeContext);
            var expr = gen.Compile(ast, sourceUnit);
            Assert.That(expr, Is.Not.Null);
        });
    }
}
示例4: Test1
// Tokenizes a UTF-8 encoded PHP snippet containing multi-byte characters
// (Czech, Arabic, Hebrew) in ST_IN_SCRIPTING state and drains the token
// stream until EOF. Exercises the lexer's byte/character position handling
// for non-ASCII input.
static void Test1()
{
    Tokenizer tokenizer = new Tokenizer(TextReader.Null);
    string s = "echo 'asdě' . 'řčřžý' . 'موقع للأخبا' . 'האתר' . 'as';";
    byte[] b = Encoding.UTF8.GetBytes(s);
    Stream stream = new MemoryStream(b);
    tokenizer.Initialize(new StreamReader(stream), PHP.Core.Parsers.Lexer.LexicalStates.ST_IN_SCRIPTING, true);
    // Drain all tokens; the loop body intentionally discards them — this
    // test only checks the lexer runs to EOF without faulting.
    Tokens token;
    do
    {
        token = tokenizer.GetNextToken();
    } while (token != Tokens.EOF);
}
示例5: RunLexerOnLuaTestSuiteFile
// Runs the lexer over one Lua test-suite file, counting all tokens up to
// EOF, and reports elapsed time and token count to the console.
public void RunLexerOnLuaTestSuiteFile(string luaFile, bool useLua52)
{
    var options = new LuaCompilerOptions()
    {
        SkipFirstLine = true,
        UseLua52Features = useLua52,
    };
    var engine = Lua.CreateEngine();
    var context = Lua.GetLuaContext(engine);
    var unit = context.CreateFileUnit(luaFile);
    // Dispose the reader after tokenizing; the original leaked it.
    using (var reader = TestUtils.OpenReaderOrIgnoreTest(unit.GetReader))
    {
        var tokenizer = new Tokenizer(ErrorSink.Default, options);
        var sw = new Stopwatch();
        sw.Start();
        tokenizer.Initialize(null, reader, unit, SourceLocation.MinValue);
        int counter = tokenizer.EnumerateTokens()
            .TakeWhile(t => t.Symbol != Symbol.Eof)
            .Count();
        sw.Stop();
        Console.WriteLine("Tokenizer run: {0} ms, {1} tokens", sw.ElapsedMilliseconds, counter);
    }
}
示例6: TokenizeLine
// Tokenizes a single line of the snapshot, seeding the tokenizer with the
// lexer state carried over from the previous line, and returns the tokens
// together with the tokenizer's state at end of line.
private LineTokenization TokenizeLine(Tokenizer tokenizer, ITextSnapshot snapshot, object previousLineState, int lineNo)
{
    var snapshotLine = snapshot.GetLineFromLineNumber(lineNo);
    var span = new SnapshotSpan(snapshot, snapshotLine.Start, snapshotLine.LengthIncludingLineBreak);
    var reader = new SnapshotSpanSourceCodeReader(span);
    // Source locations are 1-based for line and column.
    var startLocation = new SourceLocation(snapshotLine.Start.Position, lineNo + 1, 1);
    tokenizer.Initialize(previousLineState, reader, startLocation);
    var lineTokens = tokenizer.ReadTokens(span.Length).ToArray();
    return new LineTokenization(lineTokens, tokenizer.CurrentState);
}
示例7: CompileString
// Compiles a Lua source snippet inline against the innermost executing
// function scope and returns a delegate that evaluates it.
static Func<dynamic> CompileString(CodeContext context, string source)
{
    ContractUtils.RequiresNotNull(context, nameof(context));
    var sourceUnit = context.Language.CreateSnippet(source, SourceCodeKind.Statements);
    var lexer = new Tokenizer(ErrorSink.Default, LuaCompilerOptions.Default);
    lexer.Initialize(null, sourceUnit.GetReader(), sourceUnit, SourceLocation.MinValue);
    var parser = new Parser(lexer, lexer.ErrorSink);
    var ast = parser.Parse();
    var gen = new Generator(context);
    // Prefer the scope of the innermost function frame that is actually
    // executing; fall back to a fresh root scope for top-level evaluation.
    var fnStack = context.FunctionStacks.LastOrDefault(x => x.ExecScope != null);
    if (fnStack == default(FunctionStack))
        fnStack = new FunctionStack(context, null, LuaScope.CreateRoot(context), "=(compiled code)");
    var expr = gen.CompileInline(ast, fnStack.ExecScope, context.ExecutingScopeStorage, sourceUnit);
    return expr.Compile();
}
示例8: CreateParser
// Creates a Parser over the given reader for the specified Python language
// version. parserOptions may be null, in which case ParserOptions.Default
// is used. Throws ArgumentNullException if reader is null.
public static Parser CreateParser(TextReader reader, PythonLanguageVersion version, ParserOptions parserOptions) {
    if (reader == null) {
        // nameof keeps the message correct if the parameter is renamed.
        throw new ArgumentNullException(nameof(reader));
    }
    var options = parserOptions ?? ParserOptions.Default;
    // parser must be declared before the tokenizer so the comment callback
    // can capture it; it is assigned below once the tokenizer exists.
    Parser parser = null;
    var tokenizer = new Tokenizer(
        version, options.ErrorSink,
        (options.Verbatim ? TokenizerOptions.Verbatim : TokenizerOptions.None) | TokenizerOptions.GroupingRecovery,
        (span, text) => options.RaiseProcessComment(parser, new CommentEventArgs(span, text)));
    tokenizer.Initialize(null, reader, SourceLocation.MinValue);
    tokenizer.IndentationInconsistencySeverity = options.IndentationInconsistencySeverity;
    parser = new Parser(
        tokenizer,
        options.ErrorSink ?? ErrorSink.Null,
        version,
        options.Verbatim,
        options.BindReferences,
        options.PrivatePrefix
    ) { _sourceReader = reader };
    return parser;
}
示例9: CreateParser
// Creates a Parser over the given reader for the specified J language
// version. parserOptions may be null, in which case ParserOptions.Default
// is used. Throws ArgumentNullException if reader is null.
public static Parser CreateParser(TextReader reader, JLanguageVersion version, ParserOptions parserOptions)
{
    if (reader == null) {
        // nameof keeps the message correct if the parameter is renamed.
        throw new ArgumentNullException(nameof(reader));
    }
    var options = parserOptions ?? ParserOptions.Default;
    Tokenizer tokenizer = new Tokenizer(version, options.ErrorSink, (options.Verbatim ? TokenizerOptions.Verbatim : TokenizerOptions.None) | TokenizerOptions.GroupingRecovery);
    tokenizer.Initialize(null, reader, SourceLocation.MinValue);
    tokenizer.IndentationInconsistencySeverity = options.IndentationInconsistencySeverity;
    Parser result = new Parser(tokenizer,
        options.ErrorSink ?? ErrorSink.Null,
        version,
        options.Verbatim,
        options.BindReferences,
        options.PrivatePrefix
    );
    result._sourceReader = reader;
    return result;
}
示例10: LexerErrorReportTests
// Lexes the given snippet and checks the error-reporting contract: tests
// tagged with the "FailureCase" property must raise a SyntaxErrorException
// whose message equals `expect`; untagged tests must tokenize cleanly.
public void LexerErrorReportTests(string snippet, string expect)
{
    bool expectFailure = TestContext.CurrentContext.Test.Properties.Contains("FailureCase");
    var tokenizer = new Tokenizer(ErrorSink.Default, new LuaCompilerOptions() { SkipFirstLine = true });
    var sourceUnit = engine.GetLuaContext().CreateSnippet(snippet, SourceCodeKind.Expression);
    tokenizer.Initialize(null, sourceUnit.GetReader(), sourceUnit, SourceLocation.MinValue);
    try
    {
        // Force full enumeration (trivia included) so any lexer error
        // buried late in the snippet actually surfaces.
        var unused = tokenizer.EnumerateTokens(s => true)
            .TakeWhile(t => t.Symbol != Symbol.Eof)
            .Last();
        if (expectFailure)
            Assert.Fail("Expected a SyntaxErrorException");
    }
    catch (SyntaxErrorException ex)
    {
        Assert.That(ex.Message, Is.EqualTo(expect));
    }
}
示例11: TestOneString
// Tokenizes originalText in verbatim mode and round-trips it: as each token
// arrives, its preceding whitespace plus its verbatim image are appended to
// `output`, which must reproduce the original text character-for-character.
// On the first mismatch a context window is printed and an assertion fails.
// Returns the full list of tokens with their spans.
// NOTE(review): "PreceedingWhiteSpace" is the (misspelled) name of the
// tokenizer's own API — do not "fix" it here.
private static List<TokenWithSpan> TestOneString(PythonLanguageVersion version, TokenizerOptions optionSet, string originalText) {
StringBuilder output = new StringBuilder();
var tokenizer = new Tokenizer(version, options: optionSet);
tokenizer.Initialize(new StringReader(originalText));
Token token;
int prevOffset = 0;
List<TokenWithSpan> tokens = new List<TokenWithSpan>();
while ((token = tokenizer.GetNextToken()) != Tokens.EndOfFileToken) {
tokens.Add(new TokenWithSpan(token, tokenizer.TokenSpan));
// Rebuild the source: leading trivia followed by the token's exact image.
output.Append(tokenizer.PreceedingWhiteSpace);
output.Append(token.VerbatimImage);
const int contextSize = 50;
// Only the region appended by this token needs re-checking; everything
// before prevOffset was verified on earlier iterations.
for (int i = prevOffset; i < originalText.Length && i < output.Length; i++) {
if (originalText[i] != output[i]) {
// output some context
StringBuilder x = new StringBuilder();
StringBuilder y = new StringBuilder();
StringBuilder z = new StringBuilder();
// x = original, y = reconstruction, z = caret marking the mismatch.
for (int j = Math.Max(0, i - contextSize); j < Math.Min(Math.Min(originalText.Length, output.Length), i + contextSize); j++) {
x.AppendRepr(originalText[j]);
y.AppendRepr(output[j]);
if (j == i) {
z.Append("^");
} else {
z.Append(" ");
}
}
Console.WriteLine("Mismatch context at {0}:", i);
Console.WriteLine("Original: {0}", x.ToString());
Console.WriteLine("New     : {0}", y.ToString());
Console.WriteLine("Differs : {0}", z.ToString());
Console.WriteLine("Token   : {0}", token);
Assert.AreEqual(originalText[i], output[i], String.Format("Characters differ at {0}, got {1}, expected {2}", i, output[i], originalText[i]));
}
}
prevOffset = output.Length;
}
// Trailing whitespace after the last token must also round-trip.
output.Append(tokenizer.PreceedingWhiteSpace);
Assert.AreEqual(originalText.Length, output.Length);
return tokens;
}
示例12: ScanString
// Parses `str` as the body of a double-quoted expandable string and returns
// the resulting expression AST. Throws ParseException if the parser reports
// any errors.
internal static ExpressionAst ScanString(string str)
{
    // Wrap the input in double quotes — doubling any embedded quotes per
    // PowerShell escaping — so it lexes as one expandable-string token.
    // ("\"" replaces the original's obscure (char)34.)
    string quoted = "\"" + str.Replace("\"", "\"\"") + "\"";
    Parser parser = new Parser();
    Tokenizer tokenizer = new Tokenizer(parser);
    tokenizer.Initialize(null, quoted, null);
    StringExpandableToken stringToken = (StringExpandableToken)tokenizer.NextToken();
    ExpressionAst expressionAst = parser.ExpandableStringRule(stringToken);
    if (parser._errorList.Any<ParseError>())
    {
        throw new ParseException(parser._errorList.ToArray());
    }
    return expressionAst;
}
示例13: CompileString
// Compiles a Lua source snippet inline against the current evaluation
// scope of the tracing context and returns a delegate that evaluates it.
static Func<dynamic> CompileString(LuaContext context, string source)
{
    ContractUtils.RequiresNotNull(context, nameof(context));
    var sourceUnit = context.CreateSnippet(source, SourceCodeKind.Statements);
    var lexer = new Tokenizer(ErrorSink.Default, LuaCompilerOptions.Default);
    lexer.Initialize(null, sourceUnit.GetReader(), sourceUnit, SourceLocation.MinValue);
    var parser = new Parser(lexer, lexer.ErrorSink);
    var ast = parser.Parse();
    var gen = new Generator(context);
    // Evaluate in the root of the scope currently being traced so the
    // snippet sees the same variables as the running program.
    var expr = gen.CompileInline(ast, context.Trace.CurrentEvaluationScope.GetRoot(), context.Trace.CurrentScopeStorage, sourceUnit);
    return expr.Compile();
}