

C# Tokenizer Class Code Examples

This article collects typical usage examples of the Tokenizer class in C#. If you are wondering what the C# Tokenizer class is for, how to call it, or what real-world usage looks like, the class code examples selected here may help.


The Tokenizer classes shown here come from a number of different projects and namespaces. A total of 15 code examples are listed below, ordered by popularity by default.
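Because each example below uses a different, unrelated Tokenizer class (HTML tokenizer states, a Lua lexer, a command-line tokenizer, and so on), there is no single shared API. Purely as orientation, the following minimal sketch uses hypothetical names (SimpleTokenizer, NextToken) that belong to none of these projects; it only illustrates the common pattern of constructing a tokenizer over some input and pulling tokens until the input is exhausted.

using System;

// Hypothetical illustration only; not one of the project-specific Tokenizer classes below.
class SimpleTokenizer
{
    private readonly string _input;
    private int _pos;

    public SimpleTokenizer(string input)
    {
        _input = input;
    }

    // Returns the next whitespace-delimited token, or null when the input is exhausted.
    public string NextToken()
    {
        while (_pos < _input.Length && char.IsWhiteSpace(_input[_pos])) _pos++;
        if (_pos >= _input.Length) return null;

        int start = _pos;
        while (_pos < _input.Length && !char.IsWhiteSpace(_input[_pos])) _pos++;
        return _input.Substring(start, _pos - start);
    }
}

class Program
{
    static void Main()
    {
        var tokenizer = new SimpleTokenizer("--alpha 123 beta");
        for (string token = tokenizer.NextToken(); token != null; token = tokenizer.NextToken())
        {
            Console.WriteLine(token);   // prints "--alpha", "123", "beta" on separate lines
        }
    }
}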

Example 1: Read

        public override void Read(Tokenizer t)
        {
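            // "After attribute value (quoted)" handling (per the source file name):
            // whitespace moves on to the next attribute name, '/' starts a self-closing
            // tag, '>' emits the tag, and any other character is a missing-space error.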
            char? c = t.ConsumeChar();

            switch(c){
                case Chars.CHARACTER_TABULATION:
                case Chars.LINE_FEED:
                case Chars.FORM_FEED:
                case Chars.SPACE:
                    t.ChangeTokenState<BeforeAttributeNameState>();
                    return;
                case Chars.SOLIDUS:
                    t.ChangeTokenState<SelfClosingStartTagState>();
                    return;
                case Chars.GREATER_THAN_SIGN:
                    t.ChangeTokenState<DataState>();
                    t.EmitToken();
                    return;
                case null:
                    OnMessageRaised(new SuddenlyEndAtAttributeError());
                    t.UnConsume(1);
                    t.ChangeTokenState<DataState>();
                    return;
                default:
                    OnMessageRaised(new MissingSpaceAfterAttributeValueError(c));
                    t.UnConsume(1);
                    t.ChangeTokenState<BeforeAttributeNameState>();
                    return;
            }
        }
Developer: bakera, Project: Test, Lines: 30, Source: AfterAttributeValueQuotedState.cs

Example 2: ReadHeaders

 internal static Dictionary<string, string> ReadHeaders(Tokenizer reader)
 {
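     // Reads all serialization headers into a dictionary, trimming leading
     // whitespace from each header value.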
     var s = BeginReadHeaders(reader);
     var result = new Dictionary<string, string>();
     ReadHeaders(reader, s, (key, value) => result[key] = value.TrimStart(new char[0]));
     return result;
 }
Developer: kevinobee, Project: Sitecore.Serialization.Checker, Lines: 7, Source: SerializationUtils.cs

Example 3: FirstTest

        public void FirstTest()
        {
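            // Tokenizes a Lua test file and writes each non-whitespace, non-end-of-line
            // token's symbol, span and lexeme to a text file.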
            var luaFile = TestUtils.GetTestPath(@"lua-5.2.1-tests\literals.lua");

            var engine = Lua.CreateEngine();
            var context = Lua.GetLuaContext(engine);
            var unit = context.CreateFileUnit(luaFile);
            var reader = TestUtils.OpenReaderOrIgnoreTest(unit.GetReader);
            Console.WriteLine("Reading data from {0}", new Uri(luaFile));

            var tokenizer = new Tokenizer(ErrorSink.Default, new LuaCompilerOptions() { SkipFirstLine = true });
            tokenizer.Initialize(null, reader, unit, SourceLocation.MinValue);

            var fname = @"C:\tmp\tokenizer.txt";
            using (var fout = File.CreateText(fname))
            {
                foreach (var token in tokenizer.EnumerateTokens().TakeWhile(t => t.Symbol != Symbol.Eof))
                {
                    if (token.Symbol == Symbol.Whitespace)
                        continue;
                    if (token.Symbol == Symbol.EndOfLine)
                        continue;

                    fout.Write("{0,-12}", token.Symbol);
                    fout.Write("{0,-10}", token.Span.Start);
                    fout.Write("{0,-10}", token.Span.End);
                    fout.Write("{0}", token.Lexeme);

                    fout.WriteLine();
                }
            }
            Console.WriteLine("Written results to {0}", new Uri(fname));
        }
Developer: SPARTAN563, Project: IronLua, Lines: 33, Source: TokeniserTests.cs

Example 4: Read

        public override void Read(Tokenizer t)
        {
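            // "Comment end dash" handling (per the source file name): a second '-' moves
            // to the comment-end state; NUL and any other character append the pending '-'
            // to the comment text and return to the comment state.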
            char? c = t.ConsumeChar();
            switch(c){
                case Chars.HYPHEN_MINUS:
                    t.ChangeTokenState<CommentEndState>();
                    return;
                case Chars.NULL:
                    OnMessageRaised(new NullInCommentError());
                    t.CurrentCommentToken.Append(Chars.HYPHEN_MINUS);
                    t.CurrentCommentToken.Append(Chars.REPLACEMENT_CHARACTER);

                    t.ChangeTokenState<CommentState>();
                    return;
                case null:
                    OnMessageRaised(new SuddenlyEndAtCommentError());
                    t.EmitToken();
                    t.UnConsume(1);
                    t.ChangeTokenState<DataState>();
                    return;
                default:
                    t.CurrentCommentToken.Append(Chars.HYPHEN_MINUS);
                    t.CurrentCommentToken.Append(c);
                    t.ChangeTokenState<CommentState>();
                    return;
            }
        }
Developer: bakera, Project: Test, Lines: 27, Source: CommentEndDashState.cs

Example 5: Parse

 public ITag Parse(string tag)
 {
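     // Tokenizes the tag text using the tag-library separators and literals,
     // then parses the resulting token stream via a ParseHelper.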
     var tokenizer = new Tokenizer(tag, true, true, null, TagLibConstants.SEPERATORS, TagLibConstants.LITERALS, null);
     var helper = new ParseHelper(tokenizer);
     helper.Init();
     return Construct(helper,_factory.GetNewLocator()).Parse();
 }
Developer: rslijp, Project: sharptiles, Lines: 7, Source: TagLibParserFactory.cs

Example 6: TokenizeOneLongOption

 public void TokenizeOneLongOption()
 {
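     // "--alpha 123" should yield a long-option token, then a value token,
     // and then the end of the input.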
     var tokenizer = new Tokenizer("--alpha 123");
     AssertTokenEqual(TokenKind.LongOption, "alpha", tokenizer.NextToken);
     AssertTokenEqual(TokenKind.Value, "123", tokenizer.NextToken);
     Assert.IsTrue(tokenizer.AtEnd);
 }
Developer: goldshtn, Project: msos, Lines: 7, Source: TokenizerTests.cs

Example 7: Read

        public override void Read(Tokenizer t)
        {
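            // "Script data double escape start" handling (per the source file name):
            // letters are collected into the temporary buffer; on a delimiter the state
            // becomes double-escaped if the buffer spells "script", escaped otherwise.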
            char? c = t.ConsumeChar();

            if(c.IsLatinCapitalLetter()){
                t.TemporaryBuffer += c.ToLower();
                t.EmitToken(c);
                return;
            } else if(c.IsLatinSmallLetter()){
                t.TemporaryBuffer += c;
                t.EmitToken(c);
                return;
            }

            switch(c){
                case Chars.CHARACTER_TABULATION:
                case Chars.LINE_FEED:
                case Chars.FORM_FEED:
                case Chars.SPACE:
                case Chars.SOLIDUS:
                case Chars.GREATER_THAN_SIGN:
                    if(t.TemporaryBuffer.Equals("script", StringComparison.InvariantCulture)){
                        t.ChangeTokenState<ScriptDataDoubleEscapedState>();
                    } else {
                        t.ChangeTokenState<ScriptDataEscapedState>();
                    }
                    t.EmitToken(c);
                    return;
                default:
                    t.UnConsume(1);
                    t.ChangeTokenState<ScriptDataEscapedState>();
                    return;
            }
        }
Developer: bakera, Project: Test, Lines: 34, Source: ScriptDataDoubleEscapeStartState.cs

Example 8: CreatingSimpleTree

        public void CreatingSimpleTree()
        {
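            // Tokenizes and parses code4.txt, then checks the AST shape: a program root,
            // a statement list, and an operator node whose operands are "a" and "b".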
            Tokenizer tokenizer = new Tokenizer(s_errorHandler, true);
            List<Token> tokens = tokenizer.process(File.OpenText("code4.txt"));

            Parser parser = new Parser(tokens, s_errorHandler);
            parser.process();

            //Console.WriteLine("Tree: " + parser.getAST().toStringTree());
            ASTPainter p = new ASTPainter();
            p.PaintAST(parser.getAST());

            AST root = parser.getAST();
            Assert.AreEqual(Token.TokenType.PROGRAM_ROOT, root.getTokenType());

            AST statementList = root.getChild(0);
            Assert.AreEqual(Token.TokenType.STATEMENT_LIST, statementList.getTokenType());

            AST multiplicationTree = statementList.getChild(1);
            Assert.AreEqual(Token.TokenType.OPERATOR, multiplicationTree.getTokenType());

            AST operand1 = multiplicationTree.getChild(0);
            AST operand2 = multiplicationTree.getChild(1);
            Assert.AreEqual("a", operand1.getTokenString());
            Assert.AreEqual("b", operand2.getTokenString());
        }
Developer: substans, Project: Sprak, Lines: 26, Source: Parser_TEST.cs

Example 9: Read

 public override void Read(Tokenizer t)
 {
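     // "End tag open" handling (per the source file name), i.e. the character after "</":
     // '>' is an empty end tag error, EOF re-emits "</" as character tokens, a letter
     // starts an end tag token, and anything else falls through to a bogus comment.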
     char? c = t.ConsumeChar();
     switch(c){
         case Chars.GREATER_THAN_SIGN:
             OnMessageRaised(new EmptyEndTagError());
             t.ChangeTokenState<DataState>();
             return;
         case null:
             OnMessageRaised(new SuddenlyEndAtTagError());
             t.EmitToken(Chars.LESS_THAN_SIGN);
             t.EmitToken(Chars.SOLIDUS);
             t.UnConsume(1);
             t.ChangeTokenState<DataState>();
             return;
     }
     if(c.IsLatinCapitalLetter()){
         t.CurrentToken = new EndTagToken(){Name = c.ToLower().ToString()};
         t.ChangeTokenState<TagNameState>();
         return;
     } else if(c.IsLatinSmallLetter()){
         t.CurrentToken = new EndTagToken(){Name = c.ToString()};
         t.ChangeTokenState<TagNameState>();
         return;
     }
     OnMessageRaised(new UnknownEndTagError());
     t.ChangeTokenState<BogusCommentState>();
     return;
 }
Developer: bakera, Project: Test, Lines: 29, Source: EndTagOpenState.cs

Example 10: CheckDuplicateAttribute

 private void CheckDuplicateAttribute(Tokenizer t)
 {
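     // If the attribute name just read already exists on the current tag,
     // drop it and report a duplicate-attribute error.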
     if(t.CurrentTagToken.IsDuplicateAttribute){
         t.CurrentTagToken.DropAttribute();
         OnMessageRaised(new DuplicateAttributeError(t.CurrentTagToken.CurrentAttribute.Name));
     }
 }
Developer: bakera, Project: Test, Lines: 7, Source: AttributeNameState.cs

Example 11: Read

 public override void Read(Tokenizer t)
 {
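     // "Tag open" handling (per the source file name), i.e. the character after '<':
     // '!' opens a markup declaration, '/' an end tag, a letter a start tag; '?' and
     // any other character are error cases.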
     char? c = t.ConsumeChar();
     switch(c){
         case Chars.EXCLAMATION_MARK:
             t.ChangeTokenState<MarkupDeclarationOpenState>();
             return;
         case Chars.SOLIDUS:
             t.ChangeTokenState<EndTagOpenState>();
             return;
         case Chars.QUESTION_MARK:
             OnMessageRaised(new ProcessingInstructionError());
             t.ChangeTokenState<BogusCommentState>();
             return;
     }
     if(c.IsLatinCapitalLetter()){
         t.CurrentToken = new StartTagToken(){Name = c.ToLower().ToString()};
         t.ChangeTokenState<TagNameState>();
         return;
     } else if(c.IsLatinSmallLetter()){
         t.CurrentToken = new StartTagToken(){Name = c.ToString()};
         t.ChangeTokenState<TagNameState>();
         return;
     }
     OnMessageRaised(new UnknownMarkupError());
     t.UnConsume(1);
     t.ChangeTokenState<DataState>();
     t.EmitToken(Chars.LESS_THAN_SIGN);
     return;
 }
Developer: bakera, Project: Test, Lines: 30, Source: TagOpenState.cs

Example 12: Read

        public override void Read(Tokenizer t)
        {
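            // "After DOCTYPE system identifier" handling (per the source file name):
            // whitespace is ignored, '>' emits the DOCTYPE token, EOF sets force-quirks
            // and emits, and anything else becomes a bogus DOCTYPE.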
            char? c = t.ConsumeChar();

            switch(c){
                case Chars.CHARACTER_TABULATION:
                case Chars.LINE_FEED:
                case Chars.FORM_FEED:
                case Chars.SPACE:
                    return;
                case Chars.GREATER_THAN_SIGN:{
                    t.ChangeTokenState<DataState>();
                    t.EmitToken();
                    return;
                }
                case null:
                    OnMessageRaised(new SuddenlyEndAtDoctypeError());
                    ((DoctypeToken)t.CurrentToken).ForceQuirks = true;
                    t.UnConsume(1);
                    t.ChangeTokenState<DataState>();
                    t.EmitToken();
                    return;
                default:
                    OnMessageRaised(new UnknownIdentifierAfterDoctypeError());
                    t.ChangeTokenState<BogusDoctypeState>();
                    return;
            }
        }
Developer: bakera, Project: Test, Lines: 28, Source: AfterDoctypeSystemIdentifierState.cs

Example 13: IsMatchImpl

        protected override Token IsMatchImpl( Tokenizer tokenizer )
        {
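            // Consumes the keyword characters one by one; unless sub-string matches are
            // allowed, the keyword must be followed by whitespace or a special character
            // for the match to count.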
            foreach( var character in Match )
            {
                if( tokenizer.Current == character.ToString( CultureInfo.InvariantCulture ) )
                {
                    tokenizer.Consume();
                }
                else
                {
                    return null;
                }
            }

            bool found;

            if( !AllowAsSubString )
            {
                var next = tokenizer.Current;

                found = string.IsNullOrWhiteSpace( next ) || SpecialCharacters.Any( character => character.Match == next );
            }
            else
            {
                found = true;
            }

            if( found )
            {
                return new Token( myTokenType, Match );
            }

            return null;
        }
Developer: JackWangCUMT, Project: Plainion.GraphViz, Lines: 34, Source: MatchKeyword.cs

Example 14: GeneratorTest

        public void GeneratorTest(SourceUnit sourceUnit, bool useLua52)
        {
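            // Tokenizes, parses and compiles a Lua source unit inside the
            // TestUtils.AssertSyntaxError helper, checking along the way that the
            // AST and the compiled expression are non-null.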
            var options = new LuaCompilerOptions()
            {
                SkipFirstLine = true,
                UseLua52Features = useLua52,
            };

            var reader = TestUtils.OpenReaderOrIgnoreTest(sourceUnit.GetReader);

            TestUtils.AssertSyntaxError(() =>
            {
                var tokenizer = new Tokenizer(ErrorSink.Default, options);
                tokenizer.Initialize(null, reader, sourceUnit, SourceLocation.MinValue);
                var parser = new Parser(tokenizer, tokenizer.ErrorSink, options);
                var ast = parser.Parse();
                Assert.That(ast, Is.Not.Null);

                var codeContext = new CodeContext((LuaContext)sourceUnit.LanguageContext);

                var gen = new Generator(codeContext);
                var expr = gen.Compile(ast, sourceUnit);
                Assert.That(expr, Is.Not.Null);
            });
        }
Developer: SPARTAN563, Project: IronLua, Lines: 25, Source: GeneratorTests.cs

Example 15: TestTokenizerBasicWhitespaceSeparatedStringsWithWhitespaceIncluded

        public void TestTokenizerBasicWhitespaceSeparatedStringsWithWhitespaceIncluded()
        {
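            // With WhitespaceBehavior.DelimitAndInclude, whitespace runs come back as
            // their own tokens, so no two non-whitespace tokens may be adjacent and
            // concatenating every token value must reproduce the original input exactly.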
            var input = "one two      three \n\n    \t   four\t\t\tfive\t\t\tsix";
            Tokenizer<Token> tokenizer = new Tokenizer<Token>();
            tokenizer.WhitespaceBehavior = WhitespaceBehavior.DelimitAndInclude;
            var tokens = tokenizer.Tokenize(input);

            var reconstructed = "";
            bool? lastTokenWasWhitespace = null;
            Token lastToken = null;
            foreach (var token in tokens)
            {
                if(token.Value == null)
                {
                    Assert.Fail("Unexpected null valued token");
                }
                else if(string.IsNullOrWhiteSpace(token.Value))
                {
                    lastTokenWasWhitespace = true;
                }
                else
                {
                    if(lastTokenWasWhitespace.HasValue && lastTokenWasWhitespace.Value == false)
                    {
                        Assert.Fail("2 consecutive non-whitespace tokens encountered.");
                    }
                    lastTokenWasWhitespace = false;
                }

                reconstructed += token.Value;
                lastToken = token;
            }

            Assert.AreEqual(input, reconstructed);
        }
Developer: abbottdev, Project: PowerArgs, Lines: 35, Source: TokenizerTests.cs


Note: The Tokenizer class examples in this article were compiled by 纯净天空 from open-source code and documentation platforms such as GitHub and MSDocs. The code snippets were selected from open-source projects contributed by their respective authors; copyright of the source code remains with the original authors, and any redistribution or use should follow the corresponding project's license. Please do not reproduce without permission.