

C# Lexer.Tokenise Method Code Examples

This article collects typical usage examples of the Lexer.Tokenise method in C#. If you are wondering what Lexer.Tokenise does, how to call it, or what it looks like in real code, the curated examples below should help. You can also explore further usage examples of the Lexer class that contains this method.


Six code examples of the Lexer.Tokenise method are shown below, sorted by popularity by default. You can upvote the examples you like or find useful; your feedback helps the system recommend better C# code examples.
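
Before the examples, here is a self-contained sketch of what a regex-driven Tokenise loop generally looks like. This is not the OctoBotSharp implementation: the Token, TokenDefinition and TokenDefinitionSettings shapes below are assumptions modelled on how the tests use them, and details such as the end-of-input token mentioned in Example 2 are left out.

using System;
using System.Collections.Generic;
using System.Text.RegularExpressions;

enum Token { Int, OpenBracket, CloseBracket }

enum TokenDefinitionSettings { None, LexerIgnore }

// A token definition: a regex pattern, the token kind it produces, and whether
// the lexer should consume matches silently instead of returning them.
sealed class TokenDefinition
{
    public Regex Pattern { get; }
    public Token Token { get; }
    public bool Ignored { get; }

    public TokenDefinition(string pattern, Token token,
        TokenDefinitionSettings settings = TokenDefinitionSettings.None)
    {
        // \G anchors the pattern to the position passed to Regex.Match,
        // so a definition can only match at the lexer's current position.
        Pattern = new Regex(@"\G(?:" + pattern + ")");
        Token = token;
        Ignored = settings == TokenDefinitionSettings.LexerIgnore;
    }
}

sealed class Lexer
{
    private readonly List<TokenDefinition> definitions;

    public Lexer(IEnumerable<TokenDefinition> definitions)
    {
        this.definitions = new List<TokenDefinition>(definitions);
    }

    public IEnumerable<Token> Tokenise(string input)
    {
        int position = 0;
        while (position < input.Length)
        {
            bool matched = false;
            foreach (var definition in definitions)
            {
                var match = definition.Pattern.Match(input, position);
                if (!match.Success || match.Length == 0)
                    continue;

                position += match.Length;
                matched = true;

                // Ignored definitions consume input without producing a token.
                if (!definition.Ignored)
                    yield return definition.Token;
                break;
            }

            if (!matched)
                throw new InvalidOperationException(
                    "No token definition matches the input at position " + position + ".");
        }
    }
}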

Example 1: Lexer_NoMatchingTokens_ShouldThrowException

        public void Lexer_NoMatchingTokens_ShouldThrowException()
        {
            var lexer = new Lexer(Enumerable.Empty<TokenDefinition>());

            var result = lexer.Tokenise("   ").ToArray();

            Assert.Fail("Exception should have been thrown");
        }
Developer: RichardAllanBrown, Project: OctoBotSharp, Lines: 8, Source: LexerTest.cs

Example 2: Lexer_EmptyString_ShouldReturnOneTokenOfEnd

        public void Lexer_EmptyString_ShouldReturnOneTokenOfEnd()
        {
            var lexer = new Lexer(Enumerable.Empty<TokenDefinition>());

            var result = lexer.Tokenise("");

            AssertTokens("Empty string should have no tokens", result);
        }
Developer: RichardAllanBrown, Project: OctoBotSharp, Lines: 8, Source: LexerTest.cs

Example 3: Lexer_DoesNotReturnIgnoreTokens_ReturnsRightTokens

        public void Lexer_DoesNotReturnIgnoreTokens_ReturnsRightTokens()
        {
            var def = new[] { new TokenDefinition(@"[-+]?\d+", Token.Int, TokenDefinitionSettings.LexerIgnore) };
            var lexer = new Lexer(def);

            var result = lexer.Tokenise("1");

            AssertTokens("Lexer should not return token when it's set to ignore", result);
        }
Developer: RichardAllanBrown, Project: OctoBotSharp, Lines: 9, Source: LexerTest.cs

Example 4: Lexer_CanMatchAToken_ShouldReturnAppropriateTokens

        public void Lexer_CanMatchAToken_ShouldReturnAppropriateTokens()
        {
            var def = new[] { new TokenDefinition(@"[-+]?\d+", Token.Int) };
            var lexer = new Lexer(def);

            var result = lexer.Tokenise("4");

            AssertTokens("String with only one int should only have int token", result, Token.Int);
        }
Developer: RichardAllanBrown, Project: OctoBotSharp, Lines: 9, Source: LexerTest.cs

Example 5: Lexer_CanHandleMultipleTokenDefs

        public void Lexer_CanHandleMultipleTokenDefs()
        {
            var def = new[]
            {
                new TokenDefinition(@"[-+]?\d+", Token.Int, TokenDefinitionSettings.LexerIgnore),
                new TokenDefinition(@"\(", Token.OpenBracket),
                new TokenDefinition(@"\)", Token.CloseBracket),
            };

            var lexer = new Lexer(def);

            var result = lexer.Tokenise("(3)23(())");
            var expected = new [] { Token.OpenBracket, Token.CloseBracket, Token.OpenBracket, Token.OpenBracket, Token.CloseBracket, Token.CloseBracket };

            AssertTokens("Brackets should be lexed", result, expected);
        }
Developer: RichardAllanBrown, Project: OctoBotSharp, Lines: 16, Source: LexerTest.cs
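
All of the tests above go through an AssertTokens helper that these snippets do not include. Its real shape in OctoBotSharp is not shown here; assuming NUnit and a Tokenise result that can be compared as a plain sequence of Token values (the actual return type may carry more information, such as the matched text), a minimal version could look like this:

        // Hypothetical helper inside the test class; NUnit's CollectionAssert and
        // System.Linq (for ToArray) are assumed.
        private static void AssertTokens(string message, IEnumerable<Token> result, params Token[] expected)
        {
            // params allows calls with no expected tokens (Examples 2 and 3),
            // individual tokens (Example 4), or a prepared array (Example 5).
            CollectionAssert.AreEqual(expected, result.ToArray(), message);
        }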

Example 6: Load

		// Given a bunch of raw text, load all nodes that were inside it.
		// You can call this multiple times to append to the collection of nodes,
		// but note that new nodes will replace older ones with the same name.
		// Returns the compiled Program containing the loaded nodes.
		public Program Load(string text, Library library, string fileName, Program includeProgram, bool showTokens, bool showParseTree, string onlyConsiderNode) {

			// The final parsed nodes that were in the file we were given
			Dictionary<string, Yarn.Parser.Node> nodes = new Dictionary<string, Parser.Node>();

			// Load the raw data and get the array of node title-text pairs
			var nodeInfos = ParseInput (text);

			int nodesLoaded = 0;

			foreach (NodeInfo nodeInfo in nodeInfos) {

				if (onlyConsiderNode != null && nodeInfo.title != onlyConsiderNode)
					continue;

				// Attempt to parse every node; log if we encounter any errors
				#if CATCH_EXCEPTIONS
				try {
				#endif 
					
					if (nodes.ContainsKey(nodeInfo.title)) {
						throw new InvalidOperationException("Attempted to load a node called "+
							nodeInfo.title+", but a node with that name has already been loaded!");
					}

					var lexer = new Lexer ();
					var tokens = lexer.Tokenise (nodeInfo.title, nodeInfo.text);

					if (showTokens)
						PrintTokenList (tokens);

					var node = new Parser (tokens, library).Parse();

					// If this node is tagged "rawText", then preserve its source
					if (string.IsNullOrEmpty(nodeInfo.tags) == false && 
						nodeInfo.tags.Contains("rawText")) {
						node.source = nodeInfo.text;
					}

					node.name = nodeInfo.title;

					if (showParseTree)
						PrintParseTree(node);

					nodes[nodeInfo.title] = node;

					nodesLoaded++;

				#if CATCH_EXCEPTIONS
				} catch (Yarn.TokeniserException t) {
					// Add file information
					var message = string.Format ("In file {0}: Error reading node {1}: {2}", fileName, nodeInfo.title, t.Message);
					throw new Yarn.TokeniserException (message);
				} catch (Yarn.ParseException p) {
					var message = string.Format ("In file {0}: Error parsing node {1}: {2}", fileName, nodeInfo.title, p.Message);
					throw new Yarn.ParseException (message);
				} catch (InvalidOperationException e) {
					var message = string.Format ("In file {0}: Error reading node {1}: {2}", fileName, nodeInfo.title, e.Message);
					throw new InvalidOperationException (message);
				}
				#endif 


			}

			var compiler = new Yarn.Compiler(fileName);

			foreach (var node in nodes) {
				compiler.CompileNode (node.Value);
			}

			if (includeProgram != null) {
				compiler.program.Include (includeProgram);
			}

			return compiler.program;

		}
Developer: thesecretlab, Project: YarnSpinner, Lines: 82, Source: Loader.cs
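
For context, here is a hypothetical call site for the Load method above. The snippet does not show how a Loader instance is constructed, so the sketch takes one as a parameter; the Library and Program types are assumed to be reachable through the Yarn namespace, as the qualified Yarn.Compiler and Yarn.Parser references suggest.

using System.IO;
using Yarn;

static class LoaderUsageSketch
{
    // Compiles every node found in a Yarn source file into a single Program.
    public static Program CompileFile(Loader loader, Library library, string path)
    {
        string source = File.ReadAllText(path);

        return loader.Load(
            text: source,
            library: library,
            fileName: path,
            includeProgram: null,    // no existing program to merge in
            showTokens: false,       // set true to print each node's token list
            showParseTree: false,    // set true to print each node's parse tree
            onlyConsiderNode: null); // null loads every node in the file
    }
}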


Note: The Lexer.Tokenise method examples in this article were compiled by 纯净天空 from open-source code and documentation platforms such as GitHub and MSDocs. The code snippets are selected from open-source projects contributed by various developers, and copyright remains with the original authors; please consult each project's license before distributing or reusing the code. Do not reproduce this article without permission.