This article collects typical usage examples of the C# method Antlr3.Tool.Grammar.GetTokenType. If you are wondering how Grammar.GetTokenType is used in practice, the selected examples below should help; you can also look further into the containing class, Antlr3.Tool.Grammar.
Two code examples of Grammar.GetTokenType are shown below, ordered by popularity by default.
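Before the library examples, here is a minimal sketch of a direct GetTokenType lookup. It only uses members that also appear in the examples below (GetTokenType, Label.INVALID, Label.MIN_TOKEN_TYPE); the helper itself and the namespace assumed for Label are illustrative assumptions, not part of the library.

using System;
using Antlr3.Tool;
using Antlr3.Analysis;   // assumption: the Label constants live in this namespace

static class TokenTypeLookupDemo
{
    // Hypothetical helper: reports whether tokenName maps to a real token type
    // on an already-built Grammar instance.
    public static bool IsDefinedToken( Grammar g, string tokenName )
    {
        int tokenType = g.GetTokenType( tokenName );
        if ( tokenType == Label.INVALID )
            return false;                  // the name is unknown to the grammar
        // Values below MIN_TOKEN_TYPE are reserved (EOF and other faux types).
        return tokenType >= Label.MIN_TOKEN_TYPE;
    }
}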
Example 1: ImportTokenVocabulary
/** Pull your token definitions from an existing grammar in memory.
* You must use Grammar() ctor then this method then setGrammarContent()
* to make this work. This was useful primarily for testing and
* interpreting grammars until I added import grammar functionality.
* When you import a grammar you implicitly import its vocabulary as well
* and keep the same token type values.
*
* Returns the max token type found.
*/
public virtual int ImportTokenVocabulary( Grammar importFromGr )
{
    var importedTokenIDs = importFromGr.TokenIDs;
    foreach ( string tokenID in importedTokenIDs )
    {
        int tokenType = importFromGr.GetTokenType( tokenID );
        composite.MaxTokenType = Math.Max( composite.MaxTokenType, tokenType );
        if ( tokenType >= Label.MIN_TOKEN_TYPE )
        {
            //Console.Out.WriteLine( "import token from grammar " + tokenID + "=" + tokenType );
            DefineToken( tokenID, tokenType );
        }
    }
    return composite.MaxTokenType; // return max found
}
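A usage sketch for ImportTokenVocabulary, following the order the doc comment prescribes (Grammar() ctor, then the import, then load the grammar text). The SetGrammarContent spelling and the exact construction steps are assumptions based on that comment, not verified library calls.

// Hypothetical wiring: reuse a lexer grammar's vocabulary in a parser grammar.
Grammar vocabSource = new Grammar();                      // assumption: parameterless ctor
vocabSource.SetGrammarContent( "lexer grammar L;\n" +     // assumption: C# spelling of setGrammarContent()
                               "ID : 'a'..'z'+ ;\n" );

Grammar target = new Grammar();
int maxType = target.ImportTokenVocabulary( vocabSource ); // copies token types, returns the max found
target.SetGrammarContent( "parser grammar P;\n" +
                          "a : ID ;\n" );

// ID should keep the same token type value in both grammars after the import.
Console.WriteLine( target.GetTokenType( "ID" ) == vocabSource.GetTokenType( "ID" ) );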
Example 2: checkSymbols
//throws Exception
protected void checkSymbols( Grammar g,
                             string rulesStr,
                             string tokensStr )
{
    var tokens = g.GetTokenDisplayNames();

    // make sure expected tokens are there
    //StringTokenizer st = new StringTokenizer( tokensStr, ", " );
    //while ( st.hasMoreTokens() )
    foreach ( string tokenName in tokensStr.Split( new string[] { ", " }, StringSplitOptions.RemoveEmptyEntries ) )
    {
        //String tokenName = st.nextToken();
        Assert.IsTrue( g.GetTokenType( tokenName ) != Label.INVALID, "token " + tokenName + " expected" );
        tokens.Remove( tokenName );
    }

    // make sure there are not any others (other than <EOF> etc...)
    foreach ( string tokenName in tokens )
    {
        Assert.IsTrue( g.GetTokenType( tokenName ) < Label.MIN_TOKEN_TYPE, "unexpected token name " + tokenName );
    }

    // make sure all expected rules are there
    //st = new StringTokenizer( rulesStr, ", " );
    int n = 0;
    //while ( st.hasMoreTokens() )
    foreach ( string ruleName in rulesStr.Split( new string[] { ", " }, StringSplitOptions.RemoveEmptyEntries ) )
    {
        //String ruleName = st.nextToken();
        Assert.IsNotNull( g.GetRule( ruleName ), "rule " + ruleName + " expected" );
        n++;
    }

    var rules = g.Rules;
    //System.out.println("rules="+rules);
    // make sure there are no extra rules
    Assert.AreEqual( n, rules.Count, "number of rules mismatch; expecting " + n + "; found " + rules.Count );
}
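A hypothetical test that would exercise the helper above. The [TestMethod] attribute and the Grammar(string) constructor overload are assumptions about the surrounding test project, not taken from the snippet itself.

// Hypothetical MSTest case invoking the checkSymbols helper shown above.
[TestMethod]
public void TestParserSymbols()
{
    Grammar g = new Grammar(           // assumption: a ctor overload accepting grammar text
        "parser grammar t;\n" +
        "a : ID B ;\n" +
        "b : C ;\n" );
    checkSymbols( g, "a, b", "ID, B, C" );
}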