本文整理汇总了Python中Lexer.Lexer类的典型用法代码示例。如果您正苦于以下问题：Python Lexer类的具体用法？Python Lexer怎么用？Python Lexer使用的例子？那么恭喜您，这里精选的类代码示例或许可以为您提供帮助。
在下文中一共展示了Lexer类的15个代码示例,这些例子默认根据受欢迎程度排序。您可以为喜欢或者感觉有用的代码点赞,您的评价将有助于系统推荐出更棒的Python代码示例。
示例1: test_math_symbols
def test_math_symbols():
    """Each of the four arithmetic symbols lexes to its operator token."""
    lexer = Lexer('+ - * /')
    operator_types = [TokenTypes.ADD, TokenTypes.SUB, TokenTypes.MUL, TokenTypes.DIV]
    for token_type in operator_types:
        assert lexer.next_token() == Token(token_type)
示例2: test_next_token
def test_next_token():
    """The input '1+ 3' yields INT(1), ADD, INT(3) and finally EOF."""
    lexer = Lexer('1+ 3')
    expected_tokens = [
        Token(TokenTypes.INT, 1),
        Token(TokenTypes.ADD),
        Token(TokenTypes.INT, 3),
        Token(TokenTypes.EOF),
    ]
    for want in expected_tokens:
        assert lexer.next_token() == want
示例3: __init__
def __init__(self, parent=None):
    """
    Constructor for the SQL lexer.

    Initializes both base classes explicitly: the Scintilla SQL lexer
    (which receives the Qt parent) and the application's Lexer mixin.

    @param parent parent widget of this lexer
    """
    # Explicit base-class calls rather than super(): the two bases take
    # different arguments, and the call order is preserved deliberately.
    QsciLexerSQL.__init__(self, parent)
    Lexer.__init__(self)
示例4: test_skip_whitespace
def test_skip_whitespace():
    """Whitespace between lexemes is skipped and never produces tokens."""
    lexer = Lexer('1 +3 9')
    expected_tokens = [
        Token(TokenTypes.INT, 1),
        Token(TokenTypes.ADD),
        Token(TokenTypes.INT, 3),
        Token(TokenTypes.INT, 9),
        Token(TokenTypes.EOF),
    ]
    for want in expected_tokens:
        assert lexer.next_token() == want
示例5: getTokens
def getTokens(self, filename):
    """Lex *filename* and return the token stream produced by the lexer.

    @param filename path of the source file to tokenize
    @return the result of Lexer.fileScan (the token list)

    On any lexing failure the error message is written to stderr and the
    process exits with status 1 instead of propagating the exception.
    """
    lexer = Lexer()
    try:
        tokens = lexer.fileScan(filename)
    except Exception as inst:  # fix: `except E, e` is Python-2-only syntax
        sys.stderr.write(str(inst) + '\n')
        sys.exit(1)
    # fix: the original never returned the tokens its docstring promised
    return tokens
示例6: evaluate
def evaluate(self):
    """Lex self.input, parse the resulting tokens, and return the value."""
    # Tokenize, then hand the token stream straight to the parser.
    tokens = Lexer(self.input).lex()
    return Parser(tokens).parse()
示例7: testScanNegativeInteger
def testScanNegativeInteger(self):
    """A negative integer literal is scanned as a single INT token."""
    scanner = Lexer()
    scanner.input("def number = -102")
    tokens, errors = scanner.allTokens()
    self.assertEqual(errors, [])  # scanning must be error-free
    self.assertEqual(types(tokens), 'DEF ID = INT')  # expected type sequence
示例8: __init__
def __init__(self, parent=None):
    """
    Constructor for the VHDL lexer.

    Initializes both base classes explicitly and records the VHDL
    line-comment prefix used by the editor.

    @param parent parent widget of this lexer
    """
    QsciLexerVHDL.__init__(self, parent)
    Lexer.__init__(self)
    # VHDL single-line comments start with "--"
    self.commentString = QString("--")
示例9: testScanVariable
def testScanVariable(self):
    """Binding one identifier to another scans as DEF ID = ID."""
    scanner = Lexer()
    scanner.input("def x = y")
    tokens, errors = scanner.allTokens()
    self.assertEqual(errors, [])  # scanning must be error-free
    self.assertEqual(types(tokens), 'DEF ID = ID')  # expected type sequence
示例10: testScanBooleanTrue
def testScanBooleanTrue(self):
    """The literal `true` is recognized as a BOOL token."""
    scanner = Lexer()
    scanner.input("def boolean = true")
    tokens, errors = scanner.allTokens()
    self.assertEqual(errors, [])  # scanning must be error-free
    self.assertEqual(types(tokens), 'DEF ID = BOOL')  # expected type sequence
示例11: testScanString
def testScanString(self):
    """A double-quoted literal scans as a single STRING token."""
    scanner = Lexer()
    scanner.input("def string = \"hello, world!\"")
    tokens, errors = scanner.allTokens()
    self.assertEqual(errors, [])  # scanning must be error-free
    self.assertEqual(types(tokens), 'DEF ID = STRING')  # expected type sequence
示例12: testScanLambda
def testScanLambda(self):
    """The `lambda` keyword and its parameter list scan correctly."""
    scanner = Lexer()
    scanner.input("def f(x) = lambda(y) x")
    tokens, errors = scanner.allTokens()
    self.assertEqual(errors, [])  # scanning must be error-free
    self.assertEqual(types(tokens), 'DEF ID ( ID ) = LAMBDA ( ID ) ID')  # expected type sequence
示例13: testScanList
def testScanList(self):
    """Square-bracketed elements scan as list punctuation plus IDs."""
    scanner = Lexer()
    scanner.input("def t = [a, b]")
    tokens, errors = scanner.allTokens()
    self.assertEqual(errors, [])  # scanning must be error-free
    self.assertEqual(types(tokens), 'DEF ID = [ ID , ID ]')  # expected type sequence
示例14: testScanTuple
def testScanTuple(self):
    """Parenthesized elements scan as tuple punctuation plus IDs."""
    scanner = Lexer()
    scanner.input("def t = (a, b)")
    tokens, errors = scanner.allTokens()
    self.assertEqual(errors, [])  # scanning must be error-free
    self.assertEqual(types(tokens), 'DEF ID = ( ID , ID )')  # expected type sequence
示例15: testScanFunction
def testScanFunction(self):
    """A function definition header scans as DEF ID ( ID ) = ID."""
    scanner = Lexer()
    scanner.input("def f(x) = x")
    tokens, errors = scanner.allTokens()
    self.assertEqual(errors, [])  # scanning must be error-free
    self.assertEqual(types(tokens), 'DEF ID ( ID ) = ID')  # expected type sequence