This article collects typical code examples of the Python lexer.Lexer.token method. If you are wondering how exactly Lexer.token is used, or what it looks like in practice, the curated examples below may help. You can also read further about the containing class, lexer.Lexer,
for more context.
The following shows 3 code examples of the Lexer.token method, ordered by popularity by default.
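Before the examples, here is a minimal sketch of the token-pull loop they all share. It is hypothetical: the constructor signature and the end-of-input convention vary between the projects below (example 1 passes the source to Lexer(...), example 3 feeds it via lexer.input(...), and example 2 compares against an l.EOF sentinel).

from lexer import Lexer

# Hypothetical driver loop: pull tokens until the lexer is exhausted.
# The end-of-input signal (None here) is an assumption; some lexers
# return a dedicated EOF token instead.
lexer = Lexer("Ident ( ) ;")
while True:
    tok = lexer.token()
    if tok is None:  # assumed end-of-input convention
        break
    print(tok.type)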
Example 1: __init__
# Required import: from lexer import Lexer [as alias]
# Alternatively: from lexer.Lexer import token [as alias]
class Parser:
    def __init__(self, prg, path="stdin"):
        self.lexer = Lexer(prg)
        self.current_program = path
        self.raw_prgstr = prg

    def parse(self):
        program = self.parse_program()
        program.value = self.current_program  # 'path' is only in scope in __init__
        program.type = ast.nodetype("PROGRAM")  # assumed; the listing had ast.ast(), which appears nowhere else
        return program
    def parse_program(self):
        program = ast.node("PROGRAM")
        pli = None
        first = True
        while True:
            li = self.parse_statement()
            if first:
                first = False
                program.nodes = li
            else:
                pli.next = li
            if li.node is None or li.node.type == ast.nodetype("ENDOFPROGRAM"):
                li.node = self.parse_fin()
                break
            pli = li
        return program
    def parse_statement(self):
        li = ast.ListItem.empty()
        t = self.lexer.token()
        self.lexer.putback()  # peek only: hand the token back to the sub-parser
        if t.type == ast.tokentype("TOKEN_FIN"):
            li.node = self.parse_fin()
            return li
        elif t.type == ast.tokentype("TOKEN_RET"):
            li.node = self.parse_ret()
        else:
            li.node = self.parse_expr(0)
        t = self.lexer.token()  # the listing had bare 'lexer.token()', a NameError
        if t.type != ast.tokentype("TOKEN_DOT"):
            raise CtrParserError(t, "Expected a dot (.).\n")
        return li
    def parse_fin(self):
        f = ast.Node.empty()
        self.lexer.token()  # consume the FIN token
        f.type = ast.nodetype("ENDOFPROGRAM")
        return f

    def parse_ret(self):
        li = ast.ListItem.empty()
        r = ast.Node.empty()
        self.lexer.token()  # consume the RET token
        r.type = ast.nodetype("RETURN")  # assumed completion; the original listing is truncated here
        r.value = self.parse_expr(0)     # assumed: a return carries the following expression
        li.node = r
        return li
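For context, a hedged sketch of how this parser might be driven. The source string, the path argument, and the attributes inspected are assumptions based only on the snippet above (the CtrParserError name suggests a Citron-like grammar where every statement ends with a dot):

# Hypothetical usage of the Parser class above; not part of the original listing.
source = "42."
parser = Parser(source, path="example.ctr")
tree = parser.parse()
print(tree.type, tree.value)  # expected: the PROGRAM node type and "example.ctr"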
Example 2: compile_snakth
# Required import: from lexer import Lexer [as alias]
# Alternatively: from lexer.Lexer import token [as alias]
import __future__
import ast
from pprint import pprint

from lexer import Lexer
# install_import_hook and Parser are assumed to come from the surrounding snakth package.

def compile_snakth(ex, fn='<string>', debug=False):
    # ensure that snakth imports from within snakth files work
    install_import_hook()
    if debug:
        # Dump the raw token stream before parsing.
        l = Lexer(ex)
        while True:
            x = l.token()
            pprint(x)
            if x == l.EOF:
                break
    l = Lexer(ex)
    p = Parser(l)
    p.run()
    if debug:
        print(ast.dump(p.tree))  # was a Python 2 print statement
    c = compile(p.tree, fn, 'exec', __future__.print_function.compiler_flag)
    return c
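A hedged usage sketch: the function returns a code object suitable for exec(), and fn only affects tracebacks. The snakth source string below is hypothetical and stands in for real snakth input:

# Assumed usage; "print('hello')" is a placeholder for actual snakth source.
code = compile_snakth("print('hello')", fn='<demo>', debug=False)
exec(code)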
Example 3: TestLexer
# Required import: from lexer import Lexer [as alias]
# Alternatively: from lexer.Lexer import token [as alias]
import unittest
# Lexer, MODE_EXCEPT and GPTreesError are assumed to come from the GPTrees project modules.

class TestLexer(unittest.TestCase):
    ''' Test GPTrees scanner
    '''
    def setUp(self):
        self.lexer = Lexer(mode=MODE_EXCEPT)

    def testError(self):
        ''' Test for invalid character
        '''
        self.lexer.input('$')
        self.assertRaises(GPTreesError, self.lexer.token)

    def testTokens(self):
        ''' Test for some tokens
        '''
        self.lexer.input("Ident ( ) 'string' , --> | ;")
        tokens = 'ID', 'LP', 'RP', 'STRING', 'COMMA', 'IS', 'OR', 'SC'
        for token in tokens:
            self.assertEqual(token, self.lexer.token().type)
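To run this test case on its own, the standard unittest entry point applies:

# Standard unittest runner; discovers TestLexer in this module.
if __name__ == '__main__':
    unittest.main()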