当前位置: 首页>>代码示例>>Python>>正文


Python Lexer.tokenize方法代码示例

本文整理汇总了Python中lexer.Lexer.tokenize方法的典型用法代码示例。如果您正苦于以下问题:Python Lexer.tokenize方法的具体用法?Python Lexer.tokenize怎么用?Python Lexer.tokenize使用的例子?那么恭喜您, 这里精选的方法代码示例或许可以为您提供帮助。您也可以进一步了解该方法所在lexer.Lexer的用法示例。


在下文中一共展示了Lexer.tokenize方法的13个代码示例,这些例子默认根据受欢迎程度排序。您可以为喜欢或者感觉有用的代码点赞,您的评价将有助于系统推荐出更棒的Python代码示例。

示例1: test_var_property

# 需要导入模块: from lexer import Lexer [as 别名]
# 或者: from lexer.Lexer import tokenize [as 别名]
    def test_var_property(self):
        """A JS-object initializer on a `property var` is parsed as a property of type "var"."""
        src = """Item {
            property var varProperty: { "key1": "value1", "key2": "value2" }
            }"""

        lex = Lexer(src)
        lex.tokenize()
        parsed = QmlClass("Foo")
        qmlparser.parse(lex.tokens, parsed)

        prop = parsed.properties[0]
        self.assertEqual(prop.name, "varProperty")
        self.assertEqual(prop.type, "var")
开发者ID:nielsmadan,项目名称:doxyqml,代码行数:14,代码来源:qmlparsertestcase.py

示例2: test

# 需要导入模块: from lexer import Lexer [as 别名]
# 或者: from lexer.Lexer import tokenize [as 别名]
    def test(self):
        """Two function declarations inside an Item are both extracted, in source order."""
        src = "Item { function foo() {} function bar() {} }"
        lex = Lexer(src)
        lex.tokenize()
        parsed = QmlClass("Foo")
        qmlparser.parse(lex.tokens, parsed)

        self.assertEqual(parsed.base_name, "Item")

        self.assertEqual(len(parsed.functions), 2)
        self.assertEqual(parsed.functions[0].name, "foo")
        self.assertEqual(parsed.functions[1].name, "bar")
开发者ID:nielsmadan,项目名称:doxyqml,代码行数:14,代码来源:qmlparsertestcase.py

示例3: test_function_property

# 需要导入模块: from lexer import Lexer [as 别名]
# 或者: from lexer.Lexer import tokenize [as 别名]
    def test_function_property(self):
        """A function-expression initializer on a `property var` still yields type "var"."""
        src = """Item {
            property var fnProperty: function (arg1, arg2) { return arg1 + arg2; }
            }"""

        lex = Lexer(src)
        lex.tokenize()
        parsed = QmlClass("Foo")
        qmlparser.parse(lex.tokens, parsed)

        prop = parsed.properties[0]
        self.assertEqual(prop.name, "fnProperty")
        self.assertEqual(prop.type, "var")
开发者ID:nielsmadan,项目名称:doxyqml,代码行数:14,代码来源:qmlparsertestcase.py

示例4: test_keyword_arguments

# 需要导入模块: from lexer import Lexer [as 别名]
# 或者: from lexer.Lexer import tokenize [as 别名]
    def test_keyword_arguments(self):
        """Argument names starting with QML keywords ('property', 'signal') parse cleanly."""
        src = """Item {
                     function foo(propertyArgument, signalArgument) {
                         return propertyArgument + signalArgument;
                     }
                 }"""

        lex = Lexer(src)
        lex.tokenize()
        parsed = QmlClass("Foo")
        qmlparser.parse(lex.tokens, parsed)

        fn = parsed.functions[0]
        self.assertEqual(fn.name, "foo")
        self.assertEqual(fn.type, "void")
开发者ID:nielsmadan,项目名称:doxyqml,代码行数:16,代码来源:qmlparsertestcase.py

示例5: test_normal_arguments

# 需要导入模块: from lexer import Lexer [as 别名]
# 或者: from lexer.Lexer import tokenize [as 别名]
    def test_normal_arguments(self):
        """A plain two-argument function gets name "foo" and default return type "void"."""
        src = """Item {
                     function foo(arg1, arg2) {
                         return arg1 + arg2;
                     }
                 }"""

        lex = Lexer(src)
        lex.tokenize()
        parsed = QmlClass("Foo")
        qmlparser.parse(lex.tokens, parsed)

        fn = parsed.functions[0]
        self.assertEqual(fn.name, "foo")
        self.assertEqual(fn.type, "void")
开发者ID:nielsmadan,项目名称:doxyqml,代码行数:16,代码来源:qmlparsertestcase.py

示例6: main

# 需要导入模块: from lexer import Lexer [as 别名]
# 或者: from lexer.Lexer import tokenize [as 别名]
def main():
    """Tokenize the QML file named on the command line.

    Returns -1 on lexer failure; with --debug the LexerError is re-raised
    instead so the traceback is visible.
    """
    args = parse_args()

    name = args.qml_file
    # Context manager: the original `open(name).read()` leaked the file handle.
    with open(name) as f:
        text = f.read()

    lexer = Lexer(text)
    try:
        lexer.tokenize()
    # `except LexerError, exc` is Python-2-only syntax; `as` works on 2.6+ and 3.
    except LexerError as exc:
        logging.error("Failed to tokenize %s", name)
        row, msg = info_for_error_at(text, exc.idx)
        logging.error("Lexer error line %d: %s\n%s", row, exc, msg)
        if args.debug:
            raise
        else:
            return -1
开发者ID:hasselmm,项目名称:doxyqml,代码行数:19,代码来源:doxyqml.py

示例7: main

# 需要导入模块: from lexer import Lexer [as 别名]
# 或者: from lexer.Lexer import tokenize [as 别名]
def main():
    """Tokenize the QML file given as the single positional argument.

    Returns -1 on lexer failure; with --debug the LexerError is re-raised.
    """
    opt_parser = create_opt_parser()
    options, args = opt_parser.parse_args()
    if len(args) != 1:
        opt_parser.error("Invalid number of arguments")

    name = args[0]
    # Context manager: the original `open(name).read()` leaked the file handle.
    with open(name) as f:
        text = f.read()

    lexer = Lexer(text)
    try:
        lexer.tokenize()
    # `except LexerError, exc` is Python-2-only syntax; `as` works on 2.6+ and 3.
    except LexerError as exc:
        logging.error("Failed to tokenize %s", name)
        row, msg = info_for_error_at(text, exc.idx)
        logging.error("Lexer error line %d: %s\n%s", row, exc, msg)
        if options.debug:
            raise
        else:
            return -1
开发者ID:IceCreamWarrior,项目名称:doxyqml,代码行数:22,代码来源:doxyqml.py

示例8: test_default_property

# 需要导入模块: from lexer import Lexer [as 别名]
# 或者: from lexer.Lexer import tokenize [as 别名]
    def test_default_property(self):
        """`default property` sets is_default and keeps its doc; a plain property does not."""
        src = """Item {
            /// v1 doc
            default property int v1
            /// v2 doc
            property int v2
            }"""
        lexer = Lexer(src)
        lexer.tokenize()
        qmlclass = QmlClass("Foo")
        qmlparser.parse(lexer.tokens, qmlclass)

        v1 = qmlclass.properties[0]
        self.assertEqual(v1.name, "v1")
        self.assertEqual(v1.type, "int")
        self.assertEqual(v1.doc, "/// v1 doc")
        # assert_() is a deprecated alias (removed in Python 3.12); use
        # assertTrue/assertFalse for clearer intent and failure messages.
        self.assertTrue(v1.is_default)

        v2 = qmlclass.properties[1]
        self.assertEqual(v2.name, "v2")
        self.assertEqual(v2.type, "int")
        self.assertEqual(v2.doc, "/// v2 doc")
        self.assertFalse(v2.is_default)
开发者ID:nielsmadan,项目名称:doxyqml,代码行数:23,代码来源:qmlparsertestcase.py

示例9: test_readonly_property

# 需要导入模块: from lexer import Lexer [as 别名]
# 或者: from lexer.Lexer import tokenize [as 别名]
    def test_readonly_property(self):
        """`readonly property` sets is_readonly; a plain property does not."""
        src = """Item {
            /// v1 doc
            readonly property int v1
            /// v2 doc
            property int v2
            }"""
        lexer = Lexer(src)
        lexer.tokenize()
        qmlclass = QmlClass("Foo")
        qmlparser.parse(lexer.tokens, qmlclass)

        properties = qmlclass.get_properties()
        v1 = properties[0]
        self.assertEqual(v1.name, "v1")
        self.assertEqual(v1.type, "int")
        self.assertEqual(v1.doc, "/// v1 doc")
        # assert_() is a deprecated alias (removed in Python 3.12); use
        # assertTrue/assertFalse for clearer intent and failure messages.
        self.assertTrue(v1.is_readonly)

        v2 = properties[1]
        self.assertEqual(v2.name, "v2")
        self.assertEqual(v2.type, "int")
        self.assertEqual(v2.doc, "/// v2 doc")
        self.assertFalse(v2.is_readonly)
开发者ID:CedricCabessa,项目名称:doxyqml,代码行数:24,代码来源:qmlparsertestcase.py

示例10: main

# 需要导入模块: from lexer import Lexer [as 别名]
# 或者: from lexer.Lexer import tokenize [as 别名]
def main():
    """Entry point: lex and parse a QML file, then print the extracted class.

    Returns 0 on success, -1 on lexer or parser errors; with --debug the
    original exception is re-raised instead and tokens are dumped to stdout.
    """
    args = parse_args()

    name = args.qml_file
    # Context manager: the original `open(name).read()` leaked the file handle.
    with open(name) as f:
        text = f.read()

    lexer = Lexer(text)
    try:
        lexer.tokenize()
    except LexerError as exc:
        logging.error("Failed to tokenize %s", name)
        row, msg = info_for_error_at(text, exc.idx)
        logging.error("Lexer error line %d: %s\n%s", row, exc, msg)
        if args.debug:
            raise
        else:
            return -1

    if args.debug:
        for token in lexer.tokens:
            print("%20s %s" % (token.type, token.value))

    classname, classversion = find_classname(name)
    qml_class = QmlClass(classname, classversion)

    try:
        qmlparser.parse(lexer.tokens, qml_class)
    except qmlparser.QmlParserError as exc:
        logging.error("Failed to parse %s", name)
        row, msg = info_for_error_at(text, exc.token.idx)
        # Was "Lexer error ..." — a copy-paste from the lexer branch above;
        # this branch reports a *parser* error.
        logging.error("Parser error line %d: %s\n%s", row, exc, msg)
        if args.debug:
            raise
        else:
            return -1

    print(qml_class)

    return 0
开发者ID:davidedmundson,项目名称:doxyqml,代码行数:41,代码来源:doxyqml.py

示例11: test

# 需要导入模块: from lexer import Lexer [as 别名]
# 或者: from lexer.Lexer import tokenize [as 别名]
def test(text):
    print
    print text
    lexer = Lexer()
    tokens = lexer.tokenize(text)
    print tokens

    parser = Parser()
    ast = parser.parse(tokens)
    print ast
    print fmt(ast)

    interpreter = Interpreter()
    interpreter.interpret(ast, None)
开发者ID:grp,项目名称:HACKERLANG,代码行数:16,代码来源:test.py

示例12: __init__

# 需要导入模块: from lexer import Lexer [as 别名]
# 或者: from lexer.Lexer import tokenize [as 别名]
class Parser:
	def __init__(self):
		"""Create a parser with its own fresh Lexer."""
		self.lexer = Lexer()

	def getLexer(self):
		"""Return the parser's lexer."""
		return self.lexer

	def setLexer(self, val):
		"""Replace the parser's lexer with *val*."""
		self.lexer = val

	def parsePrgm(self, filename):
		"""Parse the program in *filename*: tokenize its lines, then consume
		instructions until the lexer's token stream is exhausted."""
		# Context manager: the original `open(filename)` never closed the file.
		with open(filename) as f:
			ch = f.readlines()
		self.lexer.tokenize(ch)
		print("+++ PARSER +++")
		while (self.getLexer().getStream() != []):
			self.parseInstruction()

	def expect(self, token_kind):
		"""Consume the next token and return it; abort the process (exit 1)
		if its kind differs from *token_kind*."""
		tok = self.lexer.getNext()
		if tok.kind == token_kind:
			return tok
		print("\nParsing error on line {0} pos {1}".format(tok.pos[0], tok.pos[1]))
		print("Expecting "+token_kind+", got "+tok.kind)
		sys.exit(1)

	def showNext(self):
		"""Return the upcoming token (delegates to the lexer; presumably a
		non-consuming peek, given how callers pair it with expect() — confirm)."""
		return self.lexer.showNext()

	def acceptIt(self):
		"""Unconditionally consume and return the next token."""
		return self.lexer.getNext()

	def parseInstruction(self):
		"""Parse one instruction: a typed declaration or a print statement.

		Aborts the process with a nonzero status on an unexpected token.
		NOTE(review): 'ang' is accepted here but parseDeclaration() has no
		'ang' branch, so such a token would hit its error path — confirm.
		"""
		print("Parse instruction")
		if self.showNext().kind in ["pnt", "vect", "line", "seg", "ang", "scal", "string"]:
			self.parseDeclaration()
		#elif self.showNext().kind == 'ident':
			#self.parseAffectation()
		elif self.showNext().kind in ["print"]:
			self.parsePrint()
		else:
			print ("GOSH! Parsing error for Instruction ligne : ", self.showNext().pos)
			# Exit nonzero on failure (was sys.exit(0), which reports success
			# to the shell); matches the convention used in expect().
			sys.exit(1)

	def parseDeclaration(self):
		"""Dispatch to the declaration parser matching the next token's kind.

		Aborts the process with a nonzero status on an unrecognized kind.
		"""
		print ("Parse declaration")
		if self.showNext().kind == 'pnt':
			self.parseDeclPnt()
		elif self.showNext().kind == 'line':
			self.parseDeclLine()
		elif self.showNext().kind == 'vect':
			self.parseDeclVect()
		elif self.showNext().kind == 'seg':
			self.parseDeclSeg()
		elif self.showNext().kind == 'scal':
			self.parseDeclScal()
		elif self.showNext().kind == 'string':
			self.parseDeclString()
		else:
			print ("GOSH! Parsing error for Declaration ligne : ", self.showNext().pos)
			# Exit nonzero on failure (was sys.exit(0), which reports success
			# to the shell); matches the convention used in expect().
			sys.exit(1)

	def parseDeclPnt(self):
		"""Parse a point declaration: pnt <ident> [colon <point-expr>] excl."""
		print ("Parse declaration point")
		self.expect('pnt')
		self.expect('ident')
		if self.showNext().kind == 'excl':
			self.expect('excl')
			return
		self.expect('colon')
		self.parseObjPnt()
		self.expect('excl')

	def parseDeclLine(self):
		"""Parse a line declaration: line <ident> [colon <line-expr>] excl."""
		print ("Parse declaration line")
		self.expect('line')
		self.expect('ident')
		if self.showNext().kind == 'excl':
			self.expect('excl')
			return
		self.expect('colon')
		self.parseObjLine()
		self.expect('excl')

	def parseDeclVect(self):
		"""Parse a vector declaration: vect <ident> [colon <vect-expr>] excl."""
		print ("Parse declaration vector")
		self.expect('vect')
		self.expect('ident')
		if self.showNext().kind == 'excl':
			self.expect('excl')
			return
		self.expect('colon')
		self.parseObjVect()
		self.expect('excl')

	def parseDeclSeg(self):
		"""Parse a segment declaration: seg <ident> [colon <seg-expr>] excl."""
		print ("Parse declaration segment")
		self.expect('seg')
		self.expect('ident')
		if self.showNext().kind == 'excl':
			self.expect('excl')
			return
		self.expect('colon')
		self.parseObjSeg()
		self.expect('excl')

	def parseDeclScal(self):
#.........这里部分代码省略.........
开发者ID:OpesMentis,项目名称:GeoLanguage,代码行数:103,代码来源:parser.py

示例13: test

# 需要导入模块: from lexer import Lexer [as 别名]
# 或者: from lexer.Lexer import tokenize [as 别名]
def test():
    """Prompt for an expression on stdin and run the lexer over it."""
    # NOTE: Python 2 source — raw_input kept as-is.
    expression = raw_input("Expression: ")
    lexer = Lexer()
    lexer.tokenize(expression)
开发者ID:acrognale,项目名称:anpl,代码行数:7,代码来源:lexer_test.py


注:本文中的lexer.Lexer.tokenize方法示例由纯净天空整理自Github/MSDocs等开源代码及文档管理平台,相关代码片段筛选自各路编程大神贡献的开源项目,源码版权归原作者所有,传播和使用请参考对应项目的License;未经允许,请勿转载。