当前位置: 首页>>代码示例>>Python>>正文


Python PythonLexer.tokens方法代码示例

本文整理汇总了Python中pygments.lexers.python.PythonLexer.tokens方法的典型用法代码示例。如果您正苦于以下问题:Python PythonLexer.tokens方法的具体用法?Python PythonLexer.tokens怎么用?Python PythonLexer.tokens使用的例子?那么恭喜您, 这里精选的方法代码示例或许可以为您提供帮助。您也可以进一步了解该方法所在pygments.lexers.python.PythonLexer的用法示例。


在下文中一共展示了PythonLexer.tokens方法的7个代码示例,这些例子默认根据受欢迎程度排序。您可以为喜欢或者感觉有用的代码点赞,您的评价将有助于系统推荐出更棒的Python代码示例。

示例1: get_tokens_unprocessed

# 需要导入模块: from pygments.lexers.python import PythonLexer [as 别名]
# 或者: from pygments.lexers.python.PythonLexer import tokens [as 别名]
def get_tokens_unprocessed(self, text):
        """Lex *text* with the base RegexLexer, then run the result through
        the symbol and declaration post-processing passes, in that order."""
        base_stream = RegexLexer.get_tokens_unprocessed(self, text)
        return self._process_declarations(self._process_symbols(base_stream))
开发者ID:luckystarufo,项目名称:pySINDy,代码行数:7,代码来源:lisp.py

示例2: _process_declarations

# 需要导入模块: from pygments.lexers.python import PythonLexer [as 别名]
# 或者: from pygments.lexers.python.PythonLexer import tokens [as 别名]
def _process_declarations(self, tokens):
        """Pass tokens through unchanged, delegating declaration bodies.

        When a Keyword listed in ``self.DECLARATIONS`` immediately follows
        an opening parenthesis, the tokens that follow it are handed to
        ``_process_declaration``, which consumes them from the *same*
        iterator before this loop resumes.
        """
        opening_paren = False
        for index, token, value in tokens:
            yield index, token, value
            if self._relevant(token):
                if opening_paren and token == Keyword and value in self.DECLARATIONS:
                    declaration = value
                    # _process_declaration pulls from the shared `tokens`
                    # iterator, so the outer loop picks up where it stopped.
                    for index, token, value in \
                            self._process_declaration(declaration, tokens):
                        yield index, token, value
                # Only a Punctuation '(' arms the declaration check; any
                # other relevant token disarms it.
                opening_paren = value == '(' and token == Punctuation
开发者ID:luckystarufo,项目名称:pySINDy,代码行数:13,代码来源:lisp.py

示例3: _process_symbols

# 需要导入模块: from pygments.lexers.python import PythonLexer [as 别名]
# 或者: from pygments.lexers.python.PythonLexer import tokens [as 别名]
def _process_symbols(self, tokens):
        """Re-tag symbol tokens based on position in the stream.

        A Literal or Name.Variable right after an opening parenthesis is
        re-tagged via ``self.MAPPINGS`` (defaulting to Name.Function);
        Literals found in ``self.BUILTINS_ANYWHERE`` become Name.Builtin
        regardless of position.
        """
        after_open_paren = False
        for idx, tok, val in tokens:
            if after_open_paren and tok in (Literal, Name.Variable):
                tok = self.MAPPINGS.get(val, Name.Function)
            elif tok == Literal and val in self.BUILTINS_ANYWHERE:
                tok = Name.Builtin
            # Track whether the token we are about to emit opens a form.
            after_open_paren = tok == Punctuation and val == '('
            yield idx, tok, val
开发者ID:luckystarufo,项目名称:pySINDy,代码行数:11,代码来源:lisp.py

示例4: _process_declaration

# 需要导入模块: from pygments.lexers.python import PythonLexer [as 别名]
# 或者: from pygments.lexers.python.PythonLexer import tokens [as 别名]
def _process_declaration(self, declaration, tokens):
        """Re-tag the tokens forming one declaration.

        ``declaration`` is the keyword that introduced the form (e.g.
        'datatype', 'package', 'define'); ``tokens`` is the shared
        iterator this generator consumes from on behalf of its caller.
        """
        # Pass through irrelevant tokens until the declared name is seen.
        for index, token, value in tokens:
            if self._relevant(token):
                break
            yield index, token, value

        if declaration == 'datatype':
            prev_was_colon = False
            token = Keyword.Type if token == Literal else token
            yield index, token, value
            # Inside a datatype, a Literal following ':' names a type.
            for index, token, value in tokens:
                if prev_was_colon and token == Literal:
                    token = Keyword.Type
                yield index, token, value
                if self._relevant(token):
                    prev_was_colon = token == Literal and value == ':'
        elif declaration == 'package':
            token = Name.Namespace if token == Literal else token
            yield index, token, value
        elif declaration == 'define':
            token = Name.Function if token == Literal else token
            yield index, token, value
            # Skip irrelevant tokens up to a possible signature block.
            for index, token, value in tokens:
                if self._relevant(token):
                    break
                yield index, token, value
            if value == '{' and token == Literal:
                # '{' opens a signature; hand off to _process_signature.
                yield index, Punctuation, value
                for index, token, value in self._process_signature(tokens):
                    yield index, token, value
            else:
                yield index, token, value
        else:
            # Any other declaration kind: treat the name as a function.
            token = Name.Function if token == Literal else token
            yield index, token, value

        return
开发者ID:luckystarufo,项目名称:pySINDy,代码行数:39,代码来源:lisp.py

示例5: _process_signature

# 需要导入模块: from pygments.lexers.python import PythonLexer [as 别名]
# 或者: from pygments.lexers.python.PythonLexer import tokens [as 别名]
def _process_signature(self, tokens):
        """Re-tag tokens inside a '{...}' signature until the closing brace.

        Title-cased names become Name.Variable; other Literal or
        Name.Function tokens become Keyword.Type.  The closing '}' is
        emitted as Punctuation and ends the generator.
        """
        for idx, tok, val in tokens:
            if val == '}' and tok == Literal:
                # Closing brace terminates the signature block.
                yield idx, Punctuation, val
                return
            if tok in (Literal, Name.Function):
                tok = Name.Variable if val.istitle() else Keyword.Type
            yield idx, tok, val
开发者ID:luckystarufo,项目名称:pySINDy,代码行数:10,代码来源:lisp.py

示例6: _process_declaration

# 需要导入模块: from pygments.lexers.python import PythonLexer [as 别名]
# 或者: from pygments.lexers.python.PythonLexer import tokens [as 别名]
def _process_declaration(self, declaration, tokens):
        """Re-tag the tokens forming one declaration.

        ``declaration`` is the keyword that introduced the form (e.g.
        'datatype', 'package', 'define'); ``tokens`` is the shared
        iterator this generator consumes from on behalf of its caller.
        """
        # Pass through irrelevant tokens until the declared name is seen.
        for index, token, value in tokens:
            if self._relevant(token):
                break
            yield index, token, value

        if declaration == 'datatype':
            prev_was_colon = False
            token = Keyword.Type if token == Literal else token
            yield index, token, value
            # Inside a datatype, a Literal following ':' names a type.
            for index, token, value in tokens:
                if prev_was_colon and token == Literal:
                    token = Keyword.Type
                yield index, token, value
                if self._relevant(token):
                    prev_was_colon = token == Literal and value == ':'
        elif declaration == 'package':
            token = Name.Namespace if token == Literal else token
            yield index, token, value
        elif declaration == 'define':
            token = Name.Function if token == Literal else token
            yield index, token, value
            # Skip irrelevant tokens up to a possible signature block.
            for index, token, value in tokens:
                if self._relevant(token):
                    break
                yield index, token, value
            if value == '{' and token == Literal:
                # '{' opens a signature; hand off to _process_signature.
                yield index, Punctuation, value
                for index, token, value in self._process_signature(tokens):
                    yield index, token, value
            else:
                yield index, token, value
        else:
            # Any other declaration kind: treat the name as a function.
            token = Name.Function if token == Literal else token
            yield index, token, value

        # PEP 479 (Python 3.7+): `raise StopIteration` inside a generator
        # is converted to RuntimeError; a plain return ends the generator.
        return
开发者ID:wakatime,项目名称:komodo-wakatime,代码行数:39,代码来源:lisp.py

示例7: _process_signature

# 需要导入模块: from pygments.lexers.python import PythonLexer [as 别名]
# 或者: from pygments.lexers.python.PythonLexer import tokens [as 别名]
def _process_signature(self, tokens):
        """Re-tag tokens inside a '{...}' signature until the closing brace.

        Title-cased names become Name.Variable; other Literal or
        Name.Function tokens become Keyword.Type.  The closing '}' is
        emitted as Punctuation and ends the generator.
        """
        for index, token, value in tokens:
            if token == Literal and value == '}':
                yield index, Punctuation, value
                # PEP 479 (Python 3.7+): `raise StopIteration` inside a
                # generator becomes RuntimeError; use return instead.
                return
            elif token in (Literal, Name.Function):
                token = Name.Variable if value.istitle() else Keyword.Type
            yield index, token, value
开发者ID:wakatime,项目名称:komodo-wakatime,代码行数:10,代码来源:lisp.py


注:本文中的pygments.lexers.python.PythonLexer.tokens方法示例由纯净天空整理自Github/MSDocs等开源代码及文档管理平台,相关代码片段筛选自各路编程大神贡献的开源项目,源码版权归原作者所有,传播和使用请参考对应项目的License;未经允许,请勿转载。