本文整理汇总了Python中pygments.token.Name.Function方法的典型用法代码示例。如果您正苦于以下问题:Python Name.Function方法的具体用法?Python Name.Function怎么用?Python Name.Function使用的例子?那么恭喜您, 这里精选的方法代码示例或许可以为您提供帮助。您也可以进一步了解该方法所在类pygments.token.Name
的用法示例。
在下文中一共展示了Name.Function方法的5个代码示例,这些例子默认根据受欢迎程度排序。您可以为喜欢或者感觉有用的代码点赞,您的评价将有助于系统推荐出更棒的Python代码示例。
示例1: test_can_cope_with_backtick_names_in_functions
# 需要导入模块: from pygments.token import Name [as 别名]
# 或者: from pygments.token.Name import Function [as 别名]
def test_can_cope_with_backtick_names_in_functions(lexer):
    """A backtick-quoted identifier after ``fun`` must lex as Name.Function,
    even when it contains a space."""
    fragment = u'fun `wo bble`'
    expected = [
        (Keyword, u'fun'), (Text, u' '),
        (Name.Function, u'`wo bble`'), (Text, u'\n'),
    ]
    assert expected == list(lexer.get_tokens(fragment))
示例2: test_can_cope_with_commas_and_dashes_in_backtick_Names
# 需要导入模块: from pygments.token import Name [as 别名]
# 或者: from pygments.token.Name import Function [as 别名]
def test_can_cope_with_commas_and_dashes_in_backtick_Names(lexer):
    """Punctuation (commas, dashes) inside a backtick-quoted function name
    stays part of the single Name.Function token."""
    fragment = u'fun `wo,-bble`'
    expected = [
        (Keyword, u'fun'), (Text, u' '),
        (Name.Function, u'`wo,-bble`'), (Text, u'\n'),
    ]
    assert expected == list(lexer.get_tokens(fragment))
示例3: test_can_cope_with_generics
# 需要导入模块: from pygments.token import Name [as 别名]
# 或者: from pygments.token.Name import Function [as 别名]
def test_can_cope_with_generics(lexer):
    """A generic extension-function declaration lexes into the expected
    stream: reified type parameter, receiver class, function name, and the
    generic return type."""
    fragment = u'inline fun <reified T : ContractState> VaultService.queryBy(): Vault.Page<T> {'
    expected = [
        # inline fun <reified T : ContractState>
        (Keyword, u'inline fun'), (Text, u' '),
        (Punctuation, u'<'), (Keyword, u'reified'), (Text, u' '),
        (Name, u'T'), (Text, u' '), (Punctuation, u':'), (Text, u' '),
        (Name, u'ContractState'), (Punctuation, u'>'), (Text, u' '),
        # VaultService.queryBy()
        (Name.Class, u'VaultService'), (Punctuation, u'.'),
        (Name.Function, u'queryBy'),
        (Punctuation, u'('), (Punctuation, u')'),
        # : Vault.Page<T> {
        (Punctuation, u':'), (Text, u' '),
        (Name, u'Vault'), (Punctuation, u'.'), (Name, u'Page'),
        (Punctuation, u'<'), (Name, u'T'), (Punctuation, u'>'),
        (Text, u' '), (Punctuation, u'{'),
        (Text, u'\n'),
    ]
    assert expected == list(lexer.get_tokens(fragment))
示例4: test_reserved_word
# 需要导入模块: from pygments.token import Name [as 别名]
# 或者: from pygments.token.Name import Function [as 别名]
def test_reserved_word(lexer):
    """``namespace`` lexes as a reserved keyword, and the first ``links``
    (the type signature line) is still recognised as Name.Function."""
    fragment = u'namespace Foobar\n links : String\n links = "abc"'
    expected = [
        # namespace Foobar
        (Keyword.Reserved, u'namespace'), (Text, u' '),
        (Keyword.Type, u'Foobar'), (Text, u'\n'),
        # links : String
        (Text, u' '), (Name.Function, u'links'), (Text, u' '),
        (Operator.Word, u':'), (Text, u' '),
        (Keyword.Type, u'String'), (Text, u'\n'),
        # links = "abc"
        (Text, u' '), (Text, u' '), (Text, u'links'), (Text, u' '),
        (Operator.Word, u'='), (Text, u' '),
        (Literal.String, u'"'), (Literal.String, u'abc'), (Literal.String, u'"'),
        (Text, u'\n'),
    ]
    assert expected == list(lexer.get_tokens(fragment))
示例5: test_call
# 需要导入模块: from pygments.token import Name [as 别名]
# 或者: from pygments.token.Name import Function [as 别名]
def test_call(lexer):
    """A simple call expression: the callee lexes as Name.Function and the
    bare-identifier argument as a plain Name."""
    fragment = u'f(1, a)\n'
    expected = [
        (Name.Function, u'f'), (Punctuation, u'('),
        (Token.Literal.Number, u'1'), (Punctuation, u','),
        (Token.Text, u' '), (Token.Name, u'a'),
        (Punctuation, u')'), (Token.Text, u'\n'),
    ]
    assert expected == list(lexer.get_tokens(fragment))