This article collects typical usage examples of pygments.token.Number.Integer in Python. If you are wondering what Number.Integer is for and how it is used in practice, the curated code examples below should help; you can also explore further usage examples of its containing class, pygments.token.Number.
Thirteen code examples of Number.Integer are shown below, sorted by popularity by default.
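Before the examples themselves, here is a minimal self-contained sketch of the pattern they all share: feed a fragment to a lexer's get_tokens() and compare the resulting (token type, value) pairs against Number.Integer. The PythonLexer is chosen purely for illustration (the examples below come from various lexers' test suites), and the whitespace token emitted for the trailing newline varies across Pygments versions, so the assertions only look for the integer tokens.

from pygments.lexers import PythonLexer
from pygments.token import Number

# Lex a tiny fragment into a list of (token_type, value) pairs.
tokens = list(PythonLexer().get_tokens('42 + 7\n'))

# Both integer literals are tagged as Number.Integer.
assert (Number.Integer, '42') in tokens
assert (Number.Integer, '7') in tokens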
Example 1: test_gcd_expr
# Required import:
from pygments.token import Token

def test_gcd_expr(lexer):
    fragment = u'1^3+(5-5)*gcd(a,b)\n'
    tokens = [
        (Token.Number.Integer, u'1'),
        (Token.Operator, u'^'),
        (Token.Literal.Number.Integer, u'3'),
        (Token.Operator, u'+'),
        (Token.Punctuation, u'('),
        (Token.Literal.Number.Integer, u'5'),
        (Token.Operator, u'-'),
        (Token.Literal.Number.Integer, u'5'),
        (Token.Punctuation, u')'),
        (Token.Operator, u'*'),
        (Token.Name, u'gcd'),
        (Token.Punctuation, u'('),
        (Token.Name, u'a'),
        (Token.Operator, u','),
        (Token.Name, u'b'),
        (Token.Punctuation, u')'),
        (Token.Text, u'\n')
    ]
    assert list(lexer.get_tokens(fragment)) == tokens
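Note that Example 1 mixes the spellings Token.Number.Integer and Token.Literal.Number.Integer; this is not a typo. pygments.token aliases Token.Number to Token.Literal.Number (Number is not a direct child of Token), so both names resolve to the same token type:

from pygments.token import Number, Token

# Token.Number is an alias for Token.Literal.Number, so all
# three spellings name the very same token type object.
assert Token.Number.Integer is Token.Literal.Number.Integer
assert Number.Integer is Token.Literal.Number.Integer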
Example 2: test_can_lex_integer
# Required import:
from pygments.token import Number

def test_can_lex_integer(lexer):
    _assert_are_tokens_of_type(lexer, '1 23 456', Number.Integer)
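The helper _assert_are_tokens_of_type is defined elsewhere in the test module and is not part of this excerpt. A plausible minimal reconstruction, assuming it lexes each whitespace-separated example and requires every non-whitespace token to have the expected type, would be:

def _assert_are_tokens_of_type(lexer, examples, expected_token_type):
    # Lex each whitespace-separated example independently and check
    # that every non-blank token it yields has the expected type.
    for example in examples.split():
        for token_type, value in lexer.get_tokens(example):
            if value.strip():
                assert token_type == expected_token_type, \
                    '%r lexed as %s, expected %s' % (
                        value, token_type, expected_token_type)

Example 8 below calls an identically shaped helper, assert_are_tokens_of_type, which presumably behaves the same way.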
Example 3: test_sum
# Required import:
from pygments.token import Number, Operator, Text

def test_sum(lexer):
    fragment = u'1+3\n'
    tokens = [
        (Number.Integer, u'1'),
        (Operator, u'+'),
        (Number.Integer, u'3'),
        (Text, u'\n'),
    ]
    assert list(lexer.get_tokens(fragment)) == tokens
Example 4: test_if_statement
# Required import:
from pygments.token import Token

def test_if_statement(lexer):
    fragment = u"""@( 0 > 3 ) ஆனால்
 பதிப்பி "wont print"
முடி"""
    tokens = [
        (Token.Operator, u'@'),
        (Token.Punctuation, u'('),
        (Token.Text, u' '),
        (Token.Literal.Number.Integer, u'0'),
        (Token.Text, u' '),
        (Token.Operator, u'>'),
        (Token.Text, u' '),
        (Token.Literal.Number.Integer, u'3'),
        (Token.Text, u' '),
        (Token.Punctuation, u')'),
        (Token.Text, u' '),
        (Token.Keyword, u'ஆனால்'),
        (Token.Text, u'\n'),
        (Token.Text, u' '),
        (Token.Keyword, u'பதிப்பி'),
        (Token.Text, u' '),
        (Token.Literal.String, u'"wont print"'),
        (Token.Text, u'\n'),
        (Token.Keyword, u'முடி'),
        (Token.Text, u'\n')
    ]
    assert list(lexer.get_tokens(fragment)) == tokens
Example 5: test_range_syntax1
# Required import:
from pygments.token import Number, Operator, Text

def test_range_syntax1(lexer):
    fragment = u'1..3\n'
    tokens = [
        (Number.Integer, u'1'),
        (Operator, u'..'),
        (Number.Integer, u'3'),
        (Text, u'\n'),
    ]
    assert list(lexer.get_tokens(fragment)) == tokens
Example 6: test_range_syntax3
# Required import:
from pygments.token import Number, Operator, Text

def test_range_syntax3(lexer):
    fragment = u'1 .. 3\n'
    tokens = [
        (Number.Integer, u'1'),
        (Text, u' '),
        (Operator, u'..'),
        (Text, u' '),
        (Number.Integer, u'3'),
        (Text, u'\n'),
    ]
    assert list(lexer.get_tokens(fragment)) == tokens
Example 7: test_operator_methods
# Required import:
from pygments.token import Token

def test_operator_methods(lexer):
    fragment = u'x.==4\n'
    tokens = [
        (Token.Name, u'x'),
        (Token.Operator, u'.'),
        (Token.Name.Operator, u'=='),
        (Token.Literal.Number.Integer, u'4'),
        (Token.Text, u'\n'),
    ]
    assert list(lexer.get_tokens(fragment)) == tokens
Example 8: test_can_lex_integer
# Required import:
from pygments.token import Number

def test_can_lex_integer(lexer):
    assert_are_tokens_of_type(lexer, '1 23 456', Number.Integer)
Example 9: test_numeric_literals
# Required import:
from pygments.token import Number, Text

def test_numeric_literals(lexer):
    fragment = '0 5L 9__542_72l 0xbEEf 0X9_A 0_35 01 0b0___101_0'
    fragment += ' 0. .7_17F 3e-1_3d 1f 6_01.9e+3 0x.1Fp3 0XEP8D\n'
    tokens = [
        (Number.Integer, '0'),
        (Text, ' '),
        (Number.Integer, '5L'),
        (Text, ' '),
        (Number.Integer, '9__542_72l'),
        (Text, ' '),
        (Number.Hex, '0xbEEf'),
        (Text, ' '),
        (Number.Hex, '0X9_A'),
        (Text, ' '),
        (Number.Oct, '0_35'),
        (Text, ' '),
        (Number.Oct, '01'),
        (Text, ' '),
        (Number.Bin, '0b0___101_0'),
        (Text, ' '),
        (Number.Float, '0.'),
        (Text, ' '),
        (Number.Float, '.7_17F'),
        (Text, ' '),
        (Number.Float, '3e-1_3d'),
        (Text, ' '),
        (Number.Float, '1f'),
        (Text, ' '),
        (Number.Float, '6_01.9e+3'),
        (Text, ' '),
        (Number.Float, '0x.1Fp3'),
        (Text, ' '),
        (Number.Float, '0XEP8D'),
        (Text, '\n')
    ]
    assert list(lexer.get_tokens(fragment)) == tokens
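Example 9 exercises the whole Number hierarchy: Integer, Hex, Oct, Bin and Float are sibling subtypes of Number. Token types nest, and the subtype relation can be tested with the in operator, as this small sketch demonstrates:

from pygments.token import Number, Token

# Token types are tuples under the hood; `in` tests the
# ancestor/descendant relation between them.
assert Number.Integer in Number
assert Number.Hex in Number
assert Number.Float in Token.Literal
assert Number.Integer not in Number.Float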
Example 10: test_label_followed_by_statement
# Required imports:
import textwrap

from pygments.token import Token

def test_label_followed_by_statement(lexer):
    fragment = u'''\
int main()
{
foo:return 0;
 goto foo;
}
'''
    tokens = [
        (Token.Keyword.Type, u'int'),
        (Token.Text, u' '),
        (Token.Name.Function, u'main'),
        (Token.Punctuation, u'('),
        (Token.Punctuation, u')'),
        (Token.Text, u'\n'),
        (Token.Punctuation, u'{'),
        (Token.Text, u'\n'),
        (Token.Name.Label, u'foo'),
        (Token.Punctuation, u':'),
        (Token.Keyword, u'return'),
        (Token.Text, u' '),
        (Token.Literal.Number.Integer, u'0'),
        (Token.Punctuation, u';'),
        (Token.Text, u'\n'),
        (Token.Text, u' '),
        (Token.Keyword, u'goto'),
        (Token.Text, u' '),
        (Token.Name, u'foo'),
        (Token.Punctuation, u';'),
        (Token.Text, u'\n'),
        (Token.Punctuation, u'}'),
        (Token.Text, u'\n'),
    ]
    assert list(lexer.get_tokens(textwrap.dedent(fragment))) == tokens
Example 11: test_interpolation_nested_curly
# Required import:
from pygments.token import Token

def test_interpolation_nested_curly(lexer):
    fragment = (
        u'"A#{ (3..5).group_by { |x| x/2}.map '
        u'do |k,v| "#{k}" end.join }" + "Z"\n')
    tokens = [
        (Token.Literal.String.Double, u'"'),
        (Token.Literal.String.Double, u'A'),
        (Token.Literal.String.Interpol, u'#{'),
        (Token.Text, u' '),
        (Token.Punctuation, u'('),
        (Token.Literal.Number.Integer, u'3'),
        (Token.Operator, u'..'),
        (Token.Literal.Number.Integer, u'5'),
        (Token.Punctuation, u')'),
        (Token.Operator, u'.'),
        (Token.Name, u'group_by'),
        (Token.Text, u' '),
        (Token.Literal.String.Interpol, u'{'),
        (Token.Text, u' '),
        (Token.Operator, u'|'),
        (Token.Name, u'x'),
        (Token.Operator, u'|'),
        (Token.Text, u' '),
        (Token.Name, u'x'),
        (Token.Operator, u'/'),
        (Token.Literal.Number.Integer, u'2'),
        (Token.Literal.String.Interpol, u'}'),
        (Token.Operator, u'.'),
        (Token.Name, u'map'),
        (Token.Text, u' '),
        (Token.Keyword, u'do'),
        (Token.Text, u' '),
        (Token.Operator, u'|'),
        (Token.Name, u'k'),
        (Token.Punctuation, u','),
        (Token.Name, u'v'),
        (Token.Operator, u'|'),
        (Token.Text, u' '),
        (Token.Literal.String.Double, u'"'),
        (Token.Literal.String.Interpol, u'#{'),
        (Token.Name, u'k'),
        (Token.Literal.String.Interpol, u'}'),
        (Token.Literal.String.Double, u'"'),
        (Token.Text, u' '),
        (Token.Keyword, u'end'),
        (Token.Operator, u'.'),
        (Token.Name, u'join'),
        (Token.Text, u' '),
        (Token.Literal.String.Interpol, u'}'),
        (Token.Literal.String.Double, u'"'),
        (Token.Text, u' '),
        (Token.Operator, u'+'),
        (Token.Text, u' '),
        (Token.Literal.String.Double, u'"'),
        (Token.Literal.String.Double, u'Z'),
        (Token.Literal.String.Double, u'"'),
        (Token.Text, u'\n'),
    ]
    assert list(lexer.get_tokens(fragment)) == tokens
Example 12: test_switch
# Required imports:
import textwrap

from pygments.token import Token

def test_switch(lexer):
    fragment = u'''\
int main()
{
 switch (0)
 {
 case 0:
 default:
 ;
 }
}
'''
    tokens = [
        (Token.Keyword.Type, u'int'),
        (Token.Text, u' '),
        (Token.Name.Function, u'main'),
        (Token.Punctuation, u'('),
        (Token.Punctuation, u')'),
        (Token.Text, u'\n'),
        (Token.Punctuation, u'{'),
        (Token.Text, u'\n'),
        (Token.Text, u' '),
        (Token.Keyword, u'switch'),
        (Token.Text, u' '),
        (Token.Punctuation, u'('),
        (Token.Literal.Number.Integer, u'0'),
        (Token.Punctuation, u')'),
        (Token.Text, u'\n'),
        (Token.Text, u' '),
        (Token.Punctuation, u'{'),
        (Token.Text, u'\n'),
        (Token.Text, u' '),
        (Token.Keyword, u'case'),
        (Token.Text, u' '),
        (Token.Literal.Number.Integer, u'0'),
        (Token.Operator, u':'),
        (Token.Text, u'\n'),
        (Token.Text, u' '),
        (Token.Keyword, u'default'),
        (Token.Operator, u':'),
        (Token.Text, u'\n'),
        (Token.Text, u' '),
        (Token.Punctuation, u';'),
        (Token.Text, u'\n'),
        (Token.Text, u' '),
        (Token.Punctuation, u'}'),
        (Token.Text, u'\n'),
        (Token.Punctuation, u'}'),
        (Token.Text, u'\n'),
    ]
    assert list(lexer.get_tokens(textwrap.dedent(fragment))) == tokens
Example 13: test_switch_space_before_colon
# Required imports:
import textwrap

from pygments.token import Token

def test_switch_space_before_colon(lexer):
    fragment = u'''\
int main()
{
 switch (0)
 {
 case 0 :
 default :
 ;
 }
}
'''
    tokens = [
        (Token.Keyword.Type, u'int'),
        (Token.Text, u' '),
        (Token.Name.Function, u'main'),
        (Token.Punctuation, u'('),
        (Token.Punctuation, u')'),
        (Token.Text, u'\n'),
        (Token.Punctuation, u'{'),
        (Token.Text, u'\n'),
        (Token.Text, u' '),
        (Token.Keyword, u'switch'),
        (Token.Text, u' '),
        (Token.Punctuation, u'('),
        (Token.Literal.Number.Integer, u'0'),
        (Token.Punctuation, u')'),
        (Token.Text, u'\n'),
        (Token.Text, u' '),
        (Token.Punctuation, u'{'),
        (Token.Text, u'\n'),
        (Token.Text, u' '),
        (Token.Keyword, u'case'),
        (Token.Text, u' '),
        (Token.Literal.Number.Integer, u'0'),
        (Token.Text, u' '),
        (Token.Operator, u':'),
        (Token.Text, u'\n'),
        (Token.Text, u' '),
        (Token.Keyword, u'default'),
        (Token.Text, u' '),
        (Token.Operator, u':'),
        (Token.Text, u'\n'),
        (Token.Text, u' '),
        (Token.Punctuation, u';'),
        (Token.Text, u'\n'),
        (Token.Text, u' '),
        (Token.Punctuation, u'}'),
        (Token.Text, u'\n'),
        (Token.Punctuation, u'}'),
        (Token.Text, u'\n'),
    ]
    assert list(lexer.get_tokens(textwrap.dedent(fragment))) == tokens
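All thirteen examples receive lexer as a pytest fixture rather than constructing it inline; the fixture definitions lie outside this excerpt. In Pygments' own test suite the usual pattern is a module-scoped fixture such as the following sketch (CLexer matches the C snippets in Examples 10, 12 and 13; each test module substitutes the lexer it targets):

import pytest

from pygments.lexers import CLexer

@pytest.fixture(scope='module')
def lexer():
    # One shared lexer instance for every test in the module.
    yield CLexer()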