This article collects typical usage examples of the Python method Quasar.parser.tokens.css_tokens.CSSTokenizer.tokenize_stream. If you have been wondering what CSSTokenizer.tokenize_stream does or how to call it, the selected examples below may help. You can also read further about the class that defines this method, Quasar.parser.tokens.css_tokens.CSSTokenizer.
Below are 15 code examples of the CSSTokenizer.tokenize_stream method, sorted by popularity by default.
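All of the examples follow the same pattern: build a CSSTokenizer around a raw CSS string, call tokenize_stream() to consume the input, then inspect the resulting tokens. A minimal sketch of that pattern, assuming only what the tests below assert (the .stream, .tokens and .value attributes are taken from those assertions, not from separate documentation):

from Quasar.parser.tokens.css_tokens import CSSTokenizer

tokenizer = CSSTokenizer("#mydiv +123.45E9")  # raw CSS source text
tokenizer.tokenize_stream()                   # consumes the input stream in place
assert not tokenizer.stream                   # the whole input has been read
for token in tokenizer.tokens:                # resulting token objects
    print(type(token).__name__, token.value)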
Example 1: test_hash_token_name_code_point
# Required import: from Quasar.parser.tokens.css_tokens import CSSTokenizer [as alias]
# Or: from Quasar.parser.tokens.css_tokens.CSSTokenizer import tokenize_stream [as alias]
def test_hash_token_name_code_point():
    token_stream = CSSTokenizer("#mydiv")
    token_stream.tokenize_stream()
    assert not token_stream.stream
    assert isinstance(token_stream.tokens[0], HashToken)
    assert token_stream.tokens[0].value == 'mydiv'
    assert token_stream.tokens[0].type_flag == 'id'
Example 2: test_CDO_token
# Required import: from Quasar.parser.tokens.css_tokens import CSSTokenizer [as alias]
# Or: from Quasar.parser.tokens.css_tokens.CSSTokenizer import tokenize_stream [as alias]
def test_CDO_token():
    raise SkipTest  # this is hanging somewhere...
    token_stream = CSSTokenizer("<!--")
    token_stream.tokenize_stream()
    assert not token_stream.stream
    assert isinstance(token_stream.tokens[0], CDOToken)
    assert token_stream.tokens[0].value == ''
Example 3: test_signed_positive_float_scientific_notation_big_e_unsigned
# Required import: from Quasar.parser.tokens.css_tokens import CSSTokenizer [as alias]
# Or: from Quasar.parser.tokens.css_tokens.CSSTokenizer import tokenize_stream [as alias]
def test_signed_positive_float_scientific_notation_big_e_unsigned():
    token_stream = CSSTokenizer("+123.45E9")
    token_stream.tokenize_stream()
    assert not token_stream.stream
    assert isinstance(token_stream.tokens[0], NumberToken)
    assert token_stream.tokens[0].value == 1.2345e11
    assert token_stream.tokens[0].type_ == 'number'
    assert token_stream.tokens[0].string == '+123.45E9'
Example 4: test_signed_negative_float_normal_notation
# Required import: from Quasar.parser.tokens.css_tokens import CSSTokenizer [as alias]
# Or: from Quasar.parser.tokens.css_tokens.CSSTokenizer import tokenize_stream [as alias]
def test_signed_negative_float_normal_notation():
    token_stream = CSSTokenizer("-123.45")
    token_stream.tokenize_stream()
    assert not token_stream.stream
    assert isinstance(token_stream.tokens[0], NumberToken)
    assert token_stream.tokens[0].value == -123.45
    assert token_stream.tokens[0].type_ == 'number'
    assert token_stream.tokens[0].string == '-123.45'
Example 5: test_negative_float_scientific_notation_little_e_signed_positive
# Required import: from Quasar.parser.tokens.css_tokens import CSSTokenizer [as alias]
# Or: from Quasar.parser.tokens.css_tokens.CSSTokenizer import tokenize_stream [as alias]
def test_negative_float_scientific_notation_little_e_signed_positive():
    token_stream = CSSTokenizer("-123.45e+9")
    token_stream.tokenize_stream()
    assert not token_stream.stream
    assert isinstance(token_stream.tokens[0], NumberToken)
    assert token_stream.tokens[0].value == -1.2345e11
    assert token_stream.tokens[0].type_ == 'number'
    assert token_stream.tokens[0].string == '-123.45e+9'
Example 6: test_positive_float_scientific_notation_big_e_signed_negative
# Required import: from Quasar.parser.tokens.css_tokens import CSSTokenizer [as alias]
# Or: from Quasar.parser.tokens.css_tokens.CSSTokenizer import tokenize_stream [as alias]
def test_positive_float_scientific_notation_big_e_signed_negative():
    token_stream = CSSTokenizer("+123.45E-9")
    token_stream.tokenize_stream()
    assert not token_stream.stream
    assert isinstance(token_stream.tokens[0], NumberToken)
    assert_almost_equal(token_stream.tokens[0].value, 1.2345e-7)
    assert token_stream.tokens[0].type_ == 'number'
    assert token_stream.tokens[0].string == '+123.45E-9'
Example 7: test_hash_token_valid_escape
# Required import: from Quasar.parser.tokens.css_tokens import CSSTokenizer [as alias]
# Or: from Quasar.parser.tokens.css_tokens.CSSTokenizer import tokenize_stream [as alias]
def test_hash_token_valid_escape():
    raise SkipTest  # I'm having issues b/c I'm using a narrow build...
    token_stream = CSSTokenizer("#\\9")
    token_stream.tokenize_stream()
    assert not token_stream.stream
    assert isinstance(token_stream.tokens[0], HashToken)
    assert token_stream.tokens[0].value == '9'
    assert token_stream.tokens[0].type_flag == 'id'
Example 8: test_negative_float_scientific_notation_big_e_signed_negative
# Required import: from Quasar.parser.tokens.css_tokens import CSSTokenizer [as alias]
# Or: from Quasar.parser.tokens.css_tokens.CSSTokenizer import tokenize_stream [as alias]
def test_negative_float_scientific_notation_big_e_signed_negative():
    token_stream = CSSTokenizer("-123.45E-9")
    token_stream.tokenize_stream()
    assert not token_stream.stream
    assert isinstance(token_stream.tokens[0], NumberToken)
    assert token_stream.tokens[0].value == -0.00000012345
    assert token_stream.tokens[0].type_ == 'number'
    assert token_stream.tokens[0].string == '-123.45E-9'
Example 9: test_unsigned_float_scientific_notation_little_e_unsigned
# Required import: from Quasar.parser.tokens.css_tokens import CSSTokenizer [as alias]
# Or: from Quasar.parser.tokens.css_tokens.CSSTokenizer import tokenize_stream [as alias]
def test_unsigned_float_scientific_notation_little_e_unsigned():
    token_stream = CSSTokenizer("123.45e9")
    token_stream.tokenize_stream()
    assert not token_stream.stream
    assert isinstance(token_stream.tokens[0], NumberToken)
    assert token_stream.tokens[0].value == 1.2345e11
    assert token_stream.tokens[0].type_ == 'number'
    assert token_stream.tokens[0].string == '123.45e9'
Example 10: test_positive_integer_scientific_notation_little_e_signed_positive
# Required import: from Quasar.parser.tokens.css_tokens import CSSTokenizer [as alias]
# Or: from Quasar.parser.tokens.css_tokens.CSSTokenizer import tokenize_stream [as alias]
def test_positive_integer_scientific_notation_little_e_signed_positive():
    token_stream = CSSTokenizer("+12345e+9")
    token_stream.tokenize_stream()
    assert not token_stream.stream
    assert isinstance(token_stream.tokens[0], NumberToken)
    assert token_stream.tokens[0].value == 1.2345e13
    assert token_stream.tokens[0].type_ == 'number'
    assert token_stream.tokens[0].string == '+12345e+9'
Example 11: test_negative_integer_scientific_notation_big_e_signed_negative
# Required import: from Quasar.parser.tokens.css_tokens import CSSTokenizer [as alias]
# Or: from Quasar.parser.tokens.css_tokens.CSSTokenizer import tokenize_stream [as alias]
def test_negative_integer_scientific_notation_big_e_signed_negative():
    token_stream = CSSTokenizer("-12345E-9")
    token_stream.tokenize_stream()
    assert not token_stream.stream
    assert isinstance(token_stream.tokens[0], NumberToken)
    assert_almost_equal(token_stream.tokens[0].value, -1.2345e-5)
    assert token_stream.tokens[0].type_ == 'number'
    assert token_stream.tokens[0].string == '-12345E-9'
Example 12: test_signed_negative_integer_scientific_notation_big_e_unsigned
# Required import: from Quasar.parser.tokens.css_tokens import CSSTokenizer [as alias]
# Or: from Quasar.parser.tokens.css_tokens.CSSTokenizer import tokenize_stream [as alias]
def test_signed_negative_integer_scientific_notation_big_e_unsigned():
    token_stream = CSSTokenizer("-12345E9")
    token_stream.tokenize_stream()
    assert not token_stream.stream
    assert isinstance(token_stream.tokens[0], NumberToken)
    assert token_stream.tokens[0].value == -1.2345e13
    assert token_stream.tokens[0].type_ == 'number'
    assert token_stream.tokens[0].string == '-12345E9'
Example 13: test_signed_positive_integer_normal_notation
# Required import: from Quasar.parser.tokens.css_tokens import CSSTokenizer [as alias]
# Or: from Quasar.parser.tokens.css_tokens.CSSTokenizer import tokenize_stream [as alias]
def test_signed_positive_integer_normal_notation():
    token_stream = CSSTokenizer("+12345")
    token_stream.tokenize_stream()
    assert not token_stream.stream
    assert isinstance(token_stream.tokens[0], NumberToken)
    assert token_stream.tokens[0].value == 12345
    assert token_stream.tokens[0].type_ == 'integer'
    assert token_stream.tokens[0].string == '+12345'
Example 14: test_delim_token
# Required import: from Quasar.parser.tokens.css_tokens import CSSTokenizer [as alias]
# Or: from Quasar.parser.tokens.css_tokens.CSSTokenizer import tokenize_stream [as alias]
def test_delim_token():
    token_stream = CSSTokenizer("#!")
    token_stream.tokenize_stream()
    assert not token_stream.stream
    assert isinstance(token_stream.tokens[0], DelimToken)
    assert token_stream.tokens[0].value == '#'
    assert isinstance(token_stream.tokens[1], DelimToken)
    assert token_stream.tokens[1].value == '!'
Example 15: test_delim_token_not_name
# Required import: from Quasar.parser.tokens.css_tokens import CSSTokenizer [as alias]
# Or: from Quasar.parser.tokens.css_tokens.CSSTokenizer import tokenize_stream [as alias]
def test_delim_token_not_name():
    token_stream = CSSTokenizer("#\n")
    token_stream.tokenize_stream()
    assert not token_stream.stream
    assert isinstance(token_stream.tokens[0], DelimToken)
    assert token_stream.tokens[0].value == '#'
    assert isinstance(token_stream.tokens[1], WhitespaceToken)
    assert token_stream.tokens[1].value == ' '