本文整理汇总了Python中Quasar.parser.tokens.css_tokens.CSSTokenizer类的典型用法代码示例。如果您正苦于以下问题:Python CSSTokenizer类的具体用法?Python CSSTokenizer怎么用?Python CSSTokenizer使用的例子?那么恭喜您, 这里精选的类代码示例或许可以为您提供帮助。
在下文中一共展示了CSSTokenizer类的15个代码示例,这些例子默认根据受欢迎程度排序。您可以为喜欢或者感觉有用的代码点赞,您的评价将有助于系统推荐出更棒的Python代码示例。
示例1: test_hash_token_name_code_point
def test_hash_token_name_code_point():
    """A '#' followed by name code points becomes an id-typed HashToken."""
    tokenizer = CSSTokenizer("#mydiv")
    tokenizer.tokenize_stream()
    # Input must be fully consumed.
    assert not tokenizer.stream
    hash_tok = tokenizer.tokens[0]
    assert isinstance(hash_tok, HashToken)
    assert hash_tok.value == 'mydiv'
    assert hash_tok.type_flag == 'id'
示例2: test_CDO_token
def test_CDO_token():
    """'<!--' should tokenize to a single CDOToken with an empty value.

    Currently disabled: tokenizing this input hangs somewhere.
    """
    raise SkipTest  # this is hanging somewhere...
    tokenizer = CSSTokenizer("<!--")
    tokenizer.tokenize_stream()
    assert not tokenizer.stream
    cdo_tok = tokenizer.tokens[0]
    assert isinstance(cdo_tok, CDOToken)
    assert cdo_tok.value == ''
示例3: test_mixed_whitespace
def test_mixed_whitespace():
    """A run of mixed whitespace collapses into one WhitespaceToken."""
    tokenizer = CSSTokenizer("  \t\r  \n")
    tokenizer.consume_next_code_point()
    tokenizer.consume_whitespace_token()
    assert not tokenizer.stream
    ws_tok = tokenizer.tokens[0]
    assert isinstance(ws_tok, WhitespaceToken)
    # The whole run is represented by a single space.
    assert ws_tok.value == ' '
示例4: test_consume_numeric_token_number1
def test_consume_numeric_token_number1():
    """A bare digit run yields an integer-typed NumberToken."""
    tokenizer = CSSTokenizer("12345")
    tokenizer.consume_numeric_token()
    assert not tokenizer.stream
    num_tok = tokenizer.tokens[0]
    assert isinstance(num_tok, NumberToken)
    assert num_tok.type_ == 'integer'
    assert num_tok.value == 12345
    assert num_tok.string == '12345'
示例5: test_negative_float_scientific_notation_big_e_signed_negative
def test_negative_float_scientific_notation_big_e_signed_negative():
    """'-123.45E-9' tokenizes to a single NumberToken of type 'number'.

    Uses assert_almost_equal rather than `==` for the floating-point
    value, consistent with the sibling scientific-notation tests (e.g.
    the '+123.45E-9' and '-12345E-9' cases): the tokenizer computes the
    value arithmetically, so exact binary equality with a decimal
    literal is not guaranteed.
    """
    token_stream = CSSTokenizer("-123.45E-9")
    token_stream.tokenize_stream()
    assert not token_stream.stream
    assert isinstance(token_stream.tokens[0], NumberToken)
    assert_almost_equal(token_stream.tokens[0].value, -1.2345e-7)
    assert token_stream.tokens[0].type_ == 'number'
    assert token_stream.tokens[0].string == '-123.45E-9'
示例6: test_positive_float_scientific_notation_big_e_signed_negative
def test_positive_float_scientific_notation_big_e_signed_negative():
    """'+123.45E-9' yields a NumberToken close to 1.2345e-7."""
    tokenizer = CSSTokenizer("+123.45E-9")
    tokenizer.tokenize_stream()
    assert not tokenizer.stream
    num_tok = tokenizer.tokens[0]
    assert isinstance(num_tok, NumberToken)
    # Approximate comparison: the value is computed, not parsed literally.
    assert_almost_equal(num_tok.value, 1.2345e-7)
    assert num_tok.type_ == 'number'
    assert num_tok.string == '+123.45E-9'
示例7: test_negative_float_scientific_notation_little_e_signed_positive
def test_negative_float_scientific_notation_little_e_signed_positive():
    """'-123.45e+9' yields a NumberToken valued -1.2345e11."""
    tokenizer = CSSTokenizer("-123.45e+9")
    tokenizer.tokenize_stream()
    assert not tokenizer.stream
    num_tok = tokenizer.tokens[0]
    assert isinstance(num_tok, NumberToken)
    assert num_tok.value == -1.2345e11
    assert num_tok.type_ == 'number'
    assert num_tok.string == '-123.45e+9'
示例8: test_signed_positive_float_scientific_notation_big_e_unsigned
def test_signed_positive_float_scientific_notation_big_e_unsigned():
    """'+123.45E9' yields a NumberToken valued 1.2345e11."""
    tokenizer = CSSTokenizer("+123.45E9")
    tokenizer.tokenize_stream()
    assert not tokenizer.stream
    num_tok = tokenizer.tokens[0]
    assert isinstance(num_tok, NumberToken)
    assert num_tok.value == 1.2345e11
    assert num_tok.type_ == 'number'
    assert num_tok.string == '+123.45E9'
示例9: test_signed_negative_float_normal_notation
def test_signed_negative_float_normal_notation():
    """'-123.45' (no exponent) yields a 'number'-typed NumberToken."""
    tokenizer = CSSTokenizer("-123.45")
    tokenizer.tokenize_stream()
    assert not tokenizer.stream
    num_tok = tokenizer.tokens[0]
    assert isinstance(num_tok, NumberToken)
    assert num_tok.value == -123.45
    assert num_tok.type_ == 'number'
    assert num_tok.string == '-123.45'
示例10: test_unsigned_float_scientific_notation_little_e_unsigned
def test_unsigned_float_scientific_notation_little_e_unsigned():
    """'123.45e9' yields a NumberToken valued 1.2345e11."""
    tokenizer = CSSTokenizer("123.45e9")
    tokenizer.tokenize_stream()
    assert not tokenizer.stream
    num_tok = tokenizer.tokens[0]
    assert isinstance(num_tok, NumberToken)
    assert num_tok.value == 1.2345e11
    assert num_tok.type_ == 'number'
    assert num_tok.string == '123.45e9'
示例11: test_negative_integer_scientific_notation_big_e_signed_negative
def test_negative_integer_scientific_notation_big_e_signed_negative():
    """'-12345E-9' yields a NumberToken close to -1.2345e-5."""
    tokenizer = CSSTokenizer("-12345E-9")
    tokenizer.tokenize_stream()
    assert not tokenizer.stream
    num_tok = tokenizer.tokens[0]
    assert isinstance(num_tok, NumberToken)
    # Approximate comparison: the value is computed, not parsed literally.
    assert_almost_equal(num_tok.value, -1.2345e-5)
    assert num_tok.type_ == 'number'
    assert num_tok.string == '-12345E-9'
示例12: test_positive_integer_scientific_notation_little_e_signed_positive
def test_positive_integer_scientific_notation_little_e_signed_positive():
    """'+12345e+9' yields a NumberToken valued 1.2345e13."""
    tokenizer = CSSTokenizer("+12345e+9")
    tokenizer.tokenize_stream()
    assert not tokenizer.stream
    num_tok = tokenizer.tokens[0]
    assert isinstance(num_tok, NumberToken)
    assert num_tok.value == 1.2345e13
    assert num_tok.type_ == 'number'
    assert num_tok.string == '+12345e+9'
示例13: test_signed_negative_integer_scientific_notation_big_e_unsigned
def test_signed_negative_integer_scientific_notation_big_e_unsigned():
    """'-12345E9' yields a NumberToken valued -1.2345e13."""
    tokenizer = CSSTokenizer("-12345E9")
    tokenizer.tokenize_stream()
    assert not tokenizer.stream
    num_tok = tokenizer.tokens[0]
    assert isinstance(num_tok, NumberToken)
    assert num_tok.value == -1.2345e13
    assert num_tok.type_ == 'number'
    assert num_tok.string == '-12345E9'
示例14: test_hash_token_valid_escape
def test_hash_token_valid_escape():
    """'#' followed by a valid escape ('\\9') yields an id-typed HashToken.

    Currently disabled: fails on narrow Python builds.
    """
    raise SkipTest  # I'm having issues b/c I'm using a narrow build...
    tokenizer = CSSTokenizer("#\\9")
    tokenizer.tokenize_stream()
    assert not tokenizer.stream
    hash_tok = tokenizer.tokens[0]
    assert isinstance(hash_tok, HashToken)
    assert hash_tok.value == '9'
    assert hash_tok.type_flag == 'id'
示例15: test_delim_token_not_name
def test_delim_token_not_name():
    """'#' not followed by a name code point stays a DelimToken.

    The trailing newline then tokenizes as whitespace.
    """
    tokenizer = CSSTokenizer("#\n")
    tokenizer.tokenize_stream()
    assert not tokenizer.stream
    delim_tok, ws_tok = tokenizer.tokens[0], tokenizer.tokens[1]
    assert isinstance(delim_tok, DelimToken)
    assert delim_tok.value == '#'
    assert isinstance(ws_tok, WhitespaceToken)
    assert ws_tok.value == ' '