This article collects typical usage examples of Python's google.protobuf.text_format.Tokenizer. If you have been wondering what text_format.Tokenizer is for, how to call it, or what real code that uses it looks like, the curated examples below should help. You can also explore further usage examples from the enclosing module, google.protobuf.text_format.
The following presents 11 code examples of text_format.Tokenizer, ordered by popularity by default.
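Before the test excerpts, here is a minimal sketch of how a Tokenizer is typically driven. It relies only on calls that also appear in the examples below (ConsumeIdentifier, NextToken, ConsumeInteger, AtEnd); the input string and variable names are illustrative, not taken from any real message.

from google.protobuf import text_format

# Tokenizer takes an iterable of lines, so splitlines() is used here
# just as in the test cases below.  'some_number: 4' is an illustrative
# text-format snippet.
lines = 'some_number: 4'.splitlines()
tokenizer = text_format.Tokenizer(lines)

name = tokenizer.ConsumeIdentifier()   # 'some_number'
assert tokenizer.token == ':'          # the separator is the current token
tokenizer.NextToken()                  # advance past ':'
value = tokenizer.ConsumeInteger()     # 4
assert tokenizer.AtEnd()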
Example 1: testConsumeAbstractIntegers
# Required import: from google.protobuf import text_format [as alias]
# Or: from google.protobuf.text_format import Tokenizer [as alias]
def testConsumeAbstractIntegers(self):
    # This test only tests the failures in the integer parsing methods as well
    # as the '0' special cases.
    int64_max = (1 << 63) - 1
    uint32_max = (1 << 32) - 1
    text = '-1 %d %d' % (uint32_max + 1, int64_max + 1)
    tokenizer = text_format.Tokenizer(text.splitlines())
    self.assertEqual(-1, tokenizer.ConsumeInteger())
    self.assertEqual(uint32_max + 1, tokenizer.ConsumeInteger())
    self.assertEqual(int64_max + 1, tokenizer.ConsumeInteger())
    self.assertTrue(tokenizer.AtEnd())

    text = '-0 0 0 1.2'
    tokenizer = text_format.Tokenizer(text.splitlines())
    self.assertEqual(0, tokenizer.ConsumeInteger())
    self.assertEqual(0, tokenizer.ConsumeInteger())
    self.assertEqual(True, tokenizer.TryConsumeInteger())
    self.assertEqual(False, tokenizer.TryConsumeInteger())
    with self.assertRaises(text_format.ParseError):
        tokenizer.ConsumeInteger()
    self.assertEqual(1.2, tokenizer.ConsumeFloat())
    self.assertTrue(tokenizer.AtEnd())
Example 2: testConsumeByteString
# Required import: from google.protobuf import text_format [as alias]
# Or: from google.protobuf.text_format import Tokenizer [as alias]
def testConsumeByteString(self):
    text = '"string1\''
    tokenizer = text_format.Tokenizer(text.splitlines())
    self.assertRaises(text_format.ParseError, tokenizer.ConsumeByteString)

    text = 'string1"'
    tokenizer = text_format.Tokenizer(text.splitlines())
    self.assertRaises(text_format.ParseError, tokenizer.ConsumeByteString)

    text = '\n"\\xt"'
    tokenizer = text_format.Tokenizer(text.splitlines())
    self.assertRaises(text_format.ParseError, tokenizer.ConsumeByteString)

    text = '\n"\\"'
    tokenizer = text_format.Tokenizer(text.splitlines())
    self.assertRaises(text_format.ParseError, tokenizer.ConsumeByteString)

    text = '\n"\\x"'
    tokenizer = text_format.Tokenizer(text.splitlines())
    self.assertRaises(text_format.ParseError, tokenizer.ConsumeByteString)
Example 3: testConsumeAndCheckTrailingComment
# Required import: from google.protobuf import text_format [as alias]
# Or: from google.protobuf.text_format import Tokenizer [as alias]
def testConsumeAndCheckTrailingComment(self):
    text = 'some_number: 4 # some comment'  # trailing comment on the same line
    tokenizer = text_format.Tokenizer(text.splitlines(), skip_comments=False)
    self.assertRaises(text_format.ParseError,
                      tokenizer.ConsumeCommentOrTrailingComment)

    self.assertEqual('some_number', tokenizer.ConsumeIdentifier())
    self.assertEqual(tokenizer.token, ':')
    tokenizer.NextToken()
    self.assertRaises(text_format.ParseError,
                      tokenizer.ConsumeCommentOrTrailingComment)
    self.assertEqual(4, tokenizer.ConsumeInteger())
    self.assertFalse(tokenizer.AtEnd())

    self.assertEqual((True, '# some comment'),
                     tokenizer.ConsumeCommentOrTrailingComment())
    self.assertTrue(tokenizer.AtEnd())
Example 4: testConsumeAbstractIntegers
# Required import: from google.protobuf import text_format [as alias]
# Or: from google.protobuf.text_format import Tokenizer [as alias]
def testConsumeAbstractIntegers(self):
    # This test only tests the failures in the integer parsing methods as well
    # as the '0' special cases.
    int64_max = (1 << 63) - 1
    uint32_max = (1 << 32) - 1
    text = '-1 %d %d' % (uint32_max + 1, int64_max + 1)
    tokenizer = text_format.Tokenizer(text.splitlines())
    self.assertEqual(-1, tokenizer.ConsumeInteger())
    self.assertEqual(uint32_max + 1, tokenizer.ConsumeInteger())
    self.assertEqual(int64_max + 1, tokenizer.ConsumeInteger())
    self.assertTrue(tokenizer.AtEnd())

    text = '-0 0'
    tokenizer = text_format.Tokenizer(text.splitlines())
    self.assertEqual(0, tokenizer.ConsumeInteger())
    self.assertEqual(0, tokenizer.ConsumeInteger())
    self.assertTrue(tokenizer.AtEnd())
Example 5: testConsumeIntegers
# Required import: from google.protobuf import text_format [as alias]
# Or: from google.protobuf.text_format import Tokenizer [as alias]
def testConsumeIntegers(self):
    # This test only tests the failures in the integer parsing methods as well
    # as the '0' special cases.
    int64_max = (1 << 63) - 1
    uint32_max = (1 << 32) - 1
    text = '-1 %d %d' % (uint32_max + 1, int64_max + 1)
    tokenizer = text_format.Tokenizer(text.splitlines())
    self.assertRaises(text_format.ParseError,
                      text_format._ConsumeUint32, tokenizer)
    self.assertRaises(text_format.ParseError,
                      text_format._ConsumeUint64, tokenizer)
    self.assertEqual(-1, text_format._ConsumeInt32(tokenizer))
    self.assertRaises(text_format.ParseError,
                      text_format._ConsumeUint32, tokenizer)
    self.assertRaises(text_format.ParseError,
                      text_format._ConsumeInt32, tokenizer)
    self.assertEqual(uint32_max + 1, text_format._ConsumeInt64(tokenizer))
    self.assertRaises(text_format.ParseError,
                      text_format._ConsumeInt64, tokenizer)
    self.assertEqual(int64_max + 1, text_format._ConsumeUint64(tokenizer))
    self.assertTrue(tokenizer.AtEnd())

    text = '-0 -0 0 0'
    tokenizer = text_format.Tokenizer(text.splitlines())
    self.assertEqual(0, text_format._ConsumeUint32(tokenizer))
    self.assertEqual(0, text_format._ConsumeUint64(tokenizer))
    self.assertEqual(0, text_format._ConsumeUint32(tokenizer))
    self.assertEqual(0, text_format._ConsumeUint64(tokenizer))
    self.assertTrue(tokenizer.AtEnd())
Example 6: testConsumeBool
# Required import: from google.protobuf import text_format [as alias]
# Or: from google.protobuf.text_format import Tokenizer [as alias]
def testConsumeBool(self):
    text = 'not-a-bool'
    tokenizer = text_format.Tokenizer(text.splitlines())
    self.assertRaises(text_format.ParseError, tokenizer.ConsumeBool)
Example 7: testSkipComment
# Required import: from google.protobuf import text_format [as alias]
# Or: from google.protobuf.text_format import Tokenizer [as alias]
def testSkipComment(self):
    tokenizer = text_format.Tokenizer('# some comment'.splitlines())
    self.assertTrue(tokenizer.AtEnd())
    self.assertRaises(text_format.ParseError, tokenizer.ConsumeComment)
Example 8: testConsumeTwoComments
# Required import: from google.protobuf import text_format [as alias]
# Or: from google.protobuf.text_format import Tokenizer [as alias]
def testConsumeTwoComments(self):
    text = '# some comment\n# another comment'
    tokenizer = text_format.Tokenizer(text.splitlines(), skip_comments=False)
    self.assertEqual('# some comment', tokenizer.ConsumeComment())
    self.assertFalse(tokenizer.AtEnd())
    self.assertEqual('# another comment', tokenizer.ConsumeComment())
    self.assertTrue(tokenizer.AtEnd())
Example 9: testConsumeTrailingComment
# Required import: from google.protobuf import text_format [as alias]
# Or: from google.protobuf.text_format import Tokenizer [as alias]
def testConsumeTrailingComment(self):
    text = 'some_number: 4\n# some comment'
    tokenizer = text_format.Tokenizer(text.splitlines(), skip_comments=False)
    self.assertRaises(text_format.ParseError, tokenizer.ConsumeComment)

    self.assertEqual('some_number', tokenizer.ConsumeIdentifier())
    self.assertEqual(tokenizer.token, ':')
    tokenizer.NextToken()
    self.assertRaises(text_format.ParseError, tokenizer.ConsumeComment)
    self.assertEqual(4, tokenizer.ConsumeInteger())
    self.assertFalse(tokenizer.AtEnd())

    self.assertEqual('# some comment', tokenizer.ConsumeComment())
    self.assertTrue(tokenizer.AtEnd())
Example 10: testConsumeLineComment
# Required import: from google.protobuf import text_format [as alias]
# Or: from google.protobuf.text_format import Tokenizer [as alias]
def testConsumeLineComment(self):
    tokenizer = text_format.Tokenizer('# some comment'.splitlines(),
                                      skip_comments=False)
    self.assertFalse(tokenizer.AtEnd())
    self.assertEqual((False, '# some comment'),
                     tokenizer.ConsumeCommentOrTrailingComment())
    self.assertTrue(tokenizer.AtEnd())
Example 11: testConsumeTwoLineComments
# Required import: from google.protobuf import text_format [as alias]
# Or: from google.protobuf.text_format import Tokenizer [as alias]
def testConsumeTwoLineComments(self):
    text = '# some comment\n# another comment'
    tokenizer = text_format.Tokenizer(text.splitlines(), skip_comments=False)
    self.assertEqual((False, '# some comment'),
                     tokenizer.ConsumeCommentOrTrailingComment())
    self.assertFalse(tokenizer.AtEnd())
    self.assertEqual((False, '# another comment'),
                     tokenizer.ConsumeCommentOrTrailingComment())
    self.assertTrue(tokenizer.AtEnd())