This article collects typical usage examples of the antlr4.TokenStreamRewriter.TokenStreamRewriter.getDefaultText method in Python. If you are wondering what TokenStreamRewriter.getDefaultText does, how to call it, or what real uses look like, the hand-picked code examples below should help. You can also browse further usage examples of the enclosing class, antlr4.TokenStreamRewriter.TokenStreamRewriter.
The following 15 code examples of TokenStreamRewriter.getDefaultText are sorted by popularity by default. Upvote the examples you like or find useful; your votes help the system recommend better Python code examples.
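Each example below is a method of a unittest.TestCase subclass, and TestLexer appears to be an ANTLR-generated lexer from the runtime's test grammar; neither definition is reproduced on this page. As a minimal sketch of the pattern the examples share, assuming a hypothetical lexer MyLexer generated with antlr4 -Dlanguage=Python3 MyGrammar.g4, the typical flow looks like this:

from antlr4 import InputStream, CommonTokenStream
from antlr4.TokenStreamRewriter import TokenStreamRewriter

# MyLexer is a placeholder for any lexer generated by the ANTLR tool, e.g.
# with: antlr4 -Dlanguage=Python3 MyGrammar.g4
from MyLexer import MyLexer

def rewritten(source):
    # Tokenize the input and buffer every token so the rewriter can address
    # tokens by index.
    stream = CommonTokenStream(MyLexer(InputStream(source)))
    stream.fill()

    rewriter = TokenStreamRewriter(tokens=stream)
    # Edits are only queued at this point; nothing is applied yet.
    rewriter.insertBeforeIndex(0, '>>> ')  # insert text before token 0
    rewriter.replaceIndex(1, 'X')          # replace the token at index 1

    # getDefaultText() renders the token stream with every queued edit of the
    # default rewrite program applied; conflicting edits raise ValueError here.
    return rewriter.getDefaultText()

The error-raising examples (1-4) show the other half of this contract: getDefaultText() is also the point at which incompatible edits are detected.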
Example 1: testReplaceThenDeleteMiddleIndex
# Required import: from antlr4.TokenStreamRewriter import TokenStreamRewriter [as alias]
# Or: from antlr4.TokenStreamRewriter.TokenStreamRewriter import getDefaultText [as alias]
def testReplaceThenDeleteMiddleIndex(self):
    input = InputStream('abc')
    lexer = TestLexer(input)
    stream = CommonTokenStream(lexer=lexer)
    stream.fill()
    rewriter = TokenStreamRewriter(tokens=stream)
    rewriter.replaceRange(0, 2, 'x')
    rewriter.insertBeforeIndex(1, '0')
    # The insert falls inside the range already claimed by the replace,
    # so rendering the text raises ValueError.
    with self.assertRaises(ValueError) as ctx:
        rewriter.getDefaultText()
    self.assertEqual(
        'insert op <InsertBeforeOp@[@1,1:1=\'b\',<2>,1:1]:"0"> within boundaries of previous <ReplaceOp@[@0,0:0=\'a\',<1>,1:0]..[@2,2:2=\'c\',<3>,1:2]:"x">',
        str(ctx.exception)
    )
Example 2: testReplaceThenReplaceLowerIndexedSuperset
# Required import: from antlr4.TokenStreamRewriter import TokenStreamRewriter [as alias]
# Or: from antlr4.TokenStreamRewriter.TokenStreamRewriter import getDefaultText [as alias]
def testReplaceThenReplaceLowerIndexedSuperset(self):
    input = InputStream('abcccba')
    lexer = TestLexer(input)
    stream = CommonTokenStream(lexer=lexer)
    stream.fill()
    rewriter = TokenStreamRewriter(tokens=stream)
    rewriter.replaceRange(2, 4, 'xyz')
    rewriter.replaceRange(1, 3, 'foo')
    # The second replace partially overlaps the earlier one, which is rejected.
    with self.assertRaises(ValueError) as ctx:
        rewriter.getDefaultText()
    msg = str(ctx.exception)
    self.assertEqual(
        """replace op boundaries of <ReplaceOp@[@1,1:1='b',<2>,1:1]..[@3,3:3='c',<3>,1:3]:"foo"> overlap with previous <ReplaceOp@[@2,2:2='c',<3>,1:2]..[@4,4:4='c',<3>,1:4]:"xyz">""",
        msg
    )
Example 3: testOverlappingReplace2
# Required import: from antlr4.TokenStreamRewriter import TokenStreamRewriter [as alias]
# Or: from antlr4.TokenStreamRewriter.TokenStreamRewriter import getDefaultText [as alias]
def testOverlappingReplace2(self):
    input = InputStream('abc')
    lexer = TestLexer(input)
    stream = CommonTokenStream(lexer=lexer)
    stream.fill()
    rewriter = TokenStreamRewriter(tokens=stream)
    rewriter.replaceRange(0, 3, 'bar')
    rewriter.replaceRange(1, 2, 'foo')
    # The second replace lies inside the range of the first and is reported
    # as an overlap.
    with self.assertRaises(ValueError) as ctx:
        rewriter.getDefaultText()
    self.assertEqual(
        """replace op boundaries of <ReplaceOp@[@1,1:1='b',<2>,1:1]..[@2,2:2='c',<3>,1:2]:"foo"> overlap with previous <ReplaceOp@[@0,0:0='a',<1>,1:0]..[@3,3:2='<EOF>',<-1>,1:3]:"bar">""",
        str(ctx.exception)
    )
Example 4: testReplaceRangeThenInsertAtRightEdge
# Required import: from antlr4.TokenStreamRewriter import TokenStreamRewriter [as alias]
# Or: from antlr4.TokenStreamRewriter.TokenStreamRewriter import getDefaultText [as alias]
def testReplaceRangeThenInsertAtRightEdge(self):
    input = InputStream('abcccba')
    lexer = TestLexer(input)
    stream = CommonTokenStream(lexer=lexer)
    stream.fill()
    rewriter = TokenStreamRewriter(tokens=stream)
    rewriter.replaceRange(2, 4, 'x')
    rewriter.insertBeforeIndex(4, 'y')
    # Inserting before index 4 lands inside the replaced range 2..4 and is rejected.
    with self.assertRaises(ValueError) as ctx:
        rewriter.getDefaultText()
    msg = str(ctx.exception)
    self.assertEqual(
        "insert op <InsertBeforeOp@[@4,4:4='c',<3>,1:4]:\"y\"> within boundaries of previous <ReplaceOp@[@2,2:2='c',<3>,1:2]..[@4,4:4='c',<3>,1:4]:\"x\">",
        msg
    )
Example 5: testInsertBeforeIndexZero
# Required import: from antlr4.TokenStreamRewriter import TokenStreamRewriter [as alias]
# Or: from antlr4.TokenStreamRewriter.TokenStreamRewriter import getDefaultText [as alias]
def testInsertBeforeIndexZero(self):
    input = InputStream('abc')
    lexer = TestLexer(input)
    stream = CommonTokenStream(lexer=lexer)
    stream.fill()
    rewriter = TokenStreamRewriter(tokens=stream)
    rewriter.insertBeforeIndex(0, '0')
    self.assertEqual(rewriter.getDefaultText(), '0abc')
Example 6: testInsertAfterLastIndex
# Required import: from antlr4.TokenStreamRewriter import TokenStreamRewriter [as alias]
# Or: from antlr4.TokenStreamRewriter.TokenStreamRewriter import getDefaultText [as alias]
def testInsertAfterLastIndex(self):
    input = InputStream('abc')
    lexer = TestLexer(input)
    stream = CommonTokenStream(lexer=lexer)
    stream.fill()
    rewriter = TokenStreamRewriter(tokens=stream)
    # An index past the last token still appends at the end of the stream.
    rewriter.insertAfter(10, 'x')
    self.assertEqual(rewriter.getDefaultText(), 'abcx')
Example 7: testReplaceSubsetThenFetch
# Required import: from antlr4.TokenStreamRewriter import TokenStreamRewriter [as alias]
# Or: from antlr4.TokenStreamRewriter.TokenStreamRewriter import getDefaultText [as alias]
def testReplaceSubsetThenFetch(self):
    input = InputStream('abcccba')
    lexer = TestLexer(input)
    stream = CommonTokenStream(lexer=lexer)
    stream.fill()
    rewriter = TokenStreamRewriter(tokens=stream)
    rewriter.replaceRange(2, 4, 'xyz')
    self.assertEqual('abxyzba', rewriter.getDefaultText())
Example 8: testReplaceMiddleIndex
# Required import: from antlr4.TokenStreamRewriter import TokenStreamRewriter [as alias]
# Or: from antlr4.TokenStreamRewriter.TokenStreamRewriter import getDefaultText [as alias]
def testReplaceMiddleIndex(self):
    input = InputStream('abc')
    lexer = TestLexer(input)
    stream = CommonTokenStream(lexer=lexer)
    stream.fill()
    rewriter = TokenStreamRewriter(tokens=stream)
    rewriter.replaceIndex(1, 'x')
    self.assertEqual(rewriter.getDefaultText(), 'axc')
Example 9: testReplaceAll
# Required import: from antlr4.TokenStreamRewriter import TokenStreamRewriter [as alias]
# Or: from antlr4.TokenStreamRewriter.TokenStreamRewriter import getDefaultText [as alias]
def testReplaceAll(self):
    input = InputStream('abcccba')
    lexer = TestLexer(input)
    stream = CommonTokenStream(lexer=lexer)
    stream.fill()
    rewriter = TokenStreamRewriter(tokens=stream)
    rewriter.replaceRange(0, 6, 'x')
    self.assertEqual('x', rewriter.getDefaultText())
Example 10: testCombineInsertOnLeftWithReplace
# Required import: from antlr4.TokenStreamRewriter import TokenStreamRewriter [as alias]
# Or: from antlr4.TokenStreamRewriter.TokenStreamRewriter import getDefaultText [as alias]
def testCombineInsertOnLeftWithReplace(self):
    input = InputStream('abc')
    lexer = TestLexer(input)
    stream = CommonTokenStream(lexer=lexer)
    stream.fill()
    rewriter = TokenStreamRewriter(tokens=stream)
    rewriter.replaceRange(0, 2, 'foo')
    rewriter.insertBeforeIndex(0, 'z')
    self.assertEqual('zfoo', rewriter.getDefaultText())
Example 11: test2InsertBeforeAfterMiddleIndex
# Required import: from antlr4.TokenStreamRewriter import TokenStreamRewriter [as alias]
# Or: from antlr4.TokenStreamRewriter.TokenStreamRewriter import getDefaultText [as alias]
def test2InsertBeforeAfterMiddleIndex(self):
    input = InputStream('abc')
    lexer = TestLexer(input)
    stream = CommonTokenStream(lexer=lexer)
    stream.fill()
    rewriter = TokenStreamRewriter(tokens=stream)
    rewriter.insertBeforeIndex(1, 'x')
    rewriter.insertAfter(1, 'x')
    self.assertEqual(rewriter.getDefaultText(), 'axbxc')
Example 12: testCombineInserts
# Required import: from antlr4.TokenStreamRewriter import TokenStreamRewriter [as alias]
# Or: from antlr4.TokenStreamRewriter.TokenStreamRewriter import getDefaultText [as alias]
def testCombineInserts(self):
    input = InputStream('abc')
    lexer = TestLexer(input)
    stream = CommonTokenStream(lexer=lexer)
    stream.fill()
    rewriter = TokenStreamRewriter(tokens=stream)
    # Later inserts at the same index end up in front of earlier ones.
    rewriter.insertBeforeIndex(0, 'x')
    rewriter.insertBeforeIndex(0, 'y')
    self.assertEqual('yxabc', rewriter.getDefaultText())
Example 13: testInsertThenReplaceSameIndex
# Required import: from antlr4.TokenStreamRewriter import TokenStreamRewriter [as alias]
# Or: from antlr4.TokenStreamRewriter.TokenStreamRewriter import getDefaultText [as alias]
def testInsertThenReplaceSameIndex(self):
    input = InputStream('abc')
    lexer = TestLexer(input)
    stream = CommonTokenStream(lexer=lexer)
    stream.fill()
    rewriter = TokenStreamRewriter(tokens=stream)
    # Both edits are applied: the insert before token 0 and the replacement of token 0.
    rewriter.insertBeforeIndex(0, '0')
    rewriter.replaceIndex(0, 'x')
    self.assertEqual('0xbc', rewriter.getDefaultText())
Example 14: testReplaceRangeThenInsertAfterRightEdge
# Required import: from antlr4.TokenStreamRewriter import TokenStreamRewriter [as alias]
# Or: from antlr4.TokenStreamRewriter.TokenStreamRewriter import getDefaultText [as alias]
def testReplaceRangeThenInsertAfterRightEdge(self):
    input = InputStream('abcccba')
    lexer = TestLexer(input)
    stream = CommonTokenStream(lexer=lexer)
    stream.fill()
    rewriter = TokenStreamRewriter(tokens=stream)
    rewriter.replaceRange(2, 4, 'x')
    # Inserting after the right edge of the replaced range appends after the replacement text.
    rewriter.insertAfter(4, 'y')
    self.assertEqual('abxyba', rewriter.getDefaultText())
Example 15: testCombineInsertOnLeftWithDelete
# Required import: from antlr4.TokenStreamRewriter import TokenStreamRewriter [as alias]
# Or: from antlr4.TokenStreamRewriter.TokenStreamRewriter import getDefaultText [as alias]
def testCombineInsertOnLeftWithDelete(self):
    input = InputStream('abc')
    lexer = TestLexer(input)
    stream = CommonTokenStream(lexer=lexer)
    stream.fill()
    rewriter = TokenStreamRewriter(tokens=stream)
    # delete() takes the rewrite program name first; 'default' is the default program.
    rewriter.delete('default', 0, 2)
    rewriter.insertBeforeIndex(0, 'z')
    self.assertEqual('z', rewriter.getDefaultText())