This article collects typical usage examples of the Python class antlr4.TokenStreamRewriter.TokenStreamRewriter. If you are unsure what TokenStreamRewriter is for or how to use it, the curated examples below may help. The following shows 15 code examples of the TokenStreamRewriter class, sorted by popularity by default.
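All of the examples are unit-test methods that share the same scaffolding: wrap the input text in an InputStream, tokenize it with a small fixture lexer (TestLexer / TestLexer2), fill a CommonTokenStream, and hand that stream to a TokenStreamRewriter. A minimal sketch of that shared setup is given here; the fixture-lexer import path and the helper method name are assumptions for illustration, not part of the original examples.
import unittest

from antlr4 import InputStream, CommonTokenStream
from antlr4.TokenStreamRewriter import TokenStreamRewriter

# TestLexer / TestLexer2 are tiny fixture lexers from the ANTLR runtime test
# suite; the import path below is an assumption -- any generated lexer whose
# tokens line up with the inputs used here would behave the same way.
from mocks.TestLexer import TestLexer, TestLexer2


class TestTokenStreamRewriter(unittest.TestCase):

    def make_rewriter(self, text, lexer_class=TestLexer):
        # Hypothetical helper mirroring the setup repeated in every example:
        # lex the text, fill the token stream, and wrap it in a rewriter.
        lexer = lexer_class(InputStream(text))
        stream = CommonTokenStream(lexer=lexer)
        stream.fill()
        return TokenStreamRewriter(tokens=stream)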
Example 1: testInsertAfterLastIndex
def testInsertAfterLastIndex(self):
    input = InputStream('abc')
    lexer = TestLexer(input)
    stream = CommonTokenStream(lexer=lexer)
    stream.fill()
    rewriter = TokenStreamRewriter(tokens=stream)
    # Inserting after an index beyond the last token simply appends at the end.
    rewriter.insertAfter(10, 'x')
    self.assertEqual(rewriter.getDefaultText(), 'abcx')
Example 2: testInsertBeforeIndexZero
def testInsertBeforeIndexZero(self):
    input = InputStream('abc')
    lexer = TestLexer(input)
    stream = CommonTokenStream(lexer=lexer)
    stream.fill()
    rewriter = TokenStreamRewriter(tokens=stream)
    rewriter.insertBeforeIndex(0, '0')
    self.assertEqual(rewriter.getDefaultText(), '0abc')
Example 3: testReplaceSubsetThenFetch
def testReplaceSubsetThenFetch(self):
    input = InputStream('abcccba')
    lexer = TestLexer(input)
    stream = CommonTokenStream(lexer=lexer)
    stream.fill()
    rewriter = TokenStreamRewriter(tokens=stream)
    rewriter.replaceRange(2, 4, 'xyz')
    self.assertEqual('abxyzba', rewriter.getDefaultText())
Example 4: testReplaceMiddleIndex
def testReplaceMiddleIndex(self):
    input = InputStream('abc')
    lexer = TestLexer(input)
    stream = CommonTokenStream(lexer=lexer)
    stream.fill()
    rewriter = TokenStreamRewriter(tokens=stream)
    rewriter.replaceIndex(1, 'x')
    self.assertEqual(rewriter.getDefaultText(), 'axc')
Example 5: testReplaceAll
def testReplaceAll(self):
    input = InputStream('abcccba')
    lexer = TestLexer(input)
    stream = CommonTokenStream(lexer=lexer)
    stream.fill()
    rewriter = TokenStreamRewriter(tokens=stream)
    rewriter.replaceRange(0, 6, 'x')
    self.assertEqual('x', rewriter.getDefaultText())
Example 6: testCombineInserts
def testCombineInserts(self):
    input = InputStream('abc')
    lexer = TestLexer(input)
    stream = CommonTokenStream(lexer=lexer)
    stream.fill()
    rewriter = TokenStreamRewriter(tokens=stream)
    # Two inserts at the same index are combined; the later text ends up first.
    rewriter.insertBeforeIndex(0, 'x')
    rewriter.insertBeforeIndex(0, 'y')
    self.assertEqual('yxabc', rewriter.getDefaultText())
Example 7: testOverlappingReplace4
def testOverlappingReplace4(self):
    input = InputStream('abcc')
    lexer = TestLexer(input)
    stream = CommonTokenStream(lexer=lexer)
    stream.fill()
    rewriter = TokenStreamRewriter(tokens=stream)
    # The later, wider replace starting at the same index subsumes the earlier one.
    rewriter.replaceRange(1, 2, 'foo')
    rewriter.replaceRange(1, 3, 'bar')
    self.assertEqual('abar', rewriter.getDefaultText())
Example 8: testDropIdenticalReplace
def testDropIdenticalReplace(self):
    input = InputStream('abcc')
    lexer = TestLexer(input)
    stream = CommonTokenStream(lexer=lexer)
    stream.fill()
    rewriter = TokenStreamRewriter(tokens=stream)
    # An identical second replace over the same range is silently dropped.
    rewriter.replaceRange(1, 2, 'foo')
    rewriter.replaceRange(1, 2, 'foo')
    self.assertEqual('afooc', rewriter.getDefaultText())
Example 9: testToStringStartStop
def testToStringStartStop(self):
    input = InputStream('x = 3 * 0;')
    lexer = TestLexer2(input)
    stream = CommonTokenStream(lexer=lexer)
    stream.fill()
    rewriter = TokenStreamRewriter(tokens=stream)
    rewriter.replaceRange(4, 8, '0')
    self.assertEqual(rewriter.getDefaultText(), 'x = 0;')
    self.assertEqual(rewriter.getText('default', 0, 9), 'x = 0;')
    self.assertEqual(rewriter.getText('default', 4, 8), '0')
Example 10: testPreservesOrderOfContiguousInserts
def testPreservesOrderOfContiguousInserts(self):
    """
    Test for fix for: https://github.com/antlr/antlr4/issues/550
    """
    input = InputStream('aa')
    lexer = TestLexer(input)
    stream = CommonTokenStream(lexer=lexer)
    stream.fill()
    rewriter = TokenStreamRewriter(tokens=stream)
    rewriter.insertBeforeIndex(0, '<b>')
    rewriter.insertAfter(0, '</b>')
    rewriter.insertBeforeIndex(1, '<b>')
    rewriter.insertAfter(1, '</b>')
    self.assertEqual('<b>a</b><b>a</b>', rewriter.getDefaultText())
Example 11: testReplaceThenReplaceLowerIndexedSuperset
def testReplaceThenReplaceLowerIndexedSuperset(self):
    input = InputStream('abcccba')
    lexer = TestLexer(input)
    stream = CommonTokenStream(lexer=lexer)
    stream.fill()
    rewriter = TokenStreamRewriter(tokens=stream)
    # A later replace that only partially overlaps an earlier one is rejected
    # when the text is rendered.
    rewriter.replaceRange(2, 4, 'xyz')
    rewriter.replaceRange(1, 3, 'foo')
    with self.assertRaises(ValueError) as ctx:
        rewriter.getDefaultText()
    msg = str(ctx.exception)
    self.assertEqual(
        """replace op boundaries of <ReplaceOp@[@1,1:1='b',<2>,1:1]..[@3,3:3='c',<3>,1:3]:"foo"> overlap with previous <ReplaceOp@[@2,2:2='c',<3>,1:2]..[@4,4:4='c',<3>,1:4]:"xyz">""",
        msg
    )
Example 12: testOverlappingReplace2
def testOverlappingReplace2(self):
    input = InputStream('abc')
    lexer = TestLexer(input)
    stream = CommonTokenStream(lexer=lexer)
    stream.fill()
    rewriter = TokenStreamRewriter(tokens=stream)
    # A later replace nested inside an earlier one also triggers an error.
    rewriter.replaceRange(0, 3, 'bar')
    rewriter.replaceRange(1, 2, 'foo')
    with self.assertRaises(ValueError) as ctx:
        rewriter.getDefaultText()
    self.assertEqual(
        """replace op boundaries of <ReplaceOp@[@1,1:1='b',<2>,1:1]..[@2,2:2='c',<3>,1:2]:"foo"> overlap with previous <ReplaceOp@[@0,0:0='a',<1>,1:0]..[@3,3:2='<EOF>',<-1>,1:3]:"bar">""",
        str(ctx.exception)
    )
Example 13: testInsertBeforeTokenThenDeleteThatToken
def testInsertBeforeTokenThenDeleteThatToken(self):
    input = InputStream('abc')
    lexer = TestLexer(input)
    stream = CommonTokenStream(lexer=lexer)
    stream.fill()
    rewriter = TokenStreamRewriter(tokens=stream)
    # An insert before a token survives even when that token is later replaced.
    rewriter.insertBeforeIndex(1, 'foo')
    rewriter.replaceRange(1, 2, 'foo')
    self.assertEqual('afoofoo', rewriter.getDefaultText())
Example 14: testLeaveAloneDisjointInsert2
def testLeaveAloneDisjointInsert2(self):
    input = InputStream('abcc')
    lexer = TestLexer(input)
    stream = CommonTokenStream(lexer=lexer)
    stream.fill()
    rewriter = TokenStreamRewriter(tokens=stream)
    rewriter.replaceRange(2, 3, 'foo')
    rewriter.insertBeforeIndex(1, 'x')
    self.assertEqual('axbfoo', rewriter.getDefaultText())
Example 15: testInsertThenReplaceSameIndex
def testInsertThenReplaceSameIndex(self):
    input = InputStream('abc')
    lexer = TestLexer(input)
    stream = CommonTokenStream(lexer=lexer)
    stream.fill()
    rewriter = TokenStreamRewriter(tokens=stream)
    # The insert before index 0 is kept, and the token itself is replaced.
    rewriter.insertBeforeIndex(0, '0')
    rewriter.replaceIndex(0, 'x')
    self.assertEqual('0xbc', rewriter.getDefaultText())
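Outside of a test suite, the same calls drive ordinary token-stream rewriting. The short sketch below strings together methods already shown above (insertBeforeIndex, replaceIndex, insertAfter, getDefaultText) on the 'abc' fixture input; it reuses the TestLexer fixture from the setup sketch and is an illustrative assumption, not part of the original examples.
def rewrite_abc():
    # Reuses the fixture lexer from the setup sketch above (an assumption).
    stream = CommonTokenStream(TestLexer(InputStream('abc')))
    stream.fill()
    rewriter = TokenStreamRewriter(tokens=stream)
    rewriter.insertBeforeIndex(0, '<<')  # before token 'a'
    rewriter.replaceIndex(1, 'B')        # replace token 'b'
    rewriter.insertAfter(2, '>>')        # after token 'c'
    # The underlying token stream is untouched; the edits are only applied
    # when the text is rendered.
    return rewriter.getDefaultText()     # '<<aBc>>'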