本文整理汇总了Java中org.antlr.v4.runtime.UnbufferedTokenStream类的典型用法代码示例。如果您正苦于以下问题:Java UnbufferedTokenStream类的具体用法?Java UnbufferedTokenStream怎么用?Java UnbufferedTokenStream使用的例子?那么恭喜您, 这里精选的类代码示例或许可以为您提供帮助。
UnbufferedTokenStream类属于org.antlr.v4.runtime包,在下文中一共展示了UnbufferedTokenStream类的7个代码示例,这些例子默认根据受欢迎程度排序。您可以为喜欢或者感觉有用的代码点赞,您的评价将有助于系统推荐出更棒的Java代码示例。
示例1: compileToExpressionTree
import org.antlr.v4.runtime.CommonToken;
import org.antlr.v4.runtime.CommonTokenFactory;
import org.antlr.v4.runtime.UnbufferedTokenStream; //导入依赖的package包/类
@Override
public TransformExpressionTree compileToExpressionTree(String expression) {
    // Lex from the raw expression text; copy token text eagerly so that
    // Token#getText works even though the token stream below is unbuffered.
    final PQL2Lexer pql2Lexer = new PQL2Lexer(new ANTLRInputStream(expression));
    pql2Lexer.setTokenFactory(new CommonTokenFactory(true));

    // Bail on the first syntax error rather than attempting error recovery.
    final PQL2Parser pql2Parser = new PQL2Parser(new UnbufferedTokenStream<CommonToken>(pql2Lexer));
    pql2Parser.setErrorHandler(new BailErrorStrategy());

    // Parse the expression, then walk the parse tree to build the AST.
    final ParseTree tree = pql2Parser.expression();
    final Pql2AstListener astListener = new Pql2AstListener(expression, _splitInClause);
    new ParseTreeWalker().walk(astListener, tree);

    return TransformExpressionTree.buildTree(astListener.getRootNode());
}
示例2: parseWhileAqlExpression
import org.antlr.v4.runtime.UnbufferedTokenStream; //导入依赖的package包/类
/**
 * Parses while matching an AQL expression.
 *
 * @param expression
 *            the expression to parse
 * @return the corresponding {@link AstResult}
 */
private AstResult parseWhileAqlExpression(String expression) {
    final IQueryBuilderEngine.AstResult result;
    if (expression == null || expression.length() == 0) {
        // No parsable input: produce an AstResult carrying a single error expression.
        final ErrorExpression errorExpression = (ErrorExpression) EcoreUtil
                .create(AstPackage.eINSTANCE.getErrorExpression());
        final List<org.eclipse.acceleo.query.ast.Error> errors = new ArrayList<org.eclipse.acceleo.query.ast.Error>(1);
        errors.add(errorExpression);
        final Map<Object, Integer> positions = new HashMap<Object, Integer>();
        if (expression != null) {
            // Empty (but non-null) input: position the error at offset 0.
            positions.put(errorExpression, Integer.valueOf(0));
        }
        final BasicDiagnostic diagnostic = new BasicDiagnostic();
        diagnostic.add(new BasicDiagnostic(Diagnostic.ERROR, AstBuilderListener.PLUGIN_ID, 0,
                "null or empty string.", new Object[] {errorExpression }));
        result = new AstResult(errorExpression, positions, positions, errors, diagnostic);
    } else {
        final AstBuilderListener astBuilder = AQL56Compatibility.createAstBuilderListener(queryEnvironment);
        final QueryLexer lexer = new QueryLexer(
                new UnbufferedCharStream(new StringReader(expression), expression.length()));
        // Copy text into each token: needed because the token stream below is unbuffered.
        lexer.setTokenFactory(new CommonTokenFactory(true));
        lexer.removeErrorListeners();
        lexer.addErrorListener(astBuilder.getErrorListener());
        final QueryParser parser = new QueryParser(new UnbufferedTokenStream<CommonToken>(lexer));
        // The AST builder listens to parse events and collects errors as they occur.
        parser.addParseListener(astBuilder);
        parser.removeErrorListeners();
        parser.addErrorListener(astBuilder.getErrorListener());
        // parser.setTrace(true);
        parser.expression();
        result = astBuilder.getAstResult();
    }
    return result;
}
示例3: parseWhileAqlTypeLiteral
import org.antlr.v4.runtime.UnbufferedTokenStream; //导入依赖的package包/类
/**
 * Parses while matching an AQL type literal.
 *
 * @param queryEnvironment
 * the {@link IReadOnlyQueryEnvironment}
 * @param type
 * the type to parse
 * @return the corresponding {@link AstResult}
 */
private AstResult parseWhileAqlTypeLiteral(IReadOnlyQueryEnvironment queryEnvironment, String type) {
final IQueryBuilderEngine.AstResult result;
if (type != null && type.length() > 0) {
AstBuilderListener astBuilder = AQL56Compatibility
.createAstBuilderListener((IQueryEnvironment) queryEnvironment);
CharStream input = new UnbufferedCharStream(new StringReader(type), type.length());
QueryLexer lexer = new QueryLexer(input);
// Copy text into each token — presumably required because the unbuffered token
// stream below cannot serve Token#getText from the stream. TODO confirm.
lexer.setTokenFactory(new CommonTokenFactory(true));
// Route lexer errors to the AST builder's listener instead of the console.
lexer.removeErrorListeners();
lexer.addErrorListener(astBuilder.getErrorListener());
TokenStream tokens = new UnbufferedTokenStream<CommonToken>(lexer);
QueryParser parser = new QueryParser(tokens);
// The AST builder listens to parse events and incrementally builds the result.
parser.addParseListener(astBuilder);
parser.removeErrorListeners();
parser.addErrorListener(astBuilder.getErrorListener());
// parser.setTrace(true);
parser.typeLiteral();
result = astBuilder.getAstResult();
} else {
// null or empty input: build an AstResult carrying a single ErrorTypeLiteral.
ErrorTypeLiteral errorTypeLiteral = (ErrorTypeLiteral) EcoreUtil
.create(AstPackage.eINSTANCE.getErrorTypeLiteral());
List<org.eclipse.acceleo.query.ast.Error> errors = new ArrayList<org.eclipse.acceleo.query.ast.Error>(1);
errors.add(errorTypeLiteral);
final Map<Object, Integer> positions = new HashMap<Object, Integer>();
if (type != null) {
// Empty (but non-null) input: position the error at offset 0.
positions.put(errorTypeLiteral, Integer.valueOf(0));
}
final BasicDiagnostic diagnostic = new BasicDiagnostic();
diagnostic.add(new BasicDiagnostic(Diagnostic.ERROR, AstBuilderListener.PLUGIN_ID, 0, "null or empty type.",
new Object[] {errorTypeLiteral }));
result = new AstResult(errorTypeLiteral, positions, positions, errors, diagnostic);
}
return result;
}
示例4: fromString
import org.antlr.v4.runtime.UnbufferedTokenStream; //导入依赖的package包/类
/**
 * Parses {@code pattern} into a {@link StringTemplate}.
 *
 * @param pattern the template source text
 * @return the parsed template
 * @throws IllegalArgumentException if the pattern contains syntax errors
 */
public static StringTemplate fromString(String pattern) {
    // Collects syntax errors as "line:col: message" strings for later reporting.
    class DescriptiveErrorListener extends BaseErrorListener {
        public List<String> errors = new ArrayList<>();

        @Override
        public void syntaxError(Recognizer<?, ?> recognizer, Object offendingSymbol,
                int line, int charPositionInLine,
                String msg, RecognitionException e) {
            errors.add(String.format("%d:%d: %s", line, charPositionInLine, msg));
        }
    }

    final DescriptiveErrorListener error_listener = new DescriptiveErrorListener();
    final StringSubstitutionLexer lexer = new StringSubstitutionLexer(CharStreams.fromString(pattern));
    // Copy text into each token: UnbufferedTokenStream does not support the
    // TokenStream#size lookups that Token#getText would otherwise perform.
    lexer.setTokenFactory(new CommonTokenFactory(true));
    lexer.removeErrorListeners();
    lexer.addErrorListener(error_listener);
    // Parameterized token stream (was a raw UnbufferedTokenStream).
    final StringSubstitutionParser parser =
            new StringSubstitutionParser(new UnbufferedTokenStream<CommonToken>(lexer));
    parser.removeErrorListeners();
    parser.addErrorListener(error_listener);
    parser.setErrorHandler(new BailErrorStrategy()); // fail fast, no error recovery
    final StringSubstitutionParser.ExprContext result = parser.expr();
    if (result.exception != null) {
        throw new IllegalArgumentException("errors during parsing: " + pattern, result.exception);
    } else if (!error_listener.errors.isEmpty()) {
        throw new IllegalArgumentException("syntax errors during parsing:\n"
                + error_listener.errors.stream().map(s -> " " + s).collect(Collectors.joining("\n")));
    }
    return result.s;
}
示例5: parse
import org.antlr.v4.runtime.UnbufferedTokenStream; //导入依赖的package包/类
/**
 * Parses a collectd tag pattern into its name/value pairs.
 *
 * @param pattern the tag pattern text
 * @return a map from tag name to its string-or-number value
 * @throws IllegalArgumentException if the pattern contains syntax errors
 */
public static Map<String, Any2<String, Number>> parse(String pattern) {
    // Collects syntax errors as "line:col: message" strings for later reporting.
    class DescriptiveErrorListener extends BaseErrorListener {
        public List<String> errors = new ArrayList<>();

        @Override
        public void syntaxError(Recognizer<?, ?> recognizer, Object offendingSymbol,
                int line, int charPositionInLine,
                String msg, RecognitionException e) {
            errors.add(String.format("%d:%d: %s", line, charPositionInLine, msg));
        }
    }

    final DescriptiveErrorListener error_listener = new DescriptiveErrorListener();
    final CollectdTagsLexer lexer = new CollectdTagsLexer(new ANTLRInputStream(pattern));
    lexer.removeErrorListeners();
    lexer.addErrorListener(error_listener);
    // Parameterized token stream (was a raw UnbufferedTokenStream).
    final CollectdTagsParser parser = new CollectdTagsParser(new UnbufferedTokenStream<CommonToken>(lexer));
    parser.removeErrorListeners();
    parser.addErrorListener(error_listener);
    parser.setErrorHandler(new BailErrorStrategy()); // fail fast, no error recovery
    final CollectdTagsParser.ExprContext result = parser.expr();
    if (result.exception != null) {
        throw new IllegalArgumentException("errors during parsing: " + pattern, result.exception);
    } else if (!error_listener.errors.isEmpty()) {
        throw new IllegalArgumentException("syntax errors during parsing:\n"
                + error_listener.errors.stream().map(s -> " " + s).collect(Collectors.joining("\n")));
    }
    return result.result;
}
示例6: parseSQL
import org.antlr.v4.runtime.UnbufferedTokenStream; //导入依赖的package包/类
private static ParseTree parseSQL(String sql) {
    // Lex the SQL text; copy token text eagerly because the token stream is unbuffered.
    final RQLLexer rqlLexer = new RQLLexer(new ANTLRInputStream(sql));
    rqlLexer.setTokenFactory(new CommonTokenFactory(true));

    // Bail on the first syntax error rather than attempting error recovery.
    final RQLParser rqlParser = new RQLParser(new UnbufferedTokenStream<CommonToken>(rqlLexer));
    rqlParser.setErrorHandler(new BailErrorStrategy());
    return rqlParser.root();
}
示例7: parseList
import org.antlr.v4.runtime.UnbufferedTokenStream; //导入依赖的package包/类
/**
 * Lazily parses a sequence of queries from the given reader.
 *
 * @param reader a reader representing several queries
 * @return a lazy {@link Stream} of the parsed queries (not a materialized list)
 */
@Override
public <T extends Query<?>> Stream<T> parseList(Reader reader) {
    UnbufferedCharStream charStream = new UnbufferedCharStream(reader);
    GraqlErrorListener errorListener = GraqlErrorListener.withoutQueryString();
    GraqlLexer lexer = createLexer(charStream, errorListener);
    /*
    We tell the lexer to copy the text into each generated token.
    Normally when calling `Token#getText`, it will look into the underlying `TokenStream` and call
    `TokenStream#size` to check it is in-bounds. However, `UnbufferedTokenStream#size` is not supported
    (because then it would have to read the entire input). To avoid this issue, we set this flag which will
    copy over the text into each `Token`, s.t. `Token#getText` will just look up the copied text field.
    */
    lexer.setTokenFactory(new CommonTokenFactory(true));
    // Use an unbuffered token stream so we can handle extremely large input strings.
    // Parameterized with CommonToken (the factory above produces CommonTokens) instead of the raw type.
    UnbufferedTokenStream<CommonToken> tokenStream = new UnbufferedTokenStream<>(ChannelTokenSource.of(lexer));
    GraqlParser parser = createParser(tokenStream, errorListener);
    /*
    The "bail" error strategy prevents us reading all the way to the end of the input, e.g.
    ```
    match $x isa person; insert $x has name "Bob"; match $x isa movie; get;
                                                   ^
    ```
    In this example, when ANTLR reaches the indicated `match`, it considers two possibilities:
    1. this is the end of the query
    2. the user has made a mistake. Maybe they accidentally pasted the `match` here.
    Because of case 2, ANTLR will parse beyond the `match` in order to produce a more helpful error message.
    This causes memory issues for very large queries, so we use the simpler "bail" strategy that will
    immediately stop when it hits `match`.
    */
    parser.setErrorHandler(new BailErrorStrategy());
    // This is a lazy iterator that will only consume a single query at a time, without parsing any further.
    // This means it can pass arbitrarily long streams of queries in constant memory!
    Iterable<T> queryIterator = () -> new AbstractIterator<T>() {
        @Nullable
        @Override
        protected T computeNext() {
            int latestToken = tokenStream.LA(1);
            if (latestToken == Token.EOF) {
                endOfData();
                return null;
            } else {
                // This will parse and consume a single query, even if it doesn't reach an EOF.
                // When we next run it, it will start where it left off in the stream.
                // Unchecked cast: QUERY.parse yields the parsed query; the caller fixes T.
                return (T) QUERY.parse(parser, errorListener);
            }
        }
    };
    return StreamSupport.stream(queryIterator.spliterator(), false);
}