This article collects typical usage examples of the Java method org.antlr.v4.runtime.misc.ParseCancellationException.getCause. If you are wondering what ParseCancellationException.getCause does, how to call it, or what real-world uses look like, the curated code examples below should help. You can also read more about the usage of its declaring class, org.antlr.v4.runtime.misc.ParseCancellationException.
The following 13 code examples of ParseCancellationException.getCause are shown below, sorted by popularity by default.
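All of the examples share the same underlying pattern: the parser is configured with org.antlr.v4.runtime.BailErrorStrategy, which aborts at the first syntax error by throwing a ParseCancellationException whose cause is the underlying RecognitionException (often an InputMismatchException); getCause() is then inspected to build an error message, translate the failure into a domain-specific exception, or retry parsing with a different strategy. The minimal sketch below illustrates that pattern in isolation; MyLexer, MyParser, and startRule are hypothetical stand-ins for an ANTLR-generated lexer/parser pair and its entry rule, not classes from any of the projects featured in the examples.

import org.antlr.v4.runtime.ANTLRInputStream;
import org.antlr.v4.runtime.BailErrorStrategy;
import org.antlr.v4.runtime.CommonTokenStream;
import org.antlr.v4.runtime.RecognitionException;
import org.antlr.v4.runtime.Token;
import org.antlr.v4.runtime.misc.ParseCancellationException;

public class BailParseSketch {
    public static void parseOrReport(String source) {
        // MyLexer and MyParser are placeholders for any ANTLR 4 generated lexer/parser pair.
        MyLexer lexer = new MyLexer(new ANTLRInputStream(source));
        MyParser parser = new MyParser(new CommonTokenStream(lexer));
        parser.removeErrorListeners();
        // BailErrorStrategy wraps the first RecognitionException in a ParseCancellationException.
        parser.setErrorHandler(new BailErrorStrategy());
        try {
            parser.startRule(); // hypothetical entry rule
        } catch (ParseCancellationException e) {
            Throwable cause = e.getCause();
            if (cause instanceof RecognitionException) {
                Token offending = ((RecognitionException) cause).getOffendingToken();
                if (offending != null) {
                    System.err.printf("syntax error at %d:%d near '%s'%n",
                            offending.getLine(), offending.getCharPositionInLine(), offending.getText());
                }
            } else {
                throw e; // cancelled for some other reason; rethrow unchanged
            }
        }
    }
}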
Example 1: getErrorMessage
import org.antlr.v4.runtime.misc.ParseCancellationException; // import the package/class this method depends on
public String getErrorMessage(ParseCancellationException error) {
    if (error.getCause() != null) {
        String message = error.getCause().getMessage();
        if (error.getCause() instanceof SemanticException) {
            SemanticException semanticException = (SemanticException) error.getCause();
            if (semanticException.getNode() != null) {
                TerminalNode startNode = getStartNode(semanticException.getNode());
                if (startNode != null) {
                    String prefix = String.format("[line:%d, col:%d] ", startNode.getSymbol().getLine(),
                            startNode.getSymbol().getCharPositionInLine());
                    message = prefix + message;
                }
            }
            return message;
        } else if (error.getCause() instanceof RecognitionException) {
            return getErrorMessage((RecognitionException) error.getCause());
        } else {
            return error.getCause().getMessage();
        }
    }
    return error.getMessage();
}
Example 2: compile
import org.antlr.v4.runtime.misc.ParseCancellationException; // import the package/class this method depends on
public Walk compile( CharStream input, ErrorHandler errors ) {
    try {
        return compile( input );
    } catch (ParseCancellationException ex) {
        int line = ErrorHandler.UNKNOWN;
        int column = ErrorHandler.UNKNOWN;
        String msg = "Parser Cancelled.";
        Throwable cause = ex.getCause();
        if( cause instanceof InputMismatchException ) {
            InputMismatchException immEx = (InputMismatchException) cause;
            Token offender = immEx.getOffendingToken();
            if( offender != null ) {
                line = offender.getLine();
                column = offender.getCharPositionInLine();
                String txt = offender.getText();
                if(txt != null) {
                    msg = " Unexpected Token '" + txt + "'.";
                }
            }
        }
        errors.parseError( line, column, msg );
    }
    return getProgram();
}
Example 3: parse
import org.antlr.v4.runtime.misc.ParseCancellationException; // import the package/class this method depends on
private TemplateContext parse(Reader reader) throws IOException, ParseException {
    ANTLRInputStream inputStream = new ANTLRInputStream(reader);
    StencilLexer lexer = new StencilLexer(inputStream);
    CommonTokenStream tokenStream = new CommonTokenStream(lexer);
    StencilParser parser = new StencilParser(tokenStream);
    parser.setErrorHandler(new StencilErrorStrategy());
    try {
        return parser.template();
    }
    catch(ParseCancellationException e) {
        RecognitionException re = (RecognitionException) e.getCause();
        throw new ParseException("syntax error at " + re.getOffendingToken().getText(), re.getOffendingToken().getLine(), re.getOffendingToken().getCharPositionInLine()+1);
    }
}
Example 4: execute
import org.antlr.v4.runtime.misc.ParseCancellationException; // import the package/class this method depends on
public <T extends ExecutionResult> T execute() {
    assertNotNull(arguments, "Arguments can't be null");
    LOGGER.info("Executing request. Operation name: {}. Request: {} ", operationName, requestQuery);
    Parser parser = new Parser();
    Document document;
    try {
        document = parser.parseDocument(requestQuery);
    } catch (ParseCancellationException e) {
        RecognitionException recognitionException = (RecognitionException) e.getCause();
        SourceLocation sourceLocation = new SourceLocation(recognitionException.getOffendingToken().getLine(), recognitionException.getOffendingToken().getCharPositionInLine());
        InvalidSyntaxError invalidSyntaxError = new InvalidSyntaxError(sourceLocation);
        return (T) new GraphQLRxExecutionResult(Observable.just(null), Observable.just(Arrays.asList(invalidSyntaxError)));
    }
    Validator validator = new Validator();
    List<ValidationError> validationErrors = validator.validateDocument(graphQLSchemaHolder.getGraphQLSchema(), document);
    if (validationErrors.size() > 0) {
        return (T) new GraphQLRxExecutionResult(Observable.just(null), Observable.just(validationErrors));
    }
    if (executionStrategy == null) {
        if (executorService == null) {
            executionStrategy = new GraphQLDefaultRxExecutionStrategy(graphQLSchemaHolder, maxQueryDepth, maxQueryComplexity);
        } else {
            executionStrategy = new GraphQLExecutorServiceRxExecutionStrategy(graphQLSchemaHolder, executorService, maxQueryDepth, maxQueryComplexity);
        }
    }
    RxExecution execution = new RxExecution(graphQLSchemaHolder, maxQueryDepth, maxQueryComplexity, executionStrategy);
    ExecutionResult executionResult = execution.execute(graphQLSchemaHolder.getGraphQLSchema(), context, document, operationName, arguments);
    return (T) (executionResult instanceof GraphQLRxExecutionResult ?
            executionResult : new GraphQLRxExecutionResult(Observable.just(executionResult.getData()), Observable.just(executionResult.getErrors())));
}
Example 5: parse
import org.antlr.v4.runtime.misc.ParseCancellationException; // import the package/class this method depends on
private QueryListener parse(Function<HeroicQueryParser, ParserRuleContext> op, String input) {
    final HeroicQueryLexer lexer = new HeroicQueryLexer(new ANTLRInputStream(input));
    final CommonTokenStream tokens = new CommonTokenStream(lexer);
    final HeroicQueryParser parser = new HeroicQueryParser(tokens);
    parser.removeErrorListeners();
    parser.setErrorHandler(new BailErrorStrategy());
    final ParserRuleContext context;
    try {
        context = op.apply(parser);
    } catch (final ParseCancellationException e) {
        if (!(e.getCause() instanceof RecognitionException)) {
            throw e;
        }
        throw toParseException((RecognitionException) e.getCause());
    }
    final QueryListener listener = new QueryListener();
    ParseTreeWalker.DEFAULT.walk(listener, context);
    final Token last = lexer.getToken();
    if (last.getType() != Token.EOF) {
        throw new ParseException(
            String.format("garbage at end of string: '%s'", last.getText()), null,
            last.getLine(), last.getCharPositionInLine());
    }
    return listener;
}
Example 6: run
import org.antlr.v4.runtime.misc.ParseCancellationException; // import the package/class this method depends on
public String run(String inputString) throws Throwable {
    ANTLRInputStream input = new ANTLRInputStream(inputString);
    MainLexer lexer = new MainLexer(input);
    CommonTokenStream tokens = new CommonTokenStream(lexer);
    MainParser parser = new MainParser(tokens);
    parser.removeErrorListeners();
    parser.setErrorHandler(new BailErrorStrategy());
    ParseTree tree = null;
    String output = "";
    StringBuilder error = new StringBuilder();
    try {
        tree = parser.source();
        output = this.visit(tree);
    } catch (ParseCancellationException pce) {
        Throwable cause = pce.getCause();
        if (cause instanceof RecognitionException) {
            RecognitionException re = (RecognitionException) cause;
            error.append(com.github.charmoniumq.assembler.backend.ErrorMessages.recognitionException(re));
        } else if (cause instanceof PostProcessError) {
            PostProcessError us = (PostProcessError) cause;
            error.append(com.github.charmoniumq.assembler.backend.ErrorMessages.undefinedSymbol(us));
        } else if (cause instanceof com.github.charmoniumq.assembler.backend.InternalError) {
            error.append("Internal error\nContact the maintainer.\nPresent your source code, assembler version, and the following message:\n");
            error.append(cause.getMessage() + "\n");
            // no stack trace necessary, since the location of an error can be found by the message text
        } else {
            error.append( "Unknown error\nContact the maintainer.\nPresent your source code, assembler version, and the following message:\n");
            error.append(Utils.stackTraceString(pce));
            error.append(Utils.stackTraceString(cause));
        }
        Throwable monkeyPoop = new Throwable(error.toString());
        throw monkeyPoop;
    }
    return output;
}
Example 7: ResourceQuery
import org.antlr.v4.runtime.misc.ParseCancellationException; // import the package/class this method depends on
public ResourceQuery(KMContextImpl context, String query, Resource baseResource) throws InvalidQueryException {
    this.context = context;
    KloudmakeLangParser parser = new KloudmakeLangParser(new CommonTokenStream(new KloudmakeLangLexer(new ANTLRInputStream(query))));
    parser.setErrorHandler(new BailErrorStrategy());
    try {
        expression = Expression.create(parser.query().queryExpression(), query, context, baseResource);
    } catch (ParseCancellationException e) {
        RecognitionException cause = (RecognitionException) e.getCause();
        throw new InvalidQueryException(cause.getOffendingToken(), query);
    }
}
Example 8: handleException
import org.antlr.v4.runtime.misc.ParseCancellationException; // import the package/class this method depends on
private DSLScript handleException(ParseCancellationException e) throws InvalidScriptException {
    if (e.getCause() instanceof InputMismatchException) {
        InputMismatchException cause = (InputMismatchException) e.getCause();
        Token offendingToken = cause.getOffendingToken();
        throw new InvalidScriptException(offendingToken.getLine() + ":" + offendingToken.getCharPositionInLine(), offendingToken.getText(), cause);
    } else {
        throw new InvalidScriptException(e.getMessage(), e);
    }
}
Example 9: parse
import org.antlr.v4.runtime.misc.ParseCancellationException; // import the package/class this method depends on
public Program parse(CharStream stream) throws IOException {
    /*
    // In order to require less memory: use unbuffered streams and avoid constructing a full parse tree.
    ASPCore2Lexer lexer = new ASPCore2Lexer(new UnbufferedCharStream(is));
    lexer.setTokenFactory(new CommonTokenFactory(true));
    final ASPCore2Parser parser = new ASPCore2Parser(new UnbufferedTokenStream<>(lexer));
    parser.setBuildParseTree(false);
    */
    CommonTokenStream tokens = new CommonTokenStream(
        new ASPCore2Lexer(stream)
    );
    final ASPCore2Parser parser = new ASPCore2Parser(tokens);
    // Try SLL parsing mode (faster but may terminate incorrectly).
    parser.getInterpreter().setPredictionMode(PredictionMode.SLL);
    parser.removeErrorListeners();
    parser.setErrorHandler(new BailErrorStrategy());
    final CustomErrorListener errorListener = new CustomErrorListener(stream.getSourceName());
    ASPCore2Parser.ProgramContext programContext;
    try {
        // Parse program
        programContext = parser.program();
    } catch (ParseCancellationException e) {
        // Recognition exception may be caused simply by SLL parsing failing,
        // retry with LL parser and DefaultErrorStrategy printing errors to console.
        if (e.getCause() instanceof RecognitionException) {
            tokens.seek(0);
            parser.addErrorListener(errorListener);
            parser.setErrorHandler(new DefaultErrorStrategy());
            parser.getInterpreter().setPredictionMode(PredictionMode.LL);
            // Re-run parse.
            programContext = parser.program();
        } else {
            throw e;
        }
    }
    // If our SwallowingErrorListener has handled some exception during parsing
    // just re-throw that exception.
    // At this time, error messages will be already printed out to standard error
    // because ANTLR by default adds an org.antlr.v4.runtime.ConsoleErrorListener
    // to every parser.
    // That ConsoleErrorListener will print useful messages, but not report back to
    // our code.
    // org.antlr.v4.runtime.BailErrorStrategy cannot be used here, because it would
    // abruptly stop parsing as soon as the first error is reached (i.e. no recovery
    // is attempted) and the user will only see the first error encountered.
    if (errorListener.getRecognitionException() != null) {
        throw errorListener.getRecognitionException();
    }
    // Abort parsing if there were some (recoverable) syntax errors.
    if (parser.getNumberOfSyntaxErrors() != 0) {
        throw new ParseCancellationException();
    }
    // Construct internal program representation.
    ParseTreeVisitor visitor = new ParseTreeVisitor(externals);
    return visitor.translate(programContext);
}
Example 10: parse
import org.antlr.v4.runtime.misc.ParseCancellationException; // import the package/class this method depends on
@Override
@RuleDependency(recognizer=GrammarParser.class, rule=GrammarParser.RULE_ruleSpec, version=0, dependents=Dependents.SELF)
public void parse(ParserTaskManager taskManager, ParseContext context, DocumentSnapshot snapshot, Collection<? extends ParserDataDefinition<?>> requestedData, ParserResultHandler results)
        throws InterruptedException, ExecutionException {
    if (requestedData.contains(GrammarParserDataDefinitions.CURRENT_RULE_CONTEXT)) {
        CurrentRuleContextData data = null;
        if (context.getPosition() != null) {
            int caretOffset = context.getPosition().getOffset();
            Future<ParserData<List<Anchor>>> result =
                taskManager.getData(snapshot, GrammarParserDataDefinitions.DYNAMIC_ANCHOR_POINTS, EnumSet.of(ParserDataOptions.SYNCHRONOUS));
            ParserData<List<Anchor>> anchorsData = result.get();
            List<Anchor> anchors = anchorsData.getData();
            GrammarParser.RuleSpecContext ruleContext = null;
            int grammarType = -1;
            Future<ParserData<FileModel>> fileModelResult =
                taskManager.getData(snapshot, GrammarParserDataDefinitions.FILE_MODEL, EnumSet.of(ParserDataOptions.ALLOW_STALE, ParserDataOptions.SYNCHRONOUS));
            ParserData<FileModel> fileModelData = fileModelResult.get();
            FileModel fileModel = fileModelData.getData();
            if (anchors != null) {
                Anchor enclosing = null;
                /*
                 * parse the current rule
                 */
                for (Anchor anchor : anchors) {
                    if (anchor instanceof GrammarParserAnchorListener.GrammarTypeAnchor) {
                        grammarType = ((GrammarParserAnchorListener.GrammarTypeAnchor)anchor).getGrammarType();
                        continue;
                    }
                    if (anchor.getSpan().getStartPosition(snapshot).getOffset() <= caretOffset && anchor.getSpan().getEndPosition(snapshot).getOffset() > caretOffset) {
                        enclosing = anchor;
                    } else if (anchor.getSpan().getStartPosition(snapshot).getOffset() > caretOffset) {
                        break;
                    }
                }
                if (enclosing != null) {
                    CharStream input = new DocumentSnapshotCharStream(snapshot);
                    input.seek(enclosing.getSpan().getStartPosition(snapshot).getOffset());
                    GrammarLexer lexer = new GrammarLexer(input);
                    CommonTokenStream tokens = new TaskTokenStream(lexer);
                    GrammarParser parser = GrammarParserFactory.DEFAULT.getParser(tokens);
                    try {
                        parser.getInterpreter().setPredictionMode(PredictionMode.SLL);
                        parser.removeErrorListeners();
                        parser.setBuildParseTree(true);
                        parser.setErrorHandler(new BailErrorStrategy());
                        ruleContext = parser.ruleSpec();
                    } catch (ParseCancellationException ex) {
                        if (ex.getCause() instanceof RecognitionException) {
                            // retry with default error handler
                            tokens.reset();
                            parser.getInterpreter().setPredictionMode(PredictionMode.LL);
                            parser.setInputStream(tokens);
                            parser.setErrorHandler(new DefaultErrorStrategy());
                            ruleContext = parser.ruleSpec();
                        } else {
                            throw ex;
                        }
                    }
                }
            }
            data = new CurrentRuleContextData(snapshot, grammarType, fileModel, ruleContext);
        }
        results.addResult(new BaseParserData<>(context, GrammarParserDataDefinitions.CURRENT_RULE_CONTEXT, snapshot, data));
    }
}
Example 11: parse
import org.antlr.v4.runtime.misc.ParseCancellationException; // import the package/class this method depends on
@Override
@RuleDependency(recognizer=GrammarParser.class, rule=GrammarParser.RULE_grammarSpec, version=0, dependents=Dependents.SELF)
public void parse(ParserTaskManager taskManager, ParseContext context, DocumentSnapshot snapshot, Collection<? extends ParserDataDefinition<?>> requestedData, ParserResultHandler results)
        throws InterruptedException, ExecutionException {
    boolean legacyMode = GrammarEditorKit.isLegacyMode(snapshot);
    if (legacyMode) {
        ParserData<List<Anchor>> emptyResult = new BaseParserData<>(context, GrammarParserDataDefinitions.REFERENCE_ANCHOR_POINTS, snapshot, null);
        results.addResult(emptyResult);
        return;
    }
    synchronized (lock) {
        ParserData<GrammarSpecContext> parseTreeResult = taskManager.getData(snapshot, GrammarParserDataDefinitions.REFERENCE_PARSE_TREE, EnumSet.of(ParserDataOptions.NO_UPDATE)).get();
        ParserData<List<Anchor>> anchorPointsResult = taskManager.getData(snapshot, GrammarParserDataDefinitions.REFERENCE_ANCHOR_POINTS, EnumSet.of(ParserDataOptions.NO_UPDATE)).get();
        ParserData<FileModel> fileModelResult = taskManager.getData(snapshot, GrammarParserDataDefinitions.FILE_MODEL, EnumSet.of(ParserDataOptions.NO_UPDATE)).get();
        if (parseTreeResult == null || anchorPointsResult == null || fileModelResult == null) {
            Future<ParserData<Tagger<TokenTag<Token>>>> futureTokensData = taskManager.getData(snapshot, GrammarParserDataDefinitions.LEXER_TOKENS);
            Tagger<TokenTag<Token>> tagger = futureTokensData.get().getData();
            TaggerTokenSource tokenSource = new TaggerTokenSource(tagger, snapshot);
            // DocumentSnapshotCharStream input = new DocumentSnapshotCharStream(snapshot);
            // input.setSourceName((String)document.getDocument().getProperty(Document.TitleProperty));
            // GrammarLexer lexer = new GrammarLexer(input);
            InterruptableTokenStream tokenStream = new InterruptableTokenStream(tokenSource);
            GrammarSpecContext parseResult;
            GrammarParser parser = GrammarParserFactory.DEFAULT.getParser(tokenStream);
            try {
                parser.getInterpreter().setPredictionMode(PredictionMode.SLL);
                parser.removeErrorListeners();
                parser.setBuildParseTree(true);
                parser.setErrorHandler(new BailErrorStrategy());
                parseResult = parser.grammarSpec();
            } catch (ParseCancellationException ex) {
                if (ex.getCause() instanceof RecognitionException) {
                    // retry with default error handler
                    tokenStream.reset();
                    parser.getInterpreter().setPredictionMode(PredictionMode.LL);
                    parser.addErrorListener(DescriptiveErrorListener.INSTANCE);
                    parser.setInputStream(tokenStream);
                    parser.setErrorHandler(new DefaultErrorStrategy());
                    parseResult = parser.grammarSpec();
                } else {
                    throw ex;
                }
            }
            parseTreeResult = new BaseParserData<>(context, GrammarParserDataDefinitions.REFERENCE_PARSE_TREE, snapshot, parseResult);
            if (anchorPointsResult == null && snapshot.getVersionedDocument().getDocument() != null) {
                GrammarParserAnchorListener listener = new GrammarParserAnchorListener(snapshot);
                ParseTreeWalker.DEFAULT.walk(listener, parseResult);
                anchorPointsResult = new BaseParserData<>(context, GrammarParserDataDefinitions.REFERENCE_ANCHOR_POINTS, snapshot, listener.getAnchors());
            }
            if (fileModelResult == null) {
                FileModelImpl fileModel = null;
                if (snapshot.getVersionedDocument().getFileObject() != null) {
                    CodeModelBuilderListener codeModelBuilderListener = new CodeModelBuilderListener(snapshot, tokenStream);
                    ParseTreeWalker.DEFAULT.walk(codeModelBuilderListener, parseResult);
                    fileModel = codeModelBuilderListener.getFileModel();
                    if (fileModel != null) {
                        updateCodeModelCache(fileModel);
                    }
                }
                fileModelResult = new BaseParserData<>(context, GrammarParserDataDefinitions.FILE_MODEL, snapshot, fileModel);
            }
        }
        results.addResult(parseTreeResult);
        results.addResult(fileModelResult);
        if (anchorPointsResult != null) {
            results.addResult(anchorPointsResult);
        }
    }
}
Example 12: parse
import org.antlr.v4.runtime.misc.ParseCancellationException; // import the package/class this method depends on
@Override
@RuleDependency(recognizer=TemplateParser.class, rule=TemplateParser.RULE_group, version=0, dependents=Dependents.SELF)
public void parse(ParserTaskManager taskManager, ParseContext context, DocumentSnapshot snapshot, Collection<? extends ParserDataDefinition<?>> requestedData, ParserResultHandler results)
        throws InterruptedException, ExecutionException {
    if (context.getPosition() == null) {
        return;
    }
    int caretOffset = context.getPosition().getOffset();
    Future<ParserData<List<Anchor>>> result =
        taskManager.getData(snapshot, TemplateParserDataDefinitions.DYNAMIC_ANCHOR_POINTS, EnumSet.of(ParserDataOptions.SYNCHRONOUS));
    ParserData<List<Anchor>> anchorsData = result.get();
    List<Anchor> anchors = anchorsData.getData();
    TemplateParser.GroupContext ruleContext = null;
    if (anchors != null) {
        Anchor enclosing = null;
        // int grammarType = -1;
        /*
         * parse the current template
         */
        for (Anchor anchor : anchors) {
            // if (anchor instanceof TemplateParserAnchorListener.TemplateTypeAnchor) {
            //     grammarType = ((TemplateParserAnchorListener.TemplateTypeAnchor)anchor).getGrammarType();
            //     continue;
            // }
            if (anchor.getSpan().getStartPosition(snapshot).getOffset() <= caretOffset && anchor.getSpan().getEndPosition(snapshot).getOffset() > caretOffset) {
                enclosing = anchor;
            } else if (anchor.getSpan().getStartPosition(snapshot).getOffset() > caretOffset) {
                break;
            }
        }
        if (enclosing != null) {
            CharStream input = new DocumentSnapshotCharStream(snapshot);
            input.seek(enclosing.getSpan().getStartPosition(snapshot).getOffset());
            TemplateLexer lexer = new TemplateLexer(input);
            CommonTokenStream tokens = new TaskTokenStream(lexer);
            TemplateParser parser = TemplateParserFactory.DEFAULT.getParser(tokens);
            try {
                parser.getInterpreter().setPredictionMode(PredictionMode.SLL);
                parser.removeErrorListeners();
                parser.setBuildParseTree(true);
                parser.setErrorHandler(new BailErrorStrategy());
                ruleContext = parser.group();
            } catch (ParseCancellationException ex) {
                if (ex.getCause() instanceof RecognitionException) {
                    // retry with default error handler
                    tokens.reset();
                    parser.getInterpreter().setPredictionMode(PredictionMode.LL);
                    parser.setInputStream(tokens);
                    parser.setErrorHandler(new DefaultErrorStrategy());
                    ruleContext = parser.group();
                } else {
                    throw ex;
                }
            }
        }
    }
    CurrentTemplateContextData data = new CurrentTemplateContextData(snapshot, ruleContext);
    results.addResult(new BaseParserData<>(context, TemplateParserDataDefinitions.CURRENT_TEMPLATE_CONTEXT, snapshot, data));
}
Example 13: parse
import org.antlr.v4.runtime.misc.ParseCancellationException; // import the package/class this method depends on
@Override
@RuleDependency(recognizer=TemplateParser.class, rule=TemplateParser.RULE_groupFile, version=4, dependents=Dependents.SELF)
public void parse(ParserTaskManager taskManager, ParseContext context, DocumentSnapshot snapshot, Collection<? extends ParserDataDefinition<?>> requestedData, ParserResultHandler results) throws InterruptedException, ExecutionException {
    synchronized (lock) {
        ParserData<GroupFileContext> parseTreeResult = taskManager.getData(snapshot, TemplateParserDataDefinitions.REFERENCE_PARSE_TREE, EnumSet.of(ParserDataOptions.NO_UPDATE)).get();
        ParserData<List<Anchor>> anchorPointsResult = taskManager.getData(snapshot, TemplateParserDataDefinitions.REFERENCE_ANCHOR_POINTS, EnumSet.of(ParserDataOptions.NO_UPDATE)).get();
        ParserData<FileModel> fileModelResult = taskManager.getData(snapshot, TemplateParserDataDefinitions.FILE_MODEL, EnumSet.of(ParserDataOptions.NO_UPDATE)).get();
        if (parseTreeResult == null || anchorPointsResult == null || fileModelResult == null) {
            Future<ParserData<Tagger<TokenTag<Token>>>> futureTokensData = taskManager.getData(snapshot, TemplateParserDataDefinitions.LEXER_TOKENS);
            Tagger<TokenTag<Token>> tagger = futureTokensData.get().getData();
            TaggerTokenSource tokenSource = new TaggerTokenSource(tagger, snapshot);
            // DocumentSnapshotCharStream input = new DocumentSnapshotCharStream(snapshot);
            // input.setSourceName((String)document.getDocument().getProperty(Document.TitleProperty));
            // GrammarLexer lexer = new GrammarLexer(input);
            InterruptableTokenStream tokenStream = new InterruptableTokenStream(tokenSource);
            TemplateParser.GroupFileContext parseResult;
            TemplateParser parser = TemplateParserFactory.DEFAULT.getParser(tokenStream);
            try {
                parser.getInterpreter().setPredictionMode(PredictionMode.SLL);
                parser.removeErrorListeners();
                parser.setBuildParseTree(true);
                parser.setErrorHandler(new BailErrorStrategy());
                parseResult = parser.groupFile();
            } catch (ParseCancellationException ex) {
                if (ex.getCause() instanceof RecognitionException) {
                    // retry with default error handler
                    tokenStream.reset();
                    parser.getInterpreter().setPredictionMode(PredictionMode.LL);
                    parser.addErrorListener(DescriptiveErrorListener.INSTANCE);
                    parser.setInputStream(tokenStream);
                    parser.setErrorHandler(new DefaultErrorStrategy());
                    parseResult = parser.groupFile();
                } else {
                    throw ex;
                }
            }
            parseTreeResult = new BaseParserData<>(context, TemplateParserDataDefinitions.REFERENCE_PARSE_TREE, snapshot, parseResult);
            if (anchorPointsResult == null && snapshot.getVersionedDocument().getDocument() != null) {
                TemplateParserAnchorListener listener = new TemplateParserAnchorListener(snapshot);
                ParseTreeWalker.DEFAULT.walk(listener, parseResult);
                anchorPointsResult = new BaseParserData<>(context, TemplateParserDataDefinitions.REFERENCE_ANCHOR_POINTS, snapshot, listener.getAnchors());
            }
            if (fileModelResult == null) {
                CodeModelBuilderListener codeModelBuilderListener = new CodeModelBuilderListener(snapshot, tokenStream);
                ParseTreeWalker.DEFAULT.walk(codeModelBuilderListener, parseResult);
                fileModelResult = new BaseParserData<>(context, TemplateParserDataDefinitions.FILE_MODEL, snapshot, codeModelBuilderListener.getFileModel());
            }
        }
        results.addResult(parseTreeResult);
        results.addResult(fileModelResult);
        if (anchorPointsResult != null) {
            results.addResult(anchorPointsResult);
        }
    }
}