This article collects typical usage examples of the Java field Token.EOF_TOKEN from org.antlr.runtime.Token. If you are wondering what Token.EOF_TOKEN is for and how to use it, the selected code examples below may help. You can also explore further usage examples of the containing class, org.antlr.runtime.Token.
The following shows 12 code examples that use Token.EOF_TOKEN, sorted by popularity by default.
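Before the examples, here is a minimal self-contained sketch (not taken from the examples below) of the pattern most of them rely on: pulling tokens from an ANTLR 3 lexer until Token.EOF_TOKEN is returned. The class and method names are hypothetical; any generated subclass of org.antlr.runtime.Lexer can be passed in.
import org.antlr.runtime.ANTLRStringStream;
import org.antlr.runtime.Lexer;
import org.antlr.runtime.Token;

public class EofTokenDemo {
    // Re-lexes the given input and prints each token until EOF_TOKEN is returned.
    static void printTokens(Lexer lexer, String input) {
        lexer.setCharStream(new ANTLRStringStream(input));
        Token token = lexer.nextToken();
        while (token != Token.EOF_TOKEN) { // EOF_TOKEN marks the end of the character stream
            System.out.println(token.getType() + ": " + token.getText());
            token = lexer.nextToken();
        }
    }
}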
Example 1: createContextsForLastCompleteNode
@Override
protected void createContextsForLastCompleteNode(EObject previousModel, boolean strict) {
    String currentNodePrefix = getPrefix(currentNode);
    if (!Strings.isEmpty(currentNodePrefix) && !currentNode.getText().equals(currentNodePrefix)) {
        // Re-lex the prefix of the current node to make sure it is free of lexer errors.
        lexer.setCharStream(new ANTLRStringStream(currentNodePrefix));
        Token token = lexer.nextToken();
        if (token == Token.EOF_TOKEN) { // error case - nothing could be parsed
            return;
        }
        while (token != Token.EOF_TOKEN) {
            if (isErrorToken(token))
                return;
            token = lexer.nextToken();
        }
    }
    String prefix = "";
    Collection<FollowElement> followElements = getParser().getFollowElements(rootNode, 0, completionOffset, strict);
    doCreateContexts(lastCompleteNode, currentNode, prefix, previousModel, followElements);
}
Example 2: createTokenSource
@Override
protected TokenSource createTokenSource(String string) {
    List<Token> tokens = highlightingParser.getTokens(string);
    Iterator<Token> iter = tokens.iterator();
    // Adapt the pre-computed token list to the TokenSource interface.
    return new TokenSource() {
        @Override
        public Token nextToken() {
            if (iter.hasNext()) {
                return iter.next();
            }
            // Signal the end of the token list.
            return Token.EOF_TOKEN;
        }

        @Override
        public String getSourceName() {
            return "Text: " + string;
        }
    };
}
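A TokenSource built this way can be consumed like any other ANTLR 3 token source, for instance by wrapping it in a CommonTokenStream. The following is a usage sketch, not part of the original example; the call to createTokenSource stands in for the method above, and the EOF check mirrors the one used later in Example 8.
TokenSource source = createTokenSource("some input"); // hypothetical call site for the method above
CommonTokenStream stream = new CommonTokenStream(source);
while (stream.LT(1) != Token.EOF_TOKEN) { // consume until the backing iterator is exhausted
    stream.consume();
}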
Example 3: announce
@Override
protected void announce(Token start, Token stop, AbstractElement element) {
    if (start != null && start != Token.EOF_TOKEN) {
        if (start == stop) {
            announce(start, element);
        } else {
            CommonToken castedStart = (CommonToken) start;
            if (stop == null) { // possible error condition
                if (start.getTokenIndex() == state.lastErrorIndex) {
                    return;
                }
            }
            CommonToken castedEnd = (CommonToken) stop;
            Integer newType = rewriter.rewrite(castedStart, element);
            if (newType != null && castedEnd != null && castedEnd != Token.EOF_TOKEN) {
                LazyTokenStream castedInput = (LazyTokenStream) this.input;
                // Retype all non-hidden tokens between start and stop, then the stop token itself.
                for (int i = castedStart.getTokenIndex() + 1; i < castedEnd.getTokenIndex(); i++) {
                    Token token = castedInput.get(i);
                    if (token.getChannel() != Token.HIDDEN_CHANNEL)
                        token.setType(newType);
                }
                castedEnd.setType(newType);
            }
        }
    }
}
Example 4: createContextsForLastCompleteNode
@Override
protected void createContextsForLastCompleteNode(EObject previousModel, boolean strict) {
    String currentNodePrefix = getPrefix(currentNode);
    if (!Strings.isEmpty(currentNodePrefix) && !currentNode.getText().equals(currentNodePrefix)) {
        lexer.setCharStream(new ANTLRStringStream(currentNodePrefix));
        Token token = lexer.nextToken();
        if (token == Token.EOF_TOKEN) {
            return;
        }
        while (token != Token.EOF_TOKEN) {
            if (isErrorToken(token)) {
                return;
            }
            token = lexer.nextToken();
        }
    }
    String prefix = "";
    Collection<FollowElement> followElements = parseFollowElements(completionOffset, strict);
    doCreateContexts(lastCompleteNode, currentNode, prefix, previousModel, followElements);
}
Example 5: createContextsForLastCompleteNode
protected void createContextsForLastCompleteNode(EObject previousModel, boolean strict) {
    String currentNodePrefix = getPrefix(currentNode);
    if (!Strings.isEmpty(currentNodePrefix) && !currentNode.getText().equals(currentNodePrefix)) {
        lexer.setCharStream(new ANTLRStringStream(currentNodePrefix));
        Token token = lexer.nextToken();
        if (token == Token.EOF_TOKEN) { // error case - nothing could be parsed
            return;
        }
        while (token != Token.EOF_TOKEN) {
            if (isErrorToken(token))
                return;
            token = lexer.nextToken();
        }
    }
    String prefix = "";
    String completeInput = getInputToParse(document, completionOffset);
    Collection<FollowElement> followElements = parser.getFollowElements(completeInput, strict);
    doCreateContexts(lastCompleteNode, currentNode, prefix, previousModel, followElements);
}
Example 6: nextToken
/**
 * Implementation of the {@link TokenSource} interface. Returns new tokens as long as any remain;
 * afterwards returns {@link Token#EOF_TOKEN}.
 */
@Override
public Token nextToken() {
    if (next != null) {
        // A token was synthesized on the previous call; return it now.
        Token result = next;
        next = null;
        return result;
    }
    if (!leafNodes.hasNext()) {
        return Token.EOF_TOKEN;
    }
    ILeafNode leaf = leafNodes.next();
    if (leaf.getTotalOffset() >= endOffset) {
        // Past the requested region; stop producing tokens.
        leafNodes = Iterators.emptyIterator();
        return Token.EOF_TOKEN;
    }
    if (leaf.getTotalEndOffset() <= startOffset) {
        return nextToken();
    }
    if (leaf.getTotalEndOffset() > endOffset) {
        return toPrefixToken(leaf);
    }
    SyntaxErrorMessage syntaxErrorMessage = leaf.getSyntaxErrorMessage();
    if (syntaxErrorMessage != null && SEMICOLON_INSERTED.equals(syntaxErrorMessage.getIssueCode())) {
        return toASIToken(leaf);
    }
    if (leaf.isHidden()) {
        return processHiddenToken(leaf);
    }
    int tokenType = tokenTypeMapper.getInternalTokenType(leaf);
    return new CommonToken(tokenType, leaf.getText());
}
Example 7: processHiddenToken
/**
 * Skips the given leaf as it is hidden. If it was the last token to be returned, a hidden token
 * may be synthesized if it would affect the semicolon insertion.
 */
private Token processHiddenToken(ILeafNode leaf) {
    Token result = nextToken();
    if (result == Token.EOF_TOKEN && Strings.countLineBreaks(leaf.getText()) > 0) {
        // Keep the EOF token for the next call and emit the hidden token with line breaks first.
        next = result;
        CommonToken hidden = new CommonToken(tokenTypeMapper.getInternalTokenType(leaf), leaf.getText());
        hidden.setChannel(Token.HIDDEN_CHANNEL);
        return hidden;
    }
    return result;
}
Example 8: exhaustTokenSource
private void exhaustTokenSource() {
    LazyTokenStream casted = (LazyTokenStream) this.input;
    int marked = casted.mark();
    try {
        // Force the stream to fetch all tokens up to EOF.
        while (casted.LT(1) != Token.EOF_TOKEN) {
            casted.consume();
        }
    } finally {
        // Restore the original stream position.
        casted.rewind(marked);
    }
}
Example 9: nextToken
@Override
public Token nextToken() {
    while (true) {
        this.state.token = null;
        this.state.channel = Token.DEFAULT_CHANNEL;
        this.state.tokenStartCharIndex = input.index();
        this.state.tokenStartCharPositionInLine = input.getCharPositionInLine();
        this.state.tokenStartLine = input.getLine();
        this.state.text = null;
        if (input.LA(1) == CharStream.EOF) {
            return Token.EOF_TOKEN;
        }
        try {
            mTokens();
            if (this.state.token == null) {
                emit();
            }
            else if (this.state.token == Token.SKIP_TOKEN) {
                continue;
            }
            return this.state.token;
        }
        catch (RecognitionException re) {
            reportError(re);
            if (re instanceof NoViableAltException ||
                    re instanceof FailedPredicateException) {
                recover(re);
            }
            // create token that holds mismatched char
            Token t = new CommonToken(input, Token.INVALID_TOKEN_TYPE, Token.HIDDEN_CHANNEL,
                    this.state.tokenStartCharIndex, getCharIndex() - 1);
            t.setLine(this.state.tokenStartLine);
            t.setCharPositionInLine(this.state.tokenStartCharPositionInLine);
            tokenErrorMap.put(t, getErrorMessage(re, this.getTokenNames()));
            emit(t);
            return this.state.token;
        }
    }
}
Example 10: nextToken
@Override
public Token nextToken() {
    if (iter.hasNext()) {
        ILookAheadTerminal lookAhead = iter.next();
        return lookAhead.getToken();
    }
    return Token.EOF_TOKEN;
}
Example 11: error
@Override
protected void error(NoViableAltException nvae) {
    if (nvae.token == Token.EOF_TOKEN) {
        int lookAheadAddOn = getRecognizer().lookAheadAddOn;
        int lookAhead = ((XtextTokenStream) nvae.input).getCurrentLookAhead();
        if ((lookAhead >= lookAheadAddOn && lookAheadAddOn > 0) || (lookAhead == 0 && lookAheadAddOn > 0) || lookAhead == -1)
            getRecognizer().failedPredicateAtEOF = true;
    }
    super.error(nvae);
}
Example 12: nextToken
@Override
public Token nextToken() {
    if (tokenCount == 0)
        return Token.EOF_TOKEN;
    // Produce a synthetic token and count down until the source is exhausted.
    return new CommonToken(tokenCount--, "Text");
}