This article collects typical usage examples of the Java class org.fife.ui.rsyntaxtextarea.TokenTypes. If you are wondering what TokenTypes is for or how to use it, the selected code examples below should help.
The TokenTypes class belongs to the org.fife.ui.rsyntaxtextarea package. Fifteen code examples of the class are shown below, sorted by popularity by default.
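Note that the snippets below rely on small test-harness helpers such as createSegment() and createTokenMaker() that the listing itself does not show. The following is only a minimal sketch of what such helpers typically look like, assuming a JUnit test class; the concrete TokenMaker returned (JavaScriptTokenMaker here) is an illustrative assumption and varies from test class to test class.

import javax.swing.text.Segment;

import org.fife.ui.rsyntaxtextarea.TokenMaker;
import org.fife.ui.rsyntaxtextarea.modes.JavaScriptTokenMaker;

// Hypothetical helpers assumed by the test snippets below.
private Segment createSegment(String code) {
    // Wrap the source text in a Segment, which is what TokenMaker.getTokenList() expects.
    return new Segment(code.toCharArray(), 0, code.length());
}

private TokenMaker createTokenMaker() {
    // Each test class builds the TokenMaker for the language it exercises;
    // JavaScriptTokenMaker is used here purely as an example.
    return new JavaScriptTokenMaker();
}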
Example 1: testOperators
import org.fife.ui.rsyntaxtextarea.TokenTypes; // import of the required package/class

@Test
public void testOperators() {
    String code = "^ @ : = < > + - / *";
    Segment segment = createSegment(code);
    TokenMaker tm = createTokenMaker();
    Token token = tm.getTokenList(segment, TokenTypes.NULL, 0);
    String[] operators = code.split(" +");
    for (int i = 0; i < operators.length; i++) {
        Assert.assertEquals(operators[i], token.getLexeme());
        Assert.assertEquals(TokenTypes.OPERATOR, token.getType());
        if (i < operators.length - 1) {
            token = token.getNextToken();
            Assert.assertTrue("Not a whitespace token: " + token, token.isWhitespace());
            Assert.assertTrue("Not a single space: " + token, token.is(TokenTypes.WHITESPACE, " "));
        }
        token = token.getNextToken();
    }
    Assert.assertTrue(token.getType() == TokenTypes.NULL);
}
Example 2: testJS_StringLiterals_invalid
import org.fife.ui.rsyntaxtextarea.TokenTypes; // import of the required package/class

@Test
public void testJS_StringLiterals_invalid() {
    String[] stringLiterals = {
        "\"\\xG7\"", // Invalid hex/octal escape
        "\"foo\\ubar\"", "\"\\u00fg\"", // Invalid Unicode escape
        "\"My name is \\ubar and I \\", // Continued onto another line
        "\"This is unterminated and ", // Unterminated string
    };
    for (String code : stringLiterals) {
        Segment segment = createSegment(code);
        TokenMaker tm = createTokenMaker();
        Token token = tm.getTokenList(segment, TokenTypes.NULL, 0);
        Assert.assertEquals("Not an ERROR_STRING_DOUBLE: " + token,
                TokenTypes.ERROR_STRING_DOUBLE, token.getType());
    }
}
Example 3: testXML_comment_URL
import org.fife.ui.rsyntaxtextarea.TokenTypes; // import of the required package/class

@Test
public void testXML_comment_URL() {
    String code = "<!-- Hello world http://www.google.com -->";
    Segment segment = createSegment(code);
    XMLTokenMaker tm = new XMLTokenMaker();
    Token token = tm.getTokenList(segment, TokenTypes.NULL, 0);
    Assert.assertFalse(token.isHyperlink());
    Assert.assertTrue("Token is not type MARKUP_COMMENT: " + token,
            token.is(TokenTypes.MARKUP_COMMENT, "<!-- Hello world "));
    token = token.getNextToken();
    Assert.assertTrue(token.isHyperlink());
    Assert.assertTrue(token.is(TokenTypes.MARKUP_COMMENT, "http://www.google.com"));
    token = token.getNextToken();
    Assert.assertFalse(token.isHyperlink());
    Assert.assertTrue(token.is(TokenTypes.MARKUP_COMMENT, " -->"));
}
Example 4: testJava_DataTypes
import org.fife.ui.rsyntaxtextarea.TokenTypes; // import of the required package/class

@Test
public void testJava_DataTypes() {
    String code = "boolean byte char double float int long short";
    Segment segment = createSegment(code);
    JSPTokenMaker tm = new JSPTokenMaker();
    Token token = tm.getTokenList(segment, JSPTokenMaker.INTERNAL_IN_JAVA_EXPRESSION, 0);
    String[] keywords = code.split(" +");
    for (int i = 0; i < keywords.length; i++) {
        Assert.assertEquals(keywords[i], token.getLexeme());
        Assert.assertEquals(TokenTypes.DATA_TYPE, token.getType());
        if (i < keywords.length - 1) {
            token = token.getNextToken();
            Assert.assertTrue("Not a whitespace token: " + token, token.isWhitespace());
            Assert.assertTrue(token.is(TokenTypes.WHITESPACE, " "));
        }
        token = token.getNextToken();
    }
}
Example 5: testTS_StringLiterals_valid
import org.fife.ui.rsyntaxtextarea.TokenTypes; // import of the required package/class

@Test
public void testTS_StringLiterals_valid() {
    String[] stringLiterals = {
        "\"\"", "\"hi\"", "\"\\x77\"", "\"\\u00fe\"", "\"\\\"\"",
        "\"My name is Robert and I \\", // String continued on another line
    };
    for (String code : stringLiterals) {
        Segment segment = createSegment(code);
        TokenMaker tm = createTokenMaker();
        Token token = tm.getTokenList(segment, TS_PREV_TOKEN_TYPE, 0);
        Assert.assertEquals(TokenTypes.LITERAL_STRING_DOUBLE_QUOTE, token.getType());
    }
}
Example 6: testGetTokenList_whitespaceStart_LiteralSingleQuote
import org.fife.ui.rsyntaxtextarea.TokenTypes; // import of the required package/class

@Test
public void testGetTokenList_whitespaceStart_LiteralSingleQuote() {
    UnixShellTokenMaker tm = new UnixShellTokenMaker();
    String text = " 'Hello world'";
    Segment s = createSegment(text);
    Token token = tm.getTokenList(s, TokenTypes.NULL, 0);
    Assert.assertTrue(token.isSingleChar(TokenTypes.WHITESPACE, ' '));
    token = token.getNextToken();
    Assert.assertTrue(token.is(TokenTypes.LITERAL_CHAR, text.trim()));
    token = token.getNextToken();
    Assert.assertEquals(new TokenImpl(), token);
}
Example 7: testBooleanLiterals
import org.fife.ui.rsyntaxtextarea.TokenTypes; // import of the required package/class

@Test
public void testBooleanLiterals() {
    String code = "true false";
    Segment segment = createSegment(code);
    TokenMaker tm = createTokenMaker();
    Token token = tm.getTokenList(segment, TokenTypes.NULL, 0);
    String[] keywords = code.split(" +");
    for (int i = 0; i < keywords.length; i++) {
        Assert.assertEquals(keywords[i], token.getLexeme());
        Assert.assertEquals(TokenTypes.LITERAL_BOOLEAN, token.getType());
        if (i < keywords.length - 1) {
            token = token.getNextToken();
            Assert.assertTrue("Not a whitespace token: " + token, token.isWhitespace());
            Assert.assertTrue(token.is(TokenTypes.WHITESPACE, " "));
        }
        token = token.getNextToken();
    }
    Assert.assertTrue(token.getType() == TokenTypes.NULL);
}
Example 8: testJS_CharLiterals_valid
import org.fife.ui.rsyntaxtextarea.TokenTypes; // import of the required package/class

@Test
public void testJS_CharLiterals_valid() {
    String[] charLiterals = {
        "'a'", "'\\b'", "'\\t'", "'\\r'", "'\\f'", "'\\n'", "'\\u00fe'",
        "'\\u00FE'", "'\\111'", "'\\222'", "'\\333'",
        "'\\x77'",
        "'\\11'", "'\\22'", "'\\33'",
        "'\\1'",
        "'My name is Robert and I \\", // Continued onto another line
    };
    for (String code : charLiterals) {
        Segment segment = createSegment(code);
        JavaScriptTokenMaker tm = new JavaScriptTokenMaker();
        Token token = tm.getTokenList(segment, JS_PREV_TOKEN_TYPE, 0);
        Assert.assertEquals(TokenTypes.LITERAL_CHAR, token.getType());
    }
}
Example 9: testJS_DataTypes
import org.fife.ui.rsyntaxtextarea.TokenTypes; // import of the required package/class

@Test
public void testJS_DataTypes() {
    String code = "boolean byte char double float int long short";
    Segment segment = createSegment(code);
    JavaScriptTokenMaker tm = new JavaScriptTokenMaker();
    Token token = tm.getTokenList(segment, JS_PREV_TOKEN_TYPE, 0);
    String[] keywords = code.split(" +");
    for (int i = 0; i < keywords.length; i++) {
        Assert.assertEquals(keywords[i], token.getLexeme());
        Assert.assertEquals(TokenTypes.DATA_TYPE, token.getType());
        if (i < keywords.length - 1) {
            token = token.getNextToken();
            Assert.assertTrue("Not a whitespace token: " + token, token.isWhitespace());
            Assert.assertTrue(token.is(TokenTypes.WHITESPACE, " "));
        }
        token = token.getNextToken();
    }
    Assert.assertTrue(token.getType() == TokenTypes.NULL);
}
Example 10: testOperators
import org.fife.ui.rsyntaxtextarea.TokenTypes; // import of the required package/class

@Test
public void testOperators() {
    String nonAssignmentOperators = "+ - <= ^ ++ < * >= % -- > / != ? >> ! & == : >> ~ && >>>";
    String assignmentOperators = "= -= *= /= |= &= ^= += %= <<= >>= >>>=";
    String code = nonAssignmentOperators + " " + assignmentOperators;
    Segment segment = createSegment(code);
    PHPTokenMaker tm = new PHPTokenMaker();
    Token token = tm.getTokenList(segment, PHPTokenMaker.INTERNAL_IN_PHP, 0);
    String[] keywords = code.split(" +");
    for (int i = 0; i < keywords.length; i++) {
        Assert.assertEquals(keywords[i], token.getLexeme());
        Assert.assertEquals("Not an operator: " + token, TokenTypes.OPERATOR, token.getType());
        if (i < keywords.length - 1) {
            token = token.getNextToken();
            Assert.assertTrue("Not a whitespace token: " + token, token.isWhitespace());
            Assert.assertTrue("Not a single space: " + token, token.is(TokenTypes.WHITESPACE, " "));
        }
        token = token.getNextToken();
    }
    Assert.assertTrue(token.getType() == PHPTokenMaker.INTERNAL_IN_PHP);
}
Example 11: testKeywords2
import org.fife.ui.rsyntaxtextarea.TokenTypes; // import of the required package/class

@Test
public void testKeywords2() {
    String code = "exit return";
    Segment segment = createSegment(code);
    PHPTokenMaker tm = new PHPTokenMaker();
    Token token = tm.getTokenList(segment, PHPTokenMaker.INTERNAL_IN_PHP, 0);
    String[] keywords = code.split(" +");
    for (int i = 0; i < keywords.length; i++) {
        Assert.assertEquals(keywords[i], token.getLexeme());
        Assert.assertEquals(TokenTypes.RESERVED_WORD_2, token.getType());
        if (i < keywords.length - 1) {
            token = token.getNextToken();
            Assert.assertTrue("Not a whitespace token: " + token, token.isWhitespace());
            Assert.assertTrue(token.is(TokenTypes.WHITESPACE, " "));
        }
        token = token.getNextToken();
    }
    Assert.assertTrue(token.getType() == PHPTokenMaker.INTERNAL_IN_PHP);
}
Example 12: testHexLiterals
import org.fife.ui.rsyntaxtextarea.TokenTypes; // import of the required package/class

@Test
public void testHexLiterals() {
    String[] hexLiterals = {
        "0x1", "0xfe", "0x333333333333 ",
        "0xf_e", "0x333_33_3", // Underscores
    };
    for (String code : hexLiterals) {
        Segment segment = createSegment(code);
        TokenMaker tm = createTokenMaker();
        Token token = tm.getTokenList(segment, TokenTypes.NULL, 0);
        Assert.assertEquals("Invalid hex literal: " + token, TokenTypes.LITERAL_NUMBER_HEXADECIMAL, token.getType());
    }
}
Example 13: testJS_Functions
import org.fife.ui.rsyntaxtextarea.TokenTypes; // import of the required package/class

@Test
public void testJS_Functions() {
    String code = "eval parseInt parseFloat escape unescape isNaN isFinite";
    Segment segment = createSegment(code);
    TokenMaker tm = createTokenMaker();
    Token token = tm.getTokenList(segment, TokenTypes.NULL, 0);
    String[] functions = code.split(" +");
    for (int i = 0; i < functions.length; i++) {
        Assert.assertEquals(functions[i], token.getLexeme());
        Assert.assertEquals("Not a function token: " + token, TokenTypes.FUNCTION, token.getType());
        if (i < functions.length - 1) {
            token = token.getNextToken();
            Assert.assertTrue("Not a whitespace token: " + token, token.isWhitespace());
            Assert.assertTrue(token.is(TokenTypes.WHITESPACE, " "));
        }
        token = token.getNextToken();
    }
    Assert.assertTrue(token.getType() == TokenTypes.NULL);
}
Example 14: testCharLiterals
import org.fife.ui.rsyntaxtextarea.TokenTypes; // import of the required package/class

@Test
public void testCharLiterals() {
    String[] chars = {
        "'a'", "'\\b'", "'\\t'", "'\\r'", "'\\f'", "'\\n'", "'\\u00fe'",
        "'\\u00FE'", "'\\111'", "'\\222'", "'\\333'",
        "'\\11'", "'\\22'", "'\\33'",
        "'\\1'",
    };
    for (String code : chars) {
        Segment segment = createSegment(code);
        TokenMaker tm = createTokenMaker();
        Token token = tm.getTokenList(segment, TokenTypes.NULL, 0);
        Assert.assertEquals(TokenTypes.LITERAL_CHAR, token.getType());
    }
}
Example 15: testJS_Functions
import org.fife.ui.rsyntaxtextarea.TokenTypes; // import of the required package/class

@Test
public void testJS_Functions() {
    String code = "eval parseInt parseFloat escape unescape isNaN isFinite";
    Segment segment = createSegment(code);
    JSPTokenMaker tm = new JSPTokenMaker();
    Token token = tm.getTokenList(segment, JSPTokenMaker.INTERNAL_IN_JS, 0);
    String[] functions = code.split(" +");
    for (int i = 0; i < functions.length; i++) {
        Assert.assertEquals(functions[i], token.getLexeme());
        Assert.assertEquals("Not a function token: " + token, TokenTypes.FUNCTION, token.getType());
        if (i < functions.length - 1) {
            token = token.getNextToken();
            Assert.assertTrue("Not a whitespace token: " + token, token.isWhitespace());
            Assert.assertTrue(token.is(TokenTypes.WHITESPACE, " "));
        }
        token = token.getNextToken();
    }
}
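Outside of unit tests, the same pattern applies: obtain a Token chain from a TokenMaker and compare each token's type against the TokenTypes constants. The following is a minimal standalone sketch, assuming the JavaTokenMaker from the org.fife.ui.rsyntaxtextarea.modes package; it is an illustration of the general pattern, not part of the test suite above.

import javax.swing.text.Segment;

import org.fife.ui.rsyntaxtextarea.Token;
import org.fife.ui.rsyntaxtextarea.TokenTypes;
import org.fife.ui.rsyntaxtextarea.modes.JavaTokenMaker;

public class TokenTypesDemo {

    public static void main(String[] args) {
        String line = "int count = 0; // a counter";
        Segment segment = new Segment(line.toCharArray(), 0, line.length());

        // Tokenize one line of Java, starting with no prior lexer state (TokenTypes.NULL).
        Token token = new JavaTokenMaker().getTokenList(segment, TokenTypes.NULL, 0);

        // Walk the token chain; the terminating token is not paintable.
        while (token != null && token.isPaintable()) {
            System.out.println(token.getLexeme() + " -> type " + token.getType()
                    + (token.getType() == TokenTypes.COMMENT_EOL ? " (end-of-line comment)" : ""));
            token = token.getNextToken();
        }
    }
}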