This article collects typical usage examples of the Java class org.antlr.runtime.ANTLRInputStream. If you are wondering what exactly the ANTLRInputStream class does and how to use it, the curated code examples below should help.
The ANTLRInputStream class belongs to the org.antlr.runtime package. Fifteen code examples of the class are shown below, ordered by popularity.
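All of the examples follow the same ANTLR 3 pipeline: wrap the raw input in an ANTLRInputStream, feed it to a generated lexer, buffer the tokens in a CommonTokenStream, and hand those tokens to a generated parser whose start rule is then invoked. The following minimal sketch shows that skeleton in isolation; MyLexer, MyParser and the start rule query are hypothetical names standing in for whatever ANTLR generates from your own grammar.

import java.io.FileInputStream;
import java.io.InputStream;

import org.antlr.runtime.ANTLRInputStream;
import org.antlr.runtime.CommonTokenStream;

public class AntlrPipelineSketch {
    public static void main(String[] args) throws Exception {
        // ANTLRInputStream buffers the entire stream up front; the second
        // argument selects the character encoding and may be omitted.
        InputStream in = new FileInputStream(args[0]);
        ANTLRInputStream input = new ANTLRInputStream(in, "UTF-8");
        MyLexer lexer = new MyLexer(input);               // hypothetical, generated from My.g
        CommonTokenStream tokens = new CommonTokenStream(lexer);
        MyParser parser = new MyParser(tokens);           // hypothetical, generated from My.g
        parser.query();                                   // 'query' stands in for your grammar's start rule
        in.close();
    }
}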
Example 1: createOperand
import org.antlr.runtime.ANTLRInputStream; // import the required package/class
@Override
public Operand createOperand(String operand) throws MoqlException {
    Validate.notEmpty(operand, "Parameter 'operand' is empty!");
    Operand pseudoOperand = createPseudoOperand(operand);
    if (pseudoOperand != null)
        return pseudoOperand;
    try {
        ANTLRInputStream is = new ANTLRInputStream(new ByteArrayInputStream(operand.getBytes()));
        OperandLexer lexer = new OperandLexer(is);
        CommonTokenStream tokens = new CommonTokenStream(lexer);
        OperandParser parser = new OperandParser(tokens);
        parser.setFunctionFactory(functionFactory);
        return parser.operand();
    } catch (Exception e) {
        throw new MoqlException(StringFormater.format("Create operand '{}' failed!", operand), e);
    }
}
Example 2: parseMoql
import org.antlr.runtime.ANTLRInputStream; // import the required package/class
/**
 * Parses a MOQL statement into a selector definition.
 *
 * @param moql the MOQL statement to parse
 * @return a SelectorDefinition (a SelectorMetadata or SetlectorMetadata)
 * @throws MoqlException if the statement cannot be parsed
 */
public static SelectorDefinition parseMoql(String moql) throws MoqlException {
    Validate.notEmpty(moql, "Parameter 'moql' is empty!");
    try {
        ANTLRInputStream is = new ANTLRInputStream(new ByteArrayInputStream(
                moql.getBytes()));
        SelectorLexer lexer = new SelectorLexer(is);
        CommonTokenStream tokens = new CommonTokenStream(lexer);
        SelectorParser parser = new SelectorParser(tokens);
        return parser.selector();
    } catch (Exception e) {
        throw new MoqlException(StringFormater.format("Parse moql '{}' failed!",
                moql), e);
    }
}
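For orientation, a call to this method might look like the sketch below. The MOQL statement text and the enclosing class name MoqlParser are assumptions made for illustration; only the parseMoql signature comes from the example itself.

// Hypothetical call site; the statement text and class name are placeholders.
SelectorDefinition definition = MoqlParser.parseMoql(
        "select a.id, sum(a.num) from BioAlarm a group by a.id");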
Example 3: parseCondition
import org.antlr.runtime.ANTLRInputStream; // import the required package/class
public static ConditionMetadata parseCondition(String condition)
        throws MoqlException {
    Validate.notEmpty(condition, "Parameter 'condition' is empty!");
    try {
        ANTLRInputStream is = new ANTLRInputStream(new ByteArrayInputStream(
                condition.getBytes()));
        FilterLexer lexer = new FilterLexer(is);
        CommonTokenStream tokens = new CommonTokenStream(lexer);
        FilterParser parser = new FilterParser(tokens);
        return new ConditionMetadata(parser.searchCondition());
    } catch (Exception e) {
        throw new MoqlException(StringFormater.format(
                "Parse condition '{}' failed!", condition), e);
    }
}
Example 4: urlStream
import org.antlr.runtime.ANTLRInputStream; // import the required package/class
public static CSSInputStream urlStream(URL source, String encoding) throws IOException {
    CSSInputStream stream = new CSSInputStream();
    stream.base = source;
    if (encoding != null)
        stream.encoding = encoding;
    else
        stream.encoding = Charset.defaultCharset().name();
    URLConnection con = source.openConnection();
    InputStream is;
    if ("gzip".equalsIgnoreCase(con.getContentEncoding()))
        is = new GZIPInputStream(con.getInputStream());
    else
        is = con.getInputStream();
    stream.input = new ANTLRInputStream(is, stream.encoding);
    stream.source = is;
    stream.url = source;
    return stream;
}
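A possible call site for the factory above, assuming the surrounding CSSInputStream API is used exactly as shown; the URL and charset are placeholders.

// Hypothetical usage; URL and encoding are placeholders.
CSSInputStream css = CSSInputStream.urlStream(new URL("http://example.com/style.css"), "UTF-8");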
Example 5: parse
import org.antlr.runtime.ANTLRInputStream; // import the required package/class
/**
 * Parse the specified file and create a ParsedModule object.
 *
 * @param moduleName the name of the module being parsed
 * @param path the path of the source file to parse
 * @param preprocessed true if the file has already been run through the preprocessor
 * @return the parsed module, including its AST and line mapping
 * @throws IOException if the file cannot be read
 */
public static ParsedModule parse(String moduleName, String path,
        boolean preprocessed) throws IOException {
    FileInputStream inputStream = setupInput(path);
    /* Parse the input file and build the AST */
    ANTLRInputStream antlrInput = new ANTLRInputStream(inputStream);
    LineMapping lineMapping;
    if (preprocessed) {
        int startMark = antlrInput.mark();
        lineMapping = parsePreprocOutput(antlrInput);
        antlrInput.rewind(startMark);
    } else {
        // Treat # lines as comments. All input comes from the same file.
        lineMapping = LineMapping.makeSimple(path);
    }
    SwiftAST tree = runANTLR(antlrInput, lineMapping);
    return new ParsedModule(moduleName, path, tree, lineMapping);
}
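A sketch of invoking the parser above, assuming the static parse method lives on the ParsedModule class it returns; the module name and file path are placeholders and error handling is omitted.

// Hypothetical usage; arguments are placeholders.
ParsedModule module = ParsedModule.parse("hello", "/tmp/hello.swift", false);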
Example 6: getTokens
import org.antlr.runtime.ANTLRInputStream; // import the required package/class
private CommonTokenStream getTokens(String fname) {
    try {
        InputStream is;
        FileInputStream fis = new FileInputStream(fname);
        if (fname.toLowerCase().endsWith(".gz")) {
            is = new GZIPInputStream(fis);
        } else {
            is = fis;
        }
        ANTLRInputStream input = new ANTLRInputStream(is);
        MLNLexer lexer = new MLNLexer(input);
        CommonTokenStream tokens = new CommonTokenStream(lexer);
        // Safe to close here: ANTLRInputStream has already buffered the whole stream.
        is.close();
        return tokens;
    } catch (Exception e) {
        ExceptionMan.handle(e);
    }
    return null;
}
Example 7: load
import org.antlr.runtime.ANTLRInputStream; // import the required package/class
/**
 * Load an FCL definition and create a "Fuzzy Inference System (FIS)".
 * @param inputStream : stream containing the FCL definition
 * @param verbose : Be verbose?
 * @return A new FIS or null on error
 */
public static FIS load(InputStream inputStream, boolean verbose) {
    // Parse the input (lexer first, then parser)
    FclLexer lexer;
    try {
        lexer = new FclLexer(new ANTLRInputStream(inputStream));
    } catch (IOException e1) {
        System.err.println("Error reading inputStream '" + inputStream + "'");
        return null;
    }
    // Parse the tree and create the FIS
    try {
        return createFromLexer(lexer, verbose);
    } catch (RecognitionException e) {
        throw new RuntimeException(e);
    }
}
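A sketch of how the loader above might be called; the FCL file name is a placeholder, and the null check simply mirrors the method's documented "null on error" contract.

// Hypothetical usage; "tipper.fcl" is a placeholder file name.
try (InputStream in = new FileInputStream("tipper.fcl")) {
    FIS fis = FIS.load(in, true);
    if (fis == null) {
        System.err.println("Could not load the FCL definition");
    }
}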
Example 8: loadFromPlainText
import org.antlr.runtime.ANTLRInputStream; // import the required package/class
@Override
public IASLanSpec loadFromPlainText(String plainTextSpec, ErrorGatherer err, ISymbolsProvider... extraDefaults) {
    ByteArrayInputStream bais = new ByteArrayInputStream(plainTextSpec.getBytes());
    try {
        ANTLRInputStream antlrStream = new ANTLRInputStream(bais);
        aslanLexer lexer = new aslanLexer(antlrStream);
        lexer.setErrorGatherer(err);
        CommonTokenStream tokens = new CommonTokenStream(lexer);
        aslanParser parser = new aslanParser(tokens);
        parser.setErrorGatherer(err);
        parser.setExtraSymbolsProviders(extraDefaults);
        IASLanSpec spec = parser.aslanSpecification();
        spec.finish();
        return spec;
    } catch (Exception e) {
        err.addException(ASLanErrorMessages.GENERIC_ERROR, e.getMessage());
        return null;
    }
}
Example 9: fromString
import org.antlr.runtime.ANTLRInputStream; // import the required package/class
public static IGroundTerm fromString(String term) {
    ByteArrayInputStream bais = new ByteArrayInputStream(term.getBytes());
    try {
        ANTLRInputStream antlrStream = new ANTLRInputStream(bais);
        ErrorGatherer err = new ErrorGatherer(OutputFormatErrorMessages.DEFAULT);
        ofLexer lexer = new ofLexer(antlrStream);
        lexer.setErrorGatherer(err);
        CommonTokenStream tokens = new CommonTokenStream(lexer);
        ofParser parser = new ofParser(tokens);
        parser.setErrorGatherer(err);
        IGroundTerm t = parser.term();
        if (err.getErrors().size() == 0) {
            return t;
        } else {
            return null;
        }
    } catch (Exception e) {
        // silently ignore any errors for now
        return null;
    }
}
Example 10: main
import org.antlr.runtime.ANTLRInputStream; // import the required package/class
public static void main(String[] args) throws Exception {
    ANTLRInputStream input =
            new ANTLRInputStream(
                    new FileInputStream(
                            new File(args[0])));
    MegaLLexer lexer = new MegaLLexer(input);
    CommonTokenStream tokens = new CommonTokenStream(lexer);
    MegaLParser parser = new MegaLParser(tokens);
    parser.megal();
    if (parser.error)
        System.exit(-1);
}
Example 11: main
import org.antlr.runtime.ANTLRInputStream; // import the required package/class
public static void main(String[] args) throws IOException, RecognitionException {
    String path = args[0];
    String pkg = args[1];
    String stem = args[2];
    FileInputStream stream = new FileInputStream(path + File.separatorChar + stem + ".yapg");
    ANTLRInputStream antlr = new ANTLRInputStream(stream);
    GrammarLexer lexer = new GrammarLexer(antlr);
    CommonTokenStream tokens = new CommonTokenStream(lexer);
    GrammarParser parser = new GrammarParser(tokens);
    Grammar g = parser.parseGrammar();
    if (parser.error) throw new RecognitionException();
    Generator.generate(path, pkg, stem, g);
}
Example 12: testLexer
import org.antlr.runtime.ANTLRInputStream; // import the required package/class
public void testLexer() throws IOException, RecognitionException
{
    ClassLoader cl = FTSTest.class.getClassLoader();
    InputStream modelStream = cl.getResourceAsStream("org/alfresco/repo/search/impl/parsers/fts_test.gunit");
    CharStream input = new ANTLRInputStream(modelStream);
    gUnitExecutor executer = new gUnitExecutor(parse(input), "FTS");
    String result = executer.execTest();
    System.out.print(result); // unit test result (print the result instead of running the tests a second time)
    assertEquals("Failures: " + result, 0, executer.failures.size());
    assertEquals("Invalids " + result, 0, executer.invalids.size());
}
Example 13: testLexerOutput
import org.antlr.runtime.ANTLRInputStream; // import the required package/class
public void testLexerOutput() throws IOException
{
    String str = "~woof^2";
    CharStream input = new ANTLRInputStream(new ByteArrayInputStream(str.getBytes("UTF-8")));
    FTSLexer lexer = new FTSLexer(input);
    CommonTokenStream tokenStream = new CommonTokenStream(lexer);
    for (CommonToken token : (List<CommonToken>) tokenStream.getTokens())
    {
        System.out.println(token.toString());
    }
}
Example 14: getDotTreeGraphFromFile
import org.antlr.runtime.ANTLRInputStream; // import the required package/class
private void getDotTreeGraphFromFile(String dotFileName) {
    FileInputStream file;
    try {
        file = new FileInputStream(dotFileName);
        ANTLRInputStream input = new ANTLRInputStream(file);
        DotLexer lexer = new DotLexer(input);
        CommonTokenStream tokens = new CommonTokenStream(lexer);
        DotParser parser = new DotParser(tokens);
        graph_return ret = parser.graph();
        CommonTree tree = ret.getTree();
        CommonTreeNodeStream ctnsNodes = new CommonTreeNodeStream(tree);
        DotTree dotTree = new DotTree(ctnsNodes);
        graphObjRet = dotTree.graph().graphObj;
        removeQuotesFromPropertyValueOfGrpah(graphObjRet);
        // System.out.println(graphObjRet.id);
        for (DotTree.Node n : graphObjRet.getNodes()) {
            // System.out.println(n.toString());
        }
        for (DotTree.NodePair np : graphObjRet.getNodePairs()) {
            // System.out.println(np.toString());
        }
    } catch (Exception e) {
        System.out.println("Error reading file named - " + dotFileName);
        // e.printStackTrace();
    }
}
Example 15: getModelANTLR
import org.antlr.runtime.ANTLRInputStream; // import the required package/class
private Model getModelANTLR(String filename) throws IOException, RecognitionException, ParseException
{
    // Create an input character stream from the given file
    FileInputStream in = new FileInputStream(new File(filename));
    ANTLRInputStream input = new ANTLRInputStream(in);
    // Create a ModelLexer that feeds from that stream
    ModelLexer lexer = new ModelLexer(input);
    // Create a stream of tokens fed by the lexer
    CommonTokenStream tokens = new CommonTokenStream(lexer);
    // Create a parser that feeds off the token stream
    ModelParser parser = new ModelParser(tokens);
    // Parse and return the model
    Model m = parser.model();
    return m;
}