This article collects typical usage examples of the Java class opennlp.tools.parser.ParserModel. If you are unsure what ParserModel is for, how to use it, or simply want to see it in real code, the curated class examples below may help.
The ParserModel class belongs to the opennlp.tools.parser package. A total of 15 code examples of ParserModel are shown below, sorted by popularity by default. You can upvote the examples you like or find useful; your ratings help the system recommend better Java code examples.
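Before the project-specific examples, here is a minimal, self-contained sketch of the usual ParserModel workflow: load the serialized model, build a Parser via ParserFactory, and parse a whitespace-tokenized sentence with ParserTool. The class name, model path, and sample sentence are illustrative assumptions rather than values taken from the examples below.

import java.io.FileInputStream;
import java.io.InputStream;

import opennlp.tools.cmdline.parser.ParserTool;
import opennlp.tools.parser.Parse;
import opennlp.tools.parser.Parser;
import opennlp.tools.parser.ParserFactory;
import opennlp.tools.parser.ParserModel;

public class ParserModelQuickStart {
    public static void main(String[] args) throws Exception {
        // Path to a local copy of the pre-trained parser model (illustrative).
        try (InputStream modelIn = new FileInputStream("en-parser-chunking.bin")) {
            ParserModel model = new ParserModel(modelIn);
            Parser parser = ParserFactory.create(model);
            // parseLine() whitespace-tokenizes the input and returns the top-k parses.
            Parse[] topParses = ParserTool.parseLine(
                    "The quick brown fox jumps over the lazy dog .", parser, 1);
            topParses[0].show(); // prints the bracketed parse tree to stdout
        }
    }
}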
Example 1: doInitialize
import opennlp.tools.parser.ParserModel; // import the required package/class
@Override
public void doInitialize(final UimaContext aContext) throws ResourceInitializationException {
    try {
        parserChunkingModel.loadModel(ParserModel.class, getClass().getResourceAsStream("en-parser-chunking.bin"));
    } catch (final BaleenException be) {
        getMonitor().error("Unable to load OpenNLP Language Models", be);
        throw new ResourceInitializationException(be);
    }
    try {
        parser = ParserFactory.create((ParserModel) parserChunkingModel.getModel());
    } catch (final Exception e) {
        getMonitor().error("Unable to create OpenNLP parser", e);
        throw new ResourceInitializationException(e);
    }
}
Example 2: doInitialize
import opennlp.tools.parser.ParserModel; // import the required package/class
@Override
public void doInitialize(final UimaContext aContext) throws ResourceInitializationException {
    try {
        parserChunkingModel.loadModel(ParserModel.class, getClass().getResourceAsStream("en_parser_chunking.bin"));
    } catch (final BaleenException be) {
        getMonitor().error("Unable to load OpenNLP Language Models", be);
        throw new ResourceInitializationException(be);
    }
    try {
        parser = ParserFactory.create((ParserModel) parserChunkingModel.getModel());
    } catch (final Exception e) {
        getMonitor().error("Unable to create OpenNLP parser", e);
        throw new ResourceInitializationException(e);
    }
}
Example 3: initialize
import opennlp.tools.parser.ParserModel; // import the required package/class
@SuppressWarnings("unchecked")
@Override
public void initialize(UimaContext ctx) throws ResourceInitializationException {
    super.initialize(ctx);
    inputTypesHelper = InitializableFactory.create(
            ctx,
            inputTypesHelperClassName,
            InputTypesHelper.class);
    try {
        InputStream modelInputStream = IoUtil.getInputStream(ParserAnnotator.class, parserModelPath);
        ParserModel parserModel = new ParserModel(modelInputStream);
        if (useTagsFromCas) {
            this.casTagger = new CasPosTagger<TOKEN_TYPE, SENTENCE_TYPE>(inputTypesHelper);
            this.parser = new Parser(parserModel, beamSize, advancePercentage, casTagger);
        } else {
            this.parser = new Parser(parserModel, beamSize, advancePercentage);
        }
    } catch (IOException e) {
        throw new ResourceInitializationException(e);
    }
}
Example 4: scoreStructure
import opennlp.tools.parser.ParserModel; // import the required package/class
public double scoreStructure(String ca, String q, String passage, boolean verbose) throws InvalidFormatException, IOException {
    POSTaggerME parserModel = new POSTaggerME(new POSModel(new FileInputStream(new File("en-pos-model.bin"))));
    Tokenizer tokenizer = new TokenizerME(new TokenizerModel(new FileInputStream(new File("en-token.bin"))));
    Parser parser = ParserFactory.create(new ParserModel(new FileInputStream(new File("en-parser.bin"))));
    double score = 0;
    Parse[] questionParse = ParserTool.parseLine(q, parser, 1);
    // parse the passage so its tree can be compared against the question tree
    Parse[] passageParse = ParserTool.parseLine(passage, parser, 1);
    if (passage.contains(ca)) {
        for (int i = 0; i < questionParse.length; i++) {
            score += matchChildren(questionParse[i], passageParse[i]);
        }
    }
    return score;
}
Example 5: loadModel
import opennlp.tools.parser.ParserModel; // import the required package/class
private final ParserModel loadModel(final String lang, final String model) {
    final long lStartTime = new Date().getTime();
    try {
        synchronized (parseModels) {
            if (!parseModels.containsKey(lang)) {
                parseModels.put(lang, new ParserModel(new FileInputStream(model)));
            }
        }
    } catch (final IOException e) {
        e.printStackTrace();
    }
    final long lEndTime = new Date().getTime();
    final long difference = lEndTime - lStartTime;
    System.err.println("ixa-pipe-parse model loaded in: " + difference
            + " milliseconds ... [DONE]");
    return parseModels.get(lang);
}
Example 6: EnglishIndexer
import opennlp.tools.parser.ParserModel; // import the required package/class
public EnglishIndexer() throws Exception {
    mDicts = new EnglishDictionaries();
    mBeamSize = ConfigurationManager.getConfiguration().getInt("BeamSize");
    InputStream modelStream = null;

    modelStream = getClass().getResourceAsStream("/opennlp15model-sa/en-sent.bin");
    SentenceModel model = new SentenceModel(modelStream);
    mSentenceDetector = new SentenceDetectorME(model);
    modelStream.close();

    modelStream = getClass().getResourceAsStream("/opennlp15model-sa/en-token.bin");
    mTokenizer = new EnglishTokenizer(modelStream, mDicts);
    modelStream.close();

    // The parser model is about 15x the size of the chunking model.
    // Keep this in mind when using deep parsing.
    modelStream = getClass().getResourceAsStream("/opennlp15model-sa/en-pos-maxent.bin");
    //POSModel posModel = POSTaggerUtils.createPOSModel(modelStream);
    POSModel posModel = new POSModel(modelStream);
    mTagDictionary = posModel.getTagDictionary();
    mPosTagger = new POSTaggerME(posModel);
    modelStream.close();

    modelStream = getClass().getResourceAsStream("/opennlp15model-sa/en-chunker.bin");
    ChunkerModel chunkerModel = new ChunkerModel(modelStream);
    mChunker = new ChunkerME(chunkerModel);
    modelStream.close();

    modelStream = getClass().getResourceAsStream("/opennlp15model-sa/en-parser-chunking.bin");
    ParserModel parserModel = new ParserModel(modelStream);
    mParser = ParserFactory.create(parserModel);
    modelStream.close();
}
Example 7: loadResource
import opennlp.tools.parser.ParserModel; // import the required package/class
private void loadResource() throws InvalidFormatException, IOException {
    if (parser == null) {
        InputStream is = OpenNLPParser.class.getClassLoader().getResourceAsStream(PARSER_MODEL);
        ParserModel model = new ParserModel(is);
        parser = ParserFactory.create(model);
        is.close();
    }
}
Example 8: parse
import opennlp.tools.parser.ParserModel; // import the required package/class
public static Parse parse(String input) {
    ParserModel model = (ParserModel) models.get(ParserModel.class);
    Parser parser = ParserFactory.create(model);
    Parse[] topParses = ParserTool.parseLine(input, parser, tokenizer(), 1);
    return topParses[0];
}
Example 9: OpenNLP
import opennlp.tools.parser.ParserModel; // import the required package/class
public OpenNLP() throws Exception {
    tagger = new POSTaggerME(new POSModel(this.getClass().getClassLoader()
            .getResourceAsStream("models/en-pos-maxent.bin")));
    parser = ParserFactory.create(new ParserModel(this.getClass().getClassLoader()
            .getResourceAsStream("models/en-parser-chunking.bin")));
    tokenizer = new TokenizerME(new TokenizerModel(this.getClass().getClassLoader()
            .getResourceAsStream("models/en-token.bin")));
    stemmer = new EnglishStemmer();
    log.info("OpenNLP models loaded");
}
Example 10: ParserExtractor
import opennlp.tools.parser.ParserModel; // import the required package/class
public ParserExtractor() {
    sentenceDetector = new SentenceDetector(Consts.EN_SENT_MODEL);
    try (InputStream modelIn = ParserExtractor.class.getClassLoader()
            .getResourceAsStream(Consts.EN_PARSER_MODEL)) {
        ParserModel model = new ParserModel(modelIn);
        parser = ParserFactory.create(model);
    } catch (IOException e) {
        e.printStackTrace();
    }
}
Example 11: init
import opennlp.tools.parser.ParserModel; // import the required package/class
public void init() throws InvalidFormatException {
    File modelsDir = new File(this.modelsPath);
    this.parserMFile = new File(modelsDir, "en-parser-chunking.bin");
    this.sentDetectorMFile = new File(modelsDir, "en-sent.bin");
    this.posMFile = new File(modelsDir, "en-pos-maxent.bin");
    InputStream sentModelIn = null;
    FileInputStream parserStream;
    try {
        // for finding sentences
        sentModelIn = new FileInputStream(sentDetectorMFile);
        this.sentenceModel = new SentenceModel(sentModelIn);
        // for finding POS
        FileInputStream posModelStream = new FileInputStream(posMFile);
        POSModel model = new POSModel(posModelStream);
        this.tagger = new POSTaggerME(model);
        // for parsing
        parserStream = new FileInputStream(parserMFile);
        this.parserModel = new ParserModel(parserStream);
    } catch (FileNotFoundException e2) {
        e2.printStackTrace();
    } catch (IOException e) {
        e.printStackTrace();
    }
    this.modelsAreInitialized = true;
}
Example 12: init
import opennlp.tools.parser.ParserModel; // import the required package/class
public void init() throws InvalidFormatException {
    File modelsDir = new File(this.modelsPath);
    this.parserMFile = new File(modelsDir, "en-parser-chunking.bin");
    this.sentDetectorMFile = new File(modelsDir, "en-sent.bin");
    this.chunkerMFile = new File(modelsDir, "en-chunker.bin");
    this.posMFile = new File(modelsDir, "en-pos-maxent.bin");
    InputStream sentModelIn = null;
    FileInputStream parserStream;
    try {
        // for finding sentences
        sentModelIn = new FileInputStream(sentDetectorMFile);
        this.sentenceModel = new SentenceModel(sentModelIn);
        // for finding POS
        FileInputStream posModelStream = new FileInputStream(posMFile);
        POSModel model = new POSModel(posModelStream);
        this.tagger = new POSTaggerME(model);
        // for parsing
        parserStream = new FileInputStream(parserMFile);
        this.parserModel = new ParserModel(parserStream);
    } catch (FileNotFoundException e2) {
        e2.printStackTrace();
    } catch (IOException e) {
        e.printStackTrace();
    }
    this.modelsAreInitialized = true;
}
Example 13: SentenceSimilarity
import opennlp.tools.parser.ParserModel; // import the required package/class
public SentenceSimilarity() {
    File modelsDir = new File(this.modelsPath);
    this.parserMFile = new File(modelsDir, "en-parser-chunking.bin");
    this.sentDetectorMFile = new File(modelsDir, "en-sent.bin");
    this.posMFile = new File(modelsDir, "en-pos-maxent.bin");
    InputStream sentModelIn = null;
    FileInputStream parserStream;
    try {
        // for finding sentences
        sentModelIn = new FileInputStream(sentDetectorMFile);
        this.sentenceModel = new SentenceModel(sentModelIn);
        // for finding POS
        FileInputStream posModelStream = new FileInputStream(posMFile);
        POSModel model = new POSModel(posModelStream);
        this.tagger = new POSTaggerME(model);
        // for parsing
        parserStream = new FileInputStream(parserMFile);
        this.parserModel = new ParserModel(parserStream);
    } catch (FileNotFoundException e2) {
        e2.printStackTrace();
    } catch (IOException e) {
        e.printStackTrace();
    }
    sentenceDetector = new SentenceDetectorME(this.sentenceModel);
    parser = ParserFactory.create(
            this.parserModel,
            20,    // beam size
            0.95); // advance percentage
}
Example 14: init
import opennlp.tools.parser.ParserModel; // import the required package/class
public void init() throws InvalidFormatException {
    File modelsDir = new File(this.modelsPath);
    this.parserMFile = new File(modelsDir, "en-parser-chunking.bin");
    this.sentDetectorMFile = new File(modelsDir, "en-sent.bin");
    this.posMFile = new File(modelsDir, "en-pos-maxent.bin");
    this.nerMFile = new File(modelsDir, "en-ner-person.bin");
    InputStream sentModelIn = null;
    InputStream nerModelIn = null;
    FileInputStream parserStream;
    try {
        // for finding sentences
        sentModelIn = new FileInputStream(sentDetectorMFile);
        this.sentenceModel = new SentenceModel(sentModelIn);
        // for finding POS
        FileInputStream posModelStream = new FileInputStream(posMFile);
        POSModel model = new POSModel(posModelStream);
        this.tagger = new POSTaggerME(model);
        // for parsing
        parserStream = new FileInputStream(parserMFile);
        this.parserModel = new ParserModel(parserStream);
        // for named-entity recognition
        nerModelIn = new FileInputStream(nerMFile);
        this.nerModel = new TokenNameFinderModel(nerModelIn);
    } catch (FileNotFoundException e2) {
        e2.printStackTrace();
    } catch (IOException e) {
        e.printStackTrace();
    }
    this.modelsAreInitialized = true;
}
Example 15: init
import opennlp.tools.parser.ParserModel; // import the required package/class
public static void init() throws InvalidFormatException {
    File modelsDir = new File(modelsPath);
    parserMFile = new File(modelsDir, "en-parser-chunking.bin");
    sentDetectorMFile = new File(modelsDir, "en-sent.bin");
    posMFile = new File(modelsDir, "en-pos-maxent.bin");
    InputStream sentModelIn = null;
    FileInputStream parserStream;
    try {
        // for finding sentences
        sentModelIn = new FileInputStream(sentDetectorMFile);
        sentenceModel = new SentenceModel(sentModelIn);
        // for finding POS
        FileInputStream posModelStream = new FileInputStream(posMFile);
        POSModel model = new POSModel(posModelStream);
        tagger = new POSTaggerME(model);
        // for parsing
        parserStream = new FileInputStream(parserMFile);
        parserModel = new ParserModel(parserStream);
    } catch (FileNotFoundException e2) {
        e2.printStackTrace();
    } catch (IOException e) {
        e.printStackTrace();
    }
}