This article collects typical usage examples of the Util.deserialize method from the Java class edu.uw.easysrl.util.Util. If you are wondering exactly what Util.deserialize does, how to use it, or where to find examples of it, the hand-picked method code examples below should help. You can also read further about the usage of the containing class edu.uw.easysrl.util.Util.
A total of 7 code examples of Util.deserialize are shown below, sorted by popularity by default. You can upvote the examples you like or find useful; your feedback helps the system recommend better Java code examples.
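Judging from the call sites in the examples below, Util.deserialize behaves as a generic static helper that loads a single Java-serialized object from a File, with the target type taken from the assignment context or supplied explicitly (as in Util.<FeatureSet>deserialize(...)). The following is a minimal sketch of that pattern under those assumptions; the DeserializeDemo class, the command-line model path, and the printed summary are placeholders for illustration, while the file names mirror the examples.

import java.io.File;
import java.io.IOException;
import edu.uw.easysrl.util.Util; // import the package/class that the method depends on

public final class DeserializeDemo {
    public static void main(final String[] args) throws IOException {
        // Hypothetical EasySRL model directory passed on the command line.
        final File modelFolder = new File(args[0]);

        // The target type is inferred from the assignment ("weights" is stored as a
        // double[] in Examples 2 and 5 below) ...
        final double[] weights = Util.deserialize(new File(modelFolder, "weights"));

        // ... or can be supplied explicitly as a type witness when inference needs help.
        final Object features = Util.<Object>deserialize(new File(modelFolder, "features"));

        System.out.println("Loaded " + weights.length + " weights; features: "
                + features.getClass().getName());
    }
}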
Example 1: evaluate
import edu.uw.easysrl.util.Util; // import the package/class that the method depends on
private void evaluate(final double testingSupertaggerBeam, final Optional<Double> supertaggerWeight)
        throws IOException {
    final int maxSentenceLength = 70;
    final POSTagger posTagger = POSTagger
            .getStanfordTagger(new File(dataParameters.getExistingModel(), "posTagger"));
    final SRLParser parser = new JointSRLParser(EasySRL.makeParser(trainingParameters.getModelFolder()
            .getAbsolutePath(), testingSupertaggerBeam, ParsingAlgorithm.ASTAR, 20000, true, supertaggerWeight, 1,
            70), posTagger);
    // The backoff pipeline parser loads its serialized label classifier via Util.deserialize.
    final SRLParser backoff = new BackoffSRLParser(parser, new PipelineSRLParser(EasySRL.makeParser(dataParameters
            .getExistingModel().getAbsolutePath(), 0.0001, ParsingAlgorithm.ASTAR, 100000, false, Optional.empty(),
            1, 70), Util.deserialize(new File(dataParameters.getExistingModel(), "labelClassifier")), posTagger));
    final Results results = SRLEvaluation
            .evaluate(backoff, ParallelCorpusReader.getPropBank00(), maxSentenceLength);
    System.out.println("Final result: F1=" + results.getF1());
}
Example 2: build
import edu.uw.easysrl.util.Util; // import the package/class that the method depends on
public AbstractParser build() {
    try {
        if (modelFactory == null) {
            if (jointModel) {
                // Load the serialized feature index and weight vector for the joint model.
                final Map<FeatureKey, Integer> keyToIndex = Util.deserialize(new File(modelFolder, "featureToIndex"));
                final double[] weights = Util.deserialize(new File(modelFolder, "weights"));
                if (supertaggerWeight != null) {
                    weights[0] = supertaggerWeight;
                }
                modelFactory = new SRLFactoredModelFactory(weights,
                        Util.<FeatureSet>deserialize(new File(modelFolder, "features"))
                                .setSupertaggingFeature(new File(modelFolder, "/pipeline"), supertaggerBeam),
                        lexicalCategories, cutoffs, keyToIndex);
            } else {
                final Tagger tagger = !useSupertaggedInput ?
                        Tagger.make(modelFolder, supertaggerBeam, 50, cutoffs) :
                        null;
                modelFactory = new SupertagFactoredModelFactory(tagger, lexicalCategories, nbest > 1);
            }
        }
        return build2();
    } catch (final IOException e) {
        throw new UncheckedIOException(e);
    }
}
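Example 2 is the build() method inside a parser builder; Examples 3 and 6 below show the fluent calls a client makes against such a builder. For orientation, here is a minimal client-side sketch: the import paths and the AbstractParser result type are assumptions based on the EasySRL package layout, and the beam and chart-size values simply mirror the examples.

import java.io.File;
import java.io.IOException;
import edu.uw.easysrl.syntax.parser.AbstractParser;
import edu.uw.easysrl.syntax.parser.ParserAStar;

public final class ParserBuilderDemo {
    public static void main(final String[] args) throws IOException {
        // Hypothetical model directory; the builder deserializes the model files
        // (featureToIndex, weights, features, ...) from it, as shown in build() above.
        final File modelFolder = new File(args[0]);
        final AbstractParser parser = new ParserAStar.Builder(modelFolder)
                .supertaggerBeam(0.005)
                .maxChartSize(20000)
                .maximumSentenceLength(100)
                .build();
        System.out.println("Built parser: " + parser.getClass().getSimpleName());
    }
}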
Example 3: main
import edu.uw.easysrl.util.Util; // import the package/class that the method depends on
public static void main(final String[] args) throws IOException {
    final String folder = Util.getHomeFolder() + "/Downloads/lstm_models/model";
    final String pipelineFolder = folder + "/pipeline";
    final POSTagger posTagger = POSTagger.getStanfordTagger(new File(pipelineFolder, "posTagger"));
    final PipelineSRLParser pipeline = new PipelineSRLParser(new ParserAStar.Builder(new File(pipelineFolder))
            .supertaggerBeam(0.00001).build(), Util.deserialize(new File(pipelineFolder, "labelClassifier")),
            posTagger);
    for (final double beta : Arrays.asList(0.01, 0.005, 0.001)) {
        // for (final Double supertaggerWeight : Arrays.asList(null)) {
        final Double supertaggerWeight = null;
        final SRLParser jointAstar = new BackoffSRLParser(
                new JointSRLParser(new ParserAStar.Builder(new File(folder)).maxChartSize(20000).supertaggerBeam(beta)
                        .supertaggerWeight(supertaggerWeight).build(), posTagger), pipeline);
        evaluate(jointAstar,
                // pipeline,
                // BrownPropbankReader.readCorpus()
                ParallelCorpusReader.getPropBank00()
                // ParallelCorpusReader.getPropBank23()
                , 70);
        // }
    }
}
Example 4: makePipelineParser
import edu.uw.easysrl.util.Util; // import the package/class that the method depends on
private static PipelineSRLParser makePipelineParser(final File folder,
        final CommandLineArguments commandLineOptions, final double supertaggerBeam,
        final boolean outputDependencies) throws IOException {
    final POSTagger posTagger = POSTagger.getStanfordTagger(new File(folder, "posTagger"));
    final File labelClassifier = new File(folder, "labelClassifier");
    // Deserialize the label classifier only if the file exists and dependency output is requested;
    // otherwise fall back to a dummy classifier.
    final LabelClassifier classifier = labelClassifier.exists() && outputDependencies ? Util
            .deserialize(labelClassifier) : CCGBankEvaluation.dummyLabelClassifier;
    return new PipelineSRLParser(new ParserAStar.Builder(folder).maxChartSize(100000)
            .supertaggerBeam(supertaggerBeam).nBest(commandLineOptions.getNbest())
            .maximumSentenceLength(commandLineOptions.getMaxLength()).build(), classifier, posTagger);
}
Example 5: makeParser
import edu.uw.easysrl.util.Util; // import the package/class that the method depends on
@Deprecated
public static Parser makeParser(final CommandLineArguments commandLineOptions, final int maxChartSize,
        final boolean joint, final Optional<Double> supertaggerWeight, final boolean loadSupertagger)
        throws IOException {
    final File modelFolder = Util.getFile(commandLineOptions.getModel());
    Coindexation.parseMarkedUpFile(new File(modelFolder, "markedup"));
    final File cutoffsFile = new File(modelFolder, "cutoffs");
    // The cutoffs dictionary is optional: deserialize it only if the file exists.
    final CutoffsDictionaryInterface cutoffs = cutoffsFile.exists() ? Util.deserialize(cutoffsFile) : null;
    ModelFactory modelFactory;
    final Collection<Category> lexicalCategories = TaggerEmbeddings.loadCategories(new File(modelFolder,
            "categories"));
    if (joint) {
        // Joint models also need the serialized feature index, weight vector, and feature set.
        final Map<FeatureKey, Integer> keyToIndex = Util.deserialize(new File(modelFolder, "featureToIndex"));
        final double[] weights = Util.deserialize(new File(modelFolder, "weights"));
        if (supertaggerWeight.isPresent()) {
            weights[0] = supertaggerWeight.get();
        }
        modelFactory = new SRLFactoredModelFactory(weights, Util.<FeatureSet>deserialize(
                new File(modelFolder, "features")).setSupertaggingFeature(new File(modelFolder, "/pipeline"),
                commandLineOptions.getSupertaggerbeam()), lexicalCategories, cutoffs, keyToIndex);
    } else {
        final Tagger tagger = loadSupertagger ? Tagger.make(modelFolder, commandLineOptions.getSupertaggerbeam(),
                50, cutoffs) : null;
        modelFactory = new SupertagFactoredModelFactory(tagger, lexicalCategories,
                commandLineOptions.getNbest() > 1);
    }
    return makeParser(commandLineOptions, maxChartSize, modelFactory);
}
Example 6: makeParser
import edu.uw.easysrl.util.Util; // import the package/class that the method depends on
private static SRLParser makeParser(final String folder) throws IOException {
    final int nbest = 10;
    final String pipelineFolder = folder + "/pipeline";
    final POSTagger posTagger = POSTagger.getStanfordTagger(new File(pipelineFolder, "posTagger"));
    final PipelineSRLParser pipeline = new PipelineSRLParser(new ParserAStar.Builder(new File(pipelineFolder))
            .supertaggerBeam(0.0001).nBest(nbest).maximumSentenceLength(100).build(), Util.deserialize(new File(
            pipelineFolder, "labelClassifier")), posTagger);
    final SRLParser jointAstar = new SemanticParser(new BackoffSRLParser(new JointSRLParser(
            new ParserAStar.Builder(new File(folder)).supertaggerBeam(0.005).nBest(nbest)
                    .maximumSentenceLength(100).build(), posTagger), pipeline),
            CompositeLexicon.makeDefault(new File(folder, "lexicon")));
    return jointAstar;
}
Example 7: loadData
import edu.uw.easysrl.util.Util; // import the package/class that the method depends on
@Override
public void loadData(final int firstSentence, final int lastSentence,
        final TrainingDataParameters dataParameters,
        final File modelFolder, final Logger trainingLogger)
        throws IOException, RemoteException {
    System.out.println("Loading sentences from: " + firstSentence + "\t" + lastSentence);
    final Collection<Sentence> sentences = new ArrayList<>();
    final Iterator<Sentence> sentenceIt = ParallelCorpusReader.READER.readCorpus(false);
    int i = 0;
    while (sentenceIt.hasNext()) {
        final Sentence sentence = sentenceIt.next();
        if (i < firstSentence) {
            // Skip sentences before the requested range.
        } else if (i == lastSentence) {
            break;
        } else {
            System.out.println("Sentence "
                    + sentence.getCCGBankDependencyParse().getFile()
                    + "."
                    + sentence.getCCGBankDependencyParse().getSentenceNumber());
            sentences.add(sentence);
        }
        i++;
    }
    // Deserialize the training parameters, cutoffs dictionary, and feature index saved alongside the model.
    final Training.TrainingParameters trainingParameters = Util
            .deserialize(new File(modelFolder, "parameters"));
    final CutoffsDictionaryInterface cutoffs = Util.deserialize(new File(modelFolder, "cutoffs"));
    final Map<FeatureKey, Integer> featureToIndex = Util
            .deserialize(trainingParameters.getFeatureToIndexFile());
    final List<TrainingExample> data = new TrainingDataLoader(cutoffs,
            dataParameters, true).makeTrainingData(sentences.iterator(), false);
    lossFunction = Optimization.getUnregularizedLossFunction(data,
            featureToIndex, trainingParameters, trainingLogger);
}