本文整理汇总了Java中edu.stanford.nlp.stats.Counters.subtractInPlace方法的典型用法代码示例。如果您正苦于以下问题:Java Counters.subtractInPlace方法的具体用法?Java Counters.subtractInPlace怎么用?Java Counters.subtractInPlace使用的例子?那么恭喜您, 这里精选的方法代码示例或许可以为您提供帮助。您也可以进一步了解该方法所在类edu.stanford.nlp.stats.Counters
的用法示例。
在下文中一共展示了Counters.subtractInPlace方法的3个代码示例,这些例子默认根据受欢迎程度排序。您可以为喜欢或者感觉有用的代码点赞,您的评价将有助于系统推荐出更棒的Java代码示例。
示例1: getSamples
import edu.stanford.nlp.stats.Counters; //导入方法依赖的package包/类
/**
 * Draws pairwise training samples from the cached n-best lists and converts
 * them into a binary RVF classification dataset.
 *
 * For each source sentence i, up to {@code gamma} random hypothesis pairs are
 * drawn; a pair is kept only if its sentence-level metric scores differ by at
 * least {@code nThreshold}. The {@code xi} kept pairs with the largest score
 * gaps each yield two datums: the feature-vector difference in the
 * better-minus-worse direction (label "1") and the reverse (label "0"),
 * both restricted to the feature white list.
 *
 * @param random       source of randomness for hypothesis sampling
 * @param featureIndex shared feature index for the resulting dataset
 * @return an RVF dataset of white-listed feature-difference vectors
 */
RVFDataset<String, String> getSamples(Random random, Index<String> featureIndex) {
  List<List<ScoredFeaturizedTranslation<IString, String>>> nbestlists = MERT.nbest.nbestLists();
  // Only features occurring in at least minFeatureSegmentCount segments survive.
  Set<String> featureWhiteList = OptimizerUtils.featureWhiteList(MERT.nbest, minFeatureSegmentCount);
  // Unfiltered feature count, computed solely for the diagnostic below.
  int totalFeatureCount = OptimizerUtils.featureWhiteList(MERT.nbest, 0).size();
  // Fixed typo in diagnostic message: "Filterd" -> "Filtered".
  System.err.printf("Min Feature Segment Count: %d Features Filtered to: %d from: %d\n", minFeatureSegmentCount, featureWhiteList.size(), totalFeatureCount);
  System.err.printf("White List Features:\n%s\n", featureWhiteList);
  // Binary labels: "1" = better-minus-worse difference, "0" = the reverse.
  Index<String> labelIndex = new HashIndex<String>();
  labelIndex.add("0");
  labelIndex.add("1");
  RVFDataset<String,String> dataset = new RVFDataset<String, String>(xi*nbestlists.size(), featureIndex, labelIndex);
  for (int i = 0; i < nbestlists.size(); i++) {
    // Candidate pairs for this list: (score gap, (betterIdx, worseIdx)).
    List<Pair<Double, Pair<Integer, Integer>>> v = new ArrayList<Pair<Double, Pair<Integer, Integer>>>();
    System.err.printf("Creating Eval Metric for list %d...\n", i);
    // Sentence-level metric built over reference i only.
    EvaluationMetric<IString, String> evalMetric =
        CorpusLevelMetricFactory.newMetric(mert.evalMetric, mert.references.subList(i, i+1));
    System.err.printf("Sampling n-best list: %d\n", i);
    for (int g = 0; g < gamma; g++) {
      int jMax = nbestlists.get(i).size();
      int j = random.nextInt(jMax);
      int jPrime = random.nextInt(jMax);
      // Sentence-level evaluation metric score for each sampled hypothesis.
      double gJ = evalMetric.score(nbestlists.get(i).subList(j, j+1));
      double gJPrime = evalMetric.score(nbestlists.get(i).subList(jPrime, jPrime+1));
      double absDiff = Math.abs(gJ-gJPrime);
      if (absDiff >= nThreshold) {
        // Order the indices so the metric-better hypothesis comes first.
        if (gJ > gJPrime) {
          v.add(new Pair<Double, Pair<Integer,Integer>>(absDiff, new Pair<Integer,Integer>(j, jPrime)));
        } else {
          v.add(new Pair<Double, Pair<Integer,Integer>>(absDiff, new Pair<Integer,Integer>(jPrime, j)));
        }
      }
    }
    // Keep the xi pairs with the largest metric-score gaps.
    Collections.sort(v);
    Collections.reverse(v);
    List<Pair<Double, Pair<Integer, Integer>>> selectedV = v.subList(0, Math.min(xi, v.size()));
    System.err.printf("\taccepted samples: %d\n", selectedV.size());
    for (Pair<Double, Pair<Integer, Integer>> selectedPair : selectedV) {
      Counter<String> plusFeatures = OptimizerUtils.featureValueCollectionToCounter(
          nbestlists.get(i).get(selectedPair.second.first).features);
      Counter<String> minusFeatures = OptimizerUtils.featureValueCollectionToCounter(
          nbestlists.get(i).get(selectedPair.second.second).features);
      // Positive datum: better minus worse, restricted to the white list.
      Counter<String> gtVector = new ClassicCounter<String>(plusFeatures);
      Counters.subtractInPlace(gtVector, minusFeatures);
      Counters.retainKeys(gtVector, featureWhiteList);
      RVFDatum<String, String> datumGt = new RVFDatum<String, String>(gtVector, "1");
      // Negative datum: worse minus better (mirror of the above).
      Counter<String> ltVector = new ClassicCounter<String>(minusFeatures);
      Counters.subtractInPlace(ltVector, plusFeatures);
      Counters.retainKeys(ltVector, featureWhiteList);
      RVFDatum<String, String> datumLt = new RVFDatum<String, String>(ltVector, "0");
      dataset.add(datumGt);
      dataset.add(datumLt);
    }
  }
  return dataset;
}
示例2: optimize
import edu.stanford.nlp.stats.Counters; //导入方法依赖的package包/类
@SuppressWarnings({ "unchecked" })
@Override
public Counter<String> optimize(Counter<String> initialWts) {
  // Metric-best translations act as fixed targets; each sweep line-searches
  // from the model's current argmax toward its target.
  List<ScoredFeaturizedTranslation<IString, String>> targets = (new HillClimbingMultiTranslationMetricMax<IString, String>(
      emetric)).maximize(nbest);
  Counter<String> wts = new ClassicCounter<String>(initialWts);
  int changes = 0, totalChanges = 0, iter = 0;
  do {
    // BUG FIX: reset the per-sweep change counter. Previously `changes` was
    // only ever incremented, so the `while (changes != 0)` condition could
    // never become false after the first productive sweep and the loop
    // would never terminate.
    changes = 0;
    for (int i = 0; i < targets.size(); i++) {
      // Current classifier argmax for sentence i under the present weights.
      Scorer<String> scorer = new DenseScorer(wts, MERT.featureIndex);
      GreedyMultiTranslationMetricMax<IString, String> argmaxByScore = new GreedyMultiTranslationMetricMax<IString, String>(
          new ScorerWrapperEvaluationMetric<IString, String>(scorer));
      List<List<ScoredFeaturizedTranslation<IString, String>>> nbestSlice = Arrays
          .asList(nbest.nbestLists().get(i));
      List<ScoredFeaturizedTranslation<IString, String>> current = argmaxByScore
          .maximize(new FlatNBestList(nbestSlice));
      // Search direction: target features minus current-argmax features.
      Counter<String> dir = MERT.summarizedAllFeaturesVector(Arrays
          .asList(targets.get(i)));
      Counters
          .subtractInPlace(dir, MERT.summarizedAllFeaturesVector(current));
      Counter<String> newWts = mert.lineSearch(nbest, wts, dir, emetric);
      double ssd = MERT.wtSsd(wts, newWts);
      System.err.printf(
          "%d.%d - ssd: %e changes(total: %d iter: %d) apply: %f\n", iter, i,
          ssd, totalChanges, changes,
          MERT.evalAtPoint(nbest, newWts, emetric));
      wts = newWts;
      // A weight move of at least NO_PROGRESS_SSD counts as progress.
      if (ssd >= MERT.NO_PROGRESS_SSD) {
        changes++;
        totalChanges++;
      }
    }
    iter++;
  } while (changes != 0);
  return wts;
}
示例3: optimize
import edu.stanford.nlp.stats.Counters; //导入方法依赖的package包/类
@Override
public Counter<String> optimize(Counter<String> initialWts) {
  System.err.printf("RandomAltPairs forceBetter = %b\n", forceBetter);
  Counter<String> weights = initialWts;
  int stagnantIters = 0;
  // Iterate until NO_PROGRESS_LIMIT consecutive updates fail to move the weights.
  while (stagnantIters < MERT.NO_PROGRESS_LIMIT) {
    // Model scorer is built from the weights in effect at the top of the pass.
    Scorer<String> modelScorer = new DenseScorer(weights, MERT.featureIndex);
    // Sample one translation per sentence, optionally restricted to ones
    // that beat the current model choice under the evaluation metric.
    List<ScoredFeaturizedTranslation<IString, String>> sampled;
    if (forceBetter) {
      sampled = mert.randomBetterTranslations(nbest, weights, emetric);
    } else {
      sampled = mert.randomTranslations(nbest);
    }
    Counter<String> searchDir = MERT.summarizedAllFeaturesVector(sampled);
    // First line search: move from the current weights toward the sampled
    // translations' summed feature vector.
    Counter<String> midpointWts = mert.lineSearch(nbest, weights, searchDir, emetric);
    MultiTranslationMetricMax<IString, String> argmaxSearch = new HillClimbingMultiTranslationMetricMax<IString, String>(
        new ScorerWrapperEvaluationMetric<IString, String>(modelScorer));
    // NOTE(review): result was unused in the original as well; the call is
    // kept for exact behavioral parity.
    argmaxSearch.maximize(nbest);
    // Re-point the direction: sampled-features minus current weights.
    Counters.subtractInPlace(searchDir, weights);
    System.err.printf("Random alternate score: %.5f \n",
        emetric.score(sampled));
    // Second line search starts from the midpoint found above.
    Counter<String> updatedWts = mert.lineSearch(nbest, midpointWts, searchDir, emetric);
    double eval = MERT.evalAtPoint(nbest, updatedWts, emetric);
    // Sum of squared deltas over the new weight vector's keys only.
    double ssd = 0;
    for (String feature : updatedWts.keySet()) {
      double delta = weights.getCount(feature) - updatedWts.getCount(feature);
      ssd += delta * delta;
    }
    System.err.printf("Eval: %.5f SSD: %e (no progress: %d)\n", eval, ssd,
        stagnantIters);
    weights = updatedWts;
    stagnantIters = (ssd < MERT.NO_PROGRESS_SSD) ? stagnantIters + 1 : 0;
  }
  return weights;
}