本文整理汇总了Java中edu.stanford.nlp.classify.LinearClassifierFactory类的典型用法代码示例。如果您正苦于以下问题:Java LinearClassifierFactory类的具体用法?Java LinearClassifierFactory怎么用?Java LinearClassifierFactory使用的例子?那么, 这里精选的类代码示例或许可以为您提供帮助。
LinearClassifierFactory类属于edu.stanford.nlp.classify包,在下文中一共展示了LinearClassifierFactory类的6个代码示例,这些例子默认根据受欢迎程度排序。您可以为喜欢或者感觉有用的代码点赞,您的评价将有助于系统推荐出更棒的Java代码示例。
示例1: trainRVF
import edu.stanford.nlp.classify.LinearClassifierFactory; //导入依赖的package包/类
/**
 * Trains a linear classifier from real-valued feature vectors (RVF data).
 * Each map in {@code list_feature2values} is paired positionally with the label at the
 * same index in {@code list_labels}.
 *
 * <p>Uses fixed hyper-parameters (sigma=3, epsilon=15, quasi-Newton optimization) as in
 * the original tuning of this trainer.
 *
 * @param list_feature2values one feature-name-to-value map per training example
 * @param list_labels gold label for each example, parallel to {@code list_feature2values}
 * @return the trained classifier (also stored in {@code this.classifier})
 * @throws IllegalArgumentException if the two lists differ in length
 */
public LinearClassifier<String, String> trainRVF(List<HashMap<String, Double>> list_feature2values,
List<String> list_labels) {
    if (list_feature2values.size() != list_labels.size()) {
        throw new IllegalArgumentException("features/labels size mismatch: "
                + list_feature2values.size() + " vs " + list_labels.size());
    }
    // Presize: one datum per example.
    List<Datum<String, String>> trainingData =
            new ArrayList<Datum<String, String>>(list_feature2values.size());
    for (int i = 0; i < list_feature2values.size(); i++) {
        // Parameterized RVFDatum avoids the raw-type unchecked warning of the original.
        trainingData.add(new RVFDatum<String, String>(
                Counters.fromMap(list_feature2values.get(i)), list_labels.get(i)));
    }
    // Build a classifier factory with the fixed hyper-parameters.
    LinearClassifierFactory<String, String> factory = new LinearClassifierFactory<String, String>();
    factory.setSigma(3);
    factory.setEpsilon(15);
    factory.useQuasiNewton();
    factory.setVerbose(true);
    LinearClassifier<String, String> classifier = factory.trainClassifier(trainingData);
    this.classifier = classifier;
    return classifier;
}
示例2: adaptMaxEnt
import edu.stanford.nlp.classify.LinearClassifierFactory; //导入依赖的package包/类
/**
 * Adapts the weights of the current {@link LinearClassifier} to the given dataset.
 * Only supported for a Gaussian (quadratic) prior; any other prior or classifier
 * type is rejected.
 */
private void adaptMaxEnt(Dataset<String, String> adapt) {
  // Adaptation only works on a LinearClassifier.
  if (!(classifier instanceof LinearClassifier)) {
    throw new UnsupportedOperationException();
  }
  // So far the adaptation is only done on Gaussian Prior. Haven't checked how it'll
  // work on other kinds of priors. -pichuan
  if (flags.useHuber || flags.useQuartic) {
    throw new UnsupportedOperationException();
  }
  int priorType = LogPrior.LogPriorType.QUADRATIC.ordinal();
  LinearClassifierFactory<String, String> factory =
      new LinearClassifierFactory<String, String>(flags.tolerance, flags.useSum, priorType,
          flags.adaptSigma, flags.epsilon, flags.QNsize);
  ((LinearClassifier<String, String>) classifier).adaptWeights(adapt, factory);
}
示例3: trainSemiSup
import edu.stanford.nlp.classify.LinearClassifierFactory; //导入依赖的package包/类
/**
 * Semi-supervised training: fits a linear classifier on the labeled data plus
 * biased data reweighted through the given confusion matrix, and stores the
 * result in {@code this.classifier}.
 */
private void trainSemiSup(Dataset<String, String> data, Dataset<String, String> biasedData, double[][] confusionMatrix) {
  // Pick the regularization prior from the flags; quadratic is the default.
  final int priorType;
  if (flags.useHuber) {
    priorType = LogPrior.LogPriorType.HUBER.ordinal();
  } else if (flags.useQuartic) {
    priorType = LogPrior.LogPriorType.QUARTIC.ordinal();
  } else {
    priorType = LogPrior.LogPriorType.QUADRATIC.ordinal();
  }
  LinearClassifierFactory<String, String> factory = new LinearClassifierFactory<String, String>(
      flags.tolerance, flags.useSum, priorType, flags.sigma, flags.epsilon, flags.QNsize);
  // Optimizer choice: quasi-Newton when requested, otherwise conjugate gradient.
  if (flags.useQN) {
    factory.useQuasiNewton();
  } else {
    factory.useConjugateGradientAscent();
  }
  this.classifier = (LinearClassifier<String, String>)
      factory.trainClassifierSemiSup(data, biasedData, confusionMatrix, null);
}
示例4: trainBasic
import edu.stanford.nlp.classify.LinearClassifierFactory; //导入依赖的package包/类
/**
 * Trains a linear classifier from boolean (bag-of-features) data. The i-th
 * feature list is paired with the i-th label. Factory defaults are used for
 * tolerance, sigma, and epsilon.
 *
 * @param list_features one feature-string list per training example
 * @param list_labels gold label for each example, parallel to {@code list_features}
 * @return the trained classifier (also stored in {@code this.classifier})
 */
public LinearClassifier trainBasic(
List<List<String>> list_features, List<String> list_labels) {
    // Pair each feature list with its label to build the training set.
    List<Datum<String, String>> examples = new ArrayList<Datum<String, String>>();
    for (int idx = 0; idx < list_features.size(); idx++) {
        examples.add(new BasicDatum<String, String>(list_features.get(idx), list_labels.get(idx)));
    }
    // Train with the factory's default hyper-parameters; only verbosity is enabled.
    LinearClassifierFactory<String, String> lcFactory = new LinearClassifierFactory<String, String>();
    lcFactory.setVerbose(true);
    LinearClassifier<String, String> trained = lcFactory.trainClassifier(examples);
    this.classifier = trained;
    return trained;
}
示例5: trainMaxEnt
import edu.stanford.nlp.classify.LinearClassifierFactory; //导入依赖的package包/类
/**
 * Trains a maximum-entropy classifier on the given dataset and stores it in
 * {@code this.classifier}. When {@code flags.useNB} is set, a naive-Bayes-style
 * linear classifier is trained instead and all prior/optimizer flags are ignored.
 */
private void trainMaxEnt(Dataset<String, String> train) {
  // Pick the regularization prior from the flags; quadratic is the default.
  final int priorType;
  if (flags.useHuber) {
    priorType = LogPrior.LogPriorType.HUBER.ordinal();
  } else if (flags.useQuartic) {
    priorType = LogPrior.LogPriorType.QUARTIC.ordinal();
  } else {
    priorType = LogPrior.LogPriorType.QUADRATIC.ordinal();
  }
  LinearClassifier<String, String> trained;
  if (flags.useNB) {
    // Naive-Bayes shortcut: only sigma matters here.
    trained = new NBLinearClassifierFactory<String, String>(flags.sigma).trainClassifier(train);
  } else {
    LinearClassifierFactory<String, String> factory = new LinearClassifierFactory<String, String>(
        flags.tolerance, flags.useSum, priorType, flags.sigma, flags.epsilon, flags.QNsize);
    selectMinimizer(factory);
    trained = factory.trainClassifier(train);
  }
  this.classifier = trained;
}

/**
 * Configures the numeric optimizer on {@code factory} according to the flags.
 * The first matching flag wins; conjugate gradient is the fallback. The order
 * of these checks is significant and must not change.
 */
private void selectMinimizer(LinearClassifierFactory<String, String> factory) {
  if (flags.useQN) {
    factory.useQuasiNewton(flags.useRobustQN);
  } else if (flags.useStochasticQN) {
    factory.useStochasticQN(flags.initialGain, flags.stochasticBatchSize);
  } else if (flags.useSMD) {
    factory.useStochasticMetaDescent(flags.initialGain, flags.stochasticBatchSize,
        flags.stochasticMethod, flags.SGDPasses);
  } else if (flags.useSGD) {
    factory.useStochasticGradientDescent(flags.gainSGD, flags.stochasticBatchSize);
  } else if (flags.useSGDtoQN) {
    factory.useStochasticGradientDescentToQuasiNewton(flags.initialGain, flags.stochasticBatchSize,
        flags.SGDPasses, flags.QNPasses, flags.SGD2QNhessSamples,
        flags.QNsize, flags.outputIterationsToFile);
  } else if (flags.useHybrid) {
    factory.useHybridMinimizer(flags.initialGain, flags.stochasticBatchSize,
        flags.stochasticMethod, flags.hybridCutoffIteration);
  } else {
    factory.useConjugateGradientAscent();
  }
}
示例6: retrain
import edu.stanford.nlp.classify.LinearClassifierFactory; //导入依赖的package包/类
/**
 * Retrains the underlying linear classifier on new documents, warm-starting the
 * optimizer from the weights learned in the original training run. The old
 * weight matrix (indexed by the original feature/label indices) is re-mapped into
 * the new dataset's flattened weight vector before training.
 *
 * @param featureLabels retrain docs
 * @param featureIndex featureIndex of original dataset (used in training)
 * @param labelIndex labelIndex of original dataset (used in training)
 */
public void retrain(ObjectBank<List<IN>> featureLabels, Index<String> featureIndex, Index<String> labelIndex) {
int fs = featureIndex.size(); // old dim
int ls = labelIndex.size(); // old dim
Dataset<String, String> adapt = getDataset(featureLabels, featureIndex, labelIndex);
int prior = LogPrior.LogPriorType.QUADRATIC.ordinal();
LinearClassifier<String, String> lc = (LinearClassifier<String, String>) classifier;
LinearClassifierFactory<String, String> lcf = new LinearClassifierFactory<String, String>(flags.tolerance, flags.useSum, prior, flags.sigma, flags.epsilon, flags.QNsize);
double[][] weights = lc.weights(); // old dim
Index<String> newF = adapt.featureIndex;
Index<String> newL = adapt.labelIndex;
int newFS = newF.size();
int newLS = newL.size();
// Flattened initial-weight vector in the NEW dimensions; layout is
// feature-major: position = featurePos * numLabels + labelPos.
double[] x = new double[newFS*newLS]; // new dim
//System.err.println("old ["+fs+"]"+"["+ls+"]");
//System.err.println("new ["+newFS+"]"+"["+newLS+"]");
//System.err.println("new ["+newFS*newLS+"]");
// Copy every old weight into its slot in the new flattened vector. Assumes every
// old feature/label also appears in the new indices (indexOf would return -1
// otherwise) — TODO confirm getDataset guarantees this.
for (int i = 0; i < fs; i++) {
for (int j = 0; j < ls; j++) {
String f = featureIndex.get(i);
String l = labelIndex.get(j);
int newi = newF.indexOf(f)*newLS+newL.indexOf(l);
x[newi] = weights[i][j];
//if (newi == 144745*2) {
//System.err.println("What??"+i+"\t"+j);
//}
}
}
//System.err.println("x[144745*2]"+x[144745*2]);
// Optimize starting from x; returns the retrained weights in matrix form.
weights = lcf.trainWeights(adapt, x);
//System.err.println("x[144745*2]"+x[144745*2]);
//System.err.println("weights[144745]"+"[0]="+weights[144745][0]);
// Install the retrained weights back into the existing classifier in place.
lc.setWeights(weights);
/*
int delme = 0;
if (true) {
for (double[] dd : weights) {
delme++;
for (double d : dd) {
}
}
}
System.err.println(weights[delme-1][0]);
System.err.println("size of weights: "+delme);
*/
}