This article collects typical usage examples of the Java method weka.classifiers.Evaluation.weightedFMeasure. If you are wondering what Evaluation.weightedFMeasure does, how to call it, or where to find working examples of it, the hand-picked snippets below should help. You can also read more about its enclosing class, weka.classifiers.Evaluation.
The following presents 4 code examples of Evaluation.weightedFMeasure, sorted by popularity by default. You can upvote the examples you like or find useful; your feedback helps the system recommend better Java code examples.
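Before diving into the examples, here is a minimal sketch of the typical call pattern: load a dataset, cross-validate a classifier through an Evaluation object, then read the weighted F-measure. The data path data.arff and the choice of J48 are placeholders for illustration, not taken from the examples below.

import java.util.Random;

import weka.classifiers.Evaluation;
import weka.classifiers.trees.J48;
import weka.core.Instances;
import weka.core.converters.ConverterUtils.DataSource;

public class WeightedFMeasureDemo {
    public static void main(String[] args) throws Exception {
        // Load any ARFF file; "data.arff" is a placeholder path.
        Instances data = new DataSource("data.arff").getDataSet();
        if (data.classIndex() == -1) {
            data.setClassIndex(data.numAttributes() - 1); // assume the last attribute is the class
        }
        // 10-fold cross-validation with a fixed seed for reproducibility.
        Evaluation eval = new Evaluation(data);
        eval.crossValidateModel(new J48(), data, 10, new Random(1));
        // Weighted F-measure: per-class F-measures averaged, weighted by class frequency.
        System.out.println("Weighted F-measure: " + eval.weightedFMeasure());
    }
}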
Example 1: trainClassifier
import weka.classifiers.Evaluation; // import the package/class this method depends on
public void trainClassifier(Classifier classifier, File trainingDataset,
        FileOutputStream trainingModel, Integer crossValidationFoldNumber) throws Exception {
    // Load the training data from CSV.
    CSVLoader csvLoader = new CSVLoader();
    csvLoader.setSource(trainingDataset);
    Instances instances = csvLoader.getDataSet();
    // Instantiate the WEKA classifier that corresponds to the requested type.
    switch (classifier) {
        case KNN:
            // Common heuristic: K = ceil(sqrt(number of instances)).
            int K = (int) Math.ceil(Math.sqrt(instances.numInstances()));
            this.classifier = new IBk(K);
            break;
        case NB:
            this.classifier = new NaiveBayes();
            break;
    }
    // Default to the last attribute as the class if none is set.
    if (instances.classIndex() == -1) {
        instances.setClassIndex(instances.numAttributes() - 1);
    }
    this.classifier.buildClassifier(instances);
    // Optionally cross-validate and keep the headline metrics.
    if (crossValidationFoldNumber > 0) {
        Evaluation evaluation = new Evaluation(instances);
        evaluation.crossValidateModel(this.classifier, instances, crossValidationFoldNumber,
                new Random(1));
        kappa = evaluation.kappa();
        fMeasure = evaluation.weightedFMeasure();
        confusionMatrix = evaluation.toMatrixString("Confusion matrix: ");
    }
    // Serialize the trained model to the supplied stream.
    ObjectOutputStream outputStream = new ObjectOutputStream(trainingModel);
    outputStream.writeObject(this.classifier);
    outputStream.flush();
    outputStream.close();
}
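Example 1 writes the trained classifier with ObjectOutputStream, so the natural counterpart is to read it back with ObjectInputStream and classify new instances. The following is a small sketch of that read path; the method name loadTrainedModel and the model file name are hypothetical, not part of the original class.

import java.io.FileInputStream;
import java.io.ObjectInputStream;

import weka.classifiers.Classifier;

// Hypothetical helper: restores a model serialized by trainClassifier() above.
public static Classifier loadTrainedModel(String modelPath) throws Exception {
    try (ObjectInputStream in = new ObjectInputStream(new FileInputStream(modelPath))) {
        return (Classifier) in.readObject();
    }
}

// Usage (assuming an Instance whose dataset has the same structure as the training CSV):
// Classifier model = loadTrainedModel("classifier.model");
// double predictedClass = model.classifyInstance(someInstance);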
Example 2: getClassifierFScore
import weka.classifiers.Evaluation; // import the package/class this method depends on
public static double getClassifierFScore(int numTopics, String categoryName) throws Exception {
    int seed = 1;
    int folds = 10;
    // Load the ARFF file generated for this category and topic count.
    DataSource trainSource = new DataSource("inputFiles/rawFiles/ARFF-files/" + categoryName + "-ARFF/"
            + categoryName + "-" + numTopics + ".ARFF");
    Instances trainingSet = trainSource.getDataSet();
    if (trainingSet.classIndex() == -1)
        trainingSet.setClassIndex(trainingSet.numAttributes() - 1);
    // Resample to compensate for the minority class.
    Resample reSample = new Resample();
    reSample.setInputFormat(trainingSet);
    trainingSet = Filter.useFilter(trainingSet, reSample);
    // Shuffle and stratify before cross-validation.
    Random rand = new Random(seed);
    trainingSet.randomize(rand);
    if (trainingSet.classAttribute().isNominal())
        trainingSet.stratify(folds);
    RandomForest classifier = new RandomForest();
    // Run 10-fold cross-validation and report the weighted F-measure.
    Evaluation eval = new Evaluation(trainingSet);
    eval.crossValidateModel(classifier, trainingSet, folds, new Random(seed));
    return eval.weightedFMeasure();
}
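One plausible way to drive getClassifierFScore is to sweep the topic count and keep the value with the best weighted F-measure. The sketch below does exactly that; the topic range and the category name "sports" are made-up placeholders.

// Hypothetical driver for getClassifierFScore(); range and category are illustrative only.
public static void main(String[] args) throws Exception {
    int bestNumTopics = -1;
    double bestFScore = Double.NEGATIVE_INFINITY;
    for (int numTopics = 10; numTopics <= 100; numTopics += 10) {
        double fScore = getClassifierFScore(numTopics, "sports");
        System.out.printf("topics=%d  weighted F-measure=%.4f%n", numTopics, fScore);
        if (fScore > bestFScore) {
            bestFScore = fScore;
            bestNumTopics = numTopics;
        }
    }
    System.out.println("Best topic count: " + bestNumTopics + " (F=" + bestFScore + ")");
}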
Example 3: computeSingleRunResults
import weka.classifiers.Evaluation; // import the package/class this method depends on
public void computeSingleRunResults(MethodEvaluation methodEvaluation)
{
    Evaluation evaluation = methodEvaluation.getEvaluation();
    int hamIndex = HAM.ordinal();
    int spamIndex = SPAM.ordinal();

    Double hamPrecision = 100.0 * evaluation.precision(hamIndex);
    Double spamPrecision = 100.0 * evaluation.precision(spamIndex);
    Double weightedPrecision = 100.0 * evaluation.weightedPrecision();

    Double hamRecall = 100.0 * evaluation.recall(hamIndex);
    Double spamRecall = 100.0 * evaluation.recall(spamIndex);
    Double weightedRecall = 100.0 * evaluation.weightedRecall();

    Double hamAreaUnderPRC = 100.0 * evaluation.areaUnderPRC(hamIndex);
    Double spamAreaUnderPRC = 100.0 * evaluation.areaUnderPRC(spamIndex);
    Double weightedAreaUnderPRC = 100.0 * evaluation.weightedAreaUnderPRC();

    Double hamAreaUnderROC = 100.0 * evaluation.areaUnderROC(hamIndex);
    Double spamAreaUnderROC = 100.0 * evaluation.areaUnderROC(spamIndex);
    Double weightedAreaUnderROC = 100.0 * evaluation.weightedAreaUnderROC();

    Double hamFMeasure = 100.0 * evaluation.fMeasure(hamIndex);
    Double spamFMeasure = 100.0 * evaluation.fMeasure(spamIndex);
    Double weightedFMeasure = 100.0 * evaluation.weightedFMeasure();

    Double trainTime = (double) (methodEvaluation.getTrainEnd() - methodEvaluation.getTrainStart());
    Double testTime = (double) (methodEvaluation.getTestEnd() - methodEvaluation.getTestStart());

    addSingleRunResult(Metric.HAM_PRECISION, hamPrecision);
    addSingleRunResult(Metric.SPAM_PRECISION, spamPrecision);
    addSingleRunResult(Metric.WEIGHTED_PRECISION, weightedPrecision);
    addSingleRunResult(Metric.HAM_RECALL, hamRecall);
    addSingleRunResult(Metric.SPAM_RECALL, spamRecall);
    addSingleRunResult(Metric.WEIGHTED_RECALL, weightedRecall);
    addSingleRunResult(Metric.HAM_AREA_UNDER_PRC, hamAreaUnderPRC);
    addSingleRunResult(Metric.SPAM_AREA_UNDER_PRC, spamAreaUnderPRC);
    addSingleRunResult(Metric.WEIGHTED_AREA_UNDER_PRC, weightedAreaUnderPRC);
    addSingleRunResult(Metric.HAM_AREA_UNDER_ROC, hamAreaUnderROC);
    addSingleRunResult(Metric.SPAM_AREA_UNDER_ROC, spamAreaUnderROC);
    addSingleRunResult(Metric.WEIGHTED_AREA_UNDER_ROC, weightedAreaUnderROC);
    addSingleRunResult(Metric.HAM_F_MEASURE, hamFMeasure);
    addSingleRunResult(Metric.SPAM_F_MEASURE, spamFMeasure);
    addSingleRunResult(Metric.WEIGHTED_F_MEASURE, weightedFMeasure);
    addSingleRunResult(Metric.TRAIN_TIME, trainTime);
    addSingleRunResult(Metric.TEST_TIME, testTime);
}
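As a sanity check on the weighted metrics used in Example 3, the sketch below recomputes weightedFMeasure() from the per-class F-measures, assuming the weights are each class's true instance count (the row sums of the confusion matrix). That matches the WEKA implementation as far as I know, but treat the weighting scheme as an assumption if your version differs.

import weka.classifiers.Evaluation;

// Hedged sketch: recompute the weighted F-measure from per-class F-measures,
// assuming the weights are the true class counts (confusion-matrix row sums).
public static double recomputeWeightedFMeasure(Evaluation evaluation) {
    double[][] cm = evaluation.confusionMatrix();
    double weightedSum = 0.0;
    double total = 0.0;
    for (int i = 0; i < cm.length; i++) {
        double classCount = 0.0;
        for (int j = 0; j < cm[i].length; j++) {
            classCount += cm[i][j]; // instances whose true class is i
        }
        weightedSum += evaluation.fMeasure(i) * classCount;
        total += classCount;
    }
    return weightedSum / total; // should match evaluation.weightedFMeasure()
}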
Example 4: getClassifierFScore
import weka.classifiers.Evaluation; // import the package/class this method depends on
public static double getClassifierFScore(int numTopics, String categoryName) throws Exception {
    int seed = 1;
    int folds = 10;
    // Load the ARFF file generated for this category and topic count.
    DataSource trainSource = new DataSource("inputFiles/rawFiles/ARFF-files/" + categoryName + "-ARFF/"
            + categoryName + "-" + numTopics + ".ARFF");
    Instances trainingSet = trainSource.getDataSet();
    if (trainingSet.classIndex() == -1)
        trainingSet.setClassIndex(trainingSet.numAttributes() - 1);
    // Resample to compensate for the minority class.
    Resample reSample = new Resample();
    reSample.setInputFormat(trainingSet);
    trainingSet = Filter.useFilter(trainingSet, reSample);
    // Shuffle and stratify before cross-validation.
    Random rand = new Random(seed);
    trainingSet.randomize(rand);
    if (trainingSet.classAttribute().isNominal())
        trainingSet.stratify(folds);
    RandomForest classifier = new RandomForest();
    // Run 10-fold cross-validation on the resampled training set.
    Evaluation eval = new Evaluation(trainingSet);
    eval.crossValidateModel(classifier, trainingSet, folds, new Random(seed));
    // Alternative fold-by-fold evaluation kept (commented out) from the original source:
    // for (int n = 0; n < folds; n++) {
    //     Instances train = trainingSet.trainCV(folds, n);
    //     Instances test = trainingSet.testCV(folds, n);
    //     // build and evaluate classifier
    //     classifier = (RandomForest) Classifier.makeCopy(classifier);
    //     classifier.buildClassifier(train);
    //     eval.evaluateModel(classifier, test);
    // }
    // System.out.println(eval.toSummaryString("=== " + folds + "-fold Cross-validation ===\n", false));
    // System.out.println(eval.toClassDetailsString() + "\n" + eval.toMatrixString() + "\n");
    return eval.weightedFMeasure();
}
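Example 4 keeps a fold-by-fold evaluation loop commented out. Below is a hedged, runnable sketch of the same idea for recent WEKA releases, where the copy helper is AbstractClassifier.makeCopy; in older 3.6-era releases it was Classifier.makeCopy, as the commented code suggests. The method name manualCrossValidation is made up for this sketch.

import java.util.Random;

import weka.classifiers.AbstractClassifier;
import weka.classifiers.Classifier;
import weka.classifiers.Evaluation;
import weka.classifiers.trees.RandomForest;
import weka.core.Instances;

// Manual k-fold loop, roughly equivalent to crossValidateModel(); assumes trainingSet
// is already randomized and stratified as in the examples above.
public static double manualCrossValidation(Instances trainingSet, int folds, int seed) throws Exception {
    Evaluation eval = new Evaluation(trainingSet);
    for (int n = 0; n < folds; n++) {
        Instances train = trainingSet.trainCV(folds, n, new Random(seed));
        Instances test = trainingSet.testCV(folds, n);
        // Fresh copy per fold so folds do not share model state.
        Classifier copy = AbstractClassifier.makeCopy(new RandomForest());
        copy.buildClassifier(train);
        eval.evaluateModel(copy, test);
    }
    return eval.weightedFMeasure();
}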