This article collects typical usage examples of the Java method weka.classifiers.Evaluation.weightedFMeasure. If you have been wondering what Evaluation.weightedFMeasure does, how to call it, or want to see it used in real code, the hand-picked method examples below may help. You can also read more about the usage of its enclosing class, weka.classifiers.Evaluation.
Four code examples of Evaluation.weightedFMeasure are shown below, ordered by popularity by default. You can upvote the examples you like or find useful; your ratings help the system recommend better Java code samples.
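Before the examples, here is a minimal, self-contained sketch of the typical pattern: build an Evaluation over a dataset, cross-validate a classifier, then read the weighted F-measure. The file path and the J48 classifier are placeholders chosen for illustration, not taken from the examples below.

import java.util.Random;
import weka.classifiers.Evaluation;
import weka.classifiers.trees.J48;
import weka.core.Instances;
import weka.core.converters.ConverterUtils.DataSource;

public class WeightedFMeasureDemo {
    public static void main(String[] args) throws Exception {
        // Hypothetical ARFF file; replace with a real dataset path
        Instances data = new DataSource("data/example.arff").getDataSet();
        if (data.classIndex() == -1) {
            data.setClassIndex(data.numAttributes() - 1);   // use the last attribute as the class
        }

        // 10-fold cross-validation of a decision tree
        Evaluation eval = new Evaluation(data);
        eval.crossValidateModel(new J48(), data, 10, new Random(1));

        // Weighted F-measure: per-class F-measures averaged, weighted by class frequency
        System.out.println("Weighted F-measure: " + eval.weightedFMeasure());
    }
}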
Example 1: trainClassifier
import weka.classifiers.Evaluation; // import the package/class this method depends on
public void trainClassifier(Classifier classifier, File trainingDataset,
                            FileOutputStream trainingModel,
                            Integer crossValidationFoldNumber) throws Exception {
    // Load the training data from CSV
    CSVLoader csvLoader = new CSVLoader();
    csvLoader.setSource(trainingDataset);
    Instances instances = csvLoader.getDataSet();

    // 'classifier' is an enum constant selecting the learning algorithm
    switch (classifier) {
        case KNN:
            int K = (int) Math.ceil(Math.sqrt(instances.numInstances()));
            this.classifier = new IBk(K);
            break;
        case NB:
            this.classifier = new NaiveBayes();
    }

    // Default to the last attribute as the class attribute
    if (instances.classIndex() == -1) {
        instances.setClassIndex(instances.numAttributes() - 1);
    }

    this.classifier.buildClassifier(instances);

    // Optionally estimate performance with k-fold cross-validation
    if (crossValidationFoldNumber > 0) {
        Evaluation evaluation = new Evaluation(instances);
        evaluation.crossValidateModel(this.classifier, instances, crossValidationFoldNumber,
                new Random(1));
        kappa = evaluation.kappa();
        fMeasure = evaluation.weightedFMeasure();
        confusionMatrix = evaluation.toMatrixString("Confusion matrix: ");
    }

    // Serialize the trained model to the given output stream
    ObjectOutputStream outputStream = new ObjectOutputStream(trainingModel);
    outputStream.writeObject(this.classifier);
    outputStream.flush();
    outputStream.close();
}
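A hedged usage sketch for the method above: it assumes the surrounding class exposes a Classifier enum with KNN and NB constants (implied by the switch statement) and fields for kappa, fMeasure and confusionMatrix; the class name ModelTrainer and the file paths are illustrative only.

// Hypothetical caller; ModelTrainer and Classifier.KNN are assumed, not taken from the source
ModelTrainer trainer = new ModelTrainer();
File csv = new File("data/training.csv");                       // placeholder path
try (FileOutputStream model = new FileOutputStream("knn.model")) {
    trainer.trainClassifier(Classifier.KNN, csv, model, 10);    // 10-fold cross-validation
}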
Example 2: getClassifierFScore
import weka.classifiers.Evaluation; // import the package/class this method depends on
public static double getClassifierFScore(int numTopics, String categoryName) throws Exception
{
    int seed = 1;
    int folds = 10;

    // Load the ARFF file for this category and topic count
    DataSource trainSource = new DataSource("inputFiles/rawFiles/ARFF-files/" + categoryName + "-ARFF/"
            + categoryName + "-" + numTopics + ".ARFF");
    Instances trainingSet = trainSource.getDataSet();
    if (trainingSet.classIndex() == -1)
        trainingSet.setClassIndex(trainingSet.numAttributes() - 1);

    // Resample for the minority class
    Resample reSample = new Resample();
    reSample.setInputFormat(trainingSet);
    trainingSet = Filter.useFilter(trainingSet, reSample);

    // Randomize and stratify before cross-validation
    Random rand = new Random(seed);
    trainingSet.randomize(rand);
    if (trainingSet.classAttribute().isNominal())
        trainingSet.stratify(folds);

    // Evaluate a random forest with 10-fold cross-validation
    RandomForest classifier = new RandomForest();
    Evaluation eval = new Evaluation(trainingSet);
    eval.crossValidateModel(classifier, trainingSet, folds, new Random(seed));
    return eval.weightedFMeasure();
}
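Since the method takes the topic count as a parameter, a natural use is to scan several topic counts and keep the one with the highest weighted F-measure. The sketch below is a hypothetical driver: the class name TopicModelTuner, the category name and the candidate topic counts are illustrative, not taken from the source.

// Pick the topic count with the best cross-validated weighted F-measure (hypothetical driver)
int bestTopics = -1;
double bestF = Double.NEGATIVE_INFINITY;
for (int numTopics : new int[] { 20, 40, 60, 80 }) {            // candidate values are placeholders
    double f = TopicModelTuner.getClassifierFScore(numTopics, "sports");
    if (f > bestF) {
        bestF = f;
        bestTopics = numTopics;
    }
}
System.out.println("Best topic count: " + bestTopics + " (weighted F = " + bestF + ")");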
Example 3: computeSingleRunResults
import weka.classifiers.Evaluation; // import the package/class this method depends on
public void computeSingleRunResults(MethodEvaluation methodEvaluation)
{
    Evaluation evaluation = methodEvaluation.getEvaluation();

    // HAM and SPAM are enum constants whose ordinals are used as the class indices
    int hamIndex = HAM.ordinal();
    int spamIndex = SPAM.ordinal();

    // Per-class and weighted metrics, scaled to percentages
    Double hamPrecision = 100.0 * evaluation.precision(hamIndex);
    Double spamPrecision = 100.0 * evaluation.precision(spamIndex);
    Double weightedPrecision = 100.0 * evaluation.weightedPrecision();
    Double hamRecall = 100.0 * evaluation.recall(hamIndex);
    Double spamRecall = 100.0 * evaluation.recall(spamIndex);
    Double weightedRecall = 100.0 * evaluation.weightedRecall();
    Double hamAreaUnderPRC = 100.0 * evaluation.areaUnderPRC(hamIndex);
    Double spamAreaUnderPRC = 100.0 * evaluation.areaUnderPRC(spamIndex);
    Double weightedAreaUnderPRC = 100.0 * evaluation.weightedAreaUnderPRC();
    Double hamAreaUnderROC = 100.0 * evaluation.areaUnderROC(hamIndex);
    Double spamAreaUnderROC = 100.0 * evaluation.areaUnderROC(spamIndex);
    Double weightedAreaUnderROC = 100.0 * evaluation.weightedAreaUnderROC();
    Double hamFMeasure = 100.0 * evaluation.fMeasure(hamIndex);
    Double spamFMeasure = 100.0 * evaluation.fMeasure(spamIndex);
    Double weightedFMeasure = 100.0 * evaluation.weightedFMeasure();

    // Wall-clock training and testing times
    Double trainTime = (double) (methodEvaluation.getTrainEnd() - methodEvaluation.getTrainStart());
    Double testTime = (double) (methodEvaluation.getTestEnd() - methodEvaluation.getTestStart());

    addSingleRunResult(Metric.HAM_PRECISION, hamPrecision);
    addSingleRunResult(Metric.SPAM_PRECISION, spamPrecision);
    addSingleRunResult(Metric.WEIGHTED_PRECISION, weightedPrecision);
    addSingleRunResult(Metric.HAM_RECALL, hamRecall);
    addSingleRunResult(Metric.SPAM_RECALL, spamRecall);
    addSingleRunResult(Metric.WEIGHTED_RECALL, weightedRecall);
    addSingleRunResult(Metric.HAM_AREA_UNDER_PRC, hamAreaUnderPRC);
    addSingleRunResult(Metric.SPAM_AREA_UNDER_PRC, spamAreaUnderPRC);
    addSingleRunResult(Metric.WEIGHTED_AREA_UNDER_PRC, weightedAreaUnderPRC);
    addSingleRunResult(Metric.HAM_AREA_UNDER_ROC, hamAreaUnderROC);
    addSingleRunResult(Metric.SPAM_AREA_UNDER_ROC, spamAreaUnderROC);
    addSingleRunResult(Metric.WEIGHTED_AREA_UNDER_ROC, weightedAreaUnderROC);
    addSingleRunResult(Metric.HAM_F_MEASURE, hamFMeasure);
    addSingleRunResult(Metric.SPAM_F_MEASURE, spamFMeasure);
    addSingleRunResult(Metric.WEIGHTED_F_MEASURE, weightedFMeasure);
    addSingleRunResult(Metric.TRAIN_TIME, trainTime);
    addSingleRunResult(Metric.TEST_TIME, testTime);
}
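The example above reads both the per-class and the weighted F-measures. In Weka, the weighted variant is the per-class F-measures averaged with weights proportional to how often each true class occurs. The snippet below is a sanity-check sketch that reconstructs the value from the confusion matrix, reusing the evaluation variable from the example above and only standard Evaluation and weka.core.Utils calls.

// Recompute the weighted F-measure from per-class values and the confusion matrix
double[][] cm = evaluation.confusionMatrix();
double total = 0.0, weightedSum = 0.0;
for (int i = 0; i < cm.length; i++) {
    double classCount = weka.core.Utils.sum(cm[i]);    // instances whose true class is i
    weightedSum += classCount * evaluation.fMeasure(i);
    total += classCount;
}
double reconstructed = weightedSum / total;             // should match evaluation.weightedFMeasure()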
Example 4: getClassifierFScore
import weka.classifiers.Evaluation; // import the package/class this method depends on
public static double getClassifierFScore(int numTopics, String categoryName) throws Exception
{
    int seed = 1;
    int folds = 10;

    // Load the ARFF file for this category and topic count
    DataSource trainSource = new DataSource("inputFiles/rawFiles/ARFF-files/" + categoryName + "-ARFF/"
            + categoryName + "-" + numTopics + ".ARFF");
    Instances trainingSet = trainSource.getDataSet();
    if (trainingSet.classIndex() == -1)
        trainingSet.setClassIndex(trainingSet.numAttributes() - 1);

    // Resample for the minority class
    Resample reSample = new Resample();
    reSample.setInputFormat(trainingSet);
    trainingSet = Filter.useFilter(trainingSet, reSample);

    // Randomize and stratify before cross-validation
    Random rand = new Random(seed);
    trainingSet.randomize(rand);
    if (trainingSet.classAttribute().isNominal())
        trainingSet.stratify(folds);

    RandomForest classifier = new RandomForest();

    // Perform 10-fold cross-validation
    Evaluation eval = new Evaluation(trainingSet);
    eval.crossValidateModel(classifier, trainingSet, folds, new Random(seed));

    // A manual fold-by-fold alternative to crossValidateModel, kept from the original for reference:
    // for (int n = 0; n < folds; n++) {
    //     Instances train = trainingSet.trainCV(folds, n);
    //     Instances test = trainingSet.testCV(folds, n);
    //     classifier = (RandomForest) Classifier.makeCopy(classifier);
    //     classifier.buildClassifier(train);
    //     eval.evaluateModel(classifier, test);
    // }
    // System.out.println(eval.toSummaryString("=== " + folds + "-fold Cross-validation ===\n", false));
    // System.out.println(eval.toClassDetailsString() + "\n" + eval.toMatrixString() + "\n");

    return eval.weightedFMeasure();
}