

Java Evaluation.toSummaryString Method Code Examples

This article collects typical usage examples of the Java method weka.classifiers.Evaluation.toSummaryString. If you are wondering what Evaluation.toSummaryString does, how to call it, or what real code using it looks like, the hand-picked examples below should help. You can also explore further usage examples of the enclosing class, weka.classifiers.Evaluation.


The sections below show 11 code examples of the Evaluation.toSummaryString method, sorted by popularity by default.
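
Before the examples, here is a minimal, self-contained sketch of the pattern they all share: build a classifier, evaluate it, and print the result of Evaluation.toSummaryString(). The dataset path, the choice of J48 and the 10-fold cross-validation setup are illustrative assumptions, not taken from any of the projects below.

import java.util.Random;

import weka.classifiers.Evaluation;
import weka.classifiers.trees.J48;
import weka.core.Instances;
import weka.core.converters.ConverterUtils.DataSource;

public class ToSummaryStringDemo {
    public static void main(String[] args) throws Exception {
        // Hypothetical dataset file; replace with your own ARFF/CSV
        Instances data = DataSource.read("data/iris.arff");
        data.setClassIndex(data.numAttributes() - 1);

        // 10-fold cross-validation of a J48 decision tree
        Evaluation eval = new Evaluation(data);
        eval.crossValidateModel(new J48(), data, 10, new Random(1));

        // toSummaryString() returns the "Correctly Classified Instances ..." summary block
        System.out.println(eval.toSummaryString());
    }
}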

Example 1: printClassifierResults

import weka.classifiers.Evaluation; // import the package/class this method depends on
/**
 * Prints the results stored in an Evaluation object to standard output
 * (summary, per-class results and confusion matrix).
 *
 * @param eval the Evaluation holding the results to print
 * @throws Exception if Weka fails to render any of the result strings
 */
public void printClassifierResults(Evaluation eval) throws Exception
{
    // Print the result à la Weka explorer:
    String strSummary = eval.toSummaryString();
    System.out.println(strSummary);

    // Print per-class results
    String resPerClass = eval.toClassDetailsString();
    System.out.println(resPerClass);

    // Print the confusion matrix
    String cMatrix = eval.toMatrixString();
    System.out.println(cMatrix);

    System.out.println();
}
 
Developer ID: Elhuyar, Project: Elixa, Lines of code: 24, Source file: WekaWrapper.java

Example 2: classify

import weka.classifiers.Evaluation; // import the package/class this method depends on
public void classify(String trainingFile,String testingFile) {

        try {
            initTrainingSet(trainingFile);
            initTestingSet(testingFile);

            J48 cModel = new J48();
            cModel.setUnpruned(true);
            cModel.buildClassifier(TrainingSet);

            Evaluation eTest = new Evaluation(TrainingSet);
            eTest.evaluateModel(cModel, TestingSet);


            //print out the results
            System.out.println("=====================================================================");
            System.out.println("Results for "+this.getClass().getSimpleName());
            String strSummary = eTest.toSummaryString();
            System.out.println(strSummary);

            System.out.println("F-measure : "+eTest.weightedFMeasure());
            System.out.println("precision : "+eTest.weightedPrecision());
            System.out.println("recall : "+eTest.weightedRecall());
            System.out.println("=====================================================================");


        } catch (Exception e) {
            e.printStackTrace();
        }

    }
 
Developer ID: catchsudheera, Project: sanwada, Lines of code: 32, Source file: FeatureSet01.java

Example 3: classifyMyInstances

import weka.classifiers.Evaluation; // import the package/class this method depends on
public String classifyMyInstances(Instances inst) throws Exception {
    Instances data = inst;
    String summary = "";
    WekaConfig conf = WekaConfig.getInstance();
    String algorithm = conf.getAlgorithm();
    Classifier clas = null;

    if (conf.isFilterBool()) {
        FilterSet filtr = new FilterSet();
        switch (conf.getFilter()) {
            case "CSF greedy":
                data = filtr.filterCFS_Greedy(inst);
                break;
            case "CSF best first":
                data = filtr.filterCFS_BestFirst(inst);
                break;
            case "Filtered CSF greedy":
                data = filtr.filterFilteredCSF_Greedy(inst);
                break;
            case "Filtered CSF best first":
                data = filtr.filterFilteredCSF_BestFirst(inst);
                break;
            case "Consistency greedy":
                data = filtr.filterConsinstency_Greedy(inst);
                break;
            case "Consistency best first":
                data = filtr.filterConsinstency_BestFirst(inst);
                break;
        }
    }

    switch (algorithm) {
        case "J48":
            summary += "J48 \n";
            clas = classifyJ48(data);
            break;
        case "Naive Bayes":
            summary += "Naive Bayes \n";
            clas = classifyNaiveBayes(data);
            break;
        case "Lazy IBk":
            summary += "Lazy IBk \n";
            clas = classifyIBk(data);
            break;
        case "Random Tree":
            summary += "Random Tree \n";
            clas = classifyRandomTree(data);
            break;
        case "SMO":
            summary += "SMO \n";
            clas = classifySMO(data);
            break;
        case "PART":
            summary += "PART \n";
            clas = classifyPART(data);
            break;
        case "Decision Table":
            summary += "Decision Table \n";
            clas = classifyDecisionTable(data);
            break;
        case "Multi Layer":
            summary += "Multi Layer \n";
            clas = classifyMultiLayer(data);
            break;
        case "Kstar":
            summary += "Kstar \n";
            clas = classifyKStar(data);
            break;
    }

    summary += "\n";
    summary += "---------Klasifikacja-------------- \n";
    summary += clas.toString();
    Evaluate eval = new Evaluate();
    Evaluation evalu = eval.crossValidation(clas, data, conf.getFolds());
    summary += "----------Ewaluacja---------------- \n";
    summary += evalu.toSummaryString();
    summary += evalu.toMatrixString();

    return summary;
}
 
Developer ID: andrzejtrzaska, Project: VoiceStressAnalysis, Lines of code: 82, Source file: Classification.java
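
Example 3 delegates cross-validation to a project-specific Evaluate helper whose source is not shown on this page. As a rough sketch only, such a wrapper could be implemented with Weka's built-in Evaluation.crossValidateModel; the class and method names below merely mirror the call site above and are assumptions, not the project's actual code.

import java.util.Random;

import weka.classifiers.Classifier;
import weka.classifiers.Evaluation;
import weka.core.Instances;

// Hypothetical stand-in for the Evaluate helper used in Example 3
public class Evaluate {

    // Runs stratified k-fold cross-validation and returns the populated Evaluation,
    // so the caller can append toSummaryString() and toMatrixString() to its report.
    public Evaluation crossValidation(Classifier classifier, Instances data, int folds) throws Exception {
        Evaluation eval = new Evaluation(data);
        eval.crossValidateModel(classifier, data, folds, new Random(1));
        return eval;
    }
}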

Example 4: classify

import weka.classifiers.Evaluation; // import the package/class this method depends on
public void classify(String trainingFile,String testingFile) {

        try {
            initTrainingSet(trainingFile);
            initTestingSet(testingFile);



            // train a J48 decision tree
            J48 cModel = new J48();
            cModel.buildClassifier(TrainingSet);

            // pred holds the prediction made for the previous instance; when it is one of the
            // listed classes, it is written into the first attribute of the current instance
            // before that instance is classified
            Instance current;
            double pred=0;
            for (int i = 0; i < TestingSet.numInstances(); i++) {
                current=TestingSet.get(i);
                if(featureVectorClassValues.get((int)pred).equalsIgnoreCase("Statement")||featureVectorClassValues.get((int)pred).equalsIgnoreCase("Backchannel Question")||featureVectorClassValues.get((int)pred).equalsIgnoreCase("Yes-No Question")||featureVectorClassValues.get((int)pred).equalsIgnoreCase("Open Question")){
                    current.setValue(featureVectorAttributes.get(0),featureVectorClassValues.get((int)pred));
                    System.out.println(pred+"  :  "+featureVectorClassValues.get((int)pred));
                    System.out.println(current.toString());
                }
                pred=cModel.classifyInstance(current);
            }




//            J48 cModel = new J48();
//            cModel.setUnpruned(true);
//            cModel.buildClassifier(TrainingSet);

            Evaluation eTest = new Evaluation(TrainingSet);
            eTest.evaluateModel(cModel, TestingSet);


            //print out the results
            System.out.println("=====================================================================");
            System.out.println("Results for "+this.getClass().getSimpleName());
            String strSummary = eTest.toSummaryString();
            System.out.println(strSummary);

            System.out.println("F-measure : "+eTest.weightedFMeasure());
            System.out.println("precision : "+eTest.weightedPrecision());
            System.out.println("recall : "+eTest.weightedRecall());
            System.out.println("=====================================================================");


            InfoGainAttributeEval infoGainAttributeEval = new InfoGainAttributeEval();
            infoGainAttributeEval.buildEvaluator(TrainingSet);

            for (int i = 0; i <featureVectorAttributes.size()-1; i++) {
                double v = infoGainAttributeEval.evaluateAttribute(i);
                System.out.print(featureVectorAttributes.get(i).name()+"\t\t");
                System.out.println(v);
            }

        } catch (Exception e) {
            e.printStackTrace();
        }

    }
 
Developer ID: catchsudheera, Project: sanwada, Lines of code: 62, Source file: FeatureSet05.java

Example 5: classify

import weka.classifiers.Evaluation; // import the package/class this method depends on
public void classify(String trainingFile,String testingFile) {

        try {

            initiateBagOfWords(trainingFile);
            initTrainingSet(trainingFile);

            initiateBagOfWords(testingFile);
            initTestingSet(testingFile);



            J48 cModel = new J48();
            cModel.setUnpruned(true);
            cModel.buildClassifier(TrainingSet);


//            for (int i = 0; i < TestingSet.numInstances(); i++) {
//                double pred = cModel.classifyInstance(TestingSet.instance(i));
//                if (!testingUtterances.get(i).contains(TestingSet.classAttribute().value((int) pred))){
//                    System.out.print(testingUtterances.get(i));
//                    //System.out.print(", actual: " + TestingSet.classAttribute().value((int) TestingSet.instance(i).classValue()));
//                    System.out.println(", predicted: " + TestingSet.classAttribute().value((int) pred));
//                }
//            }



            Evaluation eTest = new Evaluation(TrainingSet);
            eTest.evaluateModel(cModel, TestingSet);


            //print out the results
            System.out.println("=====================================================================");
            System.out.println("Results for "+this.getClass().getSimpleName());
            String strSummary = eTest.toSummaryString();
            System.out.println(strSummary);

            System.out.println("F-measure : "+eTest.weightedFMeasure());
            System.out.println("precision : "+eTest.weightedPrecision());
            System.out.println("recall : "+eTest.weightedRecall());
            System.out.println("=====================================================================");

//            InfoGainAttributeEval infoGainAttributeEval = new InfoGainAttributeEval();
//            infoGainAttributeEval.buildEvaluator(TrainingSet);
//            double v = infoGainAttributeEval.evaluateAttribute(0);
//            System.out.print(featureVectorAttributes.get(0).name()+"\t\t");
//            System.out.println(v);
//
//            infoGainAttributeEval = new InfoGainAttributeEval();
//            infoGainAttributeEval.buildEvaluator(TrainingSet);
//             v = infoGainAttributeEval.evaluateAttribute(1);
//            System.out.print(featureVectorAttributes.get(1).name()+"\t\t");
//            System.out.println(v);
//
//            infoGainAttributeEval = new InfoGainAttributeEval();
//            infoGainAttributeEval.buildEvaluator(TrainingSet);
//             v = infoGainAttributeEval.evaluateAttribute(2);
//            System.out.print(featureVectorAttributes.get(2).name()+"\t\t");
//            System.out.println(v);
//
//            infoGainAttributeEval = new InfoGainAttributeEval();
//            infoGainAttributeEval.buildEvaluator(TrainingSet);
//             v = infoGainAttributeEval.evaluateAttribute(3);
//            System.out.print(featureVectorAttributes.get(3).name()+"\t\t");
//            System.out.println(v);

        } catch (Exception e) {
            e.printStackTrace();
        }

    }
 
Developer ID: catchsudheera, Project: sanwada, Lines of code: 73, Source file: FeatureSet04.java

Example 6: classify

import weka.classifiers.Evaluation; // import the package/class this method depends on
public void classify(String trainingFile,String testingFile) {

        try {

           // initiateBagOfWords(trainingFile);
            initTrainingSet(trainingFile);

           // initiateBagOfWords(testingFile);
            initTestingSet(testingFile);

            StringToWordVector filter = new StringToWordVector();
            int[] indices= new int[1];
            indices[0]=6;
            filter.setAttributeIndicesArray(indices);
            filter.setInputFormat(TrainingSet);
            filter.setWordsToKeep(6);
            filter.setDoNotOperateOnPerClassBasis(false);
            filter.setTFTransform(true);
            filter.setOutputWordCounts(true);

            TrainingSet = Filter.useFilter(TrainingSet, filter);
            TestingSet = Filter.useFilter(TestingSet, filter);



            Classifier cModel = new SimpleLogistic();
            cModel.buildClassifier(TrainingSet);

            weka.core.SerializationHelper.write(System.getProperty("user.dir")+"/Classification/src/datafiles/cls.model",cModel);
            weka.core.SerializationHelper.write(System.getProperty("user.dir")+"/Classification/src/datafiles/testingSet.model",TestingSet);

            Evaluation eTest = new Evaluation(TrainingSet);
            eTest.evaluateModel(cModel, TestingSet);


            //print out the results
            System.out.println("=====================================================================");
            System.out.println("Results for "+this.getClass().getSimpleName());
            String strSummary = eTest.toSummaryString();
            System.out.println(strSummary);

            InfoGainAttributeEval infoGainAttributeEval = new InfoGainAttributeEval();
            infoGainAttributeEval.buildEvaluator(TrainingSet);

            for (int i = 0; i <featureVectorAttributes.size()-1; i++) {
                double v = infoGainAttributeEval.evaluateAttribute(i);
                System.out.print(i+" "+featureVectorAttributes.get(i).name()+"\t\t");
                System.out.println(v);
            }

            System.out.println("=====================================================================");

            System.out.println("recall : "+eTest.weightedRecall());
            System.out.println("precision : "+eTest.weightedPrecision());
            System.out.println("F-measure : "+eTest.weightedFMeasure());

            System.out.println("================= Rounded Values =========================");

            System.out.println("recall : "+Math.round(eTest.weightedRecall() * 100.0) / 100.0);
            System.out.println("precision : "+Math.round(eTest.weightedPrecision() * 100.0) / 100.0);
            System.out.println("F-measure : "+Math.round(eTest.weightedFMeasure() * 100.0) / 100.0);
            System.out.println("=====================================================================");

            printErrors(cModel);


        } catch (Exception e) {
            e.printStackTrace();
        }

    }
 
Developer ID: catchsudheera, Project: sanwada, Lines of code: 72, Source file: FeatureSetAll.java

Example 7: main

import weka.classifiers.Evaluation; // import the package/class this method depends on
public static void main(String[] args) throws Exception {

//		// Declare two numeric attributes
//		Attribute Attribute1 = new Attribute("firstNumeric");
//		Attribute Attribute2 = new Attribute("secondNumeric");
//		
//		// Declare a nominal attribute along with its values
//		FastVector fvNominalVal = new FastVector(3);
//		fvNominalVal.addElement("blue");
//		fvNominalVal.addElement("gray");
//		fvNominalVal.addElement("black");
//		Attribute Attribute3 = new Attribute("aNominal", fvNominalVal);
//		
//		// Declare the class attribute along with its values
//		FastVector fvClassVal = new FastVector(2);
//		fvClassVal.addElement("positive");
//		fvClassVal.addElement("negative");
//		Attribute ClassAttribute = new Attribute("theClass", fvClassVal);
//		
//		// Declare the feature vector
//		FastVector fvWekaAttributes = new FastVector(4);
//		fvWekaAttributes.addElement(Attribute1);   
//		fvWekaAttributes.addElement(Attribute2);   
//		fvWekaAttributes.addElement(Attribute3);   
//		fvWekaAttributes.addElement(ClassAttribute);
//		
//		// Create an empty training set
//		Instances isTrainingSet = new Instances("Rel", fvWekaAttributes, 10);      
//		// Set class index
//		isTrainingSet.setClassIndex(3);
//		 
//		// Create the instance
//		Instance iExample = new Instance(4);
//		iExample.setValue((Attribute)fvWekaAttributes.elementAt(0), 1.0);     
//		iExample.setValue((Attribute)fvWekaAttributes.elementAt(1), 0.5);     
//		iExample.setValue((Attribute)fvWekaAttributes.elementAt(2), "gray");
//		iExample.setValue((Attribute)fvWekaAttributes.elementAt(3), "positive");
//		 
//		// add the instance
//		isTrainingSet.add(iExample);
		
		DataSource trainds = new DataSource("etc/train.csv");
		Instances train = trainds.getDataSet();
		train.setClassIndex(train.numAttributes()-1);
		
		DataSource testds = new DataSource("etc/test.csv");
		Instances test = testds.getDataSet();
		test.setClassIndex(test.numAttributes()-1);
		
		Classifier cModel = new MultilayerPerceptron();
		cModel.buildClassifier(train);
		
		// Test the model
		Evaluation eTest = new Evaluation(train);
		eTest.evaluateModel(cModel, test);
		
		// Print the result à la Weka explorer:
		String strSummary = eTest.toSummaryString();
		System.out.println(strSummary);
	}
 
Developer ID: mommi84, Project: BALLAD, Lines of code: 61, Source file: WekaPlayground.java

Example 8: classify

import weka.classifiers.Evaluation; // import the package/class this method depends on
public void classify(String trainingFile,String testingFile) {

        try {
            initTrainingSet(trainingFile);
            initTestingSet(testingFile);


            J48 cModel = new J48();
            cModel.setUnpruned(true);
            cModel.buildClassifier(TrainingSet);

            Evaluation eTest = new Evaluation(TrainingSet);
            eTest.evaluateModel(cModel, TestingSet);


            //print out the results
            System.out.println("=====================================================================");
            System.out.println("Results for "+this.getClass().getSimpleName());
            String strSummary = eTest.toSummaryString();
            System.out.println(strSummary);

            System.out.println("F-measure : "+eTest.fMeasure(0));
            System.out.println("=====================================================================");


        } catch (Exception e) {
            e.printStackTrace();
        }

    }
 
Developer ID: catchsudheera, Project: sanwada, Lines of code: 31, Source file: FeatureSetD03.java

Example 9: classify

import weka.classifiers.Evaluation; // import the package/class this method depends on
public void classify(String trainingFile,String testingFile) {

        try {
            initTrainingSet(trainingFile);
            initTestingSet(testingFile);


            Classifier cModel = new J48();
            cModel.buildClassifier(TrainingSet);

            Evaluation eTest = new Evaluation(TrainingSet);
            eTest.evaluateModel(cModel, TestingSet);


            //print out the results
            System.out.println("=====================================================================");
            System.out.println("Results for "+this.getClass().getSimpleName());
            String strSummary = eTest.toSummaryString();
            System.out.println(strSummary);

            System.out.println("F-measure : "+eTest.fMeasure(0));
            System.out.println("=====================================================================");


        } catch (Exception e) {
            e.printStackTrace();
        }

    }
 
Developer ID: catchsudheera, Project: sanwada, Lines of code: 30, Source file: FeatureSetD01.java

Example 10: classify

import weka.classifiers.Evaluation; // import the package/class this method depends on
public void classify(String trainingFile,String testingFile) {

        try {
            initTrainingSet(trainingFile);
            initTestingSet(testingFile);


            J48 cModel = new J48();
            cModel.setUnpruned(true);
            cModel.buildClassifier(TrainingSet);


//            for (int i = 0; i < TestingSet.numInstances(); i++) {
//                double pred = cModel.classifyInstance(TestingSet.instance(i));
//                System.out.print("ID: " + TestingSet.instance(i).value(0));
//                System.out.print(", actual: " + TestingSet.classAttribute().value((int) TestingSet.instance(i).classValue()));
//                System.out.println(", predicted: " + TestingSet.classAttribute().value((int) pred));
//            }



            Evaluation eTest = new Evaluation(TrainingSet);
            eTest.evaluateModel(cModel, TestingSet);


            //print out the results
            System.out.println("=====================================================================");
            System.out.println("Results for "+this.getClass().getSimpleName());
            String strSummary = eTest.toSummaryString();
            System.out.println(strSummary);

            System.out.println("F-measure : "+eTest.weightedFMeasure());
            System.out.println("precision : "+eTest.weightedPrecision());
            System.out.println("recall : "+eTest.weightedRecall());
            System.out.println("=====================================================================");


        } catch (Exception e) {
            e.printStackTrace();
        }

    }
 
Developer ID: catchsudheera, Project: sanwada, Lines of code: 43, Source file: FeatureSet02.java

Example 11: classify

import weka.classifiers.Evaluation; // import the package/class this method depends on
public void classify(String trainingFile,String testingFile) {

        try {

            initiateBagOfWords(trainingFile);
            initTrainingSet(trainingFile);

            initiateBagOfWords(testingFile);
            initTestingSet(testingFile);



            J48 cModel = new J48();
            cModel.setUnpruned(true);
            cModel.buildClassifier(TrainingSet);


//            for (int i = 0; i < TestingSet.numInstances(); i++) {
//                double pred = cModel.classifyInstance(TestingSet.instance(i));
//                if (!testingUtterances.get(i).contains(TestingSet.classAttribute().value((int) pred))){
//                    System.out.print(testingUtterances.get(i));
//                    //System.out.print(", actual: " + TestingSet.classAttribute().value((int) TestingSet.instance(i).classValue()));
//                    System.out.println(", predicted: " + TestingSet.classAttribute().value((int) pred));
//                }
//            }



            Evaluation eTest = new Evaluation(TrainingSet);
            eTest.evaluateModel(cModel, TestingSet);


            //print out the results
            System.out.println("=====================================================================");
            System.out.println("Results for "+this.getClass().getSimpleName());
            String strSummary = eTest.toSummaryString();
            System.out.println(strSummary);

            System.out.println("F-measure : "+eTest.weightedFMeasure());
            System.out.println("precision : "+eTest.weightedPrecision());
            System.out.println("recall : "+eTest.weightedRecall());
            System.out.println("=====================================================================");


        } catch (Exception e) {
            e.printStackTrace();
        }

    }
 
Developer ID: catchsudheera, Project: sanwada, Lines of code: 50, Source file: FeatureSet03.java


Note: The weka.classifiers.Evaluation.toSummaryString method examples in this article were compiled by 純淨天空 from open-source code and documentation platforms such as GitHub and MSDocs. The code snippets were selected from open-source projects contributed by their respective developers; copyright of the source code remains with the original authors, and redistribution and use should follow the corresponding project's License. Do not republish without permission.