

Java Evaluation.crossValidateModel Method Code Examples

This article collects typical usage examples of the Java method weka.classifiers.evaluation.Evaluation.crossValidateModel. If you are wondering what exactly Evaluation.crossValidateModel does, how to call it, or what it looks like in practice, the curated code examples below should help. You can also explore further usage examples of the enclosing class, weka.classifiers.evaluation.Evaluation.


A total of 13 code examples of Evaluation.crossValidateModel are shown below, sorted by popularity by default.
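Before the collected examples, here is a minimal, self-contained sketch of the basic call pattern. It is only a sketch: the file name data/iris.arff is a placeholder, and it assumes a dataset whose last attribute is the nominal class.

import java.util.Random;
import weka.classifiers.evaluation.Evaluation;
import weka.classifiers.trees.J48;
import weka.core.Instances;
import weka.core.converters.ConverterUtils.DataSource;

public class CrossValidationSketch {
    public static void main(String[] args) throws Exception {
        // Load the dataset and mark the last attribute as the class
        Instances data = DataSource.read("data/iris.arff"); // placeholder path
        data.setClassIndex(data.numAttributes() - 1);

        // Evaluation is constructed from the data so it knows the class structure;
        // crossValidateModel then trains and tests a fresh copy of the classifier
        // on each of the 10 folds, so the classifier does not need to be pre-built.
        Evaluation eval = new Evaluation(data);
        eval.crossValidateModel(new J48(), data, 10, new Random(1));

        System.out.println(eval.toSummaryString());
        System.out.printf("Accuracy: %.3f%n", 1 - eval.errorRate());
    }
}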

Example 1: getEvalResultbySMOTE

import weka.classifiers.evaluation.Evaluation; // import the package/class this method depends on
	/**
	 * <p>Runs 10-fold cross-validation on the single ARFF file at <b>path</b>.</p>
	 * <p>Uses C4.5 with <b>SMOTE</b> to classify the dataset.</p>
	 * @param path dataset path
	 * @param index row of the results array in which to store the evaluation metrics
	 * @throws Exception
	 */
	public static void getEvalResultbySMOTE(String path, int index) throws Exception{
		
		Instances ins = DataSource.read(path);
		int numAttr = ins.numAttributes();
		ins.setClassIndex(numAttr - 1);
		
		SMOTE smote = new SMOTE();
		smote.setInputFormat(ins);
		
		/** classifiers setting*/
		J48 j48 = new J48();
//		j48.setConfidenceFactor(0.4f);
		j48.buildClassifier(ins);

		FilteredClassifier fc = new FilteredClassifier();
		fc.setClassifier(j48);
		fc.setFilter(smote);
			
		Evaluation eval = new Evaluation(ins);	
		eval.crossValidateModel(fc, ins, 10, new Random(1));
		
//		System.out.printf(" %4.3f %4.3f %4.3f", eval.precision(0), eval.recall(0), eval.fMeasure(0));
//		System.out.printf(" %4.3f %4.3f %4.3f", eval.precision(1), eval.recall(1), eval.fMeasure(1));
//		System.out.printf(" %4.3f \n\n", (1-eval.errorRate()));
		results[index][0] = eval.precision(0);
		results[index][1] = eval.recall(0);
		results[index][2] = eval.fMeasure(0);
		results[index][3] = eval.precision(1);
		results[index][4] = eval.recall(1);
		results[index][5] = eval.fMeasure(1);
		results[index][6] = 1-eval.errorRate();
				
	}
 
Developer: Gu-Youngfeng, Project: CraTer, Lines of code: 40, Source file: ImbalanceProcessingAve.java

Example 2: main

import weka.classifiers.evaluation.Evaluation; // import the package/class this method depends on
public static void main(String[] args) throws Exception{
	
	String databasePath = "data/features.arff";
	
	// Load the data in arff format
	Instances data = new Instances(new BufferedReader(new FileReader(databasePath)));
	
	// Set the last attribute as the class
	data.setClassIndex(data.numAttributes() - 1);

	// Build a basic decision tree model
	String[] options = new String[]{};
	J48 model = new J48();
	model.setOptions(options);
	model.buildClassifier(data);
	
	// Output decision tree
	System.out.println("Decision tree model:\n"+model);
	
	// Output source code implementing the decision tree
	System.out.println("Source code:\n"+model.toSource("ActivityRecognitionEngine"));
	
	// Check accuracy of model using 10-fold cross-validation
	Evaluation eval = new Evaluation(data);
	eval.crossValidateModel(model, data, 10, new Random(1), new String[] {});
	System.out.println("Model performance:\n"+eval.toSummaryString());
	
	String[] activities = new String[]{"Walk", "Walk", "Walk", "Run", "Walk", "Run", "Run", "Sit", "Sit", "Sit"};
	DiscreteLowPass dlpFilter = new DiscreteLowPass(3);
	for(String str : activities){
		System.out.println(str +" -> "+ dlpFilter.filter(str));
	}
	
}
 
Developer: PacktPublishing, Project: Machine-Learning-End-to-Endguide-for-Java-developers, Lines of code: 35, Source file: ActivityRecognition.java
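Example 2 passes an empty String[] as the trailing varargs of crossValidateModel. In recent Weka releases those varargs are intended for a prediction-output object (weka.classifiers.evaluation.output.prediction.AbstractOutput), which lets you capture the per-instance predictions made during cross-validation. The following is a hedged sketch of that variant under the assumption of a standard Weka 3.7+/3.8 API, reusing the data/features.arff path from Example 2:

import java.io.BufferedReader;
import java.io.FileReader;
import java.util.Random;
import weka.classifiers.evaluation.Evaluation;
import weka.classifiers.evaluation.output.prediction.PlainText;
import weka.classifiers.trees.J48;
import weka.core.Instances;

public class CrossValidationWithPredictions {
    public static void main(String[] args) throws Exception {
        Instances data = new Instances(new BufferedReader(new FileReader("data/features.arff")));
        data.setClassIndex(data.numAttributes() - 1);

        // Buffer that will receive one line per cross-validated prediction
        StringBuffer predictions = new StringBuffer();
        PlainText output = new PlainText();
        output.setBuffer(predictions);
        output.setHeader(data);

        Evaluation eval = new Evaluation(data);
        eval.crossValidateModel(new J48(), data, 10, new Random(1), output);

        System.out.println(eval.toSummaryString());
        System.out.println(predictions); // actual vs. predicted class for every instance
    }
}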

Example 3: getEvalResultbyNo

import weka.classifiers.evaluation.Evaluation; // import the package/class this method depends on
	/**
	 * <p>Runs 10-fold cross-validation on the single ARFF file at <b>path</b>.</p>
	 * <p>Uses plain C4.5 (no resampling) to classify the dataset.</p>
	 * @param path dataset path
	 * @param index row of the results array in which to store the evaluation metrics
	 * @throws Exception
	 */
	public static void getEvalResultbyNo(String path, int index) throws Exception{
		
		Instances ins = DataSource.read(path);
		int numAttr = ins.numAttributes();
		ins.setClassIndex(numAttr - 1);
		
		/** classifiers setting*/
		J48 j48 = new J48();
//		j48.setConfidenceFactor(0.4f);
		j48.buildClassifier(ins);
		
		Evaluation eval = new Evaluation(ins);	
		eval.crossValidateModel(j48, ins, 10, new Random(1));
		
//		System.out.printf(" %4.3f %4.3f %4.3f", eval.precision(0), eval.recall(0), eval.fMeasure(0));
//		System.out.printf(" %4.3f %4.3f %4.3f", eval.precision(1), eval.recall(1), eval.fMeasure(1));
//		System.out.printf(" %4.3f \n\n", (1-eval.errorRate()));
		results[index][0] = eval.precision(0);
		results[index][1] = eval.recall(0);
		results[index][2] = eval.fMeasure(0);
		results[index][3] = eval.precision(1);
		results[index][4] = eval.recall(1);
		results[index][5] = eval.fMeasure(1);
		results[index][6] = 1-eval.errorRate();
			
	}
 
Developer: Gu-Youngfeng, Project: CraTer, Lines of code: 33, Source file: ImbalanceProcessingAve.java

Example 4: getEvalResultbyResampling

import weka.classifiers.evaluation.Evaluation; // import the package/class this method depends on
	/**
	 * <p>Runs 10-fold cross-validation on the single ARFF file at <b>path</b>.</p>
	 * <p>Uses C4.5 with <b>Resampling</b> to classify the dataset.</p>
	 * @param path dataset path
	 * @param index row of the results array in which to store the evaluation metrics
	 * @throws Exception
	 */
	public static void getEvalResultbyResampling(String path, int index) throws Exception{
		
		Instances ins = DataSource.read(path);
		int numAttr = ins.numAttributes();
		ins.setClassIndex(numAttr - 1);
		
		Resample resample = new Resample();
		resample.setInputFormat(ins);
		
		/** classifiers setting*/
		J48 j48 = new J48();
//		j48.setConfidenceFactor(0.4f);
		j48.buildClassifier(ins);

		FilteredClassifier fc = new FilteredClassifier();
		fc.setClassifier(j48);
		fc.setFilter(resample);
			
		Evaluation eval = new Evaluation(ins);	
		eval.crossValidateModel(fc, ins, 10, new Random(1));
		
//		System.out.printf(" %4.3f %4.3f %4.3f", eval.precision(0), eval.recall(0), eval.fMeasure(0));
//		System.out.printf(" %4.3f %4.3f %4.3f", eval.precision(1), eval.recall(1), eval.fMeasure(1));
//		System.out.printf(" %4.3f \n\n", (1-eval.errorRate()));
		results[index][0] = eval.precision(0);
		results[index][1] = eval.recall(0);
		results[index][2] = eval.fMeasure(0);
		results[index][3] = eval.precision(1);
		results[index][4] = eval.recall(1);
		results[index][5] = eval.fMeasure(1);
		results[index][6] = 1-eval.errorRate();
			
	}
 
Developer: Gu-Youngfeng, Project: CraTer, Lines of code: 40, Source file: ImbalanceProcessingAve.java

Example 5: getEvalResultbyCost

import weka.classifiers.evaluation.Evaluation; // import the package/class this method depends on
	/**
	 * <p>Runs 10-fold cross-validation on the single ARFF file at <b>path</b>.</p>
	 * <p>Uses C4.5 with <b>cost-sensitive learning</b> to classify the dataset.</p>
	 * @param path dataset path
	 * @param index row of the results array in which to store the evaluation metrics
	 * @throws Exception
	 */
	public static void getEvalResultbyCost(String path, int index) throws Exception{
		
		Instances ins = DataSource.read(path);
		int numAttr = ins.numAttributes();
		ins.setClassIndex(numAttr - 1);
		
		/**Classifier setting*/
		J48 j48 = new J48();
//		j48.setConfidenceFactor(0.4f);
		j48.buildClassifier(ins);
		
		CostSensitiveClassifier csc = new CostSensitiveClassifier();
		csc.setClassifier(j48);
		csc.setCostMatrix(new CostMatrix(new BufferedReader(new FileReader("files/costm"))));
		
		Evaluation eval = new Evaluation(ins);
		
		eval.crossValidateModel(csc, ins, 10, new Random(1));
		
//		System.out.printf(" %4.3f %4.3f %4.3f", eval.precision(0), eval.recall(0), eval.fMeasure(0));
//		System.out.printf(" %4.3f %4.3f %4.3f", eval.precision(1), eval.recall(1), eval.fMeasure(1));
//		System.out.printf(" %4.3f \n\n", (1-eval.errorRate()));
		results[index][0] = eval.precision(0);
		results[index][1] = eval.recall(0);
		results[index][2] = eval.fMeasure(0);
		results[index][3] = eval.precision(1);
		results[index][4] = eval.recall(1);
		results[index][5] = eval.fMeasure(1);
		results[index][6] = 1-eval.errorRate();
			
	}
 
Developer: Gu-Youngfeng, Project: CraTer, Lines of code: 38, Source file: ImbalanceProcessingAve.java

Example 6: getEvalResultbyDefault

import weka.classifiers.evaluation.Evaluation; // import the package/class this method depends on
	/**
	 * <p>Runs 10-fold cross-validation on the single ARFF file at <b>path</b>.</p>
	 * <p>Uses C4.5 with <b>SMOTE</b> to classify the dataset.</p>
	 * @param path dataset path
	 * @param index row of the results array in which to store the evaluation metrics
	 * @throws Exception
	 */
	public static void getEvalResultbyDefault(String path, int index) throws Exception{
		
		Instances ins = DataSource.read(path);
		int numAttr = ins.numAttributes();
		ins.setClassIndex(numAttr - 1);
		
		SMOTE smote = new SMOTE();
		smote.setInputFormat(ins);
		
		/** classifiers setting*/
		J48 j48 = new J48();
//		j48.setConfidenceFactor(0.4f);
		j48.buildClassifier(ins);

		FilteredClassifier fc = new FilteredClassifier();
		fc.setClassifier(j48);
		fc.setFilter(smote);
			
		Evaluation eval = new Evaluation(ins);	
		eval.crossValidateModel(fc, ins, 10, new Random(1));
		
//		System.out.printf(" %4.3f %4.3f %4.3f", eval.precision(0), eval.recall(0), eval.fMeasure(0));
//		System.out.printf(" %4.3f %4.3f %4.3f", eval.precision(1), eval.recall(1), eval.fMeasure(1));
//		System.out.printf(" %4.3f \n\n", (1-eval.errorRate()));
		results[index][0] = eval.precision(0);
		results[index][1] = eval.recall(0);
		results[index][2] = eval.fMeasure(0);
		results[index][3] = eval.precision(1);
		results[index][4] = eval.recall(1);
		results[index][5] = eval.fMeasure(1);
		results[index][6] = 1-eval.errorRate();
				
	}
 
Developer: Gu-Youngfeng, Project: CraTer, Lines of code: 40, Source file: FeatureSelectionAve.java

Example 7: getEvalResultbyChiSquare

import weka.classifiers.evaluation.Evaluation; // import the package/class this method depends on
	/**
	 * <p>Runs 10-fold cross-validation on the single ARFF file at <b>path</b>.</p>
	 * <p>Uses C4.5 with <b>SMOTE</b>, combined with <b>Chi-Square</b> attribute selection, to classify the dataset.</p>
	 * @param path dataset path
	 * @param index row of the results array in which to store the evaluation metrics
	 * @throws Exception
	 */
	public static void getEvalResultbyChiSquare(String path, int index) throws Exception{
		
		Instances ins = DataSource.read(path);
		int numAttr = ins.numAttributes();
		ins.setClassIndex(numAttr - 1);
		
		/**chi-squared filter to process the whole dataset first*/
		ChiSquaredAttributeEval evall = new ChiSquaredAttributeEval();	
		Ranker ranker = new Ranker();
		AttributeSelection selector = new AttributeSelection();
		
		selector.setEvaluator(evall);
		selector.setSearch(ranker);
		selector.setInputFormat(ins);
		ins = Filter.useFilter(ins, selector);
		
		SMOTE smote = new SMOTE();
		smote.setInputFormat(ins);
		
		/** classifiers setting*/
		J48 j48 = new J48();
//		j48.setConfidenceFactor(0.4f);
		j48.buildClassifier(ins);

		FilteredClassifier fc = new FilteredClassifier();
		fc.setClassifier(j48);
		fc.setFilter(smote);
			
		Evaluation eval = new Evaluation(ins);	
		eval.crossValidateModel(fc, ins, 10, new Random(1));
		
//		System.out.printf(" %4.3f %4.3f %4.3f", eval.precision(0), eval.recall(0), eval.fMeasure(0));
//		System.out.printf(" %4.3f %4.3f %4.3f", eval.precision(1), eval.recall(1), eval.fMeasure(1));
//		System.out.printf(" %4.3f \n\n", (1-eval.errorRate()));
		results[index][0] = eval.precision(0);
		results[index][1] = eval.recall(0);
		results[index][2] = eval.fMeasure(0);
		results[index][3] = eval.precision(1);
		results[index][4] = eval.recall(1);
		results[index][5] = eval.fMeasure(1);
		results[index][6] = 1-eval.errorRate();
				
	}
 
Developer: Gu-Youngfeng, Project: CraTer, Lines of code: 50, Source file: FeatureSelectionAve.java

Example 8: getEvalResultbyInfoGain

import weka.classifiers.evaluation.Evaluation; // import the package/class this method depends on
	/**
	 * <p>Runs 10-fold cross-validation on the single ARFF file at <b>path</b>.</p>
	 * <p>Uses C4.5 with <b>SMOTE</b>, combined with <b>Information Gain</b> attribute selection, to classify the dataset.</p>
	 * @param path dataset path
	 * @param index row of the results array in which to store the evaluation metrics
	 * @throws Exception
	 */
	public static void getEvalResultbyInfoGain(String path, int index) throws Exception{
		
		Instances ins = DataSource.read(path);
		int numAttr = ins.numAttributes();
		ins.setClassIndex(numAttr - 1);
		
		/**information gain filter to process the whole dataset first*/
		InfoGainAttributeEval evall = new InfoGainAttributeEval();
		Ranker ranker = new Ranker();
		AttributeSelection selector = new AttributeSelection();
		
		selector.setEvaluator(evall);
		selector.setSearch(ranker);
		selector.setInputFormat(ins);
		ins = Filter.useFilter(ins, selector);
		
		SMOTE smote = new SMOTE();
		smote.setInputFormat(ins);
		
		/** classifiers setting*/
		J48 j48 = new J48();
//		j48.setConfidenceFactor(0.4f);
		j48.buildClassifier(ins);

		FilteredClassifier fc = new FilteredClassifier();
		fc.setClassifier(j48);
		fc.setFilter(smote);
			
		Evaluation eval = new Evaluation(ins);	
		eval.crossValidateModel(fc, ins, 10, new Random(1));
		
//		System.out.printf(" %4.3f %4.3f %4.3f", eval.precision(0), eval.recall(0), eval.fMeasure(0));
//		System.out.printf(" %4.3f %4.3f %4.3f", eval.precision(1), eval.recall(1), eval.fMeasure(1));
//		System.out.printf(" %4.3f \n\n", (1-eval.errorRate()));
		results[index][0] = eval.precision(0);
		results[index][1] = eval.recall(0);
		results[index][2] = eval.fMeasure(0);
		results[index][3] = eval.precision(1);
		results[index][4] = eval.recall(1);
		results[index][5] = eval.fMeasure(1);
		results[index][6] = 1-eval.errorRate();
				
	}
 
Developer: Gu-Youngfeng, Project: CraTer, Lines of code: 50, Source file: FeatureSelectionAve.java

Example 9: getEvalResultbyGainRatio

import weka.classifiers.evaluation.Evaluation; // import the package/class this method depends on
	/**
	 * <p>Runs 10-fold cross-validation on the single ARFF file at <b>path</b>.</p>
	 * <p>Uses C4.5 with <b>SMOTE</b>, combined with <b>Information Gain Ratio</b> attribute selection, to classify the dataset.</p>
	 * @param path dataset path
	 * @param index row of the results array in which to store the evaluation metrics
	 * @throws Exception
	 */
	public static void getEvalResultbyGainRatio(String path, int index) throws Exception{
		
		Instances ins = DataSource.read(path);
		int numAttr = ins.numAttributes();
		ins.setClassIndex(numAttr - 1);
		
		/**information gain ratio filter to process the whole dataset first*/
		GainRatioAttributeEval evall = new GainRatioAttributeEval();
		Ranker ranker = new Ranker();
		AttributeSelection selector = new AttributeSelection();
		
		selector.setEvaluator(evall);
		selector.setSearch(ranker);
		selector.setInputFormat(ins);
		ins = Filter.useFilter(ins, selector);
		
		SMOTE smote = new SMOTE();
		smote.setInputFormat(ins);
		
		/** classifiers setting*/
		J48 j48 = new J48();
//		j48.setConfidenceFactor(0.4f);
		j48.buildClassifier(ins);

		FilteredClassifier fc = new FilteredClassifier();
		fc.setClassifier(j48);
		fc.setFilter(smote);
			
		Evaluation eval = new Evaluation(ins);	
		eval.crossValidateModel(fc, ins, 10, new Random(1));
		
//		System.out.printf(" %4.3f %4.3f %4.3f", eval.precision(0), eval.recall(0), eval.fMeasure(0));
//		System.out.printf(" %4.3f %4.3f %4.3f", eval.precision(1), eval.recall(1), eval.fMeasure(1));
//		System.out.printf(" %4.3f \n\n", (1-eval.errorRate()));
		results[index][0] = eval.precision(0);
		results[index][1] = eval.recall(0);
		results[index][2] = eval.fMeasure(0);
		results[index][3] = eval.precision(1);
		results[index][4] = eval.recall(1);
		results[index][5] = eval.fMeasure(1);
		results[index][6] = 1-eval.errorRate();
				
	}
 
Developer: Gu-Youngfeng, Project: CraTer, Lines of code: 50, Source file: FeatureSelectionAve.java

Example 10: getEvalResultbyCorrelation

import weka.classifiers.evaluation.Evaluation; // import the package/class this method depends on
	/**
	 * <p>Runs 10-fold cross-validation on the single ARFF file at <b>path</b>.</p>
	 * <p>Uses C4.5 with <b>SMOTE</b>, combined with <b>Correlation</b> attribute selection, to classify the dataset.</p>
	 * @param path dataset path
	 * @param index row of the results array in which to store the evaluation metrics
	 * @throws Exception
	 */
	public static void getEvalResultbyCorrelation(String path, int index) throws Exception{
		
		Instances ins = DataSource.read(path);
		int numAttr = ins.numAttributes();
		ins.setClassIndex(numAttr - 1);
		
		/** correlation filter to process the whole dataset first*/
		CorrelationAttributeEval evall = new CorrelationAttributeEval();
		Ranker ranker = new Ranker();
		AttributeSelection selector = new AttributeSelection();
		
		selector.setEvaluator(evall);
		selector.setSearch(ranker);
		selector.setInputFormat(ins);
		ins = Filter.useFilter(ins, selector);
		
		SMOTE smote = new SMOTE();
		smote.setInputFormat(ins);
		
		/** classifiers setting*/
		J48 j48 = new J48();
//		j48.setConfidenceFactor(0.4f);
		j48.buildClassifier(ins);

		FilteredClassifier fc = new FilteredClassifier();
		fc.setClassifier(j48);
		fc.setFilter(smote);
			
		Evaluation eval = new Evaluation(ins);	
		eval.crossValidateModel(fc, ins, 10, new Random(1));
		
//		System.out.printf(" %4.3f %4.3f %4.3f", eval.precision(0), eval.recall(0), eval.fMeasure(0));
//		System.out.printf(" %4.3f %4.3f %4.3f", eval.precision(1), eval.recall(1), eval.fMeasure(1));
//		System.out.printf(" %4.3f \n\n", (1-eval.errorRate()));
		results[index][0] = eval.precision(0);
		results[index][1] = eval.recall(0);
		results[index][2] = eval.fMeasure(0);
		results[index][3] = eval.precision(1);
		results[index][4] = eval.recall(1);
		results[index][5] = eval.fMeasure(1);
		results[index][6] = 1-eval.errorRate();
				
	}
 
Developer: Gu-Youngfeng, Project: CraTer, Lines of code: 50, Source file: FeatureSelectionAve.java

Example 11: getEvalResultbyReliefF

import weka.classifiers.evaluation.Evaluation; // import the package/class this method depends on
	/**
	 * <p>Runs 10-fold cross-validation on the single ARFF file at <b>path</b>.</p>
	 * <p>Uses C4.5 with <b>SMOTE</b>, combined with <b>ReliefF</b> attribute selection, to classify the dataset.</p>
	 * @param path dataset path
	 * @param index row of the results array in which to store the evaluation metrics
	 * @throws Exception
	 */
	public static void getEvalResultbyReliefF(String path, int index) throws Exception{
		
		Instances ins = DataSource.read(path);
		int numAttr = ins.numAttributes();
		ins.setClassIndex(numAttr - 1);
		
		/** ReliefF filter to process the whole dataset first*/
		ReliefFAttributeEval evall = new ReliefFAttributeEval();
		Ranker ranker = new Ranker();
		AttributeSelection selector = new AttributeSelection();
		
		selector.setEvaluator(evall);
		selector.setSearch(ranker);
		selector.setInputFormat(ins);
		ins = Filter.useFilter(ins, selector);
		
		SMOTE smote = new SMOTE();
		smote.setInputFormat(ins);
		
		/** classifiers setting*/
		J48 j48 = new J48();
//		j48.setConfidenceFactor(0.4f);
		j48.buildClassifier(ins);

		FilteredClassifier fc = new FilteredClassifier();
		fc.setClassifier(j48);
		fc.setFilter(smote);
			
		Evaluation eval = new Evaluation(ins);	
		eval.crossValidateModel(fc, ins, 10, new Random(1));
		
//		System.out.printf(" %4.3f %4.3f %4.3f", eval.precision(0), eval.recall(0), eval.fMeasure(0));
//		System.out.printf(" %4.3f %4.3f %4.3f", eval.precision(1), eval.recall(1), eval.fMeasure(1));
//		System.out.printf(" %4.3f \n\n", (1-eval.errorRate()));
		results[index][0] = eval.precision(0);
		results[index][1] = eval.recall(0);
		results[index][2] = eval.fMeasure(0);
		results[index][3] = eval.precision(1);
		results[index][4] = eval.recall(1);
		results[index][5] = eval.fMeasure(1);
		results[index][6] = 1-eval.errorRate();
				
	}
 
Developer: Gu-Youngfeng, Project: CraTer, Lines of code: 50, Source file: FeatureSelectionAve.java

Example 12: learnParameters

import weka.classifiers.evaluation.Evaluation; // import the package/class this method depends on
/**
 * Learns the matching rule from the parsed features via cross-validation,
 * using the configured parameters. Feature subset selection is additionally
 * performed if this.forwardSelection or this.backwardSelection is set.
 * 
 * @param features
 *            the features from which to learn the classifier
 * @return the cross-validation Performance, or null if evaluation fails
 */

@Override
public Performance learnParameters(FeatureVectorDataSet features) {
	// create training data in Weka format
	Instances trainingData = transformToWeka(features, this.trainingSet);

	try {
		Evaluation eval = new Evaluation(trainingData);
		// apply feature subset selection
		if (this.forwardSelection || this.backwardSelection) {

			GreedyStepwise search = new GreedyStepwise();
			search.setSearchBackwards(this.backwardSelection);

			this.fs = new AttributeSelection();
			WrapperSubsetEval wrapper = new WrapperSubsetEval();

			// Do feature subset selection, but using a 10-fold cross
			// validation
			wrapper.buildEvaluator(trainingData);
			wrapper.setClassifier(this.classifier);
			wrapper.setFolds(10);
			wrapper.setThreshold(0.01);

			this.fs.setEvaluator(wrapper);
			this.fs.setSearch(search);

			this.fs.SelectAttributes(trainingData);

			trainingData = fs.reduceDimensionality(trainingData);

		}
		// perform 10-fold Cross Validation to evaluate classifier
		eval.crossValidateModel(this.classifier, trainingData, 10, new Random(1));
		System.out.println(eval.toSummaryString("\nResults\n\n", false));
		
		this.classifier.buildClassifier(trainingData);
		
		int truePositive = (int) eval.numTruePositives(trainingData.classIndex());
		int falsePositive = (int) eval.numFalsePositives(trainingData.classIndex());
		int falseNegative = (int) eval.numFalseNegatives(trainingData.classIndex());
		Performance performance = new Performance(truePositive, truePositive + falsePositive,
				truePositive + falseNegative);

		return performance;

	} catch (Exception e) {
		e.printStackTrace();
		return null;
	}
}
 
Developer: olehmberg, Project: winter, Lines of code: 62, Source file: WekaMatchingRule.java
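Example 12 rebuilds precision-style counts from numTruePositives/numFalsePositives/numFalseNegatives. For reference, here is a short sketch of other aggregate accessors that Evaluation exposes after crossValidateModel. It is a sketch only: data/features.arff is a placeholder path, and the per-class calls mirror the precision(0)/recall(1) style used by the CraTer examples above.

import java.util.Random;
import weka.classifiers.evaluation.Evaluation;
import weka.classifiers.trees.J48;
import weka.core.Instances;
import weka.core.converters.ConverterUtils.DataSource;

public class EvaluationMetricsSketch {
    public static void main(String[] args) throws Exception {
        Instances data = DataSource.read("data/features.arff"); // placeholder path
        data.setClassIndex(data.numAttributes() - 1);

        Evaluation eval = new Evaluation(data);
        eval.crossValidateModel(new J48(), data, 10, new Random(1));

        // Per-class metrics take the index of the class *value* (0, 1, ...)
        System.out.printf("class 0: P=%.3f R=%.3f F1=%.3f%n",
                eval.precision(0), eval.recall(0), eval.fMeasure(0));

        // Aggregate metrics: accuracy plus class-frequency-weighted averages
        System.out.printf("accuracy=%.3f%n", 1 - eval.errorRate());
        System.out.printf("weighted: P=%.3f R=%.3f F1=%.3f%n",
                eval.weightedPrecision(), eval.weightedRecall(), eval.weightedFMeasure());
        System.out.printf("AUC(class 0)=%.3f%n", eval.areaUnderROC(0));
    }
}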

Example 13: main

import weka.classifiers.evaluation.Evaluation; // import the package/class this method depends on
public static void main(String[] args) {

        try {
            ConverterUtils.DataSource source = new ConverterUtils.DataSource("fertility_Diagnosis.arff");
            Instances instances = source.getDataSet(9);

            Evaluation eval = new Evaluation(instances);

            J48 arvore = new J48();
            arvore.setConfidenceFactor(0.1f);
            arvore.setReducedErrorPruning(false);
            arvore.setBinarySplits(false);
            arvore.setCollapseTree(false);
            arvore.setUseLaplace(false);
            arvore.setUseMDLcorrection(true);
            arvore.setUnpruned(true);
            arvore.setSubtreeRaising(false);
            arvore.setNumFolds(30);

            arvore.buildClassifier(instances);
            eval.crossValidateModel(arvore, instances, 10, new Random(1));

            System.out.println(eval.toSummaryString());
            System.out.println(eval.toMatrixString());

        }catch (Exception ignored){
            ignored.printStackTrace();
        }

    }
 
Developer: K-weka, Project: k-weka, Lines of code: 39, Source file: arvoreDecisao.java


Note: the weka.classifiers.evaluation.Evaluation.crossValidateModel examples in this article were compiled by 纯净天空 from open-source code and documentation platforms such as GitHub and MSDocs. The code snippets were selected from open-source projects contributed by various developers; copyright of the source code remains with the original authors. Please consult each project's license before redistributing or using the code, and do not republish this material without permission.