

Java J48 Class Code Examples

This article collects typical usage examples of the Java class weka.classifiers.trees.J48. If you are wondering what the J48 class does, how to use it, or are looking for working J48 examples, the curated code samples below may help.


The J48 class belongs to the weka.classifiers.trees package. Fifteen code examples of the J48 class are shown below, sorted by popularity by default.
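Before the project-specific examples, here is a minimal, self-contained sketch of typical J48 usage (the ARFF path is a placeholder assumption, not taken from any of the projects below): load a dataset, build the tree, and cross-validate it.

import java.util.Random;
import weka.classifiers.Evaluation;
import weka.classifiers.trees.J48;
import weka.core.Instances;
import weka.core.converters.ConverterUtils.DataSource;

public class J48Minimal {
    public static void main(String[] args) throws Exception {
        Instances data = DataSource.read("data/iris.arff");     // placeholder path
        data.setClassIndex(data.numAttributes() - 1);            // class is the last attribute

        J48 tree = new J48();
        tree.buildClassifier(data);
        System.out.println(tree);                                // textual view of the decision tree

        Evaluation eval = new Evaluation(data);
        eval.crossValidateModel(new J48(), data, 10, new Random(1));
        System.out.println(eval.toSummaryString());
    }
}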

Example 1: getClassifierClassName

import weka.classifiers.trees.J48; // import the required package/class
/**
 * Gets a classifier's fully qualified class name from a short name.
 */
public static String getClassifierClassName(String classifierName) {
	String className = "";
	switch (classifierName) {
	case "SGD":
		className = SGD.class.toString();
		break;
	case "SGDText":
		className = SGDText.class.toString();
		break;
	case "J48":
		className = J48.class.toString();
		break;
	case "PART":
		className = PART.class.toString();
		break;
	case "NaiveBayes":
		className = NaiveBayes.class.toString();
		break;
	case "NBUpdateable":
		className = NaiveBayesUpdateable.class.toString();
		break;
	case "AdaBoostM1":
		className = AdaBoostM1.class.toString();
		break;
	case "LogitBoost":
		className = LogitBoost.class.toString();
		break;
	case "Bagging":
		className = Bagging.class.toString();
		break;
	case "Stacking":
		className = Stacking.class.toString();
		break;
	case "AdditiveRegression":
		className = AdditiveRegression.class.toString();
		break;
	case "Apriori":
		className = Apriori.class.toString();
		break;
	default:
		className = SGD.class.toString();
	}
	className = className.substring(6); // strip the leading "class " prefix produced by Class.toString()
	return className;
}
 
Developer: Eyasics, Project: recon, Lines: 49, Source: Util.java
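As a hypothetical usage sketch (the demo class and the "-U" option choice are assumptions, not part of the recon project): because getClassifierClassName strips the "class " prefix from Class.toString(), the returned string is a fully qualified name that can be handed to Weka's AbstractClassifier.forName (Weka 3.7+) to instantiate the classifier reflectively.

import weka.classifiers.AbstractClassifier;
import weka.classifiers.Classifier;

public class UtilDemo {
    public static void main(String[] args) throws Exception {
        String className = Util.getClassifierClassName("J48");  // "weka.classifiers.trees.J48"
        Classifier clf = AbstractClassifier.forName(className, new String[] { "-U" }); // unpruned J48
        System.out.println(clf.getClass().getName());
    }
}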

Example 2: runExps

import weka.classifiers.trees.J48; // import the required package/class
public void runExps(){
	Classifier c1 = new SMO();
	Classifier c2 = new J48();
	Classifier c3 = new NaiveBayes();
	trainModel(c1, "SVM");
	trainModel(c2, "J48");
	trainModel(c3, "Naive Bayes");
	
}
 
Developer: uiuc-ischool-scanr, Project: SAIL, Lines: 10, Source: TrainModel.java
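The trainModel helper is not shown in this snippet; the following is only a sketch of what such a helper might look like (the trainingData field, the 10-fold setup, and the output file name are assumptions, not the SAIL project's actual code).

import java.util.Random;
import weka.classifiers.Classifier;
import weka.classifiers.Evaluation;

private void trainModel(Classifier classifier, String name) {
    try {
        Evaluation eval = new Evaluation(trainingData);
        eval.crossValidateModel(classifier, trainingData, 10, new Random(1)); // 10-fold CV
        System.out.println(name + " accuracy: " + (1 - eval.errorRate()));
        classifier.buildClassifier(trainingData);                             // final model on all data
        weka.core.SerializationHelper.write(name + ".model", classifier);     // persist the model
    } catch (Exception e) {
        e.printStackTrace();
    }
}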

Example 3: instance

import weka.classifiers.trees.J48; // import the required package/class
/**
 * Gets the appropriate subclass explainer (from the standard set that has been implemented in the toolkit),
 * depending on the classifier model (e.g. NaiveBayes, J48).
 * @param enactor
 * @param classifierWrapper
 * @return
 */
public static WekaExplainer<?> instance(Enactor enactor, ClassifierWrapper classifierWrapper) throws Exception {
	Classifier classifier = classifierWrapper.getClassifier();
	
	if (classifier instanceof NaiveBayes) {
		return new NaiveBayesExplainer(enactor, classifierWrapper);
	}
	else if (classifier instanceof J48) {
		return new J48Explainer(enactor, classifierWrapper);
	}
	else {
		return null; // TODO: doesn't support other Weka Classifiers yet
	}
}
 
Developer: claudiotrindade, Project: contexttoolkit, Lines: 21, Source: WekaExplainer.java

Example 4: testMultipleModels

import weka.classifiers.trees.J48; // import the required package/class
@Test
public void testMultipleModels() throws FOSException {
    ModelConfig modelConfig = new ModelConfig();
    modelConfig.setProperty(WekaModelConfig.CLASSIFIER_IMPL, Vote.class.getName());
    modelConfig.setProperty(WekaModelConfig.CLASSIFIER_CONFIG, "-R MAX -B \""+J48.class.getName()+"\" -B \"" + NaiveBayes.class.getName() + "\"");

    MultipleClassifiersCombiner classifier = (MultipleClassifiersCombiner)WekaClassifierFactory.create(modelConfig);
    Assert.assertEquals(2,classifier.getClassifiers().length);
    Assert.assertEquals(J48.class,classifier.getClassifiers()[0].getClass());
    Assert.assertEquals(NaiveBayes.class,classifier.getClassifiers()[1].getClass());
}
 
Developer: feedzai, Project: fos-weka, Lines: 12, Source: WekaClassifierFactoryTest.java
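For reference, the same ensemble that the option string "-R MAX -B ... -B ..." describes can be assembled programmatically; a sketch assuming the standard weka.classifiers.meta.Vote API (the helper name buildVote is hypothetical):

import weka.classifiers.Classifier;
import weka.classifiers.bayes.NaiveBayes;
import weka.classifiers.meta.Vote;
import weka.classifiers.trees.J48;
import weka.core.SelectedTag;

private static Vote buildVote() {
    Vote vote = new Vote();
    vote.setClassifiers(new Classifier[] { new J48(), new NaiveBayes() });    // -B ... -B ...
    vote.setCombinationRule(new SelectedTag(Vote.MAX_RULE, Vote.TAGS_RULES)); // -R MAX
    return vote;
}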

Example 5: main

import weka.classifiers.trees.J48; // import the required package/class
public static void main(String[] args) {
    try {
        BookDecisionTree decisionTree = new BookDecisionTree("books.arff");
        J48 tree = decisionTree.performTraining();
        System.out.println(tree.toString());
        
        Instance testInstance = decisionTree.
                getTestInstance("Leather", "yes", "historical");
        int result = (int) tree.classifyInstance(testInstance);
        String results = decisionTree.trainingData.attribute(3).value(result);
        System.out.println(
                "Test with: " + testInstance + "  Result: " + results);

        testInstance = decisionTree.
                getTestInstance("Paperback", "no", "historical");
        result = (int) tree.classifyInstance(testInstance);
        results = decisionTree.trainingData.attribute(3).value(result);
        System.out.println(
                "Test with: " + testInstance + "  Result: " + results);
    } catch (Exception ex) {
        ex.printStackTrace();
    }
}
 
Developer: PacktPublishing, Project: Machine-Learning-End-to-Endguide-for-Java-developers, Lines: 24, Source: BookDecisionTree.java
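The performTraining method appears in example 9 below; getTestInstance is not shown anywhere in this article, so here is only a hedged sketch of how such a helper could build a nominal test instance against the training header (the parameter names and attribute order are assumptions).

import weka.core.DenseInstance;
import weka.core.Instance;

private Instance getTestInstance(String binding, String multivolume, String genre) {
    Instance instance = new DenseInstance(4);             // Weka 3.7+; older versions use new Instance(4)
    instance.setDataset(trainingData);                     // attach the header so nominal lookups work
    instance.setValue(trainingData.attribute(0), binding);
    instance.setValue(trainingData.attribute(1), multivolume);
    instance.setValue(trainingData.attribute(2), genre);
    return instance;
}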

Example 6: main

import weka.classifiers.trees.J48; // import the required package/class
public static void main(String[] args) throws Exception {
//        String arffFilename = Utils.getOption("arff", args); // e.g. -arff emotions.arff
//        String xmlFilename = Utils.getOption("xml", args); // e.g. -xml emotions.xml
        String dataSetName = "facet";
        String arffFilename = "M:\\我是研究生\\任務\\分麵樹的生成\\Facet\\experiment\\baseline\\引用最高的\\dataset\\" + dataSetName + "\\" + dataSetName + ".arff"; // e.g. -arff emotions.arff
        String xmlFilename = "M:\\我是研究生\\任務\\分麵樹的生成\\Facet\\experiment\\baseline\\引用最高的\\dataset\\" + dataSetName + "\\" + dataSetName + ".xml"; // e.g. -xml emotions.xml

        MultiLabelInstances dataset = new MultiLabelInstances(arffFilename, xmlFilename);

        RAkEL learner1 = new RAkEL(new LabelPowerset(new J48()));

        MLkNN learner2 = new MLkNN();

        Evaluator eval = new Evaluator();
        MultipleEvaluation results;

        int numFolds = 10;
        results = eval.crossValidate(learner1, dataset, numFolds);
        System.out.println(results);
        System.out.println("=========================================.");
        results = eval.crossValidate(learner2, dataset, numFolds);
        System.out.println(results);
        System.out.println("done.");
    }
 
Developer: guozhaotong, Project: FacetExtract, Lines: 25, Source: MulanExp1.java

Example 7: Classification

import weka.classifiers.trees.J48; // import the required package/class
public Classification(ArrayList<ClassifierType> cType) {

		cls = new Classifier[cType.size()];
		eval = new Evaluation[cType.size()];

		for(int i = 0; i < cType.size();i++){			
			switch(cType.get(i)){
			// TODO Will we use J48 or ID3 implementation of decision trees?
			case J48:
				cls[i] = new J48();
				break;
			case NAIVE_BAYES:
				// If bType == Incremental then cls = new UpdateableNaiveBayes(); else
				cls[i] = new NaiveBayes();
				break;
			case IBK:
				cls[i] = new IBk();
				break;
			case COSINE:
				cls[i] = useCosine();
				// TODO Add other cases: Decision Rule, KNN and so on.
			}
		}
	}
 
Developer: a-n-d-r-e-i, Project: seagull, Lines: 25, Source: Classification.java

Example 8: classify

import weka.classifiers.trees.J48; // import the required package/class
/**
 * Classifies function-wise test instances and maps them to the instance labels given in the list passed as the argument.
 *
 * @param list - labels of instances contained in the test set that need to be classified.
 * @return LinkedHashMap containing the instance labels and the associated classification results.
 * @throws ClassificationFailedException
 */
@Override
public LinkedHashMap<String, String> classify(LinkedList<String> list) throws ClassificationFailedException {
    output = new LinkedHashMap<String, String>();
    J48 j48 = new J48();
    Remove rm = new Remove();
    rm.setAttributeIndices("1");
    FilteredClassifier fc = new FilteredClassifier();
    fc.setFilter(rm);
    fc.setClassifier(j48);
    try {
        fc.buildClassifier(trainSet);
        for (int i = 0; i < testSet.numInstances(); i++) {
            double pred = fc.classifyInstance(testSet.instance(i));
            if (list.isEmpty()) {
                output.put(String.valueOf(i + 1), testSet.classAttribute().value((int) pred));
            } else {
                output.put(list.get(i), testSet.classAttribute().value((int) pred));
            }
        }
    } catch (Exception ex) {
        throw new ClassificationFailedException();
    }
    return output;
}
 
Developer: sunimalr, Project: vimarsha, Lines: 32, Source: FunctionWiseClassifier.java

Example 9: performTraining

import weka.classifiers.trees.J48; // import the required package/class
private J48 performTraining() {
        J48 j48 = new J48();
        String[] options = {"-U"}; // -U: build an unpruned tree
        try {
            j48.setOptions(options);
            j48.buildClassifier(trainingData);
        } catch (Exception ex) {
            ex.printStackTrace();
        }
        return j48;
    }
 
Developer: PacktPublishing, Project: Java-Data-Science-Made-Easy, Lines: 13, Source: BookDecisionTree.java
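The "-U" option can also be configured through J48's typed setter; a small sketch (the method name performTrainingUnpruned is hypothetical, while setUnpruned is part of the standard J48 API):

private J48 performTrainingUnpruned() {
    J48 j48 = new J48();
    j48.setUnpruned(true);                 // same effect as the "-U" option
    try {
        j48.buildClassifier(trainingData);
    } catch (Exception ex) {
        ex.printStackTrace();
    }
    return j48;
}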

Example 10: getEvalResultbySMOTE

import weka.classifiers.trees.J48; // import the required package/class
/***
	 * <p>Runs 10-fold cross-validation on the single ARFF file at <b>path</b>.</p>
	 * <p>Uses C4.5 (J48) with <b>SMOTE</b> to classify the dataset.</p>
	 * @param path dataset path
	 * @param index row of the results array in which to store the metrics
	 * @throws Exception
	 */
	public static void getEvalResultbySMOTE(String path, int index) throws Exception{
		
		Instances ins = DataSource.read(path);
		int numAttr = ins.numAttributes();
		ins.setClassIndex(numAttr - 1);
		
		SMOTE smote = new SMOTE();
		smote.setInputFormat(ins);
		
		/** classifier setup */
		J48 j48 = new J48();
//		j48.setConfidenceFactor(0.4f);
		j48.buildClassifier(ins); // not strictly needed: crossValidateModel below retrains the classifier in each fold

		// Wrapping SMOTE inside a FilteredClassifier applies oversampling to the training folds only.
		FilteredClassifier fc = new FilteredClassifier();
		fc.setClassifier(j48);
		fc.setFilter(smote);
			
		Evaluation eval = new Evaluation(ins);	
		eval.crossValidateModel(fc, ins, 10, new Random(1));
		
//		System.out.printf(" %4.3f %4.3f %4.3f", eval.precision(0), eval.recall(0), eval.fMeasure(0));
//		System.out.printf(" %4.3f %4.3f %4.3f", eval.precision(1), eval.recall(1), eval.fMeasure(1));
//		System.out.printf(" %4.3f \n\n", (1-eval.errorRate()));
		results[index][0] = eval.precision(0);
		results[index][1] = eval.recall(0);
		results[index][2] = eval.fMeasure(0);
		results[index][3] = eval.precision(1);
		results[index][4] = eval.recall(1);
		results[index][5] = eval.fMeasure(1);
		results[index][6] = 1-eval.errorRate();
				
	}
 
Developer: Gu-Youngfeng, Project: CraTer, Lines: 40, Source: ImbalanceProcessingAve.java

Example 11: run

import weka.classifiers.trees.J48; // import the required package/class
public void run()
    throws Exception
{
    String language = "en";
    String trainCorpora = DemoConstants.TRAIN_FOLDER_CROSS_VALIDATION;
    String trainFileSuffix = "*.txt";

    CollectionReaderDescription crd = CollectionReaderFactory.createReaderDescription(
            LineTokenTagReader.class, LineTokenTagReader.PARAM_LANGUAGE, language,
            LineTokenTagReader.PARAM_SOURCE_LOCATION, trainCorpora,
            LineTokenTagReader.PARAM_PATTERNS, trainFileSuffix);

    FlexTagCrossValidation flex = new FlexTagCrossValidation(crd, 2);

    if (System.getProperty("DKPRO_HOME") == null) {
        flex.setDKProHomeFolder("target/home");
    }
    flex.setExperimentName("WekaCrossValidationDemo");

    flex.setFeatures(TcFeatureFactory.create(LuceneCharacterNGram.class));
    
    List<Object> configuration = asList(new Object[] { J48.class.getName() });
    flex.setClassifier(Classifier.WEKA, configuration);
    flex.addReport(CvWekafAvgKnownUnknownAccuracyReport.class);
    flex.execute();
}
 
Developer: Horsmann, Project: FlexTag, Lines: 27, Source: ExampleWekaCrossValidation.java

Example 12: training

import weka.classifiers.trees.J48; // import the required package/class
public void training(double[][] trainFeatures) throws Exception {
	/**
	 * TODO (needs to be replaced): preprocess trainFeatures into the Instances
	 * object below instead of reading a fixed ARFF file; see the sketch after this example.
	 */


	BufferedReader reader = new BufferedReader(
			new FileReader("/Users/karl/Work/database/forsimpletest/1.arff"));
	Instances inst = new Instances(reader);
	reader.close();
	inst.setClassIndex(inst.numAttributes() - 1);

	//Init classifier
	Classifier cls = new J48();
	cls.buildClassifier(inst);

	// serialize model
	weka.core.SerializationHelper.write(modelFile, cls);
}
 
Developer: KangCai, Project: AudioProcessingBox, Lines: 21, Source: BasicClassification.java
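The preprocessing step that the TODO refers to is missing; this is a hedged sketch of one way to turn a double[][] feature matrix plus a label array into Weka Instances (the attribute names, the two class values, and the labels parameter are assumptions, not part of the original code).

import java.util.ArrayList;
import weka.core.Attribute;
import weka.core.DenseInstance;
import weka.core.Instances;

static Instances toInstances(double[][] features, double[] labels) {
    ArrayList<Attribute> attrs = new ArrayList<>();
    for (int i = 0; i < features[0].length; i++) {
        attrs.add(new Attribute("f" + i));                  // numeric feature attributes
    }
    ArrayList<String> classValues = new ArrayList<>();
    classValues.add("neg");
    classValues.add("pos");
    attrs.add(new Attribute("class", classValues));         // nominal class attribute (required by J48)
    Instances inst = new Instances("train", attrs, features.length);
    inst.setClassIndex(inst.numAttributes() - 1);
    for (int i = 0; i < features.length; i++) {
        double[] row = new double[inst.numAttributes()];
        System.arraycopy(features[i], 0, row, 0, features[i].length);
        row[inst.numAttributes() - 1] = labels[i];           // 0 = "neg", 1 = "pos"
        inst.add(new DenseInstance(1.0, row));
    }
    return inst;
}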

Example 13: main

import weka.classifiers.trees.J48; // import the required package/class
/**
	 * For testing
	 * @param args
	 */
	public static void main(String[] args) {
		// load cModel and header from files
		J48 cModel = (J48) ClassifierWrapper.loadClassifier("demos/imautostatus-dtree/imautostatus.model");
		Instances header = ClassifierWrapper.loadDataset("demos/imautostatus-dtree/imautostatus-test.arff");
//		Instance instance = header.instance(0); // use one instance for testing
		
		// then parse it
		try {
			Map<String, DNF> valueTraces = J48Parser.parse(cModel, header);
			
			for (String value : valueTraces.keySet()) {
				DNF traces = valueTraces.get(value);
				System.out.println(value + "(size=" + traces.size() + "): " + traces);
			}
		} catch (Exception e) {
			e.printStackTrace();
		}
	}
 
Developer: claudiotrindade, Project: contexttoolkit, Lines: 23, Source: J48Parser.java

Example 14: getClassifier

import weka.classifiers.trees.J48; // import the required package/class
/**
 * Returns a new classifier based on the given algorithm.
 */
protected weka.classifiers.Classifier getClassifier(
		EClassificationAlgorithm algorithm) {
	switch (algorithm) {
	case DECISION_TREE_REP:
		return new REPTree();
	case SUPPORT_VECTOR_MACHINE_SMO:
		return new SMO();
	case COST_SENSITIVE_CLASSIFIER:
		return new CostSensitiveClassifier();
	case DECISION_TREE_J48:
		return new J48();
	default:
		throw new AssertionError(
				"Cannot create a classifier without a specified algorithm.");
	}

}
 
Developer: vimaier, Project: conqat, Lines: 21, Source: BaseWekaClassifier.java

Example 15: run

import weka.classifiers.trees.J48; // import the required package/class
public void run() throws Exception {
    // data
    ArffLoader arffLoader = new ArffLoader();
    arffLoader.setSource(new File("/home/bhill/apps/weka/weka-3-6-11/data/soybean.arff"));
    Instances data = arffLoader.getDataSet();
    data.setClassIndex(data.numAttributes()-1);

    J48 j48 = new J48();
    j48.buildClassifier(data);



    // j48.m_root.m_sons

    System.out.println();
    // J48.train()
    // check for nodes
}
 
Developer: williamClanton, Project: jbossBA, Lines: 19, Source: Main.java
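The commented-out j48.m_root.m_sons line reaches into a protected field; the built tree can instead be inspected through J48's public API. A sketch of statements that could replace that exploration inside run() (measureNumLeaves, measureTreeSize, and graph are standard J48 methods):

System.out.println(j48);                                   // textual tree
System.out.println("Leaves: " + j48.measureNumLeaves());
System.out.println("Size:   " + j48.measureTreeSize());
System.out.println(j48.graph());                           // GraphViz DOT representation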


Note: The weka.classifiers.trees.J48 examples in this article were compiled from open-source code and documentation platforms such as GitHub and MSDocs. The snippets are drawn from open-source projects contributed by their respective authors; copyright remains with the original authors, and redistribution or use should follow the corresponding project licenses. Do not republish without permission.