当前位置: 首页>>代码示例>>Java>>正文


Java Classifier.predict方法代码示例

本文整理汇总了Java中it.uniroma2.sag.kelp.predictionfunction.classifier.Classifier.predict方法的典型用法代码示例。如果您正苦于以下问题:Java Classifier.predict方法的具体用法?Java Classifier.predict怎么用?Java Classifier.predict使用的例子?那么, 这里精选的方法代码示例或许可以为您提供帮助。您也可以进一步了解该方法所在it.uniroma2.sag.kelp.predictionfunction.classifier.Classifier的用法示例。


在下文中一共展示了Classifier.predict方法的4个代码示例,这些例子默认根据受欢迎程度排序。您可以为喜欢或者感觉有用的代码点赞,您的评价将有助于系统推荐出更棒的Java代码示例。

示例1: main

import it.uniroma2.sag.kelp.predictionfunction.classifier.Classifier; //导入方法依赖的package包/类
/**
 * Question-classification example: loads the TREC coarse-grained datasets,
 * deserializes a One-vs-All learning algorithm from a JSON specification,
 * trains it, and reports multiclass accuracy on the test set.
 */
public static void main(String[] args) {
	try {
		// Silence verbose logging from the underlying learning algorithms.
		System.setProperty("org.slf4j.simpleLogger.defaultLogLevel", "WARN");

		// Read the training dataset (gzipped KeLP format).
		SimpleDataset trainingSet = new SimpleDataset();
		trainingSet.populate("src/main/resources/qc/train_5500.coarse.klp.gz");

		// Read the test dataset.
		SimpleDataset testSet = new SimpleDataset();
		testSet.populate("src/main/resources/qc/TREC_10.coarse.klp.gz");

		// Print per-label statistics for both datasets.
		System.out.println("Training set statistics");
		System.out.print("Examples number ");
		System.out.println(trainingSet.getNumberOfExamples());

		List<Label> classes = trainingSet.getClassificationLabels();

		for (Label l : classes) {
			System.out.println("Training Label " + l.toString() + " " + trainingSet.getNumberOfPositiveExamples(l));
			System.out.println("Training Label " + l.toString() + " " + trainingSet.getNumberOfNegativeExamples(l));

			System.out.println("Test Label " + l.toString() + " " + testSet.getNumberOfPositiveExamples(l));
			System.out.println("Test Label " + l.toString() + " " + testSet.getNumberOfNegativeExamples(l));
		}

		// Deserialize the One-vs-All learning algorithm from its JSON spec file.
		JacksonSerializerWrapper serializer = new JacksonSerializerWrapper();
		OneVsAllLearning ovaLearner = serializer.readValue(
				new File("src/main/resources/qc/learningAlgorithmSpecification.klp"), OneVsAllLearning.class);

		ovaLearner.setLabels(classes);

		// Learn and get the prediction function.
		ovaLearner.learn(trainingSet);
		Classifier f = ovaLearner.getPredictionFunction();

		// Classify the test examples and accumulate evaluation counts.
		MulticlassClassificationEvaluator evaluator = new MulticlassClassificationEvaluator(classes);
		for (Example e : testSet.getExamples()) {
			// FIX: classify the example being iterated. The original called
			// testSet.getNextExample(), which advances the dataset's internal
			// cursor independently of this loop and can pair a prediction with
			// the wrong gold example in addCount().
			ClassificationOutput p = f.predict(e);
			evaluator.addCount(e, p);
		}

		System.out.println("Accuracy: " + evaluator.getAccuracy());
	} catch (Exception e1) {
		// Example code: report and exit; a library would propagate instead.
		e1.printStackTrace();
	}
}
 
开发者ID:SAG-KeLP-Legacy,项目名称:kelp-examples,代码行数:48,代码来源:QuestionClassificationLearningFromJson.java

示例2: main

import it.uniroma2.sag.kelp.predictionfunction.classifier.Classifier; //导入方法依赖的package包/类
/**
 * Binary-classification example: trains a kernelized Passive-Aggressive
 * classifier with a polynomial kernel over a normalized linear kernel, then
 * reports accuracy on the test set.
 */
public static void main(String[] args) {
	try {
		// Read a dataset into a trainingSet variable.
		SimpleDataset trainingSet = new SimpleDataset();
		trainingSet.populate("src/main/resources/hellolearning/train.klp");
		// Read a dataset into a test variable.
		SimpleDataset testSet = new SimpleDataset();
		testSet.populate("src/main/resources/hellolearning/test.klp");

		// Define the positive class.
		StringLabel positiveClass = new StringLabel("+1");

		// Print some statistics.
		System.out.println("Training set statistics");
		System.out.print("Examples number ");
		System.out.println(trainingSet.getNumberOfExamples());
		System.out.print("Positive examples ");
		System.out.println(trainingSet
				.getNumberOfPositiveExamples(positiveClass));
		System.out.print("Negative examples ");
		System.out.println(trainingSet
				.getNumberOfNegativeExamples(positiveClass));

		System.out.println("Test set statistics");
		System.out.print("Examples number ");
		System.out.println(testSet.getNumberOfExamples());
		System.out.print("Positive examples ");
		System.out.println(testSet
				.getNumberOfPositiveExamples(positiveClass));
		System.out.print("Negative examples ");
		System.out.println(testSet
				.getNumberOfNegativeExamples(positiveClass));

		// Instantiate a Passive-Aggressive algorithm.
		KernelizedPassiveAggressiveClassification kPA = new KernelizedPassiveAggressiveClassification();
		// Indicate to the learner what the positive class is.
		kPA.setLabel(positiveClass);
		// Set the aggressiveness parameter.
		kPA.setC(0.01f);

		// Use the first (and only, here) representation.
		Kernel linear = new LinearKernel("0");
		// Normalize the linear kernel.
		NormalizationKernel normalizedKernel = new NormalizationKernel(
				linear);
		// Apply a polynomial kernel of degree 2 on the normalized score
		// computed by the linear kernel.
		Kernel polyKernel = new PolynomialKernel(2f, normalizedKernel);
		// Tell the algorithm to learn with the polynomial kernel.
		kPA.setKernel(polyKernel);

		// Learn and get the prediction function.
		kPA.learn(trainingSet);
		Classifier f = kPA.getPredictionFunction();

		// Classify the test examples and accumulate evaluation counts.
		BinaryClassificationEvaluator ev = new BinaryClassificationEvaluator(positiveClass);
		for (Example e : testSet.getExamples()) {
			// FIX: classify the example being iterated. The original called
			// testSet.getNextExample(), which advances the dataset's internal
			// cursor independently of this loop and can pair a prediction with
			// the wrong gold example in addCount().
			ClassificationOutput p = f.predict(e);
			ev.addCount(e, p);
		}

		System.out
				.println("Accuracy: " +
						ev.getAccuracy());
	} catch (Exception e1) {
		// Example code: report and exit; a library would propagate instead.
		e1.printStackTrace();
	}
}
 
开发者ID:SAG-KeLP-Legacy,项目名称:kelp-examples,代码行数:70,代码来源:HelloKernelLearning.java

示例3: main

import it.uniroma2.sag.kelp.predictionfunction.classifier.Classifier; //导入方法依赖的package包/类
/**
 * Sequence-kernel example: trains a One-vs-All SVM using a normalized
 * SequenceKernel (subsequence length 2, decay 1) with fixed-size kernel
 * caches, then reports multiclass accuracy on the test set.
 */
public static void main(String[] args) {
	try {
		// Read a dataset into a trainingSet variable.
		SimpleDataset trainingSet = new SimpleDataset();
		trainingSet
				.populate("src/main/resources/sequenceKernelExample/sequenceTrain.txt");

		SimpleDataset testSet = new SimpleDataset();
		testSet.populate("src/main/resources/sequenceKernelExample/sequenceTest.txt");

		// Print some statistics.
		System.out.println("Training set statistics");
		System.out.print("Examples number ");
		System.out.println(trainingSet.getNumberOfExamples());

		List<Label> classes = trainingSet.getClassificationLabels();

		for (Label l : classes) {
			System.out.println("Training Label " + l.toString() + " "
					+ trainingSet.getNumberOfPositiveExamples(l));
			System.out.println("Training Label " + l.toString() + " "
					+ trainingSet.getNumberOfNegativeExamples(l));

			System.out.println("Test Label " + l.toString() + " "
					+ testSet.getNumberOfPositiveExamples(l));
			System.out.println("Test Label " + l.toString() + " "
					+ testSet.getNumberOfNegativeExamples(l));
		}

		// Kernel for the first representation ("SEQUENCE" identifier).
		Kernel kernel = new SequenceKernel("SEQUENCE", 2, 1);
		// Normalize the sequence kernel.
		NormalizationKernel normalizedKernel = new NormalizationKernel(
				kernel);
		// Cache squared norms and kernel computations on the base kernel,
		// sized to the training set, to avoid recomputation during learning.
		kernel.setSquaredNormCache(new FixIndexSquaredNormCache(trainingSet.getNumberOfExamples()));
		kernel.setKernelCache(new FixIndexKernelCache(trainingSet.getNumberOfExamples()));
		// Instantiate an SVM solver with equal cost for both classes.
		BinaryCSvmClassification svmSolver = new BinaryCSvmClassification();
		svmSolver.setKernel(normalizedKernel);
		svmSolver.setCp(1);
		svmSolver.setCn(1);

		OneVsAllLearning ovaLearner = new OneVsAllLearning();
		ovaLearner.setBaseAlgorithm(svmSolver);
		ovaLearner.setLabels(classes);

		// Learn and get the prediction function.
		ovaLearner.learn(trainingSet);
		Classifier f = ovaLearner.getPredictionFunction();

		// Classify the test examples and accumulate evaluation counts.
		// Reuse the label list computed above instead of re-querying it.
		MulticlassClassificationEvaluator ev = new MulticlassClassificationEvaluator(
				classes);

		for (Example e : testSet.getExamples()) {
			// FIX: classify the example being iterated. The original called
			// testSet.getNextExample(), which advances the dataset's internal
			// cursor independently of this loop and can pair a prediction with
			// the wrong gold example in addCount().
			ClassificationOutput p = f.predict(e);
			ev.addCount(e, p);
		}

		System.out.println("Accuracy: "
				+ ev.getPerformanceMeasure("accuracy"));
	} catch (Exception e1) {
		// Example code: report and exit; a library would propagate instead.
		e1.printStackTrace();
	}
}
 
开发者ID:SAG-KeLP-Legacy,项目名称:kelp-examples,代码行数:66,代码来源:SequenceKernelExample.java

示例4: main

import it.uniroma2.sag.kelp.predictionfunction.classifier.Classifier; //导入方法依赖的package包/类
/**
 * One-vs-All SVM example on the Iris dataset: trains with a normalized
 * linear kernel and reports mean F1 (overall and for a selected label pair).
 */
public static void main(String[] args) {
	try {
		// Read a dataset into a trainingSet variable.
		SimpleDataset trainingSet = new SimpleDataset();
		trainingSet
				.populate("src/main/resources/iris_dataset/iris_train.klp");

		SimpleDataset testSet = new SimpleDataset();
		testSet.populate("src/main/resources/iris_dataset/iris_test.klp");

		// Print some statistics.
		System.out.println("Training set statistics");
		System.out.print("Examples number ");
		System.out.println(trainingSet.getNumberOfExamples());

		List<Label> classes = trainingSet.getClassificationLabels();

		for (Label l : classes) {
			System.out.println("Training Label " + l.toString() + " "
					+ trainingSet.getNumberOfPositiveExamples(l));
			System.out.println("Training Label " + l.toString() + " "
					+ trainingSet.getNumberOfNegativeExamples(l));

			System.out.println("Test Label " + l.toString() + " "
					+ testSet.getNumberOfPositiveExamples(l));
			System.out.println("Test Label " + l.toString() + " "
					+ testSet.getNumberOfNegativeExamples(l));
		}

		// Kernel for the first representation (0-index).
		Kernel linear = new LinearKernel("0");
		// Normalize the linear kernel.
		NormalizationKernel normalizedKernel = new NormalizationKernel(
				linear);
		// Instantiate an SVM solver with equal cost for both classes.
		BinaryCSvmClassification svmSolver = new BinaryCSvmClassification();
		svmSolver.setKernel(normalizedKernel);
		svmSolver.setCp(1);
		svmSolver.setCn(1);

		OneVsAllLearning ovaLearner = new OneVsAllLearning();
		ovaLearner.setBaseAlgorithm(svmSolver);
		ovaLearner.setLabels(classes);

		// Learn and get the prediction function.
		ovaLearner.learn(trainingSet);
		Classifier f = ovaLearner.getPredictionFunction();

		// Classify the test examples and accumulate evaluation counts.
		// Reuse the label list computed above instead of re-querying it.
		MulticlassClassificationEvaluator ev = new MulticlassClassificationEvaluator(
				classes);

		for (Example e : testSet.getExamples()) {
			// FIX: classify the example being iterated. The original called
			// testSet.getNextExample(), which advances the dataset's internal
			// cursor independently of this loop and can pair a prediction with
			// the wrong gold example in addCount().
			ClassificationOutput p = f.predict(e);
			ev.addCount(e, p);
		}

		// Restrict the "MeanF1For" measure to two of the three Iris labels.
		List<Label> twoLabels = new ArrayList<Label>();
		twoLabels.add(new StringLabel("iris-setosa"));
		twoLabels.add(new StringLabel("iris-virginica"));

		// getPerformanceMeasure takes its extra arguments as Object varargs.
		Object[] as = new Object[1];
		as[0] = twoLabels;

		System.out.println("Mean F1: "
				+ ev.getPerformanceMeasure("MeanF1"));
		System.out.println("Mean F1 For iris-setosa/iris-virginica: "
				+ ev.getPerformanceMeasure("MeanF1For", as));
		System.out.println("F1: "
				+ ev.getPerformanceMeasure("OverallF1"));
	} catch (Exception e1) {
		// Example code: report and exit; a library would propagate instead.
		e1.printStackTrace();
	}
}
 
开发者ID:SAG-KeLP-Legacy,项目名称:kelp-examples,代码行数:75,代码来源:OneVsAllSVMExample.java


注:本文中的it.uniroma2.sag.kelp.predictionfunction.classifier.Classifier.predict方法示例由纯净天空整理自Github/MSDocs等开源代码及文档管理平台,相关代码片段筛选自各路编程大神贡献的开源项目,源码版权归原作者所有,传播和使用请参考对应项目的License;未经允许,请勿转载。