This article collects typical usage examples of the Java class it.uniroma2.sag.kelp.kernel.cache.FixIndexKernelCache. If you are wondering what FixIndexKernelCache does, how to use it, or what real usage looks like, the curated class examples below should help.
The FixIndexKernelCache class belongs to the it.uniroma2.sag.kelp.kernel.cache package. Six code examples of the class are shown below, ordered by popularity.
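Every example below follows the same pattern: build a FixIndexKernelCache sized to the number of training examples and attach it to a kernel via setKernelCache, so pairwise kernel values can be reused instead of recomputed during training. Here is a minimal sketch of that pattern, using only the KeLP API already shown in the examples below (the dataset path is a placeholder):

import it.uniroma2.sag.kelp.kernel.cache.FixIndexKernelCache; // other KeLP imports omitted, as in the examples below

public static void attachCache() throws Exception {
// Read a dataset (placeholder path)
SimpleDataset trainingSet = new SimpleDataset();
trainingSet.populate("path/to/dataset.klp");
// Any kernel works; here a linear kernel over the representation named "0"
Kernel kernel = new LinearKernel("0");
// Size the cache to the dataset so all pairwise values can be stored
kernel.setKernelCache(new FixIndexKernelCache(trainingSet.getNumberOfExamples()));
}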
Example 1: learnModel
import it.uniroma2.sag.kelp.kernel.cache.FixIndexKernelCache; // import the required class
@BeforeClass
public static void learnModel() {
trainingSet = new SimpleDataset();
testSet = new SimpleDataset();
try {
trainingSet.populate("src/test/resources/svmTest/binary/binary_train.klp");
// Read a dataset into a test variable
testSet.populate("src/test/resources/svmTest/binary/binary_test.klp");
} catch (Exception e) {
e.printStackTrace();
Assert.fail();
}
// define the positive class
StringLabel positiveClass = new StringLabel("+1");
// define the kernel
Kernel kernel = new LinearKernel("0");
// add a cache
kernel.setKernelCache(new FixIndexKernelCache(trainingSet
.getNumberOfExamples()));
// define the learning algorithm
BinaryNuSvmClassification learner = new BinaryNuSvmClassification(kernel,
positiveClass, 0.5f);
// learn and get the prediction function
learner.learn(trainingSet);
f = learner.getPredictionFunction();
}
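Example 2 below differs from Example 1 only in the learning algorithm: a C-SVM (BinaryCSvmClassification) replaces the ν-SVM (BinaryNuSvmClassification), while the dataset loading and cache wiring are identical.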
Example 2: learnModel
import it.uniroma2.sag.kelp.kernel.cache.FixIndexKernelCache; // import the required class
@BeforeClass
public static void learnModel() {
trainingSet = new SimpleDataset();
testSet = new SimpleDataset();
try {
trainingSet.populate("src/test/resources/svmTest/binary/binary_train.klp");
// Read a dataset into a test variable
testSet.populate("src/test/resources/svmTest/binary/binary_test.klp");
} catch (Exception e) {
e.printStackTrace();
Assert.fail();
}
// define the positive class
StringLabel positiveClass = new StringLabel("+1");
// define the kernel
Kernel kernel = new LinearKernel("0");
// add a cache
kernel.setKernelCache(new FixIndexKernelCache(trainingSet
.getNumberOfExamples()));
// define the learning algorithm
BinaryCSvmClassification learner = new BinaryCSvmClassification(kernel,
positiveClass, 1, 1);
// learn and get the prediction function
learner.learn(trainingSet);
f = learner.getPredictionFunction();
}
Example 3: learnModel
import it.uniroma2.sag.kelp.kernel.cache.FixIndexKernelCache; // import the required class
@BeforeClass
public static void learnModel() {
trainingSet = new SimpleDataset();
testSet = new SimpleDataset();
try {
trainingSet
.populate("src/test/resources/svmTest/regression/mg_scale.klp");
// Read a dataset into a test variable
testSet.populate("src/test/resources/svmTest/regression/mg_scale.klp");
} catch (Exception e) {
e.printStackTrace();
Assert.fail();
}
// define the regression label
Label label = new StringLabel("r");
// define the kernel
Kernel kernel = new LinearKernel("0");
// add a cache
kernel.setKernelCache(new FixIndexKernelCache(trainingSet
.getNumberOfExamples()));
// define the learning algorithm
EpsilonSvmRegression learner = new EpsilonSvmRegression(kernel, label,
1, 0.1f);
// learn and get the prediction function
learner.learn(trainingSet);
p = learner.getPredictionFunction();
}
Example 4: main
import it.uniroma2.sag.kelp.kernel.cache.FixIndexKernelCache; // import the required class
public static void main(String[] args) throws Exception {
SimpleDataset trainingSet = new SimpleDataset();
trainingSet.populate("src/main/resources/mutag/mutag.txt");
StringLabel positiveClass = new StringLabel("1");
System.out.println("Training set statistics");
System.out.print("Examples number ");
System.out.println(trainingSet.getNumberOfExamples());
System.out.print("Positive examples ");
System.out.println(trainingSet
.getNumberOfPositiveExamples(positiveClass));
System.out.print("Negative examples ");
System.out.println(trainingSet
.getNumberOfNegativeExamples(positiveClass));
WLSubtreeMapper m = new WLSubtreeMapper(GRAPH_REPRESENTATION_NAME, VECTORIAL_LINEARIZATION_NAME, 4);
trainingSet.manipulate(m);
// reuse the positive class defined above
BinaryClassificationEvaluator evaluator = new BinaryClassificationEvaluator(positiveClass);
LinearKernelCombination comb = new LinearKernelCombination();
LinearKernel linear = new LinearKernel(VECTORIAL_LINEARIZATION_NAME);
comb.addKernel(1, linear);
ShortestPathKernel spk = new ShortestPathKernel(GRAPH_REPRESENTATION_NAME);
comb.addKernel(1, spk);
comb.setKernelCache(new FixIndexKernelCache(trainingSet.getNumberOfExamples()));
BinaryCSvmClassification svmSolver = new BinaryCSvmClassification(comb, positiveClass, 1, 1);
float meanAcc = 0;
int nFold = 10;
List<BinaryClassificationEvaluator> evaluators = ExperimentUtils.nFoldCrossValidation(nFold, svmSolver, trainingSet, evaluator);
for (int i = 0; i < nFold; i++) {
float accuracy = evaluators.get(i).getPerformanceMeasure("accuracy");
System.out.println("fold " + (i + 1) + " accuracy: " + accuracy);
meanAcc += accuracy;
}
meanAcc /= nFold;
System.out.println("MEAN ACC: " + meanAcc);
}
Example 5: main
import it.uniroma2.sag.kelp.kernel.cache.FixIndexKernelCache; // import the required class
public static void main(String[] args) throws Exception {
// The epsilon in loss function of the regressor
float pReg = 0.1f;
// The regularization parameter of the regressor
float c = 2f;
// The gamma parameter in the RBF kernel
float gamma = 1f;
// The label indicating the value considered by the regressor
Label label = new StringLabel("r");
// Load the dataset
SimpleDataset dataset = new SimpleDataset();
dataset.populate("src/main/resources/sv_regression_test/mg_scale.klp");
// Split the dataset in train and test datasets
dataset.shuffleExamples(new Random(0));
SimpleDataset[] split = dataset.split(0.7f);
SimpleDataset trainDataset = split[0];
SimpleDataset testDataset = split[1];
// Kernel for the first representation (0-index)
Kernel linear = new LinearKernel("0");
// Applying the RBF kernel
Kernel rbf = new RbfKernel(gamma, linear);
// Applying a cache
FixIndexKernelCache kernelCache = new FixIndexKernelCache(
trainDataset.getNumberOfExamples());
rbf.setKernelCache(kernelCache);
// instantiate the regressor
EpsilonSvmRegression regression = new EpsilonSvmRegression(rbf, label,
c, pReg);
// learn
regression.learn(trainDataset);
// get the prediction function
RegressionFunction regressor = regression.getPredictionFunction();
// initializing the performance evaluator
RegressorEvaluator evaluator = new RegressorEvaluator(
trainDataset.getRegressionProperties());
// For each example from the test set
for (Example e : testDataset.getExamples()) {
// Predict the value
Prediction prediction = regressor.predict(e);
// Print the original and the predicted values
System.out.println("real value: " + e.getRegressionValue(label)
+ "\t-\tpredicted value: " + prediction.getScore(label));
// Update the evaluator
evaluator.addCount(e, prediction);
}
// Get the Mean Squared Error for the targeted label
float meanSquaredError = evaluator.getMeanSquaredError(label);
System.out.println("\nMean Squared Error:\t" + meanSquaredError);
}
Example 6: main
import it.uniroma2.sag.kelp.kernel.cache.FixIndexKernelCache; // import the required class
public static void main(String[] args) {
try {
// Read a dataset into a trainingSet variable
SimpleDataset trainingSet = new SimpleDataset();
trainingSet
.populate("src/main/resources/sequenceKernelExample/sequenceTrain.txt");
SimpleDataset testSet = new SimpleDataset();
testSet.populate("src/main/resources/sequenceKernelExample/sequenceTest.txt");
// print some statistics
System.out.println("Training set statistics");
System.out.print("Examples number ");
System.out.println(trainingSet.getNumberOfExamples());
List<Label> classes = trainingSet.getClassificationLabels();
for (Label l : classes) {
System.out.println("Training Label " + l.toString() + " positive examples: "
+ trainingSet.getNumberOfPositiveExamples(l));
System.out.println("Training Label " + l.toString() + " negative examples: "
+ trainingSet.getNumberOfNegativeExamples(l));
System.out.println("Test Label " + l.toString() + " positive examples: "
+ testSet.getNumberOfPositiveExamples(l));
System.out.println("Test Label " + l.toString() + " negative examples: "
+ testSet.getNumberOfNegativeExamples(l));
}
// Kernel for the first representation (0-index)
Kernel kernel = new SequenceKernel("SEQUENCE", 2, 1);
// Normalize the sequence kernel
NormalizationKernel normalizedKernel = new NormalizationKernel(
kernel);
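// Cache squared norms and kernel values of the base kernel; the
// normalizing wrapper queries both repeatedly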
kernel.setSquaredNormCache(new FixIndexSquaredNormCache(trainingSet.getNumberOfExamples()));
kernel.setKernelCache(new FixIndexKernelCache(trainingSet.getNumberOfExamples()));
// instantiate an svmsolver
BinaryCSvmClassification svmSolver = new BinaryCSvmClassification();
svmSolver.setKernel(normalizedKernel);
svmSolver.setCp(1);
svmSolver.setCn(1);
OneVsAllLearning ovaLearner = new OneVsAllLearning();
ovaLearner.setBaseAlgorithm(svmSolver);
ovaLearner.setLabels(classes);
// learn and get the prediction function
ovaLearner.learn(trainingSet);
Classifier f = ovaLearner.getPredictionFunction();
// classify examples and compute some statistics
MulticlassClassificationEvaluator ev = new MulticlassClassificationEvaluator(
trainingSet.getClassificationLabels());
for (Example e : testSet.getExamples()) {
ClassificationOutput p = f.predict(e);
ev.addCount(e, p);
}
System.out.println("Accuracy: "
+ ev.getPerformanceMeasure("accuracy"));
} catch (Exception e1) {
e1.printStackTrace();
}
}