This page collects typical usage examples of the C# method Accord.MachineLearning.VectorMachines.KernelSupportVectorMachine.Compute. If you are wondering what KernelSupportVectorMachine.Compute does, how to call it, or where it is used in practice, the curated code examples below should help. You can also explore further usage examples of its containing class, Accord.MachineLearning.VectorMachines.KernelSupportVectorMachine.
The sections below show 15 code examples of the KernelSupportVectorMachine.Compute method, ordered by popularity.
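Before diving into the individual examples, here is a minimal sketch of the pattern most of them share: build a KernelSupportVectorMachine from a kernel and an input dimension, train it with a teacher such as SequentialMinimalOptimization, then call Compute on a feature vector and take the sign of the returned score as the predicted class. The data, kernel choice, and Complexity value below are illustrative assumptions, not taken from any example on this page.

using System;
using Accord.MachineLearning.VectorMachines;
using Accord.MachineLearning.VectorMachines.Learning;
using Accord.Statistics.Kernels;

class ComputeSketch
{
    static void Main()
    {
        // Toy two-class problem (illustrative data): points on either side of the line y = x.
        double[][] inputs =
        {
            new double[] { 0, 1 }, new double[] { 1, 2 }, // class +1
            new double[] { 1, 0 }, new double[] { 2, 1 }  // class -1
        };
        int[] labels = { +1, +1, -1, -1 };

        // A kernel machine needs a kernel and the number of input dimensions.
        var machine = new KernelSupportVectorMachine(new Gaussian(1.0), inputs: 2);

        // Train with sequential minimal optimization (Complexity chosen arbitrarily).
        var smo = new SequentialMinimalOptimization(machine, inputs, labels)
        {
            Complexity = 1.0
        };
        smo.Run();

        // Compute returns a real-valued score; its sign is the predicted class.
        double score = machine.Compute(new double[] { 0.5, 1.5 });
        Console.WriteLine(Math.Sign(score)); // expected: +1
    }
}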
Example 1: TrainTest
public void TrainTest()
{
    Accord.Math.Tools.SetupGenerator(0);
    // Example regression problem. Suppose we are trying
    // to model the following equation: f(x, y) = 2x + y
    double[][] inputs = // (x, y)
    {
        new double[] { 0, 1 }, // 2*0 + 1 = 1
        new double[] { 4, 3 }, // 2*4 + 3 = 11
        new double[] { 8, -8 }, // 2*8 - 8 = 8
        new double[] { 2, 2 }, // 2*2 + 2 = 6
        new double[] { 6, 1 }, // 2*6 + 1 = 13
        new double[] { 5, 4 }, // 2*5 + 4 = 14
        new double[] { 9, 1 }, // 2*9 + 1 = 19
        new double[] { 1, 6 }, // 2*1 + 6 = 8
    };
    double[] outputs = // f(x, y)
    {
        1, 11, 8, 6, 13, 14, 19, 8
    };
    // Create Kernel Support Vector Machine with a Polynomial Kernel of 2nd degree
    var machine = new KernelSupportVectorMachine(new Polynomial(2), inputs: 2);
    // Create the sequential minimal optimization teacher
    var learn = new SequentialMinimalOptimizationRegression(machine, inputs, outputs)
    {
        Complexity = 100
    };
    // Run the learning algorithm
    double error = learn.Run();
    // Compute the answer for one particular example
    double fxy = machine.Compute(inputs[0]); // 1.0003849827673186
    // Check for correct answers
    double[] answers = new double[inputs.Length];
    for (int i = 0; i < answers.Length; i++)
        answers[i] = machine.Compute(inputs[i]);
    Assert.AreEqual(1.0, fxy, 1e-2);
    for (int i = 0; i < outputs.Length; i++)
        Assert.AreEqual(outputs[i], answers[i], 1e-2);
}
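A natural follow-up (not part of the original test) would be to query the trained regression machine on a point it has never seen; under the assumption that the polynomial fit generalizes, the prediction should land near the true value of f(x, y) = 2x + y.

    // Hypothetical extra check: an unseen point (x, y) = (3, 4).
    // If the fit generalizes, the result should be close to 2*3 + 4 = 10.
    double unseen = machine.Compute(new double[] { 3, 4 });
    Console.WriteLine(unseen);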
Example 2: LearnTest
public void LearnTest()
{
    double[][] inputs =
    {
        new double[] { -1, -1 },
        new double[] { -1, 1 },
        new double[] { 1, -1 },
        new double[] { 1, 1 }
    };
    int[] xor =
    {
        -1,
        1,
        1,
        -1
    };
    // Create Kernel Support Vector Machine with a Polynomial Kernel of 2nd degree
    KernelSupportVectorMachine machine = new KernelSupportVectorMachine(new Polynomial(2), inputs[0].Length);
    // Create the Least Squares Support Vector Machine teacher
    LeastSquaresLearning learn = new LeastSquaresLearning(machine, inputs, xor);
    learn.Complexity = 10;
    // Run the learning algorithm
    learn.Run();
    int[] output = inputs.Apply(p => Math.Sign(machine.Compute(p)));
    for (int i = 0; i < output.Length; i++)
        Assert.AreEqual(System.Math.Sign(xor[i]), System.Math.Sign(output[i]));
}
Example 3: LearnTest
public void LearnTest()
{
    double[][] inputs =
    {
        new double[] { -1, -1 },
        new double[] { -1, 1 },
        new double[] { 1, -1 },
        new double[] { 1, 1 }
    };
    int[] xor =
    {
        -1,
        1,
        1,
        -1
    };
    // Create Kernel Support Vector Machine with a Polynomial Kernel of 2nd degree
    KernelSupportVectorMachine machine = new KernelSupportVectorMachine(new Polynomial(2), inputs[0].Length);
    // Create the sequential minimal optimization teacher
    SequentialMinimalOptimization learn = new SequentialMinimalOptimization(machine, inputs, xor);
    // Run the learning algorithm
    learn.Run();
    int[] output = inputs.Apply(p => Math.Sign(machine.Compute(p)));
    for (int i = 0; i < output.Length; i++)
        Assert.AreEqual(System.Math.Sign(xor[i]), System.Math.Sign(output[i]));
}
Example 4: ComputeTest
public void ComputeTest()
{
    // Example AND problem
    double[][] inputs =
    {
        new double[] { 0, 0 }, // 0 and 0: 0 (label -1)
        new double[] { 0, 1 }, // 0 and 1: 0 (label -1)
        new double[] { 1, 0 }, // 1 and 0: 0 (label -1)
        new double[] { 1, 1 }  // 1 and 1: 1 (label +1)
    };
    // Dichotomy SVM outputs should be given as [-1;+1]
    int[] labels =
    {
        // 0, 0, 0, 1
        -1, -1, -1, 1
    };
    // Create a Support Vector Machine for the given inputs
    KernelSupportVectorMachine machine = new KernelSupportVectorMachine(new Gaussian(0.1), inputs[0].Length);
    // Instantiate a new learning algorithm for SVMs
    var smo = new SequentialMinimalOptimization(machine, inputs, labels);
    // Set up the learning algorithm
    smo.Complexity = 1.0;
    // Run
    double error = smo.Run();
    Assert.AreEqual(-1, Math.Sign(machine.Compute(inputs[0])));
    Assert.AreEqual(-1, Math.Sign(machine.Compute(inputs[1])));
    Assert.AreEqual(-1, Math.Sign(machine.Compute(inputs[2])));
    Assert.AreEqual(+1, Math.Sign(machine.Compute(inputs[3])));
    Assert.AreEqual(error, 0);
    Assert.AreEqual(-0.6640625, machine.Threshold);
    Assert.AreEqual(1, machine.Weights[0]);
    Assert.AreEqual(-0.34375, machine.Weights[1]);
    Assert.AreEqual(-0.328125, machine.Weights[2]);
    Assert.AreEqual(-0.328125, machine.Weights[3]);
}
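The assertions above inspect exactly the quantities that Compute combines: in the standard kernel SVM formulation, the returned score is the kernel-weighted sum over the support vectors plus the machine's threshold. The helper below reconstructs that decision function from the same public properties used in the test; it is an illustration of the formula, not the library's internal implementation.

    // Illustrative reconstruction of the kernel SVM decision function:
    // f(x) = sum_i Weights[i] * Kernel(SupportVectors[i], x) + Threshold
    static double Decision(KernelSupportVectorMachine m, double[] x)
    {
        double sum = m.Threshold;
        for (int i = 0; i < m.SupportVectors.Length; i++)
            sum += m.Weights[i] * m.Kernel.Function(m.SupportVectors[i], x);
        return sum; // Math.Sign(sum) gives the predicted label
    }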
Example 5: PrintAccuracy
static void PrintAccuracy(string colorName, KernelSupportVectorMachine svm, HSL[] positives, HSL[] negatives)
{
    int numberCorrect = 0;
    for (int c = 0; c < positives.Length; c++)
    {
        double result = svm.Compute(HSLToDouble(positives[c]));
        if (Math.Sign(result) == 1)
        {
            numberCorrect++;
        }
    }
    for (int c = 0; c < negatives.Length; c++)
    {
        double result = svm.Compute(HSLToDouble(negatives[c]));
        if (Math.Sign(result) == -1)
        {
            numberCorrect++;
        }
    }
    Console.WriteLine(colorName + " accuracy is " +
        (numberCorrect / (positives.Length + negatives.Length * 1.0)).ToString());
}
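The HSLToDouble helper referenced above is not included in this snippet. A plausible sketch is given below, assuming an HSL color type that exposes Hue, Saturation, and Luminance and an SVM trained on those three features; the actual feature encoding in the original project may differ.

    // Hypothetical helper (not part of the original snippet): flatten an HSL color
    // into the three-element feature vector the SVM is assumed to expect.
    static double[] HSLToDouble(HSL color)
    {
        return new double[] { color.Hue, color.Saturation, color.Luminance };
    }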
Example 6: FixedWeightsTest
public void FixedWeightsTest()
{
    var dataset = KernelSupportVectorMachineTest.training;
    var inputs = dataset.Submatrix(null, 0, 3);
    var labels = Tools.Scale(0, 1, -1, 1, dataset.GetColumn(4)).ToInt32();
    KernelSupportVectorMachine machine = new KernelSupportVectorMachine(
        Gaussian.Estimate(inputs), inputs[0].Length);
    var smo = new SequentialMinimalOptimization(machine, inputs, labels);
    smo.Complexity = 10;
    double error = smo.Run();
    Assert.AreEqual(0.19047619047619047, error);
    Assert.AreEqual(265.78327637381551, (machine.Kernel as Gaussian).Sigma);
    Assert.AreEqual(29, machine.SupportVectors.Length);
    double[] expectedWeights =
    {
        1.65717694716503, 1.20005456611466, -5.70824245415995, 10,
        10, -2.38755497916487, 10, -8.15723436363058, 10, -10, 10,
        10, -0.188634936781317, -5.4354281009458, -8.48341139483265,
        -5.91105702760141, -5.71489190049223, 10, -2.37289205235858,
        -3.33031262413522, -1.97545116517677, 10, -10, -9.563186799279,
        -3.917941544845, -0.532584110773336, 4.81951847548326, 0.343668292727091,
        -4.34159482731336
    };
    Assert.IsTrue(expectedWeights.IsEqual(machine.Weights, 1e-6));
    int[] actual = new int[labels.Length];
    for (int i = 0; i < actual.Length; i++)
        actual[i] = Math.Sign(machine.Compute(inputs[i]));
    ConfusionMatrix matrix = new ConfusionMatrix(actual, labels);
    Assert.AreEqual(8, matrix.FalseNegatives);
    Assert.AreEqual(0, matrix.FalsePositives);
    Assert.AreEqual(4, matrix.TruePositives);
    Assert.AreEqual(30, matrix.TrueNegatives);
    Assert.AreEqual(1 / 3.0, matrix.Sensitivity);
    Assert.AreEqual(1, matrix.Specificity);
    Assert.AreEqual(0.5, matrix.FScore);
    Assert.AreEqual(0.5129891760425771, matrix.MatthewsCorrelationCoefficient);
}
Example 7: WeightRatioTest
public void WeightRatioTest()
{
    var dataset = KernelSupportVectorMachineTest.training;
    var inputs = dataset.Submatrix(null, 0, 3);
    var labels = Tools.Scale(0, 1, -1, 1, dataset.GetColumn(4)).ToInt32();
    Gaussian kernel = Gaussian.Estimate(inputs);
    {
        var machine = new KernelSupportVectorMachine(kernel, inputs[0].Length);
        var smo = new SequentialMinimalOptimization(machine, inputs, labels);
        smo.Complexity = 1.0;
        smo.WeightRatio = 10;
        double error = smo.Run();
        Assert.AreEqual(1.0, smo.PositiveWeight);
        Assert.AreEqual(0.1, smo.NegativeWeight);
        Assert.AreEqual(0.7142857142857143, error);
        Assert.AreEqual(265.78327637381551, (machine.Kernel as Gaussian).Sigma);
        Assert.AreEqual(39, machine.SupportVectors.Length);
        int[] actual = new int[labels.Length];
        for (int i = 0; i < actual.Length; i++)
            actual[i] = Math.Sign(machine.Compute(inputs[i]));
        ConfusionMatrix matrix = new ConfusionMatrix(actual, labels);
        Assert.AreEqual(12, matrix.TruePositives); // has more importance
        Assert.AreEqual(0, matrix.FalseNegatives); // has more importance
        Assert.AreEqual(30, matrix.FalsePositives);
        Assert.AreEqual(0, matrix.TrueNegatives);
        Assert.AreEqual(1.0, matrix.Sensitivity);
        Assert.AreEqual(0.0, matrix.Specificity);
        Assert.AreEqual(0.44444444444444448, matrix.FScore);
        Assert.AreEqual(0.0, matrix.MatthewsCorrelationCoefficient);
    }
    {
        var machine = new KernelSupportVectorMachine(kernel, inputs[0].Length);
        var smo = new SequentialMinimalOptimization(machine, inputs, labels);
        smo.Complexity = 1.0;
        smo.WeightRatio = 0.1;
        double error = smo.Run();
        Assert.AreEqual(0.1, smo.PositiveWeight);
        Assert.AreEqual(1.0, smo.NegativeWeight);
        Assert.AreEqual(0.21428571428571427, error);
        Assert.AreEqual(265.78327637381551, (machine.Kernel as Gaussian).Sigma);
        Assert.AreEqual(18, machine.SupportVectors.Length);
        int[] actual = new int[labels.Length];
        for (int i = 0; i < actual.Length; i++)
            actual[i] = Math.Sign(machine.Compute(inputs[i]));
        ConfusionMatrix matrix = new ConfusionMatrix(actual, labels);
        Assert.AreEqual(8, matrix.FalseNegatives);
        Assert.AreEqual(1, matrix.FalsePositives); // has more importance
        Assert.AreEqual(4, matrix.TruePositives);
        Assert.AreEqual(29, matrix.TrueNegatives); // has more importance
        Assert.AreEqual(0.33333333333333331, matrix.Sensitivity);
        Assert.AreEqual(0.96666666666666667, matrix.Specificity);
        Assert.AreEqual(0.47058823529411764, matrix.FScore);
        Assert.AreEqual(0.41849149947774944, matrix.MatthewsCorrelationCoefficient);
    }
}
Example 8: UseClassProportionsTest
public void UseClassProportionsTest()
{
    var dataset = KernelSupportVectorMachineTest.training;
    var inputs = dataset.Submatrix(null, 0, 3);
    var labels = Tools.Scale(0, 1, -1, 1, dataset.GetColumn(4)).ToInt32();
    Gaussian kernel = Gaussian.Estimate(inputs);
    var machine = new KernelSupportVectorMachine(kernel, inputs[0].Length);
    var smo = new SequentialMinimalOptimization(machine, inputs, labels);
    smo.Complexity = 1.0;
    smo.UseClassProportions = true;
    double error = smo.Run();
    Assert.AreEqual(1, smo.Complexity);
    Assert.AreEqual(0.4, smo.PositiveWeight);
    Assert.AreEqual(1.0, smo.NegativeWeight);
    Assert.AreEqual(0.4, smo.WeightRatio, 1e-10);
    Assert.AreEqual(0.2857142857142857, error);
    Assert.AreEqual(265.78327637381551, (machine.Kernel as Gaussian).Sigma);
    Assert.AreEqual(26, machine.SupportVectors.Length);
    int[] actual = new int[labels.Length];
    for (int i = 0; i < actual.Length; i++)
        actual[i] = Math.Sign(machine.Compute(inputs[i]));
    ConfusionMatrix matrix = new ConfusionMatrix(actual, labels);
    Assert.AreEqual(12, matrix.FalseNegatives);
    Assert.AreEqual(0, matrix.FalsePositives);
    Assert.AreEqual(0, matrix.TruePositives);
    Assert.AreEqual(30, matrix.TrueNegatives);
}
Example 9: testWeights
private static void testWeights(double[][] inputs, int[] labels, IKernel kernel)
{
    {
        var machine = new KernelSupportVectorMachine(kernel, inputs[0].Length);
        var smo = new SequentialMinimalOptimization(machine, inputs, labels);
        smo.PositiveWeight = 100;
        smo.NegativeWeight = 1;
        double error = smo.Run();
        int[] actual = new int[labels.Length];
        for (int i = 0; i < actual.Length; i++)
            actual[i] = Math.Sign(machine.Compute(inputs[i]));
        ConfusionMatrix matrix = new ConfusionMatrix(actual, labels);
        Assert.AreEqual(50, matrix.TruePositives); // has more importance
        Assert.AreEqual(0, matrix.FalseNegatives); // has more importance
    }
    {
        var machine = new KernelSupportVectorMachine(kernel, inputs[0].Length);
        var smo = new SequentialMinimalOptimization(machine, inputs, labels);
        smo.PositiveWeight = 1;
        smo.NegativeWeight = 100;
        double error = smo.Run();
        int[] actual = new int[labels.Length];
        for (int i = 0; i < actual.Length; i++)
            actual[i] = Math.Sign(machine.Compute(inputs[i]));
        var matrix = new ConfusionMatrix(actual, labels);
        Assert.AreEqual(50, matrix.TrueNegatives); // has more importance
        Assert.AreEqual(0, matrix.FalsePositives); // has more importance
    }
}
Example 10: ComputeTest6
public void ComputeTest6()
{
    double[][] inputs = training.Submatrix(null, 0, 3);
    int[] labels = Tools.Scale(0, 1, -1, 1, training.GetColumn(4)).ToInt32();
    Gaussian kernel = Gaussian.Estimate(inputs);
    {
        var machine = new KernelSupportVectorMachine(kernel, inputs[0].Length);
        var smo = new SequentialMinimalOptimization(machine, inputs, labels);
        smo.Complexity = 1.0;
        smo.WeightRatio = 30 / 12.0;
        double error = smo.Run();
        Assert.AreEqual(1, smo.PositiveWeight);
        Assert.AreEqual(0.4, smo.NegativeWeight);
        Assert.AreEqual(0.21428571428571427, error);
        Assert.AreEqual(265.78327637381551, (machine.Kernel as Gaussian).Sigma);
        Assert.AreEqual(34, machine.SupportVectors.Length);
        int[] actual = new int[labels.Length];
        for (int i = 0; i < actual.Length; i++)
            actual[i] = Math.Sign(machine.Compute(inputs[i]));
        ConfusionMatrix matrix = new ConfusionMatrix(actual, labels);
        Assert.AreEqual(9, matrix.FalseNegatives);
        Assert.AreEqual(0, matrix.FalsePositives);
        Assert.AreEqual(3, matrix.TruePositives);
        Assert.AreEqual(30, matrix.TrueNegatives);
        Assert.AreEqual(0.25, matrix.Sensitivity);
        Assert.AreEqual(1.0, matrix.Specificity);
        Assert.AreEqual(0.4, matrix.FScore);
        Assert.AreEqual(0.4385290096535146, matrix.MatthewsCorrelationCoefficient);
    }
    {
        var machine = new KernelSupportVectorMachine(kernel, inputs[0].Length);
        var smo = new SequentialMinimalOptimization(machine, inputs, labels);
        smo.Complexity = 1.0;
        smo.WeightRatio = 12 / 30.0;
        double error = smo.Run();
        Assert.AreEqual(0.4, smo.PositiveWeight);
        Assert.AreEqual(1.0, smo.NegativeWeight);
        Assert.AreEqual(0.38095238095238093, error);
        Assert.AreEqual(265.78327637381551, (machine.Kernel as Gaussian).Sigma);
        Assert.AreEqual(32, machine.SupportVectors.Length);
        int[] actual = new int[labels.Length];
        for (int i = 0; i < actual.Length; i++)
            actual[i] = Math.Sign(machine.Compute(inputs[i]));
        ConfusionMatrix matrix = new ConfusionMatrix(actual, labels);
        Assert.AreEqual(7, matrix.FalseNegatives);
        Assert.AreEqual(9, matrix.FalsePositives);
        Assert.AreEqual(5, matrix.TruePositives);
        Assert.AreEqual(21, matrix.TrueNegatives);
        Assert.AreEqual(0.41666666666666669, matrix.Sensitivity);
        Assert.AreEqual(0.7, matrix.Specificity);
        Assert.AreEqual(0.38461538461538458, matrix.FScore);
        Assert.AreEqual(0.11180339887498948, matrix.MatthewsCorrelationCoefficient);
    }
}
Example 11: DynamicalTimeWarpingConstructorTest2
public void DynamicalTimeWarpingConstructorTest2()
{
    // Declare some testing data
    double[][] inputs =
    {
        // Class -1
        new double[] { 0, 1, 1, 0 },
        new double[] { 0, 0, 1, 0 },
        new double[] { 0, 1, 1, 1, 0 },
        new double[] { 0, 1, 0 },
        // Class +1
        new double[] { 1, 0, 0, 1 },
        new double[] { 1, 1, 0, 1 },
        new double[] { 1, 0, 0, 0, 1 },
        new double[] { 1, 0, 1 },
        new double[] { 1, 0, 0, 0, 1, 1 }
    };
    int[] outputs =
    {
        -1, -1, -1, -1,   // First four sequences are of class -1
         1,  1,  1,  1, 1 // Last five sequences are of class +1
    };
    // Set the parameters of the kernel
    double alpha = 1.0;
    int degree = 1;
    int innerVectorLength = 1;
    // Create the kernel. The Dynamic Time Warping kernel accepts sequences of variable length.
    DynamicTimeWarping target = new DynamicTimeWarping(innerVectorLength, alpha, degree);
    // When using variable-length kernels, specify 0 as the input length.
    KernelSupportVectorMachine svm = new KernelSupportVectorMachine(target, 0);
    // Create the Sequential Minimal Optimization teacher as usual
    SequentialMinimalOptimization smo = new SequentialMinimalOptimization(svm, inputs, outputs);
    smo.Complexity = 1.5;
    double error = smo.Run();
    // Check if the model has learnt the sequences correctly.
    for (int i = 0; i < inputs.Length; i++)
    {
        int expected = outputs[i];
        int actual = System.Math.Sign(svm.Compute(inputs[i]));
        Assert.AreEqual(expected, actual);
    }
    // Testing new sequences
    Assert.AreEqual(-1, System.Math.Sign(svm.Compute(new double[] { 0, 1, 1, 0, 0 })));
    Assert.AreEqual(+1, System.Math.Sign(svm.Compute(new double[] { 1, 1, 0, 0, 1, 1 })));
}
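For sequences whose samples are themselves vectors, the same pattern would apply; the sketch below assumes (as the kernel constructor's inner-vector-length argument suggests) that multi-dimensional samples are laid out back-to-back in a single flat array. This layout is an assumption for illustration, not something demonstrated by the test above.

    // Hypothetical variation: sequences of 2-D samples. The kernel is created with an
    // inner vector length of 2, and each sequence is passed as one flat array
    // (the components of each sample laid out consecutively).
    var kernel2d = new DynamicTimeWarping(2, 1.0, 1);
    var svm2d = new KernelSupportVectorMachine(kernel2d, 0); // 0 = variable-length inputs
    double[] sequence = { 0.0, 1.0,  0.5, 0.8,  1.0, 0.2 };  // three 2-D samples
    // Training with SequentialMinimalOptimization would then proceed exactly as above.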
Example 12: ComputeTest5
public void ComputeTest5()
{
    var dataset = yinyang;
    double[][] inputs = dataset.Submatrix(null, 0, 1).ToArray();
    int[] labels = dataset.GetColumn(2).ToInt32();
    {
        Linear kernel = new Linear();
        var machine = new KernelSupportVectorMachine(kernel, inputs[0].Length);
        var smo = new SequentialMinimalOptimization(machine, inputs, labels);
        smo.Complexity = 1.0;
        double error = smo.Run();
        Assert.AreEqual(1.0, smo.Complexity);
        Assert.AreEqual(1.0, smo.WeightRatio);
        Assert.AreEqual(1.0, smo.NegativeWeight);
        Assert.AreEqual(1.0, smo.PositiveWeight);
        Assert.AreEqual(0.14, error);
        Assert.AreEqual(30, machine.SupportVectors.Length);
        double[] actualWeights = machine.Weights;
        double[] expectedWeights = { -1, -1, 1, -1, -1, 1, 1, -1, 1, -1, 1, 1, -1, 0.337065120144639, -1, 1, -0.337065120144639, -1, 1, 1, -1, 1, 1, -1, -1, 1, 1, -1, -1, 1 };
        Assert.IsTrue(expectedWeights.IsEqual(actualWeights, 1e-10));
        int[] actual = new int[labels.Length];
        for (int i = 0; i < actual.Length; i++)
            actual[i] = Math.Sign(machine.Compute(inputs[i]));
        ConfusionMatrix matrix = new ConfusionMatrix(actual, labels);
        Assert.AreEqual(7, matrix.FalseNegatives);
        Assert.AreEqual(7, matrix.FalsePositives);
        Assert.AreEqual(43, matrix.TruePositives);
        Assert.AreEqual(43, matrix.TrueNegatives);
    }
    {
        Linear kernel = new Linear();
        var machine = new KernelSupportVectorMachine(kernel, inputs[0].Length);
        var smo = new SequentialMinimalOptimization(machine, inputs, labels);
        smo.Complexity = 1.0;
        smo.PositiveWeight = 0.3;
        smo.NegativeWeight = 1.0;
        double error = smo.Run();
        Assert.AreEqual(1.0, smo.Complexity);
        Assert.AreEqual(0.3 / 1.0, smo.WeightRatio);
        Assert.AreEqual(1.0, smo.NegativeWeight);
        Assert.AreEqual(0.3, smo.PositiveWeight);
        Assert.AreEqual(0.21, error);
        Assert.AreEqual(24, machine.SupportVectors.Length);
        double[] actualWeights = machine.Weights;
        //string str = actualWeights.ToString(Accord.Math.Formats.CSharpArrayFormatProvider.InvariantCulture);
        double[] expectedWeights = { -0.771026323762095, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -0.928973676237905, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1 };
        Assert.IsTrue(expectedWeights.IsEqual(actualWeights, 1e-10));
        int[] actual = new int[labels.Length];
        for (int i = 0; i < actual.Length; i++)
            actual[i] = (int)machine.Compute(inputs[i]);
        ConfusionMatrix matrix = new ConfusionMatrix(actual, labels);
        Assert.AreEqual(50, matrix.FalseNegatives);
        Assert.AreEqual(0, matrix.FalsePositives);
        Assert.AreEqual(0, matrix.TruePositives);
        Assert.AreEqual(50, matrix.TrueNegatives);
    }
    {
        Linear kernel = new Linear();
        var machine = new KernelSupportVectorMachine(kernel, inputs[0].Length);
        var smo = new SequentialMinimalOptimization(machine, inputs, labels);
        smo.Complexity = 1.0;
        smo.PositiveWeight = 1.0;
        smo.NegativeWeight = 0.3;
        double error = smo.Run();
        Assert.AreEqual(1.0, smo.Complexity);
        Assert.AreEqual(1.0 / 0.3, smo.WeightRatio);
        Assert.AreEqual(0.3, smo.NegativeWeight);
        Assert.AreEqual(1.0, smo.PositiveWeight);
        Assert.AreEqual(0.15, error);
        Assert.AreEqual(19, machine.SupportVectors.Length);
        double[] actualWeights = machine.Weights;
        double[] expectedWeights = new double[] { 1, 1, -0.3, 1, -0.3, 1, 1, -0.3, 1, 1, 1, 1, 1, 1, 1, 1, 0.129080057278249, 1, 0.737797469918795 };
        Assert.IsTrue(expectedWeights.IsEqual(actualWeights, 1e-10));
        int[] actual = new int[labels.Length];
        for (int i = 0; i < actual.Length; i++)
            actual[i] = Math.Sign(machine.Compute(inputs[i]));
//......... the remainder of this example is omitted here .........
Example 13: TransformTest
public void TransformTest()
{
    var inputs = yinyang.Submatrix(null, 0, 1).ToArray();
    var labels = yinyang.GetColumn(2).ToInt32();
    ConfusionMatrix actual, expected;
    SequentialMinimalOptimization a, b;
    var kernel = new Polynomial(2, 0);
    {
        var machine = new KernelSupportVectorMachine(kernel, inputs[0].Length);
        a = new SequentialMinimalOptimization(machine, inputs, labels);
        a.UseComplexityHeuristic = true;
        a.Run();
        int[] values = new int[labels.Length];
        for (int i = 0; i < values.Length; i++)
            values[i] = Math.Sign(machine.Compute(inputs[i]));
        expected = new ConfusionMatrix(values, labels);
    }
    {
        var projection = inputs.Apply(kernel.Transform);
        var machine = new SupportVectorMachine(projection[0].Length);
        b = new SequentialMinimalOptimization(machine, projection, labels);
        b.UseComplexityHeuristic = true;
        b.Run();
        int[] values = new int[labels.Length];
        for (int i = 0; i < values.Length; i++)
            values[i] = Math.Sign(machine.Compute(projection[i]));
        actual = new ConfusionMatrix(values, labels);
    }
    Assert.AreEqual(a.Complexity, b.Complexity, 1e-15);
    Assert.AreEqual(expected.TrueNegatives, actual.TrueNegatives);
    Assert.AreEqual(expected.TruePositives, actual.TruePositives);
    Assert.AreEqual(expected.FalseNegatives, actual.FalseNegatives);
    Assert.AreEqual(expected.FalsePositives, actual.FalsePositives);
}
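The equivalence this test checks rests on the defining property of an explicit feature map: the kernel value of two vectors should equal the dot product of their transformed images. A small illustrative check of that identity, reusing the kernel and inputs from above (not part of the original test):

    // k(x, y) should match the dot product of the explicit projections phi(x) . phi(y).
    double[] x = inputs[0], y = inputs[1];
    double viaKernel = kernel.Function(x, y);
    double[] px = kernel.Transform(x), py = kernel.Transform(y);
    double viaDot = 0;
    for (int i = 0; i < px.Length; i++)
        viaDot += px[i] * py[i];
    // viaKernel and viaDot should agree up to floating-point error.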
Example 14: RunTest1
public void RunTest1()
{
    double[][] inputs =
    {
        new double[] { -1, -1 },
        new double[] { -1, 1 },
        new double[] { 1, -1 },
        new double[] { 1, 1 }
    };
    int[] outputs =
    {
        -1,
        1,
        1,
        -1
    };
    KernelSupportVectorMachine svm = new KernelSupportVectorMachine(new Gaussian(3.6), 2);
    SequentialMinimalOptimization smo = new SequentialMinimalOptimization(svm, inputs, outputs);
    double error1 = smo.Run();
    Assert.AreEqual(0, error1);
    double[] distances = new double[outputs.Length];
    for (int i = 0; i < outputs.Length; i++)
    {
        int y = svm.Compute(inputs[i], out distances[i]);
        Assert.AreEqual(outputs[i], y);
    }
    ProbabilisticOutputCalibration target = new ProbabilisticOutputCalibration(svm, inputs, outputs);
    double ll0 = target.LogLikelihood(inputs, outputs);
    double ll1 = target.Run();
    double ll2 = target.LogLikelihood(inputs, outputs);
    Assert.AreEqual(3.4256203116918824, ll1);
    Assert.AreEqual(ll1, ll2);
    Assert.IsTrue(ll1 > ll0);
    double[] probs = new double[outputs.Length];
    for (int i = 0; i < outputs.Length; i++)
    {
        int y = svm.Compute(inputs[i], out probs[i]);
        Assert.AreEqual(outputs[i], y);
    }
    Assert.AreEqual(0.25, probs[0], 1e-5);
    Assert.AreEqual(0.75, probs[1], 1e-5);
    Assert.AreEqual(0.75, probs[2], 1e-5);
    Assert.AreEqual(0.25, probs[3], 1e-5);
    foreach (var p in probs)
        Assert.IsFalse(Double.IsNaN(p));
}
Example 15: RunTest3
public void RunTest3()
{
    // Example XOR problem (the labels below map XOR = 0 to +1 and XOR = 1 to -1)
    double[][] inputs =
    {
        new double[] { 0, 0 }, // 0 xor 0: 0 (label +1)
        new double[] { 0, 1 }, // 0 xor 1: 1 (label -1)
        new double[] { 1, 0 }, // 1 xor 0: 1 (label -1)
        new double[] { 1, 1 }  // 1 xor 1: 0 (label +1)
    };
    // Dichotomy SVM outputs should be given as [-1;+1]
    int[] labels =
    {
        1, -1, -1, 1
    };
    // Create a Kernel Support Vector Machine for the given inputs
    KernelSupportVectorMachine svm = new KernelSupportVectorMachine(new Gaussian(0.1), inputs[0].Length);
    // Instantiate a new learning algorithm for SVMs
    SequentialMinimalOptimization smo = new SequentialMinimalOptimization(svm, inputs, labels);
    // Set up the learning algorithm
    smo.Complexity = 1.0;
    // Run the learning algorithm
    double error = smo.Run();
    // Instantiate the probabilistic output calibration
    ProbabilisticOutputCalibration calibration = new ProbabilisticOutputCalibration(svm, inputs, labels);
    // Run the calibration algorithm
    double loglikelihood = calibration.Run();
    // Compute the decision output for one of the input vectors,
    // while also retrieving the probability of the answer
    double probability;
    int decision = svm.Compute(inputs[0], out probability);
    // At this point, decision is +1 with a probability of 75%
    Assert.AreEqual(1, decision);
    Assert.AreEqual(0.74999975815069375, probability);
}