本文整理汇总了C#中System.Matrix.GetRows方法的典型用法代码示例。如果您正苦于以下问题:C# Matrix.GetRows方法的具体用法?C# Matrix.GetRows怎么用?C# Matrix.GetRows使用的例子?那么恭喜您,这里精选的方法代码示例或许可以为您提供帮助。您也可以进一步了解该方法所在类System.Matrix的用法示例。
在下文中一共展示了Matrix.GetRows方法的15个代码示例,这些例子默认根据受欢迎程度排序。您可以为喜欢或者感觉有用的代码点赞,您的评价将有助于系统推荐出更棒的C#代码示例。
示例1: TestConcate
/// <summary>
/// Verifies that a matrix sliced into sub-rectangles can be reassembled with
/// ConcateVertical / ConcateHorizontal, and that RemoveRows is equivalent to
/// concatenating the row bands around the removed range.
/// </summary>
public void TestConcate()
{
Matrix<float> source = new Matrix<float>(30, 40);
source.SetRandUniform(new MCvScalar(0), new MCvScalar(255));
// Split into top (rows 0..19) and bottom bands, then re-stack vertically.
Matrix<float> topBand = source.GetSubRect(new Rectangle(0, 0, source.Cols, 20));
Matrix<float> bottomBand = source.GetSubRect(new Rectangle(0, 20, source.Cols, source.Rows - 20));
Matrix<float> restacked = topBand.ConcateVertical(bottomBand);
Assert.IsTrue(source.Equals(restacked));
// Split into left (cols 0..9) and right bands, then re-join horizontally.
Matrix<float> leftBand = source.GetSubRect(new Rectangle(0, 0, 10, source.Rows));
Matrix<float> rightBand = source.GetSubRect(new Rectangle(10, 0, source.Cols - 10, source.Rows));
Matrix<float> rejoined = leftBand.ConcateHorizontal(rightBand);
Assert.IsTrue(source.Equals(rejoined));
// Removing row 5 should leave exactly rows [0,5) stacked on rows [6,end).
Matrix<float> rowsBeforeGap = source.GetRows(0, 5, 1);
Matrix<float> removedBand = source.GetRows(5, 6, 1);
Matrix<float> rowsAfterGap = source.GetRows(6, source.Rows, 1);
Assert.IsTrue(source.RemoveRows(5, 6).Equals(rowsBeforeGap.ConcateVertical(rowsAfterGap)));
// Removing the first / last row matches the corresponding GetRows slice.
Assert.IsTrue(source.RemoveRows(0, 1).Equals(source.GetRows(1, source.Rows, 1)));
Assert.IsTrue(source.RemoveRows(source.Rows - 1, source.Rows).Equals(source.GetRows(0, source.Rows - 1, 1)));
}
示例2: TestSVM
/// <summary>
/// Trains an SVM (linear kernel, C_SVC, parameters tuned by TrainAuto with
/// 5-fold cross-validation) on three synthetic 2-D Gaussian clusters, colors
/// every pixel of a 500x500 image by its predicted class, then overlays the
/// support vectors (gray circles) and the original training samples.
/// </summary>
public void TestSVM()
{
int trainSampleCount = 150;
// Standard deviation shared by all three Gaussian clusters.
int sigma = 60;
#region Generate the training data and classes
Matrix<float> trainData = new Matrix<float>(trainSampleCount, 2);
Matrix<float> trainClasses = new Matrix<float>(trainSampleCount, 1);
Image<Bgr, Byte> img = new Image<Bgr, byte>(500, 500);
// One reusable 1x2 feature row for per-pixel prediction.
Matrix<float> sample = new Matrix<float>(1, 2);
// Cluster 1 (first third of rows): x ~ N(100, sigma), y ~ N(300, sigma).
Matrix<float> trainData1 = trainData.GetRows(0, trainSampleCount / 3, 1);
trainData1.GetCols(0, 1).SetRandNormal(new MCvScalar(100), new MCvScalar(sigma));
trainData1.GetCols(1, 2).SetRandNormal(new MCvScalar(300), new MCvScalar(sigma));
// Cluster 2 (middle third): both coordinates ~ N(400, sigma).
Matrix<float> trainData2 = trainData.GetRows(trainSampleCount / 3, 2 * trainSampleCount / 3, 1);
trainData2.SetRandNormal(new MCvScalar(400), new MCvScalar(sigma));
// Cluster 3 (last third): x ~ N(300, sigma), y ~ N(100, sigma).
Matrix<float> trainData3 = trainData.GetRows(2 * trainSampleCount / 3, trainSampleCount, 1);
trainData3.GetCols(0, 1).SetRandNormal(new MCvScalar(300), new MCvScalar(sigma));
trainData3.GetCols(1, 2).SetRandNormal(new MCvScalar(100), new MCvScalar(sigma));
// Label the three row bands 1, 2 and 3 respectively.
Matrix<float> trainClasses1 = trainClasses.GetRows(0, trainSampleCount / 3, 1);
trainClasses1.SetValue(1);
Matrix<float> trainClasses2 = trainClasses.GetRows(trainSampleCount / 3, 2 * trainSampleCount / 3, 1);
trainClasses2.SetValue(2);
Matrix<float> trainClasses3 = trainClasses.GetRows(2 * trainSampleCount / 3, trainSampleCount, 1);
trainClasses3.SetValue(3);
#endregion
using (SVM model = new SVM())
{
SVMParams p = new SVMParams();
p.KernelType = Emgu.CV.ML.MlEnum.SVM_KERNEL_TYPE.LINEAR;
p.SVMType = Emgu.CV.ML.MlEnum.SVM_TYPE.C_SVC;
p.C = 1;
p.TermCrit = new MCvTermCriteria(100, 0.00001);
//bool trained = model.Train(trainData, trainClasses, null, null, p);
// TrainAuto cross-validates (5 folds) and picks the parameters itself.
bool trained = model.TrainAuto(trainData, trainClasses, null, null, p.MCvSVMParams, 5);
model.Save("svmModel.xml");
// Color each pixel by the class predicted at (x = j, y = i).
for (int i = 0; i < img.Height; i++)
{
for (int j = 0; j < img.Width; j++)
{
sample.Data[0, 0] = j;
sample.Data[0, 1] = i;
float response = model.Predict(sample);
img[i, j] =
response == 1 ? new Bgr(90, 0, 0) :
response == 2 ? new Bgr(0, 90, 0) :
new Bgr(0, 0, 90);
}
}
// Mark each support vector with a gray circle.
int c = model.GetSupportVectorCount();
for (int i = 0; i < c; i++)
{
float[] v = model.GetSupportVector(i);
PointF p1 = new PointF(v[0], v[1]);
img.Draw(new CircleF(p1, 4), new Bgr(128, 128, 128), 2);
}
}
// display the original training samples
for (int i = 0; i < (trainSampleCount / 3); i++)
{
PointF p1 = new PointF(trainData1[i, 0], trainData1[i, 1]);
img.Draw(new CircleF(p1, 2.0f), new Bgr(255, 100, 100), -1);
PointF p2 = new PointF(trainData2[i, 0], trainData2[i, 1]);
img.Draw(new CircleF(p2, 2.0f), new Bgr(100, 255, 100), -1);
PointF p3 = new PointF(trainData3[i, 0], trainData3[i, 1]);
img.Draw(new CircleF(p3, 2.0f), new Bgr(100, 100, 255), -1);
}
}
示例3: TestRTreesLetterRecognition
/// <summary>
/// Trains a random forest (RTrees, legacy MCvRTParams API) on the letter
/// recognition dataset, using the first 80% of rows for training, then
/// reports train/test accuracy and the per-variable importance.
/// </summary>
public void TestRTreesLetterRecognition()
{
Matrix<float> data, response;
ReadLetterRecognitionData(out data, out response);
// First 80% of the rows are used for training; the rest for testing.
int trainingSampleCount = (int)(data.Rows * 0.8);
// varType has one entry per feature column plus one for the response.
Matrix<Byte> varType = new Matrix<byte>(data.Cols + 1, 1);
varType.SetValue((byte)MlEnum.VAR_TYPE.NUMERICAL); //the data is numerical
varType[data.Cols, 0] = (byte) MlEnum.VAR_TYPE.CATEGORICAL; //the response is categorical
// sampleIdx is a mask: 255 marks a row as part of the training set.
Matrix<byte> sampleIdx = new Matrix<byte>(data.Rows, 1);
using (Matrix<byte> sampleRows = sampleIdx.GetRows(0, trainingSampleCount, 1))
sampleRows.SetValue(255);
MCvRTParams param = new MCvRTParams();
param.maxDepth = 10;
param.minSampleCount = 10;
param.regressionAccuracy = 0.0f;
param.useSurrogates = false;
param.maxCategories = 15;
param.priors = IntPtr.Zero;
param.calcVarImportance = true;
param.nactiveVars = 4;
// Stop on iteration count only (epsilon is ignored with CV_TERMCRIT_ITER).
param.termCrit = new MCvTermCriteria(100, 0.01f);
param.termCrit.type = Emgu.CV.CvEnum.TERMCRIT.CV_TERMCRIT_ITER;
using (RTrees forest = new RTrees())
{
bool success = forest.Train(
data,
Emgu.CV.ML.MlEnum.DATA_LAYOUT_TYPE.ROW_SAMPLE,
response,
null,
sampleIdx,
varType,
null,
param);
if (!success) return;
// Count exact predictions separately for the train and test partitions.
double trainDataCorrectRatio = 0;
double testDataCorrectRatio = 0;
for (int i = 0; i < data.Rows; i++)
{
using (Matrix<float> sample = data.GetRow(i))
{
double r = forest.Predict(sample, null);
r = Math.Abs(r - response[i, 0]);
if (r < 1.0e-5)
{
if (i < trainingSampleCount)
trainDataCorrectRatio++;
else
testDataCorrectRatio++;
}
}
}
trainDataCorrectRatio /= trainingSampleCount;
testDataCorrectRatio /= (data.Rows - trainingSampleCount);
StringBuilder builder = new StringBuilder("Variable Importance: ");
using (Matrix<float> varImportance = forest.VarImportance)
{
for (int i = 0; i < varImportance.Cols; i++)
{
builder.AppendFormat("{0} ", varImportance[0, i]);
}
}
Trace.WriteLine(String.Format("Prediction accuracy for training data :{0}%", trainDataCorrectRatio*100));
Trace.WriteLine(String.Format("Prediction accuracy for test data :{0}%", testDataCorrectRatio*100));
Trace.WriteLine(builder.ToString());
}
}
示例4: TestNormalBayesClassifier
/// <summary>
/// Trains a normal Bayes classifier on three synthetic Gaussian clusters,
/// saves the model, shades every pixel of a 500x500 image with the dimmed
/// color of its predicted class, then overlays the training samples.
/// </summary>
public void TestNormalBayesClassifier()
{
// Index 0..2 maps to class labels 1..3 below (colors[response - 1]).
Bgr[] colors = new Bgr[] {
new Bgr(0, 0, 255),
new Bgr(0, 255, 0),
new Bgr(255, 0, 0)};
int trainSampleCount = 150;
#region Generate the training data and classes
Matrix<float> trainData = new Matrix<float>(trainSampleCount, 2);
Matrix<int> trainClasses = new Matrix<int>(trainSampleCount, 1);
Image<Bgr, Byte> img = new Image<Bgr, byte>(500, 500);
Matrix<float> sample = new Matrix<float>(1, 2);
// Three clusters centered at (100,300), (400,400) and (300,100), sigma 50.
Matrix<float> trainData1 = trainData.GetRows(0, trainSampleCount / 3, 1);
trainData1.GetCols(0, 1).SetRandNormal(new MCvScalar(100), new MCvScalar(50));
trainData1.GetCols(1, 2).SetRandNormal(new MCvScalar(300), new MCvScalar(50));
Matrix<float> trainData2 = trainData.GetRows(trainSampleCount / 3, 2 * trainSampleCount / 3, 1);
trainData2.SetRandNormal(new MCvScalar(400), new MCvScalar(50));
Matrix<float> trainData3 = trainData.GetRows(2 * trainSampleCount / 3, trainSampleCount, 1);
trainData3.GetCols(0, 1).SetRandNormal(new MCvScalar(300), new MCvScalar(50));
trainData3.GetCols(1, 2).SetRandNormal(new MCvScalar(100), new MCvScalar(50));
// Label the three row bands 1, 2 and 3.
Matrix<int> trainClasses1 = trainClasses.GetRows(0, trainSampleCount / 3, 1);
trainClasses1.SetValue(1);
Matrix<int> trainClasses2 = trainClasses.GetRows(trainSampleCount / 3, 2 * trainSampleCount / 3, 1);
trainClasses2.SetValue(2);
Matrix<int> trainClasses3 = trainClasses.GetRows(2 * trainSampleCount / 3, trainSampleCount, 1);
trainClasses3.SetValue(3);
#endregion
using (NormalBayesClassifier classifier = new NormalBayesClassifier() )
{
classifier.Train(trainData, trainClasses, null, null, false);
classifier.Save("normalBayes.xml");
#region Classify every image pixel
// NOTE(review): the sample is (i, j) but the pixel written is img[j, i] —
// axes are swapped relative to TestSVM/TestKNearest. This only works
// because the image is square (500x500); verify the intended orientation.
for (int i = 0; i < img.Height; i++)
for (int j = 0; j < img.Width; j++)
{
sample.Data[0, 0] = i;
sample.Data[0, 1] = j;
int response = (int) classifier.Predict(sample, null);
// Dim the class color by 50% for the background shading.
Bgr color = colors[response -1];
img[j, i] = new Bgr(color.Blue * 0.5, color.Green * 0.5, color.Red * 0.5);
}
#endregion
}
// display the original training samples
for (int i = 0; i < (trainSampleCount / 3); i++)
{
PointF p1 = new PointF(trainData1[i, 0], trainData1[i, 1]);
img.Draw(new CircleF(p1, 2.0f), colors[0], -1);
PointF p2 = new PointF(trainData2[i, 0], trainData2[i, 1]);
img.Draw(new CircleF(p2, 2.0f), colors[1], -1);
PointF p3 = new PointF(trainData3[i, 0], trainData3[i, 1]);
img.Draw(new CircleF(p3, 2.0f), colors[2], -1);
}
}
示例5: TestKNearest
/// <summary>
/// Trains a k-nearest-neighbors classifier (K = 10) on two synthetic
/// Gaussian clusters, then colors every pixel of a 500x500 image by its
/// predicted class, brighter where more than half of the K neighbors agree.
/// </summary>
public void TestKNearest()
{
int K = 10;
int trainSampleCount = 100;
#region Generate the training data and classes
Matrix<float> trainData = new Matrix<float>(trainSampleCount, 2);
Matrix<float> trainClasses = new Matrix<float>(trainSampleCount, 1);
Image<Bgr, Byte> img = new Image<Bgr, byte>(500, 500);
Matrix<float> sample = new Matrix<float>(1, 2);
// First half of the rows: both coordinates ~ N(200, 50), labeled class 1.
Matrix<float> trainData1 = trainData.GetRows(0, trainSampleCount >> 1, 1);
trainData1.SetRandNormal(new MCvScalar(200), new MCvScalar(50));
// Second half: both coordinates ~ N(300, 50), labeled class 2.
Matrix<float> trainData2 = trainData.GetRows(trainSampleCount >> 1, trainSampleCount, 1);
trainData2.SetRandNormal(new MCvScalar(300), new MCvScalar(50));
Matrix<float> trainClasses1 = trainClasses.GetRows(0, trainSampleCount >> 1, 1);
trainClasses1.SetValue(1);
Matrix<float> trainClasses2 = trainClasses.GetRows(trainSampleCount >> 1, trainSampleCount, 1);
trainClasses2.SetValue(2);
#endregion
// One row per query sample (here sample.Rows == 1).
Matrix<float> results, neighborResponses;
results = new Matrix<float>(sample.Rows, 1);
neighborResponses = new Matrix<float>(sample.Rows, K);
//dist = new Matrix<float>(sample.Rows, K);
using (KNearest knn = new KNearest(trainData, trainClasses, null, false, K))
{
//TODO: find out when knn.save will be implemented
//knn.Save("knn.xml");
for (int i = 0; i < img.Height; i++)
{
for (int j = 0; j < img.Width; j++)
{
sample.Data[0, 0] = j;
sample.Data[0, 1] = i;
// estimates the response and get the neighbors' labels
float response = knn.FindNearest(sample, K, results, null, neighborResponses, null);
int accuracy = 0;
// compute the number of neighbors representing the majority
for (int k = 0; k < K; k++)
{
if (neighborResponses.Data[0, k] == response)
accuracy++;
}
// highlight the pixel depending on the accuracy (or confidence)
img[i, j] =
response == 1 ?
(accuracy > 5 ? new Bgr(90, 0, 0) : new Bgr(90, 40, 0)) :
(accuracy > 5 ? new Bgr(0, 90, 0) : new Bgr(40, 90, 0));
}
}
}
// display the original training samples
for (int i = 0; i < (trainSampleCount >> 1); i++)
{
PointF p1 = new PointF(trainData1[i, 0], trainData1[i, 1]);
img.Draw(new CircleF(p1, 2.0f), new Bgr(255, 100, 100), -1);
PointF p2 = new PointF(trainData2[i, 0], trainData2[i, 1]);
img.Draw(new CircleF(p2, 2.0f), new Bgr(100, 255, 100), -1);
}
}
示例6: TestEmgu
/// <summary>
/// Runs k-nearest-neighbors (K = 10) over feature vectors loaded from
/// this.vectorTable, colors an image by the per-pixel prediction, and
/// displays the result in an Emgu ImageViewer.
/// </summary>
private void TestEmgu()
{
int K = 10;
//int trainSampleCount = 100;
// Sample/column counts are derived from the vectorTable dimensions.
int trainSampleCount = this.vectorTable[0].Length-1;
int trainSampleColumns = this.vectorTable.Length - 2; //subtract two columns for the post id and IsImage
int scalingRatio = 10;
#region Generate the traning data and classes
// NOTE(review): trainData is allocated (trainSampleColumns x trainSampleCount),
// i.e. features as rows, yet KNearest and the GetRows slices below treat rows
// as samples — verify the intended layout before trusting the results.
Matrix<float> trainData = new Matrix<float>(trainSampleColumns, trainSampleCount);
Matrix<float> trainClasses = new Matrix<float>(trainSampleColumns, 1);
Image<Bgr, Byte> img = new Image<Bgr, byte>(trainSampleCount, trainSampleCount);
Matrix<float> sample = new Matrix<float>(1, trainSampleCount);
// Copy the table into trainData, scaling each value by scalingRatio.
for (int y = 1; y < this.vectorTable[0].Length - 1; y++) {
for (int x = 2; x < this.vectorTable.Length - 1; x++) {
trainData.Data.SetValue(Int32.Parse(this.vectorTable[x][y])*scalingRatio,x-2,y-1);
}
}
Matrix<float> trainData1 = trainData.GetRows(0, trainSampleColumns >> 1, 1);
//trainData1.SetRandNormal(new MCvScalar(200), new MCvScalar(50));
Matrix<float> trainData2 = trainData.GetRows(trainSampleColumns >> 1, trainSampleColumns, 1);
//trainData2.SetRandNormal(new MCvScalar(300), new MCvScalar(50));
// NOTE(review): trainClasses has trainSampleColumns rows but is sliced with
// trainSampleCount bounds here — confirm these indices are consistent.
Matrix<float> trainClasses1 = trainClasses.GetRows(0, trainSampleCount >> 1, 1);
trainClasses1.SetValue(1);
Matrix<float> trainClasses2 = trainClasses.GetRows(trainSampleCount >> 1, trainSampleCount, 1);
trainClasses2.SetValue(2);
#endregion
Matrix<float> results, neighborResponses;
results = new Matrix<float>(sample.Rows, 1);
neighborResponses = new Matrix<float>(sample.Rows, K);
//dist = new Matrix<float>(sample.Rows, K);
// NOTE(review): knn is never disposed — consider a using block as in the
// other samples in this file.
KNearest knn = new KNearest(trainData, trainClasses, null, false, K);
for (int i = 0; i < img.Height; i++) {
for (int j = 0; j < img.Width; j++) {
// NOTE(review): only the first 2 of the sample's trainSampleCount columns
// are ever set; the remaining features stay 0 — confirm this is intended.
sample.Data[0, 0] = j;
sample.Data[0, 1] = i;
//Matrix<float> nearestNeighbors = new Matrix<float>(K* sample.Rows, sample.Cols);
// estimates the response and get the neighbors' labels
float response = knn.FindNearest(sample, K, results, null, neighborResponses, null);
int accuracy = 0;
// compute the number of neighbors representing the majority
for (int k = 0; k < K; k++) {
if (neighborResponses.Data[0, k] == response)
accuracy++;
}
// highlight the pixel depending on the accuracy (or confidence)
img[i, j] =
response == 1 ?
(accuracy > 5 ? new Bgr(90, 0, 0) : new Bgr(90, 60, 0)) :
(accuracy > 5 ? new Bgr(0, 90, 0) : new Bgr(60, 90, 0));
}
}
// display the original training samples
for (int i = 0; i < (trainSampleCount >> 1); i++) {
PointF p1 = new PointF(trainData1[i, 0], trainData1[i, 1]);
img.Draw(new CircleF(p1, 2.0f), new Bgr(255, 100, 100), -1);
PointF p2 = new PointF(trainData2[i, 0], trainData2[i, 1]);
img.Draw(new CircleF(p2, 2.0f), new Bgr(100, 255, 100), -1);
}
//Emgu.CV.UI.ImageViewer.Show(img);
Emgu.CV.UI.ImageViewer imgviewer = new Emgu.CV.UI.ImageViewer(img);
imgviewer.Show();
}
示例7: TestRTreesLetterRecognition
/// <summary>
/// Trains a random forest (RTrees, modern property/TrainData API) on the
/// letter recognition dataset, using the first 80% of rows for training,
/// then reports train/test accuracy.
/// </summary>
public void TestRTreesLetterRecognition()
{
Matrix<float> data, response;
ReadLetterRecognitionData(out data, out response);
// First 80% of the rows are used for training; the rest for testing.
int trainingSampleCount = (int) (data.Rows * 0.8);
// varType has one entry per feature column plus one for the response.
Matrix<Byte> varType = new Matrix<byte>(data.Cols + 1, 1);
varType.SetValue((byte) MlEnum.VarType.Numerical); //the data is numerical
varType[data.Cols, 0] = (byte) MlEnum.VarType.Categorical; //the response is categorical
// sampleIdx is a mask: 255 marks a row as part of the training set.
Matrix<byte> sampleIdx = new Matrix<byte>(data.Rows, 1);
using (Matrix<byte> sampleRows = sampleIdx.GetRows(0, trainingSampleCount, 1))
sampleRows.SetValue(255);
using (RTrees forest = new RTrees())
using (TrainData td = new TrainData(data, MlEnum.DataLayoutType.RowSample, response, null, sampleIdx, null, varType))
{
forest.MaxDepth = 10;
forest.MinSampleCount = 10;
forest.RegressionAccuracy = 0.0f;
forest.UseSurrogates = false;
forest.MaxCategories = 15;
forest.CalculateVarImportance = true;
forest.ActiveVarCount = 4;
forest.TermCriteria = new MCvTermCriteria(100, 0.01f);
bool success = forest.Train(td);
if (!success)
return;
// Count exact predictions separately for the train and test partitions.
double trainDataCorrectRatio = 0;
double testDataCorrectRatio = 0;
for (int i = 0; i < data.Rows; i++)
{
using (Matrix<float> sample = data.GetRow(i))
{
double r = forest.Predict(sample, null);
r = Math.Abs(r - response[i, 0]);
if (r < 1.0e-5)
{
if (i < trainingSampleCount)
trainDataCorrectRatio++;
else
testDataCorrectRatio++;
}
}
}
trainDataCorrectRatio /= trainingSampleCount;
testDataCorrectRatio /= (data.Rows - trainingSampleCount);
// The variable-importance report is disabled below; builder stays a stub.
StringBuilder builder = new StringBuilder("Variable Importance: ");
/*
using (Matrix<float> varImportance = forest.VarImportance)
{
for (int i = 0; i < varImportance.Cols; i++)
{
builder.AppendFormat("{0} ", varImportance[0, i]);
}
}*/
EmguAssert.WriteLine(String.Format("Prediction accuracy for training data :{0}%", trainDataCorrectRatio * 100));
EmguAssert.WriteLine(String.Format("Prediction accuracy for test data :{0}%", testDataCorrectRatio * 100));
EmguAssert.WriteLine(builder.ToString());
}
}
示例8: TestNormalBayesClassifier
/// <summary>
/// Trains a normal Bayes classifier (modern TrainData API) on three
/// synthetic Gaussian clusters, exercises both Train overloads plus
/// Save, shades every pixel by its predicted class, then overlays the
/// training samples.
/// </summary>
public void TestNormalBayesClassifier()
{
// Index 0..2 maps to class labels 1..3 below (colors[response - 1]).
Bgr[] colors = new Bgr[] {
new Bgr(0, 0, 255),
new Bgr(0, 255, 0),
new Bgr(255, 0, 0)};
int trainSampleCount = 150;
#region Generate the training data and classes
Matrix<float> trainData = new Matrix<float>(trainSampleCount, 2);
Matrix<int> trainClasses = new Matrix<int>(trainSampleCount, 1);
Image<Bgr, Byte> img = new Image<Bgr, byte>(500, 500);
Matrix<float> sample = new Matrix<float>(1, 2);
// Three clusters centered at (100,300), (400,400) and (300,100), sigma 50.
Matrix<float> trainData1 = trainData.GetRows(0, trainSampleCount / 3, 1);
trainData1.GetCols(0, 1).SetRandNormal(new MCvScalar(100), new MCvScalar(50));
trainData1.GetCols(1, 2).SetRandNormal(new MCvScalar(300), new MCvScalar(50));
Matrix<float> trainData2 = trainData.GetRows(trainSampleCount / 3, 2 * trainSampleCount / 3, 1);
trainData2.SetRandNormal(new MCvScalar(400), new MCvScalar(50));
Matrix<float> trainData3 = trainData.GetRows(2 * trainSampleCount / 3, trainSampleCount, 1);
trainData3.GetCols(0, 1).SetRandNormal(new MCvScalar(300), new MCvScalar(50));
trainData3.GetCols(1, 2).SetRandNormal(new MCvScalar(100), new MCvScalar(50));
// Label the three row bands 1, 2 and 3.
Matrix<int> trainClasses1 = trainClasses.GetRows(0, trainSampleCount / 3, 1);
trainClasses1.SetValue(1);
Matrix<int> trainClasses2 = trainClasses.GetRows(trainSampleCount / 3, 2 * trainSampleCount / 3, 1);
trainClasses2.SetValue(2);
Matrix<int> trainClasses3 = trainClasses.GetRows(2 * trainSampleCount / 3, trainSampleCount, 1);
trainClasses3.SetValue(3);
#endregion
using (TrainData td = new TrainData(trainData, MlEnum.DataLayoutType.RowSample, trainClasses))
using (NormalBayesClassifier classifier = new NormalBayesClassifier())
{
//ParamDef[] defs = classifier.GetParams();
// Train via the matrix overload, clear, then retrain via TrainData —
// this exercises both Train entry points on the same classifier.
classifier.Train(trainData, MlEnum.DataLayoutType.RowSample, trainClasses);
classifier.Clear();
classifier.Train(td);
#if !NETFX_CORE
// Round-trip through Save, then clean up the temp file.
String fileName = Path.Combine(Path.GetTempPath(), "normalBayes.xml");
classifier.Save(fileName);
if (File.Exists(fileName))
File.Delete(fileName);
#endif
#region Classify every image pixel
// NOTE(review): the sample is (i, j) but the pixel written is img[j, i] —
// axes are swapped relative to the SVM/KNN samples. This only works
// because the image is square (500x500); verify the intended orientation.
for (int i = 0; i < img.Height; i++)
for (int j = 0; j < img.Width; j++)
{
sample.Data[0, 0] = i;
sample.Data[0, 1] = j;
int response = (int) classifier.Predict(sample, null);
// Dim the class color by 50% for the background shading.
Bgr color = colors[response - 1];
img[j, i] = new Bgr(color.Blue * 0.5, color.Green * 0.5, color.Red * 0.5);
}
#endregion
}
// display the original training samples
for (int i = 0; i < (trainSampleCount / 3); i++)
{
PointF p1 = new PointF(trainData1[i, 0], trainData1[i, 1]);
img.Draw(new CircleF(p1, 2.0f), colors[0], -1);
PointF p2 = new PointF(trainData2[i, 0], trainData2[i, 1]);
img.Draw(new CircleF(p2, 2.0f), colors[1], -1);
PointF p3 = new PointF(trainData3[i, 0], trainData3[i, 1]);
img.Draw(new CircleF(p3, 2.0f), colors[2], -1);
}
//Emgu.CV.UI.ImageViewer.Show(img);
}
示例9: Main
static void Main(string[] args)
{
int trainSampleCount = 0;
Image<Bgr, Byte> img = new Image<Bgr, byte>(500, 500);
//conversion of CSV to gesture variables
List<Gesture> gestureListClass1 = csvToGestureList(@"C:\Users\Dan\Desktop\thesis data\testEB-3-20.csv");
List<Gesture> gestureListClass2 = csvToGestureList(@"C:\Users\Dan\Desktop\thesis data\testSNC-3-20.csv");
trainSampleCount = (gestureListClass1.Count) + (gestureListClass2.Count); //set the sample count to the number of gestures we have available
//create relevant matrices based on size of the gestureList
Matrix<float> sample = new Matrix<float>(1, 2); //a sample has 2 columns because of 2 features
Matrix<float> trainTestData = new Matrix<float>(trainSampleCount, 2);
Matrix<float> trainTestClasses = new Matrix<float>(trainSampleCount, 1);
//GESTURE MATH INCOMING
foreach (Gesture g in gestureListClass1)
{
g.runMetrics();
}
foreach (Gesture g in gestureListClass2)
{
g.runMetrics();
}
#region Generate the training data and classes
//fill first set of data
for (int i = 0; i < gestureListClass1.Count; i++)
{
double[] gMetrics = (gestureListClass1[i].returnMetrics()).ToArray();
/*order of values
* list[0] - xyRatio
* list[1] - totalGestureTime
*/
trainTestData[i, 0] = ((float)gMetrics[0])*150;
trainTestData[i, 1] = ((float)gMetrics[1])/4;
}
Matrix<float> trainTestData1 = trainTestData.GetRows(0, gestureListClass1.Count, 1);
for (int j = 0; j < gestureListClass2.Count; j++)
{
double[] gMetrics = (gestureListClass2[j].returnMetrics()).ToArray();
trainTestData[(j + gestureListClass1.Count), 0] = (float)gMetrics[0] * 150;
trainTestData[(j + gestureListClass1.Count), 1] = ((float)gMetrics[1])/4;
}
Matrix<float> trainTestData2 = trainTestData.GetRows(gestureListClass1.Count, trainSampleCount, 1);
Matrix<float> trainTestClasses1 = trainTestClasses.GetRows(0, gestureListClass1.Count, 1);
trainTestClasses1.SetValue(1);
Matrix<float> trainTestClasses2 = trainTestClasses.GetRows(gestureListClass1.Count, trainSampleCount, 1);
trainTestClasses2.SetValue(2);
#endregion
using (SVM model = new SVM())
{
SVMParams p = new SVMParams();
p.KernelType = Emgu.CV.ML.MlEnum.SVM_KERNEL_TYPE.LINEAR;
p.SVMType = Emgu.CV.ML.MlEnum.SVM_TYPE.C_SVC;
//p.Gamma = 0.1;
p.C = 10;
p.TermCrit = new MCvTermCriteria(100, 0.00001);
//bool trained = model.Train(trainTestData, trainTestClasses, null, null, p);
bool trained = model.TrainAuto(trainTestData, trainTestClasses, null, null, p.MCvSVMParams, 5);
for (int i = 0; i < img.Height; i++)
{
for (int j = 0; j < img.Width; j++)
{
sample.Data[0, 0] = j;
sample.Data[0, 1] = i;
float response = model.Predict(sample);
img[i, j] =
response == 1 ? new Bgr(90, 0, 0) :
new Bgr(0, 0, 90);
//response == 2 ? new Bgr(0, 90, 0) :
}
}
int c = model.GetSupportVectorCount();
for (int i = 0; i < c; i++)
{
float[] v = model.GetSupportVector(i);
PointF p1 = new PointF(v[0], v[1]);
img.Draw(new CircleF(p1, 4), new Bgr(255, 255, 128), 2);
}
}
// display the original training samples
for (int i = 0; i < (trainSampleCount / 2); i++)
{
//.........这里部分代码省略.........
示例10: Hminired
/// <summary>
/// Initial cost-matrix reduction for the Hungarian (Munkres) assignment
/// method — a port of the MATLAB function hminired: subtract the minimum
/// of each column, then of each row, and encode the positions of the
/// resulting zeros as a linked list stored in an extra header column.
/// Negative entries in the result are "pointers" to the next zero in the
/// row (offset by 1); zeros become NoMatch.
/// NOTE(review): the code uses n (columns) for both dimensions of the
/// extended matrix, which assumes A is square — verify for non-square input.
/// </summary>
/// <param name="A">The unreduced cost matrix; mutated in place during reduction.</param>
/// <returns>The reduced cost matrix with linked zeros in each row.</returns>
private Matrix Hminired(Matrix A)
{
//function A=hminired(A)
//%HMINIRED Initial reduction of cost matrix for the Hungarian method.
//%
//%B=assredin(A)
//%A - the unreduced cost matris.
//%B - the reduced cost matrix with linked zeros in each row.
//% v1.0 96-06-13. Niclas Borlin, [email protected]
//[m,n]=size(A);
int m = A.Rows, n = A.Columns;
//% Subtract column-minimum values from each column.
//colMin=min(A);
var colMin = new DenseVector(A.GetColumns().Select(col => col.Min()).ToArray());
//A=A-colMin(ones(n,1),:);
for (int i = 0; i < A.Rows; ++i) {
A.SetRow(i, A.GetRow(i) - colMin);
}
//% Subtract row-minimum values from each row.
//rowMin=min(A')';
var rowMin = new DenseVector(A.GetRows().Select(row => row.Min()).ToArray());
//A=A-rowMin(:,ones(1,n));
// NOTE(review): the loop bound is A.Rows but the index is used as a column
// index in SetColumn — correct only for square matrices; verify.
for (int j = 0; j < A.Rows; ++j) {
A.SetColumn(j, A.GetColumn(j) - rowMin);
}
//% Get positions of all zeros.
//[i,j]=find(A==0);
List<int> ilist = new List<int>();
List<int> jlist = new List<int>();
A.EachT((v, i, j) => {
if (v == 0) {
ilist.Add(i);
jlist.Add(j);
}
});
//% Extend A to give room for row zero list header column.
//A(1,n+1)=0;
Matrix tmp = Zeros(n, n + 1);
tmp.SetSubMatrix(0, n, 0, n, A);
//for k=1:n
for (int k = 0; k < n; ++k) {
// % Get all column in this row.
// cols=j(k==i)';
// Collect the zero columns of row k; n is the header slot, -1 terminates.
var cols = new List<int>();
cols.Add(n);
for (int i = 0; i < ilist.Count; ++i) {
if (ilist[i] == k) {
cols.Add(jlist[i]);
}
}
cols.Add(-1);
// % Insert pointers in matrix.
// A(k,[n+1 cols])=[-cols 0];
// Chain each zero to the next as a negative pointer (offset by 1 so
// that "points to column 0" is distinguishable from "no entry").
for (int i = 0; i < cols.Count - 1; ++i) {
tmp[k, cols[i]] = -(cols[i + 1]) - 1;
} // TODO(review): unsure whether this pointer encoding is correct — verify against the MATLAB original
//result[k, cols[cols.Count - 1]] = 0;
//end
}
// Undo the offset on pointers and mark remaining zeros as NoMatch.
var result = tmp.Each(v => {
if (v < 0) return v + 1;
else if (v == 0) return NoMatch;
else return v;
});
return result;
}
示例11: svm
private Image<Bgr, Byte> svm()
{
Stopwatch timer = new Stopwatch();
timer.Start();
int trainSampleCount = 150;
int sigma = 60;
#region Generate the training data and classes
Matrix<float> trainData = new Matrix<float>(trainSampleCount, 2);
Matrix<float> trainClasses = new Matrix<float>(trainSampleCount, 1);
Image<Bgr, Byte> img = new Image<Bgr, byte>(500, 500);
Matrix<float> sample = new Matrix<float>(1, 2);
Matrix<float> trainData1 = trainData.GetRows(0, trainSampleCount / 3, 1);
trainData1.GetCols(0, 1).SetRandNormal(new MCvScalar(100), new MCvScalar(sigma));
trainData1.GetCols(1, 2).SetRandNormal(new MCvScalar(300), new MCvScalar(sigma));
Matrix<float> trainData2 = trainData.GetRows(trainSampleCount / 3, 2 * trainSampleCount / 3, 1);
trainData2.SetRandNormal(new MCvScalar(400), new MCvScalar(sigma));
Matrix<float> trainData3 = trainData.GetRows(2 * trainSampleCount / 3, trainSampleCount, 1);
trainData3.GetCols(0, 1).SetRandNormal(new MCvScalar(300), new MCvScalar(sigma));
trainData3.GetCols(1, 2).SetRandNormal(new MCvScalar(100), new MCvScalar(sigma));
Matrix<float> trainClasses1 = trainClasses.GetRows(0, trainSampleCount / 3, 1);
trainClasses1.SetValue(1);
Matrix<float> trainClasses2 = trainClasses.GetRows(trainSampleCount / 3, 2 * trainSampleCount / 3, 1);
trainClasses2.SetValue(2);
Matrix<float> trainClasses3 = trainClasses.GetRows(2 * trainSampleCount / 3, trainSampleCount, 1);
trainClasses3.SetValue(3);
#endregion
timer.Stop();
MessageBox.Show("生成" + timer.ElapsedMilliseconds + "ms");
timer.Reset();
timer.Start();
using (SVM model = new SVM()) {
SVMParams p = new SVMParams();
p.KernelType = Emgu.CV.ML.MlEnum.SVM_KERNEL_TYPE.LINEAR;
p.SVMType = Emgu.CV.ML.MlEnum.SVM_TYPE.C_SVC;
p.C = 1;
p.TermCrit = new MCvTermCriteria(100, 0.00001);
//model.Load(@"D:\Play Data\训练数据");
//bool trained = model.Train(trainData, trainClasses, null, null, p);
bool trained = model.TrainAuto(trainData, trainClasses, null, null, p.MCvSVMParams, 5);
timer.Stop();
MessageBox.Show("训练" + timer.ElapsedMilliseconds + "ms");
timer.Reset();
timer.Start();
for (int i = 0; i < img.Height; i++) {
for (int j = 0; j < img.Width; j++) {
sample.Data[0, 0] = j;
sample.Data[0, 1] = i;
//float response = model.Predict(sample);
//img[i, j] =
// response == 1 ? new Bgr(90, 0, 0) :
// response == 2 ? new Bgr(0, 90, 0) :
// new Bgr(0, 0, 90);
}
}
//model.Save(@"D:\Play Data\训练数据");
timer.Stop();
MessageBox.Show("染色" + timer.ElapsedMilliseconds + "ms");
timer.Reset();
timer.Start();
int c = model.GetSupportVectorCount();
for (int i = 0; i < c; i++) {
float[] v = model.GetSupportVector(i);
PointF p1 = new PointF(v[0], v[1]);
img.Draw(new CircleF(p1, 4), new Bgr(128, 128, 128), 2);
}
timer.Stop();
MessageBox.Show("画圈" + timer.ElapsedMilliseconds + "ms");
timer.Reset();
timer.Start();
}
// display the original training samples
for (int i = 0; i < (trainSampleCount / 3); i++) {
PointF p1 = new PointF(trainData1[i, 0], trainData1[i, 1]);
img.Draw(new CircleF(p1, 2.0f), new Bgr(255, 100, 100), -1);
PointF p2 = new PointF(trainData2[i, 0], trainData2[i, 1]);
img.Draw(new CircleF(p2, 2.0f), new Bgr(100, 255, 100), -1);
PointF p3 = new PointF(trainData3[i, 0], trainData3[i, 1]);
img.Draw(new CircleF(p3, 2.0f), new Bgr(100, 100, 255), -1);
}
timer.Stop();
MessageBox.Show("标点" + timer.ElapsedMilliseconds + "ms");
timer.Reset();
timer.Start();
//.........这里部分代码省略.........
示例12: knn
/// <summary>
/// Trains a k-nearest-neighbors classifier (K = 10) on three synthetic
/// Gaussian clusters, saves the trained model, and returns a 500x500 image
/// where each pixel is colored by its predicted class — brighter where more
/// than half of the K neighbors agree.
/// </summary>
/// <returns>The classification visualization image.</returns>
private Image<Bgr, Byte> knn()
{
int K = 10;
int trainSampleCount = 150;
// Standard deviation shared by all three Gaussian clusters.
int sigma = 60;
#region Generate the training data and classes
Matrix<float> trainData = new Matrix<float>(trainSampleCount, 2);
Matrix<float> trainClasses = new Matrix<float>(trainSampleCount, 1);
Image<Bgr, Byte> img = new Image<Bgr, byte>(500, 500);
Matrix<float> sample = new Matrix<float>(1, 2);
// Three clusters centered at (100,300), (400,400) and (300,100).
Matrix<float> trainData1 = trainData.GetRows(0, trainSampleCount / 3, 1);
trainData1.GetCols(0, 1).SetRandNormal(new MCvScalar(100), new MCvScalar(sigma));
trainData1.GetCols(1, 2).SetRandNormal(new MCvScalar(300), new MCvScalar(sigma));
Matrix<float> trainData2 = trainData.GetRows(trainSampleCount / 3, 2 * trainSampleCount / 3, 1);
trainData2.SetRandNormal(new MCvScalar(400), new MCvScalar(sigma));
Matrix<float> trainData3 = trainData.GetRows(2 * trainSampleCount / 3, trainSampleCount, 1);
trainData3.GetCols(0, 1).SetRandNormal(new MCvScalar(300), new MCvScalar(sigma));
trainData3.GetCols(1, 2).SetRandNormal(new MCvScalar(100), new MCvScalar(sigma));
// Label the three row bands 1, 2 and 3.
Matrix<float> trainClasses1 = trainClasses.GetRows(0, trainSampleCount / 3, 1);
trainClasses1.SetValue(1);
Matrix<float> trainClasses2 = trainClasses.GetRows(trainSampleCount / 3, 2 * trainSampleCount / 3, 1);
trainClasses2.SetValue(2);
Matrix<float> trainClasses3 = trainClasses.GetRows(2 * trainSampleCount / 3, trainSampleCount, 1);
trainClasses3.SetValue(3);
#endregion
Matrix<float> results, neighborResponses;
results = new Matrix<float>(sample.Rows, 1);
neighborResponses = new Matrix<float>(sample.Rows, K);
//dist = new Matrix<float>(sample.Rows, K);
//using (KNearest knn = new KNearest(trainData, trainClasses, null, false, K)) {
using (KNearest knn = new KNearest()) {
bool trained = knn.Train(trainData, trainClasses, null, false, K, false);
for (int i = 0; i < img.Height; i++) {
for (int j = 0; j < img.Width; j++) {
sample.Data[0, 0] = j;
sample.Data[0, 1] = i;
//Matrix<float> nearestNeighbors = new Matrix<float>(K* sample.Rows, sample.Cols);
// estimates the response and get the neighbors' labels
float response = knn.FindNearest(sample, K, results, null, neighborResponses, null);
int accuracy = 0;
// compute the number of neighbors representing the majority
for (int k = 0; k < K; k++) {
if (neighborResponses.Data[0, k] == response)
accuracy++;
}
// highlight the pixel depending on the accuracy (or confidence)
//img[i, j] =
//response == 1 ?
// (accuracy > 5 ? new Bgr(90, 0, 0) : new Bgr(90, 60, 0)) :
// (accuracy > 5 ? new Bgr(0, 90, 0) : new Bgr(60, 90, 0));
img[i, j] =
response == 1 ? (accuracy > 5 ? new Bgr(90, 0, 0) : new Bgr(90, 30, 30)) :
response == 2 ? (accuracy > 5 ? new Bgr(0, 90, 0) : new Bgr(30, 90, 30)) :
(accuracy > 5 ? new Bgr(0, 0, 90) : new Bgr(30, 30, 90));
}
}
knn.Save(@"D:\Play Data\KNN训练数据");
}
// display the original training samples
for (int i = 0; i < (trainSampleCount / 3); i++) {
PointF p1 = new PointF(trainData1[i, 0], trainData1[i, 1]);
img.Draw(new CircleF(p1, 2.0f), new Bgr(255, 100, 100), -1);
PointF p2 = new PointF(trainData2[i, 0], trainData2[i, 1]);
img.Draw(new CircleF(p2, 2.0f), new Bgr(100, 255, 100), -1);
PointF p3 = new PointF(trainData3[i, 0], trainData3[i, 1]);
img.Draw(new CircleF(p3, 2.0f), new Bgr(100, 100, 255), -1);
}
return img;
}
示例13: kmeans
/// <summary>
/// Generates three synthetic 2-D Gaussian clusters, clusters the points with
/// a custom annealed k-means (k = 3), and returns a 500x500 image showing the
/// Voronoi shading of the found centers, the clustered points, the centers,
/// and reference circles at the true generator centers (100,300), (300,100)
/// and (400,400). Also reports n, k, time and iteration count in this.Text.
/// </summary>
/// <returns>The clustering visualization image.</returns>
private Image<Bgr, Byte> kmeans()
{
int trainSampleCount = 1500;
// Standard deviation shared by all three Gaussian clusters.
int sigma = 60;
Matrix<float> trainData = new Matrix<float>(trainSampleCount, 2);
// Cluster 1: x ~ N(100, sigma), y ~ N(300, sigma).
Matrix<float> trainData1 = trainData.GetRows(0, trainSampleCount / 3, 1);
trainData1.GetCols(0, 1).SetRandNormal(new MCvScalar(100), new MCvScalar(sigma));
trainData1.GetCols(1, 2).SetRandNormal(new MCvScalar(300), new MCvScalar(sigma));
// Cluster 2: both coordinates ~ N(400, sigma).
Matrix<float> trainData2 = trainData.GetRows(trainSampleCount / 3, 2 * trainSampleCount / 3, 1);
trainData2.SetRandNormal(new MCvScalar(400), new MCvScalar(sigma));
// Cluster 3: x ~ N(300, sigma), y ~ N(100, sigma).
Matrix<float> trainData3 = trainData.GetRows(2 * trainSampleCount / 3, trainSampleCount, 1);
trainData3.GetCols(0, 1).SetRandNormal(new MCvScalar(300), new MCvScalar(sigma));
trainData3.GetCols(1, 2).SetRandNormal(new MCvScalar(100), new MCvScalar(sigma));
// Copy the matrix rows into PointF samples for the KMeans helper.
PointF[] points = new PointF[trainSampleCount];
for (int i = 0; i < points.Length; ++i) {
points[i] = new PointF(trainData[i, 0], trainData[i, 1]);
}
// k-means with squared Euclidean distance and centroid = coordinate mean.
var km = new KMeans<PointF>(points, 3,
(a, b) => ((a.X - b.X) * (a.X - b.X) + (a.Y - b.Y) * (a.Y - b.Y)),
list => new PointF(list.Average(p => p.X), list.Average(p => p.Y))
);
int it = 0;
MyTimer timer = new MyTimer();
timer.Restart();
//var cluster = km.Cluster();
// Annealed variant needs point add/subtract/divide operators supplied.
var cluster = km.AnnealCluster(
(a, b) => new PointF(a.X + b.X, a.Y + b.Y),
(a, b) => new PointF(a.X - b.X, a.Y - b.Y),
(p, v) => new PointF((float)(p.X / v), (float)(p.Y / v)),
out it);
var time = timer.Stop();
this.Text = String.Format("n={0}, k={1}, time={2}ms, iter={3}.", trainSampleCount, 3, time, it);
// Shade each pixel by its nearest cluster center (dim Voronoi regions).
Image<Bgr, Byte> img = new Image<Bgr, byte>(500, 500);
for (int y = 0; y < 500; ++y) {
for (int x = 0; x < 500; ++x) {
double d0 = (x - cluster[0].Center.X) * (x - cluster[0].Center.X)
+ (y - cluster[0].Center.Y) * (y - cluster[0].Center.Y);
double d1 = (x - cluster[1].Center.X) * (x - cluster[1].Center.X)
+ (y - cluster[1].Center.Y) * (y - cluster[1].Center.Y);
double d2 = (x - cluster[2].Center.X) * (x - cluster[2].Center.X)
+ (y - cluster[2].Center.Y) * (y - cluster[2].Center.Y);
Bgr color = new Bgr(0, 0, 0);
if (d0 < d1 && d0 < d2) {
color = new Bgr(20, 0, 0);
}
if (d1 < d0 && d1 < d2) {
color = new Bgr(0, 20, 0);
}
if (d2 < d0 && d2 < d1) {
color = new Bgr(0, 0, 20);
}
img[y, x] = color;
}
}
// Draw the points per cluster (dim colors) and the centers (bright).
Bgr[] colors = new[] { new Bgr(128, 0, 0), new Bgr(0, 128, 0), new Bgr(0, 0, 128) };
Bgr[] centers = new[] { new Bgr(255, 0, 0), new Bgr(0, 255, 0), new Bgr(0, 0, 255) };
for (int i = 0; i < 3; ++i) {
foreach (var p in cluster[i]) {
img.Draw(new CircleF(p, 2), colors[i], 1);
}
img.Draw(new CircleF(cluster[i].Center, 5), centers[i], 3);
}
// Gray reference circles mark the true generator centers and 1-sigma radius.
img.Draw(new CircleF(new PointF(100, 300), sigma), new Bgr(128, 128, 128), 2);
img.Draw(new CircleF(new PointF(100, 300), 3), new Bgr(128, 128, 128), 2);
img.Draw(new CircleF(new PointF(300, 100), sigma), new Bgr(128, 128, 128), 2);
img.Draw(new CircleF(new PointF(300, 100), 3), new Bgr(128, 128, 128), 2);
img.Draw(new CircleF(new PointF(400, 400), sigma), new Bgr(128, 128, 128), 2);
img.Draw(new CircleF(new PointF(400, 400), 3), new Bgr(128, 128, 128), 2);
return img;
}
示例14: TestDTreesMushroom
/// <summary>
/// Trains a decision tree on the mushroom data set, using the first 80%
/// of rows as training samples, then reports prediction accuracy on the
/// training and hold-out portions via <see cref="Trace"/>.
/// </summary>
public void TestDTreesMushroom()
{
    Matrix<float> data, response;
    ReadMushroomData(out data, out response);

    //Use the first 80% of data as training sample
    int trainingSampleCount = (int)(data.Rows * 0.8);

    // Every feature column (plus the response column) is categorical.
    Matrix<Byte> varType = new Matrix<byte>(data.Cols + 1, 1);
    varType.SetValue((byte)MlEnum.VAR_TYPE.CATEGORICAL); //the data is categorical

    // Non-zero entries in the mask mark the rows used for training.
    Matrix<byte> sampleIdx = new Matrix<byte>(data.Rows, 1);
    using (Matrix<byte> sampleRows = sampleIdx.GetRows(0, trainingSampleCount, 1))
        sampleRows.SetValue(255);

    // The native layer reads the priors through a raw pointer, so the
    // managed array must stay pinned for the duration of training.
    float[] priors = new float[] {1, 0.5f};
    GCHandle priorsHandle = GCHandle.Alloc(priors, GCHandleType.Pinned);
    try
    {
        MCvDTreeParams param = new MCvDTreeParams();
        param.maxDepth = 8;
        param.minSampleCount = 10;
        param.regressionAccuracy = 0;
        param.useSurrogates = true;
        param.maxCategories = 15;
        param.cvFolds = 10;
        param.use1seRule = true;
        param.truncatePrunedTree = true;
        param.priors = priorsHandle.AddrOfPinnedObject();

        using (DTree dtree = new DTree())
        {
            bool success = dtree.Train(
                data,
                Emgu.CV.ML.MlEnum.DATA_LAYOUT_TYPE.ROW_SAMPLE,
                response,
                null,
                sampleIdx,
                varType,
                null,
                param);
            if (!success) return;

            double trainDataCorrectRatio = 0;
            double testDataCorrectRatio = 0;
            for (int i = 0; i < data.Rows; i++)
            {
                using (Matrix<float> sample = data.GetRow(i))
                {
                    // A prediction counts as correct when it matches the
                    // recorded response within a tiny tolerance.
                    double r = dtree.Predict(sample, null, false).value;
                    r = Math.Abs(r - response[i, 0]);
                    if (r < 1.0e-5)
                    {
                        if (i < trainingSampleCount)
                            trainDataCorrectRatio++;
                        else
                            testDataCorrectRatio++;
                    }
                }
            }

            trainDataCorrectRatio /= trainingSampleCount;
            testDataCorrectRatio /= (data.Rows - trainingSampleCount);

            Trace.WriteLine(String.Format("Prediction accuracy for training data :{0}%", trainDataCorrectRatio*100));
            Trace.WriteLine(String.Format("Prediction accuracy for test data :{0}%", testDataCorrectRatio*100));
        }
    }
    finally
    {
        // FIX: free the pinned handle on every exit path. Previously the
        // handle (and the pinned array) leaked when Train() failed and
        // the method returned early, or when any exception was thrown
        // before the unconditional Free() at the end of the method.
        priorsHandle.Free();
    }
}
示例15: TestEM
/// <summary>
/// Trains two EM (expectation-maximization) models on 2-D samples drawn
/// from four Gaussian clusters: first a diagonal-covariance model with
/// automatic initialization, then a generic-covariance model whose
/// means/covariances/weights are seeded from the first model's results.
/// The second model then classifies every image pixel and the clustered
/// samples are drawn on top.
/// </summary>
public void TestEM()
{
int N = 4; //number of clusters
// N1 = sqrt(N) = 2; used below to lay the cluster means on a grid.
int N1 = (int)Math.Sqrt((double)4);
// One display color per cluster (BGR order).
Bgr[] colors = new Bgr[] {
new Bgr(0, 0, 255),
new Bgr(0, 255, 0),
new Bgr(0, 255, 255),
new Bgr(255, 255, 0)};
int nSamples = 100;
// Each sample row is a 2-D point; labels receives the cluster index
// EM assigns to each sample during training.
Matrix<float> samples = new Matrix<float>(nSamples, 2);
Matrix<Int32> labels = new Matrix<int>(nSamples, 1);
Image<Bgr, Byte> img = new Image<Bgr,byte>(500, 500);
Matrix<float> sample = new Matrix<float>(1, 2);
// Temporarily view the (nSamples x 2) matrix as 2-channel so that
// SetRandNormal below applies the 2-component mean/sigma scalars
// per coordinate of each point.
CvInvoke.cvReshape(samples.Ptr, samples.Ptr, 2, 0);
for (int i = 0; i < N; i++)
{
// Each quarter of the rows becomes one Gaussian cluster whose mean
// is placed on a grid inside the image via `scale`.
Matrix<float> rows = samples.GetRows(i * nSamples / N, (i + 1) * nSamples / N, 1);
double scale = ((i % N1) + 1.0) / (N1 + 1);
MCvScalar mean = new MCvScalar(scale * img.Width, scale * img.Height);
MCvScalar sigma = new MCvScalar(30, 30);
rows.SetRandNormal(mean, sigma);
}
// Restore the single-channel (nSamples x 2) view before training.
CvInvoke.cvReshape(samples.Ptr, samples.Ptr, 1, 0);
using (EM emModel1 = new EM())
using (EM emModel2 = new EM())
{
// Stage 1: diagonal covariance, automatic initialization, loose
// termination criteria — a cheap first fit.
EMParams parameters1 = new EMParams();
parameters1.Nclusters = N;
parameters1.CovMatType = Emgu.CV.ML.MlEnum.EM_COVARIAN_MATRIX_TYPE.COV_MAT_DIAGONAL;
parameters1.StartStep = Emgu.CV.ML.MlEnum.EM_INIT_STEP_TYPE.START_AUTO_STEP;
parameters1.TermCrit = new MCvTermCriteria(10, 0.01);
emModel1.Train(samples, null, parameters1, labels);
// Stage 2: generic covariance, starting from the E-step, seeded
// with the means/covariances/weights learned by the first model,
// and much tighter termination criteria.
EMParams parameters2 = new EMParams();
parameters2.Nclusters = N;
parameters2.CovMatType = Emgu.CV.ML.MlEnum.EM_COVARIAN_MATRIX_TYPE.COV_MAT_GENERIC;
parameters2.StartStep = Emgu.CV.ML.MlEnum.EM_INIT_STEP_TYPE.START_E_STEP;
parameters2.TermCrit = new MCvTermCriteria(100, 1.0e-6);
parameters2.Means = emModel1.Means;
parameters2.Covs = emModel1.GetCovariances();
parameters2.Weights = emModel1.Weights;
emModel2.Train(samples, null, parameters2, labels);
//TODO: Find out when saving of EM model will be enable
//emModel2.Save("emModel.xml");
#region Classify every image pixel
// NOTE(review): the sample is built as (i, j) with i iterating
// Height and j iterating Width, but the pixel is written at
// img[j, i]. The axis order looks swapped and only works here
// because the image is square (500x500) — confirm intent.
for (int i = 0; i < img.Height; i++)
for (int j = 0; j < img.Width; j++)
{
sample.Data[0, 0] = i;
sample.Data[0, 1] = j;
int response = (int) emModel2.Predict(sample, null);
Bgr color = colors[response];
// Half-intensity background so the sample dots stand out.
img[j, i] = new Bgr(color.Blue*0.5, color.Green * 0.5, color.Red * 0.5 );
}
#endregion
#region draw the clustered samples
// Draw each training sample in the full-intensity color of the
// cluster label EM assigned to it.
for (int i = 0; i < nSamples; i++)
{
img.Draw(new CircleF(new PointF(samples.Data[i, 0], samples.Data[i, 1]), 1), colors[labels.Data[i, 0]], 0);
}
#endregion
}
}