This page collects and summarizes typical usage examples of the C# method System.Matrix.GetCol. If you have been struggling with questions such as: What does C# Matrix.GetCol do? How do I call it? Where can I find examples? then the hand-picked code samples below should help. You can also browse further usage examples of the containing class, System.Matrix.
Eight code examples of Matrix.GetCol are shown below, sorted by popularity by default. You can upvote the examples you like or find useful; your ratings help the system recommend better C# examples.
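Before the examples, here is a minimal sketch of the basic call pattern, assuming the Emgu CV style Matrix&lt;TDepth&gt; API that the examples on this page use (GetCol returns the requested column as a Rows x 1 matrix); the matrix values are made up for illustration.
using System;
using Emgu.CV;

class GetColDemo
{
    static void Main()
    {
        // A 3x2 matrix initialized from a 2-D array.
        Matrix<double> m = new Matrix<double>(new double[,] { { 1, 2 }, { 3, 4 }, { 5, 6 } });

        // GetCol(1) returns the second column as a 3x1 Matrix<double>.
        using (Matrix<double> col = m.GetCol(1))
        {
            for (int i = 0; i < col.Rows; i++)
                Console.WriteLine(col[i, 0]); // prints 2, 4, 6
        }
    }
}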
Example 1: listToMatrix
private Matrix<double> listToMatrix()
{
    // Allocate a trainingSampleCount x 3 matrix and set every element of its
    // first column to the x position of the first recorded touch sample.
    Matrix<double> tempMatrix = new Matrix<double>(trainingSampleCount, 3);
    tempMatrix.GetCol(0).SetValue(csvTouchList[0].xPosition);
    return tempMatrix;
}
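Note that GetCol does not copy data: it returns a header that shares storage with the parent matrix, which is why the SetValue call above writes into tempMatrix itself (the same view semantics that Examples 3 and 4 rely on to fill a pinned array through the column views). A small self-contained sketch of that behaviour, with made-up values:
using System;
using Emgu.CV;

class ColumnViewDemo
{
    static void Main()
    {
        Matrix<double> m = new Matrix<double>(new double[,] { { 1, 2 }, { 3, 4 } });

        // Write every element of column 0 through the view returned by GetCol.
        using (Matrix<double> col0 = m.GetCol(0))
        {
            col0.SetValue(9);
        }

        Console.WriteLine(m[0, 0]); // 9
        Console.WriteLine(m[1, 0]); // 9
    }
}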
Example 2: Main
static void Main(string[] args)
{
    Matrix<int> test = new Matrix<int>(2, 2);
    // A 2x2 matrix only has columns 0 and 1, so request an index that exists;
    // GetCol returns the column as a 2x1 matrix.
    Matrix<int> col = test.GetCol(1);
    Console.WriteLine(col.Rows + "x" + col.Cols);
    Console.WriteLine(test.ToString());
}
Example 3: GeneratePointCloud
/// <summary>
/// Generate a random point cloud around the ellipse.
/// </summary>
/// <param name="e">The region where the point cloud will be generated. The axes of e correspond to the standard deviations of the random point cloud.</param>
/// <param name="numberOfPoints">The number of points to be generated</param>
/// <returns>A random point cloud around the ellipse</returns>
public static PointF[] GeneratePointCloud(Ellipse e, int numberOfPoints)
{
    PointF[] cloud = new PointF[numberOfPoints];
    GCHandle handle = GCHandle.Alloc(cloud, GCHandleType.Pinned);

    // Wrap the pinned PointF[] in an n x 2 matrix header; column 0 maps to the
    // X fields and column 1 to the Y fields of the array.
    using (Matrix<float> points = new Matrix<float>(numberOfPoints, 2, handle.AddrOfPinnedObject()))
    using (Matrix<float> xValues = points.GetCol(0))
    using (Matrix<float> yValues = points.GetCol(1))
    using (RotationMatrix2D<float> rotation = new RotationMatrix2D<float>(e.MCvBox2D.center, e.MCvBox2D.angle, 1.0))
    {
        xValues.SetRandNormal(new MCvScalar(e.MCvBox2D.center.X), new MCvScalar(e.MCvBox2D.size.Width / 2.0f));
        yValues.SetRandNormal(new MCvScalar(e.MCvBox2D.center.Y), new MCvScalar(e.MCvBox2D.size.Height / 2.0f));
        rotation.RotatePoints(points);
    }
    handle.Free();
    return cloud;
}
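The design choice worth noting here is that the managed PointF[] is pinned and wrapped in an n x 2 Matrix&lt;float&gt; header, so GetCol(0) and GetCol(1) address the interleaved X and Y fields in place and the SetRandNormal/RotatePoints calls write straight into the array without any copying. A stripped-down sketch of the same pattern, with illustrative values:
using System;
using System.Drawing;
using System.Runtime.InteropServices;
using Emgu.CV;

class PinnedPointsDemo
{
    static void Main()
    {
        PointF[] pts = new PointF[4];
        GCHandle handle = GCHandle.Alloc(pts, GCHandleType.Pinned);
        try
        {
            using (Matrix<float> m = new Matrix<float>(pts.Length, 2, handle.AddrOfPinnedObject()))
            using (Matrix<float> xs = m.GetCol(0))
            using (Matrix<float> ys = m.GetCol(1))
            {
                xs.SetValue(10); // writes the X field of every PointF
                ys.SetValue(20); // writes the Y field of every PointF
            }
        }
        finally
        {
            handle.Free();
        }

        Console.WriteLine(pts[0]); // expected to print {X=10, Y=20}
    }
}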
Example 4: ReprojectImageTo3D
/*
/// <summary>
/// Re-project pixels on a 1-channel disparity map to an array of 3D points.
/// </summary>
/// <param name="disparity">Disparity map</param>
/// <param name="Q">The re-projection 4x4 matrix, can be arbitrary, e.g. the one computed by cvStereoRectify</param>
/// <returns>The reprojected 3D points</returns>
public static MCvPoint3D32f[] ReprojectImageTo3D(Image<Gray, Byte> disparity, Matrix<double> Q)
{
    Size size = disparity.Size;
    MCvPoint3D32f[] points3D = new MCvPoint3D32f[size.Width * size.Height];
    GCHandle handle = GCHandle.Alloc(points3D, GCHandleType.Pinned);
    using (Matrix<float> pts = new Matrix<float>(size.Height, size.Width, 3, handle.AddrOfPinnedObject(), 0))
        CvInvoke.ReprojectImageTo3D(disparity, pts, Q, false, CvEnum.DepthType.Cv32F);
    handle.Free();
    return points3D;
}*/

/// <summary>
/// Generate a random point cloud around the ellipse.
/// </summary>
/// <param name="e">The region where the point cloud will be generated. The axes of e correspond to the standard deviations of the random point cloud.</param>
/// <param name="numberOfPoints">The number of points to be generated</param>
/// <returns>A random point cloud around the ellipse</returns>
public static PointF[] GeneratePointCloud(Ellipse e, int numberOfPoints)
{
    PointF[] cloud = new PointF[numberOfPoints];
    GCHandle handle = GCHandle.Alloc(cloud, GCHandleType.Pinned);
    using (Matrix<float> points = new Matrix<float>(numberOfPoints, 2, handle.AddrOfPinnedObject()))
    using (Matrix<float> xValues = points.GetCol(0))
    using (Matrix<float> yValues = points.GetCol(1))
    using (RotationMatrix2D rotation = new RotationMatrix2D(e.RotatedRect.Center, e.RotatedRect.Angle, 1.0))
    using (Mat tmp = new Mat())
    {
        rotation.ConvertTo(tmp, DepthType.Cv32F);
        xValues.SetRandNormal(new MCvScalar(e.RotatedRect.Center.X), new MCvScalar(e.RotatedRect.Size.Width / 2.0f));
        yValues.SetRandNormal(new MCvScalar(e.RotatedRect.Center.Y), new MCvScalar(e.RotatedRect.Size.Height / 2.0f));
        rotation.RotatePoints(points);
    }
    handle.Free();
    return cloud;
}
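A possible call site for the helper above, shown as a loose snippet (in a real project you would qualify GeneratePointCloud with its declaring class). The Ellipse constructor taking a centre, size and angle is an assumption about the Emgu CV version in use; older versions build the ellipse from an MCvBox2D instead. The numbers are arbitrary.
// An ellipse centred at (100, 100), 60 wide and 30 tall, rotated 45 degrees.
// NOTE: this Ellipse constructor is assumed; adjust it to your Emgu CV version.
Ellipse region = new Ellipse(new PointF(100f, 100f), new SizeF(60f, 30f), 45f);

// 500 points scattered around the ellipse, written through the GetCol views.
PointF[] cloud = GeneratePointCloud(region, 500);
Console.WriteLine("Generated " + cloud.Length + " points");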
Example 5: TestSubMatrix
public void TestSubMatrix()
{
    Matrix<float> mat = new Matrix<float>(30, 40);
    mat.SetRandUniform(new MCvScalar(0), new MCvScalar(255));

    Matrix<float> submat = mat.GetSubRect(new Rectangle(5, 5, 15, 15));
    for (int i = 0; i < 15; i++)
        for (int j = 0; j < 15; j++)
            EmguAssert.AreEqual(mat[i + 5, j + 5], submat[i, j]);

    Matrix<float> secondRow = mat.GetRow(1);
    for (int i = 0; i < mat.Cols; i++)
    {
        EmguAssert.AreEqual(mat[1, i], secondRow[0, i]);
    }

    Matrix<float> thirdCol = mat.GetCol(2);
    for (int i = 0; i < mat.Rows; i++)
    {
        EmguAssert.AreEqual(mat[i, 2], thirdCol[i, 0]);
    }

    Matrix<float> diagonal = mat.GetDiag();
    for (int i = 0; i < Math.Min(mat.Rows, mat.Cols); i++)
    {
        EmguAssert.AreEqual(diagonal[i, 0], mat[i, i]);
    }
}
Example 6: TestGetDiagColRow
public void TestGetDiagColRow()
{
    Matrix<double> m = new Matrix<double>(new double[,] { { 1, 2 }, { 3, 4 } });

    Matrix<double> diag = m.GetDiag();
    EmguAssert.IsTrue(diag[0, 0] == 1);
    EmguAssert.IsTrue(diag[1, 0] == 4);
    EmguAssert.IsTrue(diag.Sum == m.Trace.V0);

    Matrix<double> col1 = m.GetCol(1);
    EmguAssert.IsTrue(col1[0, 0] == 2);
    EmguAssert.IsTrue(col1[1, 0] == 4);
    EmguAssert.IsTrue(col1.Sum == 2 + 4);

    Matrix<double> row1 = m.GetRow(1);
    EmguAssert.IsTrue(row1[0, 0] == 3);
    EmguAssert.IsTrue(row1[0, 1] == 4);
    EmguAssert.IsTrue(row1.Sum == 3 + 4);
}
Example 7: VoteForUniqueness
/// <summary>
/// Filter the matched features such that if a match is not unique, it is rejected.
/// </summary>
/// <param name="distance">The matched distances; it should have at least 2 columns</param>
/// <param name="uniquenessThreshold">The distance ratio at or below which a match is considered unique; a good value is 0.8</param>
/// <param name="mask">This is both input and output. This matrix indicates which rows are valid matches.</param>
public static void VoteForUniqueness(Matrix<float> distance, double uniquenessThreshold, Matrix<Byte> mask)
{
    using (Matrix<float> firstCol = distance.GetCol(0))
    using (Matrix<float> secCol = distance.GetCol(1))
    using (Matrix<float> tmp = new Matrix<float>(firstCol.Size))
    using (Matrix<Byte> maskBuffer = new Matrix<byte>(firstCol.Size))
    {
        // A match is kept only if the best distance is at most
        // uniquenessThreshold times the second-best distance (ratio test).
        CvInvoke.cvDiv(firstCol, secCol, tmp, 1.0);
        CvInvoke.cvCmpS(tmp, uniquenessThreshold, maskBuffer, CvEnum.CMP_TYPE.CV_CMP_LE);
        CvInvoke.cvAnd(maskBuffer, mask, mask, IntPtr.Zero);
    }
}
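A sketch of how the helper above might be driven, assuming a k = 2 nearest-neighbour matcher has already produced one row of distances per query feature (the two distance rows below are made up) and that the snippet runs where VoteForUniqueness is accessible. The mask starts as all 255 and the call clears the rows that fail the ratio test.
// Two query features: the first has a clearly best match (60 vs 100),
// the second is ambiguous (95 vs 100) and should be rejected at 0.8.
Matrix<float> distance = new Matrix<float>(new float[,]
{
    { 60f, 100f },
    { 95f, 100f }
});

Matrix<byte> mask = new Matrix<byte>(distance.Rows, 1);
mask.SetValue(255); // start with every match considered valid

VoteForUniqueness(distance, 0.8, mask);

Console.WriteLine(mask[0, 0]); // expected 255 (kept)
Console.WriteLine(mask[1, 0]); // expected 0   (rejected)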
Example 8: Calibrate
public override bool Calibrate()
{
    if (NumImages == 0)
    {
        //throw new ArgumentException("numImages=0 in Calibrate()");
        return false;
    }
    try
    {
        CalibrationDataLeft = new CalibrationData();
        CalibrationDataRight = new CalibrationData();

        var targets = new Matrix<double>(NumImages, 3);
        var designMatrixLeft = new Matrix<double>(NumImages, 6);
        var designMatrixRight = new Matrix<double>(NumImages, 6);
        var rowLeft = new double[6];
        var rowRight = new double[6];

        int k = 0;
        foreach (CalibrationTarget ct in CalibrationTargets)
        {
            for (int j = 0; j < ct.NumImages; j++)
            {
                targets[k, 0] = ct.targetCoordinates.X;
                targets[k, 1] = ct.targetCoordinates.Y;

                double xLeft = ct.pupilCentersLeft[j].X;
                double yLeft = ct.pupilCentersLeft[j].Y;
                rowLeft[0] = 1;
                rowLeft[1] = xLeft;
                rowLeft[2] = yLeft;
                rowLeft[3] = xLeft * yLeft;
                rowLeft[4] = xLeft * xLeft;
                rowLeft[5] = yLeft * yLeft;
                for (int r = 0; r < 6; r++)
                {
                    designMatrixLeft[k, r] = rowLeft[r];
                }

                if (Settings.Instance.Processing.TrackingMode == TrackingModeEnum.Binocular)
                {
                    double xRight = ct.pupilCentersRight[j].X;
                    double yRight = ct.pupilCentersRight[j].Y;
                    rowRight[0] = 1;
                    rowRight[1] = xRight;
                    rowRight[2] = yRight;
                    rowRight[3] = xRight * yRight;
                    rowRight[4] = xRight * xRight;
                    rowRight[5] = yRight * yRight;
                    for (int r = 0; r < 6; r++)
                    {
                        designMatrixRight[k, r] = rowRight[r];
                    }
                }
                k++;
            }
        }

        CalibrationDataLeft.CoeffsX = new Matrix<double>(6, 1);
        CalibrationDataLeft.CoeffsY = new Matrix<double>(6, 1);
        CalibrationDataLeft.CoeffsX = Operations.SolveLeastSquares(designMatrixLeft, targets.GetCol(0));
        CalibrationDataLeft.CoeffsY = Operations.SolveLeastSquares(designMatrixLeft, targets.GetCol(1));
        if (Settings.Instance.Processing.TrackingMode == TrackingModeEnum.Binocular)
        {
            CalibrationDataRight.CoeffsX = new Matrix<double>(6, 1);
            CalibrationDataRight.CoeffsY = new Matrix<double>(6, 1);
            CalibrationDataRight.CoeffsX = Operations.SolveLeastSquares(designMatrixRight, targets.GetCol(0));
            CalibrationDataRight.CoeffsY = Operations.SolveLeastSquares(designMatrixRight, targets.GetCol(1));
        }

        // For each image we calculate the estimated gaze coordinates
        foreach (CalibrationTarget ct in CalibrationTargets)
        {
            // We might be recalibrating so clear estGazeCoords first
            ct.estimatedGazeCoordinatesLeft.Clear();
            ct.estimatedGazeCoordinatesRight.Clear();

            for (int j = 0; j < ct.NumImages; j++)
            {
                PupilCenterLeft = ct.pupilCentersLeft[j];
                ct.estimatedGazeCoordinatesLeft.Add(GetGazeCoordinates(EyeEnum.Left));
                if (Settings.Instance.Processing.TrackingMode == TrackingModeEnum.Binocular)
                {
                    PupilCenterRight = ct.pupilCentersRight[j];
                    ct.estimatedGazeCoordinatesRight.Add(GetGazeCoordinates(EyeEnum.Right));
                }
            }

            ct.CalculateAverageCoords();
            ct.averageErrorLeft = Operations.Distance(ct.meanGazeCoordinatesLeft, ct.targetCoordinates);
//......... (part of the code is omitted here) .........