This article collects typical usage examples of the CvPoint2D32f class in C#. If you have been wondering what CvPoint2D32f is for or how to use it, the hand-picked class examples below may help.
The CvPoint2D32f class belongs to the OpenCvSharp namespace. 15 code examples of the class are shown below, sorted by popularity by default. You can upvote the examples you like or find useful; your feedback helps the system recommend better C# code examples.
Example 1: Perspective
public Perspective()
{
    using (var srcImg = new IplImage(FilePath.Image.Lenna, LoadMode.AnyDepth | LoadMode.AnyColor))
    using (var dstImg = srcImg.Clone())
    {
        CvPoint2D32f[] srcPnt = new CvPoint2D32f[4];
        CvPoint2D32f[] dstPnt = new CvPoint2D32f[4];
        srcPnt[0] = new CvPoint2D32f(150.0f, 150.0f);
        srcPnt[1] = new CvPoint2D32f(150.0f, 300.0f);
        srcPnt[2] = new CvPoint2D32f(350.0f, 300.0f);
        srcPnt[3] = new CvPoint2D32f(350.0f, 150.0f);
        dstPnt[0] = new CvPoint2D32f(200.0f, 200.0f);
        dstPnt[1] = new CvPoint2D32f(150.0f, 300.0f);
        dstPnt[2] = new CvPoint2D32f(350.0f, 300.0f);
        dstPnt[3] = new CvPoint2D32f(300.0f, 200.0f);
        using (CvMat mapMatrix = Cv.GetPerspectiveTransform(srcPnt, dstPnt))
        {
            Cv.WarpPerspective(srcImg, dstImg, mapMatrix, Interpolation.Linear | Interpolation.FillOutliers, CvScalar.ScalarAll(100));
            using (new CvWindow("src", srcImg))
            using (new CvWindow("dst", dstImg))
            {
                Cv.WaitKey(0);
            }
        }
    }
}
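For comparison, the same warp can also be written against the newer Mat-based OpenCvSharp API, where CvPoint2D32f is replaced by Point2f and the Cv.* calls by their Cv2.* counterparts. The following is a minimal sketch under the assumption that a recent OpenCvSharp package (Cv2/Mat/Point2f) is installed; the input path "lenna.png" is a placeholder.

// Minimal sketch of the equivalent warp with the Mat/Point2f API (assumes a recent OpenCvSharp build).
using (Mat src = Cv2.ImRead("lenna.png"))   // placeholder path
using (Mat dst = new Mat())
{
    Point2f[] srcPnt = { new Point2f(150, 150), new Point2f(150, 300), new Point2f(350, 300), new Point2f(350, 150) };
    Point2f[] dstPnt = { new Point2f(200, 200), new Point2f(150, 300), new Point2f(350, 300), new Point2f(300, 200) };
    using (Mat map = Cv2.GetPerspectiveTransform(srcPnt, dstPnt))
    {
        // Linear interpolation; pixels mapped from outside the source are filled with a constant gray border.
        Cv2.WarpPerspective(src, dst, map, src.Size(), InterpolationFlags.Linear, BorderTypes.Constant, Scalar.All(100));
        Cv2.ImShow("dst", dst);
        Cv2.WaitKey(0);
    }
}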
Example 2: Perspective
public Perspective()
{
    // cvGetPerspectiveTransform + cvWarpPerspective
    // Compute a perspective transform matrix from four corresponding points in the image,
    // then use that matrix to apply a perspective transform to the whole image.
    // (1) Load the image and allocate the output image
    using (IplImage srcImg = new IplImage(Const.ImageLenna, LoadMode.AnyDepth | LoadMode.AnyColor))
    using (IplImage dstImg = srcImg.Clone())
    {
        // (2) Set the corresponding vertices of the quadrilateral before and after the transform,
        //     and compute the perspective transform matrix with cvGetPerspectiveTransform
        CvPoint2D32f[] srcPnt = new CvPoint2D32f[4];
        CvPoint2D32f[] dstPnt = new CvPoint2D32f[4];
        srcPnt[0] = new CvPoint2D32f(150.0f, 150.0f);
        srcPnt[1] = new CvPoint2D32f(150.0f, 300.0f);
        srcPnt[2] = new CvPoint2D32f(350.0f, 300.0f);
        srcPnt[3] = new CvPoint2D32f(350.0f, 150.0f);
        dstPnt[0] = new CvPoint2D32f(200.0f, 200.0f);
        dstPnt[1] = new CvPoint2D32f(150.0f, 300.0f);
        dstPnt[2] = new CvPoint2D32f(350.0f, 300.0f);
        dstPnt[3] = new CvPoint2D32f(300.0f, 200.0f);
        using (CvMat mapMatrix = Cv.GetPerspectiveTransform(srcPnt, dstPnt))
        {
            // (3) Warp the whole image with cvWarpPerspective using the computed matrix
            Cv.WarpPerspective(srcImg, dstImg, mapMatrix, Interpolation.Linear | Interpolation.FillOutliers, CvScalar.ScalarAll(100));
            // (4) Show the results
            using (new CvWindow("src", srcImg))
            using (new CvWindow("dst", dstImg))
            {
                Cv.WaitKey(0);
            }
        }
    }
}
Example 3: Affine
public Affine()
{
    // cvGetAffineTransform + cvWarpAffine
    // Compute an affine transform matrix from three corresponding points in the image,
    // then use that matrix to apply an affine transform to the whole image.
    // (1) Load the image and allocate the output image
    using (IplImage srcImg = new IplImage(Const.ImageGoryokaku, LoadMode.AnyDepth | LoadMode.AnyColor))
    using (IplImage dstImg = srcImg.Clone())
    {
        // (2) Set the corresponding vertices of the triangle before and after the rotation,
        //     and compute the affine matrix with cvGetAffineTransform
        CvPoint2D32f[] srcPnt = new CvPoint2D32f[3];
        CvPoint2D32f[] dstPnt = new CvPoint2D32f[3];
        srcPnt[0] = new CvPoint2D32f(200.0f, 200.0f);
        srcPnt[1] = new CvPoint2D32f(250.0f, 200.0f);
        srcPnt[2] = new CvPoint2D32f(200.0f, 100.0f);
        dstPnt[0] = new CvPoint2D32f(300.0f, 100.0f);
        dstPnt[1] = new CvPoint2D32f(300.0f, 50.0f);
        dstPnt[2] = new CvPoint2D32f(200.0f, 100.0f);
        using (CvMat mapMatrix = Cv.GetAffineTransform(srcPnt, dstPnt))
        {
            // (3) Transform the image with cvWarpAffine using the computed affine matrix
            Cv.WarpAffine(srcImg, dstImg, mapMatrix, Interpolation.Linear | Interpolation.FillOutliers, CvScalar.ScalarAll(0));
            // (4) Show the results
            using (new CvWindow("src", srcImg))
            using (new CvWindow("dst", dstImg))
            {
                Cv.WaitKey(0);
            }
        }
    }
}
Example 4: UpdateLineMesh
public static void UpdateLineMesh(OpticalFlowWorker.AsyncResult r, Mesh mesh, CvPoint2D32f[] velocities, float limitVelocity)
{
    var vertices = new Vector3[r.nCorners * 2];
    var colors = new Color[vertices.Length];
    var indices = new int[vertices.Length];
    var limitSqrVelocity = limitVelocity * limitVelocity;
    var c0s = r.corners0;
    var rTexelSize = new Vector2(1f / r.imageWidth, 1f / r.imageHeight);
    for (var i = 0; i < r.nCorners; i++) {
        var vertexIndex = 2 * i;
        var c0 = c0s[i];
        var v0 = new Vector3(c0.X * rTexelSize.x - 0.5f, -(c0.Y * rTexelSize.y - 0.5f), 0f);
        var cv = velocities[i];
        var v = new Vector3(cv.X * rTexelSize.x, cv.Y * rTexelSize.y, 0f);
        var rad = Mathf.Atan2(v.y, v.x);
        if (rad < 0)
            rad += 2 * Mathf.PI;
        var color = HSBColor.ToColor(new HSBColor(rad * R_TWO_PI, 1f, 1f));
        if (limitSqrVelocity < v.sqrMagnitude)
            v = Vector3.zero;
        vertices[vertexIndex] = v0;
        vertices[vertexIndex + 1] = v0 + v;
        colors[vertexIndex] = color;
        colors[vertexIndex + 1] = color;
        indices[vertexIndex] = vertexIndex;
        indices[vertexIndex + 1] = vertexIndex + 1;
    }
    mesh.vertices = vertices;
    mesh.colors = colors;
    mesh.SetIndices(indices, MeshTopology.Lines, 0);
    mesh.RecalculateBounds();
}
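Example 4 only fills an existing Mesh; it does not show where the mesh or the flow result comes from. The sketch below is one plausible piece of Unity glue code, written as an assumption: OpticalFlowUtil stands in for whatever class actually declares UpdateLineMesh and CalculateFlowVelocities (Example 13), and the way OpticalFlowWorker signals completion is not shown on this page.

using OpenCvSharp;
using UnityEngine;

// Hypothetical MonoBehaviour that feeds finished optical-flow results into the line mesh.
public class FlowLineView : MonoBehaviour
{
    public float limitVelocity = 0.2f;

    Mesh _mesh;
    CvPoint2D32f[] _velocities;

    void Start()
    {
        _mesh = new Mesh();
        GetComponent<MeshFilter>().mesh = _mesh;   // requires a MeshFilter + MeshRenderer on the object
    }

    // Call once result.corners1 has been filled in by the worker (completion signal not shown here).
    public void OnFlowResult(OpticalFlowWorker.AsyncResult result)
    {
        OpticalFlowUtil.CalculateFlowVelocities(result, ref _velocities);          // Example 13
        OpticalFlowUtil.UpdateLineMesh(result, _mesh, _velocities, limitVelocity); // Example 4
    }
}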
Example 5: Affine
public Affine()
{
    // cvGetAffineTransform + cvWarpAffine
    using (IplImage srcImg = new IplImage(FilePath.Image.Goryokaku, LoadMode.AnyDepth | LoadMode.AnyColor))
    using (IplImage dstImg = srcImg.Clone())
    {
        CvPoint2D32f[] srcPnt = new CvPoint2D32f[3];
        CvPoint2D32f[] dstPnt = new CvPoint2D32f[3];
        srcPnt[0] = new CvPoint2D32f(200.0f, 200.0f);
        srcPnt[1] = new CvPoint2D32f(250.0f, 200.0f);
        srcPnt[2] = new CvPoint2D32f(200.0f, 100.0f);
        dstPnt[0] = new CvPoint2D32f(300.0f, 100.0f);
        dstPnt[1] = new CvPoint2D32f(300.0f, 50.0f);
        dstPnt[2] = new CvPoint2D32f(200.0f, 100.0f);
        using (CvMat mapMatrix = Cv.GetAffineTransform(srcPnt, dstPnt))
        {
            Cv.WarpAffine(srcImg, dstImg, mapMatrix, Interpolation.Linear | Interpolation.FillOutliers, CvScalar.ScalarAll(0));
            using (new CvWindow("src", srcImg))
            using (new CvWindow("dst", dstImg))
            {
                Cv.WaitKey(0);
            }
        }
    }
}
Example 6: PixelSampling
public PixelSampling()
{
    // Pixel sampling for translation: cvGetRectSubPix
    // (1) Load the image and allocate the output image
    using (IplImage srcImg = new IplImage(Const.ImageLenna, LoadMode.AnyDepth | LoadMode.AnyColor))
    using (IplImage dstImg = srcImg.Clone())
    {
        // (2) Specify the position center in srcImg that should become the image center of dstImg
        CvPoint2D32f center = new CvPoint2D32f
        {
            X = srcImg.Width - 1,
            Y = srcImg.Height - 1
        };
        // (3) Shift the whole image with GetRectSubPix so that center becomes the image center
        Cv.GetRectSubPix(srcImg, dstImg, center);
        // (4) Show the results
        using (CvWindow wSrc = new CvWindow("src"))
        using (CvWindow wDst = new CvWindow("dst"))
        {
            wSrc.Image = srcImg;
            wDst.Image = dstImg;
            Cv.WaitKey(0);
        }
    }
    // Pixel sampling for rotation: cvGetQuadrangleSubPix
    const int angle = 45;
    // (1) Load the image and allocate the output image
    using (IplImage srcImg = new IplImage(Const.ImageLenna, LoadMode.AnyDepth | LoadMode.AnyColor))
    using (IplImage dstImg = srcImg.Clone())
    {
        // (2) Set the elements of the rotation (affine) matrix and initialize the CvMat matrix
        float[] m = new float[6];
        m[0] = (float)(Math.Cos(angle * Cv.PI / 180.0));
        m[1] = (float)(-Math.Sin(angle * Cv.PI / 180.0));
        m[2] = srcImg.Width * 0.5f;
        m[3] = -m[1];
        m[4] = m[0];
        m[5] = srcImg.Height * 0.5f;
        using (CvMat mat = new CvMat(2, 3, MatrixType.F32C1, m))
        {
            // (3) Rotate the whole image with GetQuadrangleSubPix using the specified rotation matrix
            Cv.GetQuadrangleSubPix(srcImg, dstImg, mat);
            // (4) Show the results
            using (CvWindow wSrc = new CvWindow("src"))
            using (CvWindow wDst = new CvWindow("dst"))
            {
                wSrc.Image = srcImg;
                wDst.Image = dstImg;
                Cv.WaitKey(0);
            }
        }
    }
}
Example 7: GetEnclosingCircle
private void GetEnclosingCircle(
    IEnumerable<CvPoint> points, out CvPoint2D32f center, out float radius)
{
    var pointsArray = points.ToArray();
    using (var pointsMat = new CvMat(pointsArray.Length, 1, MatrixType.S32C2, pointsArray))
    {
        Cv.MinEnclosingCircle(pointsMat, out center, out radius);
    }
}
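A short usage sketch for GetEnclosingCircle: collect a handful of CvPoint values, compute the enclosing circle, and draw both on a blank canvas. The point coordinates and the 320x240 canvas are made up for illustration; Cv.Circle, CvColor and CvWindow are regular OpenCvSharp members used the same way as elsewhere on this page.

// Hypothetical caller: find and draw the minimum enclosing circle of a few points.
var points = new[] { new CvPoint(100, 120), new CvPoint(180, 90), new CvPoint(150, 200), new CvPoint(60, 160) };
GetEnclosingCircle(points, out CvPoint2D32f center, out float radius);

using (var canvas = new IplImage(new CvSize(320, 240), BitDepth.U8, 3))
{
    canvas.Set(CvColor.White);
    foreach (var p in points)
        Cv.Circle(canvas, p, 2, CvColor.Black, -1);                        // input points
    Cv.Circle(canvas, new CvPoint((int)center.X, (int)center.Y),
              (int)Math.Ceiling(radius), CvColor.Red, 1);                  // enclosing circle
    using (new CvWindow("enclosing circle", canvas))
        Cv.WaitKey(0);
}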
Example 8: sMarkerInfo
public sMarkerInfo()
{
    width = 0.0;
    height = 0.0;
    ID = -1;
    for (int i = 0; i < 4; i++)
    {
        corner[i] = new CvPoint2D32f(0, 0);
    }
}
Example 9: CalculateOpticalFlow
public AsyncResult CalculateOpticalFlow(CvPoint2D32f[] corners0)
{
    var r = new AsyncResult();
    r.prevTime = _prevTime = _currTime;
    r.currTime = _currTime = Time.time;
    r.corners0 = corners0;
    r.nCorners = corners0.Length;
    ThreadPool.QueueUserWorkItem(_CalculateOpticalFlow, r);
    return r;
}
Example 10: Approximate
public static CvCircleSegment Approximate(CvPoint[] points)
{
    CvPoint2D32f[] points2D32f = new CvPoint2D32f[points.Length];
    for (int i = 0; i < points.Length; i++)
    {
        points2D32f[i].X = (float)points[i].X;
        points2D32f[i].Y = (float)points[i].Y;
    }
    return Approximate(points2D32f);
}
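The conversion loop above can equivalently be written with LINQ; this is only a stylistic alternative with the same behavior, since CvPoint2D32f exposes a (float, float) constructor and the int coordinates convert implicitly.

// Equivalent conversion with LINQ (needs 'using System.Linq;').
CvPoint2D32f[] points2D32f = points
    .Select(p => new CvPoint2D32f(p.X, p.Y))
    .ToArray();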
Example 11: PixelSampling
public PixelSampling()
{
    // cvGetRectSubPix
    using (IplImage srcImg = new IplImage(FilePath.Image.Lenna, LoadMode.AnyDepth | LoadMode.AnyColor))
    using (IplImage dstImg = srcImg.Clone())
    {
        CvPoint2D32f center = new CvPoint2D32f
        {
            X = srcImg.Width - 1,
            Y = srcImg.Height - 1
        };
        Cv.GetRectSubPix(srcImg, dstImg, center);
        using (CvWindow wSrc = new CvWindow("src"))
        using (CvWindow wDst = new CvWindow("dst"))
        {
            wSrc.Image = srcImg;
            wDst.Image = dstImg;
            Cv.WaitKey(0);
        }
    }
    // cvGetQuadrangleSubPix
    const int angle = 45;
    using (IplImage srcImg = new IplImage(FilePath.Image.Lenna, LoadMode.AnyDepth | LoadMode.AnyColor))
    using (IplImage dstImg = srcImg.Clone())
    {
        float[] m = new float[6];
        m[0] = (float)(Math.Cos(angle * Cv.PI / 180.0));
        m[1] = (float)(-Math.Sin(angle * Cv.PI / 180.0));
        m[2] = srcImg.Width * 0.5f;
        m[3] = -m[1];
        m[4] = m[0];
        m[5] = srcImg.Height * 0.5f;
        using (CvMat mat = new CvMat(2, 3, MatrixType.F32C1, m))
        {
            Cv.GetQuadrangleSubPix(srcImg, dstImg, mat);
            using (CvWindow wSrc = new CvWindow("src"))
            using (CvWindow wDst = new CvWindow("dst"))
            {
                wSrc.Image = srcImg;
                wDst.Image = dstImg;
                Cv.WaitKey(0);
            }
        }
    }
}
Example 12: GetRandomPoints
private CvPoint2D32f[] GetRandomPoints(int count, CvSize imageSize)
{
    Random rand = new Random();
    CvPoint2D32f[] points = new CvPoint2D32f[count];
    double a = rand.NextDouble() + 0.5;
    for (int i = 0; i < points.Length; i++)
    {
        double x = rand.Next(imageSize.Width);
        double y = (x * a) + (rand.Next(100) - 50);
        points[i] = new CvPoint2D32f(x, y);
    }
    return points;
}
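GetRandomPoints scatters the points around the line y = a*x (with a drawn from [0.5, 1.5) and roughly +/-50 pixels of uniform noise), which makes the output handy test data for line-fitting code. As a quick sanity check, an ordinary least-squares slope estimate over the returned points should land close to a; the sketch below uses only plain C#/LINQ on top of the method above.

// Hypothetical sanity check: recover the slope from the generated samples (needs 'using System.Linq;').
CvPoint2D32f[] pts = GetRandomPoints(1000, new CvSize(640, 480));
double meanX = pts.Average(p => (double)p.X);
double meanY = pts.Average(p => (double)p.Y);
double slope = pts.Sum(p => (p.X - meanX) * (p.Y - meanY)) /
               pts.Sum(p => (p.X - meanX) * (p.X - meanX));
Console.WriteLine("estimated slope: {0:F3}", slope);   // expected to be near the random coefficient a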
Example 13: CalculateFlowVelocities
public static void CalculateFlowVelocities(OpticalFlowWorker.AsyncResult r, ref CvPoint2D32f[] velocities)
{
    if (velocities == null || velocities.Length != r.nCorners)
        velocities = new CvPoint2D32f[r.nCorners];
    var c0s = r.corners0;
    var c1s = r.corners1;
    for (var i = 0; i < r.nCorners; i++) {
        var c0 = c0s[i];
        var c1 = c1s[i];
        var cv = c1 - c0;
        velocities[i] = cv;
    }
}
Example 14: GenGridCorners
public static OpenCvSharp.CvPoint2D32f[] GenGridCorners(int width, int height, float gridSize)
{
    var nx = (int)(width / gridSize);
    var ny = (int)(height / gridSize);
    var corners = new CvPoint2D32f[nx * ny];
    var offset = gridSize * 0.5f;
    for (var y = 0; y < ny; y++) {
        for (var x = 0; x < nx; x++) {
            var index = x + y * nx;
            corners[index] = new CvPoint2D32f(offset + x * gridSize, offset + y * gridSize);
        }
    }
    return corners;
}
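GenGridCorners places one corner at the centre of every gridSize x gridSize cell, row by row. As a worked example of the arithmetic above:

// Values implied by the code above for a 640x480 image and a 32-pixel grid:
CvPoint2D32f[] corners = GenGridCorners(640, 480, 32f);
// corners.Length == 300      (nx = 20 columns, ny = 15 rows)
// corners[0]     == (16, 16) (offset = gridSize / 2)
// corners[299]   == (624, 464)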
Example 15: Delaunay
public Delaunay()
{
    CvRect rect = new CvRect(0, 0, 600, 600);
    CvColor activeFacetColor = new CvColor(255, 0, 0);
    CvColor delaunayColor = new CvColor(0, 0, 0);
    CvColor voronoiColor = new CvColor(0, 180, 0);
    CvColor bkgndColor = new CvColor(255, 255, 255);
    Random rand = new Random();
    using (CvMemStorage storage = new CvMemStorage(0))
    using (IplImage img = new IplImage(rect.Size, BitDepth.U8, 3))
    using (CvWindow window = new CvWindow("delaunay"))
    {
        img.Set(bkgndColor);
        CvSubdiv2D subdiv = new CvSubdiv2D(rect, storage);
        for (int i = 0; i < 200; i++)
        {
            CvPoint2D32f fp = new CvPoint2D32f
            {
                X = (float)rand.Next(5, rect.Width - 10),
                Y = (float)rand.Next(5, rect.Height - 10)
            };
            LocatePoint(subdiv, fp, img, activeFacetColor);
            window.Image = img;
            if (CvWindow.WaitKey(100) >= 0)
            {
                break;
            }
            subdiv.Insert(fp);
            subdiv.CalcVoronoi2D();
            img.Set(bkgndColor);
            DrawSubdiv(img, subdiv, delaunayColor, voronoiColor);
            window.Image = img;
            if (CvWindow.WaitKey(100) >= 0)
            {
                break;
            }
        }
        img.Set(bkgndColor);
        PaintVoronoi(subdiv, img);
        window.Image = img;
        CvWindow.WaitKey(0);
    }
}