This page collects typical usage examples of the C# Mat.Set method. If you have been asking how C# Mat.Set is used in practice, or looking for concrete examples of it, the hand-picked code samples here may help. You can also explore further usage examples of Mat, the class this method belongs to.
Eleven code examples of the Mat.Set method are shown below, sorted by popularity by default. You can upvote the examples you like or find useful; your ratings help the system recommend better C# code samples.
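Before the examples, a minimal sketch of the generic form, assuming the OpenCvSharp Mat.Set&lt;T&gt;(row, col, value) / Get&lt;T&gt; API that all the samples below rely on:
using System;
using OpenCvSharp;
class MatSetDemo
{
    static void Main()
    {
        // Fill a 3x3 single-channel float matrix element by element.
        using (var m = new Mat(3, 3, MatType.CV_32FC1))
        {
            for (int r = 0; r < m.Rows; r++)
                for (int c = 0; c < m.Cols; c++)
                    m.Set<float>(r, c, r * 3 + c); // Set<T> writes one element at (row, col)
            Console.WriteLine(m.Get<float>(2, 2)); // prints 8
        }
    }
}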
Example 1: usingCppInterface1
private static void usingCppInterface1()
{
// Cv2.ImRead
using (var src = new Mat(@"..\..\Images\Penguin.Png", LoadMode.AnyDepth | LoadMode.AnyColor))
using (var dst = new Mat())
{
src.CopyTo(dst);
for (var y = 0; y < src.Height; y++)
{
for (var x = 0; x < src.Width; x++)
{
var pixel = src.Get<Vec3b>(y, x);
var newPixel = new Vec3b
{
Item0 = (byte)(255 - pixel.Item0), // B
Item1 = (byte)(255 - pixel.Item1), // G
Item2 = (byte)(255 - pixel.Item2) // R
};
dst.Set(y, x, newPixel);
}
}
// [Cpp] Accessing Pixel
// https://github.com/shimat/opencvsharp/wiki/%5BCpp%5D-Accessing-Pixel
//Cv2.NamedWindow();
//Cv2.ImShow();
using (new Window("C++ Interface: Src", image: src))
using (new Window("C++ Interface: Dst", image: dst))
{
Cv2.WaitKey(0);
}
}
}
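A note on this example: calling Get/Set per pixel goes through a generic accessor on every call, which is slow on large images. A faster drop-in sketch for the inner loops above, using Mat.GetGenericIndexer (the same indexer Example 8 uses); this is a variant for illustration, not the author's code:
var indexer = dst.GetGenericIndexer<Vec3b>();
for (var y = 0; y < src.Height; y++)
{
    for (var x = 0; x < src.Width; x++)
    {
        var p = indexer[y, x];
        indexer[y, x] = new Vec3b((byte)(255 - p.Item0), (byte)(255 - p.Item1), (byte)(255 - p.Item2));
    }
}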
Example 2: Run
public void Run()
{
Console.WriteLine("===== FlannTest =====");
// creates data set
using (Mat features = new Mat(10000, 2, MatType.CV_32FC1))
{
Random rand = new Random();
for (int i = 0; i < features.Rows; i++)
{
features.Set<float>(i, 0, rand.Next(10000));
features.Set<float>(i, 1, rand.Next(10000));
}
// query
Point2f queryPoint = new Point2f(7777, 7777);
Mat queries = new Mat(1, 2, MatType.CV_32FC1);
queries.Set<float>(0, 0, queryPoint.X);
queries.Set<float>(0, 1, queryPoint.Y);
Console.WriteLine("query:({0}, {1})", queryPoint.X, queryPoint.Y);
Console.WriteLine("-----");
// knnSearch
using (Index nnIndex = new Index(features, new KDTreeIndexParams(4)))
{
const int Knn = 1;
int[] indices;
float[] dists;
nnIndex.KnnSearch(queries, out indices, out dists, Knn, new SearchParams(32));
for (int i = 0; i < Knn; i++)
{
int index = indices[i];
float dist = dists[i];
Point2f pt = new Point2f(features.Get<float>(index, 0), features.Get<float>(index, 1));
Console.Write("No.{0}\t", i);
Console.Write("index:{0}", index);
Console.Write(" distance:{0}", dist);
Console.Write(" data:({0}, {1})", pt.X, pt.Y);
Console.WriteLine();
}
}
}
Console.Read();
}
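Filling the feature matrix with one Set&lt;float&gt; call per element works, but OpenCvSharp's Mat constructor can also wrap an existing array in a single call, the way Example 8 passes its CityDistance array. A sketch under that assumption:
// Build the same 10000x2 feature matrix from a float[,] in one constructor call.
var data = new float[10000, 2];
var rand = new Random();
for (int i = 0; i < data.GetLength(0); i++)
{
    data[i, 0] = rand.Next(10000);
    data[i, 1] = rand.Next(10000);
}
using (var features = new Mat(10000, 2, MatType.CV_32FC1, data))
{
    // features is backed by the array, so no per-element Set calls are needed
}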
Example 3: PixelAccess
private void PixelAccess()
{
using (Mat mat = new Mat(128, 128, MatrixType.U8C1))
{
for (int y = 0; y < mat.Rows; y++)
{
for (int x = 0; x < mat.Cols; x++)
{
mat.Set<byte>(y, x, (byte)(y + x));
}
}
using (new CvWindow("PixelAccess", mat.ToIplImage()))
{
Cv.WaitKey();
}
}
}
Example 4: Texture2DToMat
// Converts Unity's Texture2D to an OpenCV Mat.
// Uses Adcock's parallel C# code to parallelize the conversion and speed it up;
// in my tests, execution dropped from 180 ms per frame to 70 ms with parallelization.
void Texture2DToMat(Texture2D tex, Mat m)
{
//float startTime = Time.realtimeSinceStartup;
Color[] pixels = tex.GetPixels();
// Parallel for loop
Parallel.For(0, imHeight, i =>
{
for (var j = 0; j < imWidth; j++)
{
var pixel = pixels[j + i * imWidth];
var col = new CvScalar
{
Val0 = (double)pixel.b * 255,
Val1 = (double)pixel.g * 255,
Val2 = (double)pixel.r * 255
};
m.Set(i, j, col);
}
});
// CvScalar col;
// Color pixel;
// int i, j;
//
// // Non-parallelized code
// for (i = 0; i < imHeight; i++) {
// for (j = 0; j < imWidth; j++) {
// pixel = pixels [j + i * imWidth];
//
// col = new CvScalar
// {
// Val0 = (double)pixel.b * 255,
// Val1 = (double)pixel.g * 255,
// Val2 = (double)pixel.r * 255
// };
//
// videoSourceImage.Set2D (i, j, col);
// }
//
// }
// Flip up/down dimension and right/left dimension
if (!FlipUpDownAxis && FlipLeftRightAxis)
m.Flip(FlipMode.XY);
else if (!FlipUpDownAxis)
m.Flip(FlipMode.X);
else if (FlipLeftRightAxis)
m.Flip(FlipMode.Y);
// Test difference in time between parallel and non-parallel code
//Debug.Log (Time.realtimeSinceStartup - startTime);
}
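The speedup here is plain .NET: Parallel.For splits the row loop across cores, and each iteration writes a disjoint set of pixels, so no locking is needed. A self-contained sketch of the same pattern outside Unity (illustrative, not the author's code):
using System.Threading.Tasks;
// Invert a packed 3-byte-per-pixel buffer row by row in parallel.
// Rows are disjoint slices of the array, so the loop body needs no locks.
static void InvertRows(byte[] pixels, int width, int height)
{
    Parallel.For(0, height, y =>
    {
        int rowStart = y * width * 3;
        for (int i = rowStart; i < rowStart + width * 3; i++)
        {
            pixels[i] = (byte)(255 - pixels[i]);
        }
    });
}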
Example 5: CvMainThread
/// <summary>
/// Worker thread for image processing.
/// </summary>
public void CvMainThread()
{
var faceCascade = new CascadeClassifier();
var eyesCascade = new CascadeClassifier();
faceCascade.load("haarcascade_frontalface_alt.xml");
eyesCascade.load("haarcascade_eye_tree_eyeglasses.xml");
var srcFrame = new Mat();
var dstFrame = new Mat();
var imgProc = new ImgProc();
_videoIo.StartCapture();
while (true)
{
_videoIo.GetFrame(srcFrame);
switch (_processingMethodIndex)
{
// passthrough
case 0:
break;
// gray
case 1:
imgProc.cvtColor(srcFrame, dstFrame, ColorConversionCodes.COLOR_RGBA2GRAY);
imgProc.cvtColor(dstFrame, srcFrame, ColorConversionCodes.COLOR_GRAY2RGB);
break;
// canny
case 3:
imgProc.cvtColor(srcFrame, dstFrame, cvRT.ColorConversionCodes.COLOR_RGBA2GRAY);
imgProc.GaussianBlur(dstFrame, dstFrame, new cvRT.Size(7, 7), 1.5, 1.5);
imgProc.Canny(dstFrame, dstFrame, 0, 30, 3);
imgProc.cvtColor(dstFrame, srcFrame, ColorConversionCodes.COLOR_GRAY2RGB);
break;
// contour
case 4:
{
var contours = new VectorOfVectorOfPoint();
var hierarchy = new VectorOfVec4i();
var color = new Scalar(255, 255, 255, 255);
imgProc.Canny(srcFrame, dstFrame, 100, 100 * 2, 3);
imgProc.FindContours(dstFrame, contours, hierarchy, ContourRetrievalAlgorithm.RETR_TREE, ContourApproximationModes.CHAIN_APPROX_SIMPLE, new Point(0, 0));
srcFrame.Set(new Scalar(0, 0, 0, 0));
for (var i = 0; i < contours.Count(); i++)
{
imgProc.DrawContours(srcFrame, contours, i, color, 2, 8, hierarchy, 0, new Point(0, 0));
}
break;
}
// face detect
case 5:
{
imgProc.cvtColor(srcFrame, dstFrame, ColorConversionCodes.COLOR_RGBA2GRAY);
imgProc.EqualizeHist(dstFrame, dstFrame);
// Faces in the frame.
var faces = new List<Rect>();
try
{
faces = new List<Rect>();
faceCascade.detectMultiScale(dstFrame, faces, 1.1, 2, (int)(0 | CV_HAAR.SCALE_IMAGE), new cvRT.Size(30, 30));
}
catch (Exception ex)
{
Debug.WriteLine("Exception {0}", ex.Message);
}
// For each face, detect the eyes
foreach (var face in faces)
{
// Draw ellipse for the face.
var faceCenter = new Point(face.X + face.Width / 2, face.Y + face.Height / 2);
imgProc.Ellipse(srcFrame, faceCenter, new cvRT.Size(face.Width / 2, face.Height / 2), 0, 0, 360, new Scalar(255, 0, 255, 0), 4, 8, 0);
// Detect the eyes for the face
var faceRoi = dstFrame.RectOfInterest(face);
var eyes = new List<Rect>();
eyesCascade.detectMultiScale(faceRoi, eyes, 1.1, 2, (int) (0 | CASCADE_FLAG.CASCADE_SCALE_IMAGE),new cvRT.Size(30, 30));
// Draw the eyes
foreach (var eye in eyes)
{
var eyeCenter = new Point(face.X + eye.X + eye.Width/2, face.Y + eye.Y + eye.Height/2);
var radius = (int) Math.Round((eye.Width + eye.Height) * 0.25);
imgProc.Circle(srcFrame, eyeCenter, radius, new Scalar(255, 0, 0, 0), 4, 8, 0);
}
//......... part of the code omitted here .........
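The cvRT types above are a WinRT wrapper mirroring OpenCV's C++ API. For comparison, the same face-then-eyes flow in stock OpenCvSharp might look like the sketch below; it assumes OpenCvSharp's CascadeClassifier.DetectMultiScale, which returns Rect[] (the flags enum name varies across OpenCvSharp versions), and a grayscale Mat named grayFrame:
var faceCascade = new CascadeClassifier("haarcascade_frontalface_alt.xml");
Rect[] faces = faceCascade.DetectMultiScale(grayFrame, 1.1, 2, HaarDetectionType.ScaleImage, new Size(30, 30));
foreach (var face in faces)
{
    // Restrict the eye search to the face region, as the example does with RectOfInterest.
    using (var faceRoi = new Mat(grayFrame, face))
    {
        // ... run the eye cascade on faceRoi ...
    }
}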
Example 6: example02
private static void example02()
{
var src = new Mat(@"..\..\Images\fruits.jpg", LoadMode.AnyDepth | LoadMode.AnyColor);
Cv2.ImShow("Source", src);
Cv2.WaitKey(1); // do events
Cv2.Blur(src, src, new Size(15, 15));
Cv2.ImShow("Blurred Image", src);
Cv2.WaitKey(1); // do events
// Reshape the MxN 3-channel image into a K-row matrix (K = M*N) so that
// each row is one pixel as a vector in 3-D BGR color space.
var columnVector = src.Reshape(cn: 3, rows: src.Rows * src.Cols);
// convert to floating point, it is a requirement of the k-means method of OpenCV.
var samples = new Mat();
columnVector.ConvertTo(samples, MatType.CV_32FC3);
for (var clustersCount = 2; clustersCount <= 8; clustersCount += 2)
{
var bestLabels = new Mat();
var centers = new Mat();
Cv2.Kmeans(
data: samples,
k: clustersCount,
bestLabels: bestLabels,
criteria:
new TermCriteria(type: CriteriaType.Epsilon | CriteriaType.Iteration, maxCount: 10, epsilon: 1.0),
attempts: 3,
flags: KMeansFlag.PpCenters,
centers: centers);
var clusteredImage = new Mat(src.Rows, src.Cols, src.Type());
for (var size = 0; size < src.Cols * src.Rows; size++)
{
var clusterIndex = bestLabels.At<int>(0, size);
var newPixel = new Vec3b
{
Item0 = (byte)(centers.At<float>(clusterIndex, 0)), // B
Item1 = (byte)(centers.At<float>(clusterIndex, 1)), // G
Item2 = (byte)(centers.At<float>(clusterIndex, 2)) // R
};
clusteredImage.Set(size / src.Cols, size % src.Cols, newPixel);
}
Cv2.ImShow(string.Format("Clustered Image [k:{0}]", clustersCount), clusteredImage);
Cv2.WaitKey(1); // do events
}
Cv2.WaitKey();
Cv2.DestroyAllWindows();
}
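Reshape here moves no pixel data: it reinterprets the MxN 3-channel image as one 3-channel row per pixel. A minimal check of those semantics under the same OpenCvSharp API:
using (var img = new Mat(4, 5, MatType.CV_8UC3, Scalar.All(0)))
{
    var col = img.Reshape(cn: 3, rows: 4 * 5);
    Console.WriteLine(col.Rows);       // 20: one row per pixel
    Console.WriteLine(col.Cols);       // 1: a single 3-channel column
    Console.WriteLine(col.Channels()); // 3
}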
Example 7: watershedExample
//......... part of the code omitted here .........
var key = Cv2.WaitKey(0);
if ((char)key == 27) // ESC
{
break;
}
if ((char)key == 'r') // Reset
{
markerMask = new Mat(markerMask.Size(), markerMask.Type(), s: Scalar.All(0));
src.CopyTo(srcCopy);
sourceWindow.Image = srcCopy;
}
if ((char)key == 'w' || (char)key == ' ') // Apply watershed
{
Point[][] contours; //vector<vector<Point>> contours;
HiearchyIndex[] hierarchyIndexes; //vector<Vec4i> hierarchy;
Cv2.FindContours(
markerMask,
out contours,
out hierarchyIndexes,
mode: ContourRetrieval.CComp,
method: ContourChain.ApproxSimple);
if (contours.Length == 0)
{
continue;
}
var markers = new Mat(markerMask.Size(), MatType.CV_32S, s: Scalar.All(0));
var componentCount = 0;
var contourIndex = 0;
while (contourIndex >= 0)
{
Cv2.DrawContours(
markers,
contours,
contourIndex,
color: Scalar.All(componentCount+1),
thickness: -1,
lineType: LineType.Link8,
hierarchy: hierarchyIndexes,
maxLevel: int.MaxValue);
componentCount++;
contourIndex = hierarchyIndexes[contourIndex].Next;
}
if (componentCount == 0)
{
continue;
}
var colorTable = new List<Vec3b>();
for (var i = 0; i < componentCount; i++)
{
var b = rnd.Next(0, 255); //Cv2.TheRNG().Uniform(0, 255);
var g = rnd.Next(0, 255); //Cv2.TheRNG().Uniform(0, 255);
var r = rnd.Next(0, 255); //Cv2.TheRNG().Uniform(0, 255);
colorTable.Add(new Vec3b((byte)b, (byte)g, (byte)r));
}
Cv2.Watershed(src, markers);
var watershedImage = new Mat(markers.Size(), MatType.CV_8UC3);
// paint the watershed image
for (var i = 0; i < markers.Rows; i++)
{
for (var j = 0; j < markers.Cols; j++)
{
var idx = markers.At<int>(i, j);
if (idx == -1)
{
watershedImage.Set(i, j, new Vec3b(255, 255, 255));
}
else if (idx <= 0 || idx > componentCount)
{
watershedImage.Set(i, j, new Vec3b(0, 0, 0));
}
else
{
watershedImage.Set(i, j, colorTable[idx - 1]);
}
}
}
watershedImage = watershedImage * 0.5 + imgGray * 0.5;
Cv2.ImShow("Watershed Transform", watershedImage);
Cv2.WaitKey(1); //do events
}
}
sourceWindow.Dispose();
Cv2.DestroyAllWindows();
src.Dispose();
}
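One detail worth noting: the blend watershedImage * 0.5 + imgGray * 0.5 builds a MatExpr that converts back to Mat on assignment. The same blend as an explicit call (a sketch; it assumes imgGray was converted to 3 channels in the omitted setup, as in OpenCV's original watershed demo):
Cv2.AddWeighted(watershedImage, 0.5, imgGray, 0.5, 0, watershedImage);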
Example 8: Run
/// <summary>
/// Classical Multidimensional Scaling
/// </summary>
public void Run()
{
// creates distance matrix
int size = CityDistance.GetLength(0);
Mat t = new Mat(size, size, MatType.CV_64FC1, CityDistance);
// adds Torgerson's additive constant to t
double torgerson = Torgerson(t);
t += torgerson;
// squares all elements of t
t = t.Mul(t);
// centering matrix G
Mat g = CenteringMatrix(size);
// calculates inner product matrix B
Mat b = g * t * g.T() * -0.5;
// calculates eigenvalues and eigenvectors of B
Mat values = new Mat();
Mat vectors = new Mat();
Cv2.Eigen(b, values, vectors);
for (int r = 0; r < values.Rows; r++)
{
if (values.Get<double>(r) < 0)
values.Set<double>(r, 0);
}
//Console.WriteLine(values.Dump());
// multiplies sqrt(eigenvalue) by eigenvector
Mat result = vectors.RowRange(0, 2);
{
var at = result.GetGenericIndexer<double>();
for (int r = 0; r < result.Rows; r++)
{
for (int c = 0; c < result.Cols; c++)
{
at[r, c] *= Math.Sqrt(values.Get<double>(r));
}
}
}
// scaling
Cv2.Normalize(result, result, 0, 800, NormType.MinMax);
// opens a window
using (Mat img = Mat.Zeros(600, 800, MatType.CV_8UC3))
using (Window window = new Window("City Location Estimation"))
{
var at = result.GetGenericIndexer<double>();
for (int c = 0; c < size; c++)
{
double x = at[0, c];
double y = at[1, c];
x = x * 0.7 + img.Width * 0.1;
y = y * 0.7 + img.Height * 0.1;
img.Circle((int)x, (int)y, 5, CvColor.Red, -1);
Point textPos = new Point(x + 5, y + 10);
img.PutText(CityNames[c], textPos, FontFace.HersheySimplex, 0.5, CvColor.White);
}
window.Image = img;
Cv2.WaitKey();
}
}
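For reference, the math this example implements is textbook classical MDS (stated here independently of the sample; D is the n x n city-distance matrix):
G = I_n - \frac{1}{n}\mathbf{1}\mathbf{1}^{\top}                          % centering matrix (CenteringMatrix above)
B = -\frac{1}{2}\, G\, D^{(2)}\, G^{\top}                                 % D^{(2)} squares D elementwise, i.e. t.Mul(t)
B = V \Lambda V^{\top}, \quad \lambda_i \leftarrow \max(\lambda_i, 0)     % Cv2.Eigen plus the clamping loop
X = V_2\, \Lambda_2^{1/2}                                                 % top two eigenvectors scaled by sqrt(eigenvalue)
The last line is exactly what the generic-indexer loop computes before the MinMax normalization.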
Example 9: Update
// Update is called once per frame
void Update()
{
if (runCalibration)
{
if (Input.GetMouseButton(0) || Input.GetMouseButton(1) || Input.GetMouseButton(2))
{
if (Input.GetMouseButton(0))
{
//Debug.Log(Input.mousePosition);
GameObject bc = GameObject.FindGameObjectWithTag("BlueCross");
bc.transform.localPosition = new Vector3(Map(Input.mousePosition.x, Screen.width / 2.0f - 320.0f, Screen.width / 2.0f + 320.0f, 0.0f, 640.0f) - 320.0f, -Map(Input.mousePosition.y, Screen.height / 2.0f + 240.0f, Screen.height / 2.0f - 240.0f, 0.0f, 480.0f) + 240.0f, 0.0f);
}
else if (Input.GetMouseButton(1))
{
GameObject yc = GameObject.FindGameObjectWithTag("YellowCross");
yc.transform.localPosition = new Vector3(Map(Input.mousePosition.x, Screen.width / 2.0f - 320.0f, Screen.width / 2.0f + 320.0f, 0.0f, 640.0f) - 320.0f, -Map(Input.mousePosition.y, Screen.height / 2.0f + 240.0f, Screen.height / 2.0f - 240.0f, 0.0f, 480.0f) + 240.0f, 0.0f);
nextBt = true;
}
else if (Input.GetMouseButton(2) && nextBt == true)
{
if (addKinectPoint())
{
addProjectorPoint();
Debug.Log("Point Added! -> (" + kinectCoordinates.Count + ") ");
nextBt = false;
}
else
{
Debug.Log("Kinect Point out of bounds!");
}
}
}
if (Input.GetKeyDown(KeyCode.A))
{
//PointerEventData pointer = new PointerEventData(EventSystem.current);
//pointer.position = Input.mousePosition;
//List<RaycastResult> raycastResults = new List<RaycastResult>();
//EventSystem.current.RaycastAll(pointer, raycastResults);
if (addKinectPoint())
{
addProjectorPoint();
Debug.Log("Point Added! -> " + kinectCoordinates.Count);
}
else
{
Debug.Log("Kinect Point out of bounds!");
}
}
if (Input.GetKeyDown(KeyCode.S))
{
if (kinectCoordinates.Count >= 8)
{
Debug.Log("Starting Calibration...");
findTransformation(kinectCoordinates, projectorCoordinates);
foundResult = true;
}
else
{
Debug.Log("Not Enough Points!");
}
}
if (Input.GetKeyDown(KeyCode.D) && foundResult == true)
{
showResult = !showResult;
if (!showResult)
{
screenTx.SetPixels32(resetPixels);
screenTx.Apply(false);
}
Debug.Log("Show result toggle: " + showResult);
}
if (Input.GetKeyDown(KeyCode.F) && foundResult == true)
{
using (CvFileStorage fs = new CvFileStorage("KinectCalibration.xml", null, FileStorageMode.Write))
{
string nodeName = "calibResult";
fs.Write(nodeName, result.ToCvMat());
nodeName = "kinectPoints";
Mat kinectPts = new Mat(1, kinectCoordinates.Count, MatType.CV_64FC3);
for (int i = 0; i < kinectCoordinates.Count; i++)
{
kinectPts.Set<CvPoint3D64f>(0, i, (CvPoint3D64f)kinectCoordinates[i]);
}
fs.Write(nodeName, kinectPts.ToCvMat());
nodeName = "projectorPoints";
Mat projPts = new Mat(1, projectorCoordinates.Count, MatType.CV_64FC2);
for (int i = 0; i < projectorCoordinates.Count; i++)
{
projPts.Set<CvPoint2D64f>(0, i, (CvPoint2D64f)projectorCoordinates[i]);
}
fs.Write(nodeName, projPts.ToCvMat());
}
Debug.Log("Calib Data saved!");
}
if (Input.GetKeyDown(KeyCode.Q))
{
delLastPoints();
//......... part of the code omitted here .........
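The Map helper called throughout this example is not shown in the excerpt; a plausible linear range-remap implementation (hypothetical reconstruction, the original is omitted):
// Hypothetical Map helper: linearly remaps v from [inMin, inMax] to [outMin, outMax].
private static float Map(float v, float inMin, float inMax, float outMin, float outMax)
{
    return outMin + (v - inMin) * (outMax - outMin) / (inMax - inMin);
}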
Example 10: prepareMatrices
private void prepareMatrices(ArrayList kinectCoors, ArrayList projectorCoors)
{
foundCoordinatesMatrix = new Mat(projectorCoors.Count * 2, 11, OpenCvSharp.CPlusPlus.MatType.CV_64FC1);
rightSideMatrix = new Mat(projectorCoors.Count * 2, 1, OpenCvSharp.CPlusPlus.MatType.CV_64FC1);
for (int i = 0; i < projectorCoors.Count * 2; i = i + 2)
{
OpenCvSharp.CvPoint3D64f kc = (OpenCvSharp.CvPoint3D64f)kinectCoors[i / 2];
OpenCvSharp.CvPoint2D64f projC = (OpenCvSharp.CvPoint2D64f)projectorCoors[i / 2];
foundCoordinatesMatrix.Set(i, 0, kc.X);
foundCoordinatesMatrix.Set(i, 1, kc.Y);
foundCoordinatesMatrix.Set(i, 2, kc.Z);
foundCoordinatesMatrix.Set(i, 3, 1);
foundCoordinatesMatrix.Set(i, 4, 0);
foundCoordinatesMatrix.Set(i, 5, 0);
foundCoordinatesMatrix.Set(i, 6, 0);
foundCoordinatesMatrix.Set(i, 7, 0);
foundCoordinatesMatrix.Set(i, 8, -projC.X * kc.X);
foundCoordinatesMatrix.Set(i, 9, -projC.X * kc.Y);
foundCoordinatesMatrix.Set(i, 10, -projC.X * kc.Z);
rightSideMatrix.Set(i, 0, projC.X);
foundCoordinatesMatrix.Set(i + 1, 0, 0);
foundCoordinatesMatrix.Set(i + 1, 1, 0);
foundCoordinatesMatrix.Set(i + 1, 2, 0);
foundCoordinatesMatrix.Set(i + 1, 3, 0);
foundCoordinatesMatrix.Set(i + 1, 4, kc.X);
foundCoordinatesMatrix.Set(i + 1, 5, kc.Y);
foundCoordinatesMatrix.Set(i + 1, 6, kc.Z);
foundCoordinatesMatrix.Set(i + 1, 7, 1);
foundCoordinatesMatrix.Set(i + 1, 8, -projC.Y * kc.X);
foundCoordinatesMatrix.Set(i + 1, 9, -projC.Y * kc.Y);
foundCoordinatesMatrix.Set(i + 1, 10, -projC.Y * kc.Z);
rightSideMatrix.Set(i + 1, 0, projC.Y);
}
}
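These row pairs are the standard DLT (direct linear transform) setup for a 3D-to-2D projective mapping with eleven unknowns p_1..p_{11} and the twelfth entry fixed to 1 (standard derivation, not taken from this code). Writing the projection as
x = \frac{p_1 X + p_2 Y + p_3 Z + p_4}{p_9 X + p_{10} Y + p_{11} Z + 1}, \qquad
y = \frac{p_5 X + p_6 Y + p_7 Z + p_8}{p_9 X + p_{10} Y + p_{11} Z + 1}
and clearing denominators gives one equation per row of foundCoordinatesMatrix:
(X,\, Y,\, Z,\, 1,\, 0,\, 0,\, 0,\, 0,\, -xX,\, -xY,\, -xZ)\,\mathbf{p} = x
(0,\, 0,\, 0,\, 0,\, X,\, Y,\, Z,\, 1,\, -yX,\, -yY,\, -yZ)\,\mathbf{p} = y
With eleven unknowns, six point correspondences (twelve equations) are the minimum for a least-squares solve, which is why Example 9 insists on at least eight points before calibrating.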
Example 11: OtherOperation
public static bool OtherOperation(System.Drawing.Rectangle frogRect)
{
if (false)
{
var mat = Cv2.GetRotationMatrix2D(new Point2f(10, 10), 45, 1);
Console.WriteLine(mat);
//Console.WriteLine(mat.Dims);
Console.WriteLine(mat.Size());
var p = new Mat(3, 1, MatType.CV_64FC1);
p.Set<double>(0, 0, 0);
p.Set<double>(1, 0, 0);
p.Set<double>(2, 0, 1);
Console.WriteLine(p);
//var q = mat.Cross(p);
var q = (mat * p).ToMat();
Console.WriteLine(q);
Console.WriteLine(q.Get<double>(0, 0));
Console.WriteLine(q.Get<double>(1, 0));
return true;
}
if (false)
{
var dir = "Example2/";
var src = new Mat("0.bmp");
var frog = src.Cut(new Rect(frogRect.X, frogRect.Y, frogRect.Width, frogRect.Height));
frog.ImWrite(dir + "/frog.bmp");
var frog_hsv_channels = frog.CvtColor(ColorConversion.RgbToHsv).Split();
for (var i = 0; i < 3; ++i)
frog_hsv_channels[i].ImWrite(dir + string.Format("frog {0}.png", hsv_names[i]));
frog_hsv_channels[1].Threshold(210, 255, ThresholdType.Binary).ImWrite(dir + "frog s th.png");
return true;
}
if (false)
{
MakeImagesForArticle();
return true;
}
if (false)
{
DetectBallsForArticle();
return true;
}
if (false)
{
var images = Enumerable.Range(0, 3).Select(i => string.Format("{0}.bmp", i)).Select(name => LoadBitmap(name).ToImage()).ToArray();
//var zeroImage = new int[images[0].GetLength(0), images[0].GetLength(1)];
var diff0_1 = images[0].Diff(images[1], 0, 0, 10);
var diff1_2 = images[1].Diff(images[2], 0, 0, 10);
diff0_1.ToBitmap().Save("diff s 0 1.bmp");
diff1_2.ToBitmap().Save("diff s 1 2.bmp");
diff0_1.And(diff1_2).ToBitmap().Save("diff s.bmp");
foreach (var t in new[] { 0, 10, 20, 50, 100 })
{
for (var dy = 0; dy < 10; ++dy)
{
images[1].Diff(images[0], 0, dy, t).ToBitmap().Save(string.Format("diff 0 {0} {1}.bmp", dy, t));
}
}
//foreach (var t in new[] { 0, 10, 20, 50, 100, 150 })
//{
// images[1].Diff(zeroImage, 0, 0, t).ToBitmap().Save(string.Format("diff-z {0}.bmp", t));
//}
return true;
}
return false;
}
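On the first (disabled) block: Cv2.GetRotationMatrix2D(center, angle, scale) returns OpenCV's standard 2 x 3 affine matrix
M = \begin{pmatrix} \alpha & \beta & (1-\alpha)c_x - \beta c_y \\ -\beta & \alpha & \beta c_x + (1-\alpha)c_y \end{pmatrix},
\qquad \alpha = s\cos\theta, \; \beta = s\sin\theta
so (mat * p).ToMat() with the homogeneous point p = (x, y, 1)^{\top} applies the rotation and scale about the given center; with p = (0, 0, 1)^{\top}, the product simply reads off the translation column of M.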