This article collects typical usage examples of the C# method Emgu.CV.Mat.ToImage: what the method does, how to call it, and what working code looks like. You can also explore further usage examples of the containing class, Emgu.CV.Mat.
The section below shows 15 code examples of Mat.ToImage, sorted by popularity by default.
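Before the full examples, here is a minimal, self-contained sketch of the basic conversion. It is not taken from any example below; the frame size, colors, and the Emgu CV 3.x namespaces and Mat constructor overload are assumptions for illustration. Mat.ToImage&lt;TColor, TDepth&gt; copies an Emgu.CV.Mat into a typed Image&lt;TColor, TDepth&gt;, which exposes the drawing, conversion, and System.Drawing interop helpers used throughout the examples.
using System.Drawing;
using Emgu.CV;
using Emgu.CV.CvEnum;
using Emgu.CV.Structure;

class ToImageSketch
{
    static void Main()
    {
        // Allocate an 8-bit, 3-channel frame as a stand-in for a camera capture (placeholder size).
        using (Mat mat = new Mat(480, 640, DepthType.Cv8U, 3))
        // ToImage<TColor, TDepth> copies the Mat into a typed Image<TColor, TDepth>.
        using (Image<Bgr, byte> color = mat.ToImage<Bgr, byte>())
        // The same Mat can also be converted straight to grayscale, as several examples below do.
        using (Image<Gray, byte> gray = mat.ToImage<Gray, byte>())
        {
            // The typed image interoperates with System.Drawing...
            Bitmap bitmap = color.ToBitmap();
            bitmap.Dispose();
            // ...and exposes its underlying matrix again via the Mat property (compare Examples 7 and 13).
            Mat roundTripped = color.Mat;
        }
    }
}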
Example 1: ProcessFrame
private void ProcessFrame(object sender, EventArgs arg)
{
Mat frame = new Mat(); //Matrix to save the picture
capture.Retrieve(frame, 0); //retrieve the picture into the matrix
Image<Bgr, byte> image = frame.ToImage<Bgr, byte>();
FaceNo = 0;
if (frame != null)
{
Image<Gray, byte> grayFrame = frame.ToImage<Gray, byte>(); // convert the frame to grayscale for detection
faces = cascade.DetectMultiScale(grayFrame, 1.1, 2, new Size(30, 30));
Bitmap BitmapInput = grayFrame.ToBitmap();
Bitmap ExtractedFace;
Graphics FaceCanvas;
//countTable.Text = faces.Count().ToString();
if (faces.Count() > 0)
{
foreach (var face in faces)
{
image.Draw(face, new Bgr(Color.Blue), 1); // draw rectangles in the picture
ExtractedFace = new Bitmap(face.Width, face.Height);
FaceCanvas = Graphics.FromImage(ExtractedFace);
FaceCanvas.DrawImage(BitmapInput, 0, 0, face, GraphicsUnit.Pixel);
ExtFaces.Add(ExtractedFace);
FaceNo++;
}
}
imageBox1.Image = image; // display the image in the imageBox
}
}
Example 2: ProcessFrame
void ProcessFrame(object sender, EventArgs e)
{
Mat frame = _cameraCapture.QueryFrame();
Mat smoothedFrame = new Mat();
CvInvoke.GaussianBlur(frame, smoothedFrame, new Size(3, 3), 1); //filter out noise
//frame._SmoothGaussian(3);
#region use the BG/FG detector to find the foreground mask
Mat forgroundMask = new Mat();
_fgDetector.Apply(smoothedFrame, forgroundMask);
#endregion
CvBlobs blobs = new CvBlobs();
_blobDetector.Detect(forgroundMask.ToImage<Gray, byte>(), blobs);
blobs.FilterByArea(100, int.MaxValue);
float scale = (frame.Width + frame.Height) / 2.0f;
_tracker.Update(blobs, 0.01 * scale, 5, 5);
foreach (var pair in _tracker)
{
CvTrack b = pair.Value;
CvInvoke.Rectangle(frame, b.BoundingBox, new MCvScalar(255.0, 255.0, 255.0), 2);
CvInvoke.PutText(frame, b.Id.ToString(), new Point((int)Math.Round(b.Centroid.X), (int)Math.Round(b.Centroid.Y)), FontFace.HersheyPlain, 1.0, new MCvScalar(255.0, 255.0, 255.0));
}
imageBox1.Image = frame;
imageBox2.Image = forgroundMask;
}
Example 3: ImageGrabbedHandler
public override void ImageGrabbedHandler(object sender, EventArgs e)
{
if (_transmitTask == null || _transmitTask.IsCompleted)
{
using (var matCaptured = new Mat())
{
CameraCapture.Retrieve(matCaptured);
var bgrImage = matCaptured.ToImage<Bgr, byte>();
WriteText(bgrImage, 30, DateTime.Now.ToString("HH:mm:ss tt"));
imageBoxCaptured.Image = bgrImage;
IImageTransmitter transmitter = null;
if (radBsonImage.Checked)
{
transmitter = _imageTransmitter;
}
if (radBsonJpeg.Checked)
{
transmitter = _jpegTransmitter;
}
if (transmitter != null)
{
_transmitTask = transmitter.Transmit(bgrImage);
}
}
}
}
Example 4: ImageGrabbedHandler
public override void ImageGrabbedHandler(object sender, EventArgs e)
{
using (var matCaptured = new Mat())
{
CameraCapture.Retrieve(matCaptured);
var grayImage = matCaptured.ToImage<Gray, byte>();
#region circle detection
var watch = Stopwatch.StartNew();
double cannyThreshold = 180.0;
double circleAccumulatorThreshold = 120;
CircleF[] circles = CvInvoke.HoughCircles(
grayImage
, HoughType.Gradient
, 2.0
, 40.0
, cannyThreshold
, circleAccumulatorThreshold
, 5);
watch.Stop();
NotifyStatus("{0} Hough circles in {1}; ", circles.Length, watch.Elapsed.ToHumanReadable());
#endregion
#region draw circles
var circleImage = matCaptured.ToImage<Bgr, byte>();
foreach (CircleF circle in circles)
{
circleImage.Draw(circle, new Bgr(Color.Green), 10);
}
#endregion
imageBoxCaptured.Image = circleImage;
}
}
Example 5: ImageGrabbedHandler
public override void ImageGrabbedHandler(object sender, EventArgs e)
{
using (var frame = new Mat())
{
CameraCapture.Retrieve(frame);
var input = new MotionDetectorInput();
var inputImage = frame.ToImage<Bgr,byte>();
input.Captured = frame;
input.Settings = _currentSettings;
var output = _motionDetector.Process(input);
var bgrRed = new Bgr(Color.Red);
var bgrBlue = new Bgr(Color.Blue);
foreach (var motionRegion in output.MotionSections)
{
var text = string.Format("A={0}, M={1}", motionRegion.Area, motionRegion.PixelsInMotionCount);
inputImage.Draw(motionRegion.Region, bgrRed);
if (chkRectangleStats.Checked)
{
inputImage.Draw(text, motionRegion.Region.Location, Emgu.CV.CvEnum.FontFace.HersheyComplexSmall, .8, bgrRed);
}
DrawMotion(output.MotionImage, motionRegion.Region, motionRegion.Angle, bgrRed);
}
DrawMotion(output.MotionImage, new Rectangle(Point.Empty, output.MotionImage.Size), output.OverallAngle, new Bgr(Color.Green));
if (output.BiggestMotion != null)
{
var motion = output.BiggestMotion;
inputImage.Draw(motion.Region, bgrBlue);
}
imageBoxCaptured.Image = inputImage;
imageBoxMasked.Image = output.ForegroundImage;
imageBoxMotion.Image = output.MotionImage;
NotifyStatus(
"Motion detection took {0}. {1} motions, {2} over all pixel count"
, output.Elapsed.ToHumanReadable()
, output.MotionSections.Count
, output.OverallMotionPixelCount);
}
}
Example 6: ImageGrabbedHandler
public override void ImageGrabbedHandler(object sender, EventArgs e)
{
if (_detector == null)
{
return;
}
using (var matCaptured = new Mat())
{
CameraCapture.Retrieve(matCaptured);
var input = new CascadeDetectorInput {Captured = matCaptured};
input.ClassifierParams = _classiferParams;
var result = _detector.Process(input);
var image = matCaptured.ToImage<Bgr, byte>();
foreach (Rectangle item in result.Objects)
{
image.Draw(item, new Bgr(Color.Blue), 2);
}
imageBoxCaptured.Image = image;
}
}
Example 7: ProcessFrame
private void ProcessFrame(object sender, EventArgs arg)
{
Mat frame = new Mat();
Image<Bgr, Byte> frame1;
frameNum = _capture.GetCaptureProperty(Emgu.CV.CvEnum.CapProp.PosFrames);
_capture.Retrieve(frame, 0);
frame1 = frame.ToImage<Bgr, Byte>();
frame1 = frame1.Resize(.5, Emgu.CV.CvEnum.Inter.Cubic);
frame = frame1.Mat;
//MessageBox.Show(_capture.Height + " " + _capture.Width + "\n" + frame1.Height + " " + frame1.Width);
if (frame != null)
{
using (UMat ugray = new UMat())
{
CvInvoke.CvtColor(frame, ugray, Emgu.CV.CvEnum.ColorConversion.Bgr2Gray);
CvInvoke.EqualizeHist(ugray, ugray);
Rectangle[] breastDetected = cascadeBreast.DetectMultiScale(
ugray,
1.1,
30,
new Size(20, 20));
/*Rectangle[] pussyDetected = cascadePuss.DetectMultiScale(
ugray,
1.1,
30,
new Size(20, 20));
Rectangle[] dickDetected = cascadePen.DetectMultiScale(
ugray,
1.1,
35,
new Size(20, 20));*/
progressBar1.Invoke(new MethodInvoker(delegate { progressBar1.Increment(1); label1.Text = frameNum.ToString();}));
}
}
}
Example 8: detect
//the detect method runs on its own thread
public void detect()
{
while (!Stop)
{
//if the frame image is updated
if (updated)
{
//not safe to set the frame image
Finished = false;
using (Mat bgImage = _image.Clone())
using (Mat mask = new Mat())
{
Finished = true;
updated = false;
//get mask
_foregroundDetector.Apply(bgImage, mask, -1);
//set blob image
_bImage.setBlobImage(mask.ToImage<Gray, Byte>().Clone());
}
}
}
}
Example 9: Process
public static ProcessResult Process(Mat image, out SessionPoint[] points)
{
IntPtr outPtr;
int outSize;
int result;
using (var i = image.ToImage<Bgr, byte>()) {
result = Process(i.Ptr, out outPtr, out outSize);
}
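// outPtr returned by the native Process call points at outSize unmanaged structs; Size and Type
// (defined elsewhere in this class) are assumed to be the marshaled byte size and managed type of SessionPoint.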
points = new SessionPoint[outSize];
for (var i = 0; i < outSize; i++) {
points[i] = (SessionPoint)Marshal.PtrToStructure(new IntPtr(outPtr.ToInt64() + (i * Size)), Type);
}
switch (result) {
case 1:
return ProcessResult.EmptyImage;
case 2:
return ProcessResult.NotDetected;
case 3:
return ProcessResult.Success;
default:
return ProcessResult.Error;
}
}
Example 10: ProcessFrame
private void ProcessFrame(object sender, EventArgs arg)
{
try
{
if (capturecam != null)
{
Mat frame = new Mat();
capturecam.Retrieve(frame, 0);
if (frame != null)
{
_captureInProgress = true;
Mat cannyFrame = new Mat();
CvInvoke.Canny(frame, cannyFrame, 100, 60);
Image<Bgr, Byte> frameCanny = cannyFrame.ToImage<Bgr, Byte>();
Image<Bgr, Byte> frameBgr = frame.ToImage<Bgr, Byte>();
Image<Gray, Byte> frameGray = frame.ToImage<Gray, Byte>();
//CvInvoke.EqualizeHist(frameGray, frameGray); // normalizes brightness and increases contrast of the image
Rectangle[] facesDetected = _cascadeClassifierFace.DetectMultiScale(frameGray, 1.1, 10, new Size(20, 20)); //Size.Empty); //the actual face detection happens here
//faces.AddRange(facesDetected);
foreach (Rectangle f in facesDetected)
{
//frameBgr.Draw(f, new Bgr(Color.OrangeRed), 2); //the detected face(s) is highlighted here using a box that is drawn around it/them
CvInvoke.Rectangle(frameBgr, f, new Bgr(Color.OrangeRed).MCvScalar, 2);
//Console.WriteLine("Rect : " + f);
_centerFace.X = (int)(f.X + f.Width * 0.5);
_centerFace.Y = (int)(f.Y + f.Height * 0.5);
_trajFace.Add(_centerFace);
textBoxUpdate(textBoxPosX, "X : " + _centerFace.X.ToString());
textBoxUpdate(textBoxPosY, "Y : " + _centerFace.Y.ToString());
CvInvoke.Circle(frameBgr, _centerFace, 1, new Bgr(Color.OrangeRed).MCvScalar, 5, Emgu.CV.CvEnum.LineType.AntiAlias, 0);
//centerRect.
//Get the region of interest on the faces
using (UMat faceRegion = new UMat(frameGray.ToUMat(), f))
{
Rectangle[] eyesDetected = _cascadeClassifierEye.DetectMultiScale(faceRegion, 1.1, 10, new Size(20, 20));
foreach (Rectangle e in eyesDetected)
{
Rectangle eyeRect = e;
eyeRect.Offset(f.X, f.Y);
//eyes.Add(eyeRect);
//frameBgr.Draw(eyeRect, new Bgr(Color.Red), 2); //the eyes face(s) is highlighted here using a box that is drawn around it/them
//CvInvoke.Rectangle(frameBgr, eyeRect, new Bgr(Color.Blue).MCvScalar, 2);
_centerEye.X = (int)(eyeRect.X + eyeRect.Width * 0.5);
_centerEye.Y = (int)(eyeRect.Y + eyeRect.Height * 0.5);
CvInvoke.Circle(frameBgr, _centerEye, 1, new Bgr(Color.Blue).MCvScalar, 5, Emgu.CV.CvEnum.LineType.AntiAlias, 0);
LineSegment2D _lindeEye = new LineSegment2D(_centerEye, _centerEyePrev);
if ((_firstLine) && (_lindeEye.P1 != _lindeEye.P2)) CvInvoke.Line(frameBgr, _centerEye, _centerEyePrev, new Bgr(Color.Blue).MCvScalar, 1, Emgu.CV.CvEnum.LineType.AntiAlias, 0);
_centerEyePrev = _centerEye;
_firstLine = true;
if ((_lindeEye.P1 != _lindeEye.P2) && (_lindeEye.P1.X != 0) && (_lindeEye.P2.X != 0) && (_lindeEye.P1.Y != 0) && (_lindeEye.P2.Y != 0))
{
//double angle = (Math.Cos((_lindeRef.P2.X - _lindeRef.P1.X) / ((_lindeEye.P2.X - _lindeEye.P1.X)))*180) / Math.PI;
double angle = (Math.Atan2(Math.Abs(_lindeEye.P1.Y - _lindeEye.P2.Y), (_lindeEye.P1.X - _lindeEye.P2.X)) * 180 / Math.PI);
//Console.WriteLine("Angle : " + angle);
if (angle != 1)
{
//angle -= 57.0;
//Console.WriteLine("Angle : " + angle);
//if ((Math.Abs(angle) > 15) && (Math.Abs(angle) < 50))
if (angle < 90)
{
textBoxUpdate(textBoxAngle, Math.Round(angle).ToString() + "° ");
//frameBgr = frameBgr.Rotate(angle, new Bgr(Color.Gray), false);
}
else if (angle > 90)
{
textBoxUpdate(textBoxAngle, (180 - Math.Round(angle)).ToString() + "° ");
//frameBgr = frameBgr.Rotate((180-angle), new Bgr(Color.Gray), false);
}
}
}
/*using (Image<Bgr, Byte> drawing = new Image<Bgr, Byte>(imageBoxDraw.Width, imageBoxDraw.Height))
//......... part of the code is omitted here .........
Example 11: ImageGrabbedHandler
public override void ImageGrabbedHandler(object sender, EventArgs e)
{
if (_calibrationInProgress)
{
return;
}
using (var matCaptured = new Mat())
{
CameraCapture.Retrieve(matCaptured);
var statusAccumulation = new StringBuilder();
var bgrImage = matCaptured.ToImage<Bgr, byte>();
DrawReticle(bgrImage, _centre, Color.Red);
if (UserReticle != null)
{
DrawReticle(bgrImage, UserReticle.Value, Color.Green);
}
var input = new CameraProcessInput();
input.SetCapturedImage = true;
input.Captured = matCaptured;
CameraPanTiltProcessOutput output = null;
if (chkBoxColourTracking.Checked)
{
var result = _colourTrackingController.Process(input);
output = result;
if (result.IsDetected)
{
DrawReticle(bgrImage, result.Target, Color.Yellow);
}
imageBoxFiltered.Image = result.ThresholdImage;
statusAccumulation.AppendFormat("{0} moment area", result.MomentArea);
// WriteText(bgrImage, _captureConfig.Resolution.Height - 10, "Colour Tracking");
}
if (chkBoxFaceTracker.Checked)
{
// WriteText(bgrImage, _captureConfig.Resolution.Height - 50, "Face Tracking");
var result = _faceTrackingController.Process(input);
output = result;
if (result.IsDetected)
{
foreach (var face in result.Faces)
{
bgrImage.Draw(face.Region, new Bgr(Color.Yellow), 2);
}
DrawReticle(bgrImage, result.Target, Color.Yellow);
}
statusAccumulation.AppendFormat("{0} faces detected", result.Faces.Count);
}
if (chkBoxMotionTracking.Checked)
{
// WriteText(bgrImage, _captureConfig.Resolution.Height - 75, "Motion Tracking");
var result = _motionTrackingController.Process(input);
output = result;
if (result.IsDetected)
{
foreach (var motionSection in result.MotionSections)
{
bgrImage.Draw(motionSection.Region, new Bgr(Color.Green));
}
if (result.TargetedMotion != null)
{
bgrImage.Draw(result.TargetedMotion.Region, new Bgr(Color.Red), 2);
}
}
statusAccumulation.AppendFormat("{0} motions", result.MotionSections.Count);
imageBoxFiltered.Image = result.ForegroundImage;
}
if (chkMultimode.Checked)
{
var multimodeOutput = _multimodePanTiltController.Process(input);
output = multimodeOutput;
if (output.Target != Point.Empty)
{
DrawReticle(bgrImage, output.Target, Color.Yellow);
}
}
if (output != null)
{
if (output.IsServoInMotion)
{
statusAccumulation.AppendFormat(", Waiting for servo");
}
else
{
statusAccumulation.AppendFormat(", tracking took {0}", output.Elapsed.ToHumanReadable());
}
//......... part of the code is omitted here .........
Example 12: ShowFromCam
void ShowFromCam(object sender, EventArgs e)
{
Mat frameMat = new Mat();
try
{
if (!cap.Retrieve(frameMat, 0))
return;
//image = null;
//while (image == null) image = cap;
Image<Bgr, byte> returnimage = frameMat.ToImage<Bgr, byte>();
pictureBox1.Image = returnimage.ToBitmap();
//Image<Bgr, byte> frame = frameMat.ToImage<Bgr, byte>();
}
catch
{
}
// Get image.
}
Example 13: ProcessFrame
private void ProcessFrame(object sender, EventArgs arg)
{
Mat frame = new Mat();
frameNum = _capture.GetCaptureProperty(Emgu.CV.CvEnum.CapProp.PosFrames);
Image<Bgr, Byte> frame1;
int breastCount = 0;
int pussyCount = 0;
int dickCount = 0;
string temp = "";
_capture.Retrieve(frame, 0);
frame1 = frame.ToImage<Bgr, Byte>();
frame1 = frame1.Resize(_rescale, Emgu.CV.CvEnum.Inter.Cubic);
frame = frame1.Mat;
//MessageBox.Show(_nn.ToString());
if (frame != null && frameCtr == _frameskip)
{
frameCtr = 0;
using(UMat ugray = new UMat())
{
CvInvoke.CvtColor(frame, ugray, Emgu.CV.CvEnum.ColorConversion.Bgr2Gray);
CvInvoke.EqualizeHist(ugray, ugray);
Rectangle[] breastDetected = cascadeBreast.DetectMultiScale(
ugray,
1.1,
_nn,
new Size(20, 20));
Rectangle[] pussyDetected = cascadePuss.DetectMultiScale(
ugray,
1.1,
_nn,
new Size(20, 20));
Rectangle[] dickDetected = cascadePen.DetectMultiScale(
ugray,
1.1,
50,
new Size(20, 20));
foreach (Rectangle b in breastDetected)
{
CvInvoke.Rectangle(frame, b, new Bgr(Color.Red).MCvScalar, 2);
}
foreach (Rectangle b in pussyDetected)
{
CvInvoke.Rectangle(frame, b, new Bgr(Color.Blue).MCvScalar, 2);
}
foreach (Rectangle b in dickDetected)
{
CvInvoke.Rectangle(frame, b, new Bgr(Color.Green).MCvScalar, 2);
}
breastCount = breastDetected.Length;
pussyCount = pussyDetected.Length;
dickCount = dickDetected.Length;
totalBreastCount += breastCount;
totalPussyCount += pussyCount;
totalDickCount += dickCount;
if ((breastCount > 0 || pussyCount > 0 || dickCount > 0) && _pauseAtDetection)
{
_capture.Pause();
playToggle.Invoke(new MethodInvoker(delegate { playToggle.Text = "Start"; }));
_captureInProgress = false;
if (breastCount > 0)
{
temp += ""+ breastCount + "breast(s) found\n";
}
if (pussyCount > 0)
{
temp += ""+ pussyCount+"pussy(s) found\n";
}
if (dickCount > 0)
{
temp += "" + dickCount + "dick(s) found\n";
}
MessageBox.Show(temp);
}
}
}
if (_frameskip > 0)
{
frameCtr++;
}
label4.Invoke(new MethodInvoker(delegate { label4.Text = frameNum.ToString(); logger(frameNum, breastCount, pussyCount,dickCount); totalBreast.Text = totalBreastCount.ToString(); totalF.Text = totalPussyCount.ToString(); totalG.Text = totalDickCount.ToString(); }));
imgBox.Image = frame;
}
Example 14: ProcessFrame
public void ProcessFrame(object sender, EventArgs arg)
{
_capture.FlipHorizontal = true; // flip the image about the Y axis
Mat imageMatrix = new Mat(); //matrix we grab from the camera stream
_capture.Retrieve(imageMatrix, 0);
Image<Bgr, byte> imageFrameBGR = imageMatrix.ToImage<Bgr, byte>();
Image<Gray, byte> imageFrameGray = RGBFilter(imageFrameBGR,
red_color_min, red_color_max,//threshold filtering on the color values
green_color_min, green_color_max,
blue_color_min, blue_color_max);
imageFrameGray = MassCenter(imageFrameGray);
Display(imageMatrix, imageFrameGray); //<--------------------- display
if (_isDetected) // look for motion
{
if (_count_frames == 0) //hacky: counted per frame, should really be time-based
{
_firstSpeedCorrection = true;
}
if(_firstSpeedCorrection)
{
if(_count_frames!=0)
{
if (Math.Abs(x_coord[0] - center_x) >= 30 || Math.Abs(y_coord[0] - center_y) >= 30)
{
x_coord.Clear();
y_coord.Clear();
}
else
{
_firstSpeedCorrection = false;
x_begining = center_x;
y_begining = center_y;
}
}
}
x_coord.Add(center_x);
y_coord.Add(center_y);
x_ending = center_x;
y_ending = center_y;
_count_frames++;
}
else
{
if (my_timer.ElapsedMilliseconds > 2000) // 2 seconds to adjust the image
{
my_timer.Reset();
if (_count_frames >= 5)
{
Line_func line = new Line_func(x_coord, y_coord);
labelFunc.Text = line.coord_a_.ToString() + "X + " + line.coord_b_.ToString();
_correction = false;
if (Math.Abs(x_ending - x_begining) > 200 && Math.Abs(line.coord_a_) < 1)
{
if (x_begining < 215)
{
labelType.Text = " Горизонтальная линия из левого края";
windowsChoose.nextSong();// след песня
}
else if (x_begining > 430)
{
labelType.Text = " Горизонтальная линия из правого края";
windowsChoose.prevSong();
}
else
{
if (x_ending - x_begining > 0)
{
labelType.Text = " Горизонтальная линия из середины вправо";
windowsChoose.playSong();
}
else if (x_ending - x_begining < 0)
{
labelType.Text = " Горизонтальная линия из середины влево";
windowsChoose.stopSong();
}
}
}
else if (Math.Abs(y_ending - y_begining) > 250 && Math.Abs(line.coord_a_) > 1)
{
if (y_ending - y_begining > 0)
{
labelType.Text = " Вертикальная линия сверху вниз";
windowsChoose.voulumeDown();
}
else if (y_ending - y_begining < 0)
{
labelType.Text = " Вертикальная линия снизу вверх";
windowsChoose.voulumeUp();
}
}
else
labelType.Text = "";
}
else
{
//......... part of the code is omitted here .........
Example 15: TestEmguCVLoad
/// <summary>
/// Throws an exception if it isn't going to load
/// </summary>
private static void TestEmguCVLoad()
{
if (IntPtr.Size != 8)
{
throw new Exception("Change VS options to ensure 64bit IIS Express");
}
using (var test = new Mat())
{
var f = test.ToImage<Bgr, byte>();
f.Dispose();
}
}