This article collects and organizes typical usage examples of the Emgu.CV.Capture.Retrieve method in C#. If you have been struggling with questions such as: How exactly is the C# Capture.Retrieve method used? What does Capture.Retrieve do? What do real examples of Capture.Retrieve look like? Then the curated method examples here may help. You can also explore further usage examples of the class the method belongs to, Emgu.CV.Capture.
Three code examples of the Capture.Retrieve method are shown below; by default they are ordered by popularity. You can upvote the examples you like or find useful, and your ratings help the system recommend better C# code examples.
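Before the full examples, here is a minimal sketch of the pattern they all build on: subscribe to the ImageGrabbed event, call Retrieve inside the handler to copy the newly grabbed frame into a Mat, then Start the capture. This sketch is not taken from the examples below; the camera index, the helper class name, and the per-frame ToImage conversion are illustrative assumptions.
using System;
using Emgu.CV;
using Emgu.CV.Structure;
using Emgu.CV.UI;

public static class RetrieveSketch
{
   public static void Run()
   {
      using (ImageViewer viewer = new ImageViewer()) //window used to display the frames
      using (Capture capture = new Capture(0))       //0 = default camera; a video file path also works
      using (Mat frame = new Mat())
      {
         capture.ImageGrabbed += delegate(object sender, EventArgs e)
         {
            //copy the frame that was just grabbed into 'frame'
            capture.Retrieve(frame);
            //convert per frame for display; simple, but not the most efficient option
            viewer.Image = frame.ToImage<Bgr, byte>();
         };
         capture.Start();     //begin grabbing frames on a background thread
         viewer.ShowDialog(); //blocks until the viewer window is closed
         capture.Stop();
      }
   }
}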
Example 1: TestCodeBookBGModel
/*
public void TestCodeBookBGModel()
{
   using (Capture capture = new Capture())
   using (BGCodeBookModel<Bgr> model = new BGCodeBookModel<Bgr>())
   {
      ImageViewer viewer = new ImageViewer();
      Image<Gray, byte> fgMask = capture.QueryFrame().Convert<Gray, Byte>();

      Application.Idle += delegate(Object sender, EventArgs args)
      {
         Mat frame = capture.QueryFrame();
         model.Apply(frame);
         viewer.Image = model.ForegroundMask;
      };
      viewer.ShowDialog();
   }
}

public void TestBlobTracking()
{
   MCvFGDStatModelParams fgparam = new MCvFGDStatModelParams();
   fgparam.alpha1 = 0.1f;
   fgparam.alpha2 = 0.005f;
   fgparam.alpha3 = 0.1f;
   fgparam.delta = 2;
   fgparam.is_obj_without_holes = 1;
   fgparam.Lc = 32;
   fgparam.Lcc = 16;
   fgparam.minArea = 15;
   fgparam.N1c = 15;
   fgparam.N1cc = 25;
   fgparam.N2c = 25;
   fgparam.N2cc = 35;
   fgparam.perform_morphing = 0;
   fgparam.T = 0.9f;

   BlobTrackerAutoParam<Bgr> param = new BlobTrackerAutoParam<Bgr>();
   param.BlobDetector = new BlobDetector(Emgu.CV.CvEnum.BlobDetectorType.CC);
   param.FGDetector = new FGDetector<Bgr>(Emgu.CV.CvEnum.ForgroundDetectorType.Fgd, fgparam);
   param.BlobTracker = new BlobTracker(Emgu.CV.CvEnum.BLOBTRACKER_TYPE.MSFG);
   param.FGTrainFrames = 10;

   BlobTrackerAuto<Bgr> tracker = new BlobTrackerAuto<Bgr>(param);
   //MCvFont font = new MCvFont(Emgu.CV.CvEnum.FontFace.HersheySimplex, 1.0, 1.0);

   using (ImageViewer viewer = new ImageViewer())
   using (Capture capture = new Capture())
   {
      capture.ImageGrabbed += delegate(object sender, EventArgs e)
      {
         tracker.Process(capture.RetrieveBgrFrame());
         //Image<Bgr, Byte> img = capture.RetrieveBgrFrame();
         Image<Bgr, Byte> img = tracker.ForegroundMask.Convert<Bgr, Byte>();
         foreach (MCvBlob blob in tracker)
         {
            img.Draw((Rectangle)blob, new Bgr(255.0, 255.0, 255.0), 2);
            img.Draw(blob.ID.ToString(), Point.Round(blob.Center), CvEnum.FontFace.HersheySimplex, 1.0, new Bgr(255.0, 255.0, 255.0));
         }
         viewer.Image = img;
      };
      capture.Start();
      viewer.ShowDialog();
   }
}*/
public void TestCvBlob()
{
   //MCvFont font = new MCvFont(Emgu.CV.CvEnum.FontFace.HersheySimplex, 0.5, 0.5);
   using (CvTracks tracks = new CvTracks())
   using (ImageViewer viewer = new ImageViewer())
   using (Capture capture = new Capture())
   using (Mat fgMask = new Mat())
   {
      //BGStatModel<Bgr> bgModel = new BGStatModel<Bgr>(capture.QueryFrame(), Emgu.CV.CvEnum.BG_STAT_TYPE.GAUSSIAN_BG_MODEL);
      BackgroundSubtractorMOG2 bgModel = new BackgroundSubtractorMOG2(0, 0, true);
      //BackgroundSubstractorMOG bgModel = new BackgroundSubstractorMOG(0, 0, 0, 0);

      capture.ImageGrabbed += delegate(object sender, EventArgs e)
      {
         //retrieve the newly grabbed frame and update the foreground mask
         Mat frame = new Mat();
         capture.Retrieve(frame);
         bgModel.Apply(frame, fgMask);

         using (CvBlobDetector detector = new CvBlobDetector())
         using (CvBlobs blobs = new CvBlobs())
         {
            //detect blobs in the foreground mask, keep the large ones and track them
            detector.Detect(fgMask.ToImage<Gray, Byte>(), blobs);
            blobs.FilterByArea(100, int.MaxValue);
            tracks.Update(blobs, 20.0, 10, 0);

            Image<Bgr, Byte> result = new Image<Bgr, byte>(frame.Size);
            using (Image<Gray, Byte> blobMask = detector.DrawBlobsMask(blobs))
            {
               frame.CopyTo(result, blobMask);
            }
//......... part of the code is omitted here .........
Example 2: TestFileCapturePause
public void TestFileCapturePause()
{
   int totalFrames1 = 0;
   Capture capture1 = new Capture(EmguAssert.GetFile("tree.avi"));

   //capture one will continue capturing all the frames.
   EventHandler captureHandle1 = delegate
   {
      Mat img = new Mat();
      capture1.Retrieve(img);
      totalFrames1++;
      Trace.WriteLine(String.Format("capture 1 frame {0}: {1}", totalFrames1, DateTime.Now.ToString()));
   };
   capture1.ImageGrabbed += captureHandle1;
   capture1.Start();

   System.Threading.Thread.Sleep(2);
   int totalFrames2 = 0;
   Capture capture2 = new Capture(EmguAssert.GetFile("tree.avi"));
   int counter = 0;

   //capture 2 will capture 2 frames, pause for 1 second, then continue.
   EventHandler captureHandle = delegate
   {
      counter++;
      totalFrames2++;
      bool needPause = (counter >= 2);
      if (needPause)
      {
         capture2.Pause();
         counter = 0;
      }
      Mat img = new Mat();
      capture2.Retrieve(img);
      Trace.WriteLine(String.Format("capture 2 frame {0}: {1}", totalFrames2, DateTime.Now.ToString()));

      if (needPause)
      {
         System.Threading.ThreadPool.QueueUserWorkItem(delegate
         {
            Trace.WriteLine("Sleep for 1 sec");
            System.Threading.Thread.Sleep(1000);
            capture2.Start();
         });
      }
   };
   capture2.ImageGrabbed += captureHandle;
   capture2.Start();

   //int totalFrames = 69;
   Stopwatch s = Stopwatch.StartNew();
   while (totalFrames1 != totalFrames2)
   {
      System.Threading.Thread.Sleep(1000);
      if (s.ElapsedMilliseconds > 120 * 1000)
      {
         EmguAssert.IsTrue(false, "Unable to finish reading frames in 2 mins");
         break;
      }
   }
   capture1.Dispose();
   capture2.Dispose();
}
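As a point of comparison with the event-driven test above, the sketch below shows the simpler synchronous way to read a video file: call Grab and Retrieve in a plain loop until Grab returns false. It is not part of the test suite; the helper class and method names are made up, and it assumes the same tree.avi sample file.
using Emgu.CV;

public static class SynchronousReadSketch
{
   public static int CountFrames(string fileName)
   {
      int frameCount = 0;
      using (Capture capture = new Capture(fileName)) //e.g. the same "tree.avi" sample
      using (Mat frame = new Mat())
      {
         //Grab() returns false once the end of the file is reached
         while (capture.Grab())
         {
            capture.Retrieve(frame); //decode the grabbed frame into 'frame'
            frameCount++;
         }
      }
      return frameCount;
   }
}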
Example 3: TestGpuVibe
/*
public void TestGpuVibe()
{
   int warmUpFrames = 20;
   GpuVibe<Gray> vibe = null;
   Image<Gray, Byte> mask = null;

   using (ImageViewer viewer = new ImageViewer()) //create an image viewer
   using (Capture capture = new Capture()) //create a camera capture
   {
      capture.ImageGrabbed += delegate(object sender, EventArgs e)
      {
         //run this until the application is closed (close button click on the image viewer)
         using (Image<Bgr, byte> frame = capture.RetrieveBgrFrame(0))
         using (CudaImage<Bgr, byte> gpuFrame = new CudaImage<Bgr, byte>(frame))
         using (CudaImage<Gray, Byte> gpuGray = gpuFrame.Convert<Gray, Byte>())
         {
            if (warmUpFrames > 0)
            {
               warmUpFrames--;
               return;
            }
            if (vibe == null)
            {
               vibe = new GpuVibe<Gray>(1234567, gpuGray, null);
               return;
            }
            else
            {
               vibe.Apply(gpuGray, null);
               if (mask == null)
                  mask = new Image<Gray, byte>(vibe.ForgroundMask.Size);
               vibe.ForgroundMask.Download(mask);
               viewer.Image = frame.ConcateHorizontal(mask.Convert<Bgr, Byte>()); //draw the image obtained from the camera
            }
         }
      };
      capture.Start();
      viewer.ShowDialog(); //show the image viewer
   }
}

public void TestGpuBackgroundModel()
{
   int warmUpFrames = 20;
   int totalFrames = 0;
   //CudaBackgroundSubtractorMOG2<Bgr> bgModel = null;
   //CudaBackgroundSubtractorMOG<Bgr> bgModel = null;
   CudaBackgroundSubtractorGMG<Bgr> bgModel = null;
   //CudaBackgroundSubtractorFGD<Bgr> bgModel = null;
   Image<Gray, Byte> mask = null;

   using (ImageViewer viewer = new ImageViewer()) //create an image viewer
   using (Capture capture = new Capture()) //create a camera capture
   {
      capture.ImageGrabbed += delegate(object sender, EventArgs e)
      {
         //run this until the application is closed (close button click on the image viewer)
         totalFrames++;
         if (viewer != null && !viewer.IsDisposed)
         {
            if (viewer.InvokeRequired)
            {
               viewer.Invoke((Action)delegate { viewer.Text = String.Format("Processing {0}th frame.", totalFrames); });
            }
            else
            {
               viewer.Text = String.Format("Processing {0}th frame.", totalFrames);
            }
         }

         using (Image<Bgr, byte> frame = capture.RetrieveBgrFrame(0))
         using (CudaImage<Bgr, byte> gpuFrame = new CudaImage<Bgr, byte>(frame))
         {
            if (warmUpFrames > 0)
            {
               warmUpFrames--;
               return;
            }
            if (bgModel == null)
            {
               //bgModel = new CudaBackgroundSubtractorMOG2<Bgr>(500, 16, true);
               //bgModel = new CudaBackgroundSubtractorMOG<Bgr>(200, 5, 0.7, 0);
               bgModel = new CudaBackgroundSubtractorGMG<Bgr>(120, 0.8);
               bgModel.Apply(gpuFrame, -1.0f, null);
               //bgModel = new CudaBackgroundSubtractorFGD<Bgr>(128, 15, 25, 64, 25, 40, true, 1, 0.1f, 0.005f, 0.1f, 2.0f, 0.9f, 15.0f);
               //bgModel.Apply(gpuFrame, -1.0f);
               return;
            }
            else
            {
               bgModel.Apply(gpuFrame, -1.0f, null);
//......... part of the code is omitted here .........