本文整理汇总了C#中Emgu.CV.Capture.Start方法的典型用法代码示例。如果您正苦于以下问题:C# Capture.Start方法的具体用法?C# Capture.Start怎么用?C# Capture.Start使用的例子?那么恭喜您, 这里精选的方法代码示例或许可以为您提供帮助。您也可以进一步了解该方法所在类Emgu.CV.Capture
的用法示例。
在下文中一共展示了Capture.Start方法的15个代码示例,这些例子默认根据受欢迎程度排序。您可以为喜欢或者感觉有用的代码点赞,您的评价将有助于系统推荐出更棒的C#代码示例。
示例1: TestCodeBookBGModel
/*
public void TestCodeBookBGModel()
{
using (Capture capture = new Capture())
using (BGCodeBookModel<Bgr> model = new BGCodeBookModel<Bgr>())
{
ImageViewer viewer = new ImageViewer();
Image<Gray, byte> fgMask = capture.QueryFrame().Convert<Gray, Byte>();
Application.Idle += delegate(Object sender, EventArgs args)
{
Mat frame = capture.QueryFrame();
model.Apply(frame);
viewer.Image = model.ForegroundMask;
};
viewer.ShowDialog();
}
}
public void TestBlobTracking()
{
MCvFGDStatModelParams fgparam = new MCvFGDStatModelParams();
fgparam.alpha1 = 0.1f;
fgparam.alpha2 = 0.005f;
fgparam.alpha3 = 0.1f;
fgparam.delta = 2;
fgparam.is_obj_without_holes = 1;
fgparam.Lc = 32;
fgparam.Lcc = 16;
fgparam.minArea = 15;
fgparam.N1c = 15;
fgparam.N1cc = 25;
fgparam.N2c = 25;
fgparam.N2cc = 35;
fgparam.perform_morphing = 0;
fgparam.T = 0.9f;
BlobTrackerAutoParam<Bgr> param = new BlobTrackerAutoParam<Bgr>();
param.BlobDetector = new BlobDetector(Emgu.CV.CvEnum.BlobDetectorType.CC);
param.FGDetector = new FGDetector<Bgr>(Emgu.CV.CvEnum.ForgroundDetectorType.Fgd, fgparam);
param.BlobTracker = new BlobTracker(Emgu.CV.CvEnum.BLOBTRACKER_TYPE.MSFG);
param.FGTrainFrames = 10;
BlobTrackerAuto<Bgr> tracker = new BlobTrackerAuto<Bgr>(param);
//MCvFont font = new MCvFont(Emgu.CV.CvEnum.FontFace.HersheySimplex, 1.0, 1.0);
using(ImageViewer viewer = new ImageViewer())
using (Capture capture = new Capture())
{
capture.ImageGrabbed += delegate(object sender, EventArgs e)
{
tracker.Process(capture.RetrieveBgrFrame());
//Image<Bgr, Byte> img = capture.RetrieveBgrFrame();
Image<Bgr, Byte> img = tracker.ForegroundMask.Convert<Bgr, Byte>();
foreach (MCvBlob blob in tracker)
{
img.Draw((Rectangle)blob, new Bgr(255.0, 255.0, 255.0), 2);
img.Draw(blob.ID.ToString(), Point.Round(blob.Center), CvEnum.FontFace.HersheySimplex, 1.0, new Bgr(255.0, 255.0, 255.0));
}
viewer.Image = img;
};
capture.Start();
viewer.ShowDialog();
}
}*/
public void TestCvBlob()
{
//MCvFont font = new MCvFont(Emgu.CV.CvEnum.FontFace.HersheySimplex, 0.5, 0.5);
using (CvTracks tracks = new CvTracks())
using (ImageViewer viewer = new ImageViewer())
using (Capture capture = new Capture())
using (Mat fgMask = new Mat())
{
//BGStatModel<Bgr> bgModel = new BGStatModel<Bgr>(capture.QueryFrame(), Emgu.CV.CvEnum.BG_STAT_TYPE.GAUSSIAN_BG_MODEL);
BackgroundSubtractorMOG2 bgModel = new BackgroundSubtractorMOG2(0, 0, true);
//BackgroundSubstractorMOG bgModel = new BackgroundSubstractorMOG(0, 0, 0, 0);
capture.ImageGrabbed += delegate(object sender, EventArgs e)
{
Mat frame = new Mat();
capture.Retrieve(frame);
bgModel.Apply(frame, fgMask);
using (CvBlobDetector detector = new CvBlobDetector())
using (CvBlobs blobs = new CvBlobs())
{
detector.Detect(fgMask.ToImage<Gray, Byte>(), blobs);
blobs.FilterByArea(100, int.MaxValue);
tracks.Update(blobs, 20.0, 10, 0);
Image<Bgr, Byte> result = new Image<Bgr, byte>(frame.Size);
using (Image<Gray, Byte> blobMask = detector.DrawBlobsMask(blobs))
{
frame.CopyTo(result, blobMask);
}
//.........这里部分代码省略.........
示例2: AIRecognition
/// <summary>
/// View constructor: loads the frontal-face Haar cascade, wires the
/// Loaded/Unloaded lifecycle to start/stop the webcam grabber, and hooks
/// per-frame processing to the WPF rendering event.
/// </summary>
public AIRecognition()
{
    InitializeComponent();
    // Cascade file is expected next to the executable.
    _faceClassifier = new CascadeClassifier("haarcascade_frontalface_default.xml");
    Loaded += (s, e) =>
    {
        // Reset the view-model each time the view is (re)loaded.
        _vmodel.Pictures.Clear();
        _vmodel.PersonRecognized = 0;
        this.DataContext = _vmodel;
        if (grabber == null)
        {
            CommonData.LoadSavedData();
            //check how many faces we already have
            _countFaces = CommonData.PicturesVM.Pictures.Count;
            grabber = new Capture();
            // Prime the device with one frame before starting the grab loop.
            grabber.QueryFrame();
            grabber.Start();
        }
        else
        {
            // Camera already exists (view re-entered); just resume grabbing.
            grabber.Start();
        }
    };
    Unloaded += (s, e) =>
    {
        // Fix: Unloaded can fire without Loaded ever having run, in which
        // case grabber is still null and Stop() would throw.
        if (grabber != null)
        {
            grabber.Stop();
        }
    };
    CompositionTarget.Rendering += CompositionTarget_Rendering;
}
示例3: RileeCapture
/// <summary>
/// Form constructor: registers the five master-image recognition boxes and
/// starts the default webcam, reporting success or failure in the status
/// label.
/// </summary>
public RileeCapture()
{
    InitializeComponent();
    btnDrawMasterImage.Enabled = false;

    // Register the recognition boxes in display order.
    foreach (var masterBox in new[] { imbMaster1, imbMaster2, imbMaster3, imbMaster4, imbMaster5 })
    {
        _limgMasters.Add(masterBox);
    }

    try
    {
        _capture = new Capture();
        _capture.ImageGrabbed += ProcessFrame;
        _capture.Start();
        tslStatus.Text = "Capture started";
        //flip horizontal to natural
        //if ((_capture != null)&&(!_capture.FlipHorizontal)) _capture.FlipHorizontal = true;
    }
    catch (NullReferenceException captureError)
    {
        // Emgu signals a missing camera with a NullReferenceException.
        tslStatus.Text = "Capture initialization failed...";
        MessageBox.Show(captureError.Message);
    }
}
示例4: Camera
/// <summary>
/// Opens the capture device with index <paramref name="num"/> and begins
/// grabbing frames; every grabbed frame is delivered to <c>Process</c>.
/// </summary>
/// <param name="num">Zero-based index of the camera to open.</param>
public Camera(int num)
{
    lens = new Capture(num);
    lens.ImageGrabbed += Process;
    lens.Start();
}
示例5: Admin
/// <summary>
/// Admin view constructor: loads the frontal-face Haar cascade, wires the
/// Loaded/Unloaded lifecycle to start/stop the webcam grabber, and hooks
/// per-frame processing to the WPF rendering event.
/// </summary>
public Admin()
{
    InitializeComponent();
    // Cascade file is expected next to the executable.
    face = new HaarCascade("haarcascade_frontalface_default.xml");
    Loaded += (s, e) =>
    {
        this.DataContext = CommonData.PicturesVM;
        if (grabber == null)
        {
            CommonData.LoadSavedData();
            //check how many faces we already have
            _countFaces = CommonData.PicturesVM.Pictures.Count;
            grabber = new Capture();
            // Prime the device with one frame before starting the grab loop.
            grabber.QueryFrame();
            grabber.Start();
        }
        else
        {
            // Camera already exists (view re-entered); just resume grabbing.
            grabber.Start();
        }
    };
    Unloaded += (s, e) =>
    {
        // Fix: Unloaded can fire without Loaded ever having run, in which
        // case grabber is still null and Stop() would throw.
        if (grabber != null)
        {
            grabber.Stop();
        }
    };
    CompositionTarget.Rendering += CompositionTarget_Rendering;
}
示例6: Form1
/// <summary>
/// Form constructor: creates the intrinsic camera parameters, fills the
/// line colour table with random BGR colours, and starts the default
/// webcam capture, reporting any failure to the user.
/// </summary>
public Form1()
{
    try
    {
        IC = new IntrinsicCameraParameters();
    }
    catch (Exception ex)
    {
        MessageBox.Show("Error: " + ex.Message);
    }
    InitializeComponent();
    // Fill the line colour array with random colours.
    // Fix: Random.Next's upper bound is exclusive, so the original
    // Next(0, 255) could never produce 255; 256 covers the full 0..255
    // channel range.
    Random R = new Random();
    for (int i = 0; i < line_colour_array.Length; i++)
    {
        line_colour_array[i] = new Bgr(R.Next(0, 256), R.Next(0, 256), R.Next(0, 256));
    }
    //set up cature as normal
    try
    {
        _Capture = new Capture();
        _Capture.ImageGrabbed += new Emgu.CV.Capture.GrabEventHandler(_Capture_ImageGrabbed);
        _Capture.Start();
    }
    catch (Exception ex)
    {
        MessageBox.Show("Error: " + ex.Message);
    }
}
示例7: Window_Loaded
/// <summary>
/// Starts the capture device and, whenever the UI thread is idle, pulls
/// the latest frame and shows it in the Display image control.
/// </summary>
private void Window_Loaded(object sender, RoutedEventArgs e)
{
    // NOTE(review): camera index 7 is hard-coded — confirm it matches the
    // target device on the deployment machine.
    Capture capture = new Capture(7);
    capture.Start();
    ComponentDispatcher.ThreadIdle += (o, arg) =>
    {
        var img = capture.QueryFrame();
        // Fix: the original allocated an unused Contour<Bgr> over a fresh
        // MemStorage on every idle tick and never disposed it — a steady
        // unmanaged-memory leak. The object was never read, so it is
        // simply removed.
        Display.Source = BitmapSourceConvert.ToBitmapSource(img);
    };
}
示例8: StartCamera
/// <summary>
/// Opens the camera at <paramref name="cameraIndex"/> on the CPU code path
/// (OpenCL disabled) and starts delivering frames to
/// <c>CapOnImageGrabbed</c>. Does nothing if a capture is already running.
/// </summary>
/// <param name="cameraIndex">Zero-based index of the camera to open.</param>
public void StartCamera(int cameraIndex)
{
    if (!isCapturing)
    {
        CvInvoke.UseOpenCL = false;
        camera = new Capture(cameraIndex);
        camera.ImageGrabbed += CapOnImageGrabbed;
        camera.Start();
        isCapturing = true;
    }
}
示例9: Form1
/// <summary>
/// Form constructor: opens the default webcam and routes every grabbed
/// frame to <c>ProcessFrame</c>. A missing camera surfaces from Emgu as a
/// NullReferenceException, which is shown to the user.
/// </summary>
public Form1()
{
    InitializeComponent();
    try
    {
        _capture = new Capture();
        _capture.ImageGrabbed += ProcessFrame;
        _capture.Start();
        processing = false;
    }
    catch (NullReferenceException noCamera)
    {
        MessageBox.Show(noCamera.Message);
    }
}
示例10: StartCapture
/// <summary>
/// Opens a native "Capture" window and starts grabbing frames on idle.
/// Tries camera 1 first; Emgu reports a missing device with a
/// NullReferenceException, in which case camera 0 is used as fallback.
/// </summary>
public static void StartCapture()
{
    CvInvoke.cvNamedWindow("Capture");
    try {
        _capture = new Capture (1);
    } catch (NullReferenceException excpt) {
        Console.Out.WriteLine (excpt.Message);
        _capture = new Capture (0);
    } finally {
        watch = Stopwatch.StartNew ();
        Application.Idle += ProcessFrame;
        // Fix: if the fallback Capture(0) also threw, _capture is null here
        // and the unconditional Start() raised a second exception from
        // inside the finally block, masking the original failure.
        if (_capture != null) {
            _capture.Start ();
        }
    }
}
示例11: Form1
int _width; //width of chessboard no. squares in width - 1
#endregion Fields
#region Constructors
/// <summary>
/// Form constructor: disables OpenCL (CPU path only), opens the default
/// webcam and routes every grabbed frame to <c>ProcessFrame</c>.
/// </summary>
public Form1()
{
    InitializeComponent();
    // Stay on the CPU code path.
    CvInvoke.UseOpenCL = false;
    //set up cature as normal
    try
    {
        _capture = new Capture();
        _capture.ImageGrabbed += ProcessFrame;
        _capture.Start();
    }
    catch (NullReferenceException noCamera)
    {
        // Emgu signals a missing camera with a NullReferenceException.
        MessageBox.Show(noCamera.Message);
    }
}
示例12: InitVideoCapture
/// <summary>
/// Opens the video file at <paramref name="path"/> for frame-by-frame
/// processing at 640x360 / 5 fps, delivering frames to
/// <c>VideoCaptureInterface_ImageGrabbed</c> and recording the total frame
/// count.
/// </summary>
/// <param name="path">Path of the video file to open.</param>
public void InitVideoCapture(string path)
{
    try
    {
        m_FrameMat = new Mat();
        m_VideoCaptureFilename = path;
        m_VideoCaptureInterface = null;
        m_VideoCaptureInterface = new Capture(m_VideoCaptureFilename);
        // Fix: the original transposed the two values (FrameHeight=640,
        // FrameWidth=360), requesting a portrait 360x640 stream instead of
        // the evidently intended 640x360 (16:9) frame size.
        m_VideoCaptureInterface.SetCaptureProperty(CapProp.FrameHeight, 360);
        m_VideoCaptureInterface.SetCaptureProperty(CapProp.FrameWidth, 640);
        m_VideoCaptureInterface.SetCaptureProperty(CapProp.Fps, 5);
        m_VideoCaptureInterface.ImageGrabbed += VideoCaptureInterface_ImageGrabbed;
        m_VideoCaptureFrameCount = (int)m_VideoCaptureInterface.GetCaptureProperty(CapProp.FrameCount);
        m_VideoCaptureInterface.Start();
    }
    catch (Exception)
    {
        // Best-effort: a bad path or unreadable file leaves the capture
        // uninitialized. NOTE(review): the failure is silently swallowed —
        // consider at least logging it.
    }
}
示例13: buttonStart_Click
/// <summary>
/// Starts the webcam (horizontally flipped) and a 20 ms DispatcherTimer
/// whose Tick drives <c>ProcessFrame</c>; on success, toggles the
/// Start/Stop buttons.
/// </summary>
private void buttonStart_Click(object sender, EventArgs e)
{
    try
    {
        capture = new Capture();
        capture.FlipHorizontal = true;
        timer = new DispatcherTimer();
        //Event for processing each frame in 20 ms interval
        timer.Tick += ProcessFrame;
        timer.Interval = new TimeSpan(0, 0, 0, 0, 20);
        timer.Start();
        capture.Start();
    }
    catch (Exception ex)
    {
        MessageBox.Show(ex.Message);
        // Fix: the original fell through and still disabled Start /
        // enabled Stop after a failure, wedging the UI with no running
        // capture. Bail out so the buttons keep their current state.
        return;
    }
    buttonStart.Enabled = false;
    buttonStop.Enabled = true;
}
示例14: SalmonCounter
/// <summary>
/// Wires up the salmon-counting pipeline against the video file
/// <c>videoOne</c>: capture -> shared BlobImage -> foreground detector and
/// salmon tracker, each running on its own thread. Frame grabbing, the
/// stopwatch and both worker threads are all started before the
/// constructor returns.
/// </summary>
public SalmonCounter()
{
InitializeComponent();
// Open the source video; the counter is sized from the frame width.
_capture = new Capture(videoOne);
counter = new Counter(_capture.Width);
// bImage is the shared image buffer between detector and tracker.
bImage = new BlobImage();
fgDetector = new ForegroundDetector(bImage);
sTracker = new SalmonTracker(bImage, counter);
watch = new Stopwatch();
time = new TimeSpan();
// Source video properties; NOTE(review): a video reporting 0 fps would
// make the msec division below throw — confirm videoOne always has fps.
FPS = (int)_capture.GetCaptureProperty(Emgu.CV.CvEnum.CapProp.Fps);
frameCount = (int)_capture.GetCaptureProperty(Emgu.CV.CvEnum.CapProp.FrameCount);
// Size the display surfaces to the native frame dimensions.
pictureBox1.Width = _capture.Width;
pictureBox1.Height = _capture.Height;
show.Width = _capture.Width;
show.Height = _capture.Height;
//msec between frames
msec = (int)(1000 / FPS);
//set the event handler
_capture.ImageGrabbed += grabImage;
_capture.Start();
watch.Start();
_frame = new Mat();
//Start foregroundSegmenter tread and salmon tracker thread
backgroundSubtractorThread = new Thread(fgDetector.detect);
backgroundSubtractorThread.Start();
sTrackerThread = new Thread(sTracker.updateSalmons);
sTrackerThread.Start();
}
示例15: openWebCam
/// <summary>
/// Opens the webcam video stream, hooks the per-frame handler, switches
/// the stream to MJPG and applies the resolution chosen by
/// <paramref name="indexResolution"/>. Always returns true.
/// </summary>
/// <param name="NomCamera">Index into VideoCaptureDevices used to look up
/// the supported resolutions (the capture itself opens the default
/// webcam — see TODO below).</param>
/// <param name="indexResolution">Index of the desired entry in the
/// device's VideoCapabilities list.</param>
protected Boolean openWebCam(int NomCamera, int indexResolution)
{
LimiteTerrain.Clear();
// Reset the cm-per-pixel ratio to the identity until calibration runs.
ratioCmParPixel = new double[2] { 1, 1 };
/* Open the video stream and initialize the EventHandler */
// TODO: camera selection — NomCamera is only used for the resolution
// lookup below; the capture always opens the default webcam.
_capture = new Capture(); // use the default webcam
// Raised each time a frame is received
_capture.ImageGrabbed += ProcessFrame;
// Switch the stream to MJPG
_capture.SetCaptureProperty(Emgu.CV.CvEnum.CAP_PROP.CV_CAP_PROP_FOURCC, CvInvoke.CV_FOURCC('M', 'J', 'P', 'G'));
// Resolution: read the chosen frame size from the DirectShow device
// capabilities and push it onto the capture.
VideoCaptureDevice tmpVideo = new VideoCaptureDevice(VideoCaptureDevices[NomCamera].MonikerString);
_capture.SetCaptureProperty(Emgu.CV.CvEnum.CAP_PROP.CV_CAP_PROP_FRAME_WIDTH, tmpVideo.VideoCapabilities[indexResolution].FrameSize.Width);
_capture.SetCaptureProperty(Emgu.CV.CvEnum.CAP_PROP.CV_CAP_PROP_FRAME_HEIGHT, tmpVideo.VideoCapabilities[indexResolution].FrameSize.Height);
_capture.Start();
return true;
}