本文整理汇总了C#中Emgu.CV.Capture.QueryFrame方法的典型用法代码示例。如果您正苦于以下问题:C# Capture.QueryFrame方法的具体用法?C# Capture.QueryFrame怎么用?C# Capture.QueryFrame使用的例子?那么恭喜您, 这里精选的方法代码示例或许可以为您提供帮助。您也可以进一步了解该方法所在类Emgu.CV.Capture
的用法示例。
在下文中一共展示了Capture.QueryFrame方法的15个代码示例,这些例子默认根据受欢迎程度排序。您可以为喜欢或者感觉有用的代码点赞,您的评价将有助于系统推荐出更棒的C#代码示例。
示例1: write
// Joins every clip listed in list_timestamps into a single MPEG-1 video.
// The frame rate is probed from the first clip (falling back to 25 fps) and
// a progress event is raised after each clip is appended.
// Fix: the original leaked tempcapture/joincapture/videowriter whenever an
// exception escaped mid-loop; all three are now guarded by using blocks.
public void write() {
    // MPEG-1 FOURCC; fixed output size 640x480, colour.
    int codec = Emgu.CV.CvInvoke.CV_FOURCC('P', 'I', 'M', '1');
    int fps = 25;
    if (list_timestamps.Count > 0)
    {
        String tempvideopath = Program.getConfiguration().Mediafolderpath + @"\" + list_timestamps[0].ToString() + ".mpg";
        // Probe the first clip for its frame rate so the joined video plays at the right speed.
        using (Capture tempcapture = new Capture(tempvideopath))
        {
            fps = (int)tempcapture.GetCaptureProperty(CAP_PROP.CV_CAP_PROP_FPS);
        }
    }
    using (VideoWriter videowriter = new VideoWriter(videopath, codec, fps, 640, 480, true))
    {
        for (int i = 0; i < list_timestamps.Count; i++)
        {
            videopath = Program.getConfiguration().Mediafolderpath + @"\" + list_timestamps[i].ToString() + ".mpg";
            try
            {
                using (Capture joincapture = new Capture(videopath))
                {
                    Image<Bgr, byte> frame = joincapture.QueryFrame();
                    // Discard frames 1..14; frame 0 is still written first below.
                    // NOTE(review): presumably skips an encoder warm-up artifact — confirm.
                    for (int n = 1; n < 15; n++)
                        joincapture.QueryFrame();
                    while (frame != null)
                    {
                        videowriter.WriteFrame(frame);
                        frame = joincapture.QueryFrame();
                    }
                }
                // Notify main frame to update its progressbar
                ExportVideoProgressEventArgs e = new ExportVideoProgressEventArgs(i);
                DoneAppendingRallyVideoEvent(this, e);
            }
            catch (NullReferenceException) { Console.WriteLine("unreadable video file"); }
        }
    }
}
示例2: RecognitionOnPrem
// View constructor: wires camera lifetime to the view's Loaded/Unloaded
// events and hooks per-frame processing to the compositor's Rendering event.
public RecognitionOnPrem()
{
    InitializeComponent();
    Loaded += (s, e) =>
    {
        if (grabber != null)
        {
            // Camera already opened on a previous visit — just resume it.
            grabber.Start();
            return;
        }
        // First load: build the face detector and open the default camera.
        _faceClassifier = new CascadeClassifier("haarcascade_frontalface_default.xml");
        // Remember how many faces were already trained.
        ContTrain = CommonData.TrainingImages.Count;
        grabber = new Capture();
        grabber.QueryFrame(); // prime the device with an initial grab
    };
    // Pause capture while the view is not visible.
    Unloaded += (s, e) => grabber.Stop();
    CompositionTarget.Rendering += CompositionTarget_Rendering;
}
示例3: AIRecognition
// View constructor: resets the recognition view-model on every load,
// lazily opens the camera once, and restarts it on each revisit.
public AIRecognition()
{
    InitializeComponent();
    _faceClassifier = new CascadeClassifier("haarcascade_frontalface_default.xml");
    Loaded += (s, e) =>
    {
        // Fresh session: clear previous results before streaming resumes.
        _vmodel.Pictures.Clear();
        _vmodel.PersonRecognized = 0;
        this.DataContext = _vmodel;
        if (grabber == null)
        {
            // First load: restore persisted faces and open the default camera.
            CommonData.LoadSavedData();
            _countFaces = CommonData.PicturesVM.Pictures.Count; // faces already on record
            grabber = new Capture();
            grabber.QueryFrame(); // prime the device
        }
        // Both first load and revisits end with the camera running.
        grabber.Start();
    };
    // Pause capture while the view is not visible.
    Unloaded += (s, e) => grabber.Stop();
    CompositionTarget.Rendering += CompositionTarget_Rendering;
}
示例4: Admin
// Admin view constructor: binds the shared pictures view-model, lazily
// opens the camera once, and restarts it whenever the view is re-shown.
public Admin()
{
    InitializeComponent();
    face = new HaarCascade("haarcascade_frontalface_default.xml");
    Loaded += (s, e) =>
    {
        this.DataContext = CommonData.PicturesVM;
        if (grabber == null)
        {
            // First load: restore persisted faces and open the default camera.
            CommonData.LoadSavedData();
            _countFaces = CommonData.PicturesVM.Pictures.Count; // faces already on record
            grabber = new Capture();
            grabber.QueryFrame(); // prime the device
        }
        // Both first load and revisits end with the camera running.
        grabber.Start();
    };
    // Pause capture while the view is not visible.
    Unloaded += (s, e) => grabber.Stop();
    CompositionTarget.Rendering += CompositionTarget_Rendering;
}
示例5: ColorSampleForm
// Grabs one frame from the supplied capture, scales it to the preview
// size (keeping proportions), and shows it in the sample image box.
public ColorSampleForm(Capture c)
{
    InitializeComponent();
    sampleImg = c.QueryFrame().Resize(_frameWidth, _frameHeight, true);
    sampleImageBox.Image = sampleImg;
}
示例6: StartStreaming
// Opens the default camera, primes it with an initial grab, and drives
// frame processing from the application's idle event.
public void StartStreaming()
{
    grabber = new Capture();
    grabber.QueryFrame(); // first grab warms up the device
    Application.Idle += FrameGrabber;
    //All.Enabled = false;
}
示例7: CountFramesNumberAsync
/// <summary>
/// Counts the number of frames in a video file.
/// </summary>
/// <param name="data">Video description; must be an IOData carrying the file name.</param>
/// <returns>A task producing the total frame count.</returns>
/// <exception cref="ArgumentNullException">data is null or the file name is empty.</exception>
public Task<int> CountFramesNumberAsync(object data)
{
    // Validate synchronously so callers get argument errors immediately,
    // not wrapped inside the returned task.
    if (data == null)
        throw new ArgumentNullException("Null data in LoadFrames");
    IOData ioData = (IOData)data;
    string videoFileName = ioData.FileName;
    if (string.IsNullOrEmpty(videoFileName))
        throw new ArgumentNullException("Null videoFileName in LoadFrames");
    // Counting is I/O + decode bound, so run it off the caller's thread.
    return Task.Run(() =>
    {
        // Fixes from review: the capture is now disposed (was leaked), the
        // unused frames list is gone, and the pointless catch { throw exception; }
        // wrapper (which destroyed the stack trace) has been removed.
        using (Capture capture = new Capture(videoFileName))
        {
            int frameNumber = 0;
            // QueryFrame returns null once the stream is exhausted.
            while (capture.QueryFrame() != null)
                ++frameNumber;
            return frameNumber;
        }
    });
}
示例8: Form1
// Form constructor: opens a prerecorded AVI and prepares the colour
// thresholds, contour storages and point buffers used by the per-frame
// hand-gesture analysis driven from Application.Idle.
public Form1()
{
InitializeComponent();
// NOTE(review): hard-coded developer path — fails on any other machine; consider making configurable.
grabber = new Emgu.CV.Capture("C:/Users/L33549.CITI/Desktop/a.avi");
grabber.QueryFrame();
// Cache source dimensions for later frame processing.
frameWidth = grabber.Width;
frameHeight = grabber.Height;
//detector = new AdaptiveSkinDetector(1, AdaptiveSkinDetector.MorphingMethod.NONE);
// Skin-segmentation thresholds in HSV and YCrCb colour spaces.
hsv_min = new Hsv(0, 45, 0);
hsv_max = new Hsv(20, 255, 255);
YCrCb_min = new Ycc(0, 129, 40);
YCrCb_max = new Ycc(255, 185, 135);
box = new MCvBox2D();
ellip = new Ellipse();
// Separate MemStorage pools for each OpenCV geometry computation.
contourStorage = new MemStorage();
approxStorage = new MemStorage();
hullStorage = new MemStorage();
defectsStorage = new MemStorage();
tipPts = new Point[MAX_POINTS]; // coords of the finger tips
foldPts = new Point[MAX_POINTS]; // coords of the skin folds between fingers
depths = new float[MAX_POINTS]; // distances from tips to folds
cogPt = new Point(); // centre of gravity of the hand blob
fingerTips = new List<Point>();
// Haar cascade trained for hand detection (again a hard-coded local path).
face = new CascadeClassifier("C:/Users/L33549.CITI/Desktop/AbuseAnalysis/HandGestureRecognition/HandGestureRecognition/HandGestureRecognition/haar/Original/haarcascade_hand.xml");
// Process one frame each time the UI thread goes idle.
Application.Idle += new EventHandler(FrameGrabber);
// Disabled Kinect colour-stream setup kept for reference.
/*foreach (var potentialSensor in KinectSensor.KinectSensors)
{
if (potentialSensor.Status == KinectStatus.Connected)
{
this.sensor = potentialSensor;
break;
}
}
if (null != this.sensor)
{
// Turn on the color stream to receive color frames
this.sensor.ColorStream.Enable(ColorImageFormat.RgbResolution640x480Fps30);
// Allocate space to put the pixels we'll receive
this.colorPixels = new byte[this.sensor.ColorStream.FramePixelDataLength];
// This is the bitmap we'll display on-screen
this.colorBitmap = new WriteableBitmap(this.sensor.ColorStream.FrameWidth, this.sensor.ColorStream.FrameHeight, 96.0, 96.0, PixelFormats.Bgr32, null);
// Set the image we display to point to the bitmap where we'll put the image data
//this.Image.Source = this.colorBitmap;
// Add an event handler to be called whenever there is new color frame data
this.sensor.ColorFrameReady += this.SensorColorFrameReady;
// Start the sensor!
this.sensor.Start();
}*/
}
示例9: processarVideo
// Processes a video file frame by frame: refreshes the dynamic parameters,
// analyses, annotates and displays each frame, then advances the capture.
// (Identifiers are Portuguese: processar = process, exibir = display.)
public void processarVideo(ParametrosDinamicos parametros)
{
mCapture = new Capture(mNomeDoArquivo);
// NOTE(review): the loop below requires mImagemColorida to be non-null on
// entry — presumably inicializarVariaveis() grabs the first frame; confirm.
inicializarVariaveis();
carregarParametrosNaTela(parametros);
while (mImagemColorida != null)
{
atualizarParametros(parametros);
mContadorDeFrames++;
processarImagem(false);
// ~100 ms pause per frame so the preview remains watchable.
CvInvoke.WaitKey(100);
// CvInvoke.cvShowImage("Imagem", mImagemColorida);
desenharNaImagem(parametros);
exibirImagem(false);
// Frame-snapshot e-mail export, currently disabled.
if (mSalvarImagem)
{
/*CvInvoke.SaveImage(String.Format(@"C:\Users\Tadeu Rahian\Dropbox\Dropbox\UFMG\PFC1\Imagens\mImagemColorida{0}.jpg", mContadorDeFrames), mImagemColorida);
EnviarImagensEmail(new Attachment(String.Format(@"C:\Users\Tadeu Rahian\Dropbox\Dropbox\UFMG\PFC1\Imagens\mImagemColorida{0}.jpg", mContadorDeFrames)));
mSalvarImagem = false;*/
}
// Advance; QueryFrame returns null at end of stream, ending the loop.
mImagemColorida = mCapture.QueryFrame();
}
mCapture.Dispose();
}
示例10: button1_Click
// Opens the default capture device, primes it with a first frame, and
// drives frame processing from the application's idle event. The button
// is disabled so capture cannot be started twice.
private void button1_Click(object sender, EventArgs e)
{
    grabber = new Capture();
    grabber.QueryFrame(); // warm-up grab
    Application.Idle += FrameGrabber;
    button1.Enabled = false;
}
示例11: InitializeEmguCv
// Opens any available capture device and snapshots a blurred greyscale
// reference frame, stored as the background for later frame differencing.
private void InitializeEmguCv()
{
    capture = new Capture(CaptureType.Any);
    var firstFrame = capture.QueryFrame();
    var grey = firstFrame.ToGrey();
    // 11x11 Gaussian smoothing suppresses sensor noise in the reference.
    background = grey.GaussianBlur(new Size(11, 11));
}
示例12: testCam
// Streams live camera frames into an ImageViewer until its window is
// closed; frames are pulled on every application-idle tick.
public void testCam()
{
    ImageViewer viewer = new ImageViewer();
    Capture capture = new Capture();
    Application.Idle += (sender, e) => viewer.Image = capture.QueryFrame();
    viewer.ShowDialog(); // blocks until the viewer window is closed
}
示例13: createCaptures
// Probes camera indices 0 and 1 and keeps only the devices that actually
// deliver a frame. Fix: captures whose probe frame is null were previously
// leaked (never disposed and never added to the list).
public void createCaptures()
{
    devices = new List<Capture>();
    for (int i = 0; i < 2; i++)
    {
        Capture c = new Capture(i);
        if (c.QueryFrame() != null)
            devices.Add(c);
        else
            c.Dispose(); // release devices that cannot produce a frame
    }
}
示例14: Main
// UDP command server: listens on port 15000, grabs a camera frame per
// request, and dispatches movement commands to a serial-connected robot.
// Fix: the catch block was empty and silently swallowed every error
// (socket failures, camera faults, serial errors); it now logs and keeps serving.
static void Main(string[] args)
{
    LoCoMoCo MyBot = new LoCoMoCo("COM3"); // com port number
    var MainToken = new CancellationTokenSource(); //create token for the cancel
    UdpClient MainServerSocket = new UdpClient(15000); // declare a client
    byte[] MainDataReceived = new byte[1024]; // prepare container for received data
    string MainStringData = "";
    Capture capture = new Capture(); // declare object for camera
    Image<Bgr, Byte> frame; // declare image for capture
    int TotalMessageCount = 0;
    while (true) // this while for keeping the main server "listening"
    {
        try {
            frame = capture.QueryFrame();
            Console.WriteLine("Waiting for a UDP client..."); // display stuff
            IPEndPoint MainClient = new IPEndPoint(IPAddress.Any,0); // prepare a client
            MainDataReceived = MainServerSocket.Receive(ref MainClient); // blocks until a packet arrives
            MainStringData = Encoding.ASCII.GetString(MainDataReceived, 0, MainDataReceived.Length); // get string from packet
            Console.WriteLine("Response from " + MainClient.Address); // display stuff
            Console.WriteLine("Message " + TotalMessageCount++ + ": " + MainStringData + "\n"); // display client's string
            if (MainStringData.Equals("Picture"))
            {
                MainToken = new CancellationTokenSource(); // new cancellation token every iteration
                Task.Run(() => SendPicture(MainServerSocket, MainClient, frame), MainToken.Token); //start method on another thread
            }
            if (MainStringData.Equals("StopV"))
                MainToken.Cancel();
            else if (MainStringData.Equals("Forward"))
                MyBot.forward();
            else if (MainStringData.Equals("Backward"))
                MyBot.backward();
            else if (MainStringData.Equals("Left"))
                MyBot.turnleft();
            else if (MainStringData.Equals("Right"))
                MyBot.turnright();
            else if (MainStringData.Equals("Stop"))
                MyBot.stop();
        } catch (Exception e)
        {
            // Log and keep serving: a malformed packet or camera hiccup
            // must not terminate the server loop.
            Console.WriteLine("Server loop error: " + e.Message);
        }
    }
}
示例15: TestCodeBook
// Demonstrates background subtraction with BGCodeBookModel on "tree.avi":
// the first 40 frames train the model, after which stale codebook entries
// are cleared and the foreground mask is shown per frame in an ImageViewer.
public static void TestCodeBook()
{
int learningFrames = 40;
using (Capture capture = new Capture("tree.avi"))
using (BGCodeBookModel<Ycc> bgmodel = new BGCodeBookModel<Ycc>())
{
#region Set color thresholds values
// MCvBGCodeBookModel is a struct: read, mutate, write back.
MCvBGCodeBookModel param = bgmodel.MCvBGCodeBookModel;
param.modMin[0] = param.modMin[1] = param.modMin[2] = 3;
param.modMax[0] = param.modMax[1] = param.modMax[2] = 10;
param.cbBounds[0] = param.cbBounds[1] = param.cbBounds[2] = 10;
bgmodel.MCvBGCodeBookModel = param;
#endregion
ImageViewer viewer = new ImageViewer();
int count = 0;
// One frame is processed per application-idle tick.
EventHandler processFrame = delegate(Object sender, EventArgs e)
{
Image<Bgr, Byte> img = capture.QueryFrame();
if (img == null)
{
// End of video: nothing more to process.
return;
}
// All-255 mask: every pixel participates in the update.
Image<Gray, byte> mask = new Image<Gray, Byte>(img.Size);
mask.SetValue(255);
viewer.Text = String.Format("Processing {0}th image. {1}", count++, learningFrames > 0 ? "(Learning)" : String.Empty);
using (Image<Ycc, Byte> ycc = img.Convert<Ycc, Byte>()) //using YCC color space for BGCodeBook
{
bgmodel.Update(ycc, ycc.ROI, mask);
// Exactly once, when the learning counter hits zero, prune stale entries.
if (learningFrames == 0) //training is completed
bgmodel.ClearStale(bgmodel.MCvBGCodeBookModel.t / 2, ycc.ROI, mask);
learningFrames--;
Image<Gray, Byte> m = bgmodel.ForgroundMask.Clone();
// NOTE(review): this re-clone at frame 56 is redundant — presumably a
// leftover debugging breakpoint anchor; confirm before removing.
if (count == 56)
{
m = bgmodel.ForgroundMask.Clone();
}
//m._EqualizeHist();
viewer.Image = m;
//viewer.Image = img;
// Throttle playback to roughly 10 fps.
System.Threading.Thread.Sleep(100);
}
img.Dispose();
};
Application.Idle += processFrame;
viewer.ShowDialog();
}
}