This article collects typical usage examples of the C# method Emgu.CV.Capture.QueryGrayFrame. If you have been wondering what exactly C# Capture.QueryGrayFrame does, how to call it, or where to find examples of it, then the curated code samples below may help you. You can also read further about its containing class, Emgu.CV.Capture.
A total of 6 code examples of Capture.QueryGrayFrame are shown below, sorted by popularity by default. You can upvote the examples you like or find useful; your feedback helps the system recommend better C# code samples.
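Before the collected examples, here is a minimal sketch of the basic call pattern, assuming the Emgu CV 2.x API used throughout this page (Capture, Image<Gray, Byte>): open a capture source, call QueryGrayFrame() in a loop, and stop when it returns null at the end of the stream. The file name and output path are illustrative assumptions, not taken from the examples below.

using System;
using Emgu.CV;
using Emgu.CV.Structure;

class QueryGrayFrameDemo
{
    static void Main()
    {
        // Open a video file (pass no argument to open the default camera instead).
        using (Capture capture = new Capture("video.avi"))    // illustrative file name
        {
            int frameNumber = 0;
            Image<Gray, Byte> frame = capture.QueryGrayFrame();
            while (frame != null)                             // null signals end of stream
            {
                // Each frame is already greyscale; save every 100th one as an example.
                if (frameNumber % 100 == 0)
                    frame.Save("frame_" + frameNumber + ".jpg");
                frameNumber++;
                frame = capture.QueryGrayFrame();
            }
        }
    }
}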
Example 1: StartClient
private static void StartClient()
{
    th_cli = new Thread(delegate()
    {
        try
        {
            ConsoleAdditives.WriteHeader("Stream started");
            Capture cap = new Capture();
            while (_isRunning)
            {
                // Grab one greyscale frame and send it as 5 UDP chunks,
                // each prefixed with a 1-byte chunk index.
                byte[] buf = cap.QueryGrayFrame().Bytes;
                int buflp = buf.Length / 5;
                for (byte i = 0; i < 5; i++)
                {
                    byte[] tbuf = new byte[buflp];
                    tbuf[0] = i;
                    for (int j = 1; j < buflp; j++)
                    {
                        tbuf[j] = buf[i * buflp + j];
                    }
                    client.Send(tbuf, buflp, remoteEP);
                }
            }
            ConsoleAdditives.WriteHeader("Stream stopped");
        }
        catch (Exception ex)
        {
            Console.WriteLine(ex.ToString());
        }
    });
    th_cli.Start();
}
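This example only shows the sending side: each greyscale frame is split into 5 UDP datagrams whose first payload byte is a chunk index (which overwrites the first data byte of every chunk). The original page does not show a receiver; the sketch below is a hypothetical counterpart under those same assumptions, and the port number, class name, and reassembly logic are invented for illustration.

using System;
using System.Net;
using System.Net.Sockets;

static class StreamReceiver
{
    // Hypothetical receiver for the 5-chunk protocol used by StartClient above.
    public static void ReceiveFrames(int port)
    {
        UdpClient server = new UdpClient(port);
        IPEndPoint sender = new IPEndPoint(IPAddress.Any, 0);
        byte[][] chunks = new byte[5][];

        while (true)
        {
            byte[] datagram = server.Receive(ref sender);   // blocking receive
            byte index = datagram[0];                       // 1-byte chunk index written by the sender
            if (index < 5)
                chunks[index] = datagram;

            // Once all 5 chunks have arrived, reassemble the frame buffer.
            if (Array.TrueForAll(chunks, c => c != null))
            {
                int chunkLength = chunks[0].Length;
                byte[] frame = new byte[chunkLength * 5];
                for (int i = 0; i < 5; i++)
                    Array.Copy(chunks[i], 0, frame, i * chunkLength, chunkLength);
                // frame now holds the raw grey pixels (byte 0 of each chunk was
                // overwritten by the index on the sending side).
                chunks = new byte[5][];
            }
        }
    }
}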
Example 2: GetKeyFrames
/// <summary>
/// Extracts the key frames from a list of frames
/// </summary>
/// <param name="videoFileName">Video file name</param>
/// <param name="frameWidth">Frame width</param>
/// <param name="frameHeight">Frame height</param>
/// <param name="keyFramesInformation">List of the frames to extract</param>
/// <returns>Key frames</returns>
private List<GreyVideoFrame> GetKeyFrames(string videoFileName, int frameWidth, int frameHeight, List<KeyFrameIOInformation> keyFramesInformation)
{
    try
    {
        List<GreyVideoFrame> keyFrames = new List<GreyVideoFrame>();
        ImageConvertor imageConvertor = new ImageConvertor();
        string videoPath = System.IO.Path.GetDirectoryName(videoFileName);
        string framesDirName = Path.Combine(videoPath, "VideoFrames");
        if (!Directory.Exists(framesDirName))
            Directory.CreateDirectory(framesDirName);

        Capture capture = new Capture(videoFileName);
        // QueryGrayFrame returns null once the end of the video is reached.
        Image<Gray, Byte> frame = capture.QueryGrayFrame();
        int frameNumber = 0;
        CheckKeyFrameAndAddIfInList(keyFrames, keyFramesInformation, frame, framesDirName, frameNumber, frameWidth, frameHeight);
        if (frame != null)
        {
            keyFrameExtractedEvent(frameNumber, frameNumber + 1, false);
            do
            {
                frame = capture.QueryGrayFrame();
                ++frameNumber;
                CheckKeyFrameAndAddIfInList(keyFrames, keyFramesInformation, frame, framesDirName, frameNumber, frameWidth, frameHeight);
                if (frame != null)
                    keyFrameExtractedEvent(frameNumber, frameNumber + 1, false);
                else
                    keyFrameExtractedEvent(frameNumber, frameNumber + 1, true);
            }
            while (frame != null);
        }
        else
            keyFrameExtractedEvent(frameNumber, frameNumber + 1, true);

        return keyFrames;
    }
    catch (Exception)
    {
        // Rethrow without resetting the stack trace.
        throw;
    }
}
Example 3: GetFramesDifferences
/// <summary>
/// Computes the differences between consecutive frames (first pass of the algorithm)
/// </summary>
/// <param name="videoFileName">Video file name</param>
/// <param name="frameWidth">Frame width</param>
/// <param name="frameHeight">Frame height</param>
/// <param name="cannyThreshold">Canny threshold</param>
/// <param name="cannyThresholdLinking">Canny edge-linking threshold</param>
/// <returns>Difference value for each pair of consecutive frames</returns>
private List<int> GetFramesDifferences(string videoFileName, int frameWidth, int frameHeight, Gray cannyThreshold, Gray cannyThresholdLinking)
{
    try
    {
        List<int> framesDifferences = new List<int>();
        Capture capture = new Capture(videoFileName);
        Image<Gray, Byte> currentFrame = capture.QueryGrayFrame().Resize(frameWidth, frameHeight, Emgu.CV.CvEnum.INTER.CV_INTER_LINEAR);
        Image<Gray, Byte> nextFrame = null;
        int frameNumber = 0;
        do
        {
            nextFrame = capture.QueryGrayFrame(); //.Resize(frameWidth, frameHeight, Emgu.CV.CvEnum.INTER.CV_INTER_LINEAR);
            ++frameNumber;
            if (nextFrame != null)
            {
                nextFrame = nextFrame.Resize(frameWidth, frameHeight, Emgu.CV.CvEnum.INTER.CV_INTER_LINEAR);
                // Compare the Canny edge maps of two consecutive frames.
                Image<Gray, Byte> currentCannyFrame = currentFrame.Canny(cannyThreshold, cannyThresholdLinking);
                Image<Gray, Byte> nextCannyFrame = nextFrame.Canny(cannyThreshold, cannyThresholdLinking);
                int framesDifference = CountFramesDifference(currentCannyFrame, nextCannyFrame);
                framesDifferences.Add(framesDifference);
                currentFrame = nextFrame;
                framesDifferenceEvent(frameNumber - 1, frameNumber, false);
            }
            else
                framesDifferenceEvent(frameNumber - 1, frameNumber, true);
        }
        while (nextFrame != null);

        return framesDifferences;
    }
    catch (Exception)
    {
        // Rethrow without resetting the stack trace.
        throw;
    }
}
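The helper CountFramesDifference is not shown on this page. A plausible sketch is given below, assuming it simply counts the edge pixels that differ between the two Canny images; this implementation is an assumption, not the original author's code.

// Hypothetical helper: counts the pixels that differ between two binary edge maps.
private int CountFramesDifference(Image<Gray, Byte> currentCannyFrame, Image<Gray, Byte> nextCannyFrame)
{
    // Absolute per-pixel difference of the two edge images; non-zero where the edges differ.
    Image<Gray, Byte> difference = currentCannyFrame.AbsDiff(nextCannyFrame);
    // CountNonzero returns one count per channel; a grey image has a single channel.
    return difference.CountNonzero()[0];
}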
Example 4: LoadHandTrainingPatternsFromDir
private void LoadHandTrainingPatternsFromDir(string path)
{
    try
    {
        byte[] TrainPatterns;
        MNistHeight = 32;
        MNistWidth = 32;
        MNistSize = MNistWidth * MNistHeight;
        int TrainingLabelCount = 10;
        int LabelImageCount = 20;
        TrainingPatternsCount = TrainingLabelCount * LabelImageCount;
        TrainPatterns = new byte[TrainingPatternsCount * MNistSize];
        unsafe
        {
            for (int ii = 0; ii < TrainingLabelCount; ii++)
            {
                string type = ii.ToString("D1");
                //Image<Bgr, Byte> image = new Image<Bgr, byte>(path + "\\" + type + ".jpg").Resize(32, 32, Emgu.CV.CvEnum.INTER.CV_INTER_AREA); //Read the files as an 8-bit Bgr image
                //Image<Gray, Byte> gray = image.Convert<Gray, Byte>(); //Convert it to Grayscale
                Capture cap = new Capture(path + "\\" + type + ".MOV");
                for (int i = 0; i < LabelImageCount; i++)
                {
                    // Grab one greyscale frame per training pattern and shrink it to 32x32.
                    Image<Gray, Byte> gray = cap.QueryGrayFrame().Resize(32, 32, Emgu.CV.CvEnum.INTER.CV_INTER_AREA);
                    for (int j = 0; j < MNistSize; j++)
                    {
                        // Copy the raw pixel bytes straight out of the underlying IplImage buffer.
                        TrainPatterns[ii * MNistSize * LabelImageCount + i * MNistSize + j] = ((byte*)gray.MIplImage.imageData + j)[0];
                    }
                }
                cap.Dispose();
            }
        }
        MNISTTraining = new ByteImageData[TrainingPatternsCount];
        Parallel.For(0, TrainingPatternsCount, parallelOption, j =>
        {
            int label = j / LabelImageCount;
            ByteImageData imageData = new ByteImageData(label, new byte[MNistSize]);
            for (int i = 0; i < MNistSize; i++)
            {
                imageData.Image[i] = TrainPatterns[(j * MNistSize) + i];
            }
            MNISTTraining[j] = imageData;
        });
    }
    catch (Exception)
    {
        throw;
    }
}
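Reading pixels through gray.MIplImage.imageData assumes the 32x32 image is stored without row padding. A possible alternative, assuming the same Emgu CV 2.x Image<Gray, Byte> type, is the managed Data indexer, which avoids the unsafe block; the helper below is a sketch, not part of the original example.

// Hypothetical helper: copies one greyscale frame into a flat byte array
// using the managed Data indexer (row, column, channel) instead of raw pointers.
private static byte[] FrameToBytes(Image<Gray, Byte> gray)
{
    byte[] pixels = new byte[gray.Height * gray.Width];
    for (int row = 0; row < gray.Height; row++)
        for (int col = 0; col < gray.Width; col++)
            pixels[row * gray.Width + col] = gray.Data[row, col, 0];
    return pixels;
}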
Example 5: LoadHandTestingPatternsFromDir
private void LoadHandTestingPatternsFromDir(string path)
{
    try
    {
        byte[] TestPatterns;
        MNistHeight = 32;
        MNistWidth = 32;
        MNistSize = MNistWidth * MNistHeight;
        int TrainingLabelCount = 9;
        int LabelImageCount = 100;
        TestingPatternsCount = TrainingLabelCount * LabelImageCount;
        TestPatterns = new byte[TestingPatternsCount * MNistSize];
        //Capture cap = new Capture(@"D:\ebooks\hand gestrue recognition\hand data set\mov\0.MOV");
        unsafe
        {
            for (int ii = 0; ii < TrainingLabelCount; ii++)
            {
                string type = ii.ToString("D1");
                //Image<Bgr, Byte> image = new Image<Bgr, byte>(path + "\\" + type + ".jpg").Resize(32, 32, Emgu.CV.CvEnum.INTER.CV_INTER_AREA); //Read the files as an 8-bit Bgr image
                //Image<Gray, Byte> gray = image.Convert<Gray, Byte>(); //Convert it to Grayscale
                Capture cap = new Capture(path + "\\" + type + ".MOV");
                for (int i = 0; i < 200; i++)
                {
                    cap.QueryGrayFrame(); // skip the first 200 frames
                }
                for (int i = 0; i < LabelImageCount; i++)
                {
                    Image<Gray, Byte> gray = cap.QueryGrayFrame().Resize(32, 32, Emgu.CV.CvEnum.INTER.CV_INTER_AREA);
                    for (int j = 0; j < MNistSize; j++)
                    {
                        TestPatterns[ii * MNistSize * LabelImageCount + i * MNistSize + j] = ((byte*)gray.MIplImage.imageData + j)[0];
                    }
                }
                cap.Dispose();
            }
        }
        MNISTTesting = new ByteImageData[TestingPatternsCount];
        Parallel.For(0, TestingPatternsCount, parallelOption, j =>
        {
            ByteImageData pattern = new ByteImageData(j / LabelImageCount, new byte[MNistSize]);
            for (int i = 0; i < MNistSize; i++)
            {
                pattern.Image[i] = TestPatterns[(j * MNistSize) + i];
            }
            MNISTTesting[j] = pattern;
        });
    }
    catch (Exception)
    {
        throw;
    }
}
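Both loaders call cap.Dispose() manually. Capture implements IDisposable, so a using block achieves the same cleanup even if QueryGrayFrame throws. A minimal sketch of that pattern, reusing the path and type variables from the example above, is shown below.

// Sketch: the same per-file loop with deterministic cleanup via using.
using (Capture cap = new Capture(path + "\\" + type + ".MOV"))
{
    for (int i = 0; i < 200; i++)
    {
        cap.QueryGrayFrame(); // skip the first 200 frames
    }
    // ... read and store the frames exactly as in the loop above ...
}   // cap.Dispose() runs here even if an exception was thrown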
Example 6: LoadFrameAsync
/// <summary>
/// Loads a frame from a video by its number
/// </summary>
/// <param name="videoFileName">Video file name</param>
/// <param name="keyFrameIOInformation">Frame information</param>
/// <returns>The frame</returns>
public Task<GreyVideoFrame> LoadFrameAsync(string videoFileName, KeyFrameIOInformation keyFrameIOInformation)
{
    try
    {
        if (videoFileName == null || videoFileName.Length == 0)
            throw new ArgumentNullException("Null videoFileName in LoadFrameAsync");
        if (keyFrameIOInformation == null)
            throw new ArgumentNullException("Null keyFrameIOInformation in LoadFrameAsync");
        if (keyFrameIOInformation.Number < 0)
            throw new ArgumentException("Invalid frame number in LoadFrameAsync");
        if (keyFrameIOInformation.Width <= 0)
            throw new ArgumentException("Invalid Width in LoadFrameAsync");
        if (keyFrameIOInformation.Height <= 0)
            throw new ArgumentException("Invalid Height in LoadFrameAsync");

        return Task.Run(() =>
        {
            /* string videoPath = System.IO.Path.GetDirectoryName(videoFileName);
            string framesDirName = System.IO.Path.Combine(videoPath, "VideoFrames");
            if (!Directory.Exists(framesDirName))
                Directory.CreateDirectory(framesDirName);*/
            GreyVideoFrame videoFrame = null;
            int currentFrameNumber = -1;
            Capture capture = new Capture(videoFileName);
            Image<Gray, byte> frame = null;
            // Read frames sequentially until the requested frame number is reached.
            while (currentFrameNumber != keyFrameIOInformation.Number)
            {
                frame = capture.QueryGrayFrame();
                currentFrameNumber++;
            }
            if (frame != null)
            {
                // string frameFileName = Path.Combine(framesDirName, keyFrameIOInformation.Number.ToString() + ".jpg");
                frame = frame.Resize(keyFrameIOInformation.Width, keyFrameIOInformation.Height, Emgu.CV.CvEnum.INTER.CV_INTER_LINEAR);
                // frame.Save(frameFileName);
                videoFrame = CreateVideoFrame(frame, keyFrameIOInformation);
            }
            capture.Dispose();
            return videoFrame;
        });
    }
    catch (Exception)
    {
        // Rethrow without resetting the stack trace.
        throw;
    }
}
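A possible call site is sketched below, assuming KeyFrameIOInformation exposes settable Number, Width and Height properties as used in the validation above; the object initializer, the loader variable, and the file name are assumptions made for illustration.

// Hypothetical usage (inside an async method): load frame 42 of a video, resized to 640x480.
KeyFrameIOInformation info = new KeyFrameIOInformation
{
    Number = 42,
    Width = 640,
    Height = 480
};
GreyVideoFrame keyFrame = await loader.LoadFrameAsync("video.avi", info);   // 'loader' is the class exposing LoadFrameAsync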