This article collects typical usage examples of the C# method Emgu.CV.Capture.Dispose. If you have been wondering how to use Capture.Dispose in C#, what it is for, or where to find working examples, the curated code samples below may help. You can also explore further usage examples of the containing class, Emgu.CV.Capture.
The following 15 code examples of Capture.Dispose are shown, sorted by popularity by default. You can upvote the examples you like or find useful; your ratings help the system recommend better C# code samples.
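Before the examples, here is a minimal sketch of the two disposal patterns you will see throughout: calling Dispose explicitly, and wrapping the Capture in a using block so the underlying native OpenCV handle is released even if an exception is thrown. This sketch assumes the Emgu CV 2.x-style API (Capture, QueryFrame) used by most examples below; DisposeSketch and "video.mpg" are placeholders, not from any example.

public void DisposeSketch()
{
    // Pattern 1: explicit Dispose, as in most examples below.
    // An exception thrown before Dispose leaks the native capture handle.
    Capture capture = new Capture("video.mpg");
    Image<Bgr, byte> frame = capture.QueryFrame();
    capture.Dispose();

    // Pattern 2: a using block guarantees Dispose on every code path.
    using (Capture safeCapture = new Capture("video.mpg"))
    {
        Image<Bgr, byte> safeFrame = safeCapture.QueryFrame();
    } // Dispose is called here automatically, even if QueryFrame throws.
}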
Example 1: write
public void write()
{
    int codec = Emgu.CV.CvInvoke.CV_FOURCC('P', 'I', 'M', '1');
    int fps = 25;
    if (list_timestamps.Count > 0)
    {
        // Probe the first clip for its frame rate, then release the capture immediately
        String tempvideopath = Program.getConfiguration().Mediafolderpath + @"\" + list_timestamps[0].ToString() + ".mpg";
        Capture tempcapture = new Capture(tempvideopath);
        fps = (int)tempcapture.GetCaptureProperty(CAP_PROP.CV_CAP_PROP_FPS);
        tempcapture.Dispose();
    }
    VideoWriter videowriter = new VideoWriter(videopath, codec, fps, 640, 480, true);
    for (int i = 0; i < list_timestamps.Count; i++)
    {
        videopath = Program.getConfiguration().Mediafolderpath + @"\" + list_timestamps[i].ToString() + ".mpg";
        try
        {
            Capture joincapture = new Capture(videopath);
            Image<Bgr, byte> frame = joincapture.QueryFrame();
            // Skip the next 14 frames before appending
            for (int n = 1; n < 15; n++)
                joincapture.QueryFrame();
            while (frame != null)
            {
                videowriter.WriteFrame(frame);
                frame = joincapture.QueryFrame();
            }
            joincapture.Dispose();
            // Notify the main frame to update its progress bar
            ExportVideoProgressEventArgs e = new ExportVideoProgressEventArgs(i);
            DoneAppendingRallyVideoEvent(this, e);
        }
        catch (NullReferenceException) { Console.WriteLine("unreadable video file"); }
    }
    videowriter.Dispose();
}
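Example 1 opens a temporary Capture only to probe the source frame rate, then disposes it; the same micro-pattern recurs in Examples 7 and 10 below. A minimal helper sketch of that probe, using a using block so Dispose also runs if GetCaptureProperty throws (ProbeFps and fallbackFps are illustrative names, not from the original; same Emgu CV 2.x-style API as Example 1):

private int ProbeFps(string videoPath, int fallbackFps)
{
    using (Capture probe = new Capture(videoPath))
    {
        int fps = (int)probe.GetCaptureProperty(CAP_PROP.CV_CAP_PROP_FPS);
        // Some containers report 0 (or a negative value) for FPS; fall back to a default
        return fps > 0 ? fps : fallbackFps;
    }
}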
Example 2: processarVideo
public void processarVideo(ParametrosDinamicos parametros)
{
    mCapture = new Capture(mNomeDoArquivo);
    inicializarVariaveis();
    carregarParametrosNaTela(parametros);
    while (mImagemColorida != null)
    {
        atualizarParametros(parametros);
        mContadorDeFrames++;
        processarImagem(false);
        CvInvoke.WaitKey(100);
        // CvInvoke.cvShowImage("Imagem", mImagemColorida);
        desenharNaImagem(parametros);
        exibirImagem(false);
        if (mSalvarImagem)
        {
            /*CvInvoke.SaveImage(String.Format(@"C:\Users\Tadeu Rahian\Dropbox\Dropbox\UFMG\PFC1\Imagens\mImagemColorida{0}.jpg", mContadorDeFrames), mImagemColorida);
            EnviarImagensEmail(new Attachment(String.Format(@"C:\Users\Tadeu Rahian\Dropbox\Dropbox\UFMG\PFC1\Imagens\mImagemColorida{0}.jpg", mContadorDeFrames)));
            mSalvarImagem = false;*/
        }
        mImagemColorida = mCapture.QueryFrame();
    }
    mCapture.Dispose();
}
Example 3: CatchImages
//fileName: video file to capture images from
//ms: capture interval in milliseconds; ms = 0 means capture all frames (24 frames per second). Note: ms is not used in this excerpt.
public List<string> CatchImages(string fileName, int ms, string outputPath)
{
    _log.Debug("Start to capture");
    if (string.IsNullOrWhiteSpace(fileName) || string.IsNullOrWhiteSpace(outputPath))
    {
        _log.Error("Cannot catch images from path: " + fileName + " and output to: " + outputPath);
    }
    //List<Image<Bgr, Byte>> imageList = new List<Image<Bgr, Byte>>();
    List<string> imagePath = new List<string>();
    // Open the file once just to read the frame count, then dispose right away
    Capture capture = new Capture(fileName);
    double frameCount = capture.GetCaptureProperty(CapProp.FrameCount);
    capture.Dispose();
    int index = 0;
    int fc = (int)frameCount;
    Mat mat = null;
    try
    {
        //TODO: modify this to change the capture period.
        while (index < 30 /*fc*/)
        {
            index++;
            // Reopen the file for each frame; the using blocks dispose both the capture and the frame
            using (capture = new Capture(fileName))
            {
                capture.SetCaptureProperty(CapProp.PosFrames, (double)index);
                using (mat = capture.QueryFrame())
                {
                    string indexStr = index < 10 ? "0" + index : index.ToString();
                    string imgPath = outputPath + "\\" + indexStr;
                    if (!Directory.Exists(outputPath))
                    {
                        Directory.CreateDirectory(outputPath);
                    }
                    //long quality = 60;
                    //saveJpeg(imgPath, mat.Bitmap, quality);
                    string grayImgName = saveGrayAndThreshold(imgPath, mat.Bitmap);
                    if (!string.IsNullOrEmpty(grayImgName))
                    {
                        imagePath.Add(grayImgName);
                    }
                }
            }
        }
    }
    catch (System.Exception ex)
    {
        _log.Error("Exception:", ex);
    }
    return imagePath;
}
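Note that Example 3 reopens the video file once per frame, which keeps each Capture short-lived but parses the file header repeatedly. A sketch of an alternative that keeps one Capture for the whole loop, seeks with CapProp.PosFrames, and disposes it once at the end (ExtractFrames and maxFrames are illustrative names, not from the original; same Emgu CV 3.x-style API as Example 3):

private List<Mat> ExtractFrames(string fileName, int maxFrames)
{
    List<Mat> frames = new List<Mat>();
    using (Capture capture = new Capture(fileName)) // one open, one Dispose
    {
        for (int index = 0; index < maxFrames; index++)
        {
            capture.SetCaptureProperty(CapProp.PosFrames, (double)index);
            Mat mat = capture.QueryFrame();
            if (mat == null) // end of stream reached
                break;
            frames.Add(mat);
        }
    }
    return frames;
}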
Example 4: TestFileCapturePause
public void TestFileCapturePause()
{
    int totalFrames1 = 0;
    Capture capture1 = new Capture(EmguAssert.GetFile("tree.avi"));
    //capture one will continue capturing all the frames.
    EventHandler captureHandle1 = delegate
    {
        Mat img = new Mat();
        capture1.Retrieve(img);
        totalFrames1++;
        Trace.WriteLine(String.Format("capture 1 frame {0}: {1}", totalFrames1, DateTime.Now.ToString()));
    };
    capture1.ImageGrabbed += captureHandle1;
    capture1.Start();
    System.Threading.Thread.Sleep(2);
    int totalFrames2 = 0;
    Capture capture2 = new Capture(EmguAssert.GetFile("tree.avi"));
    int counter = 0;
    //capture 2 will capture 2 frames, pause for 1 second, then continue
    EventHandler captureHandle = delegate
    {
        counter++;
        totalFrames2++;
        bool needPause = (counter >= 2);
        if (needPause)
        {
            capture2.Pause();
            counter = 0;
        }
        Mat img = new Mat();
        capture2.Retrieve(img);
        Trace.WriteLine(String.Format("capture 2 frame {0}: {1}", totalFrames2, DateTime.Now.ToString()));
        if (needPause)
        {
            System.Threading.ThreadPool.QueueUserWorkItem(delegate
            {
                Trace.WriteLine("Sleep for 1 sec");
                System.Threading.Thread.Sleep(1000);
                capture2.Start();
            });
        }
    };
    capture2.ImageGrabbed += captureHandle;
    capture2.Start();
    //int totalFrames = 69;
    Stopwatch s = Stopwatch.StartNew();
    while (totalFrames1 != totalFrames2)
    {
        System.Threading.Thread.Sleep(1000);
        if (s.ElapsedMilliseconds > 120 * 1000)
        {
            EmguAssert.IsTrue(false, "Unable to finish reading frames in 2 minutes");
            break;
        }
    }
    capture1.Dispose();
    capture2.Dispose();
}
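Example 4 drives two captures through Emgu's event-driven grab loop. A condensed sketch of that pattern on its own, stopping the grab loop before disposing ("video.avi" is a placeholder and the five-second sleep merely keeps the demo alive; ImageGrabbed, Start, Stop, and Retrieve are the same members Example 4 uses):

public void EventDrivenSketch()
{
    Capture capture = new Capture("video.avi");
    capture.ImageGrabbed += delegate
    {
        Mat frame = new Mat();
        capture.Retrieve(frame); // copy out the frame that was just grabbed
        // ... process the frame here ...
    };
    capture.Start();                     // grabbing runs on a background thread
    System.Threading.Thread.Sleep(5000); // demo only: let it run for a while
    capture.Stop();                      // stop grabbing before releasing
    capture.Dispose();
}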
Example 5: AddImagesToHuman
public List<Image<Gray, byte>> AddImagesToHuman(string name)
{
    var images = new List<Image<Gray, byte>>();
    var count = 0;
    var capture = new Capture();
    while (count < FaceCount)
    {
        var image = capture.QueryFrame().ToImage<Gray, byte>();
        var detectedFace = DetectFace(image);
        if (detectedFace != null)
        {
            images.Add(detectedFace);
            count++;
            OnCount(count, FaceCount);
            Thread.Sleep(500);
        }
    }
    ServicesWorker.GetInstance<HumanService>().AddHuman(name, images);
    capture.Dispose();
    return images;
}
Example 6: StartCapture
public async void StartCapture()
{
    // Can only access the first camera without CL Eye SDK
    if (_camera.TrackerId == 0 && !_camera.Design)
    {
        _capture = new Capture(_camera.TrackerId);
        _ctsCameraCalibration = new CancellationTokenSource();
        CancellationToken token = _ctsCameraCalibration.Token;
        _capture.Start();
        try
        {
            // needed to avoid bitmapsource access violation?
            _captureTask = Task.Run(() =>
            {
                while (!token.IsCancellationRequested)
                {
                    ImageGrabbed();
                }
            }, token);
            await _captureTask;
        }
        catch (OperationCanceledException)
        {
        }
        catch (Exception ex)
        {
            Console.WriteLine(ex.StackTrace);
        }
        finally
        {
            _capture.Stop();
            _capture.Dispose();
        }
    }
}
Example 7: button_startmove_Click
private void button_startmove_Click(object sender, EventArgs e)
{
    long start_time;
    // initiating a new move along with a new timestamp as identifier
    if (!new_move)
    {
        live_video_click_count = 0;
        // Enable the Spielzug/Move property buttons
        button_kill.Enabled = true;
        button_smash.Enabled = true;
        button_drop.Enabled = true;
        button_bigPoint.Enabled = true;
        button_timeout.Enabled = true;
        radioButton_playerupright.Enabled = true;
        radioButton_playerupleft.Enabled = true;
        radioButton_playerdownleft.Enabled = true;
        radioButton_playerdownright.Enabled = true;
        radioButton_playerupright.Checked = false;
        radioButton_playerupleft.Checked = false;
        radioButton_playerdownleft.Checked = false;
        radioButton_playerdownright.Checked = false;
        start_time = getCurrentTime(); // get current time as identifier
        while (List_timestamps.Contains(start_time))
            start_time = getCurrentTime();
        List_timestamps.Add(start_time); // add timestamp to the list we use for the screenshots
        // Create a new Rally
        Game.Current_rally =
            new Rally(configuration.Teama.Player1.Current_position,
                configuration.Teama.Player2.Current_position,
                configuration.Teamb.Player1.Current_position,
                configuration.Teamb.Player2.Current_position,
                start_time, Game.Sets.Count);
        // Clear the BirdView
        pictureBox_birdview.Invalidate();
        rallyframes = new List<Image<Bgr, byte>>();
        String move_identifier = start_time.ToString();
        String videopath = Program.getConfiguration().Mediafolderpath + @"\" + move_identifier + ".mpg";
        if (capture_device_index != -1)
            this.videoWriter = new VideoWriter(videopath, Emgu.CV.CvInvoke.CV_FOURCC('P', 'I', 'M', '1'), fps, 640, 480, true);
        // start a new video capture from video
        if (capture_device_index == -1)
        {
            // Probe the loaded video for its frame rate, then dispose the temporary capture
            Capture tempcapture = new Capture(loaded_videopath);
            int tempfps = (int)tempcapture.GetCaptureProperty(CAP_PROP.CV_CAP_PROP_FPS);
            //this.videoWriter = new VideoWriter(videopath, Emgu.CV.CvInvoke.CV_FOURCC('P', 'I', 'M', '1'), tempfps, 640, 480, true);
            startmilisecond = axWindowsMediaPlayer_live.Ctlcontrols.currentPosition;
            axWindowsMediaPlayer_live.Ctlcontrols.play();
            tempcapture.Dispose();
        }
        button_startmove.Text = "End of rally";
        button_startmove.ForeColor = System.Drawing.Color.Red;
        new_move = true;
    }
    else
    {
        live_video_click_count = 0;
        // Disable the Spielzug/Move property buttons
        button_kill.Enabled = false;
        button_smash.Enabled = false;
        button_drop.Enabled = false;
        button_bigPoint.Enabled = false;
        button_timeout.Enabled = false;
        radioButton_playerupright.Enabled = false;
        radioButton_playerupleft.Enabled = false;
        radioButton_playerdownleft.Enabled = false;
        radioButton_playerdownright.Enabled = false;
        radioButton_playerupright.Checked = false;
        radioButton_playerupleft.Checked = false;
        radioButton_playerdownleft.Checked = false;
        radioButton_playerdownright.Checked = false;
        // AUTO handling of score
        // Save into the list and add to xml output
        if (Game.Current_rally != null)
        {
            Set current_set = Game.Sets[Game.Sets.Count - 1];
//......... portions of this code omitted .........
Example 8: CaptureFrame
public void CaptureFrame()
{
    lbl3 = "0";
    lbl4 = "";
    NamePersons.Add("");
    grabber = new Capture();
    //Get the current frame from the capture device
    try
    {
        currentFrame = grabber.QueryFrame().Resize(320, 240, Emgu.CV.CvEnum.INTER.CV_INTER_CUBIC);
    }
    catch { } // ignore grab failures; currentFrame keeps its previous value
    //Convert it to grayscale
    gray = currentFrame.Convert<Gray, Byte>();
    //Face detector
    MCvAvgComp[][] facesDetected = gray.DetectHaarCascade(
        face,
        1.2,
        10,
        Emgu.CV.CvEnum.HAAR_DETECTION_TYPE.DO_CANNY_PRUNING,
        new Size(20, 20));
    //Action for each face detected
    foreach (MCvAvgComp f in facesDetected[0])
    {
        t = t + 1;
        result = currentFrame.Copy(f.rect).Convert<Gray, byte>().Resize(100, 100, Emgu.CV.CvEnum.INTER.CV_INTER_CUBIC);
        //Draw a red rectangle around the detected face
        currentFrame.Draw(f.rect, new Bgr(Color.Red), 2);
        if (trainingImages.ToArray().Length != 0)
        {
            // UpdateRecognizer();
            name = recognizer.Recognize(new Image<Gray, byte>(ImageProcessing.ImagePreProcessing(result.ToBitmap())));
            //Draw the label for each face detected and recognized
            currentFrame.Draw(name, ref font, new Point(f.rect.X - 2, f.rect.Y - 2), new Bgr(Color.LightGreen));
        }
        NamePersons[t - 1] = name;
        NamePersons.Add("");
        //Set the number of faces detected in the scene
        lbl3 = facesDetected[0].Length.ToString();
    }
    t = 0;
    //Concatenate the names of the persons recognized
    for (int nnn = 0; nnn < facesDetected[0].Length; nnn++)
    {
        names = names + NamePersons[nnn] + ", ";
    }
    //Show the faces processed and recognized
    pictureBoxFrameGrabber.Image = currentFrame.ToBitmap();
    lbl3 = names;
    names = "";
    //Clear the list (vector) of names
    NamePersons.Clear();
    grabber.Dispose();
    grabber = null;
}
Example 9: camListComboBox_SelectedIndexChanged
private void camListComboBox_SelectedIndexChanged(object sender, EventArgs e)
{
    KeyValuePair<int, string> SelectedItem = (KeyValuePair<int, string>)camListComboBox.SelectedItem;
    if (_camIndex != SelectedItem.Key)
    {
        _camIndex = SelectedItem.Key;
        if (_capture != null)
        {
            _capture.Dispose();
        }
        _capture = new Capture(_camIndex);
        GetCaptureInfo();
        _capture.Dispose();
    }
}
Example 10: writeRallyVideoFromLoaded
private void writeRallyVideoFromLoaded(double s, double e, VideoWriter writer, String loadedvideopath)
{
    double start = Math.Floor(s);
    double end = Math.Ceiling(e);
    double startmsec = start * 1000;
    double endmsec = end * 1000;
    Capture tempcapture = new Capture(loaded_videopath);
    Image<Bgr, Byte> frame;
    if (tempcapture != null)
    {
        //tempcapture.SetCaptureProperty(CAP_PROP.CV_CAP_PROP_POS_MSEC, start);
        double fps2 = tempcapture.GetCaptureProperty(CAP_PROP.CV_CAP_PROP_FPS);
        //tempcapture.SetCaptureProperty(CAP_PROP.CV_CAP_PROP_POS_MSEC, 100);
        // Seeking by time is not used here, so skip ahead frame by frame
        for (int i = 0; i < (start * fps2); i++)
            tempcapture.QueryFrame();
        int durationframes = (int)((end - start) * fps2);
        int count = 0;
        while (count < durationframes)
        {
            frame = tempcapture.QueryFrame();
            videoWriter.WriteFrame(frame); // note: writes to the videoWriter field; the writer parameter is unused in this excerpt
            count++;
        }
    }
    tempcapture.Dispose();
    videoWriter.Dispose();
}
Example 11: LoadFrameAsync
/// <summary>
/// Loads a frame from a video by its number
/// </summary>
/// <param name="videoFileName">Video file name</param>
/// <param name="keyFrameIOInformation">Information about the frame</param>
/// <returns>The frame</returns>
public Task<GreyVideoFrame> LoadFrameAsync(string videoFileName, KeyFrameIOInformation keyFrameIOInformation)
{
    try
    {
        if (videoFileName == null || videoFileName.Length == 0)
            throw new ArgumentNullException("Null videoFileName in LoadFrameAsync");
        if (keyFrameIOInformation == null)
            throw new ArgumentNullException("Null keyFrameIOInformation in LoadFrameAsync");
        if (keyFrameIOInformation.Number < 0)
            throw new ArgumentException("Error frameNumber in LoadFrameAsync");
        if (keyFrameIOInformation.Width <= 0)
            throw new ArgumentException("Error Width in LoadFrameAsync");
        if (keyFrameIOInformation.Height <= 0)
            throw new ArgumentException("Error Height in LoadFrameAsync");
        return Task.Run(() =>
        {
            /* string videoPath = System.IO.Path.GetDirectoryName(videoFileName);
            string framesDirName = System.IO.Path.Combine(videoPath, "VideoFrames");
            if (!Directory.Exists(framesDirName))
                Directory.CreateDirectory(framesDirName);*/
            GreyVideoFrame videoFrame = null;
            int currentFrameNumber = -1;
            Capture capture = new Capture(videoFileName);
            Image<Gray, byte> frame = null;
            // Read forward until the requested frame number is reached
            while (currentFrameNumber != keyFrameIOInformation.Number)
            {
                frame = capture.QueryGrayFrame();
                currentFrameNumber++;
            }
            if (frame != null)
            {
                // string frameFileName = Path.Combine(framesDirName, keyFrameIOInformation.Number.ToString() + ".jpg");
                frame = frame.Resize(keyFrameIOInformation.Width, keyFrameIOInformation.Height, Emgu.CV.CvEnum.INTER.CV_INTER_LINEAR);
                // frame.Save(frameFileName);
                videoFrame = CreateVideoFrame(frame, keyFrameIOInformation);
            }
            capture.Dispose();
            return videoFrame;
        });
    }
    catch (Exception)
    {
        throw; // rethrow without resetting the stack trace
    }
}
Example 12: backgroundWorker1_DoWork
private void backgroundWorker1_DoWork(object sender, DoWorkEventArgs e)
{
    //StreamWriter sw = new StreamWriter(@opendutpath.FileName, true);
    globalpar.testnum = 0;
    byte[] DUTResp = new byte[0];
    int inihsa = globalpar.hsa;
    int inihbp = globalpar.hbp;
    int inihfp = globalpar.hfp;
    do
    {
        if (backgroundWorker1.CancellationPending == true)
        {
            e.Cancel = true;
            break;
        }
        else
        {
            globalpar.bitrate = ((globalpar.hsa + globalpar.hbp + globalpar.hfp + globalpar.hact) * (globalpar.vsa + globalpar.vbp + globalpar.vfp + globalpar.vact) * globalpar.pixelformat / globalpar.lanecnt * globalpar.fr / 1000000 / 2 + 1);
            PE(client.PGRemoteCmd(RPCCmds.START_EDIT_CONFIG, ref errMsg, ref statusMsg));
            PE(client.PGRemoteCmd(RPCCmds.SET_HS_FREQ, ((float)globalpar.bitrate + 1) * 1000000, ref errMsg, ref statusMsg));
            PE(client.PGRemoteCmd(RPCCmds.SET_LP_FREQ, (float)18e+6, ref errMsg, ref statusMsg));
            PE(client.PGRemoteCmd(RPCCmds.END_EDIT_CONFIG, ref errMsg, ref statusMsg));
            PE(client.PGRemoteCmd(RPCCmds.SET_TIMING_HFPORCH, globalpar.hfp, ref errMsg, ref statusMsg));
            PE(client.PGRemoteCmd(RPCCmds.SET_TIMING_HBPORCH, globalpar.hbp, ref errMsg, ref statusMsg));
            PE(client.PGRemoteCmd(RPCCmds.SET_TIMING_HSYNC, globalpar.hsa, ref errMsg, ref statusMsg));
            System.Threading.Thread.Sleep(1000);
            if (globalpar.pixelformat == 24)
            {
                PE(client.MIPICmd(RPCDefs.PACKED_PIXEL_STREAM_888, 0, false, RPCDefs.DT_HS, 0, 1, 0, 0, textbox_videopicpath.Text, null, ref errMsg, ref statusMsg));
            }
            else if (globalpar.pixelformat == 18)
            {
                PE(client.MIPICmd(RPCDefs.PACKED_PIXEL_STREAM_666, 0, false, RPCDefs.DT_HS, 0, 1, 0, 0, textbox_videopicpath.Text, null, ref errMsg, ref statusMsg));
            }
            else
            {
                PE(client.MIPICmd(RPCDefs.PACKED_PIXEL_STREAM_565, 0, false, RPCDefs.DT_HS, 0, 1, 0, 0, textbox_videopicpath.Text, null, ref errMsg, ref statusMsg));
            }
            PE(client.MIPICmd(RPCDefs.BTA, 0, false, RPCDefs.DT_LP, 0, 0, 0, 0, "", null, ref errMsg, ref statusMsg));
            PE(client.PGRemoteQuery(RPCCmds.GET_DUT_RESPONSE, 0, ref DUTResp, ref errMsg, ref statusMsg));
            System.Threading.Thread.Sleep(globalpar.waittime);
            PE(client.MIPICmd(RPCDefs.BTA, 0, false, RPCDefs.DT_LP, 0, 0, 0, 0, "", null, ref errMsg, ref statusMsg));
            PE(client.PGRemoteQuery(RPCCmds.GET_DUT_RESPONSE, 0, ref DUTResp, ref errMsg, ref statusMsg));
            PE(client.PGRemoteCmd(RPCCmds.SAVE_DUT_RESPONSE, textboxrpcsave.Text, 100, globalpar.pixelformat + "bit" + globalpar.videotype + (globalpar.bitrate) * 2 + " Mbps" + " " + globalpar.hsa + " " + globalpar.hbp + " " + globalpar.hfp + measfluke(), 0, ref errMsg, ref statusMsg));
            backgroundWorker1.ReportProgress(0);
            if (checkBox_webcam.Checked == true)
            {
                // Grab a single webcam snapshot of the DUT, then release the camera
                cap = new Capture(0);
                Image<Bgr, Byte> camimage = cap.QueryFrame();
                //because we are using an autosize picturebox we need to do a thread safe update
                DisplayImage(camimage.ToBitmap());
                cap.Dispose();
                string savepath = Path.GetDirectoryName(opendutpath.FileName);
                pictureBox1.Image.Save(@savepath + "\\" + globalpar.pixelformat + "bit" + globalpar.videotype + (Convert.ToInt32(textbox_hsfreq.Text) * 2).ToString() + "Mbps.jpg", System.Drawing.Imaging.ImageFormat.Jpeg);
            }
            else
            {
            }
            PE(client.PGRemoteCmd(RPCCmds.PG_ABORT, ref errMsg, ref statusMsg));
            switch (globalpar.testnum)
            {
                case 0:
                    globalpar.hsa = globalpar.hsa + 20;
                    globalpar.testnum = 1;
                    break;
                case 1:
                    globalpar.hbp = globalpar.hbp + 20;
                    globalpar.testnum = 2;
                    break;
                case 2:
                    globalpar.hfp = globalpar.hfp + 20;
                    globalpar.testnum = 0;
                    break;
            }
            /*
            globalpar.bitrate = ((globalpar.hsa + globalpar.hbp + globalpar.hfp + globalpar.hact) * (globalpar.vsa + globalpar.vbp + globalpar.vfp + globalpar.vact) * globalpar.pixelformat / globalpar.lanecnt * globalpar.fr / 1000000 / 2 + 1);
            PE(client.PGRemoteCmd(RPCCmds.START_EDIT_CONFIG, ref errMsg, ref statusMsg));
            PE(client.PGRemoteCmd(RPCCmds.SET_HS_FREQ, ((float)globalpar.bitrate + 1) * 1000000, ref errMsg, ref statusMsg));
            PE(client.PGRemoteCmd(RPCCmds.SET_LP_FREQ, (float)18e+6, ref errMsg, ref statusMsg));
            PE(client.PGRemoteCmd(RPCCmds.END_EDIT_CONFIG, ref errMsg, ref statusMsg));
            PE(client.PGRemoteCmd(RPCCmds.SET_TIMING_HFPORCH, globalpar.hfp, ref errMsg, ref statusMsg));
            PE(client.PGRemoteCmd(RPCCmds.SET_TIMING_HBPORCH, globalpar.hbp, ref errMsg, ref statusMsg));
            PE(client.PGRemoteCmd(RPCCmds.SET_TIMING_HSYNC, globalpar.hsa, ref errMsg, ref statusMsg));
            //sw.WriteLine(globalpar.hsa + " " + globalpar.hbp + " " + globalpar.hfp + " " + globalpar.bitrate);
            */
        }
    } while (globalpar.bitrate < globalpar.targetbitrate);
//......... portions of this code omitted .........
Example 13: backgroundskewswing_DoWork
//......... portions of this code omitted .........
                            PE(client.PGRemoteCmd(RPCCmds.END_EDIT_CONFIG, ref errMsg, ref statusMsg));
                            PE(client.MIPICmd(RPCDefs.PACKED_PIXEL_STREAM_666, 0, false, RPCDefs.DT_HS, 0, 1, 0, 0, textbox_videopicpath.Text, null, ref errMsg, ref statusMsg));
                            PE(client.MIPICmd(RPCDefs.BTA, 0, false, RPCDefs.DT_LP, 0, 0, 0, 0, "", null, ref errMsg, ref statusMsg));
                            PE(client.PGRemoteQuery(RPCCmds.GET_DUT_RESPONSE, 0, ref DUTResp, ref errMsg, ref statusMsg));
                            System.Threading.Thread.Sleep(globalpar.waittime);
                            PE(client.MIPICmd(RPCDefs.BTA, 0, false, RPCDefs.DT_LP, 0, 0, 0, 0, "", null, ref errMsg, ref statusMsg));
                            PE(client.PGRemoteQuery(RPCCmds.GET_DUT_RESPONSE, 0, ref DUTResp, ref errMsg, ref statusMsg));
                            PE(client.PGRemoteCmd(RPCCmds.SAVE_DUT_RESPONSE, textboxrpcsave.Text, 100, globalpar.pixelformat + "bit , " + globalpar.videotype + " , " + (globalpar.bitrate) * 2 + " Mbps , " + ", CM=" + cmvolt[i] + " mV , Diff = " + difvolt[j] + ", UI = " + (float)1 / 20 * k + measfluke(), 0, ref errMsg, ref statusMsg));
                        }
                    }
                }
            }
            else
            {
                for (int i = 0; i < 3; i++) // CM volt
                {
                    for (int j = 0; j < 5; j++) // diff volt
                    {
                        PE(client.PGRemoteCmd(RPCCmds.START_EDIT_CONFIG, ref errMsg, ref statusMsg));
                        PE(client.PGRemoteCmd(RPCCmds.ENABLE_AUTO_SET_CLOCK_DELAY, 0, ref errMsg, ref statusMsg));
                        PE(client.PGRemoteCmd(RPCCmds.SET_HS_LOW_VOLT, 1, (cmvolt[i] - difvolt[j] * 2), ref errMsg, ref statusMsg));
                        PE(client.PGRemoteCmd(RPCCmds.SET_HS_HIGH_VOLT, 1, (cmvolt[i] + difvolt[j] * 2), ref errMsg, ref statusMsg));
                        PE(client.PGRemoteCmd(RPCCmds.END_EDIT_CONFIG, ref errMsg, ref statusMsg));
                        for (float k = 1; k <= 19; k++)
                        {
                            float skew = ui / 20 * k;
                            PE(client.PGRemoteCmd(RPCCmds.START_EDIT_CONFIG, ref errMsg, ref statusMsg));
                            PE(client.PGRemoteCmd(RPCCmds.SET_HS_DELAY, 4, skew, ref errMsg, ref statusMsg));
                            PE(client.PGRemoteCmd(RPCCmds.END_EDIT_CONFIG, ref errMsg, ref statusMsg));
                            PE(client.MIPICmd(RPCDefs.PACKED_PIXEL_STREAM_565, 0, false, RPCDefs.DT_HS, 0, 1, 0, 0, textbox_videopicpath.Text, null, ref errMsg, ref statusMsg));
                            PE(client.MIPICmd(RPCDefs.BTA, 0, false, RPCDefs.DT_LP, 0, 0, 0, 0, "", null, ref errMsg, ref statusMsg));
                            PE(client.PGRemoteQuery(RPCCmds.GET_DUT_RESPONSE, 0, ref DUTResp, ref errMsg, ref statusMsg));
                            System.Threading.Thread.Sleep(globalpar.waittime);
                            PE(client.MIPICmd(RPCDefs.BTA, 0, false, RPCDefs.DT_LP, 0, 0, 0, 0, "", null, ref errMsg, ref statusMsg));
                            PE(client.PGRemoteQuery(RPCCmds.GET_DUT_RESPONSE, 0, ref DUTResp, ref errMsg, ref statusMsg));
                            PE(client.PGRemoteCmd(RPCCmds.SAVE_DUT_RESPONSE, textboxrpcsave.Text, 100, globalpar.pixelformat + "bit , " + globalpar.videotype + " , " + (globalpar.bitrate) * 2 + " Mbps , " + ", CM=" + cmvolt[i] + " mV , Diff = " + difvolt[j] + ", UI = " + (float)1 / 20 * k + measfluke(), 0, ref errMsg, ref statusMsg));
                        }
                    }
                }
            }
            backgroundskewswing.ReportProgress(0);
            if (checkBox_webcam.Checked == true)
            {
                cap = new Capture(0);
                Image<Bgr, Byte> camimage = cap.QueryFrame();
                //because we are using an autosize picturebox we need to do a thread safe update
                DisplayImage(camimage.ToBitmap());
                cap.Dispose();
                string savepath = Path.GetDirectoryName(opendutpath.FileName);
                pictureBox1.Image.Save(@savepath + "\\" + globalpar.pixelformat + "bit" + globalpar.videotype + (Convert.ToInt32(textbox_hsfreq.Text) * 2).ToString() + "Mbps.jpg", System.Drawing.Imaging.ImageFormat.Jpeg);
            }
            else
            {
            }
            PE(client.PGRemoteCmd(RPCCmds.PG_ABORT, ref errMsg, ref statusMsg));
            switch (globalpar.testnum)
            {
                case 0:
                    globalpar.hsa = globalpar.hsa + 30;
                    globalpar.testnum = 1;
                    break;
                case 1:
                    globalpar.hbp = globalpar.hbp + 30;
                    globalpar.testnum = 2;
                    break;
                case 2:
                    globalpar.hfp = globalpar.hfp + 30;
                    globalpar.testnum = 0;
                    break;
            }
            /*
            globalpar.bitrate = ((globalpar.hsa + globalpar.hbp + globalpar.hfp + globalpar.hact) * (globalpar.vsa + globalpar.vbp + globalpar.vfp + globalpar.vact) * globalpar.pixelformat / globalpar.lanecnt * globalpar.fr / 1000000 / 2 + 1);
            PE(client.PGRemoteCmd(RPCCmds.START_EDIT_CONFIG, ref errMsg, ref statusMsg));
            PE(client.PGRemoteCmd(RPCCmds.SET_HS_FREQ, ((float)globalpar.bitrate + 1) * 1000000, ref errMsg, ref statusMsg));
            PE(client.PGRemoteCmd(RPCCmds.SET_LP_FREQ, (float)18e+6, ref errMsg, ref statusMsg));
            PE(client.PGRemoteCmd(RPCCmds.END_EDIT_CONFIG, ref errMsg, ref statusMsg));
            PE(client.PGRemoteCmd(RPCCmds.SET_TIMING_HFPORCH, globalpar.hfp, ref errMsg, ref statusMsg));
            PE(client.PGRemoteCmd(RPCCmds.SET_TIMING_HBPORCH, globalpar.hbp, ref errMsg, ref statusMsg));
            PE(client.PGRemoteCmd(RPCCmds.SET_TIMING_HSYNC, globalpar.hsa, ref errMsg, ref statusMsg));
            //sw.WriteLine(globalpar.hsa + " " + globalpar.hbp + " " + globalpar.hfp + " " + globalpar.bitrate);
            */
        }
    } while (globalpar.bitrate < globalpar.targetbitrate);
    //sw.Close();
}
Example 14: LoadHandTestingPatternsFromDir
private void LoadHandTestingPatternsFromDir(string path)
{
    try
    {
        byte[] TestPatterns;
        MNistHeight = 32;
        MNistWidth = 32;
        MNistSize = MNistWidth * MNistHeight;
        int TrainingLabelCount = 9;
        int LabelImageCount = 100;
        TestingPatternsCount = TrainingLabelCount * LabelImageCount;
        TestPatterns = new byte[TestingPatternsCount * MNistSize];
        //Capture cap = new Capture(@"D:\ebooks\hand gestrue recognition\hand data set\mov\0.MOV");
        unsafe
        {
            for (int ii = 0; ii < TrainingLabelCount; ii++)
            {
                string type = ii.ToString("D1");
                //Image<Bgr, Byte> image = new Image<Bgr, byte>(path + "\\" + type + ".jpg").Resize(32, 32, Emgu.CV.CvEnum.INTER.CV_INTER_AREA); //Read the files as an 8-bit Bgr image
                //Image<Gray, Byte> gray = image.Convert<Gray, Byte>(); //Convert it to Grayscale
                Capture cap = new Capture(path + "\\" + type + ".MOV");
                for (int i = 0; i < 200; i++)
                {
                    cap.QueryGrayFrame(); // skip the first 200 frames
                }
                for (int i = 0; i < LabelImageCount; i++)
                {
                    Image<Gray, Byte> gray = cap.QueryGrayFrame().Resize(32, 32, Emgu.CV.CvEnum.INTER.CV_INTER_AREA);
                    for (int j = 0; j < MNistSize; j++)
                    {
                        TestPatterns[ii * MNistSize * LabelImageCount + i * MNistSize + j] = ((byte*)gray.MIplImage.imageData + j)[0];
                    }
                }
                cap.Dispose();
            }
        }
        MNISTTesting = new ByteImageData[TestingPatternsCount];
        Parallel.For(0, TestingPatternsCount, parallelOption, j =>
        {
            ByteImageData pattern = new ByteImageData(j / LabelImageCount, new byte[MNistSize]);
            for (int i = 0; i < MNistSize; i++)
            {
                pattern.Image[i] = TestPatterns[(j * MNistSize) + i];
            }
            MNISTTesting[j] = pattern;
        });
    }
    catch (Exception)
    {
        throw;
    }
}
Example 15: fFTWebcamToolStripMenuItem_Click
private void fFTWebcamToolStripMenuItem_Click(object sender, EventArgs e)
{
    Image<Gray, float> fft_Amp = null;
    Image<Gray, float> fft_Phase = null;
    Capture capture = new Capture();
    img = capture.QueryFrame().Clone();
    capture.Dispose();
    FFT.GetFFTAmpAndPhase(img, out fft_Amp, out fft_Phase);
    fft_Amp = FFT.PrepareForVizualization(fft_Amp, true);
    fft_Phase = FFT.PrepareForVizualization(fft_Phase, false);
    ShowIMG.ShowIMGStatic(fft_Amp, fft_Phase);
}