This article collects typical usage examples of the C++ method VideoCapture::release. If you are wondering what VideoCapture::release does, how to call it, or what real-world code that uses it looks like, the curated samples here should help. You can also explore the VideoCapture class itself for further context.
Below are 15 code examples of VideoCapture::release drawn from open-source projects, sorted by popularity.
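Before the project samples, here is a minimal sketch of the typical open / read / release life cycle of a cv::VideoCapture. It is not taken from any of the examples below; the window name, the use of the default camera (device 0), and the exit key are arbitrary choices for illustration.

#include <opencv2/opencv.hpp>
#include <iostream>

int main()
{
    cv::VideoCapture cap(0);              // open the default camera (illustrative choice)
    if (!cap.isOpened())
    {
        std::cerr << "Could not open capture device" << std::endl;
        return 1;
    }
    cv::Mat frame;
    while (cap.read(frame))               // stops when the stream ends or the device fails
    {
        cv::imshow("preview", frame);     // arbitrary window name
        if (cv::waitKey(10) == 27)        // ESC quits early
            break;
    }
    cap.release();                        // close the device/file and free its buffers
    cv::destroyAllWindows();
    return 0;
}

Note that the cv::VideoCapture destructor also calls release(), so the explicit call mainly matters when the object is global or long-lived and you want to free the camera or file handle early, which is exactly the situation in most of the examples below.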
Example 1: catch
bool
openCaptureDevice(VideoCapture &reader, int dev)
{
    bool opened = false;
    try
    {
        if (!reader.isOpened())
        {
            std::cout << "\nTrying to open device:\n " << dev << "\n";
            opened = reader.open(dev);
        }
    } catch (...)
    {
        std::cerr << "Stopping search. Cam not detected with input:\n " << dev
                  << "\n";
        reader.release();
        opened = false;
    }
    if (!opened)
    {
        reader.release();
    }
    return opened;
}
Example 2: forced
/*
  This function handles the releasing of objects when this node is
  requested or forced (via CTRL+C) to shut down.
*/
void onShutdown(int sig){
    destroyWindow(CVWINDOW);
    cap1.release();
    cap2.release();
    cap3.release();
    ROS_INFO("All objects should have been released, proper shutdown complete");
    ros::shutdown();
}
Example 3: my_handler
void my_handler(int s)
{
    printf("Caught signal %d\n", s);
    control.stop();
    cap.release();
    exit(1);
}
Example 4: main
//==============================================================================
int main(int argc,char** argv)
{
    //parse command line arguments
    if(argc < 2){cout << usage << endl; return 0;}
    if(parse_help(argc,argv)){cout << usage << endl; return 0;}
    //load detector model
    face_detector detector = load_ft<face_detector>(argv[1]);
    //open video stream
    VideoCapture cam;
    if(argc > 2)cam.open(argv[2]); else cam.open(0);
    if(!cam.isOpened()){
        cout << "Failed opening video file." << endl
             << usage << endl; return 0;
    }
    //detect until user quits
    namedWindow("face detector");
    while(cam.get(CV_CAP_PROP_POS_AVI_RATIO) < 0.999999){
        Mat im; cam >> im;
        vector<Point2f> p = detector.detect(im);
        if(p.size() > 0){
            for(int i = 0; i < int(p.size()); i++)
                circle(im,p[i],1,CV_RGB(0,255,0),2,CV_AA);
        }
        imshow("face detector",im);
        if(waitKey(10) == 'q')break;
    }
    destroyWindow("face detector"); cam.release(); return 0;
}
Example 5: LOGD
JNIEXPORT void JNICALL Java_org_opencv_highgui_VideoCapture_n_1release
  (JNIEnv* env, jclass, jlong self)
{
    try {
#ifdef DEBUG
        LOGD("highgui::VideoCapture_n_1release()");
#endif // DEBUG
        VideoCapture* me = (VideoCapture*) self; //TODO: check for NULL
        me->release();
        return;
    } catch(const cv::Exception& e) {
#ifdef DEBUG
        LOGD("highgui::VideoCapture_n_1release() caught cv::Exception: %s", e.what());
#endif // DEBUG
        jclass je = env->FindClass("org/opencv/core/CvException");
        if(!je) je = env->FindClass("java/lang/Exception");
        env->ThrowNew(je, e.what());
        return;
    } catch (...) {
#ifdef DEBUG
        LOGD("highgui::VideoCapture_n_1release() caught unknown exception (...)");
#endif // DEBUG
        jclass je = env->FindClass("java/lang/Exception");
        env->ThrowNew(je, "Unknown exception in JNI code {highgui::VideoCapture_n_1release()}");
        return;
    }
}
Example 6: processVideo
int processVideo(VideoCapture& capture) {
    if(!capture.isOpened()){
        //error in opening the video input
        exit(EXIT_FAILURE);
    }
    Ptr<BackgroundSubtractor> pMOG2 = createBackgroundSubtractorMOG2(); //MOG2 approach
    //read input data. ESC or 'q' for quitting
    while( (char)keyboard != 'q' && (char)keyboard != 27 ){
        //read the current frame
        if(!capture.read(frame)) {
            cerr << "Unable to read next frame." << endl;
            cerr << "Exiting..." << endl;
            exit(EXIT_FAILURE);
        }
        //update the background model (a negative learning rate lets MOG2 choose it automatically)
        pMOG2->apply(frame, fgMaskMOG2, -2);
        //get the frame number and write it on the current frame
        stringstream ss;
        rectangle(frame, cv::Point(10, 2), cv::Point(100,20),
                  cv::Scalar(140,89,255), -1);
        ss << capture.get(CAP_PROP_POS_FRAMES);
        string frameNumberString = ss.str();
        putText(frame, frameNumberString.c_str(), cv::Point(15, 15),
                FONT_HERSHEY_SIMPLEX, 0.5, cv::Scalar(0,0,0));
        //show the current frame and the fg masks
        imshow("Frame", frame);
        imshow("FG Mask MOG 2", fgMaskMOG2);
        //get the input from the keyboard
        keyboard = waitKey( 30 );
    }
    //release the capture object
    capture.release();
    return 0;
}
Example 7: main
int main()
{
    int key;
    VideoCapture webcam;
    webcam.open(0);
    Mat img;
    while(webcam.read(img))
    {
        imshow("Webcam", img);
        key = cvWaitKey(10); //wait up to 10 ms for the next key stroke
        if(key == 27)
            break; //if the user presses the escape (ESC) key, capturing stops and the last frame is saved
    }
    /*saving image*/
    imwrite("image.jpg", img);
    webcam.release();
    /*Re-display image*/
    Mat img2 = imread("image.jpg");
    imshow("Taken Image", img2);
    cvWaitKey(0);
    return 0;
}
Example 8: main
int main(int argc, char * argv[]){
    VideoCapture cam;
    //Read options
    CommandLineParser parser(argc, argv, keys);
    string video = parser.get<string>("v");
    int f0 = parser.get<int>("i");
    //Init cam
    if (video != "null")
        cam.open(video);
    else
        cam.open(0);
    if (!cam.isOpened()){
        cout << "cam device failed to open!" << endl;
        return 1;
    }
    //Read first frame
    Mat frame;
    cam.set(CV_CAP_PROP_POS_FRAMES, f0);
    cam.read(frame);
    while(cam.read(frame)){
        //Display
        imshow("Player", frame);
        char key = cvWaitKey(33);
        if (key == 'q')
            break;
    }
    cam.release();
    return 0;
}
Example 9: quit
/**
 * this function provides a way to exit nicely from the system
 */
void quit(string msg, int retval)
{
    if (retval == 0) {
        cout << (msg == "NULL" ? "" : msg) << "\n" << endl;
    } else {
        cerr << (msg == "NULL" ? "" : msg) << "\n" << endl;
    }
    if (clientSock){
        close(clientSock);
    }
    if (capture.isOpened()){
        capture.release();
    }
    if (!(img0.empty())){
        img0.release();
    }
    if (!(img1.empty())){
        img1.release();
    }
    if (!(img2.empty())){
        img2.release();
    }
    pthread_mutex_destroy(&gmutex);
    exit(retval);
}
Example 10: video_thread_CL
void video_thread_CL(void* pParams)
{
    FaceDetector *faceDetector;
    if (threadUseCL){
        faceDetector = (FaceDetectorCL*)pParams;
    }
    else{
        faceDetector = (FaceDetectorCpu*)pParams;
    }
    std::string name = faceDetector->name();
    //HAAR_EYE_TREE_EYEGLASSES_DATA
    //HAAR_EYE_DATA
    //HAAR_FRONT_FACE_DEFAULT_DATA
    //LBP_FRONTAL_FACE
    //LBP_PROFILE_FACE
    faceDetector->load(HAAR_FRONT_FACE_DEFAULT_DATA);
    VideoCapture videoCapture;
    cv::Mat frame, frameCopy, image;
    videoCapture.open(faceDetector->videoFile().c_str());
    if (!videoCapture.isOpened()) { cout << "No video detected" << endl; return; }
    if (imshowFlag) { cv::namedWindow(name.c_str(), 1); }
    if (videoCapture.isOpened())
    {
        cout << "In capture ..." << name.c_str() << endl;
        while (videoCapture.grab())
        {
            if (!videoCapture.retrieve(frame, 0)) { break; }
            faceDetector->setSrcImg(frame, 1);
            faceDetector->doWork();
            if (imshowFlag){ cv::imshow(name.c_str(), faceDetector->resultMat()); }
            std::vector<cv::Rect> &faces_result = faceDetector->getResultFaces();
            std::cout << "face --" << name.c_str() << std::endl;
            for (int i = 0; i < faces_result.size(); ++i){
                std::cout << faces_result.at(i).x << ", " << faces_result.at(i).y << std::endl;
            }
            if (waitKey(10) >= 0){
                videoCapture.release();
                break;
            }
            Sleep(1);
        }
    }
    if (imshowFlag) { cvDestroyWindow(name.c_str()); }
    finishTaskFlag++;
    _endthread();
    return;
}
Example 11: GlobalExits
VOID inline GlobalExits(VOID)
{
    g_writer.release();
    g_cap.release();
    g_captureStat = capture_IDLE;
    SecureEndThread(threadStat);
    cv::destroyAllWindows();
    g_runningONE = FALSE;
}
Example 12: unload
void unload() {
    if(cap != NULL) {
        if(cap->isOpened()) {
            cap->release();
        }
        delete cap;
        cap = NULL;
    }
}
Example 13: main
int main(int argc, char *argv[])
{
    start_fps();
    sleep(1);
    /*********************PARAMETERS*****************/
    if(argc < 2)
    {
        Cwarning;
        printf("No argument was passed to the program\n");
        Cwarning;
        printf("By default the program will select the highest camera ID\n");
        int idCamera = 3;
        cap.open(idCamera);
        while(!cap.isOpened())
        {
            Cerro;
            printf("Error opening camera id %d!\n", idCamera);
            idCamera--;
            if(idCamera == -1)
                return -1;
            cap.release();
            cap.open(idCamera);
        }
        sleep(1);
    }
    else
    {
        char *local_video;
        local_video = argv[1];
        Cok;
        printf("Video ! %s ! chosen by the user\n", local_video);
        cap.open(local_video);
        if(!cap.isOpened())
        {
            Cerro;
            printf("File not found!\n");
            return -1;
        }
        sleep(1);
    }
    /************************************************/
    pthread_t get_img;
    pthread_t show_img;
    pthread_create(&get_img, NULL, streaming, NULL);   //grab frames from the camera or the file
    pthread_create(&show_img, NULL, image_show, NULL); //display the grabbed frames
    pthread_join(get_img, NULL);
    pthread_join(show_img, NULL);
    return 0;
}
Example 14:
extern "C" JNIEXPORT void JNICALL Java_narl_itrc_vision_CapVidcap_implDone(
    JNIEnv* env,
    jobject thiz
){
    PREPARE_CONTEXT;
    VideoCapture* vid = (VideoCapture*)(cntx);
    vid->release();
    delete vid;
}
Example 15: main
int main( int argc, char* argv[] )
{
    if(argc > 2)
        NOWRITE = 0;
    cout << "nowrite = " << NOWRITE << endl;
    namedWindow( "Example2_10", CV_WINDOW_AUTOSIZE );
    namedWindow( "Log_Polar", CV_WINDOW_AUTOSIZE );
    Mat bgr_frame;
    VideoCapture capture;
    if( argc < 2 || !capture.open( argv[1] ) ){
        help();
        if( argc >= 2 )
            cout << "Failed to open " << argv[1] << "\n" << endl;
        return -1;
    }
    double fps = capture.get(CV_CAP_PROP_FPS);
    cout << "fps = " << fps << endl;
    Size size((int)capture.get(CV_CAP_PROP_FRAME_WIDTH),
              (int)capture.get(CV_CAP_PROP_FRAME_HEIGHT));
    cout << " frame (w, h) = (" << size.width << ", " << size.height << ")" << endl;
    VideoWriter writer;
    if(! NOWRITE)
    {
        writer.open( // On Linux this only works if the ffmpeg development files are installed correctly,
            argv[2], // otherwise it segfaults. Windows usually behaves better.
            CV_FOURCC('M','J','P','G'),
            fps,
            size
        );
    }
    Mat logpolar_frame(size, CV_8UC3);
    Mat gray_frame(size, CV_8UC1);
    for(;;) {
        capture >> bgr_frame;
        if( bgr_frame.empty() ) break;
        imshow( "Example2_10", bgr_frame );
        cvtColor( //We never make use of this gray image
            bgr_frame, gray_frame, CV_BGR2GRAY);
        IplImage lp = logpolar_frame;
        IplImage bgrf = bgr_frame;
        cvLogPolar( &bgrf, &lp, //This is just a fun conversion that mimics the human visual system
                    cvPoint2D32f(bgr_frame.cols/2,
                                 bgr_frame.rows/2),
                    40,
                    CV_WARP_FILL_OUTLIERS );
        imshow( "Log_Polar", logpolar_frame );
        //Sigh, on Linux, depending on your ffmpeg, this often won't work ...
        if(! NOWRITE)
            writer << logpolar_frame;
        char c = waitKey(10);
        if( c == 27 ) break;
    }
    capture.release();
    return 0;
}