本文整理汇总了C++中VideoStream::stop方法的典型用法代码示例。如果您正苦于以下问题:C++ VideoStream::stop方法的具体用法?C++ VideoStream::stop怎么用?C++ VideoStream::stop使用的例子?那么恭喜您, 这里精选的方法代码示例或许可以为您提供帮助。您也可以进一步了解该方法所在类VideoStream
的用法示例。
在下文中一共展示了VideoStream::stop方法的11个代码示例,这些例子默认根据受欢迎程度排序。您可以为喜欢或者感觉有用的代码点赞,您的评价将有助于系统推荐出更棒的C++代码示例。
示例1: closeOpenNIDevice
int closeOpenNIDevice(Device &device , VideoStream &color , VideoStream &depth , VideoStream &ir)
{
    // Tear down an OpenNI capture session: stop every active stream,
    // release the stream resources, then close the device handle.
    fprintf(stderr,"Stopping depth and color streams\n");

    // Stop streaming first so no callbacks fire while we destroy.
    depth.stop();
    color.stop();
#if MOD_IR
    // The IR stream only exists when the module was built with MOD_IR.
    ir.stop();
    ir.destroy();
#endif // MOD_IR

    // Release stream objects before closing the owning device.
    depth.destroy();
    color.destroy();
    device.close();

    return 1;
}
示例2: close
OniStatus Device::close()
{
// Drop one reference to this device; actual teardown happens only when
// the last opener closes it.
--m_openCount;
if (m_openCount == 0)
{
// Stop and unregister every stream still attached to the device.
// We repeatedly take the first element because Remove() mutates the
// container while we iterate.
while(m_streams.Begin() != m_streams.End())
{
VideoStream* pStream = *m_streams.Begin();
pStream->stop();
m_streams.Remove(pStream);
}
// Free all per-sensor bookkeeping objects.
for (int i = 0; i < MAX_SENSORS_PER_DEVICE; ++i)
{
if (m_sensors[i] != NULL)
{
XN_DELETE(m_sensors[i]);
m_sensors[i] = NULL;
}
}
// Close the underlying driver handle, if one was ever opened.
if (m_deviceHandle != NULL)
{
m_driverHandler.deviceClose(m_deviceHandle);
}
m_deviceHandle = NULL;
}
// Always reports success, even when only the refcount was decremented.
return ONI_STATUS_OK;
}
示例3: beginConfigure
virtual bool beginConfigure()
{
    // Remember whether the stream was active so the matching end-of-configure
    // step can restore the previous state afterwards.
    was_running_ = running_;
    if (was_running_)
    {
        // Configuration requires a stopped stream.
        stream_.stop();
    }
    running_ = false;
    return true;
}
示例4:
virtual ~SensorStreamManager()
{
// Detach this object from frame callbacks first so no callback can run
// into a half-destroyed manager.
stream_.removeNewFrameListener(this);
// Stop and release the OpenNI stream, then shut down the ROS publisher.
stream_.stop();
stream_.destroy();
publisher_.shutdown();
}
示例5: main
int main()
{
	// Minimal OpenNI sample: open the default device, start its depth
	// stream, and print incoming frames through a listener until a key
	// is hit.  Returns 0 on success, 1/2 on init/open failure.
	Status rc = OpenNI::initialize();
	if (rc != STATUS_OK)
	{
		printf("Initialize failed\n%s\n", OpenNI::getExtendedError());
		return 1;
	}
	OpenNIEventListener eventPrinter;
	OpenNI::addListener(&eventPrinter);
	Device device;
	rc = device.open(ANY_DEVICE);
	if (rc != STATUS_OK)
	{
		printf("Couldn't open device\n%s\n", OpenNI::getExtendedError());
		return 2;
	}
	VideoStream depth;
	if (device.getSensorInfo(SENSOR_DEPTH) != NULL)
	{
		rc = depth.create(device, SENSOR_DEPTH);
		if (rc != STATUS_OK)
		{
			printf("Couldn't create depth stream\n%s\n", OpenNI::getExtendedError());
		}
	}
	rc = depth.start();
	if (rc != STATUS_OK)
	{
		printf("Couldn't start the depth stream\n%s\n", OpenNI::getExtendedError());
	}
	PrintCallback depthPrinter;
	// Register to new frame.
	// Fix: openni::VideoStream exposes addNewFrameListener()/
	// removeNewFrameListener(), not addListener()/removeListener()
	// (matches the identical PrintCallback usage in the other sample
	// in this file).
	depth.addNewFrameListener(&depthPrinter);
	// Wait while we're getting frames through the printer
	while (!wasKeyboardHit())
	{
		Sleep(100);
	}
	depth.removeNewFrameListener(&depthPrinter);
	depth.stop();
	depth.destroy();
	device.close();
	OpenNI::shutdown();
	return 0;
}
示例6: onSubscriptionChanged
virtual void onSubscriptionChanged(const image_transport::SingleSubscriberPublisher& topic)
{
	// Lazily start the OpenNI stream when the first subscriber appears and
	// stop it when the last one goes away, tracking state in running_.
	if(topic.getNumSubscribers() > 0)
	{
		if(!running_ && stream_.start() == STATUS_OK)
		{
			running_ = true;
		}
	}
	else
	{
		// Fix: only stop a stream we actually started; the original called
		// stop() unconditionally even when running_ was already false
		// (the sibling beginConfigure() guards its stop() the same way).
		if(running_)
		{
			stream_.stop();
		}
		running_ = false;
	}
}
示例7: Size
int
main (int argc, char** argv)
{
Status rc = OpenNI::initialize();
if (rc != STATUS_OK)
{
std::cout << "Initialize failed: " << OpenNI::getExtendedError() << std::endl;
return 1;
}
Device device;
rc = device.open(ANY_DEVICE);
if (rc != STATUS_OK)
{
std::cout << "Couldn't open device: " << OpenNI::getExtendedError() << std::endl;
return 2;
}
VideoStream stream;
if (device.getSensorInfo(currentSensor) != NULL)
{
rc = stream.create(device, currentSensor);
if (rc != STATUS_OK)
{
std::cout << "Couldn't create stream: " << OpenNI::getExtendedError() << std::endl;
return 3;
}
}
rc = stream.start();
if (rc != STATUS_OK)
{
std::cout << "Couldn't start the stream: " << OpenNI::getExtendedError() << std::endl;
return 4;
}
VideoFrameRef frame;
//now open the video writer
Size S = Size(stream.getVideoMode().getResolutionX(),
stream.getVideoMode().getResolutionY());
VideoWriter outputVideo;
std::string fileName = "out.avi";
outputVideo.open(fileName, -1, stream.getVideoMode().getFps(), S, currentSensor == SENSOR_COLOR ? true : false);
if (!outputVideo.isOpened())
{
std::cout << "Could not open the output video for write: " << fileName << std::endl;
return -1;
}
while (waitKey(50) == -1)
{
int changedStreamDummy;
VideoStream* pStream = &stream;
rc = OpenNI::waitForAnyStream(&pStream, 1, &changedStreamDummy, SAMPLE_READ_WAIT_TIMEOUT);
if (rc != STATUS_OK)
{
std::cout << "Wait failed! (timeout is " << SAMPLE_READ_WAIT_TIMEOUT << "ms): " << OpenNI::getExtendedError() << std::endl;
continue;
}
rc = stream.readFrame(&frame);
if (rc != STATUS_OK)
{
std::cout << "Read failed:" << OpenNI::getExtendedError() << std::endl;
continue;
}
Mat image;
switch (currentSensor)
{
case SENSOR_COLOR:
image = Mat(frame.getHeight(), frame.getWidth(), CV_8UC3, (void*)frame.getData());
break;
case SENSOR_DEPTH:
image = Mat(frame.getHeight(), frame.getWidth(), DataType<DepthPixel>::type, (void*)frame.getData());
break;
case SENSOR_IR:
image = Mat(frame.getHeight(), frame.getWidth(), CV_8U, (void*)frame.getData());
break;
default:
break;
}
namedWindow( "Display window", WINDOW_AUTOSIZE ); // Create a window for display.
imshow( "Display window", image ); // Show our image inside it.
outputVideo << image;
}
stream.stop();
stream.destroy();
device.close();
OpenNI::shutdown();
return 0;
}
示例8: main
//.........这里部分代码省略.........
rc = depth.start();
if (rc != STATUS_OK)
{
printf("Couldn't start the depth stream\n%s\n", OpenNI::getExtendedError());
return 5;
}
rc = color.start();
if (rc != STATUS_OK){
printf("Couldn't start the depth stream\n%s\n", OpenNI::getExtendedError());
return 6;
}
VideoFrameRef colorframe;
VideoFrameRef depthframe;
int iMaxDepth = depth.getMaxPixelValue();
int iColorFps = color.getVideoMode().getFps();
cv::Size iColorFrameSize = cv::Size(color.getVideoMode().getResolutionX(), color.getVideoMode().getResolutionY());
cv::Mat colorimageRGB;
cv::Mat colorimageBGR;
cv::Mat depthimage;
cv::Mat depthimageScaled;
#ifdef F_RECORDVIDEO
cv::VideoWriter outputvideo_color;
cv::FileStorage outputfile_depth;
time_t timenow = time(0);
tm ltime;
localtime_s(<ime, &timenow);
int tyear = 1900 + ltime.tm_year;
int tmouth = 1 + ltime.tm_mon;
int tday = ltime.tm_mday;
int thour = ltime.tm_hour;
int tmin = ltime.tm_min;
int tsecond = ltime.tm_sec;
string filename_rgb = "RGB/rgb_" + to_string(tyear) + "_" + to_string(tmouth) + "_" + to_string(tday)
+ "_" + to_string(thour) + "_" + to_string(tmin) + "_" + to_string(tsecond) + ".avi";
string filename_d = "D/d_" + to_string(tyear) + "_" + to_string(tmouth) + "_" + to_string(tday)
+ "_" + to_string(thour) + "_" + to_string(tmin) + "_" + to_string(tsecond) + ".yml";
outputvideo_color.open(filename_rgb, CV_FOURCC('I', '4', '2', '0'), iColorFps, iColorFrameSize, true);
if (!outputvideo_color.isOpened()){
cout << "Could not open the output color video for write: " << endl;
return 7;
}
outputfile_depth.open(filename_d, cv::FileStorage::WRITE);
if (!outputfile_depth.isOpened()){
cout << "Could not open the output depth file for write: " << endl;
return 8;
}
#endif // F_RECORDVIDEO
// 7. main loop, continue read
while (!wasKeyboardHit())
{
// 8. check is color stream is available
if (color.isValid()){
if (color.readFrame(&colorframe) == STATUS_OK){
colorimageRGB = { colorframe.getHeight(), colorframe.getWidth(), CV_8UC3, (void*)colorframe.getData() };
cv::cvtColor(colorimageRGB, colorimageBGR, CV_RGB2BGR);
}
}
// 9. check is depth stream is available
if (depth.isValid()){
if (depth.readFrame(&depthframe) == STATUS_OK){
depthimage = { depthframe.getHeight(), depthframe.getWidth(), CV_16UC1, (void*)depthframe.getData() };
depthimage.convertTo(depthimageScaled, CV_8U, 255.0 / iMaxDepth);
}
}
cv::imshow("Color Image", colorimageBGR);
cv::imshow("Depth Image", depthimageScaled);
#ifdef F_RECORDVIDEO
outputvideo_color << colorimageBGR;
outputfile_depth << "Mat" << depthimage;
#endif // F_RECORDVIDEO
cv::waitKey(10);
}
color.stop();
depth.stop();
color.destroy();
depth.destroy();
device.close();
OpenNI::shutdown();
return 0;
}
示例9: main
//.........这里部分代码省略.........
//DEPTH
pub_depth = n.advertise<sensor_msgs::Image>("/"+topic+"/depth/image_raw", 1);
pub_camera_info_depth = n.advertise<sensor_msgs::CameraInfo>("/"+topic+"/depth/camera_info", 1);
}
}
if(_rgb_mode>=0){
if (device.getSensorInfo(SENSOR_COLOR) != NULL){
rc = rgb.create(device, SENSOR_COLOR);
if (rc != STATUS_OK){
printf("Couldn't create rgb stream\n%s\n", OpenNI::getExtendedError());
fflush(stdout);
return 3;
}
//RGB
pub_rgb = n.advertise<sensor_msgs::Image>("/"+topic+"/rgb/image_raw", 1);
pub_camera_info_rgb = n.advertise<sensor_msgs::CameraInfo>("/"+topic+"/rgb/camera_info", 1);
}
}
if(_depth_mode<0 && _rgb_mode<0){
cout << "Depth modes" << endl;
const openni::SensorInfo* sinfo = device.getSensorInfo(openni::SENSOR_DEPTH); // select index=4 640x480, 30 fps, 1mm
const openni::Array< openni::VideoMode>& modesDepth = sinfo->getSupportedVideoModes();
printf("Enums data:\nPIXEL_FORMAT_DEPTH_1_MM = 100,\nPIXEL_FORMAT_DEPTH_100_UM = 101,\nPIXEL_FORMAT_SHIFT_9_2 = 102,\nPIXEL_FORMAT_SHIFT_9_3 = 103,\nPIXEL_FORMAT_RGB888 = 200,\nPIXEL_FORMAT_YUV422 = 201,\nPIXEL_FORMAT_GRAY8 = 202,\nPIXEL_FORMAT_GRAY16 = 203,\nPIXEL_FORMAT_JPEG = 204,\nPIXEL_FORMAT_YUYV = 205,\n\n");
cout << "Depth modes" << endl;
for (int i = 0; i<modesDepth.getSize(); i++) {
printf("%i: %ix%i, %i fps, %i format\n", i, modesDepth[i].getResolutionX(), modesDepth[i].getResolutionY(),modesDepth[i].getFps(), modesDepth[i].getPixelFormat()); //PIXEL_FORMAT_DEPTH_1_MM = 100, PIXEL_FORMAT_DEPTH_100_UM = 101
}
cout << "Rgb modes" << endl;
const openni::SensorInfo* sinfoRgb = device.getSensorInfo(openni::SENSOR_COLOR); // select index=4 640x480, 30 fps, 1mm
const openni::Array< openni::VideoMode>& modesRgb = sinfoRgb->getSupportedVideoModes();
for (int i = 0; i<modesRgb.getSize(); i++) {
printf("%i: %ix%i, %i fps, %i format\n", i, modesRgb[i].getResolutionX(), modesRgb[i].getResolutionY(),modesRgb[i].getFps(), modesRgb[i].getPixelFormat()); //PIXEL_FORMAT_DEPTH_1_MM = 100, PIXEL_FORMAT_DEPTH_100_UM
}
depth.stop();
depth.destroy();
rgb.stop();
rgb.destroy();
device.close();
OpenNI::shutdown();
exit(1);
}
if(_depth_mode>=0){
rc = depth.setVideoMode(device.getSensorInfo(SENSOR_DEPTH)->getSupportedVideoModes()[_depth_mode]);
depth.setMirroringEnabled(false);
rc = depth.start();
}
if(_rgb_mode>=0){
rc = rgb.setVideoMode(device.getSensorInfo(SENSOR_COLOR)->getSupportedVideoModes()[_rgb_mode]);
rgb.setMirroringEnabled(false);
rgb.getCameraSettings()->setAutoExposureEnabled(true);
rgb.getCameraSettings()->setAutoWhiteBalanceEnabled(true);
cerr << "Camera settings valid: " << rgb.getCameraSettings()->isValid() << endl;
rc = rgb.start();
}
if(_depth_mode>=0 && _rgb_mode>=0 && _sync==1){
rc =device.setDepthColorSyncEnabled(true);
if (rc != STATUS_OK) {
printf("Couldn't enable de pth and rgb images synchronization\n%s\n",
OpenNI::getExtendedError());
exit(2);
}
}
if(_depth_mode>=0 && _rgb_mode>=0 && _registration==1){
device.setImageRegistrationMode(openni::IMAGE_REGISTRATION_DEPTH_TO_COLOR);
}
run = true;
pthread_t runner;
pthread_create(&runner, 0, camera_thread, 0);
ros::spin();
void* result;
run =false;
pthread_join(runner, &result);
depth.stop();
depth.destroy();
rgb.stop();
rgb.destroy();
device.close();
OpenNI::shutdown();
return 0;
}
示例10: main
int main()
{
	// Display live depth and color streams from the default OpenNI device
	// side by side in two OpenCV windows until a key is hit.
	// 2. initialize OpenNI
	Status rc = OpenNI::initialize();
	if (rc != STATUS_OK)
	{
		printf("Initialize failed\n%s\n", OpenNI::getExtendedError());
		return 1;
	}
	// 3. open a device
	Device device;
	rc = device.open(ANY_DEVICE);
	if (rc != STATUS_OK)
	{
		printf("Couldn't open device\n%s\n", OpenNI::getExtendedError());
		return 2;
	}
	// 4. create depth stream
	VideoStream depth;
	if (device.getSensorInfo(SENSOR_DEPTH) != NULL){
		rc = depth.create(device, SENSOR_DEPTH);
		if (rc != STATUS_OK){
			printf("Couldn't create depth stream\n%s\n", OpenNI::getExtendedError());
			return 3;
		}
	}
	VideoStream color;
	if (device.getSensorInfo(SENSOR_COLOR) != NULL){
		rc = color.create(device, SENSOR_COLOR);
		if (rc != STATUS_OK){
			printf("Couldn't create color stream\n%s\n", OpenNI::getExtendedError());
			return 4;
		}
	}
	// 5. create OpenCV Window
	cv::namedWindow("Depth Image", CV_WINDOW_AUTOSIZE);
	cv::namedWindow("Color Image", CV_WINDOW_AUTOSIZE);
	// 6. start
	rc = depth.start();
	if (rc != STATUS_OK)
	{
		printf("Couldn't start the depth stream\n%s\n", OpenNI::getExtendedError());
		return 5;
	}
	rc = color.start();
	if (rc != STATUS_OK){
		// Fix: this failure is in the COLOR stream; the message previously
		// said "depth stream".
		printf("Couldn't start the color stream\n%s\n", OpenNI::getExtendedError());
		return 6;
	}
	VideoFrameRef colorframe;
	VideoFrameRef depthframe;
	int iMaxDepth = depth.getMaxPixelValue();
	cv::Mat colorimageRGB;
	cv::Mat colorimageBGR;
	cv::Mat depthimage;
	cv::Mat depthimageScaled;
	// 7. main loop, continue read
	while (!wasKeyboardHit())
	{
		// 8. check is color stream is available
		if (color.isValid()){
			if (color.readFrame(&colorframe) == STATUS_OK){
				// Wrap the OpenNI buffer without copying, then RGB -> BGR for OpenCV.
				colorimageRGB = { colorframe.getHeight(), colorframe.getWidth(), CV_8UC3, (void*)colorframe.getData() };
				cv::cvtColor(colorimageRGB, colorimageBGR, CV_RGB2BGR);
			}
		}
		// 9. check is depth stream is available
		if (depth.isValid()){
			if (depth.readFrame(&depthframe) == STATUS_OK){
				depthimage = { depthframe.getHeight(), depthframe.getWidth(), CV_16UC1, (void*)depthframe.getData() };
				// Scale 16-bit depth into 8 bits for display.
				depthimage.convertTo(depthimageScaled, CV_8U, 255.0 / iMaxDepth);
			}
		}
		// Fix: before the first successful readFrame the Mats are empty and
		// cv::imshow would throw on an empty image.
		if (!colorimageBGR.empty()){
			cv::imshow("Color Image", colorimageBGR);
		}
		if (!depthimageScaled.empty()){
			cv::imshow("Depth Image", depthimageScaled);
		}
		cv::waitKey(10);
	}
	color.stop();
	depth.stop();
	color.destroy();
	depth.destroy();
	device.close();
	OpenNI::shutdown();
	return 0;
}
示例11: main
int main()
{
	// Print connected devices and device events, then stream depth frames,
	// printing each one via PrintCallback.  Enter 0 or a negative number to
	// stop.  Returns 0 on success, 1/2 on init/open failure.
	Status rc = OpenNI::initialize();
	if (rc != STATUS_OK)
	{
		printf("Initialize failed\n%s\n", OpenNI::getExtendedError());
		return 1;
	}
	OpenNIDeviceListener devicePrinter;
	OpenNI::addDeviceConnectedListener(&devicePrinter);
	OpenNI::addDeviceDisconnectedListener(&devicePrinter);
	OpenNI::addDeviceStateChangedListener(&devicePrinter);
	openni::Array<openni::DeviceInfo> deviceList;
	openni::OpenNI::enumerateDevices(&deviceList);
	for (int i = 0; i < deviceList.getSize(); ++i)
	{
		printf("Device \"%s\" already connected\n", deviceList[i].getUri());
	}
	Device device;
	rc = device.open(ANY_DEVICE);
	if (rc != STATUS_OK)
	{
		printf("Couldn't open device\n%s\n", OpenNI::getExtendedError());
		return 2;
	}
	VideoStream depth;
	if (device.getSensorInfo(SENSOR_DEPTH) != NULL)
	{
		rc = depth.create(device, SENSOR_DEPTH);
		if (rc != STATUS_OK)
		{
			printf("Couldn't create depth stream\n%s\n", OpenNI::getExtendedError());
		}
	}
	rc = depth.start();
	if (rc != STATUS_OK)
	{
		printf("Couldn't start the depth stream\n%s\n", OpenNI::getExtendedError());
	}
	PrintCallback depthPrinter;
	// Register to new frame
	depth.addNewFrameListener(&depthPrinter);
	int i = 1;
	while(i > 0)
	{
		// Fix: scanf_s("%d", ...) requires the ADDRESS of the target int.
		// The original passed `i` by value — undefined behavior, and the
		// loop variable was never updated, so the loop could not exit.
		scanf_s("%d", &i);
		printf("%d\n", i);
	}
	depth.removeNewFrameListener(&depthPrinter);
	depth.stop();
	depth.destroy();
	device.close();
	OpenNI::shutdown();
	return 0;
}