This article collects typical C++ usage examples of the openni::VideoStream::start method. If you are wondering what VideoStream::start does, how to call it, or what real-world code that uses it looks like, the curated examples below should help. You can also explore further usage examples of its containing class, openni::VideoStream.
The following shows 15 code examples of VideoStream::start, ordered by popularity by default. You can upvote the examples you like or find useful; your feedback helps the system recommend better C++ examples.
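Before the individual examples, here is a minimal, self-contained sketch of the typical VideoStream::start lifecycle: initialize OpenNI, open a device, create and start a stream, read one frame, then tear everything down. It is a hedged illustration assembled from the API calls used in the examples below, not code taken from any one of them.

#include <OpenNI.h>
#include <cstdio>

int main()
{
    // Initialize the OpenNI runtime and open the first available device.
    if (openni::OpenNI::initialize() != openni::STATUS_OK)
        return 1;

    openni::Device device;
    if (device.open(openni::ANY_DEVICE) != openni::STATUS_OK)
    {
        printf("Open failed:\n%s\n", openni::OpenNI::getExtendedError());
        openni::OpenNI::shutdown();
        return 1;
    }

    // Create a depth stream; start() is the point where frames begin to flow.
    openni::VideoStream depth;
    if (depth.create(device, openni::SENSOR_DEPTH) == openni::STATUS_OK &&
        depth.start() == openni::STATUS_OK)
    {
        openni::VideoFrameRef frame;
        if (depth.readFrame(&frame) == openni::STATUS_OK)
            printf("Got a %dx%d depth frame\n", frame.getWidth(), frame.getHeight());
        depth.stop();
    }

    depth.destroy();
    device.close();
    openni::OpenNI::shutdown();
    return 0;
}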
Example 1: startcamera
void KinectCamera::startcamera(void)
{
    openni::OpenNI::initialize();                           // initialize OpenNI
    mDevice.open( openni::ANY_DEVICE );                     // open the device (mDevice is declared as a member/global elsewhere)
    mColorStream.create( mDevice, openni::SENSOR_COLOR );   // create the color stream
    mColorStream.start();                                   // start the color stream
    mDepthStream.create( mDevice, openni::SENSOR_DEPTH );   // create the depth stream
    mDepthStream.start();                                   // start the depth stream
    fig = 1;
}
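Example 1 ignores the Status return values and never reads a frame. A hedged companion sketch for the same class, checking the calls that matter, might look like this (grabOneColorFrame is a hypothetical method name; mColorStream is the member assumed by the example):

// Hypothetical companion to Example 1: fetch one color frame after startcamera().
bool KinectCamera::grabOneColorFrame(openni::VideoFrameRef& frame)
{
    if (!mColorStream.isValid())
        return false;
    if (mColorStream.readFrame(&frame) != openni::STATUS_OK)
    {
        printf("readFrame failed:\n%s\n", openni::OpenNI::getExtendedError());
        return false;
    }
    return true;  // frame.getData() now points at the current color pixels
}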
Example 2: initONI2RGBStream
bool initONI2RGBStream(openni::Device& device, openni::VideoStream& rgb, int w, int h, int fps, openni::PixelFormat format){
    openni::Status rc = openni::STATUS_OK;

    rc = rgb.create(device, openni::SENSOR_COLOR);
    if(rc != openni::STATUS_OK){
        printf("%s:Couldn't find RGB stream:\n%s\n", __FUNCTION__, openni::OpenNI::getExtendedError());
        return false;
    }

    rc = rgb.setMirroringEnabled(false);
    if (rc != openni::STATUS_OK){
        printf("%s:setMirroringEnabled(false) failed:\n%s\n", __FUNCTION__, openni::OpenNI::getExtendedError());
        return false;
    }

    openni::VideoMode options = rgb.getVideoMode();
    printf("Initial resolution RGB (%d, %d) FPS %d Format %d\n", options.getResolutionX(), options.getResolutionY(), options.getFps(), options.getPixelFormat());

    if(setONI2StreamMode(rgb, w, h, fps, format) == false){
        printf("%s:Can't find desired rgb mode\n", __FUNCTION__ );
        return false;
    }

    options = rgb.getVideoMode();
    printf(" -> (%d, %d) FPS %d Format %d\n", options.getResolutionX(), options.getResolutionY(), options.getFps(), options.getPixelFormat());

    rc = rgb.start();
    if (rc != openni::STATUS_OK){
        printf("%s:Couldn't start RGB stream:\n%s\n", __FUNCTION__, openni::OpenNI::getExtendedError());
        rgb.destroy();
        return false;
    }
    return true;
}
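Examples 2 and 10 rely on a helper setONI2StreamMode() that the excerpts do not show. A plausible sketch, assuming the helper simply scans the sensor's supported modes for an exact match, is given below; the real project's version may differ.

// Assumed implementation sketch: pick the first supported mode matching the request.
bool setONI2StreamMode(openni::VideoStream& stream, int w, int h, int fps, openni::PixelFormat format)
{
    const openni::Array<openni::VideoMode>& modes =
        stream.getSensorInfo().getSupportedVideoModes();
    for (int i = 0; i < modes.getSize(); ++i)
    {
        const openni::VideoMode& m = modes[i];
        if (m.getResolutionX() == w && m.getResolutionY() == h &&
            m.getFps() == fps && m.getPixelFormat() == format)
        {
            return stream.setVideoMode(m) == openni::STATUS_OK;
        }
    }
    return false;  // no supported mode matches the request
}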
Example 3: toggleStreamState
void toggleStreamState(openni::VideoStream& stream, openni::VideoFrameRef& frame, bool& isOn, openni::SensorType type, const char* name)
{
    openni::Status nRetVal = openni::STATUS_OK;

    if (!stream.isValid())
    {
        nRetVal = stream.create(g_device, type);
        if (nRetVal != openni::STATUS_OK)
        {
            displayError("Failed to create %s stream:\n%s", name, openni::OpenNI::getExtendedError());
            return;
        }
    }

    if (isOn)
    {
        stream.stop();
        frame.release();
    }
    else
    {
        nRetVal = stream.start();
        if (nRetVal != openni::STATUS_OK)
        {
            displayError("Failed to start %s stream:\n%s", name, openni::OpenNI::getExtendedError());
            return;
        }
    }

    isOn = !isOn;
}
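A typical call site for Example 3 (bound to a hotkey, for instance) might look like the following; the g_depthFrame/g_colorFrame frame refs and g_colorStream/g_bIsColorOn globals are assumed here, while g_depthStream and g_bIsDepthOn appear in Examples 7 and 13.

// Hypothetical usage of Example 3: flip the depth and color streams on or off.
toggleStreamState(g_depthStream, g_depthFrame, g_bIsDepthOn, openni::SENSOR_DEPTH, "depth");
toggleStreamState(g_colorStream, g_colorFrame, g_bIsColorOn, openni::SENSOR_COLOR, "color");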
Example 4: openStream
int openStream(openni::Device& device, const char* name, openni::SensorType sensorType, SensorOpenType openType, openni::VideoStream& stream, const openni::SensorInfo** ppSensorInfo, bool* pbIsStreamOn)
{
    *ppSensorInfo = device.getSensorInfo(sensorType);
    *pbIsStreamOn = false;

    if (openType == SENSOR_OFF)
    {
        return 0;
    }

    if (*ppSensorInfo == NULL)
    {
        if (openType == SENSOR_ON)
        {
            printf("No %s sensor available\n", name);
            return -1;
        }
        else
        {
            return 0;
        }
    }

    openni::Status nRetVal = stream.create(device, sensorType);
    if (nRetVal != openni::STATUS_OK)
    {
        if (openType == SENSOR_ON)
        {
            printf("Failed to create %s stream:\n%s\n", name, openni::OpenNI::getExtendedError());
            return -2;
        }
        else
        {
            return 0;
        }
    }

    nRetVal = stream.start();
    if (nRetVal != openni::STATUS_OK)
    {
        stream.destroy();

        if (openType == SENSOR_ON)
        {
            printf("Failed to start %s stream:\n%s\n", name, openni::OpenNI::getExtendedError());
            return -3;
        }
        else
        {
            return 0;
        }
    }

    *pbIsStreamOn = true;
    return 0;
}
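Example 4 is normally driven by a higher-level routine that opens several sensors in one go. A hedged sketch of such a driver follows; openAllStreams is a hypothetical name, and the g_depthStream/g_colorStream globals are assumed to exist as in the other examples.

// Hypothetical driver for Example 4: both streams are mandatory (SENSOR_ON).
int openAllStreams(openni::Device& device)
{
    const openni::SensorInfo* depthInfo = NULL;
    const openni::SensorInfo* colorInfo = NULL;
    bool depthOn = false, colorOn = false;

    if (openStream(device, "depth", openni::SENSOR_DEPTH, SENSOR_ON, g_depthStream, &depthInfo, &depthOn) != 0)
        return -1;
    if (openStream(device, "color", openni::SENSOR_COLOR, SENSOR_ON, g_colorStream, &colorInfo, &colorOn) != 0)
        return -1;
    return 0;
}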
Example 5: record_oni
bool record_oni(char *tmpfile, int bufsize, openni::VideoStream &depth, openni::VideoStream &color, Config &conf) {
    openni::Recorder recorder;

    // Build a file name of the form rgbd_YYYYMMDD_HH-MM-SS_<hostname>.oni
    time_t t = time(NULL);
    strftime(tmpfile, bufsize, "rgbd_%Y%m%d_%H-%M-%S_", localtime(&t));
    const char *host = getenv("HOSTNAME");
    if (host != NULL)
        strncat(tmpfile, host, bufsize - strlen(tmpfile) - 1);
    strncat(tmpfile, ".oni", bufsize - strlen(tmpfile) - 1);

    printf("Starting ONI Capture.\n");
    depth.start();
    color.start();

    openni::Status rc = recorder.create(tmpfile);
    if (rc != openni::STATUS_OK) {
        printf("Error: Failed to open '%s' for writing!\n%s", tmpfile, openni::OpenNI::getExtendedError());
        return false;
    }
    recorder.attach(color);
    recorder.attach(depth);
    recorder.start();

    // Record for conf.capture_time milliseconds.
    struct timespec start, tp;
    clock_gettime(CLOCK_MONOTONIC, &start);
    long tt;
    do {
        usleep(100);
        clock_gettime(CLOCK_MONOTONIC, &tp);
        tt = (tp.tv_sec - start.tv_sec) * 1000 + (tp.tv_nsec - start.tv_nsec) / 1000000;
    } while (tt < conf.capture_time);

    recorder.stop();
    color.stop();
    depth.stop();
    recorder.destroy();
    printf("Captured ONI to '%s'\n", tmpfile);
    return true;
}
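A hedged call site for Example 5; only the fact that Config carries capture_time in milliseconds can be inferred from the timing loop above, the rest is illustrative.

// Hypothetical usage of Example 5: record roughly ten seconds to an .oni file.
char oniPath[256];
Config conf;
conf.capture_time = 10000;  // milliseconds, as measured by the timing loop above
if (record_oni(oniPath, sizeof(oniPath), depth, color, conf))
    printf("Recording saved to %s\n", oniPath);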
Example 6: Init
openni::Status HandGesture::Init(int argc, char **argv)
{
    openni::OpenNI::initialize();

    const char* deviceUri = openni::ANY_DEVICE;
    for (int i = 1; i < argc-1; ++i)
    {
        if (strcmp(argv[i], "-device") == 0)
        {
            deviceUri = argv[i+1];
            break;
        }
    }

    openni::Status rc = m_device.open(deviceUri);
    if (rc != openni::STATUS_OK)
    {
        printf("Open Device failed:\n%s\n", openni::OpenNI::getExtendedError());
        return rc;
    }

    nite::NiTE::initialize();

    if (m_pHandTracker->create(&m_device) != nite::STATUS_OK)
    {
        return openni::STATUS_ERROR;
    }

    rc = m_depthStream.create(m_device, openni::SENSOR_DEPTH);
    if (rc == openni::STATUS_OK)
    {
        rc = m_depthStream.start();
        if (rc != openni::STATUS_OK)
        {
            printf("SimpleViewer: Couldn't start depth stream:\n%s\n", openni::OpenNI::getExtendedError());
            m_depthStream.destroy();
        }
    }
    else
    {
        printf("SimpleViewer: Couldn't find depth stream:\n%s\n", openni::OpenNI::getExtendedError());
    }

    m_pHandTracker->startGestureDetection(nite::GESTURE_WAVE);
    m_pHandTracker->startGestureDetection(nite::GESTURE_CLICK);
    // m_pHandTracker->startGestureDetection(nite::GESTURE_HAND_RAISE);

    return InitOpenCV(argc, argv);
}
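Example 6 only performs setup. A minimal per-frame update that pulls a hand-tracker frame and inspects completed gestures could look like the sketch below; Update is a hypothetical method name and the loop body is an assumption based on the NiTE 2 API, not part of the original class.

// Hypothetical per-frame step following Example 6.
void HandGesture::Update()
{
    nite::HandTrackerFrameRef frame;
    if (m_pHandTracker->readFrame(&frame) != nite::STATUS_OK)
        return;

    const nite::Array<nite::GestureData>& gestures = frame.getGestures();
    for (int i = 0; i < gestures.getSize(); ++i)
    {
        if (gestures[i].isComplete())
        {
            const nite::Point3f& pos = gestures[i].getCurrentPosition();
            printf("Gesture completed at (%.0f, %.0f, %.0f)\n", pos.x, pos.y, pos.z);
        }
    }
}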
Example 7: setDepthVideoMode
void setDepthVideoMode(int mode)
{
    bool bIsStreamOn = g_bIsDepthOn;
    if (bIsStreamOn)
    {
        g_bIsDepthOn = false;
        g_depthStream.stop();
    }

    g_depthStream.setVideoMode(g_depthSensorInfo->getSupportedVideoModes()[mode]);

    if (bIsStreamOn)
    {
        g_depthStream.start();
        g_bIsDepthOn = true;
    }
}
Example 8: setIRVideoMode
void setIRVideoMode(int mode)
{
    bool bIsStreamOn = g_bIsIROn;
    if (bIsStreamOn)
    {
        g_bIsIROn = false;
        g_irStream.stop();
    }

    g_irFrame.release();
    g_irStream.setVideoMode(g_irSensorInfo->getSupportedVideoModes()[mode]);

    if (bIsStreamOn)
    {
        g_irStream.start();
        g_bIsIROn = true;
    }
}
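Examples 7 and 8 index into getSupportedVideoModes() by position, so the caller needs to know which index corresponds to which mode. A small hedged helper for printing those indices (printSupportedModes is a hypothetical name):

// Hypothetical helper: list a sensor's supported video modes with their indices,
// so a valid 'mode' argument for Examples 7 and 8 can be chosen.
void printSupportedModes(const openni::SensorInfo* info)
{
    if (info == NULL)
        return;
    const openni::Array<openni::VideoMode>& modes = info->getSupportedVideoModes();
    for (int i = 0; i < modes.getSize(); ++i)
    {
        printf("[%d] %dx%d @ %d FPS, format %d\n", i,
               modes[i].getResolutionX(), modes[i].getResolutionY(),
               modes[i].getFps(), modes[i].getPixelFormat());
    }
}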
Example 9: toggleIRState
void toggleIRState(int )
{
    if (g_irStream.isValid())
    {
        if (g_bIsIROn)
        {
            g_irStream.stop();
            g_irFrame.release();
        }
        else
        {
            openni::Status nRetVal = g_irStream.start();
            if (nRetVal != openni::STATUS_OK)
            {
                displayError("Failed to start IR stream:\n%s", openni::OpenNI::getExtendedError());
                return;
            }
        }

        g_bIsIROn = !g_bIsIROn;
    }
}
Example 10: initONI2DepthStream
bool initONI2DepthStream(openni::Device& device, openni::VideoStream& depth, int w, int h, int fps, openni::PixelFormat format){
    openni::Status rc = depth.create(device, openni::SENSOR_DEPTH);
    if (rc != openni::STATUS_OK){
        printf("%s:Couldn't find depth stream:\n%s\n", __FUNCTION__, openni::OpenNI::getExtendedError());
        return false;
    }

    openni::VideoMode options = depth.getVideoMode();
    printf("Initial resolution Depth(%d, %d) FPS %d Format %d\n", options.getResolutionX(), options.getResolutionY(), options.getFps(), options.getPixelFormat());

    if(setONI2StreamMode(depth, w, h, fps, format) == false){
        printf("%s:Can't find desired depth mode\n", __FUNCTION__ );
        return false;
    }

    options = depth.getVideoMode();
    printf(" -> (%d, %d) FPS %d Format %d\n", options.getResolutionX(), options.getResolutionY(), options.getFps(), options.getPixelFormat());

    depth.setMirroringEnabled(false);

    rc = depth.start();
    if (rc != openni::STATUS_OK){
        printf("Couldn't start depth stream:\n%s\n", openni::OpenNI::getExtendedError());
        depth.destroy();
        return false;
    }
    return true;
}
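Examples 2 and 10 are designed to be used together; a hedged setup sketch combining them follows (setupStreams is a hypothetical name, and 640x480 @ 30 FPS with the standard 1 mm depth / RGB888 formats are illustrative choices).

// Hypothetical combined setup using Examples 2 and 10.
bool setupStreams(openni::Device& device, openni::VideoStream& depth, openni::VideoStream& rgb)
{
    if (!initONI2DepthStream(device, depth, 640, 480, 30, openni::PIXEL_FORMAT_DEPTH_1_MM))
        return false;
    if (!initONI2RGBStream(device, rgb, 640, 480, 30, openni::PIXEL_FORMAT_RGB888))
        return false;
    return true;  // both streams are created, configured and started
}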
Example 11: open
void open(const char* uri) {
    if (device.open(uri) != openni::STATUS_OK)
        BOOST_THROW_EXCEPTION(GrabberException("Failed to open device")
                              << GrabberException::ErrorInfo(openni::OpenNI::getExtendedError()));

    if (color_stream.create(device, openni::SENSOR_COLOR) != openni::STATUS_OK)
        BOOST_THROW_EXCEPTION(GrabberException("Failed to create color stream")
                              << GrabberException::ErrorInfo(openni::OpenNI::getExtendedError()));

    openni::VideoMode color_mode;
    color_mode.setFps(30);
    color_mode.setResolution(color_image_resolution.width, color_image_resolution.height);
    color_mode.setPixelFormat(openni::PIXEL_FORMAT_RGB888);
    color_stream.setVideoMode(color_mode);
    color_image_size = color_image_resolution.width * color_image_resolution.height * 3;
    color_stream.setMirroringEnabled(false);

    if (color_stream.start() != openni::STATUS_OK) {
        color_stream.destroy();
        BOOST_THROW_EXCEPTION(GrabberException("Failed to start color stream")
                              << GrabberException::ErrorInfo(openni::OpenNI::getExtendedError()));
    }

    streams.push_back(&color_stream);

    auto control = device.getPlaybackControl();
    if (control != nullptr) {
        // This is a file, make sure we get every frame
        control->setSpeed(-1.0f);
        control->setRepeatEnabled(false);
        num_frames = control->getNumberOfFrames(color_stream);
        is_file = true;
        if (num_frames == -1)
            BOOST_THROW_EXCEPTION(GrabberException("Unable to determine number of frames in ONI file"));
    }
}
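After Example 11's open(), a grab step typically waits on the collected stream list before reading. A hedged sketch follows, reusing the streams and color_stream members from the example; grabFrame, the 1-second timeout, and the assumption that streams is a std::vector<openni::VideoStream*> are all illustrative.

// Hypothetical grab step following Example 11.
bool grabFrame(openni::VideoFrameRef& color_frame) {
    int ready = -1;
    if (openni::OpenNI::waitForAnyStream(streams.data(), static_cast<int>(streams.size()),
                                         &ready, 1000) != openni::STATUS_OK)
        return false;  // no stream became ready within the timeout
    return color_stream.readFrame(&color_frame) == openni::STATUS_OK;
}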
Example 12: initialize
int SensorOpenNI::initialize()
{
    LOG(INFO) << "Initializing OpenNI";

    ///< force shutdown before starting!!
    kinect::OpenNI::shutdown();

    kinect::Status rc;
    rc = kinect::STATUS_OK;

    /// Fetch the device URI to pass to Device::open()
    const char* deviceURI = kinect::ANY_DEVICE;

    /// Initialize the device
    rc = kinect::OpenNI::initialize();
    if (rc != kinect::STATUS_OK)
    {
        mDebug() << "Initialization Errors (if any): " << kinect::OpenNI::getExtendedError();
        kinect::OpenNI::shutdown();
        exit(0);
    }

    /// Open the device using the previously fetched device URI
    rc = device.open(deviceURI);
    if (rc != kinect::STATUS_OK)
    {
        mDebug() << "Device open failed: " << kinect::OpenNI::getExtendedError();
        kinect::OpenNI::shutdown();
        exit(0);
    }

    /// Create the depth stream
    rc = g_depthStream.create(device, kinect::SENSOR_DEPTH);
    if (rc == kinect::STATUS_OK)
    {
        /// Start the depth stream, if its creation was successful
        rc = g_depthStream.start();
        if (rc != kinect::STATUS_OK)
        {
            mDebug() << "Couldn't start depth stream: " << kinect::OpenNI::getExtendedError();
            g_depthStream.destroy();
            exit(0);
        }
    }
    else
    {
        mDebug() << "Couldn't find depth stream: " << kinect::OpenNI::getExtendedError();
        exit(0);
    }

    if (!g_depthStream.isValid())
    {
        mDebug() << "No valid depth streams. Exiting";
        kinect::OpenNI::shutdown();
        exit(0);
    }

    /// Create the color stream
    rc = g_colorStream.create(device, kinect::SENSOR_COLOR);
    if (rc == kinect::STATUS_OK)
    {
        /// Start the color stream, if its creation was successful
        rc = g_colorStream.start();
        if (rc != kinect::STATUS_OK)
        {
            mDebug() << "Couldn't start color stream: " << kinect::OpenNI::getExtendedError();
            g_colorStream.destroy();
            exit(0);
        }
    }
    else
    {
        mDebug() << "Couldn't find color stream: " << kinect::OpenNI::getExtendedError();
        exit(0);
    }

    if (!g_colorStream.isValid())
    {
        mDebug() << "No valid color streams. Exiting";
        kinect::OpenNI::shutdown();
        exit(0);
    }

    /// Configure resolutions
    {
        /// Attempt to set for depth
        {
            kinect::VideoMode mode = g_depthStream.getVideoMode();
            if (((int)camera->FPS()) == 60)
                mode.setFps(60);
            else
                mode.setFps(30);
            mode.setResolution(camera->width(), camera->height());
            rc = g_depthStream.setVideoMode(mode);
            if (rc != kinect::STATUS_OK)
                std::cerr << "error setting video mode (depth)" << std::endl;
        }

        /// Attempt to set for color
//......... the rest of this example is omitted .........
Example 13: openCommon
void openCommon(openni::Device& device, bool defaultRightColor)
{
    XnStatus nRetVal = XN_STATUS_OK;

    g_bIsDepthOn = false;
    g_bIsColorOn = false;
    g_bIsIROn = false;

    g_depthSensorInfo = device.getSensorInfo(openni::SENSOR_DEPTH);
    g_colorSensorInfo = device.getSensorInfo(openni::SENSOR_COLOR);
    g_irSensorInfo = device.getSensorInfo(openni::SENSOR_IR);

    if (g_depthSensorInfo != NULL)
    {
        nRetVal = g_depthStream.create(device, openni::SENSOR_DEPTH);
        if (nRetVal != openni::STATUS_OK)
        {
            printf("Failed to create depth stream:\n%s\n", openni::OpenNI::getExtendedError());
            return;
        }

        nRetVal = g_depthStream.start();
        if (nRetVal != openni::STATUS_OK)
        {
            printf("Failed to start depth stream:\n%s\n", openni::OpenNI::getExtendedError());
            g_depthStream.destroy();
            return;
        }

        g_bIsDepthOn = true;
    }

    if (g_colorSensorInfo != NULL)
    {
        nRetVal = g_colorStream.create(device, openni::SENSOR_COLOR);
        if (nRetVal != openni::STATUS_OK)
        {
            printf("Failed to create color stream:\n%s\n", openni::OpenNI::getExtendedError());
            return;
        }

        if (defaultRightColor)
        {
            nRetVal = g_colorStream.start();
            if (nRetVal != openni::STATUS_OK)
            {
                printf("Failed to start color stream:\n%s\n", openni::OpenNI::getExtendedError());
                g_colorStream.destroy();
                return;
            }

            g_bIsColorOn = true;
        }
    }

    if (g_irSensorInfo != NULL)
    {
        nRetVal = g_irStream.create(device, openni::SENSOR_IR);
        if (nRetVal != openni::STATUS_OK)
        {
            printf("Failed to create IR stream:\n%s\n", openni::OpenNI::getExtendedError());
            return;
        }

        if (!g_bIsColorOn)
        {
            nRetVal = g_irStream.start();
            if (nRetVal != openni::STATUS_OK)
            {
                printf("Failed to start IR stream:\n%s\n", openni::OpenNI::getExtendedError());
                g_irStream.destroy();
                return;
            }

            g_bIsIROn = true;
        }
    }

    initConstants();

    readFrame();
}
Example 14:
// Start recording
void Oni2Grabber::createAndStartStream_(openni::VideoStream &stream, openni::SensorType sensor_type)
{
    stream.create(device_, sensor_type);
    stream.start();
    streams_.push_back(&stream);
}
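Example 14 is typically invoked from the grabber's setup, after which the collected streams_ can be handed to an openni::Recorder. A hedged caller sketch follows; setupAndRecord_ and the depth_stream_/color_stream_/recorder_ members are assumptions, only createAndStartStream_ and streams_ come from the example.

// Hypothetical caller of Example 14: create both streams, then record them to a file.
void Oni2Grabber::setupAndRecord_(const char* path)
{
    createAndStartStream_(depth_stream_, openni::SENSOR_DEPTH);
    createAndStartStream_(color_stream_, openni::SENSOR_COLOR);

    if (recorder_.create(path) == openni::STATUS_OK)
    {
        for (size_t i = 0; i < streams_.size(); ++i)
            recorder_.attach(*streams_[i]);
        recorder_.start();
    }
}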
Example 15:
bool OpenNI2Driver::start()
{
    if ( rgbStream.start() != STATUS_OK ) return false ;
    if ( depthStream.start() != STATUS_OK ) return false ;
    return true ;
}