本文整理汇总了C++中openni::VideoStream类的典型用法代码示例。如果您正苦于以下问题:C++ VideoStream类的具体用法?C++ VideoStream怎么用?C++ VideoStream使用的例子?那么, 这里精选的类代码示例或许可以为您提供帮助。
在下文中一共展示了VideoStream类的15个代码示例,这些例子默认根据受欢迎程度排序。您可以为喜欢或者感觉有用的代码点赞,您的评价将有助于系统推荐出更棒的C++代码示例。
示例1: readFrame
// Drain every pending frame: poll (zero timeout) while any of the three
// global streams reports a new frame, reading each into its matching
// global VideoFrameRef. Returns once no stream has data ready.
void readFrame()
{
    openni::VideoStream* streams[] = {&g_depthStream, &g_colorStream, &g_irStream};
    int readyIndex = -1;

    for (;;)
    {
        openni::Status rc = openni::OpenNI::waitForAnyStream(streams, 3, &readyIndex, 0);
        if (rc != openni::STATUS_OK)
            break;  // nothing more pending (or wait failed) -- stop polling

        if (readyIndex == 0)
            g_depthStream.readFrame(&g_depthFrame);
        else if (readyIndex == 1)
            g_colorStream.readFrame(&g_colorFrame);
        else if (readyIndex == 2)
            g_irStream.readFrame(&g_irFrame);
        else
            printf("Error in wait\n");  // index outside the streams[] array
    }
}
示例2: setResolution
/*
void openni::VideoMode::setResolution()
Setter function for the resolution of this VideoMode. Application use of this function is not recommended.
Instead, use SensorInfo::getSupportedVideoModes() to obtain a list of valid video modes
-- cited from OpenNI2 help. setResolution() is not recommended.
*/
// Select and apply a sensor-supported video mode that exactly matches
// (w, h, fps, format). Per the OpenNI2 docs, VideoMode::setResolution()
// is discouraged; the supported-mode list from SensorInfo is used instead.
//
// @param stream  stream whose mode is to be set (must have a SensorInfo)
// @param w,h     requested resolution in pixels
// @param fps     requested frame rate
// @param format  requested pixel format
// @return true if a matching mode was found and applied, false otherwise
bool setONI2StreamMode(openni::VideoStream& stream, int w, int h, int fps, openni::PixelFormat format){
    const openni::Array<openni::VideoMode>& modes = stream.getSensorInfo().getSupportedVideoModes();
    for(int i = 0, i_end = modes.getSize(); i < i_end; ++i){
        const openni::VideoMode& mode = modes[i];
        if(mode.getResolutionX() != w ||
           mode.getResolutionY() != h ||
           mode.getFps() != fps ||
           mode.getPixelFormat() != format){
            continue;  // not the requested mode
        }
        // Exact match advertised by the sensor -- apply it.
        return stream.setVideoMode(mode) == openni::STATUS_OK;
    }
    return false;  // no supported mode matches the request
}
示例3: initONI2Stream
// Create a color or depth stream on the device and switch it to the
// requested (w, h, fps, format) mode, logging the mode before and after.
//
// @return true on success; false for unknown sensor types, creation
//         failure, or when no matching mode exists.
bool initONI2Stream(openni::Device& device, openni::SensorType sensorType, openni::VideoStream& stream, int w, int h, int fps, openni::PixelFormat format){
    const char* sensorName;
    switch(sensorType){
    case openni::SENSOR_COLOR:
        sensorName = "openni::SENSOR_COLOR";
        break;
    case openni::SENSOR_DEPTH:
        sensorName = "openni::SENSOR_DEPTH";
        break;
    default:
        // Only color and depth are handled here.
        printf("%s:Unknown SensorType -> %d\n", __FUNCTION__, sensorType);
        return false;
    }

    if(stream.create(device, sensorType) != openni::STATUS_OK){
        printf("%s:Couldn't find sensor %s: %s\n", __FUNCTION__, sensorName, openni::OpenNI::getExtendedError());
        return false;
    }

    // Report the mode the stream came up with before reconfiguring.
    openni::VideoMode mode = stream.getVideoMode();
    printf("%s:Initial resolution %s (%d, %d) FPS %d Format %d\n", __FUNCTION__, sensorName, mode.getResolutionX(), mode.getResolutionY(), mode.getFps(), mode.getPixelFormat());

    if(!setONI2StreamMode(stream, w, h, fps, format)){
        printf("%s:Can't find desired mode in the %s\n", __FUNCTION__, sensorName);
        return false;
    }

    // Report the mode actually in effect after the change.
    mode = stream.getVideoMode();
    printf(" -> (%d, %d) FPS %d Format %d\n", mode.getResolutionX(), mode.getResolutionY(), mode.getFps(), mode.getPixelFormat());
    return true;
}
示例4: toggleStreamState
// Flip a stream between started and stopped, creating it on the global
// device the first time it is toggled. `isOn` tracks the current state
// and is only flipped when the transition succeeds.
void toggleStreamState(openni::VideoStream& stream, openni::VideoFrameRef& frame, bool& isOn, openni::SensorType type, const char* name)
{
    // Lazily create the stream on first use.
    if (!stream.isValid())
    {
        if (stream.create(g_device, type) != openni::STATUS_OK)
        {
            displayError("Failed to create %s stream:\n%s", name, openni::OpenNI::getExtendedError());
            return;
        }
    }

    if (isOn)
    {
        // Turning off: stop delivery and drop the last held frame.
        stream.stop();
        frame.release();
    }
    else if (stream.start() != openni::STATUS_OK)
    {
        displayError("Failed to start %s stream:\n%s", name, openni::OpenNI::getExtendedError());
        return;  // leave isOn unchanged on failure
    }

    isOn = !isOn;
}
示例5: setONI2StreamMode
// Select and apply a sensor-supported video mode matching (w, h, fps, format).
/*
void openni::VideoMode::setResolution()
Setter function for the resolution of this VideoMode. Application use of this function is not recommended.
Instead, use SensorInfo::getSupportedVideoModes() to obtain a list of valid video modes
-- cited from OpenNI2 help. setResolution() is not recommended.
*/
// @return true if a matching mode was found and applied, false otherwise.
bool setONI2StreamMode(openni::VideoStream& stream, int w, int h, int fps, openni::PixelFormat format){
    const openni::Array<openni::VideoMode>& modes = stream.getSensorInfo().getSupportedVideoModes();
    for(int i = 0, i_end = modes.getSize(); i < i_end; ++i){
        if(modes[i].getResolutionX() != w){
            continue;
        }
        if(modes[i].getResolutionY() != h){
            continue;
        }
        // BUGFIX: the fps parameter was previously ignored, so a mode with
        // the wrong frame rate could be selected. Filter on it like the
        // other criteria (matches the sibling overload of this function).
        if(modes[i].getFps() != fps){
            continue;
        }
        if(modes[i].getPixelFormat() != format){
            continue;
        }
        openni::Status rc = stream.setVideoMode(modes[i]);
        if(rc != openni::STATUS_OK){
            // BUGFIX: the old message claimed the RGB stream was missing;
            // this branch actually means setVideoMode() failed.
            printf("%s:Couldn't set video mode:\n%s\n", __FUNCTION__, openni::OpenNI::getExtendedError());
            return false;
        }
        return true;
    }
    return false;  // no supported mode matches the request
}
示例6: openStream
// Open and start a sensor stream according to openType:
//   SENSOR_OFF        -> do nothing, report success.
//   SENSOR_TRY (else) -> best effort; failures are silent and return 0.
//   SENSOR_ON         -> required; failures are logged and return nonzero
//                        (-1 no sensor, -2 create failed, -3 start failed).
// On success *pbIsStreamOn is set true and *ppSensorInfo points at the
// device's sensor info.
int openStream(openni::Device& device, const char* name, openni::SensorType sensorType, SensorOpenType openType, openni::VideoStream& stream, const openni::SensorInfo** ppSensorInfo, bool* pbIsStreamOn)
{
    *ppSensorInfo = device.getSensorInfo(sensorType);
    *pbIsStreamOn = false;

    if (openType == SENSOR_OFF)
    {
        return 0;
    }

    if (*ppSensorInfo == NULL)
    {
        if (openType == SENSOR_ON)
        {
            printf("No %s sensor available\n", name);
            return -1;
        }
        else
        {
            return 0;
        }
    }

    openni::Status nRetVal = stream.create(device, sensorType);
    if (nRetVal != openni::STATUS_OK)
    {
        if (openType == SENSOR_ON)
        {
            // BUGFIX: arguments were swapped -- the extended error filled
            // the "%s stream" slot and the stream name the error slot.
            printf("Failed to create %s stream:\n%s\n", name, openni::OpenNI::getExtendedError());
            return -2;
        }
        else
        {
            return 0;
        }
    }

    nRetVal = stream.start();
    if (nRetVal != openni::STATUS_OK)
    {
        stream.destroy();
        if (openType == SENSOR_ON)
        {
            // BUGFIX: message was hard-coded to "depth" even for other sensors.
            printf("Failed to start %s stream:\n%s\n", name, openni::OpenNI::getExtendedError());
            return -3;
        }
        else
        {
            return 0;
        }
    }

    *pbIsStreamOn = true;
    return 0;
}
示例7: onNewFrame
// Stream callback: pull the newly available frame, append it to the
// listener's frame queue, and repaint the widget when updates are enabled.
void ColorListener::onNewFrame(openni::VideoStream& vs)
{
    openni::VideoFrameRef newFrame;
    vs.readFrame(&newFrame);
    frames->push_back(newFrame);
    if (isUpdate)
    {
        w->update();
    }
}
示例8: onNewFrame
// Stream callback: copy the newest frame into the listener's local image
// buffer under the mutex, updating cached pixel format / size metadata.
// Bails out (with a log message) on invalid frames, unrecognized pixel
// formats, or a size mismatch between device frame and local image.
void streamFrameListener::onNewFrame(openni::VideoStream& stream)
{
// All member state below is mutated under this lock.
LockGuard guard(mutex);
stream.readFrame(&frameRef);
if (!frameRef.isValid() || !frameRef.getData())
{
yInfo() << "frame lost";
return;
}
// Cache the stream's pixel format and map it to a YARP pixel code.
int pixC;
pixF = stream.getVideoMode().getPixelFormat();
pixC = depthCameraDriver::pixFormatToCode(pixF);
w = frameRef.getWidth();
h = frameRef.getHeight();
dataSize = frameRef.getDataSize();
// NOTE(review): isReady is set before the pixel-format validity check,
// so a stream with an unrecognized format still marks the listener
// ready -- confirm this is intentional.
if (isReady == false)
{
isReady = true;
}
if(pixC == VOCAB_PIXEL_INVALID)
{
yError() << "depthCameraDriver: Pixel Format not recognized";
return;
}
image.setPixelCode(pixC);
image.resize(w, h);
// Guard the memcpy: the local image buffer must match the frame size.
if(image.getRawImageSize() != frameRef.getDataSize())
{
yError() << "depthCameraDriver:device and local copy data size doesn't match";
return;
}
memcpy((void*)image.getRawImage(), (void*)frameRef.getData(), frameRef.getDataSize());
stamp.update();
return;
}
示例9: closecamera
// Tear down in reverse order of startcamera(): streams first, then the
// device, then the OpenNI runtime itself.
void KinectCamera::closecamera(void)
{
    mColorStream.destroy();
    mDepthStream.destroy();
    mDevice.close();
    openni::OpenNI::shutdown();
}
示例10: convertDepthPointToColor
bool convertDepthPointToColor(int depthX, int depthY, openni::DepthPixel depthZ, int* pColorX, int* pColorY)
{
if (!g_depthStream.isValid() || !g_colorStream.isValid())
return false;
return (openni::STATUS_OK == openni::CoordinateConverter::convertDepthToColor(g_depthStream, g_colorStream, depthX, depthY, depthZ, pColorX, pColorY));
}
示例11: toggleImageAutoWhiteBalance
void toggleImageAutoWhiteBalance(int)
{
if (g_colorStream.getCameraSettings() == NULL)
{
displayError("Color stream doesn't support camera settings");
return;
}
g_colorStream.getCameraSettings()->setAutoWhiteBalanceEnabled(!g_colorStream.getCameraSettings()->getAutoWhiteBalanceEnabled());
displayMessage("Auto White balance: %s", g_colorStream.getCameraSettings()->getAutoWhiteBalanceEnabled() ? "ON" : "OFF");
}
示例12: startcamera
// Bring up OpenNI, open the default device, and start the color and depth
// streams (mDevice / mColorStream / mDepthStream are members).
// NOTE(review): return codes of open/create/start are not checked here --
// failures go unnoticed; confirm callers rely on that best-effort behavior.
void KinectCamera::startcamera(void)
{
    openni::OpenNI::initialize();                        // initialize the OpenNI runtime
    mDevice.open(openni::ANY_DEVICE);                    // open any attached device
    mColorStream.create(mDevice, openni::SENSOR_COLOR);  // create the color stream
    mColorStream.start();                                // begin color delivery
    mDepthStream.create(mDevice, openni::SENSOR_DEPTH);  // create the depth stream
    mDepthStream.start();                                // begin depth delivery
    fig = 1;
}
示例13: toggleCloseRange
// UI callback: toggle the depth stream's close-range property and report
// the new state.
void toggleCloseRange(int )
{
	// BUGFIX: initialize the flag -- if getProperty() fails, negating an
	// uninitialized bool reads an indeterminate value (UB).
	bool bCloseRange = false;
	g_depthStream.getProperty(XN_STREAM_PROPERTY_CLOSE_RANGE, &bCloseRange);
	bCloseRange = !bCloseRange;
	g_depthStream.setProperty(XN_STREAM_PROPERTY_CLOSE_RANGE, bCloseRange);
	displayMessage ("Close range: %s", bCloseRange?"On":"Off");
}
示例14: toggleCMOSAutoLoops
// UI callback: flip auto-exposure and auto-white-balance together and
// report the resulting auto-exposure state. No-op (with a message) when
// the color stream exposes no camera settings.
void toggleCMOSAutoLoops(int )
{
    if (g_colorStream.getCameraSettings() == NULL)
    {
        displayMessage("Color stream doesn't support camera settings");
        return;
    }

    // Toggle both auto loops in lockstep.
    toggleImageAutoExposure(0);
    toggleImageAutoWhiteBalance(0);

    const bool autoExposure = g_colorStream.getCameraSettings()->getAutoExposureEnabled();
    displayMessage ("CMOS Auto Loops: %s", autoExposure ? "On" : "Off");
}
示例15: Init
// Initialize OpenNI + NiTE, open the device (honoring an optional
// "-device <uri>" argument), create the hand tracker, start the depth
// stream (best effort), enable wave/click gesture detection, and finish
// with OpenCV setup.
// Returns STATUS_OK chain from InitOpenCV on success, or the failing
// status otherwise.
openni::Status HandGesture::Init(int argc, char **argv)
{
    openni::OpenNI::initialize();

    // Scan the command line for "-device <uri>"; default to any device.
    const char* deviceUri = openni::ANY_DEVICE;
    for (int i = 1; i + 1 < argc; ++i)
    {
        if (strcmp(argv[i], "-device") == 0)
        {
            deviceUri = argv[i + 1];
            break;
        }
    }

    openni::Status rc = m_device.open(deviceUri);
    if (rc != openni::STATUS_OK)
    {
        printf("Open Device failed:\n%s\n", openni::OpenNI::getExtendedError());
        return rc;
    }

    nite::NiTE::initialize();
    if (m_pHandTracker->create(&m_device) != nite::STATUS_OK)
    {
        return openni::STATUS_ERROR;
    }

    // Depth stream is best effort: failures are logged but not fatal.
    rc = m_depthStream.create(m_device, openni::SENSOR_DEPTH);
    if (rc != openni::STATUS_OK)
    {
        printf("SimpleViewer: Couldn't find depth stream:\n%s\n", openni::OpenNI::getExtendedError());
    }
    else if (m_depthStream.start() != openni::STATUS_OK)
    {
        printf("SimpleViewer: Couldn't start depth stream:\n%s\n", openni::OpenNI::getExtendedError());
        m_depthStream.destroy();
    }

    m_pHandTracker->startGestureDetection(nite::GESTURE_WAVE);
    m_pHandTracker->startGestureDetection(nite::GESTURE_CLICK);
    // m_pHandTracker->startGestureDetection(nite::GESTURE_HAND_RAISE);

    return InitOpenCV(argc, argv);
}