This article collects typical usage examples of the C++ method openni::VideoStream::isValid. If you are unsure what VideoStream::isValid does or how to call it, the curated examples below may help; they also illustrate the broader use of the openni::VideoStream class.
Seven code examples of VideoStream::isValid are shown below, sorted by popularity by default.
Example 1: convertDepthPointToColor
// Maps a pixel in the depth image (plus its depth value) to the corresponding
// pixel coordinates in the color image via OpenNI's CoordinateConverter.
bool convertDepthPointToColor(int depthX, int depthY, openni::DepthPixel depthZ, int* pColorX, int* pColorY)
{
    if (!g_depthStream.isValid() || !g_colorStream.isValid())
        return false;

    return (openni::STATUS_OK == openni::CoordinateConverter::convertDepthToColor(g_depthStream, g_colorStream, depthX, depthY, depthZ, pColorX, pColorY));
}
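A rough usage sketch, not part of the original example: once a depth frame has been read from g_depthStream, the helper above can map a pixel into color-image coordinates. The frame variable used here (depthFrame) is an assumption.

// Hypothetical call site: map the center pixel of the current depth frame to color coordinates.
openni::VideoFrameRef depthFrame;   // assumed filled earlier by g_depthStream.readFrame(&depthFrame)
const openni::DepthPixel* depth = (const openni::DepthPixel*)depthFrame.getData();
int cx = depthFrame.getWidth() / 2;
int cy = depthFrame.getHeight() / 2;
openni::DepthPixel z = depth[cy * depthFrame.getWidth() + cx];
int colorX = 0, colorY = 0;
if (z != 0 && convertDepthPointToColor(cx, cy, z, &colorX, &colorY))
    printf("depth (%d, %d) -> color (%d, %d)\n", cx, cy, colorX, colorY);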
Example 2: toggleStreamState
// Creates the stream on first use, then starts or stops it depending on its current state.
void toggleStreamState(openni::VideoStream& stream, openni::VideoFrameRef& frame, bool& isOn, openni::SensorType type, const char* name)
{
    openni::Status nRetVal = openni::STATUS_OK;

    if (!stream.isValid())
    {
        nRetVal = stream.create(g_device, type);
        if (nRetVal != openni::STATUS_OK)
        {
            displayError("Failed to create %s stream:\n%s", name, openni::OpenNI::getExtendedError());
            return;
        }
    }

    if (isOn)
    {
        stream.stop();
        frame.release();
    }
    else
    {
        nRetVal = stream.start();
        if (nRetVal != openni::STATUS_OK)
        {
            displayError("Failed to start %s stream:\n%s", name, openni::OpenNI::getExtendedError());
            return;
        }
    }

    isOn = !isOn;
}
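A hedged sketch of how this helper is typically wired up. The globals g_depthStream, g_depthFrame and g_bIsDepthOn are assumptions, mirroring the g_device global the function itself relies on.

// Hypothetical globals, analogous to the g_device used inside toggleStreamState().
openni::VideoStream g_depthStream;
openni::VideoFrameRef g_depthFrame;
bool g_bIsDepthOn = false;

// E.g. bound to a keyboard shortcut: creates the stream on first use, then toggles it.
void onToggleDepthKey()
{
    toggleStreamState(g_depthStream, g_depthFrame, g_bIsDepthOn, openni::SENSOR_DEPTH, "Depth");
}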
Example 3: resetStreamCropping
void resetStreamCropping(openni::VideoStream& stream)
{
    if (!stream.isValid())
    {
        displayMessage("Stream does not exist!");
        return;
    }

    if (!stream.isCroppingSupported())
    {
        displayMessage("Stream does not support cropping!");
        return;
    }

    openni::Status nRetVal = stream.resetCropping();
    if (nRetVal != openni::STATUS_OK)
    {
        displayMessage("Failed to reset cropping: %s", xnGetStatusString(nRetVal));
        return;
    }
}
Example 4: setStreamCropping
void setStreamCropping(openni::VideoStream& stream, int originX, int originY, int width, int height)
{
    if (!stream.isValid())
    {
        displayMessage("Stream does not exist!");
        return;
    }

    if (!stream.isCroppingSupported())
    {
        displayMessage("Stream does not support cropping!");
        return;
    }

    openni::Status nRetVal = stream.setCropping(originX, originY, width, height);
    if (nRetVal != openni::STATUS_OK)
    {
        displayMessage("Failed to set cropping: %s", xnGetStatusString(nRetVal));
        return;
    }
}
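Examples 3 and 4 are complementary. As a sketch only (the stream name and dimensions are assumptions), cropping a centered 320x240 window out of an assumed 640x480 stream and later restoring the full image looks like this:

// Hypothetical usage: crop to a centered 320x240 region of an assumed 640x480 depth stream.
setStreamCropping(g_depthStream, (640 - 320) / 2, (480 - 240) / 2, 320, 240);
// ... grab and process cropped frames ...
resetStreamCropping(g_depthStream);   // restore the full field of view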
Example 5: onNewFrame
// NewFrameListener callback: reads the latest IR and depth frames and copies the
// pixel data into the sensor's own buffers.
void OccipitalSensor::onNewFrame(openni::VideoStream &stream)
{
    /*static long i;
    i++;
    qDebug() << i << "onNewFrame()";*/

    if (irStream.readFrame(&irFrame) != openni::STATUS_OK || depthStream.readFrame(&depthFrame) != openni::STATUS_OK)
    {
        std::cerr << "readFrame not OK " << stream.isValid() << std::endl;
        return;
    }

    const openni::Grayscale16Pixel* irData = (const openni::Grayscale16Pixel*)(irFrame.getData());
    const openni::DepthPixel* depthData = (const openni::DepthPixel*)(depthFrame.getData());

    for (int r = 0; r < Height; r++)
    {
        for (int c = 0; c < Width; c++)
        {
            this->irData(r, c) = irData[Width*r + c];
            this->depthData(r, c) = depthData[Width*r + c];
        }
    }
}
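onNewFrame() is the callback declared by openni::VideoStream::NewFrameListener, so OccipitalSensor presumably implements that interface. A sketch, under that assumption, of how such a listener is normally attached and detached:

// Assumes OccipitalSensor derives from openni::VideoStream::NewFrameListener and that
// the IR stream below is the same stream the class reads from in onNewFrame().
openni::VideoStream irStream;               // created and started elsewhere on the device
OccipitalSensor sensor;
irStream.addNewFrameListener(&sensor);      // onNewFrame() now fires for every new IR frame
// ... acquisition loop ...
irStream.removeNewFrameListener(&sensor);   // detach before the stream is destroyed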
Example 6: toggleIRState
void toggleIRState(int)
{
    if (g_irStream.isValid())
    {
        if (g_bIsIROn)
        {
            g_irStream.stop();
            g_irFrame.release();
        }
        else
        {
            openni::Status nRetVal = g_irStream.start();
            if (nRetVal != openni::STATUS_OK)
            {
                displayError("Failed to start IR stream:\n%s", openni::OpenNI::getExtendedError());
                return;
            }
        }

        g_bIsIROn = !g_bIsIROn;
    }
}
Example 7: initialize
int SensorOpenNI::initialize()
{
    LOG(INFO) << "Initializing OpenNI";

    /// Force shutdown before starting!!
    kinect::OpenNI::shutdown();

    kinect::Status rc;
    rc = kinect::STATUS_OK;

    /// Fetch the device URI to pass to Device::open()
    const char* deviceURI = kinect::ANY_DEVICE;

    /// Initialize the device
    rc = kinect::OpenNI::initialize();
    if (rc != kinect::STATUS_OK)
    {
        mDebug() << "Initialization Errors (if any): " << kinect::OpenNI::getExtendedError();
        kinect::OpenNI::shutdown();
        exit(0);
    }

    /// Open the device using the previously fetched device URI
    rc = device.open(deviceURI);
    if (rc != kinect::STATUS_OK)
    {
        mDebug() << "Device open failed: " << kinect::OpenNI::getExtendedError();
        kinect::OpenNI::shutdown();
        exit(0);
    }

    /// Create the depth stream
    rc = g_depthStream.create(device, kinect::SENSOR_DEPTH);
    if (rc == kinect::STATUS_OK)
    {
        /// Start the depth stream, if its creation was successful
        rc = g_depthStream.start();
        if (rc != kinect::STATUS_OK)
        {
            mDebug() << "Couldn't start depth stream: " << kinect::OpenNI::getExtendedError();
            g_depthStream.destroy();
            exit(0);
        }
    }
    else
    {
        mDebug() << "Couldn't find depth stream: " << kinect::OpenNI::getExtendedError();
        exit(0);
    }

    if (!g_depthStream.isValid())
    {
        mDebug() << "No valid depth streams. Exiting";
        kinect::OpenNI::shutdown();
        exit(0);
    }

    /// Create the color stream
    rc = g_colorStream.create(device, kinect::SENSOR_COLOR);
    if (rc == kinect::STATUS_OK)
    {
        /// Start the color stream, if its creation was successful
        rc = g_colorStream.start();
        if (rc != kinect::STATUS_OK)
        {
            mDebug() << "Couldn't start color stream: " << kinect::OpenNI::getExtendedError();
            g_colorStream.destroy();
            exit(0);
        }
    }
    else
    {
        mDebug() << "Couldn't find color stream: " << kinect::OpenNI::getExtendedError();
        exit(0);
    }

    if (!g_colorStream.isValid())
    {
        mDebug() << "No valid color streams. Exiting";
        kinect::OpenNI::shutdown();
        exit(0);
    }

    /// Configure resolutions
    {
        /// Attempt to set for depth
        {
            kinect::VideoMode mode = g_depthStream.getVideoMode();
            if (((int)camera->FPS()) == 60)
                mode.setFps(60);
            else
                mode.setFps(30);
            mode.setResolution(camera->width(), camera->height());
            rc = g_depthStream.setVideoMode(mode);
            if (rc != kinect::STATUS_OK)
                std::cerr << "error setting video mode (depth)" << std::endl;
        }
        /// Attempt to set for color
//......... rest of the code omitted here .........
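The color-stream configuration is omitted in the source. Purely as a generic sketch, and not the original code, it would typically mirror the depth block above, reusing the same kinect namespace alias and camera object:

        /// Generic sketch only -- NOT the omitted original code.
        {
            kinect::VideoMode mode = g_colorStream.getVideoMode();
            mode.setFps(((int)camera->FPS()) == 60 ? 60 : 30);
            mode.setResolution(camera->width(), camera->height());
            rc = g_colorStream.setVideoMode(mode);
            if (rc != kinect::STATUS_OK)
                std::cerr << "error setting video mode (color)" << std::endl;
        }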