This article collects typical usage examples of the C++ method openni::VideoFrameRef::isValid. If you are wondering how VideoFrameRef::isValid is called in practice, or are looking for real-world examples of it in use, the curated code samples below may help. You can also explore further usage examples of the containing class, openni::VideoFrameRef.
Six code examples of the VideoFrameRef::isValid method are shown below, sorted by popularity.
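Before the examples, here is a minimal, self-contained sketch of the pattern every sample below relies on: read a frame from a VideoStream and call isValid() before touching the frame's buffer. The device and stream setup shown here is an illustrative assumption and is not taken from the examples.

// Minimal sketch: open a depth stream, read one frame, and guard access with isValid().
#include <OpenNI.h>
#include <cstdio>

int main()
{
    openni::OpenNI::initialize();

    openni::Device device;
    if (device.open(openni::ANY_DEVICE) != openni::STATUS_OK)
        return 1;

    openni::VideoStream depth;
    depth.create(device, openni::SENSOR_DEPTH);
    depth.start();

    openni::VideoFrameRef frame;
    depth.readFrame(&frame);

    // A default-constructed or failed frame must never be dereferenced;
    // isValid() is the guard each example below starts with.
    if (frame.isValid())
        printf("Got a %dx%d depth frame\n", frame.getWidth(), frame.getHeight());

    depth.stop();
    depth.destroy();
    device.close();
    openni::OpenNI::shutdown();
    return 0;
}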
Example 1: displayFrame
void SampleViewer::displayFrame(const openni::VideoFrameRef& frame)
{
    if (!frame.isValid())
        return;

    // Walk the depth frame row by row and write the histogram-equalized value
    // into the texture map as grayscale, honoring the frame's crop origin.
    const openni::DepthPixel* pDepthRow = (const openni::DepthPixel*)frame.getData();
    openni::RGB888Pixel* pTexRow = m_pTexMap + frame.getCropOriginY() * m_nTexMapX;
    int rowSize = frame.getStrideInBytes() / sizeof(openni::DepthPixel);

    for (int y = 0; y < frame.getHeight(); ++y)
    {
        const openni::DepthPixel* pDepth = pDepthRow;
        openni::RGB888Pixel* pTex = pTexRow + frame.getCropOriginX();

        for (int x = 0; x < frame.getWidth(); ++x, ++pDepth, ++pTex)
        {
            if (*pDepth != 0)
            {
                int nHistValue = m_pDepthHist[*pDepth];
                pTex->r = nHistValue;
                pTex->g = nHistValue;
                pTex->b = nHistValue;
            }
        }

        pDepthRow += rowSize;
        pTexRow += m_nTexMapX;
    }
}
Example 2: getDepthImage
// Wraps an OpenNI depth frame in a CV_16U cv::Mat and returns a deep copy.
cv::Mat getDepthImage(openni::VideoFrameRef& depth_frame)
{
    if (!depth_frame.isValid())
    {
        return cv::Mat();
    }

    openni::VideoMode video_mode = depth_frame.getVideoMode();
    // The Mat header initially points at the frame's own buffer;
    // clone() copies the pixels so the result outlives the frame.
    cv::Mat depth_img = cv::Mat(video_mode.getResolutionY(),
                                video_mode.getResolutionX(),
                                CV_16U, (char*)depth_frame.getData());
    return depth_img.clone();
}
Example 3: getColorImage
// Converts an OpenNI RGB888 color frame into a CV_8UC3 BGR cv::Mat.
cv::Mat getColorImage(openni::VideoFrameRef& color_frame)
{
    if (!color_frame.isValid())
    {
        return cv::Mat();
    }

    openni::VideoMode video_mode = color_frame.getVideoMode();
    cv::Mat color_img = cv::Mat(video_mode.getResolutionY(),
                                video_mode.getResolutionX(),
                                CV_8UC3, (char*)color_frame.getData());

    // OpenNI delivers RGB while OpenCV expects BGR; the conversion also
    // copies the pixels out of the frame's buffer into a new Mat.
    cv::Mat ret_img;
    cv::cvtColor(color_img, ret_img, CV_RGB2BGR);
    return ret_img;
}
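Examples 2 and 3 together form a small OpenNI-to-OpenCV bridge. A possible caller looks like the sketch below; the function name showOneFrame and the stream arguments are assumptions for illustration, not part of the examples.

// Hypothetical caller: read one frame from each stream and convert with the helpers above.
void showOneFrame(openni::VideoStream& depth_stream, openni::VideoStream& color_stream)
{
    openni::VideoFrameRef depth_frame, color_frame;
    depth_stream.readFrame(&depth_frame);
    color_stream.readFrame(&color_frame);

    cv::Mat depth = getDepthImage(depth_frame);   // CV_16U depth values, empty on an invalid frame
    cv::Mat color = getColorImage(color_frame);   // CV_8UC3 BGR image, empty on an invalid frame

    if (!depth.empty() && !color.empty())
    {
        cv::imshow("color", color);
        cv::waitKey(1);
    }
}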
Example 4: grabFrame
double grabFrame(cv::Mat& color) {
    int changed_index;
    auto status = openni::OpenNI::waitForAnyStream(streams.data(), 1, &changed_index);
    if (status != openni::STATUS_OK)
        return false;

    color_stream.readFrame(&color_frame);
    if (!color_frame.isValid())
        return false;

    // Copy the frame into the caller's preallocated CV_8UC3 Mat,
    // swapping the channel order from RGB to BGR on the fly.
    auto tgt = color.data;
    auto src = reinterpret_cast<const uint8_t*>(color_frame.getData());
    for (size_t i = 0; i < color.total(); ++i) {
        *tgt++ = *(src + 2);
        *tgt++ = *(src + 1);
        *tgt++ = *(src + 0);
        src += 3;
    }

    ++next_frame_index;
    return true;
}
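Example 4 fills a caller-supplied Mat, so the Mat must already be allocated as CV_8UC3 at the color stream's resolution, and the globals it uses (streams, color_stream, color_frame, next_frame_index) must be set up elsewhere in that program. A hedged usage sketch, assuming a 640x480 stream:

// Hypothetical capture loop for grabFrame(); 640x480 is an assumed resolution.
cv::Mat color(480, 640, CV_8UC3);
while (grabFrame(color) != 0)          // grabFrame returns false (0.0) on failure
{
    cv::imshow("color", color);
    if (cv::waitKey(1) == 27)          // Esc stops the loop
        break;
}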
Example 5: display
void SampleViewer::display()
{
    int changedIndex;
    openni::Status rc = openni::OpenNI::waitForAnyStream(m_streams, 2, &changedIndex);
    if (rc != openni::STATUS_OK)
    {
        printf("Wait failed\n");
        return;
    }

    // Read whichever stream reported a new frame.
    switch (changedIndex)
    {
    case 0:
        m_depthStream.readFrame(&m_depthFrame); break;
    case 1:
        m_colorStream.readFrame(&m_colorFrame); break;
    default:
        printf("Error in wait\n");
    }

    glClear(GL_COLOR_BUFFER_BIT | GL_DEPTH_BUFFER_BIT);
    glMatrixMode(GL_PROJECTION);
    glPushMatrix();
    glLoadIdentity();
    glOrtho(0, GL_WIN_SIZE_X, GL_WIN_SIZE_Y, 0, -1.0, 1.0);

    if (m_depthFrame.isValid())
    {
        calculateHistogram(m_pDepthHist, MAX_DEPTH, m_depthFrame);
    }

    memset(m_pTexMap, 0, m_nTexMapX*m_nTexMapY*sizeof(openni::RGB888Pixel));

    // check if we need to draw image frame to texture
    if ((m_eViewState == DISPLAY_MODE_OVERLAY ||
         m_eViewState == DISPLAY_MODE_IMAGE) && m_colorFrame.isValid())
    {
        const openni::RGB888Pixel* pImageRow = (const openni::RGB888Pixel*)m_colorFrame.getData();
        openni::RGB888Pixel* pTexRow = m_pTexMap + m_colorFrame.getCropOriginY() * m_nTexMapX;
        int rowSize = m_colorFrame.getStrideInBytes() / sizeof(openni::RGB888Pixel);

        for (int y = 0; y < m_colorFrame.getHeight(); ++y)
        {
            const openni::RGB888Pixel* pImage = pImageRow;
            openni::RGB888Pixel* pTex = pTexRow + m_colorFrame.getCropOriginX();

            for (int x = 0; x < m_colorFrame.getWidth(); ++x, ++pImage, ++pTex)
            {
                *pTex = *pImage;
            }

            pImageRow += rowSize;
            pTexRow += m_nTexMapX;
        }
    }

    // check if we need to draw depth frame to texture
    if ((m_eViewState == DISPLAY_MODE_OVERLAY ||
         m_eViewState == DISPLAY_MODE_DEPTH) && m_depthFrame.isValid())
    {
        const openni::DepthPixel* pDepthRow = (const openni::DepthPixel*)m_depthFrame.getData();
        openni::RGB888Pixel* pTexRow = m_pTexMap + m_depthFrame.getCropOriginY() * m_nTexMapX;
        int rowSize = m_depthFrame.getStrideInBytes() / sizeof(openni::DepthPixel);

        for (int y = 0; y < m_depthFrame.getHeight(); ++y)
        {
            const openni::DepthPixel* pDepth = pDepthRow;
            openni::RGB888Pixel* pTex = pTexRow + m_depthFrame.getCropOriginX();

            for (int x = 0; x < m_depthFrame.getWidth(); ++x, ++pDepth, ++pTex)
            {
                if (*pDepth != 0)
                {
                    int nHistValue = m_pDepthHist[*pDepth];
                    pTex->r = nHistValue;
                    pTex->g = nHistValue;
                    pTex->b = 0;
                }
            }

            pDepthRow += rowSize;
            pTexRow += m_nTexMapX;
        }
    }

    glTexParameteri(GL_TEXTURE_2D, GL_GENERATE_MIPMAP_SGIS, GL_TRUE);
    glTexParameteri(GL_TEXTURE_2D, GL_TEXTURE_MIN_FILTER, GL_LINEAR_MIPMAP_LINEAR);
    glTexParameteri(GL_TEXTURE_2D, GL_TEXTURE_MAG_FILTER, GL_LINEAR);
    glTexImage2D(GL_TEXTURE_2D, 0, GL_RGB, m_nTexMapX, m_nTexMapY, 0, GL_RGB, GL_UNSIGNED_BYTE, m_pTexMap);

    // Display the OpenGL texture map
    glColor4f(1, 1, 1, 1);
    glBegin(GL_QUADS);
    int nXRes = m_width;
    int nYRes = m_height;
    // upper left
    //......... (remaining code omitted) .........
Example 6: Display
void SampleViewer::Display()
{
    nite::Status rc = m_pHandTracker->readFrame(&handFrame);
    if (rc != nite::STATUS_OK)
    {
        printf("GetNextData failed\n");
        return;
    }

    depthFrame = handFrame.getDepthFrame();

    if (m_pTexMap == NULL)
    {
        // Texture map init
        m_nTexMapX = MIN_CHUNKS_SIZE(depthFrame.getVideoMode().getResolutionX(), TEXTURE_SIZE);
        m_nTexMapY = MIN_CHUNKS_SIZE(depthFrame.getVideoMode().getResolutionY(), TEXTURE_SIZE);
        m_pTexMap = new openni::RGB888Pixel[m_nTexMapX * m_nTexMapY];
    }

    glClear(GL_COLOR_BUFFER_BIT | GL_DEPTH_BUFFER_BIT);
    glEnable(GL_DEPTH_TEST);
    glMatrixMode(GL_PROJECTION);
    glPushMatrix();
    glLoadIdentity();
    glOrtho(0, GL_WIN_SIZE_X, GL_WIN_SIZE_Y, 0, -10000.0, 10000.0);

    if (depthFrame.isValid())
    {
        calculateHistogram(m_pDepthHist, MAX_DEPTH, depthFrame);
    }

    memset(m_pTexMap, 0, m_nTexMapX*m_nTexMapY*sizeof(openni::RGB888Pixel));

    float factor[3] = {1, 1, 1};

    // check if we need to draw depth frame to texture
    float av_x = 0;
    float av_y = 0;
    int counter = 0;
    for (int i = 0; i <= 7; i++)
        note_on[i] = false;

    if (depthFrame.isValid() && g_drawDepth)
    {
        const openni::DepthPixel* pDepthRow = (const openni::DepthPixel*)depthFrame.getData();
        const openni::DepthPixel* pDepthRow1 = pDepthRow;
        openni::RGB888Pixel* pTexRow = m_pTexMap + depthFrame.getCropOriginY() * m_nTexMapX;
        int rowSize = depthFrame.getStrideInBytes() / sizeof(openni::DepthPixel);

        // Render every depth pixel as a 3D point and, along the way,
        // accumulate the coordinates of points closer than 800 mm
        // (av_x, av_y, counter) for the code that follows.
        glPointSize(2);
        glBegin(GL_POINTS);
        for (int y = 0; y < depthFrame.getHeight(); ++y)
        {
            const openni::DepthPixel* pDepth = pDepthRow;
            openni::RGB888Pixel* pTex = pTexRow + depthFrame.getCropOriginX();
            //chord_temp = 0;
            for (int x = 0; x < depthFrame.getWidth(); ++x, ++pDepth, ++pTex)
            {
                if (*pDepth != 0)
                {
                    factor[0] = Colors[colorCount][0];
                    factor[1] = Colors[colorCount][1];
                    factor[2] = Colors[colorCount][2];
                    int nHistValue = m_pDepthHist[*pDepth];
                    pTex->r = nHistValue * factor[0];
                    pTex->g = nHistValue * factor[1];
                    pTex->b = nHistValue * factor[2];
                    factor[0] = factor[1] = factor[2] = 1;

                    if (*pDepth <= 800)
                    {
                        //glColor3f(1,0,0);
                        glColor3f(float(*pDepth)/2000, float(*pDepth)/2000, float(*pDepth)/2000);
                        av_x = x + av_x;
                        counter++;
                        av_y = y + av_y;
                    }
                    else
                    {
                        glColor3f(float(*pDepth)/2000, float(*pDepth)/2000, float(*pDepth)/2000);
                    }
                    glVertex3f(2*x, 2*y, -*pDepth);
                }
            }

            pDepthRow += rowSize;
            pTexRow += m_nTexMapX;
        }
        //......... (remaining code omitted) .........