This page collects typical usage examples of the C++ method xn::DepthMetaData::Data. If you are wondering how DepthMetaData::Data is used in practice, the hand-picked code examples below may help. You can also explore further usage examples for the containing class, xn::DepthMetaData.
Fifteen code examples of the DepthMetaData::Data method are shown below, sorted by popularity by default. You can upvote the examples you find useful; your feedback helps surface better C++ examples.
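Before the examples, here is a minimal sketch of the typical call sequence around DepthMetaData::Data() with OpenNI 1.x, assuming an initialized context and an attached depth sensor (error handling trimmed; the variable names are illustrative, not taken from the examples below):

#include <XnCppWrapper.h>
#include <cstdio>

int main()
{
    xn::Context context;
    xn::DepthGenerator depth;
    xn::DepthMetaData depthMD;

    if (context.Init() != XN_STATUS_OK) return 1;
    if (depth.Create(context) != XN_STATUS_OK) return 1;
    context.StartGeneratingAll();

    // Block until a new depth frame is available, then read the raw buffer.
    // Data() returns a const XnDepthPixel* (16-bit depth in millimetres),
    // row-major, XRes()*YRes() pixels long; 0 means "no reading".
    context.WaitOneUpdateAll(depth);
    depth.GetMetaData(depthMD);
    const XnDepthPixel* pDepth = depthMD.Data();
    printf("center depth = %u mm\n",
           (unsigned int)pDepth[depthMD.YRes()/2 * depthMD.XRes() + depthMD.XRes()/2]);

    context.Shutdown();
    return 0;
}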
Example 1: getDepthHistgram
// Create a depth histogram
depth_hist getDepthHistgram(const xn::DepthGenerator& depth,
                            const xn::DepthMetaData& depthMD)
{
    // Compute the cumulative depth distribution (algorithm taken from NiSimpleViewer.cpp)
    const int MAX_DEPTH = depth.GetDeviceMaxDepth();
    depth_hist depthHist(MAX_DEPTH);

    unsigned int points = 0;
    const XnDepthPixel* pDepth = depthMD.Data();
    for (XnUInt y = 0; y < depthMD.YRes(); ++y) {
        for (XnUInt x = 0; x < depthMD.XRes(); ++x, ++pDepth) {
            if (*pDepth != 0) {
                depthHist[*pDepth]++;
                points++;
            }
        }
    }

    for (int i = 1; i < MAX_DEPTH; ++i) {
        depthHist[i] += depthHist[i-1];
    }

    if (points != 0) {
        for (int i = 1; i < MAX_DEPTH; ++i) {
            depthHist[i] =
                (unsigned int)(256 * (1.0f - (depthHist[i] / points)));
        }
    }

    return depthHist;
}
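As a usage note: a cumulative histogram like the one above is normally used to shade raw depth values into an 8-bit intensity image, which is exactly what Examples 11 and 13 below do. A hypothetical helper, assuming depth_hist is indexable by depth value as in Example 1:

// Hypothetical helper: shade one row of raw depth into 8-bit grayscale
// using the cumulative histogram returned by getDepthHistgram().
void shadeDepthRow(const depth_hist& hist, const XnDepthPixel* pDepth,
                   unsigned char* pGray, unsigned int width)
{
    for (unsigned int x = 0; x < width; ++x) {
        XnDepthPixel d = pDepth[x];
        // hist[d] is already scaled to 0..255: near pixels come out bright,
        // far pixels dark, and 0 (no reading) stays black.
        pGray[x] = (d != 0) ? (unsigned char)hist[d] : 0;
    }
}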
Example 2: retrieveDepthMap
IplImage* CvCapture_OpenNI::retrieveDepthMap()
{
    if( !depthMetaData.Data() )
        return 0;

    getDepthMapFromMetaData( depthMetaData, outputMaps[CV_CAP_OPENNI_DEPTH_MAP].mat, noSampleValue, shadowValue );

    return outputMaps[CV_CAP_OPENNI_DEPTH_MAP].getIplImagePtr();
}
Example 3: retrieveDisparityMap_32F
IplImage* CvCapture_OpenNI::retrieveDisparityMap_32F()
{
    if( !depthMetaData.Data() )
        return 0;

    computeDisparity_32F( depthMetaData, outputMaps[CV_CAP_OPENNI_DISPARITY_MAP_32F].mat, baseline, depthFocalLength_VGA, noSampleValue, shadowValue );

    return outputMaps[CV_CAP_OPENNI_DISPARITY_MAP_32F].getIplImagePtr();
}
Example 4: retrieveDisparityMap
IplImage* CvCapture_OpenNI::retrieveDisparityMap()
{
    if( !depthMetaData.Data() )
        return 0;

    cv::Mat disp32;
    computeDisparity_32F( depthMetaData, disp32, baseline, depthFocalLength_VGA, noSampleValue, shadowValue );
    disp32.convertTo( outputMaps[CV_CAP_OPENNI_DISPARITY_MAP].mat, CV_8UC1 );

    return outputMaps[CV_CAP_OPENNI_DISPARITY_MAP].getIplImagePtr();
}
Example 5: retrieveValidDepthMask
IplImage* CvCapture_OpenNI::retrieveValidDepthMask()
{
    if( !depthMetaData.Data() )
        return 0;

    cv::Mat depth;
    getDepthMapFromMetaData( depthMetaData, depth, noSampleValue, shadowValue );

    outputMaps[CV_CAP_OPENNI_VALID_DEPTH_MASK].mat = depth != CvCapture_OpenNI::INVALID_PIXEL_VAL;

    return outputMaps[CV_CAP_OPENNI_VALID_DEPTH_MASK].getIplImagePtr();
}
Example 6: getDepthMapFromMetaData
inline void getDepthMapFromMetaData( const xn::DepthMetaData& depthMetaData, cv::Mat& depthMap, XnUInt64 noSampleValue, XnUInt64 shadowValue )
{
    int cols = depthMetaData.XRes();
    int rows = depthMetaData.YRes();

    depthMap.create( rows, cols, CV_16UC1 );

    const XnDepthPixel* pDepthMap = depthMetaData.Data();

    // CV_Assert( sizeof(unsigned short) == sizeof(XnDepthPixel) );
    memcpy( depthMap.data, pDepthMap, cols*rows*sizeof(XnDepthPixel) );

    cv::Mat badMask = (depthMap == (double)noSampleValue) | (depthMap == (double)shadowValue) | (depthMap == 0);

    // mask the pixels with invalid depth
    depthMap.setTo( cv::Scalar::all( CvCapture_OpenNI::INVALID_PIXEL_VAL ), badMask );
}
Example 7: retrievePointCloudMap
IplImage* CvCapture_OpenNI::retrievePointCloudMap()
{
    if( !depthMetaData.Data() )
        return 0;

    cv::Mat depth;
    getDepthMapFromMetaData( depthMetaData, depth, noSampleValue, shadowValue );

    const int badPoint = INVALID_PIXEL_VAL;
    const float badCoord = INVALID_COORDINATE_VAL;
    int cols = depthMetaData.XRes(), rows = depthMetaData.YRes();
    cv::Mat pointCloud_XYZ( rows, cols, CV_32FC3, cv::Scalar::all(badPoint) );

    cv::Ptr<XnPoint3D> proj = new XnPoint3D[cols*rows];
    cv::Ptr<XnPoint3D> real = new XnPoint3D[cols*rows];
    for( int y = 0; y < rows; y++ )
    {
        for( int x = 0; x < cols; x++ )
        {
            int ind = y*cols+x;
            proj[ind].X = (float)x;
            proj[ind].Y = (float)y;
            proj[ind].Z = depth.at<unsigned short>(y, x);
        }
    }
    depthGenerator.ConvertProjectiveToRealWorld(cols*rows, proj, real);

    for( int y = 0; y < rows; y++ )
    {
        for( int x = 0; x < cols; x++ )
        {
            // Check for invalid measurements
            if( depth.at<unsigned short>(y, x) == badPoint ) // not valid
                pointCloud_XYZ.at<cv::Point3f>(y,x) = cv::Point3f( badCoord, badCoord, badCoord );
            else
            {
                int ind = y*cols+x;
                pointCloud_XYZ.at<cv::Point3f>(y,x) = cv::Point3f( real[ind].X*0.001f, real[ind].Y*0.001f, real[ind].Z*0.001f ); // from mm to meters
            }
        }
    }

    outputMaps[CV_CAP_OPENNI_POINT_CLOUD_MAP].mat = pointCloud_XYZ;

    return outputMaps[CV_CAP_OPENNI_POINT_CLOUD_MAP].getIplImagePtr();
}
Example 8: captureOne
bool DataCapture::captureOne()
{
    XnStatus rc = context_.WaitAndUpdateAll(); // want this to be WaitOneUpdateAll(RGB image)
    if( rc != XN_STATUS_OK )
    {
        std::cout << "WaitAndUpdateAll: " << xnGetStatusString(rc) << std::endl;
        return false;
    }

    // grab image
    imageGen_.GetMetaData(imageMd_);
    const XnRGB24Pixel* rgbData = imageMd_.RGB24Data();
    for( unsigned int i = 0; i < 640 * 480; ++i )
    {
        pRgbData_[3*i]     = rgbData->nRed;
        pRgbData_[3*i + 1] = rgbData->nGreen;
        pRgbData_[3*i + 2] = rgbData->nBlue;
        ++rgbData;
    }

    // grab depth image
    depthGen_.GetMetaData(depthMd_);
    const uint16_t* pDepthDataU16 = depthMd_.Data();
    for( int i = 0; i < 640 * 480; ++i )
    {
        uint16_t d = pDepthDataU16[i];
        if( d != 0 )
        {
            pDepthData_[i] = (d * 255)/2048;
        }
        else
        {
            pDepthData_[i] = 0; // should be NAN
        }
    }

    return true;
}
Example 9: setDepthHistgram
//----------------------------------------------------
// Histogram creation function
//----------------------------------------------------
void setDepthHistgram(const xn::DepthGenerator& depth, const xn::DepthMetaData& depthMD, float _pDepthHist[]){
    xnOSMemSet(_pDepthHist, 0, KINECT_MAX_DEPTH * sizeof(float)); // zero every entry of _pDepthHist

    unsigned int points = 0;
    const XnDepthPixel* pDepth = depthMD.Data();
    for (XnUInt y = 0; y < KINECT_IMAGE_HEIGHT; ++ y) {
        for (XnUInt x = 0; x < KINECT_IMAGE_WIDTH; ++ x, ++ pDepth) {
            if (*pDepth != 0) {
                _pDepthHist[*pDepth] ++;
                points ++;
            }
        }
    }

    for (int i = 1; i < KINECT_MAX_DEPTH; ++ i) {
        _pDepthHist[i] += _pDepthHist[i - 1];
    }

    if ( points != 0) {
        for (int i = 1; i < KINECT_MAX_DEPTH; ++ i) {
            _pDepthHist[i] = (unsigned int)(256 * (1.0f - (_pDepthHist[i] / points)));
        }
    }
}
Example 10: DrawDepthMapWithUsers
void SimKinect::DrawDepthMapWithUsers(const xn::DepthMetaData& dmd, const xn::SceneMetaData& smd)
{
    static bool bInitialized = false;
    //image used in opencv
    static unsigned char* pDepthTexBuf;
    static int texWidth, texHeight;

    float topLeftX;
    float topLeftY;
    float bottomRightY;
    float bottomRightX;
    float texXpos;
    float texYpos;

    if(!bInitialized)
    {
        texWidth  = getClosestPowerOfTwo(dmd.XRes());
        texHeight = getClosestPowerOfTwo(dmd.YRes());
        pDepthTexBuf = (unsigned char*)malloc(dmd.XRes()*dmd.YRes()*3*sizeof(unsigned char));
        // printf("Initializing depth texture: width = %d, height = %d\n", texWidth, texHeight);
        bInitialized = true;

        topLeftX = dmd.XRes();
        topLeftY = 0;
        bottomRightY = dmd.YRes();
        bottomRightX = 0;
        texXpos = (float)dmd.XRes()/texWidth;
        texYpos = (float)dmd.YRes()/texHeight;
    }

    unsigned int nValue = 0;
    unsigned int nHistValue = 0;
    unsigned int nIndex = 0;
    unsigned int nX = 0;
    unsigned int nY = 0;
    unsigned int nNumberOfPoints = 0;
    XnUInt16 nXRes = dmd.XRes();
    XnUInt16 nYRes = dmd.YRes();

    unsigned char* pDestImage = pDepthTexBuf;

    const XnDepthPixel* pDepth = dmd.Data();
    const XnLabel* pLabels = smd.Data();
    int* p_depth_map = depth_map;

    // Calculate the accumulative histogram
    memset(pDepthHist, 0, MAX_DEPTH*sizeof(float));
    for (nY=0; nY<nYRes; nY++)
    {
        for (nX=0; nX<nXRes; nX++)
        {
            nValue = *pDepth;
            *p_depth_map++ = nValue;
            if (nValue != 0)
            {
                pDepthHist[nValue]++;
                nNumberOfPoints++;
            }
            pDepth++;
        }
    }

    for (nIndex=1; nIndex<MAX_DEPTH; nIndex++)
    {
        pDepthHist[nIndex] += pDepthHist[nIndex-1];
    }
    if (nNumberOfPoints)
    {
        for (nIndex=1; nIndex<MAX_DEPTH; nIndex++)
        {
            pDepthHist[nIndex] = (unsigned int)(256 * (1.0f - (pDepthHist[nIndex] / nNumberOfPoints)));
        }
    }

    pDepth = dmd.Data();
    if (bDrawPixels)
    {
        // Prepare the texture map
        for (nY=0; nY<nYRes; nY++)
        {
            for (nX=0; nX < nXRes; nX++)
            {
                pDestImage[0] = 0;
                pDestImage[1] = 0;
                pDestImage[2] = 0;
                if (bDrawBackground || *pLabels != 0)
                {
                    nValue = *pDepth;
                    XnLabel label = *pLabels;
                    XnUInt32 nColorID = label % nColors;
                    if (label == 0)
                    {
                        nColorID = nColors;
                    }
                    if (nValue != 0)
                    {
//......... rest of this example omitted .........
Example 11: DrawDepthMap
void DrawDepthMap(const xn::DepthMetaData& dmd, const xn::SceneMetaData& smd, XnUserID player)
{
    static bool bInitialized = false;
    static GLuint depthTexID;
    static unsigned char* pDepthTexBuf;
    static int texWidth, texHeight;

    float topLeftX;
    float topLeftY;
    float bottomRightY;
    float bottomRightX;
    float texXpos;
    float texYpos;

    if(!bInitialized)
    {
        texWidth  = getClosestPowerOfTwo(dmd.XRes());
        texHeight = getClosestPowerOfTwo(dmd.YRes());

        // printf("Initializing depth texture: width = %d, height = %d\n", texWidth, texHeight);
        depthTexID = initTexture((void**)&pDepthTexBuf, texWidth, texHeight);

        // printf("Initialized depth texture: width = %d, height = %d\n", texWidth, texHeight);
        bInitialized = true;

        topLeftX = dmd.XRes();
        topLeftY = 0;
        bottomRightY = dmd.YRes();
        bottomRightX = 0;
        texXpos = (float)dmd.XRes()/texWidth;
        texYpos = (float)dmd.YRes()/texHeight;

        memset(texcoords, 0, 8*sizeof(float));
        texcoords[0] = texXpos, texcoords[1] = texYpos, texcoords[2] = texXpos, texcoords[7] = texYpos;
    }

    unsigned int nValue = 0;
    unsigned int nHistValue = 0;
    unsigned int nIndex = 0;
    unsigned int nX = 0;
    unsigned int nY = 0;
    unsigned int nNumberOfPoints = 0;
    XnUInt16 g_nXRes = dmd.XRes();
    XnUInt16 g_nYRes = dmd.YRes();

    unsigned char* pDestImage = pDepthTexBuf;

    const XnDepthPixel* pDepth = dmd.Data();
    const XnLabel* pLabels = smd.Data();

    // Calculate the accumulative histogram
    memset(g_pDepthHist, 0, MAX_DEPTH*sizeof(float));
    for (nY=0; nY<g_nYRes; nY++)
    {
        for (nX=0; nX<g_nXRes; nX++)
        {
            nValue = *pDepth;
            if (nValue != 0)
            {
                g_pDepthHist[nValue]++;
                nNumberOfPoints++;
            }
            pDepth++;
        }
    }

    for (nIndex=1; nIndex<MAX_DEPTH; nIndex++)
    {
        g_pDepthHist[nIndex] += g_pDepthHist[nIndex-1];
    }
    if (nNumberOfPoints)
    {
        for (nIndex=1; nIndex<MAX_DEPTH; nIndex++)
        {
            g_pDepthHist[nIndex] = (unsigned int)(256 * (1.0f - (g_pDepthHist[nIndex] / nNumberOfPoints)));
        }
    }

    pDepth = dmd.Data();
    {
        XnUInt32 nIndex = 0;
        // Prepare the texture map
        for (nY=0; nY<g_nYRes; nY++)
        {
            for (nX=0; nX < g_nXRes; nX++, nIndex++)
            {
                nValue = *pDepth;
                XnLabel label = *pLabels;
                XnUInt32 nColorID = label % nColors;
                if (label == 0)
                {
                    nColorID = nColors;
                }
                if (nValue != 0)
                {
                    nHistValue = g_pDepthHist[nValue];
//......... rest of this example omitted .........
Example 12: DumpDepthMap
void DepthMapLogger::DumpDepthMap(const xn::DepthMetaData& dmd, const xn::SceneMetaData& smd)
{
    static char name_str[20], comment_str[255];

    // Don't do anything if the h5 file is not open
    if(!p_h5_file_ || !p_frames_group_) { return; }

    // References to various bits of the HDF5 output
    H5File &file(*p_h5_file_);
    Group &frames_group(*p_frames_group_);

    // This frame's index is the number of frames we've previously saved
    hsize_t this_frame_idx = frames_group.getNumObjs();

    // Create this frame's group
    snprintf(name_str, 20, "frame_%06lld", this_frame_idx);
    snprintf(comment_str, 255, "Data for frame %lld", this_frame_idx);
    Group this_frame_group(frames_group.createGroup(name_str));
    this_frame_group.setComment(".", comment_str);

    // Create attributes for this group
    Attribute idx_attr = this_frame_group.createAttribute("idx", PredType::NATIVE_HSIZE, DataSpace());
    idx_attr.write(PredType::NATIVE_HSIZE, &this_frame_idx);

    // Create this frame's datasets
    DSetCreatPropList creat_props;
    uint16_t fill_value(0);
    creat_props.setFillValue(PredType::NATIVE_UINT16, &fill_value);
    hsize_t rows(static_cast<hsize_t>(dmd.YRes())), cols(static_cast<hsize_t>(dmd.XRes()));
    hsize_t creation_dims[2] = { rows, cols };
    hsize_t max_dims[2] = { rows, cols };
    DataSpace mem_space(2, creation_dims, max_dims);
    DataSet depth_ds(this_frame_group.createDataSet(
        "depth", PredType::NATIVE_UINT16, mem_space, creat_props));
    DataSet label_ds(this_frame_group.createDataSet(
        "label", PredType::NATIVE_UINT16, mem_space, creat_props));

    // Get depth and label buffers
    const uint16_t *p_depths = dmd.Data();
    const uint16_t *p_labels = smd.Data();

    // Write depth data
    depth_ds.write(p_depths, PredType::NATIVE_UINT16);

    // Write label data
    label_ds.write(p_labels, PredType::NATIVE_UINT16);

    // Convert non-zero depth values into 3D point positions
    XnPoint3D *pts = new XnPoint3D[rows*cols];
    uint16_t *pt_labels = new uint16_t[rows*cols];
    size_t n_pts(0);
    for(size_t depth_idx(0); depth_idx < rows*cols; ++depth_idx) {
        // Skip zero depth values
        if(p_depths[depth_idx] == 0) {
            continue;
        }

        // Store projective values
        pts[n_pts].X = depth_idx % cols;
        pts[n_pts].Y = depth_idx / cols;
        pts[n_pts].Z = p_depths[depth_idx];
        pt_labels[n_pts] = p_labels[depth_idx];

        ++n_pts;
    }
    g_DepthGenerator.ConvertProjectiveToRealWorld(n_pts, pts, pts);

    if (n_pts > 0)
    {
        // Create points dataset
        hsize_t pts_creation_dims[2] = { n_pts, 3 };
        hsize_t pts_max_dims[2] = { n_pts, 3 };
        DataSpace pts_mem_space(2, pts_creation_dims, pts_max_dims);
        DataSet pts_ds(this_frame_group.createDataSet(
            "points", PredType::NATIVE_FLOAT, pts_mem_space, creat_props));

        hsize_t pt_labels_creation_dims[1] = { n_pts };
        hsize_t pt_labels_max_dims[1] = { n_pts };
        DataSpace pt_labels_mem_space(1, pt_labels_creation_dims, pt_labels_max_dims);
        DataSet pt_labels_ds(this_frame_group.createDataSet(
            "point_labels", PredType::NATIVE_UINT16, pt_labels_mem_space, creat_props));

        // Write points data
        pts_ds.write(pts, PredType::NATIVE_FLOAT);
        pt_labels_ds.write(pt_labels, PredType::NATIVE_UINT16);
    }

    // Create groups to store detected users
    Group users_group(this_frame_group.createGroup("users"));

    // Dump each user in turn
    char strLabel[50] = "";
    XnUserID aUsers[15];
    XnUInt16 nUsers = 15;
    g_UserGenerator.GetUsers(aUsers, nUsers);
    for (int i = 0; i < nUsers; ++i)
    {
        // Create a group for this user
        snprintf(name_str, 20, "user_%02d", aUsers[i]);
        Group this_user_group(users_group.createGroup(name_str));
//......... rest of this example omitted .........
Example 13: depthMapCreating
void SceneDrawer::depthMapCreating(unsigned char *pDestImage, const xn::DepthMetaData &dmd, const xn::SceneMetaData &smd)
{
    unsigned int nNumberOfPoints = 0;
    XnUInt16 g_nXRes = dmd.XRes();
    XnUInt16 g_nYRes = dmd.YRes();
    const XnDepthPixel* pDepth = dmd.Data();
    const XnLabel* pLabels = smd.Data();
    unsigned int nValue = 0;

    static unsigned int nZRes = dmd.ZRes();
    static float* pDepthHist = (float*)malloc(nZRes * sizeof(float));

    // Calculate the accumulative histogram
    memset(pDepthHist, 0, nZRes*sizeof(float));
    for (int nY=0; nY<g_nYRes; nY++)
        for (int nX=0; nX<g_nXRes; nX++)
        {
            nValue = *pDepth;
            if (nValue != 0)
            {
                pDepthHist[nValue]++;
                nNumberOfPoints++;
            }
            pDepth++;
        }

    for (int i=1; i<nZRes; i++)
        pDepthHist[i] += pDepthHist[i-1];

    if (nNumberOfPoints)
        for (int i=1; i<nZRes; i++)
            pDepthHist[i] = (unsigned int)(256 * (1.0f - (pDepthHist[i] / nNumberOfPoints)));

    pDepth = dmd.Data();

    // Prepare the texture map
    for (int nY=0; nY<g_nYRes; nY++)
    {
        for (int nX=0; nX < g_nXRes; nX++)
        {
            pDestImage[0] = 0;
            pDestImage[1] = 0;
            pDestImage[2] = 0;
            if (drawBackground || *pLabels != 0)
            {
                nValue = *pDepth;
                XnLabel label = *pLabels;
                XnUInt32 nColorID = label % nColors;
                if (label == 0)
                    nColorID = nColors;
                if (nValue != 0)
                {
                    pDestImage[0] = pDepthHist[nValue] * Colors[nColorID][0];
                    pDestImage[1] = pDepthHist[nValue] * Colors[nColorID][1];
                    pDestImage[2] = pDepthHist[nValue] * Colors[nColorID][2];
                }
            }

            pDepth++;
            pLabels++;
            pDestImage += 3;
        }

        pDestImage += (Width - g_nXRes) * 3;
    }
}
Example 14: update
bool OpenNIVideo::update(osg::NodeVisitor* nv) {

    //this is the main function of your video plugin
    //you can either retrieve images from your video stream/camera/file
    //or communicate with a thread to synchronize and get the data out
    //the most important thing is to synchronize your data
    //and copy the result to the VideoImageStream used in this plugin
    //
    //0. you can collect some stats, for that you can use a timer
    osg::Timer t;

    {
        //1. mutex lock access to the image video stream
        OpenThreads::ScopedLock<OpenThreads::Mutex> _lock(this->getMutex());

        osg::notify(osg::DEBUG_INFO)<<"osgART::OpenNIVideo::update() get new image.."<<std::endl;

        XnStatus nRetVal = XN_STATUS_OK;

        nRetVal = context.WaitAndUpdateAll();
        CHECK_RC(nRetVal, "Update Data");

        xnFPSMarkFrame(&xnFPS);

        depth_generator.GetMetaData(depthMD);
        const XnDepthPixel* pDepthMap = depthMD.Data();
        //pDepthMap points at the raw 16-bit depth buffer

        image_generator.GetMetaData(imageMD);
        const XnUInt8* pImageMap = imageMD.Data();

        // Hybrid mode isn't supported in this sample
        if (imageMD.FullXRes() != depthMD.FullXRes() || imageMD.FullYRes() != depthMD.FullYRes())
        {
            std::cerr<<"The device depth and image resolution must be equal!"<<std::endl;
            exit(1);
        }

        // RGB is the only image format supported.
        if (imageMD.PixelFormat() != XN_PIXEL_FORMAT_RGB24)
        {
            std::cerr<<"The device image format must be RGB24"<<std::endl;
            exit(1);
        }

        const XnDepthPixel* pDepth = pDepthMap;
        const XnUInt8* pImage = pImageMap;

        XnDepthPixel zMax = depthMD.ZRes();
        //scale the 16-bit depth values down to 8-bit intensities
        for ( unsigned int i=0; i<(depthMD.XRes() * depthMD.YRes()); ++i )
        {
            *(_depthBufferByte + i) = 255 * (float(*(pDepth + i)) / float(zMax));
        }

        memcpy(_videoStreamList[0]->data(), pImage, _videoStreamList[0]->getImageSizeInBytes());
        memcpy(_videoStreamList[1]->data(), _depthBufferByte, _videoStreamList[1]->getImageSizeInBytes());

        //3. don't forget to call this to notify the rest of the application
        //that you have a new video image
        _videoStreamList[0]->dirty();
        _videoStreamList[1]->dirty();
    }

    //4. hopefully report some interesting data
    if (nv) {
        const osg::FrameStamp *framestamp = nv->getFrameStamp();
        if (framestamp && _stats.valid())
        {
            _stats->setAttribute(framestamp->getFrameNumber(),
                "Capture time taken", t.time_m());
        }
    }

    // Increase modified count every X ms to ensure tracker updates
    if (updateTimer.time_m() > 50) {
        _videoStreamList[0]->dirty();
        _videoStreamList[1]->dirty();
        updateTimer.setStartTick();
    }

    return true;
}
Example 15: DrawDepthMap
void DrawDepthMap(const xn::DepthMetaData& dmd, const xn::SceneMetaData& smd, XnUserID player, xn::ImageMetaData& imd)
{
    texWidth = 640;
    texHeight = 480;

    LEFT = 0; RIGHT = 640;
    TOP = 0; BOTTOM = 480;

    nValue = 0;
    nIndex = 0;
    nX = 0; nY = 0;
    nNumberOfPoints = 0;

    g_nXRes = dmd.XRes();
    g_nYRes = dmd.YRes();

    pDestImage = pDepthTexBuf;

    pDepth = dmd.Data();
    pixel = imd.RGB24Data();
    pLabels = smd.Data();

    // Calculate the accumulative histogram
    memset(g_pDepthHist, 0, MAX_DEPTH*sizeof(float));
    for (nY=0; nY<g_nYRes; nY++)
    {
        for (nX=0; nX<g_nXRes; nX++)
        {
            nValue = *pDepth;
            if (nValue != 0)
            {
                g_pDepthHist[nValue]++;
                nNumberOfPoints++;
            }
            pDepth++;
        }
    }

    for (nIndex=1; nIndex<MAX_DEPTH; nIndex++)
    {
        g_pDepthHist[nIndex] += g_pDepthHist[nIndex-1];
    }
    if (nNumberOfPoints)
    {
        for (nIndex=1; nIndex<MAX_DEPTH; nIndex++)
        {
            g_pDepthHist[nIndex] = (unsigned int)(256 * (1.0f - (g_pDepthHist[nIndex] / nNumberOfPoints)));
        }
    }

    // printf("Debug: %i\n", focus);
    pDepth = (short unsigned int*)dmd.Data();
    ///{
    // Prepare the texture map
    for (nY=0; nY<g_nYRes; nY++)
    {
        for (nX=0; nX < g_nXRes; nX++)
        {
            nValue = *pDepth;

            if(nX == (int)centerScreen[0] && nY == (int)centerScreen[1]){
                if (calibrationMode){
                    depthVal = nValue;
                    // printf("depthVal: %i\n", depthVal);
                }
            }

            //printf("Depth: %i \n", nValue);
            label = *pLabels;
            // XnUInt32 nColorID = label % nColors;

            if (label != focus)
            {
                if(calibrationMode){
                    pDestImage[0] = pixel->nRed;
                    pDestImage[1] = pixel->nGreen;
                    pDestImage[2] = pixel->nBlue;
                    pDestImage[3] = 255;
                } else {
                    pDestImage[0] = 0;
                    pDestImage[1] = 0;
                    pDestImage[2] = 0;
                    pDestImage[3] = 0;
                }
            } else {
                pDestImage[0] = pixel->nRed;
                pDestImage[1] = pixel->nGreen;
                pDestImage[2] = pixel->nBlue;
                pDestImage[3] = 255;

                //find max/min values for width and height boundaries
                if (nX > (unsigned int)LEFT) {
                    LEFT = nX;
                }
                if (nX < (unsigned int)RIGHT) {
                    RIGHT = nX;
                }
                if (nY > (unsigned int)TOP) {
                    TOP = nY;
//......... rest of this example omitted .........