本文整理汇总了C++中IDepthFrame::get_FrameDescription方法的典型用法代码示例。如果您正苦于以下问题:C++ IDepthFrame::get_FrameDescription方法的具体用法?C++ IDepthFrame::get_FrameDescription怎么用?C++ IDepthFrame::get_FrameDescription使用的例子?那么, 这里精选的方法代码示例或许可以为您提供帮助。您也可以进一步了解该方法所在类IDepthFrame
的用法示例。
在下文中一共展示了IDepthFrame::get_FrameDescription方法的15个代码示例,这些例子默认根据受欢迎程度排序。您可以为喜欢或者感觉有用的代码点赞,您的评价将有助于系统推荐出更棒的C++代码示例。
示例1: GetDepthFrame
// Acquires the depth frame referenced by a multi-source frame and copies its
// pixels into the member buffer pDepth (lazily allocated on first frame).
// pMultiFrame: frame bundle delivered by the Kinect v2 multi-source reader.
// Fixes vs. original: the HRESULT of get_DepthFrameReference was ignored
// (NULL-pointer dereference on failure), and pFrameDescription was used
// before checking that get_FrameDescription succeeded.
void KinectCapture::GetDepthFrame(IMultiSourceFrame* pMultiFrame)
{
	IDepthFrameReference* pDepthFrameReference = NULL;
	IDepthFrame* pDepthFrame = NULL;
	HRESULT hr = pMultiFrame->get_DepthFrameReference(&pDepthFrameReference);
	if (SUCCEEDED(hr))
	{
		hr = pDepthFrameReference->AcquireFrame(&pDepthFrame);
	}
	if (SUCCEEDED(hr))
	{
		if (pDepth == NULL)
		{
			// First frame: read the sensor resolution and size the buffer once.
			IFrameDescription* pFrameDescription = NULL;
			hr = pDepthFrame->get_FrameDescription(&pFrameDescription);
			if (SUCCEEDED(hr))
			{
				pFrameDescription->get_Width(&nDepthFrameWidth);
				pFrameDescription->get_Height(&nDepthFrameHeight);
				pDepth = new UINT16[nDepthFrameHeight * nDepthFrameWidth];
			}
			SafeRelease(pFrameDescription);
		}
		if (pDepth != NULL)
		{
			// CopyFrameDataToArray takes the capacity in UINT16 elements.
			UINT nBufferSize = nDepthFrameHeight * nDepthFrameWidth;
			hr = pDepthFrame->CopyFrameDataToArray(nBufferSize, pDepth);
		}
	}
	SafeRelease(pDepthFrame);
	SafeRelease(pDepthFrameReference);
}
示例2: update
// Grabs the newest depth frame from the reader and converts it through
// capture(), which also fills depth_show with a displayable image.
// depth_show: output visualization image written by capture().
// Returns the processed depth Mat, or an empty Mat when no frame is ready.
// Fixes vs. original: removed a redundant nested SUCCEEDED(hr) re-test and
// corrected a comment that wrongly claimed a 1000 mm depth cutoff.
cv::Mat capKinect::update(cv::Mat& depth_show)
{
	if (!m_pDepthReader) return cv::Mat();
	IDepthFrame* pDepthFrame = NULL;
	HRESULT hr = m_pDepthReader->AcquireLatestFrame(&pDepthFrame);
	cv::Mat re;
	if (SUCCEEDED(hr))
	{
		IFrameDescription* pFrameDescription = NULL;
		int nWidth = 0;
		int nHeight = 0;
		USHORT nDepthMinReliableDistance = 0;
		USHORT nDepthMaxDistance = 0;
		UINT nBufferSize = 0;
		UINT16 *pBuffer = NULL;
		// hr is already a success code here, so query the description directly.
		hr = pDepthFrame->get_FrameDescription(&pFrameDescription);
		if (SUCCEEDED(hr))
		{
			hr = pFrameDescription->get_Width(&nWidth);
		}
		if (SUCCEEDED(hr))
		{
			hr = pFrameDescription->get_Height(&nHeight);
		}
		if (SUCCEEDED(hr))
		{
			hr = pDepthFrame->get_DepthMinReliableDistance(&nDepthMinReliableDistance);
		}
		if (SUCCEEDED(hr))
		{
			// Accept the sensor's full potential range so even the less
			// reliable far-field depth is kept. (The previous comment claiming
			// a 1000 mm / 1 m cutoff did not match this assignment.)
			nDepthMaxDistance = USHRT_MAX;
			// To clamp to the reliable range instead, use:
			// hr = pDepthFrame->get_DepthMaxReliableDistance(&nDepthMaxDistance);
		}
		if (SUCCEEDED(hr))
		{
			// Zero-copy view of the frame's pixel buffer; only valid while
			// pDepthFrame is alive, so capture() must not retain it.
			hr = pDepthFrame->AccessUnderlyingBuffer(&nBufferSize, &pBuffer);
		}
		if (SUCCEEDED(hr))
		{
			re = capture(pBuffer, nWidth, nHeight, depth_show, nDepthMinReliableDistance, nDepthMaxDistance);
		}
		if (pFrameDescription) SafeRelease(pFrameDescription);
	}
	if (pDepthFrame) SafeRelease(pDepthFrame);
	return re;
}
示例3: Mat
// Handles a depth-frame-arrived event: wraps the sensor's raw buffer in a
// cv::Mat, clones it so the data outlives the frame, and forwards the clone
// to the registered depth / point-cloud callbacks.
// Fixes vs. original: `*((MatDepth*)&(tmp.clone()))` took the address of a
// temporary (ill-formed standard C++; an MSVC extension) — the clone is now
// bound to a named lvalue first. The buffer is also only used when
// AccessUnderlyingBuffer actually succeeded.
void Microsoft2Grabber::DepthFrameArrived(IDepthFrameReference* pDepthFrameReference) {
	IDepthFrame* pDepthFrame = NULL;
	HRESULT hr = pDepthFrameReference->AcquireFrame(&pDepthFrame);
	if(FAILED(hr))
		return;
	INT64 nDepthTime = 0;
	IFrameDescription* pDepthFrameDescription = NULL;
	int nDepthWidth = 0;
	int nDepthHeight = 0;
	UINT nDepthBufferSize = 0;
	// get depth frame data
	hr = pDepthFrame->get_RelativeTime(&nDepthTime);
	if (SUCCEEDED(hr)) {
		hr = pDepthFrame->get_FrameDescription(&pDepthFrameDescription);
	}
	if (SUCCEEDED(hr)) {
		hr = pDepthFrameDescription->get_Width(&nDepthWidth);
	}
	if (SUCCEEDED(hr)) {
		hr = pDepthFrameDescription->get_Height(&nDepthHeight);
	}
	if (SUCCEEDED(hr)) {
		hr = pDepthFrame->AccessUnderlyingBuffer(&nDepthBufferSize, &m_pDepthBuffer);
	}
	if (SUCCEEDED(hr)) {
		//WaitForSingleObject(hDepthMutex,INFINITE);
		// Zero-copy view of the sensor buffer, then a deep copy so the image
		// remains valid after the frame is released.
		Mat tmp = Mat(m_depthSize, DEPTH_PIXEL_TYPE, m_pDepthBuffer, Mat::AUTO_STEP);
		Mat cloned = tmp.clone();
		MatDepth depth_img = *reinterpret_cast<MatDepth*>(&cloned);
		m_depthTime = nDepthTime;
		if (depth_image_signal_->num_slots () > 0) {
			depth_image_signal_->operator()(depth_img);
		}
		if (num_slots<sig_cb_microsoft_point_cloud_rgba>() > 0 || all_data_signal_->num_slots() > 0 || image_depth_image_signal_->num_slots() > 0) {
			//rgb_sync_.add1 (depth_img, m_depthTime);
			imageDepthOnlyImageCallback(depth_img);
		}
		//ReleaseMutex(hDepthMutex);
	}
	SafeRelease(pDepthFrameDescription);
	SafeRelease(pDepthFrame);
}
示例4: Process_AudioFrame
/*
bool MyKinect::Process_AudioFrame(IMultiSourceFrame * pMultiSourceFrame, bool *returnbool)
{
if (!pMultiSourceFrame) {
*returnbool = false;
return false;
}
*returnbool = false;
IDepthFrame * pDepthFrame = NULL;
IDepthFrameReference *pDepthFrameReference = NULL;
HRESULT hr = pMultiSourceFrame->get_(&pDepthFrameReference);
if (SUCCEEDED(hr))
{
hr = pDepthFrameReference->AcquireFrame(&pDepthFrame);
}
SafeRelease(pDepthFrameReference);
if (SUCCEEDED(hr))
{
IFrameDescription * pFrameDescription = NULL;
int nWidth = 0;
int nHeight = 0;
// USHORT nDepthMinReliableDistance = 0;
//USHORT nDepthMaxDistance = 0;
UINT nBufferSize = 0;
UINT16 *pBuffer = NULL;
if (SUCCEEDED(hr))
{
hr = pDepthFrame->get_FrameDescription(&pFrameDescription);
}
if (SUCCEEDED(hr))
{
hr = pFrameDescription->get_Width(&nWidth);
}
if (SUCCEEDED(hr))
{
hr = pFrameDescription->get_Height(&nHeight);
}
if (SUCCEEDED(hr))
{
hr = pDepthFrame->get_DepthMinReliableDistance(&cDepthMinReliableDistance);
}
if (SUCCEEDED(hr))
{
hr = pDepthFrame->get_DepthMaxReliableDistance(&cDepthMaxDistance);
}
if (SUCCEEDED(hr))
{//这里是将指针buffer提取出来了,没有拷贝
hr = pDepthFrame->AccessUnderlyingBuffer(&nBufferSize, &pBuffer);//这里的size是ushort而言的,memcopy是uchar来的。
}
if (SUCCEEDED(hr))
{
//int tempsize = cDepthHeight*cDepthWidth *sizeof(USHORT);
memcpy(m_pDepthBuffer, pBuffer, nBufferSize*sizeof(USHORT));
*returnbool = true;
//ProcessDepth(pBuffer, nWidth, nHeight, nDepthMinReliableDistance, nDepthMaxDistance);
}
SafeRelease(pFrameDescription);//Description 和Frame 都要释放的
}
SafeRelease(pDepthFrame);
return *returnbool;
}*/
// Extracts and processes the depth frame from a multi-source frame bundle.
// pMultiSourceFrame: frame bundle from the multi-source reader (may be NULL).
// returnbool: out-flag, set true only when depth data was processed.
// Returns the same value as *returnbool.
// NOTE(review): the body is truncated in this listing ("部分代码省略"), so the
// processing tail is not visible here.
bool MyKinect::Process_DepthFrame(IMultiSourceFrame * pMultiSourceFrame, bool *returnbool)
{
if (!pMultiSourceFrame) {
*returnbool = false;
return false;
}
*returnbool = false;
IDepthFrame * pDepthFrame = NULL;
IDepthFrameReference *pDepthFrameReference = NULL;
HRESULT hr = pMultiSourceFrame->get_DepthFrameReference(&pDepthFrameReference);
if (SUCCEEDED(hr))
{
hr = pDepthFrameReference->AcquireFrame(&pDepthFrame);
}
// The reference is only needed to acquire the frame; release it right away.
SafeRelease(pDepthFrameReference);
/*if (!SUCCEEDED(hr))
{
cout << " 深度帧丢失" << ++depthLostFrames << endl;
}*/
if (SUCCEEDED(hr))
{
IFrameDescription * pFrameDescription = NULL;
int nWidth = 0;
int nHeight = 0;
// USHORT nDepthMinReliableDistance = 0;
//USHORT nDepthMaxDistance = 0;
UINT nBufferSize = 0;
//.........这里部分代码省略.........
示例5: capture
// Blocks until a multi-source frame is available (logging every `timeout` ms
// while waiting), then acquires the depth and color sub-frames and reads
// their dimensions and raw buffers.
// pImage: destination image — the listing is truncated before it is filled.
void capture(Image::Ptr& pImage)
{
HRESULT hr;
if (m_pMultiSourceFrameReader==nullptr)
{
camera->getContext().error("CameraKinectDevice::capture: m_pMultiSourceFrameReader is nullptr\n");
// this is bad news - perhaps throw?
return; // @@@
}
IMultiSourceFrame* pMultiSourceFrame = nullptr;
IDepthFrame* pDepthFrame = nullptr;
IColorFrame* pColorFrame = nullptr;
const golem::MSecTmU32 waitStep = 1;
golem::MSecTmU32 timeWaited = 0;
golem::Sleep timer;
// Busy-wait (1 ms steps) until the reader delivers a frame; this loop never
// gives up — it only logs and resets the stopwatch on timeout.
while (FAILED(hr = m_pMultiSourceFrameReader->AcquireLatestFrame(&pMultiSourceFrame)))
{
// this is in CameraOpenNI, but suspect may be causing problem here
// if (camera->isTerminating()) return;
timer.msleep(waitStep);
timeWaited += waitStep;
if (timeWaited >= timeout)
{
camera->getContext().error("CameraKinectDevice::capture: failed to acquire frame within %d ms\n", timeout);
// keep going - don't return with nothing; reset stopwatch @@@
timeWaited = 0;
}
}
const golem::SecTmReal systemTime1 = camera->getContext().getTimer().elapsed();
// Acquire the depth sub-frame; the reference object is released immediately.
if (SUCCEEDED(hr))
{
IDepthFrameReference* pDepthFrameReference = nullptr;
hr = pMultiSourceFrame->get_DepthFrameReference(&pDepthFrameReference);
if (SUCCEEDED(hr))
{
hr = pDepthFrameReference->AcquireFrame(&pDepthFrame);
}
RELEASE_PTR(pDepthFrameReference);
}
// Acquire the color sub-frame the same way.
if (SUCCEEDED(hr))
{
IColorFrameReference* pColorFrameReference = nullptr;
hr = pMultiSourceFrame->get_ColorFrameReference(&pColorFrameReference);
if (SUCCEEDED(hr))
{
hr = pColorFrameReference->AcquireFrame(&pColorFrame);
}
RELEASE_PTR(pColorFrameReference);
}
if (SUCCEEDED(hr))
{
INT64 nDepthTime = 0;
IFrameDescription* pDepthFrameDescription = nullptr;
int nDepthWidth = 0;
int nDepthHeight = 0;
UINT nDepthBufferSize = 0;
UINT16 *pDepthBuffer = nullptr;
IFrameDescription* pColorFrameDescription = nullptr;
int nColorWidth = 0;
int nColorHeight = 0;
ColorImageFormat imageFormat = ColorImageFormat_None;
UINT nColorBufferSize = 0;
RGBQUAD *pColorBuffer = nullptr;
// get depth frame data
hr = pDepthFrame->get_RelativeTime(&nDepthTime);
if (SUCCEEDED(hr))
hr = pDepthFrame->get_FrameDescription(&pDepthFrameDescription);
if (SUCCEEDED(hr))
hr = pDepthFrameDescription->get_Width(&nDepthWidth);
if (SUCCEEDED(hr))
hr = pDepthFrameDescription->get_Height(&nDepthHeight);
if (SUCCEEDED(hr))
// AccessUnderlyingBuffer yields a zero-copy pointer into the frame.
hr = pDepthFrame->AccessUnderlyingBuffer(&nDepthBufferSize, &pDepthBuffer);
// get color frame data
if (SUCCEEDED(hr))
hr = pColorFrame->get_FrameDescription(&pColorFrameDescription);
if (SUCCEEDED(hr))
hr = pColorFrameDescription->get_Width(&nColorWidth);
if (SUCCEEDED(hr))
//.........这里部分代码省略.........
示例6: Update
//.........这里部分代码省略.........
// NOTE(review): fragment of an Update() routine — the signature and the
// remainder of the body are omitted in this listing. The visible part
// declares per-stream locals (depth, color, body-index, body) and reads the
// depth/color frame metadata.
{
// Depth
INT64 nDepthTime = 0;
IFrameDescription* pDepthFrameDescription = NULL;
int nDepthWidth = 0;
int nDepthHeight = 0;
UINT nDepthBufferSize = 0;
UINT16 *pDepthBuffer = NULL;
// Color
IFrameDescription* pColorFrameDescription = NULL;
int nColorWidth = 0;
int nColorHeight = 0;
ColorImageFormat imageFormat = ColorImageFormat_None;
UINT nColorBufferSize = 0;
RGBQUAD *pColorBuffer = NULL;
// BodyIndex
IFrameDescription* pBodyIndexFrameDescription = NULL;
int nBodyIndexWidth = 0;
int nBodyIndexHeight = 0;
UINT nBodyIndexBufferSize = 0;
BYTE *pBodyIndexBuffer = NULL;
// Body
IBody* ppBodies[BODY_COUNT] = { 0 };
// get depth frame data
hr = pDepthFrame->get_RelativeTime(&nDepthTime);
// Depth
if (SUCCEEDED(hr))
{
hr = pDepthFrame->get_FrameDescription(&pDepthFrameDescription);
}
if (SUCCEEDED(hr))
{
hr = pDepthFrameDescription->get_Width(&nDepthWidth);
}
if (SUCCEEDED(hr))
{
hr = pDepthFrameDescription->get_Height(&nDepthHeight);
}
if (SUCCEEDED(hr))
{
// Zero-copy pointer into the depth frame's pixel data.
hr = pDepthFrame->AccessUnderlyingBuffer(&nDepthBufferSize, &pDepthBuffer);
}
// get color frame data
if (SUCCEEDED(hr))
{
hr = pColorFrame->get_FrameDescription(&pColorFrameDescription);
}
if (SUCCEEDED(hr))
{
hr = pColorFrameDescription->get_Width(&nColorWidth);
}
if (SUCCEEDED(hr))
{
hr = pColorFrameDescription->get_Height(&nColorHeight);
}
示例7: update
//.........这里部分代码省略.........
// NOTE(review): fragment of an update() routine (Cinder-style Kinect wrapper)
// — both the head and tail of the function are omitted in this listing. The
// visible part declares depth/infrared locals and reads body-index and color
// frame metadata guarded by mDeviceOptions feature flags.
int32_t depthWidth = 0;
int32_t depthHeight = 0;
uint16_t depthMinReliableDistance = 0;
uint16_t depthMaxReliableDistance = 0;
uint32_t depthBufferSize = 0;
uint16_t* depthBuffer = 0;
IFrameDescription* infraredFrameDescription = 0;
int32_t infraredWidth = 0;
int32_t infraredHeight = 0;
uint32_t infraredBufferSize = 0;
uint16_t* infraredBuffer = 0;
IFrameDescription* infraredLongExposureFrameDescription = 0;
int32_t infraredLongExposureWidth = 0;
int32_t infraredLongExposureHeight = 0;
uint32_t infraredLongExposureBufferSize = 0;
uint16_t* infraredLongExposureBuffer = 0;
hr = depthFrame->get_RelativeTime( &time );
// TODO audio
if ( mDeviceOptions.isAudioEnabled() ) {
}
// TODO body
if ( mDeviceOptions.isBodyEnabled() ) {
}
if ( mDeviceOptions.isBodyIndexEnabled() ) {
if ( SUCCEEDED( hr ) ) {
hr = bodyIndexFrame->get_FrameDescription( &bodyIndexFrameDescription );
}
if ( SUCCEEDED( hr ) ) {
hr = bodyIndexFrameDescription->get_Width( &bodyIndexWidth );
}
if ( SUCCEEDED( hr ) ) {
hr = bodyIndexFrameDescription->get_Height( &bodyIndexHeight );
}
if ( SUCCEEDED( hr ) ) {
//hr = bodyIndexFrame->AccessUnderlyingBuffer( &bodyIndexBufferSize, &bodyIndexBuffer );
}
}
if ( mDeviceOptions.isColorEnabled() ) {
if ( SUCCEEDED( hr ) ) {
hr = colorFrame->get_FrameDescription( &colorFrameDescription );
}
if ( SUCCEEDED( hr ) ) {
hr = colorFrameDescription->get_Width( &colorWidth );
}
if ( SUCCEEDED( hr ) ) {
hr = colorFrameDescription->get_Height( &colorHeight );
}
if ( SUCCEEDED( hr ) ) {
hr = colorFrame->get_RawColorImageFormat( &imageFormat );
}
if ( SUCCEEDED( hr ) ) {
bool isAllocated = false;
SurfaceChannelOrder channelOrder = SurfaceChannelOrder::BGRA;
// BGRA frames can be accessed zero-copy; other formats need conversion.
if ( imageFormat == ColorImageFormat_Bgra ) {
hr = colorFrame->AccessRawUnderlyingBuffer( &colorBufferSize, reinterpret_cast<uint8_t**>( &colorBuffer ) );
channelOrder = SurfaceChannelOrder::BGRA;
} else if ( imageFormat == ColorImageFormat_Rgba ) {
示例8: UpdateDepth
//after calling this, get the depth fram with GetDepth or GetDepthRGBX
void UpdateDepth(){
if (!m_pDepthFrameReader)
{
return;
}
IDepthFrame* pDepthFrame = NULL;
HRESULT hr = m_pDepthFrameReader->AcquireLatestFrame(&pDepthFrame);
if (SUCCEEDED(hr))
{
INT64 nTime = 0;
IFrameDescription* pFrameDescription = NULL;
int nWidth = 0;
int nHeight = 0;
USHORT nDepthMinReliableDistance = 0;
USHORT nDepthMaxReliableDistance = 0;
UINT nBufferSize = 0;
UINT16 *pBuffer = NULL;
hr = pDepthFrame->get_RelativeTime(&nTime);
if (SUCCEEDED(hr))
{
hr = pDepthFrame->get_FrameDescription(&pFrameDescription);
}
if (SUCCEEDED(hr))
{
hr = pFrameDescription->get_Width(&nWidth);
}
if (SUCCEEDED(hr))
{
m_nDepthWidth = nWidth;
hr = pFrameDescription->get_Height(&nHeight);
}
if (SUCCEEDED(hr))
{
m_nDepthHeight = nHeight;
hr = pDepthFrame->get_DepthMinReliableDistance(&nDepthMinReliableDistance);
}
if (SUCCEEDED(hr))
{
hr = pDepthFrame->get_DepthMaxReliableDistance(&nDepthMaxReliableDistance);
}
if (SUCCEEDED(hr))
{
hr = pDepthFrame->AccessUnderlyingBuffer(&nBufferSize, &pBuffer);
}
if (SUCCEEDED(hr))
{
if(m_bCalculateDepthRGBX)
ProcessDepth(nTime, pBuffer, nWidth, nHeight, nDepthMinReliableDistance, nDepthMaxReliableDistance);
else
ProcessDepthNoRGBX(nTime, pBuffer, nWidth, nHeight, nDepthMinReliableDistance, nDepthMaxReliableDistance);
if(!m_bColorDepthMapCalculated){
CalculateColorDepthMap();
}
if(m_bMapDepthToColor && m_nColorWidth > 0 && m_nColorHeight > 0 && SUCCEEDED(hr) && m_bColorDepthMapCalculated){
ProcessDepthToColor(m_pDepth, m_nDepthWidth, m_nDepthHeight, m_pColorDepthMap, m_nColorWidth, m_nColorHeight);
}
}
SafeRelease(pFrameDescription);
}
else{
DumpHR(hr);
}
SafeRelease(pDepthFrame);
}
示例9: readFrame
// Reads one depth frame — from the supplied multi-source frame when given,
// otherwise directly from the stream's own reader — and copies it into
// m_Frame (allocating the pixel buffer on first use).
// multiFrame: optional multi-source frame; may be NULL.
// Returns true when a new frame was stored and forwarded to setPixels().
// Fix vs. original: get_DepthMinReliableDistance was queried twice in a row
// (copy-paste duplicate); the redundant second query is removed.
bool DepthStream::readFrame(IMultiSourceFrame *multiFrame)
{
	bool readed = false;
	if (!m_StreamHandle.depthFrameReader) {
		ofLogWarning("ofxKinect2::DepthStream") << "Stream is not open.";
		return readed;
	}
	Stream::readFrame(multiFrame);
	IDepthFrame *depthFrame = nullptr;
	HRESULT hr = E_FAIL;
	if (!multiFrame) {
		hr = m_StreamHandle.depthFrameReader->AcquireLatestFrame(&depthFrame);
	}
	else {
		IDepthFrameReference *depthFrameReference = nullptr;
		hr = multiFrame->get_DepthFrameReference(&depthFrameReference);
		if (SUCCEEDED(hr)) {
			hr = depthFrameReference->AcquireFrame(&depthFrame);
		}
		safeRelease(depthFrameReference);
	}
	if (SUCCEEDED(hr)) {
		IFrameDescription *depthFrameDescription = nullptr;
		hr = depthFrame->get_RelativeTime((INT64 *)&m_Frame.timestamp);
		if (SUCCEEDED(hr)) {
			hr = depthFrame->get_FrameDescription(&depthFrameDescription);
		}
		if (SUCCEEDED(hr)) {
			hr = depthFrameDescription->get_Width(&m_Frame.width);
		}
		if (SUCCEEDED(hr)) {
			hr = depthFrameDescription->get_Height(&m_Frame.height);
		}
		if (SUCCEEDED(hr)) {
			hr = depthFrameDescription->get_HorizontalFieldOfView(&m_Frame.horizontalFieldOfView);
		}
		if (SUCCEEDED(hr)) {
			hr = depthFrameDescription->get_VerticalFieldOfView(&m_Frame.verticalFieldOfView);
		}
		if (SUCCEEDED(hr)) {
			hr = depthFrameDescription->get_DiagonalFieldOfView(&m_Frame.diagonalFieldOfView);
		}
		if (SUCCEEDED(hr)) {
			hr = depthFrame->get_DepthMinReliableDistance((USHORT *)&m_NearValue);
		}
		if (SUCCEEDED(hr)) {
			hr = depthFrame->get_DepthMaxReliableDistance((USHORT *)&m_FarValue);
		}
		if (SUCCEEDED(hr)) {
			// Lazily allocate the pixel buffer; the sensor resolution is
			// fixed for the life of the stream.
			if (m_Frame.dataSize == 0) {
				m_Frame.dataSize = m_Frame.width * m_Frame.height;
				m_Frame.data = new UINT16[m_Frame.width * m_Frame.height];
			}
			hr = depthFrame->CopyFrameDataToArray(m_Frame.width * m_Frame.height, reinterpret_cast<UINT16 *>(m_Frame.data));
		}
		if (SUCCEEDED(hr)) {
			readed = true;
			setPixels(m_Frame);
		}
		safeRelease(depthFrameDescription);
	}
	safeRelease(depthFrame);
	return readed;
}
示例10: CreateSensorTexture
// Creates an OpenGL R32F texture backed by the Kinect depth sensor, sized
// from the first acquired depth frame, and (re)allocates the CPU staging and
// smoothing buffers to match.
// errorString: buffer of MAX_ERROR_LENGTH chars receiving a failure message.
// name: texture name registered in textureName[].
// Returns 0 on success, -1 on failure (errorString describes the cause).
// Fix vs. original: memset on cpu_depth_sensor_buffer_ counted elements, not
// bytes, so only a quarter of the float buffer was zeroed.
int TextureManager::CreateSensorTexture(char *errorString, const char *name) {
	if (!depth_frame_reader_) {
		sprintf_s(errorString, MAX_ERROR_LENGTH,
			"No depth sensor exists for texture creation");
		return -1;
	}
	glGenTextures(1, textureID + numTextures);
	strcpy_s(textureName[numTextures], TM_MAX_FILENAME_LENGTH, name);
	glBindTexture(GL_TEXTURE_2D, textureID[numTextures]);
	glTexParameterf(GL_TEXTURE_2D, GL_TEXTURE_MAG_FILTER, GL_LINEAR);
	glTexParameterf(GL_TEXTURE_2D, GL_TEXTURE_MIN_FILTER, GL_LINEAR);
	glTexParameteri(GL_TEXTURE_2D, GL_TEXTURE_WRAP_S, GL_CLAMP_TO_EDGE);
	glTexParameteri(GL_TEXTURE_2D, GL_TEXTURE_WRAP_T, GL_CLAMP_TO_EDGE);
	//gluBuild2DMipmaps(GL_TEXTURE_2D, GL_RGBA,
	//	TM_NOISE_TEXTURE_SIZE, TM_NOISE_TEXTURE_SIZE,
	//	GL_BGRA, GL_UNSIGNED_BYTE, noiseIntData);
	// The sensor may need a moment to deliver its first frame; poll for up
	// to ~2 seconds (20 tries x 100 ms) before giving up.
	IDepthFrame* pDepthFrame = NULL;
	HRESULT hr;
	bool hasSucceeded = false;
	for (int tries = 0; tries < 20 && !hasSucceeded; tries++) {
		Sleep(100);
		hr = depth_frame_reader_->AcquireLatestFrame(&pDepthFrame);
		if (SUCCEEDED(hr)) hasSucceeded = true;
	}
	if (!hasSucceeded) {
		sprintf_s(errorString, MAX_ERROR_LENGTH,
			"Could not acquire last depth sensor frame");
		return -1;
	}
	pDepthFrame->get_RelativeTime(&last_frame_time_);
	IFrameDescription* pFrameDescription = NULL;
	int nWidth = 0;
	int nHeight = 0;
	hr = pDepthFrame->get_FrameDescription(&pFrameDescription);
	if (FAILED(hr)) {
		pDepthFrame->Release();
		sprintf_s(errorString, MAX_ERROR_LENGTH,
			"Could not get Depth Frame description");
		return -1;
	}
	pFrameDescription->get_Width(&nWidth);
	pFrameDescription->get_Height(&nHeight);
	depth_sensor_width_ = nWidth;
	depth_sensor_height_ = nHeight;
	if (cpu_depth_sensor_buffer_) delete[] cpu_depth_sensor_buffer_;
	cpu_depth_sensor_buffer_ = new float[nWidth * nHeight];
	// memset counts bytes — scale by the element size (the original zeroed
	// only nWidth*nHeight bytes of a float buffer).
	memset(cpu_depth_sensor_buffer_, 0,
		nWidth*nHeight*sizeof(cpu_depth_sensor_buffer_[0]));
	if (smoothed_depth_sensor_buffer_[0]) {
		delete[] smoothed_depth_sensor_buffer_[0];
		delete[] smoothed_depth_sensor_buffer_[1];
	}
	smoothed_depth_sensor_buffer_[0] = new float[nWidth * nHeight];
	smoothed_depth_sensor_buffer_[1] = new float[nWidth * nHeight];
	memset(smoothed_depth_sensor_buffer_[0], 0,
		nWidth*nHeight*sizeof(smoothed_depth_sensor_buffer_[0][0]));
	memset(smoothed_depth_sensor_buffer_[1], 0,
		nWidth*nHeight*sizeof(smoothed_depth_sensor_buffer_[1][0]));
	glTexImage2D(GL_TEXTURE_2D, 0, GL_R32F,
		nWidth, nHeight,
		0, GL_RED, GL_FLOAT, smoothed_depth_sensor_buffer_[0]);
	textureWidth[numTextures] = nWidth;
	textureHeight[numTextures] = nHeight;
	numTextures++;
	pFrameDescription->Release();
	pDepthFrame->Release();
	return 0;
}
示例11: GetColorAndDepth
// Acquires the latest multi-source frame and extracts both the color and
// depth sub-frames, exposing the color pixels through `color`.
// color/depth/depthBuffer: out-pointers aliased to internal buffers.
// NOTE(review): the body is truncated in this listing ("部分代码省略").
// NOTE(review): hr is overwritten by get_DepthFrameReference before the
// color-acquire result is consumed, so a failed color acquire can be masked
// — the later pColorFrame != NULL guard is what catches it. Verify intended.
HRESULT KinectHandler::GetColorAndDepth(RGBQUAD* &color, RGBQUAD* &depth, UINT16*& depthBuffer)
{
if (!m_pMultiFrameReader)
{
cout << "No frame reader!" << endl;
return E_FAIL;
}
IColorFrame* pColorFrame = NULL;
IDepthFrame* pDepthFrame = NULL;
IMultiSourceFrame* pMultiSourceFrame = NULL;
HRESULT hr = m_pMultiFrameReader->AcquireLatestFrame(&pMultiSourceFrame);
if (SUCCEEDED(hr))
{
IColorFrameReference* pColorFrameReference = NULL;
hr = pMultiSourceFrame->get_ColorFrameReference(&pColorFrameReference);
if (SUCCEEDED(hr))
{
hr = pColorFrameReference->AcquireFrame(&pColorFrame);
}
IDepthFrameReference* pDepthFrameReference = NULL;
hr = pMultiSourceFrame->get_DepthFrameReference(&pDepthFrameReference);
if (SUCCEEDED(hr))
{
hr = pDepthFrameReference->AcquireFrame(&pDepthFrame);
}
SafeRelease(pColorFrameReference);
SafeRelease(pDepthFrameReference);
}
if (SUCCEEDED(hr) && pColorFrame != NULL && pDepthFrame != NULL)
{
INT64 nTime = 0;
IFrameDescription* pColorFrameDescription = NULL;
int nColorWidth = 0;
int nColorHeight = 0;
ColorImageFormat imageFormat = ColorImageFormat_None;
UINT nColorBufferSize = 0;
RGBQUAD *pColorBuffer = NULL;
hr = pColorFrame->get_RelativeTime(&nTime);
if (SUCCEEDED(hr))
{
hr = pColorFrame->get_FrameDescription(&pColorFrameDescription);
}
if (SUCCEEDED(hr))
{
hr = pColorFrameDescription->get_Width(&nColorWidth);
}
if (SUCCEEDED(hr))
{
hr = pColorFrameDescription->get_Height(&nColorHeight);
}
if (SUCCEEDED(hr))
{
hr = pColorFrame->get_RawColorImageFormat(&imageFormat);
}
if (SUCCEEDED(hr))
{
// BGRA frames are accessed zero-copy; anything else is converted into
// the preallocated m_pColorRGBX buffer.
if (imageFormat == ColorImageFormat_Bgra)
{
hr = pColorFrame->AccessRawUnderlyingBuffer(&nColorBufferSize, reinterpret_cast<BYTE**>(&pColorBuffer));
}
else if (m_pColorRGBX)
{
pColorBuffer = m_pColorRGBX;
nColorBufferSize = cColorWidth * cColorHeight * sizeof(RGBQUAD);
hr = pColorFrame->CopyConvertedFrameDataToArray(nColorBufferSize, reinterpret_cast<BYTE*>(pColorBuffer), ColorImageFormat_Bgra);
}
else
{
cout << "FAILED" << endl;
hr = E_FAIL;
}
}
if (SUCCEEDED(hr))
{
color = pColorBuffer;
}
///===========================================////
// Depth half of the extraction follows.
nTime = 0;
IFrameDescription* pDepthFrameDescription = NULL;
int nDepthWidth = 0;
int nDepthHeight = 0;
USHORT nDepthMinReliableDistance = 0;
USHORT nDepthMaxDistance = 0;
UINT nDepthBufferSize = 0;
UINT16 *pDepthBuffer = NULL;
//.........这里部分代码省略.........
示例12: GetDepthImageData
// Acquires the latest depth frame via the multi-source reader and converts
// it into an 8-bit grayscale RGBQUAD image in m_pDepthRGBX (intensity =
// depth % 256 inside the accepted range, 0 outside).
// dest: out-pointer — the listing is truncated before it is assigned.
// NOTE(review): the body is truncated in this listing ("部分代码省略").
HRESULT KinectHandler::GetDepthImageData(RGBQUAD* &dest)
{
if (!m_pMultiFrameReader)
{
cout << "No frame reader!" << endl;
return E_FAIL;
}
IDepthFrame* pDepthFrame = NULL;
IMultiSourceFrame* pMultiSourceFrame = NULL;
HRESULT hr = m_pMultiFrameReader->AcquireLatestFrame(&pMultiSourceFrame);
if (SUCCEEDED(hr))
{
IDepthFrameReference* pDepthFrameReference = NULL;
hr = pMultiSourceFrame->get_DepthFrameReference(&pDepthFrameReference);
if (SUCCEEDED(hr))
{
hr = pDepthFrameReference->AcquireFrame(&pDepthFrame);
}
SafeRelease(pDepthFrameReference);
}
if (SUCCEEDED(hr))
{
INT64 nTime = 0;
IFrameDescription* pDepthFrameDescription = NULL;
int nDepthWidth = 0;
int nDepthHeight = 0;
USHORT nDepthMinReliableDistance = 0;
USHORT nDepthMaxDistance = 0;
UINT nDepthBufferSize = 0;
UINT16 *pDepthBuffer = NULL;
hr = pDepthFrame->get_RelativeTime(&nTime);
if (SUCCEEDED(hr))
{
hr = pDepthFrame->get_FrameDescription(&pDepthFrameDescription);
}
if (SUCCEEDED(hr))
{
hr = pDepthFrameDescription->get_Width(&nDepthWidth);
}
if (SUCCEEDED(hr))
{
hr = pDepthFrameDescription->get_Height(&nDepthHeight);
}
if (SUCCEEDED(hr))
{
hr = pDepthFrame->get_DepthMinReliableDistance(&nDepthMinReliableDistance);
}
if (SUCCEEDED(hr))
{
// In order to see the full range of depth (including the less reliable far field depth)
// we are setting nDepthMaxDistance to the extreme potential depth threshold
nDepthMaxDistance = USHRT_MAX;
// Note: If you wish to filter by reliable depth distance, uncomment the following line.
//// hr = pDepthFrame->get_DepthMaxReliableDistance(&nDepthMaxDistance);
}
if (SUCCEEDED(hr))
{
hr = pDepthFrame->AccessUnderlyingBuffer(&nDepthBufferSize, &pDepthBuffer);
}
if (SUCCEEDED(hr))
{
//RGBQUAD* pRGBX = new RGBQUAD[cDepthWidth * cDepthHeight];
// end pixel is start + width*height - 1
cout << "w:" << nDepthWidth << " h:" << nDepthHeight << endl;
cout << "buffersize:" << nDepthBufferSize << endl;
const UINT16* pBufferEnd = pDepthBuffer + (nDepthWidth * nDepthHeight);
RGBQUAD* auxiliar = m_pDepthRGBX;
//const UINT16* pBufferEnd = pDepthBuffer + (640 * 480);
cout << "bufferLocation:" << pDepthBuffer << endl;
cout << "bufferend:" << pBufferEnd << endl;
int counter = 0;
while (pDepthBuffer < pBufferEnd)
{
//cout << "now:" << pDepthBuffer << " end:" << pBufferEnd << endl;
USHORT depth = *pDepthBuffer;
//cout << "now:" << pDepthBuffer << " end:" << pBufferEnd << endl;
// To convert to a byte, we're discarding the most-significant
// rather than least-significant bits.
// We're preserving detail, although the intensity will "wrap."
// Values outside the reliable depth range are mapped to 0 (black).
// Note: Using conditionals in this loop could degrade performance.
// Consider using a lookup table instead when writing production code.
BYTE intensity = static_cast<BYTE>((depth >= nDepthMinReliableDistance) && (depth <= nDepthMaxDistance) ? (depth % 256) : 0);
auxiliar->rgbBlue = intensity;
//.........这里部分代码省略.........
示例13: update
//.........这里部分代码省略.........
// NOTE(review): fragment of an update() routine — both the head and tail of
// the function are omitted in this listing. The visible part builds Body
// objects (joints + orientations) for each tracked skeleton, copies the
// body-index frame into a Channel8u, and begins reading color metadata.
hr = kinectBody->get_IsTracked( &isTracked );
if ( SUCCEEDED( hr ) && isTracked ) {
Joint joints[ JointType_Count ];
kinectBody->GetJoints( JointType_Count, joints );
JointOrientation jointOrientations[ JointType_Count ];
kinectBody->GetJointOrientations( JointType_Count, jointOrientations );
uint64_t id = 0;
kinectBody->get_TrackingId( &id );
std::map<JointType, Body::Joint> jointMap;
// Convert every SDK joint into the wrapper's Body::Joint representation.
for ( int32_t j = 0; j < JointType_Count; ++j ) {
Body::Joint joint(
toVec3f( joints[ j ].Position ),
toQuatf( jointOrientations[ j ].Orientation ),
joints[ j ].TrackingState
);
jointMap.insert( pair<JointType, Body::Joint>( static_cast<JointType>( j ), joint ) );
}
Body body( id, i, jointMap );
bodies.push_back( body );
}
}
}
}
}
if ( mDeviceOptions.isBodyIndexEnabled() ) {
if ( SUCCEEDED( hr ) ) {
hr = bodyIndexFrame->get_RelativeTime( &bodyIndexTime );
}
if ( SUCCEEDED( hr ) ) {
hr = bodyIndexFrame->get_FrameDescription( &bodyIndexFrameDescription );
}
if ( SUCCEEDED( hr ) ) {
hr = bodyIndexFrameDescription->get_Width( &bodyIndexWidth );
}
if ( SUCCEEDED( hr ) ) {
hr = bodyIndexFrameDescription->get_Height( &bodyIndexHeight );
}
if ( SUCCEEDED( hr ) ) {
hr = bodyIndexFrame->AccessUnderlyingBuffer( &bodyIndexBufferSize, &bodyIndexBuffer );
}
if ( SUCCEEDED( hr ) ) {
// Deep-copy the body-index pixels so they outlive the frame.
bodyIndexChannel = Channel8u( bodyIndexWidth, bodyIndexHeight );
memcpy( bodyIndexChannel.getData(), bodyIndexBuffer, bodyIndexWidth * bodyIndexHeight * sizeof( uint8_t ) );
}
}
if ( mDeviceOptions.isColorEnabled() ) {
if ( SUCCEEDED( hr ) ) {
hr = colorFrame->get_FrameDescription( &colorFrameDescription );
if ( SUCCEEDED( hr ) ) {
float vFov = 0.0f;
float hFov = 0.0f;
float dFov = 0.0f;
colorFrameDescription->get_VerticalFieldOfView( &vFov );
colorFrameDescription->get_HorizontalFieldOfView( &hFov );
colorFrameDescription->get_DiagonalFieldOfView( &dFov );
}
}
if ( SUCCEEDED( hr ) ) {
hr = colorFrameDescription->get_Width( &colorWidth );
}
if ( SUCCEEDED( hr ) ) {
示例14: Update
/// <summary>
/// Main processing function
/// </summary>
void CDepthBasics::Update()
{
if (!m_pDepthFrameReader)
{
return;
}
IDepthFrame* pDepthFrame = NULL;
HRESULT hrDepth = m_pDepthFrameReader->AcquireLatestFrame(&pDepthFrame);
if (SUCCEEDED(hrDepth))
{
INT64 nTime = 0;
IFrameDescription* pFrameDescription = NULL;
int nWidth = 0;
int nHeight = 0;
USHORT nDepthMinReliableDistance = 0;
USHORT nDepthMaxDistance = 0;
UINT nBufferSize = 0;
UINT16 *pBuffer = NULL;
HRESULT hr = pDepthFrame->get_RelativeTime(&nTime);
if (SUCCEEDED(hr))
{
hr = pDepthFrame->get_FrameDescription(&pFrameDescription);
}
if (SUCCEEDED(hr))
{
hr = pFrameDescription->get_Width(&nWidth);
}
if (SUCCEEDED(hr))
{
hr = pFrameDescription->get_Height(&nHeight);
}
if (SUCCEEDED(hr))
{
hr = pDepthFrame->get_DepthMinReliableDistance(&nDepthMinReliableDistance);
}
if (SUCCEEDED(hr))
{
// In order to see the full range of depth (including the less reliable far field depth)
// we are setting nDepthMaxDistance to the extreme potential depth threshold
nDepthMaxDistance = USHRT_MAX;
// Note: If you wish to filter by reliable depth distance, uncomment the following line.
//// hr = pDepthFrame->get_DepthMaxReliableDistance(&nDepthMaxDistance);
}
if (SUCCEEDED(hr))
{
hr = pDepthFrame->AccessUnderlyingBuffer(&nBufferSize, &pBuffer);
}
if (SUCCEEDED(hr))
{
ProcessDepth(nTime, pBuffer, nWidth, nHeight, nDepthMinReliableDistance, nDepthMaxDistance);
}
SafeRelease(pFrameDescription);
}
SafeRelease(pDepthFrame);
}
示例15: GetNextFrame
// Acquires the next multi-source frame and extracts the depth, color, and
// body-index sub-frames, converting the depth buffer into m_depthImage
// under hDepthMutex.
// NOTE(review): the body is truncated in this listing ("部分代码省略").
void KinectGrabber::GetNextFrame() {
if (!m_pMultiSourceFrameReader)
{
return;
}
IMultiSourceFrame* pMultiSourceFrame = NULL;
IDepthFrame* pDepthFrame = NULL;
IColorFrame* pColorFrame = NULL;
IBodyIndexFrame* pBodyIndexFrame = NULL;
HRESULT hr = m_pMultiSourceFrameReader->AcquireLatestFrame(&pMultiSourceFrame);
// Acquire each sub-frame via its reference, releasing the reference
// immediately after use.
if (SUCCEEDED(hr))
{
IDepthFrameReference* pDepthFrameReference = NULL;
hr = pMultiSourceFrame->get_DepthFrameReference(&pDepthFrameReference);
if (SUCCEEDED(hr))
{
hr = pDepthFrameReference->AcquireFrame(&pDepthFrame);
}
SafeRelease(pDepthFrameReference);
}
if (SUCCEEDED(hr))
{
IColorFrameReference* pColorFrameReference = NULL;
hr = pMultiSourceFrame->get_ColorFrameReference(&pColorFrameReference);
if (SUCCEEDED(hr))
{
hr = pColorFrameReference->AcquireFrame(&pColorFrame);
}
SafeRelease(pColorFrameReference);
}
if (SUCCEEDED(hr))
{
IBodyIndexFrameReference* pBodyIndexFrameReference = NULL;
hr = pMultiSourceFrame->get_BodyIndexFrameReference(&pBodyIndexFrameReference);
if (SUCCEEDED(hr))
{
hr = pBodyIndexFrameReference->AcquireFrame(&pBodyIndexFrame);
}
SafeRelease(pBodyIndexFrameReference);
}
if (SUCCEEDED(hr))
{
INT64 nDepthTime = 0;
IFrameDescription* pDepthFrameDescription = NULL;
int nDepthWidth = 0;
int nDepthHeight = 0;
UINT nDepthBufferSize = 0;
IFrameDescription* pColorFrameDescription = NULL;
int nColorWidth = 0;
int nColorHeight = 0;
ColorImageFormat imageFormat = ColorImageFormat_None;
UINT nColorBufferSize = 0;
RGBQUAD *pColorBuffer = NULL;
IFrameDescription* pBodyIndexFrameDescription = NULL;
int nBodyIndexWidth = 0;
int nBodyIndexHeight = 0;
UINT nBodyIndexBufferSize = 0;
BYTE *pBodyIndexBuffer = NULL;
// get depth frame data
hr = pDepthFrame->get_RelativeTime(&nDepthTime);
if (SUCCEEDED(hr))
{
hr = pDepthFrame->get_FrameDescription(&pDepthFrameDescription);
}
if (SUCCEEDED(hr))
{
hr = pDepthFrameDescription->get_Width(&nDepthWidth);
}
if (SUCCEEDED(hr))
{
hr = pDepthFrameDescription->get_Height(&nDepthHeight);
}
if (SUCCEEDED(hr))
{
//m_pDepthBuffer = new UINT16[cDepthWidth * cDepthHeight];
// Zero-copy pointer into the frame's pixel data; guarded by hDepthMutex
// while the cv::Mat view is built from it.
hr = pDepthFrame->AccessUnderlyingBuffer(&nDepthBufferSize, &m_pDepthBuffer);
//pDepthFrame->CopyFrameDataToArray(nDepthBufferSize,m_pDepthBuffer);
WaitForSingleObject(hDepthMutex,INFINITE);
m_depthImage.release();
Mat tmp = Mat(m_depthSize, DEPTH_PIXEL_TYPE, m_pDepthBuffer, Mat::AUTO_STEP);
//.........这里部分代码省略.........