This article collects typical usage examples of the C++ method IFrameDescription::get_Width. If you are wondering how exactly to use C++ IFrameDescription::get_Width, or what it looks like in practice, the curated code samples below may help. You can also explore further usage examples of IFrameDescription, the class this method belongs to.
A total of 15 code examples of IFrameDescription::get_Width are shown below, sorted by popularity by default. You can upvote the examples you like or find useful; your feedback helps the system recommend better C++ examples.
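Before the full examples, here is a minimal sketch of the typical call pattern, assuming an already opened IKinectSensor* named sensor and the usual SafeRelease helper; variable names are illustrative and error handling is reduced to HRESULT checks:
// Minimal sketch (not one of the 15 examples below): query a frame size
// via IFrameDescription::get_Width / get_Height.
IColorFrameSource* source = nullptr;
IFrameDescription* description = nullptr;
int width = 0;
int height = 0;
if (SUCCEEDED(sensor->get_ColorFrameSource(&source)) &&
    SUCCEEDED(source->get_FrameDescription(&description)) &&
    SUCCEEDED(description->get_Width(&width)) &&
    SUCCEEDED(description->get_Height(&height)))
{
    // width and height now hold the color frame resolution (typically 1920x1080)
}
SafeRelease(description);
SafeRelease(source);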
Example 1: assert
// Constructor
KinectV2::KinectV2()
{
// Acquire the sensor
if (sensor == NULL && GetDefaultKinectSensor(&sensor) == S_OK)
{
HRESULT hr;
// Start using the sensor
hr = sensor->Open();
assert(hr == S_OK);
// Set up reading of depth data
IDepthFrameSource *depthSource;
hr = sensor->get_DepthFrameSource(&depthSource);
assert(hr == S_OK);
hr = depthSource->OpenReader(&depthReader);
assert(hr == S_OK);
IFrameDescription *depthDescription;
hr = depthSource->get_FrameDescription(&depthDescription);
assert(hr == S_OK);
depthSource->Release();
// Get the size of the depth data
depthDescription->get_Width(&depthWidth);
depthDescription->get_Height(&depthHeight);
depthDescription->Release();
// Set up reading of color data
IColorFrameSource *colorSource;
hr = sensor->get_ColorFrameSource(&colorSource);
assert(hr == S_OK);
hr = colorSource->OpenReader(&colorReader);
assert(hr == S_OK);
IFrameDescription *colorDescription;
hr = colorSource->get_FrameDescription(&colorDescription);
assert(hr == S_OK);
colorSource->Release();
// Get the size of the color data
colorDescription->get_Width(&colorWidth);
colorDescription->get_Height(&colorHeight);
colorDescription->Release();
// Coordinate mapping
hr = sensor->get_CoordinateMapper(&coordinateMapper);
assert(hr == S_OK);
// Compute depthCount and colorCount, then create the texture and buffer objects
makeTexture();
// Allocate temporary memory used when computing camera coordinates from the depth data
position = new GLfloat[depthCount][3];
// Allocate temporary memory used for converting the color data
color = new GLubyte[colorCount * 4];
}
}
Example 2: GetDepthFrame
void KinectCapture::GetDepthFrame(IMultiSourceFrame* pMultiFrame)
{
IDepthFrameReference* pDepthFrameReference = NULL;
IDepthFrame* pDepthFrame = NULL;
pMultiFrame->get_DepthFrameReference(&pDepthFrameReference);
HRESULT hr = pDepthFrameReference->AcquireFrame(&pDepthFrame);
if (SUCCEEDED(hr))
{
if (pDepth == NULL)
{
IFrameDescription* pFrameDescription = NULL;
hr = pDepthFrame->get_FrameDescription(&pFrameDescription);
pFrameDescription->get_Width(&nDepthFrameWidth);
pFrameDescription->get_Height(&nDepthFrameHeight);
pDepth = new UINT16[nDepthFrameHeight * nDepthFrameWidth];
SafeRelease(pFrameDescription);
}
UINT nBufferSize = nDepthFrameHeight * nDepthFrameWidth;
hr = pDepthFrame->CopyFrameDataToArray(nBufferSize, pDepth);
}
SafeRelease(pDepthFrame);
SafeRelease(pDepthFrameReference);
}
Example 3: update
//----------
void Color::update(IColorFrame * frame) {
this->isFrameNewFlag = true;
IFrameDescription * frameDescription = NULL;
try {
//allocate pixels and texture if we need to
if (FAILED(frame->get_FrameDescription(&frameDescription))) {
throw Exception("Failed to get frame description");
}
int width, height;
if (FAILED(frameDescription->get_Width(&width)) || FAILED(frameDescription->get_Height(&height))) {
throw Exception("Failed to get width and height of frame");
}
if (width != this->pixels.getWidth() || height != this->pixels.getHeight()) {
this->pixels.allocate(width, height, OF_IMAGE_COLOR_ALPHA);
this->texture.allocate(this->pixels);
}
//update local rgba image
if (FAILED(frame->CopyConvertedFrameDataToArray(this->pixels.size(), this->pixels.getPixels(), ColorImageFormat_Rgba))) {
throw Exception("Couldn't pull pixel buffer");
}
if (this->useTexture) {
this->texture.loadData(this->pixels);
}
//update yuv
if (this->yuvPixelsEnabled) {
if (width != this->yuvPixels.getWidth() || height != this->yuvPixels.getHeight()) {
this->yuvPixels.allocate(width, height, OF_PIXELS_YUY2);
}
if (FAILED(frame->CopyRawFrameDataToArray(this->yuvPixels.size(), this->yuvPixels.getPixels()))) {
throw Exception("Couldn't pull raw YUV pixel buffer");
}
}
//update field of view
if (FAILED(frameDescription->get_HorizontalFieldOfView(&this->horizontalFieldOfView))) {
throw Exception("Failed to get horizonal field of view");
}
if (FAILED(frameDescription->get_VerticalFieldOfView(&this->verticalFieldOfView))) {
throw Exception("Failed to get vertical field of view");
}
if (FAILED(frameDescription->get_DiagonalFieldOfView(&this->diagonalFieldOfView))) {
throw Exception("Failed to get diagonal field of view");
}
IColorCameraSettings * cameraSettings;
if (FAILED(frame->get_ColorCameraSettings(&cameraSettings))) {
throw Exception("Failed to get color camera settings");
}
cameraSettings->get_ExposureTime(&this->exposure);
cameraSettings->get_FrameInterval(&this->frameInterval);
cameraSettings->get_Gain(&this->gain);
cameraSettings->get_Gamma(&this->gamma);
} catch (std::exception & e) {
OFXKINECTFORWINDOWS2_ERROR << e.what();
}
SafeRelease(frameDescription);
}
Example 4: initColorFrameReader
HRESULT KinectHDFaceGrabber::initColorFrameReader()
{
IColorFrameSource* pColorFrameSource = nullptr;
HRESULT hr = m_pKinectSensor->get_ColorFrameSource(&pColorFrameSource);
if (SUCCEEDED(hr)){
hr = pColorFrameSource->OpenReader(&m_pColorFrameReader);
}
IFrameDescription* pFrameDescription = nullptr;
if (SUCCEEDED(hr))
{
hr = pColorFrameSource->get_FrameDescription(&pFrameDescription);
}
if (SUCCEEDED(hr))
{
hr = pFrameDescription->get_Width(&m_colorWidth);
}
if (SUCCEEDED(hr))
{
hr = pFrameDescription->get_Height(&m_colorHeight);
}
if (SUCCEEDED(hr)){
m_colorBuffer.resize(m_colorHeight * m_colorWidth);
}
SafeRelease(pFrameDescription);
SafeRelease(pColorFrameSource);
return hr;
}
Example 5:
void Microsoft2Grabber::BodyIndexFrameArrived(IBodyIndexFrameReference* pBodyIndexFrameReference) {
IBodyIndexFrame* pBodyIndexFrame = NULL;
HRESULT hr = pBodyIndexFrameReference->AcquireFrame(&pBodyIndexFrame);
if(FAILED(hr))
return;
//cout << "got a body index frame" << endl;
IFrameDescription* pBodyIndexFrameDescription = NULL;
int nBodyIndexWidth = 0;
int nBodyIndexHeight = 0;
UINT nBodyIndexBufferSize = 0;
BYTE *pBodyIndexBuffer = NULL;
// get body index frame data
if (SUCCEEDED(hr)) {
hr = pBodyIndexFrame->get_FrameDescription(&pBodyIndexFrameDescription);
}
if (SUCCEEDED(hr)) {
hr = pBodyIndexFrameDescription->get_Width(&nBodyIndexWidth);
}
if (SUCCEEDED(hr)) {
hr = pBodyIndexFrameDescription->get_Height(&nBodyIndexHeight);
}
if (SUCCEEDED(hr)) {
hr = pBodyIndexFrame->AccessUnderlyingBuffer(&nBodyIndexBufferSize, &pBodyIndexBuffer);
}
SafeRelease(pBodyIndexFrameDescription);
SafeRelease(pBodyIndexFrame);
}
Example 6: int
KinectColor::KinectColor(IKinectSensor *m_pKinectSensor) {
width = new int();
height = new int();
bufferSize = new unsigned int();
HRESULT hr;
IColorFrameSource* pColorFrameSource = NULL;
hr = m_pKinectSensor->get_ColorFrameSource(&pColorFrameSource);
if (SUCCEEDED(hr))
{
hr = pColorFrameSource->OpenReader(&m_pColorFrameReader);
}
IFrameDescription* pDescription;
hr = pColorFrameSource->get_FrameDescription(&pDescription);
if (SUCCEEDED(hr))
{
pDescription->get_Width(width);
pDescription->get_Height(height);
*bufferSize = *width * *height * 4 * sizeof(unsigned char);
bufferMat = new cv::Mat(*height, *width, CV_8UC4);
colorMat = new cv::Mat(HEIGHT, WIDTH, CV_8UC4);
memset(&baseImage, 0, sizeof(BASEIMAGE));
CreateXRGB8ColorData(&baseImage.ColorData);
baseImage.MipMapCount = 0;
handle = -1;
}
SafeRelease(pColorFrameSource);
}
Example 7: initDepthFrameReader
HRESULT KinectHDFaceGrabber::initDepthFrameReader()
{
IDepthFrameSource* depthFrameSource = nullptr;
HRESULT hr = m_pKinectSensor->get_DepthFrameSource(&depthFrameSource);
IFrameDescription* frameDescription = nullptr;
if (SUCCEEDED(hr)){
hr = depthFrameSource->get_FrameDescription(&frameDescription);
}
if (SUCCEEDED(hr)){
hr = frameDescription->get_Width(&m_depthWidth);
}
if (SUCCEEDED(hr)){
hr = frameDescription->get_Height(&m_depthHeight);
}
if (SUCCEEDED(hr)){
m_depthBuffer.resize(m_depthHeight * m_depthWidth);
}
SafeRelease(frameDescription);
if (SUCCEEDED(hr)){
hr = depthFrameSource->OpenReader(&m_pDepthFrameReader);
}
SafeRelease(depthFrameSource);
return hr;
}
Example 8: GetColorFrame
void KinectCapture::GetColorFrame(IMultiSourceFrame* pMultiFrame)
{
IColorFrameReference* pColorFrameReference = NULL;
IColorFrame* pColorFrame = NULL;
pMultiFrame->get_ColorFrameReference(&pColorFrameReference);
HRESULT hr = pColorFrameReference->AcquireFrame(&pColorFrame);
if (SUCCEEDED(hr))
{
if (pColorRGBX == NULL)
{
IFrameDescription* pFrameDescription = NULL;
hr = pColorFrame->get_FrameDescription(&pFrameDescription);
hr = pFrameDescription->get_Width(&nColorFrameWidth);
hr = pFrameDescription->get_Height(&nColorFrameHeight);
pColorRGBX = new RGB[nColorFrameWidth * nColorFrameHeight];
SafeRelease(pFrameDescription);
}
UINT nBufferSize = nColorFrameWidth * nColorFrameHeight * sizeof(RGB);
hr = pColorFrame->CopyConvertedFrameDataToArray(nBufferSize, reinterpret_cast<BYTE*>(pColorRGBX), ColorImageFormat_Bgra);
}
SafeRelease(pColorFrame);
SafeRelease(pColorFrameReference);
}
Example 9: update
cv::Mat capKinect::update(cv::Mat& depth_show)
{
if (!m_pDepthReader) return cv::Mat();
IDepthFrame* pDepthFrame = NULL;
HRESULT hr = m_pDepthReader->AcquireLatestFrame(&pDepthFrame);
cv::Mat re;
if (SUCCEEDED(hr))
{
IFrameDescription* pFrameDescription = NULL;
int nWidth = 0;
int nHeight = 0;
USHORT nDepthMinReliableDistance = 0;
USHORT nDepthMaxDistance = 0;
UINT nBufferSize = 0;
UINT16 *pBuffer = NULL;
if (SUCCEEDED(hr))
{
hr = pDepthFrame->get_FrameDescription(&pFrameDescription);
}
if (SUCCEEDED(hr))
{
hr = pFrameDescription->get_Width(&nWidth);
}
if (SUCCEEDED(hr))
{
hr = pFrameDescription->get_Height(&nHeight);
}
if (SUCCEEDED(hr))
{
hr = pDepthFrame->get_DepthMinReliableDistance(&nDepthMinReliableDistance);
}
if (SUCCEEDED(hr))
{
// In order to see the full range of depth (including the less reliable far field depth)
// we are setting nDepthMaxDistance to the extreme potential depth threshold
nDepthMaxDistance = USHRT_MAX;
// Note: If you wish to filter by reliable depth distance, uncomment the following line.
//// hr = pDepthFrame->get_DepthMaxReliableDistance(&nDepthMaxDistance);
}
if (SUCCEEDED(hr))
{
hr = pDepthFrame->AccessUnderlyingBuffer(&nBufferSize, &pBuffer);
}
if (SUCCEEDED(hr))
{
re=capture(pBuffer, nWidth, nHeight, depth_show, nDepthMinReliableDistance, nDepthMaxDistance);
}
if(pFrameDescription)SafeRelease(pFrameDescription);
}
if(pDepthFrame)SafeRelease(pDepthFrame);
return re;
}
Example 10: update
//----------
void Color::update() {
CHECK_OPEN
IColorFrame * frame = NULL;
IFrameDescription * frameDescription = NULL;
try {
//acquire frame
if (FAILED(this->reader->AcquireLatestFrame(&frame))) {
return; // we often throw here when no new frame is available
}
//allocate pixels and texture if we need to
if (FAILED(frame->get_FrameDescription(&frameDescription))) {
throw Exception("Failed to get frame description");
}
int width, height;
if (FAILED(frameDescription->get_Width(&width)) || FAILED(frameDescription->get_Height(&height))) {
throw Exception("Failed to get width and height of frame");
}
if (width != this->pixels.getWidth() || height != this->pixels.getHeight()) {
this->pixels.allocate(width, height, OF_IMAGE_COLOR_ALPHA);
this->texture.allocate(this->pixels);
}
//update local assets
if (FAILED(frame->CopyConvertedFrameDataToArray(this->pixels.size(), this->pixels.getPixels(), ColorImageFormat_Rgba))) {
throw Exception("Couldn't pull pixel buffer");
}
if (this->useTexture) {
this->texture.loadData(this->pixels);
}
//update field of view
if (FAILED(frameDescription->get_HorizontalFieldOfView(&this->horizontalFieldOfView))) {
throw Exception("Failed to get horizonal field of view");
}
if (FAILED(frameDescription->get_VerticalFieldOfView(&this->verticalFieldOfView))) {
throw Exception("Failed to get vertical field of view");
}
if (FAILED(frameDescription->get_DiagonalFieldOfView(&this->diagonalFieldOfView))) {
throw Exception("Failed to get diagonal field of view");
}
IColorCameraSettings * cameraSettings;
if (FAILED(frame->get_ColorCameraSettings(&cameraSettings))) {
throw Exception("Failed to get color camera settings");
}
cameraSettings->get_ExposureTime(&this->exposure);
cameraSettings->get_FrameInterval(&this->frameInterval);
cameraSettings->get_Gain(&this->gain);
cameraSettings->get_Gamma(&this->gamma);
} catch (std::exception & e) {
OFXKINECTFORWINDOWS2_ERROR << e.what();
}
SafeRelease(frameDescription);
SafeRelease(frame);
}
Example 11: Mat
void Microsoft2Grabber::DepthFrameArrived(IDepthFrameReference* pDepthFrameReference) {
IDepthFrame* pDepthFrame = NULL;
HRESULT hr = pDepthFrameReference->AcquireFrame(&pDepthFrame);
//HRESULT hr = pDepthFrameReference->AcquireLatestFrame(&pDepthFrame);
if(FAILED(hr))
return;
//cout << "got a depth frame" << endl;
INT64 nDepthTime = 0;
IFrameDescription* pDepthFrameDescription = NULL;
int nDepthWidth = 0;
int nDepthHeight = 0;
UINT nDepthBufferSize = 0;
// get depth frame data
hr = pDepthFrame->get_RelativeTime(&nDepthTime);
if (SUCCEEDED(hr)) {
hr = pDepthFrame->get_FrameDescription(&pDepthFrameDescription);
}
if (SUCCEEDED(hr)) {
hr = pDepthFrameDescription->get_Width(&nDepthWidth);
}
if (SUCCEEDED(hr)) {
hr = pDepthFrameDescription->get_Height(&nDepthHeight);
}
if (SUCCEEDED(hr)) {
hr = pDepthFrame->AccessUnderlyingBuffer(&nDepthBufferSize, &m_pDepthBuffer);
//WaitForSingleObject(hDepthMutex,INFINITE);
Mat tmp = Mat(m_depthSize, DEPTH_PIXEL_TYPE, m_pDepthBuffer, Mat::AUTO_STEP);
Mat cloned = tmp.clone();
MatDepth depth_img = *((MatDepth*)&cloned);
m_depthTime = nDepthTime;
if (depth_image_signal_->num_slots () > 0) {
depth_image_signal_->operator()(depth_img);
}
if (num_slots<sig_cb_microsoft_point_cloud_rgba>() > 0 || all_data_signal_->num_slots() > 0 || image_depth_image_signal_->num_slots() > 0) {
//rgb_sync_.add1 (depth_img, m_depthTime);
imageDepthOnlyImageCallback(depth_img);
}
//ReleaseMutex(hDepthMutex);
}
SafeRelease(pDepthFrameDescription);
SafeRelease(pDepthFrame);
}
Example 12: open
bool BodyIndexStream::open()
{
if (!m_Device->isOpen()) {
ofLogWarning("ofxKinect2::BodyIndexStream") << "No ready Kinect2 found.";
return false;
}
m_IsInvert = true;
IBodyIndexFrameSource *frameSource = nullptr;
HRESULT hr = E_FAIL;
hr = m_Device->get().kinect2->get_BodyIndexFrameSource(&frameSource);
if (SUCCEEDED(hr)) {
hr = frameSource->OpenReader(&m_StreamHandle.bodyIndexFrameReader);
if (SUCCEEDED(hr)) {
IFrameDescription *frameDescription = nullptr;
hr = frameSource->get_FrameDescription(&frameDescription);
if (SUCCEEDED(hr)) {
int resX = 0, resY = 0;
hr = frameDescription->get_Width(&resX);
hr = frameDescription->get_Height(&resY);
m_Frame.mode.resolutionX = resX;
m_Frame.mode.resolutionY = resY;
m_Frame.width = resX;
m_Frame.height = resY;
m_DoubleBuffer.allocate(resX, resY, 4);
}
safeRelease(frameDescription);
}
}
safeRelease(frameSource);
if (FAILED(hr)) {
ofLogWarning("ofxKinect2::BodyIndexStream") << "Can't open stream.";
return false;
}
return Stream::open();
}
Example 13: ProcessNewBodyIndexFrame
void BodyIndexMulticaster::ProcessNewBodyIndexFrame(IBodyIndexFrame* frame)
{
IFrameDescription* frameDescription = NULL;
frame->get_FrameDescription(&frameDescription);
int width = 0;
int height = 0;
frameDescription->get_Height(&height);
frameDescription->get_Width(&width);
UINT nBufferSize = height*width*sizeof(BYTE);
UINT capacity;
HRESULT hr = frame->AccessUnderlyingBuffer(&capacity, &pBuffer);
if (FAILED(hr)) {
SafeRelease(frameDescription);
return;
}
if (pBuffer && (width == K4W2_BODY_INDEX_WIDTH) && (height == K4W2_BODY_INDEX_HEIGHT))
{
// send previous frame first
// encode the current frame; it will be sent in the next cycle
BYTE* pInput = pBuffer;
BYTE* pOutput = pScaledBuffer;
const BYTE* pEnd = pInput + (width * height);
while (pInput < pEnd)
{
BYTE index = *pInput;
*pOutput = ((signed char)index + 1)*40;
++pOutput;
++pInput;
}
gstSender.SendFrame((unsigned char*) pScaledBuffer, nBufferSize);
}
SafeRelease(frameDescription);
}
Example 14: processColor
void processColor() {
if (!device) return;
if (!m_pColorFrameReader) return;
IColorFrame* pColorFrame = NULL;
HRESULT hr = m_pColorFrameReader->AcquireLatestFrame(&pColorFrame);
if (SUCCEEDED(hr)) {
INT64 nTime = 0;
IFrameDescription* pFrameDescription = NULL;
int nWidth = 0;
int nHeight = 0;
ColorImageFormat imageFormat = ColorImageFormat_None;
UINT nBufferSize = 0;
RGBQUAD *src = NULL;
hr = pColorFrame->get_RelativeTime(&nTime);
if (SUCCEEDED(hr)) {
hr = pColorFrame->get_FrameDescription(&pFrameDescription);
}
if (SUCCEEDED(hr)) {
hr = pFrameDescription->get_Width(&nWidth);
}
if (SUCCEEDED(hr)) {
hr = pFrameDescription->get_Height(&nHeight);
}
if (SUCCEEDED(hr)) {
hr = pColorFrame->get_RawColorImageFormat(&imageFormat);
}
if (imageFormat != ColorImageFormat_Bgra)
{
if (!rgb_buffer) {
rgb_buffer = new RGBQUAD[nWidth * nHeight];
}
//post("image format %d", imageFormat);
//error("not brga");
nBufferSize = nWidth * nHeight * sizeof(RGBQUAD);
hr = pColorFrame->CopyConvertedFrameDataToArray(nBufferSize, reinterpret_cast<BYTE*>(rgb_buffer), ColorImageFormat_Rgba);
if (FAILED(hr)) {
error("failed to convert image");
return;
}
src = rgb_buffer;
}
else {
// the frame is already BGRA, so read the raw underlying buffer directly
hr = pColorFrame->AccessRawUnderlyingBuffer(&nBufferSize, reinterpret_cast<BYTE**>(&src));
}
ARGB * dst = (ARGB *)rgb_mat.back;
int cells = nWidth * nHeight;
//if (align_depth_to_color) {
for (int i = 0; i < cells; ++i) {
dst[i].r = src[i].rgbRed;
dst[i].g = src[i].rgbGreen;
dst[i].b = src[i].rgbBlue;
}
/*}
else {
// align color to depth:
//std::fill(dst, dst + cells, RGB(0, 0, 0));
for (int i = 0; i < cells; ++i) {
int c = colorCoordinates[i * 2];
int r = colorCoordinates[i * 2 + 1];
if (c >= 0 && c < KINECT_DEPTH_WIDTH
&& r >= 0 && r < KINECT_DEPTH_HEIGHT) {
// valid location: depth value:
int idx = r*KINECT_DEPTH_WIDTH + c;
dst[i].r = src[idx].r;
dst[i].g = src[idx].g;
dst[i].b = src[idx].b;
}
}
}*/
new_rgb_data = 1;
}
}
Example 15: _tmain
int _tmain( int argc, _TCHAR* argv[] )
{
cv::setUseOptimized( true );
// Sensor
IKinectSensor* pSensor;
HRESULT hResult = S_OK;
hResult = GetDefaultKinectSensor( &pSensor );
if( FAILED( hResult ) ){
std::cerr << "Error : GetDefaultKinectSensor" << std::endl;
return -1;
}
hResult = pSensor->Open( );
if( FAILED( hResult ) ){
std::cerr << "Error : IKinectSensor::Open()" << std::endl;
return -1;
}
// Source
IColorFrameSource* pColorSource;
hResult = pSensor->get_ColorFrameSource( &pColorSource );
if( FAILED( hResult ) ){
std::cerr << "Error : IKinectSensor::get_ColorFrameSource()" << std::endl;
return -1;
}
IBodyFrameSource* pBodySource;
hResult = pSensor->get_BodyFrameSource( &pBodySource );
if( FAILED( hResult ) ){
std::cerr << "Error : IKinectSensor::get_BodyFrameSource()" << std::endl;
return -1;
}
// Reader
IColorFrameReader* pColorReader;
hResult = pColorSource->OpenReader( &pColorReader );
if( FAILED( hResult ) ){
std::cerr << "Error : IColorFrameSource::OpenReader()" << std::endl;
return -1;
}
IBodyFrameReader* pBodyReader;
hResult = pBodySource->OpenReader( &pBodyReader );
if( FAILED( hResult ) ){
std::cerr << "Error : IBodyFrameSource::OpenReader()" << std::endl;
return -1;
}
// Description
IFrameDescription* pDescription;
hResult = pColorSource->get_FrameDescription( &pDescription );
if( FAILED( hResult ) ){
std::cerr << "Error : IColorFrameSource::get_FrameDescription()" << std::endl;
return -1;
}
int width = 0;
int height = 0;
pDescription->get_Width( &width ); // 1920
pDescription->get_Height( &height ); // 1080
unsigned int bufferSize = width * height * 4 * sizeof( unsigned char );
cv::Mat bufferMat( height, width, CV_8UC4 );
cv::Mat bodyMat( height / 2, width / 2, CV_8UC4 );
cv::namedWindow( "Body" );
// Color Table
cv::Vec3b color[BODY_COUNT];
color[0] = cv::Vec3b( 255, 0, 0 );
color[1] = cv::Vec3b( 0, 255, 0 );
color[2] = cv::Vec3b( 0, 0, 255 );
color[3] = cv::Vec3b( 255, 255, 0 );
color[4] = cv::Vec3b( 255, 0, 255 );
color[5] = cv::Vec3b( 0, 255, 255 );
// Coordinate Mapper
ICoordinateMapper* pCoordinateMapper;
hResult = pSensor->get_CoordinateMapper( &pCoordinateMapper );
if( FAILED( hResult ) ){
std::cerr << "Error : IKinectSensor::get_CoordinateMapper()" << std::endl;
return -1;
}
while( 1 ){
// Frame
IColorFrame* pColorFrame = nullptr;
hResult = pColorReader->AcquireLatestFrame( &pColorFrame );
if( SUCCEEDED( hResult ) ){
hResult = pColorFrame->CopyConvertedFrameDataToArray( bufferSize, reinterpret_cast<BYTE*>( bufferMat.data ), ColorImageFormat::ColorImageFormat_Bgra );
if( SUCCEEDED( hResult ) ){
cv::resize( bufferMat, bodyMat, cv::Size(), 0.5, 0.5 );
}
}
//SafeRelease( pColorFrame );
IBodyFrame* pBodyFrame = nullptr;
hResult = pBodyReader->AcquireLatestFrame( &pBodyFrame );
if( SUCCEEDED( hResult ) ){
IBody* pBody[BODY_COUNT] = { 0 };
//......... part of the code is omitted here .........