

C++ IFrameDescription Class Code Examples

This article collects typical usage examples of the C++ IFrameDescription class. If you are wondering what exactly IFrameDescription does, how to use it, or what working examples look like, the curated class examples below may help.


A total of 15 code examples of the IFrameDescription class are shown below, sorted by popularity by default. You can upvote the examples you like or find useful; your feedback helps the system recommend better C++ code examples.
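
Before the examples themselves, here is a minimal, self-contained sketch of the pattern that nearly every snippet below follows: obtain a frame source from the sensor, ask it for an IFrameDescription, query the frame geometry, and release every COM interface on all paths. The helper name QueryDepthFrameSize and its error handling are illustrative assumptions for this article, not code taken from any of the projects cited below.

#include <Kinect.h> // Kinect for Windows SDK 2.0

// Hypothetical helper: query the width and height of a sensor's depth stream.
// Every COM interface acquired here is released before returning.
HRESULT QueryDepthFrameSize(IKinectSensor* pSensor, int* pWidth, int* pHeight)
{
	IDepthFrameSource* pSource = nullptr;
	IFrameDescription* pDescription = nullptr;

	HRESULT hr = pSensor->get_DepthFrameSource(&pSource);
	if (SUCCEEDED(hr))
	{
		hr = pSource->get_FrameDescription(&pDescription);
	}
	if (SUCCEEDED(hr))
	{
		hr = pDescription->get_Width(pWidth);
	}
	if (SUCCEEDED(hr))
	{
		hr = pDescription->get_Height(pHeight);
	}

	if (pDescription) pDescription->Release();
	if (pSource) pSource->Release();
	return hr;
}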

Example 1: KinectColor::KinectColor

KinectColor::KinectColor(IKinectSensor *m_pKinectSensor) {
	width = new int();
	height = new int();
	bufferSize = new unsigned int();

	HRESULT hr;
	IColorFrameSource* pColorFrameSource = NULL;

	hr = m_pKinectSensor->get_ColorFrameSource(&pColorFrameSource);

	if (SUCCEEDED(hr))
	{
		hr = pColorFrameSource->OpenReader(&m_pColorFrameReader);
	}

	IFrameDescription* pDescription = NULL;
	if (SUCCEEDED(hr))
	{
		hr = pColorFrameSource->get_FrameDescription(&pDescription);
	}

	if (SUCCEEDED(hr))
	{
		pDescription->get_Width(width);
		pDescription->get_Height(height);
		*bufferSize = *width * *height * 4 * sizeof(unsigned char);

		bufferMat = new cv::Mat(*height, *width, CV_8UC4);
		colorMat = new cv::Mat(HEIGHT, WIDTH, CV_8UC4);

		memset(&baseImage, 0, sizeof(BASEIMAGE));
		CreateXRGB8ColorData(&baseImage.ColorData);
		baseImage.MipMapCount = 0;
		handle = -1;
	}

	SafeRelease(pDescription);
	SafeRelease(pColorFrameSource);
}
Developer: Kousuke-N, Project: Mai-Kagami, Lines: 35, Source: KinectColor.cpp

Example 2: BodyIndexFrameArrived

	void Microsoft2Grabber::BodyIndexFrameArrived(IBodyIndexFrameReference* pBodyIndexFrameReference) {
		IBodyIndexFrame* pBodyIndexFrame = NULL;
		HRESULT hr = pBodyIndexFrameReference->AcquireFrame(&pBodyIndexFrame);
		if(FAILED(hr))
			return;
		//cout << "got a body index frame" << endl;
		IFrameDescription* pBodyIndexFrameDescription = NULL;
		int nBodyIndexWidth = 0;
		int nBodyIndexHeight = 0;
		UINT nBodyIndexBufferSize = 0;
		BYTE *pBodyIndexBuffer = NULL;

		// get body index frame data
		if (SUCCEEDED(hr)) {
			hr = pBodyIndexFrame->get_FrameDescription(&pBodyIndexFrameDescription);
		}
		if (SUCCEEDED(hr)) {
			hr = pBodyIndexFrameDescription->get_Width(&nBodyIndexWidth);
		}
		if (SUCCEEDED(hr)) {
			hr = pBodyIndexFrameDescription->get_Height(&nBodyIndexHeight);
		}
		if (SUCCEEDED(hr)) {
			hr = pBodyIndexFrame->AccessUnderlyingBuffer(&nBodyIndexBufferSize, &pBodyIndexBuffer);            
		}
		SafeRelease(pBodyIndexFrameDescription);
		SafeRelease(pBodyIndexFrame);
	}
Developer: steevo87, Project: PCL_Kinect2SDK, Lines: 28, Source: Microsoft_grabber2.cpp

Example 3: initDepthFrameReader

HRESULT KinectHDFaceGrabber::initDepthFrameReader()
{
	
	IDepthFrameSource* depthFrameSource = nullptr;
	
	HRESULT hr = m_pKinectSensor->get_DepthFrameSource(&depthFrameSource);
	
	IFrameDescription* frameDescription = nullptr;
	if (SUCCEEDED(hr)){
		hr = depthFrameSource->get_FrameDescription(&frameDescription);
	}

	if (SUCCEEDED(hr)){
		hr = frameDescription->get_Width(&m_depthWidth);
	}

	if (SUCCEEDED(hr)){
		hr = frameDescription->get_Height(&m_depthHeight);
	}

	if (SUCCEEDED(hr)){
		m_depthBuffer.resize(m_depthHeight * m_depthWidth);
	}

	SafeRelease(frameDescription);
	if (SUCCEEDED(hr)){
		hr = depthFrameSource->OpenReader(&m_pDepthFrameReader);
	}

	SafeRelease(depthFrameSource);
	return hr;
}
Developer: sudutgalau, Project: FaceGrabber, Lines: 32, Source: KinectHDFaceGrabber.cpp

Example 4: initColorFrameReader

HRESULT KinectHDFaceGrabber::initColorFrameReader()
{
	IColorFrameSource* pColorFrameSource = nullptr;
	HRESULT hr = m_pKinectSensor->get_ColorFrameSource(&pColorFrameSource);
	
	if (SUCCEEDED(hr)){
		hr = pColorFrameSource->OpenReader(&m_pColorFrameReader);
	}

	IFrameDescription* pFrameDescription = nullptr;
	if (SUCCEEDED(hr))
	{
		hr = pColorFrameSource->get_FrameDescription(&pFrameDescription);
	}

	if (SUCCEEDED(hr))
	{
		hr = pFrameDescription->get_Width(&m_colorWidth);
	}

	if (SUCCEEDED(hr))
	{
		hr = pFrameDescription->get_Height(&m_colorHeight);
	}

	if (SUCCEEDED(hr)){
		m_colorBuffer.resize(m_colorHeight * m_colorWidth);
	}

	SafeRelease(pFrameDescription);
	SafeRelease(pColorFrameSource);
	
	return hr;
}
Developer: sudutgalau, Project: FaceGrabber, Lines: 34, Source: KinectHDFaceGrabber.cpp

Example 5: GetColorFrame

void KinectCapture::GetColorFrame(IMultiSourceFrame* pMultiFrame)
{
	IColorFrameReference* pColorFrameReference = NULL;
	IColorFrame* pColorFrame = NULL;
	HRESULT hr = pMultiFrame->get_ColorFrameReference(&pColorFrameReference);
	if (SUCCEEDED(hr))
	{
		hr = pColorFrameReference->AcquireFrame(&pColorFrame);
	}

	if (SUCCEEDED(hr))
	{
		if (pColorRGBX == NULL)
		{
			IFrameDescription* pFrameDescription = NULL;
			hr = pColorFrame->get_FrameDescription(&pFrameDescription);
			hr = pFrameDescription->get_Width(&nColorFrameWidth);
			hr = pFrameDescription->get_Height(&nColorFrameHeight);
			pColorRGBX = new RGB[nColorFrameWidth * nColorFrameHeight];
			SafeRelease(pFrameDescription);
		}

		UINT nBufferSize = nColorFrameWidth * nColorFrameHeight * sizeof(RGB);
		hr = pColorFrame->CopyConvertedFrameDataToArray(nBufferSize, reinterpret_cast<BYTE*>(pColorRGBX), ColorImageFormat_Bgra);
	}

	SafeRelease(pColorFrame);
	SafeRelease(pColorFrameReference);
}
Developer: caomw, Project: opencv-rgbd, Lines: 26, Source: kinectCapture.cpp

Example 6: GetDepthFrame

void KinectCapture::GetDepthFrame(IMultiSourceFrame* pMultiFrame)
{
	IDepthFrameReference* pDepthFrameReference = NULL;
	IDepthFrame* pDepthFrame = NULL;
	HRESULT hr = pMultiFrame->get_DepthFrameReference(&pDepthFrameReference);
	if (SUCCEEDED(hr))
	{
		hr = pDepthFrameReference->AcquireFrame(&pDepthFrame);
	}

	if (SUCCEEDED(hr))
	{
		if (pDepth == NULL)
		{
			IFrameDescription* pFrameDescription = NULL;
			hr = pDepthFrame->get_FrameDescription(&pFrameDescription);
			pFrameDescription->get_Width(&nDepthFrameWidth);
			pFrameDescription->get_Height(&nDepthFrameHeight);
			pDepth = new UINT16[nDepthFrameHeight * nDepthFrameWidth];
			SafeRelease(pFrameDescription);
		}

		UINT nBufferSize = nDepthFrameHeight * nDepthFrameWidth;
		hr = pDepthFrame->CopyFrameDataToArray(nBufferSize, pDepth);
	}

	SafeRelease(pDepthFrame);
	SafeRelease(pDepthFrameReference);
}
Developer: caomw, Project: opencv-rgbd, Lines: 26, Source: kinectCapture.cpp

Example 7: update

cv::Mat capKinect::update(cv::Mat& depth_show)
{
	if (!m_pDepthReader) return cv::Mat();
	IDepthFrame* pDepthFrame = NULL;

	HRESULT hr = m_pDepthReader->AcquireLatestFrame(&pDepthFrame);
	cv::Mat re;

	if (SUCCEEDED(hr))
	{
		IFrameDescription* pFrameDescription = NULL;
		int nWidth = 0;
		int nHeight = 0;
		USHORT nDepthMinReliableDistance = 0;
		USHORT nDepthMaxDistance = 0;
		UINT nBufferSize = 0;
		UINT16 *pBuffer = NULL;
		if (SUCCEEDED(hr))
		{
			hr = pDepthFrame->get_FrameDescription(&pFrameDescription);
		}
		if (SUCCEEDED(hr))
		{
			hr = pFrameDescription->get_Width(&nWidth);
		}
		if (SUCCEEDED(hr))
		{
			hr = pFrameDescription->get_Height(&nHeight);
		}
		if (SUCCEEDED(hr))
		{
			hr = pDepthFrame->get_DepthMinReliableDistance(&nDepthMinReliableDistance);
		}
		if (SUCCEEDED(hr))
		{
			// In order to see the full range of depth (including the less reliable far-field depth)
			// we set nDepthMaxDistance to the maximum representable depth value.
			nDepthMaxDistance = USHRT_MAX;

			// Note: if you wish to filter by reliable depth distance instead, uncomment the following line.
			// hr = pDepthFrame->get_DepthMaxReliableDistance(&nDepthMaxDistance);
		}

		if (SUCCEEDED(hr))
		{
			hr = pDepthFrame->AccessUnderlyingBuffer(&nBufferSize, &pBuffer);
		}

		if (SUCCEEDED(hr))
		{
			re=capture(pBuffer, nWidth, nHeight, depth_show, nDepthMinReliableDistance, nDepthMaxDistance);
		}

		if(pFrameDescription)SafeRelease(pFrameDescription);
	}

	if(pDepthFrame)SafeRelease(pDepthFrame);
	return re;
}
Developer: CVPaul, Project: RealSenseVsKinect, Lines: 59, Source: KinectForWindows.cpp

Example 8: getFrameDescription

IFrameDescription* Kinect2StreamImpl::getFrameDescription(OniSensorType sensorType)
{
  if (!m_pKinectSensor) {
    return NULL;
  }

  IFrameDescription* frameDescription = NULL;
  if (sensorType == ONI_SENSOR_COLOR) {
    IColorFrameSource* frameSource = NULL;
    HRESULT hr = m_pKinectSensor->get_ColorFrameSource(&frameSource);
    if (SUCCEEDED(hr)) {
      hr = frameSource->get_FrameDescription(&frameDescription);
      if (FAILED(hr) && frameDescription) {
        frameDescription->Release();
        frameDescription = NULL;
      }
    }
    if (frameSource) {
      frameSource->Release();
    }
  }
  else if (sensorType == ONI_SENSOR_DEPTH) {
    IDepthFrameSource* frameSource = NULL;
    HRESULT hr = m_pKinectSensor->get_DepthFrameSource(&frameSource);
    if (SUCCEEDED(hr)) {
      hr = frameSource->get_FrameDescription(&frameDescription);
      if (FAILED(hr) && frameDescription) {
        frameDescription->Release();
        frameDescription = NULL;
      }
    }
    if (frameSource) {
      frameSource->Release();
    }
  }
  else { // ONI_SENSOR_IR
    IInfraredFrameSource* frameSource = NULL;
    HRESULT hr = m_pKinectSensor->get_InfraredFrameSource(&frameSource);
    if (SUCCEEDED(hr)) {
      hr = frameSource->get_FrameDescription(&frameDescription);
      if (FAILED(hr) && frameDescription) {
        frameDescription->Release();
        frameDescription = NULL;
      }
    }
    if (frameSource) {
      frameSource->Release();
    }
  }

  return frameDescription;
}
Developer: mvm9289, Project: openni2_kinect2_driver, Lines: 52, Source: Kinect2StreamImpl.cpp
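
A side note on Example 8: its three branches repeat an identical acquire-and-release sequence and differ only in the frame-source interface. Under the assumption that nothing else varies between the branches, a small function template could factor out the shared logic; the DescriptionFromSource helper below is a hypothetical sketch, not part of the openni2_kinect2_driver code.

// Hypothetical helper: collapses the per-sensor branches of Example 8.
// Works for IColorFrameSource, IDepthFrameSource and IInfraredFrameSource,
// since each of them exposes get_FrameDescription and Release.
template <typename TFrameSource>
static IFrameDescription* DescriptionFromSource(TFrameSource* frameSource)
{
  IFrameDescription* frameDescription = NULL;
  if (frameSource) {
    HRESULT hr = frameSource->get_FrameDescription(&frameDescription);
    if (FAILED(hr) && frameDescription) {
      frameDescription->Release();
      frameDescription = NULL;
    }
    frameSource->Release();
  }
  return frameDescription;
}

With such a helper, the ONI_SENSOR_COLOR branch would reduce to fetching the source with get_ColorFrameSource and passing it to DescriptionFromSource; the depth and infrared branches would follow the same shape.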

Example 9: KinectV2::KinectV2

// Constructor
KinectV2::KinectV2()
{
  // Acquire the sensor
  if (sensor == NULL && GetDefaultKinectSensor(&sensor) == S_OK)
  {
    HRESULT hr;

    // Start using the sensor
    hr = sensor->Open();
    assert(hr == S_OK);

    // Set up reading of the depth data
    IDepthFrameSource *depthSource;
    hr = sensor->get_DepthFrameSource(&depthSource);
    assert(hr == S_OK);
    hr = depthSource->OpenReader(&depthReader);
    assert(hr == S_OK);
    IFrameDescription *depthDescription;
    hr = depthSource->get_FrameDescription(&depthDescription);
    assert(hr == S_OK);
    depthSource->Release();

    // Get the size of the depth data
    depthDescription->get_Width(&depthWidth);
    depthDescription->get_Height(&depthHeight);
    depthDescription->Release();

    // Set up reading of the color data
    IColorFrameSource *colorSource;
    hr = sensor->get_ColorFrameSource(&colorSource);
    assert(hr == S_OK);
    hr = colorSource->OpenReader(&colorReader);
    assert(hr == S_OK);
    IFrameDescription *colorDescription;
    hr = colorSource->get_FrameDescription(&colorDescription);
    assert(hr == S_OK);
    colorSource->Release();

    // Get the size of the color data
    colorDescription->get_Width(&colorWidth);
    colorDescription->get_Height(&colorHeight);
    colorDescription->Release();

    // Coordinate mapping
    hr = sensor->get_CoordinateMapper(&coordinateMapper);
    assert(hr == S_OK);

    // Compute depthCount and colorCount, then create the texture and buffer objects
    makeTexture();

    // Allocate temporary memory used to compute camera coordinates from the depth data
    position = new GLfloat[depthCount][3];

    // Allocate temporary memory used to convert the color data
    color = new GLubyte[colorCount * 4];
  }
}
Developer: tokoik, Project: projection, Lines: 58, Source: KinectV2.cpp

Example 10: DepthFrameArrived

	void Microsoft2Grabber::DepthFrameArrived(IDepthFrameReference* pDepthFrameReference) {
		IDepthFrame* pDepthFrame = NULL;
		HRESULT hr = pDepthFrameReference->AcquireFrame(&pDepthFrame);
		//HRESULT hr = pDepthFrameReference->AcquireLatestFrame(&pDepthFrame);
		if(FAILED(hr))
			return;
		//cout << "got a depth frame" << endl;
		INT64 nDepthTime = 0;
		IFrameDescription* pDepthFrameDescription = NULL;
		int nDepthWidth = 0;
		int nDepthHeight = 0;
		UINT nDepthBufferSize = 0;

		// get depth frame data
		hr = pDepthFrame->get_RelativeTime(&nDepthTime);
		if (SUCCEEDED(hr)) {
			hr = pDepthFrame->get_FrameDescription(&pDepthFrameDescription);
		}
		if (SUCCEEDED(hr)) {
			hr = pDepthFrameDescription->get_Width(&nDepthWidth);
		}
		if (SUCCEEDED(hr)) {
			hr = pDepthFrameDescription->get_Height(&nDepthHeight);
		}
		if (SUCCEEDED(hr)) {
			hr = pDepthFrame->AccessUnderlyingBuffer(&nDepthBufferSize, &m_pDepthBuffer);
			//WaitForSingleObject(hDepthMutex,INFINITE);
			Mat tmp = Mat(m_depthSize, DEPTH_PIXEL_TYPE, m_pDepthBuffer, Mat::AUTO_STEP);
			MatDepth depth_img = *((MatDepth*)&(tmp.clone()));
			m_depthTime = nDepthTime;
			if (depth_image_signal_->num_slots () > 0) {
				depth_image_signal_->operator()(depth_img);
			}
			if (num_slots<sig_cb_microsoft_point_cloud_rgba>() > 0 || all_data_signal_->num_slots() > 0 || image_depth_image_signal_->num_slots() > 0) {
				//rgb_sync_.add1 (depth_img, m_depthTime);
				imageDepthOnlyImageCallback(depth_img);
			}
			
			//ReleaseMutex(hDepthMutex);
		}
		SafeRelease(pDepthFrameDescription);
		SafeRelease(pDepthFrame);
	}
Developer: steevo87, Project: PCL_Kinect2SDK, Lines: 43, Source: Microsoft_grabber2.cpp

Example 11: BodyIndexStream::open

bool BodyIndexStream::open()
{
    if (!m_Device->isOpen()) {
        ofLogWarning("ofxKinect2::BodyIndexStream") << "No ready Kinect2 found.";
        return false;
    }

    m_IsInvert = true;
    IBodyIndexFrameSource *frameSource = nullptr;
    HRESULT hr = E_FAIL;

    hr = m_Device->get().kinect2->get_BodyIndexFrameSource(&frameSource);
    if (SUCCEEDED(hr)) {
        hr = frameSource->OpenReader(&m_StreamHandle.bodyIndexFrameReader);

        if (SUCCEEDED(hr)) {
            IFrameDescription *frameDescription = nullptr;
            hr = frameSource->get_FrameDescription(&frameDescription);
            if (SUCCEEDED(hr)) {
                int resX = 0, resY = 0;
                hr = frameDescription->get_Width(&resX);
                hr = frameDescription->get_Height(&resY);
                m_Frame.mode.resolutionX = resX;
                m_Frame.mode.resolutionY = resY;
                m_Frame.width = resX;
                m_Frame.height = resY;
                m_DoubleBuffer.allocate(resX, resY, 4);

            }
            safeRelease(frameDescription);
        }
    }

    safeRelease(frameSource);
    if (FAILED(hr)) {
        ofLogWarning("ofxKinect2::BodyIndexStream") << "Can't open stream.";
        return false;
    }

    return Stream::open();
}
Developer: Furkanzmc, Project: ofxKinect2, Lines: 41, Source: ofxKinect2.cpp

Example 12: update

		//----------
		void Color::update(IColorFrame * frame) {
			this->isFrameNewFlag = true;
			IFrameDescription * frameDescription = NULL;
			try {
				//allocate pixels and texture if we need to
				if (FAILED(frame->get_FrameDescription(&frameDescription))) {
					throw Exception("Failed to get frame description");
				}

				int width, height;
				if (FAILED(frameDescription->get_Width(&width)) || FAILED(frameDescription->get_Height(&height))) {
					throw Exception("Failed to get width and height of frame");
				}
				if (width != this->pixels.getWidth() || height != this->pixels.getHeight()) {
					this->pixels.allocate(width, height, OF_IMAGE_COLOR_ALPHA);
					this->texture.allocate(this->pixels);
				}

				//update local rgba image
				if (FAILED(frame->CopyConvertedFrameDataToArray(this->pixels.size(), this->pixels.getPixels(), ColorImageFormat_Rgba))) {
					throw Exception("Couldn't pull pixel buffer");
				}
				if (this->useTexture) {
					this->texture.loadData(this->pixels);
				}

				//update yuv
				if (this->yuvPixelsEnabled) {
					if (width != this->yuvPixels.getWidth() || height != this->yuvPixels.getHeight()) {
						this->yuvPixels.allocate(width, height, OF_PIXELS_YUY2);
					}
					if (FAILED(frame->CopyRawFrameDataToArray(this->yuvPixels.size(), this->yuvPixels.getPixels()))) {
						throw Exception("Couldn't pull raw YUV pixel buffer");
					}
				}

				//update field of view
				if (FAILED(frameDescription->get_HorizontalFieldOfView(&this->horizontalFieldOfView))) {
					throw Exception("Failed to get horizonal field of view");
				}
				if (FAILED(frameDescription->get_VerticalFieldOfView(&this->verticalFieldOfView))) {
					throw Exception("Failed to get vertical field of view");
				}
				if (FAILED(frameDescription->get_DiagonalFieldOfView(&this->diagonalFieldOfView))) {
					throw Exception("Failed to get diagonal field of view");
				}

				IColorCameraSettings * cameraSettings;
				if (FAILED(frame->get_ColorCameraSettings(&cameraSettings))) {
					throw Exception("Failed to get color camera settings");
				}
				cameraSettings->get_ExposureTime(&this->exposure);
				cameraSettings->get_FrameInterval(&this->frameInterval);
				cameraSettings->get_Gain(&this->gain);
				cameraSettings->get_Gamma(&this->gamma);
			} catch (std::exception & e) {
				OFXKINECTFORWINDOWS2_ERROR << e.what();
			}
			SafeRelease(frameDescription);
		}
Developer: guozanhua218, Project: ofxKinectForWindows2, Lines: 61, Source: Color.cpp

Example 13: getVerticalFov

XnDouble Kinect2StreamImpl::getVerticalFov()
{
  IFrameDescription* frameDescription = NULL;
  if (m_sensorType == ONI_SENSOR_DEPTH && m_imageRegistrationMode == ONI_IMAGE_REGISTRATION_DEPTH_TO_COLOR) {
    frameDescription = getFrameDescription(ONI_SENSOR_COLOR);
  }
  else {
    frameDescription = getFrameDescription(m_sensorType);
  }

  if (frameDescription == NULL) {
    return 0;
  }

  float fov;
  HRESULT hr = frameDescription->get_VerticalFieldOfView(&fov);
  frameDescription->Release();
  if (FAILED(hr)) {
    return 0;
  }
  return fov;
}
Developer: mvm9289, Project: openni2_kinect2_driver, Lines: 22, Source: Kinect2StreamImpl.cpp

Example 14: ProcessNewBodyIndexFrame

void BodyIndexMulticaster::ProcessNewBodyIndexFrame(IBodyIndexFrame* frame)
{
	IFrameDescription* frameDescription = NULL;
	frame->get_FrameDescription(&frameDescription);

	int width = 0;
	int height = 0;
	frameDescription->get_Height(&height);
	frameDescription->get_Width(&width);

	UINT nBufferSize = height*width*sizeof(BYTE);

	UINT capacity;

	HRESULT hr = frame->AccessUnderlyingBuffer(&capacity, &pBuffer);
	if (FAILED(hr))
	{
		SafeRelease(frameDescription);
		return;
	}

	if (pBuffer && (width == K4W2_BODY_INDEX_WIDTH) && (height == K4W2_BODY_INDEX_HEIGHT))
	{
		// send previous frame first

		// encode current frame; it will be sent in the next cycle
		BYTE* pInput	= pBuffer;
		BYTE* pOutput	= pScaledBuffer;

		const BYTE* pEnd = pInput + (width * height);

		while (pInput < pEnd)
		{
			BYTE index = *pInput;
			*pOutput = ((signed char)index + 1)*40;
            
			++pOutput;
			++pInput;
		}
		gstSender.SendFrame((unsigned char*) pScaledBuffer, nBufferSize);
	}
	SafeRelease(frameDescription);
}
Developer: Samsung, Project: kv2streamer, Lines: 38, Source: BodyIndexMulticaster.cpp

Example 15: update

		//----------
		void Color::update() {
			CHECK_OPEN

			IColorFrame * frame = NULL;
			IFrameDescription * frameDescription = NULL;
			try {
				//acquire frame
				if (FAILED(this->reader->AcquireLatestFrame(&frame))) {
					return; // we often return here when no new frame is available
				}

				//allocate pixels and texture if we need to
				if (FAILED(frame->get_FrameDescription(&frameDescription))) {
					throw Exception("Failed to get frame description");
				}

				int width, height;
				if (FAILED(frameDescription->get_Width(&width)) || FAILED(frameDescription->get_Height(&height))) {
					throw Exception("Failed to get width and height of frame");
				}
				if (width != this->pixels.getWidth() || height != this->pixels.getHeight()) {
					this->pixels.allocate(width, height, OF_IMAGE_COLOR_ALPHA);
					this->texture.allocate(this->pixels);
				}

				//update local assets
				if (FAILED(frame->CopyConvertedFrameDataToArray(this->pixels.size(), this->pixels.getPixels(), ColorImageFormat_Rgba))) {
					throw Exception("Couldn't pull pixel buffer");
				}
				if (this->useTexture) {
					this->texture.loadData(this->pixels);
				}

				//update field of view
				if (FAILED(frameDescription->get_HorizontalFieldOfView(&this->horizontalFieldOfView))) {
					throw Exception("Failed to get horizonal field of view");
				}
				if (FAILED(frameDescription->get_VerticalFieldOfView(&this->verticalFieldOfView))) {
					throw Exception("Failed to get vertical field of view");
				}
				if (FAILED(frameDescription->get_DiagonalFieldOfView(&this->diagonalFieldOfView))) {
					throw Exception("Failed to get diagonal field of view");
				}

				IColorCameraSettings * cameraSettings;
				if (FAILED(frame->get_ColorCameraSettings(&cameraSettings))) {
					throw Exception("Failed to get color camera settings");
				}
				cameraSettings->get_ExposureTime(&this->exposure);
				cameraSettings->get_FrameInterval(&this->frameInterval);
				cameraSettings->get_Gain(&this->gain);
				cameraSettings->get_Gamma(&this->gamma);
			} catch (std::exception & e) {
				OFXKINECTFORWINDOWS2_ERROR << e.what();
			}
			SafeRelease(frameDescription);
			SafeRelease(frame);
		}
Developer: Pixformance, Project: app.ofx-kinect-for-windows-2, Lines: 59, Source: Color.cpp


Note: The IFrameDescription class examples in this article were compiled by 纯净天空 from open-source code and documentation platforms such as GitHub and MSDocs. The code snippets were selected from open-source projects contributed by many developers, and copyright in the source code remains with the original authors. Please consult each project's license before redistributing or using the code; do not repost without permission.