

C++ UserGenerator::GetPoseDetectionCap Method Code Examples

This article collects typical code examples of the C++ method UserGenerator::GetPoseDetectionCap. If you are wondering what UserGenerator::GetPoseDetectionCap does, how to call it, or what real-world usage looks like, the hand-picked examples below should help. You can also explore further usage examples of the containing class, UserGenerator.


The following presents 9 code examples of the UserGenerator::GetPoseDetectionCap method, sorted by popularity by default.
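Before diving into the individual examples, here is a minimal sketch of the pattern they all share: confirm the pose-detection capability exists, register callbacks, start pose detection when a new user appears, and stop it once the pose has been seen so skeleton calibration can begin. The helper name SetupPoseDetection and the globals g_UserGenerator and g_strPose are illustrative only, not taken from any of the projects below; the sketch assumes an OpenNI 1.x user generator already created from an initialized xn::Context.

#include <XnCppWrapper.h>
#include <cstdio>

xn::UserGenerator g_UserGenerator;   // assumed to be created from an initialized xn::Context
XnChar g_strPose[20] = "";           // name of the calibration pose, filled in below

// A new user appeared: start watching for the calibration pose.
void XN_CALLBACK_TYPE OnNewUser(xn::UserGenerator& /*generator*/, XnUserID nId, void* /*pCookie*/)
{
	g_UserGenerator.GetPoseDetectionCap().StartPoseDetection(g_strPose, nId);
}

void XN_CALLBACK_TYPE OnLostUser(xn::UserGenerator& /*generator*/, XnUserID nId, void* /*pCookie*/)
{
	printf("Lost user %d\n", nId);
}

// The pose was detected: stop pose detection and hand the user over to calibration.
void XN_CALLBACK_TYPE OnPoseDetected(xn::PoseDetectionCapability& /*capability*/, const XnChar* strPose, XnUserID nId, void* /*pCookie*/)
{
	printf("Pose %s detected for user %d\n", strPose, nId);
	g_UserGenerator.GetPoseDetectionCap().StopPoseDetection(nId);
	g_UserGenerator.GetSkeletonCap().RequestCalibration(nId, TRUE);
}

// Hypothetical helper: wires up the callbacks once the user generator exists.
bool SetupPoseDetection()
{
	if (!g_UserGenerator.IsCapabilitySupported(XN_CAPABILITY_POSE_DETECTION))
		return false;   // this user generator cannot detect poses

	XnCallbackHandle hUserCallbacks, hPoseDetected;
	g_UserGenerator.RegisterUserCallbacks(OnNewUser, OnLostUser, NULL, hUserCallbacks);
	g_UserGenerator.GetPoseDetectionCap().RegisterToPoseDetected(OnPoseDetected, NULL, hPoseDetected);
	g_UserGenerator.GetSkeletonCap().GetCalibrationPose(g_strPose);
	return true;
}

The examples below follow this same flow, differing mainly in how they report progress (stdout, sockets) and in whether the user generator is reached through a global or through the callback cookie.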

Example 1: UserCalibration_CalibrationComplete

// Callback: Finished calibration
void XN_CALLBACK_TYPE UserCalibration_CalibrationComplete(xn::SkeletonCapability& capability, XnUserID nId, XnCalibrationStatus eStatus, void* pCookie)
{
	XnUInt32 epochTime = 0;
	xnOSGetEpochTime(&epochTime);
	if (eStatus == XN_CALIBRATION_STATUS_OK)
	{
		// Calibration succeeded
		printf("%d Calibration complete, start tracking user %d\n", epochTime, nId);		
		g_UserGenerator.GetSkeletonCap().StartTracking(nId);
	}
	else
	{
		// Calibration failed
		printf("%d Calibration failed for user %d\n", epochTime, nId);
        /*if(eStatus==XN_CALIBRATION_STATUS_MANUAL_ABORT)
        {
            printf("Manual abort occured, stop attempting to calibrate!");
            return;
        }*/
		if (g_bNeedPose)
		{
			g_UserGenerator.GetPoseDetectionCap().StartPoseDetection(g_strPose, nId);
		}
		else
		{
			g_UserGenerator.GetSkeletonCap().RequestCalibration(nId, TRUE);
		}
	}
}
Developer ID: Pajinek, Project: kinect-annotation, Lines: 30, Source file: openni.cpp

Example 2: UserPose_PoseDetected

// Callback: Detected a pose
void XN_CALLBACK_TYPE UserPose_PoseDetected(xn::PoseDetectionCapability& capability, const XnChar* strPose, XnUserID nId, void* pCookie)
{
	XnUInt32 epochTime = 0;
	xnOSGetEpochTime(&epochTime);
	printf("%d Pose %s detected for user %d\n", epochTime, strPose, nId);
	g_UserGenerator.GetPoseDetectionCap().StopPoseDetection(nId);
	g_UserGenerator.GetSkeletonCap().RequestCalibration(nId, TRUE);
}
Developer ID: Pajinek, Project: kinect-annotation, Lines: 9, Source file: openni.cpp

Example 3: UserPose_PoseDetected

void XN_CALLBACK_TYPE UserTracker::UserPose_PoseDetected(xn::PoseDetectionCapability& capability, const XnChar* strPose, XnUserID nId, void* pCookie)
{
    XnUInt32 epochTime = 0;
	xnOSGetEpochTime(&epochTime);
	//printf("%d Pose %s detected for user %d\n", epochTime, strPose, nId);
    UserGenerator *userGenerator = static_cast<xn::UserGenerator*>(pCookie);
    if(userGenerator)
    {
        userGenerator->GetPoseDetectionCap().StopPoseDetection(nId);
        userGenerator->GetSkeletonCap().RequestCalibration(nId, TRUE); 
    }
    
}
Developer ID: yongxiaofeng, Project: earthQuakeProject, Lines: 13, Source file: UserTracker.cpp

Example 4: UserPose_PoseDetected

void XN_CALLBACK_TYPE UserPose_PoseDetected(PoseDetectionCapability& capability, const XnChar* strPose, XnUserID nId, void* pCookie)
{
	if(_printUserTracking) printf("AS3OpenNI :: Pose %s detected for user: %d\n", strPose, nId);
	_userGenerator.GetPoseDetectionCap().StopPoseDetection(nId);
	_userGenerator.GetSkeletonCap().RequestCalibration(nId, true);
	
	char cValue[50];
	sprintf(cValue, "user_tracking_pose_detected:%d", nId);
	if(_useSockets) 
	{
		#if (XN_PLATFORM == XN_PLATFORM_WIN32)
			g_AS3Network.sendMessage(1,6,nId);
		#else
			sendToSocket(USER_TRACKING_SOCKET, cValue);
		#endif
	}
}
Developer ID: alfiandosengkey, Project: as3openni, Lines: 17, Source file: main.cpp

Example 5: UserCalibration_CalibrationEnd

void XN_CALLBACK_TYPE UserCalibration_CalibrationEnd(SkeletonCapability& capability, XnUserID nId, XnBool bSuccess, void* pCookie)
{
	if (bSuccess)
	{
		if(_printUserTracking) printf("AS3OpenNI :: Calibration complete, start tracking user: %d\n", nId);
		_userGenerator.GetSkeletonCap().StartTracking(nId);
		
		char cValue[50];
		sprintf(cValue, "user_tracking_user_calibration_complete:%d", nId);
		if(_useSockets) 
		{
			#if (XN_PLATFORM == XN_PLATFORM_WIN32)
				g_AS3Network.sendMessage(1,8,nId);
			#else
				sendToSocket(USER_TRACKING_SOCKET, cValue);
			#endif
		}
	}
	else
	{
		if(_printUserTracking) printf("AS3OpenNI :: Calibration failed for user: %d\n", nId);
		if (_needPose)
		{
			_userGenerator.GetPoseDetectionCap().StartPoseDetection(_strPose, nId);
		}
		else
		{
			_userGenerator.GetSkeletonCap().RequestCalibration(nId, true);
		}
		
		char cValue[50];
		sprintf(cValue, "user_tracking_user_calibration_failed:%d", nId);
		if(_useSockets) 
		{
			#if (XN_PLATFORM == XN_PLATFORM_WIN32)
				g_AS3Network.sendMessage(1,9,nId);
			#else
				sendToSocket(USER_TRACKING_SOCKET, cValue);
			#endif
		}
	}
}
Developer ID: alfiandosengkey, Project: as3openni, Lines: 42, Source file: main.cpp

Example 6: User_NewUser

void XN_CALLBACK_TYPE UserTracker::User_NewUser(xn::UserGenerator& generator, XnUserID nId, void* pCookie)
{
    XnUInt32 epochTime = 0;
	xnOSGetEpochTime(&epochTime);
	printf("%d New User %d\n", epochTime, nId);
    UserGenerator *userGenerator = static_cast<xn::UserGenerator*>(pCookie);
    if (userGenerator == NULL)
        return;

	// New user found
	if (g_bNeedPose)
	{
		userGenerator->GetPoseDetectionCap().StartPoseDetection(g_strPose, nId);
	}
	else
	{
		userGenerator->GetSkeletonCap().RequestCalibration(nId, TRUE);
	}
}
Developer ID: yongxiaofeng, Project: earthQuakeProject, Lines: 20, Source file: UserTracker.cpp

Example 7: User_NewUser

void XN_CALLBACK_TYPE User_NewUser(UserGenerator& generator, XnUserID nId, void* pCookie)
{
	if(_printUserTracking) printf("AS3OpenNI :: New User: %d\n", nId);
	if(_needPose)
	{
		_userGenerator.GetPoseDetectionCap().StartPoseDetection(_strPose, nId);
	}
	else
	{
		_userGenerator.GetSkeletonCap().RequestCalibration(nId, true);
	}
	
	char cValue[50];
	sprintf(cValue, "user_tracking_new_user:%d", nId);
	if(_useSockets) 
	{
		#if (XN_PLATFORM == XN_PLATFORM_WIN32)
			g_AS3Network.sendMessage(1,2,nId);
		#else
			sendToSocket(USER_TRACKING_SOCKET, cValue);
		#endif
	}
}
Developer ID: alfiandosengkey, Project: as3openni, Lines: 23, Source file: main.cpp

Example 8: main


//......... part of the code is omitted here .........
		else
		{
			_trackPad = new XnVSelectableSlider2D(4, 9);
		}
		
		_trackPad->RegisterItemHover(NULL, &TrackPad_ItemHover);
		_trackPad->RegisterItemSelect(NULL, &TrackPad_ItemSelect);
	    _trackPad->RegisterPrimaryPointCreate(NULL, &TrackPad_PrimaryCreate);
	  	_trackPad->RegisterPrimaryPointDestroy(NULL, &TrackPad_PrimaryDestroy);
	}
	
	// Feature User Tracking.
	if(_featureUserTracking)
	{
		// Setup user generator callbacks.
		XnCallbackHandle hUserCallbacks, hCalibrationCallbacks, hPoseCallbacks;
		if (!_userGenerator.IsCapabilitySupported(XN_CAPABILITY_SKELETON))
		{
			printf("AS3OpenNI :: Supplied user generator doesn't support skeleton\n");
			return 1;
		}
		_userGenerator.RegisterUserCallbacks(User_NewUser, User_LostUser, NULL, hUserCallbacks);
		
		// Setup Skeleton detection.
		_userGenerator.GetSkeletonCap().RegisterCalibrationCallbacks(UserCalibration_CalibrationStart, UserCalibration_CalibrationEnd, NULL, hCalibrationCallbacks);
		if (_userGenerator.GetSkeletonCap().NeedPoseForCalibration())
		{
			_needPose = true;
			if (!_userGenerator.IsCapabilitySupported(XN_CAPABILITY_POSE_DETECTION))
			{
				printf("AS3OpenNI :: Pose required, but not supported\n");
				return 1;
			}
			_userGenerator.GetPoseDetectionCap().RegisterToPoseCallbacks(UserPose_PoseDetected, NULL, NULL, hPoseCallbacks);
			_userGenerator.GetSkeletonCap().GetCalibrationPose(_strPose);
		}
		_userGenerator.GetSkeletonCap().SetSkeletonProfile(XN_SKEL_PROFILE_ALL);
	}
	
	// Create the broadcaster manager.
	_broadcaster = new XnVBroadcaster();
	
	// Start generating all.
	_context.StartGeneratingAll();
	
	// Set the frame rate.
	_status = xnFPSInit(&xnFPS, 180);
	CHECK_RC(_status, "AS3OpenNI :: FPS Init");
	
	//----------------------------------------------------------------------//
	//------------------------- SETUP DISPLAY SUPPORT ---------------------//
	//--------------------------------------------------------------------//
	
	// Setup depth and image data.
	_depth.GetMetaData(_depthData);
	_image.GetMetaData(_imageData);
	
	// Hybrid mode isn't supported in this sample
	if (_imageData.FullXRes() != _depthData.FullXRes() || _imageData.FullYRes() != _depthData.FullYRes())
	{
		printf ("AS3OpenNI :: The device depth and image resolution must be equal!\n");
		return 1;
	}

	// RGB is the only image format supported.
	if (_imageData.PixelFormat() != XN_PIXEL_FORMAT_RGB24)
Developer ID: alfiandosengkey, Project: as3openni, Lines: 67, Source file: main.cpp

Example 9: init

bool CVKinectWrapper::init(string CalibFilePath)
{

	this->CalibFilePath = CalibFilePath;

	XnStatus rc;

	EnumerationErrors errors;
	rc = g_context.InitFromXmlFile(CalibFilePath.c_str(), g_scriptNode, &errors);

	if (rc == XN_STATUS_NO_NODE_PRESENT)
	{
		XnChar strError[1024];
		errors.ToString(strError, 1024);
		printf("%s\n", strError);
        error = 1;
		return started;
	}
	else if (rc != XN_STATUS_OK)
	{
		printf("Open failed: %s\n", xnGetStatusString(rc));
        error = 2;
		return started;
	}

	rc = g_context.FindExistingNode(XN_NODE_TYPE_DEPTH, g_depth);
	if (rc != XN_STATUS_OK)
	{
		printf("No depth node exists! Check your XML.");
        error = 3;
		return started;
	}

	rc = g_context.FindExistingNode(XN_NODE_TYPE_IMAGE, g_image);
	if (rc != XN_STATUS_OK)
	{
		printf("No image node exists! Check your XML.");
        error = 4;
		return started;
	}

	g_depth.GetMetaData(g_depthMD);
	g_image.GetMetaData(g_imageMD);

	// Hybrid mode isn't supported in this sample
	if (g_imageMD.FullXRes() != g_depthMD.FullXRes() || g_imageMD.FullYRes() != g_depthMD.FullYRes())
	{
		printf ("The device depth and image resolution must be equal!\n");
        error = 5;
		return started;
	}

	// RGB is the only image format supported.
	if (g_imageMD.PixelFormat() != XN_PIXEL_FORMAT_RGB24)
	{
		printf("The device image format must be RGB24\n");
        error = 6;
		return started;
	}

    // skeleton
	rc = g_context.FindExistingNode(XN_NODE_TYPE_USER, g_UserGenerator);
	if (rc != XN_STATUS_OK)
	{
		rc = g_UserGenerator.Create(g_context);
		CHECK_RC(rc, "Find user generator");
	}

	XnCallbackHandle hUserCallbacks, hCalibrationStart, hCalibrationComplete, hPoseDetected, hCalibrationInProgress, hPoseInProgress;
	if (!g_UserGenerator.IsCapabilitySupported(XN_CAPABILITY_SKELETON))
	{
		printf("Supplied user generator doesn't support skeleton\n");
        error = 7;
		return 1;
	}
	rc = g_UserGenerator.RegisterUserCallbacks(User_NewUser, User_LostUser, NULL, hUserCallbacks);
	CHECK_RC(rc, "Register to user callbacks");
	rc = g_UserGenerator.GetSkeletonCap().RegisterToCalibrationStart(UserCalibration_CalibrationStart, NULL, hCalibrationStart);
	CHECK_RC(rc, "Register to calibration start");
	rc = g_UserGenerator.GetSkeletonCap().RegisterToCalibrationComplete(UserCalibration_CalibrationComplete, NULL, hCalibrationComplete);
	CHECK_RC(rc, "Register to calibration complete");

	if (g_UserGenerator.GetSkeletonCap().NeedPoseForCalibration())
	{
		g_bNeedPose = TRUE;
		if (!g_UserGenerator.IsCapabilitySupported(XN_CAPABILITY_POSE_DETECTION))
		{
			printf("Pose required, but not supported\n");
            error = 7;
			return 1;
		}
		rc = g_UserGenerator.GetPoseDetectionCap().RegisterToPoseDetected(UserPose_PoseDetected, NULL, hPoseDetected);
		CHECK_RC(rc, "Register to Pose Detected");
		g_UserGenerator.GetSkeletonCap().GetCalibrationPose(g_strPose);
	}

	g_UserGenerator.GetSkeletonCap().SetSkeletonProfile(XN_SKEL_PROFILE_ALL);

	rc = g_UserGenerator.GetSkeletonCap().RegisterToCalibrationInProgress(MyCalibrationInProgress, NULL, hCalibrationInProgress);
	CHECK_RC(rc, "Register to calibration in progress");
//......... part of the code is omitted here .........
Developer ID: Pajinek, Project: kinect-annotation, Lines: 101, Source file: openni.cpp


Note: The UserGenerator::GetPoseDetectionCap method examples in this article were compiled by 纯净天空 from GitHub, MSDocs and other open-source code and documentation platforms. The code snippets were selected from open-source projects contributed by their respective authors; copyright of the source code remains with the original authors. Distribution and use are subject to the corresponding project's license; do not reproduce without permission.