This article collects typical usage examples of the C++ method UserGenerator::IsCapabilitySupported. If you are wondering how to use UserGenerator::IsCapabilitySupported in C++, or are looking for concrete examples of it, the hand-picked code samples below should help. You can also explore further usage examples of its containing class, UserGenerator.
Five code examples of UserGenerator::IsCapabilitySupported are shown below, sorted by popularity by default. You can upvote the examples you like or find useful; your ratings help the system recommend better C++ code samples.
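As a quick orientation before the full examples: the typical pattern is to obtain a UserGenerator node from an initialized xn::Context and query IsCapabilitySupported with a capability name such as XN_CAPABILITY_SKELETON before requesting that capability. The minimal sketch below illustrates this pattern against the OpenNI 1.x C++ wrapper; it is not taken from any of the projects listed here, and everything beyond the OpenNI calls themselves is illustrative.

#include <XnCppWrapper.h>
#include <cstdio>

int main()
{
    xn::Context context;
    XnStatus rc = context.Init();                // initialize OpenNI without an XML file
    if (rc != XN_STATUS_OK)
    {
        printf("Init failed: %s\n", xnGetStatusString(rc));
        return 1;
    }

    xn::UserGenerator user;
    rc = user.Create(context);                   // create a user generator node
    if (rc != XN_STATUS_OK)
    {
        printf("Create failed: %s\n", xnGetStatusString(rc));
        return 1;
    }

    // The method shown in the examples below: check a capability before using it.
    if (!user.IsCapabilitySupported(XN_CAPABILITY_SKELETON))
    {
        printf("Skeleton capability not supported\n");
        return 1;
    }
    user.GetSkeletonCap().SetSkeletonProfile(XN_SKEL_PROFILE_ALL);

    context.Release();                           // free the context (older OpenNI builds use Shutdown() instead)
    return 0;
}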
Example 1: xnInit
//----------------------------------------------------
// OpenNI initialization
//----------------------------------------------------
void xnInit(void){
    XnStatus rc;

    EnumerationErrors errors;
    rc = g_context.InitFromXmlFile(SAMPLE_XML_PATH, &errors);
    if (rc == XN_STATUS_NO_NODE_PRESENT){
        XnChar strError[1024];
        errors.ToString(strError, 1024);
        printf("%s\n", strError);
        exit(1);
    }else if (rc != XN_STATUS_OK){
        printf("Open failed: %s\n", xnGetStatusString(rc));
        exit(1);
    }

    //playerInit();

    rc = xnFPSInit(&g_xnFPS, 180);    // initialize the FPS counter
    //CHECK_RC(rc, "FPS Init");

    // Obtain the depth, image, and user generators
    rc = g_context.FindExistingNode(XN_NODE_TYPE_DEPTH, g_depth);
    errorCheck(rc, "g_depth");        // error check
    rc = g_context.FindExistingNode(XN_NODE_TYPE_IMAGE, g_image);
    errorCheck(rc, "g_image");
    rc = g_context.FindExistingNode(XN_NODE_TYPE_USER, g_user);
    //rc = g_user.Create(g_context);
    errorCheck(rc, "g_user");

    // Check whether user detection (skeleton) is supported
    if (!g_user.IsCapabilitySupported(XN_CAPABILITY_SKELETON)) {
        //throw std::runtime_error("User detection is not supported");
        cout << "User detection is not supported" << endl;
        exit(1);
    }

    // Recorder setup
    //rc = setRecorder(g_recorder, rc);

    // Register the user callbacks
    XnCallbackHandle userCallbacks;
    g_user.RegisterUserCallbacks(UserDetected, UserLost, NULL, userCallbacks);

    // Retrieve depth, image, and user data
    g_depth.GetMetaData(g_depthMD);
    g_image.GetMetaData(g_imageMD);
    g_user.GetUserPixels(0, g_sceneMD);

    // Hybrid mode isn't supported in this sample
    // Error if the image and depth resolutions differ
    if (g_imageMD.FullXRes() != g_depthMD.FullXRes() || g_imageMD.FullYRes() != g_depthMD.FullYRes()){
        printf ("The device depth and image resolution must be equal!\n");
        exit(1);
    }

    // RGB is the only image format supported.
    // Check the pixel format
    if (g_imageMD.PixelFormat() != XN_PIXEL_FORMAT_RGB24){
        printf("The device image format must be RGB24\n");
        exit(1);
    }

    // Texture map init
    // Size the texture map for full-screen display
    g_nTexMapX = (((unsigned short)(g_depthMD.FullXRes() - 1) / 512) + 1) * 512;    // round up to a multiple of 512 depending on the size (1024)
    g_nTexMapY = (((unsigned short)(g_depthMD.FullYRes() - 1) / 512) + 1) * 512;    // 512
    g_pTexMap = (XnRGB24Pixel*)malloc(g_nTexMapX * g_nTexMapY * sizeof(XnRGB24Pixel));    // allocate color data for the whole screen

    // Initialize the point buffers
    g_pPoint = (XnPoint3D*)malloc(KINECT_IMAGE_SIZE * sizeof(XnPoint3D));              // buffer for 3D points
    g_pBackTex = (XnRGB24Pixel*)malloc(KINECT_IMAGE_SIZE * sizeof(XnRGB24Pixel));      // buffer for the background image
    g_pBackPoint = (XnPoint3D*)malloc(KINECT_IMAGE_SIZE * sizeof(XnPoint3D));          // buffer for the background points
    g_pBackDepth = (XnDepthPixel*)malloc(KINECT_IMAGE_SIZE * sizeof(XnDepthPixel));    // buffer for the background depth
}
Example 2: initialize
/**
 * Initialize XN functions
 */
void initialize()
{
    ImageMetaData imageMD;
    XnStatus status;
    int dummy;

    srand(time(NULL));

    // Initializing context and checking for enumeration errors
    status = g_Context.InitFromXmlFile(XML_CONFIG_FILE, &g_Error);
    checkEnumError(status, g_Error);

    // Finding nodes and checking for errors
    STATUS_CHECK(g_Context.FindExistingNode(XN_NODE_TYPE_DEPTH, g_DepthGenerator), "Finding depth node");
    STATUS_CHECK(g_Context.FindExistingNode(XN_NODE_TYPE_SCENE, g_SceneAnalyzer), "Finding scene analyzer");

    // Note: when the image generation node is handled the program gets too slow.
    // STATUS_CHECK(g_Context.FindExistingNode(XN_NODE_TYPE_IMAGE, g_ImageGenerator), "Finding image node");

    // Set the depth generator's view point to the image generator's point of view.
    // STATUS_CHECK(g_DepthGenerator.GetAlternativeViewPointCap().SetViewPoint(g_ImageGenerator), "Set View Point");

    STATUS_CHECK(g_Context.FindExistingNode(XN_NODE_TYPE_USER, g_UserGenerator), "Finding user node");

    //g_ImageGenerator.GetMetaData(imageMD);

    // Checking camera pixel format
    //if (imageMD.PixelFormat() != XN_PIXEL_FORMAT_RGB24) {
    //    reportError("Camera format not supported...!\n");
    //}

    // Checking user generator capabilities
    if (!g_UserGenerator.IsCapabilitySupported(XN_CAPABILITY_SKELETON)) {
        reportError("Skeleton capability not supported\n");
    }

    if (!g_UserGenerator.IsCapabilitySupported(XN_CAPABILITY_POSE_DETECTION)) {
        reportError("Pose detection capability not supported\n");
    }

    printf("Number of players: ");
    dummy = scanf("%d", &g_MaxPlayers);
    printf("\n");

    // Initialize user detector object
    g_UserDetector = UserDetector(g_UserGenerator, g_DepthGenerator);
    g_UserDetector.registerCallbacks();

    g_ZamusDetector = new Zamus(&g_UserDetector);
    g_LinqDetector = new Linq(&g_UserDetector);
    g_BusterDetector = new BusterDetector(g_ZamusDetector, &g_UserDetector);
    g_IceRodDetector = new IceRodDetector(g_LinqDetector, &g_UserDetector);

    // Initialize image render object
    g_SceneRenderer = SceneRenderer(&g_ImageGenerator,
                                    &g_DepthGenerator,
                                    &g_SceneAnalyzer,
                                    &g_UserDetector,
                                    g_ZamusDetector,
                                    g_LinqDetector);

    STATUS_CHECK(g_Context.StartGeneratingAll(), "Context generation");

    g_SFBgame = SuperFiremanBrothers(&g_UserDetector,
                                     &g_SceneAnalyzer,
                                     g_ZamusDetector,
                                     g_LinqDetector,
                                     g_MaxPlayers
                                    );
}
Example 3: main
//......... part of the code omitted here .........
        _inOutSlider = new XnVSelectableSlider1D(3, 0, AXIS_Z);
        _inOutSlider->RegisterActivate(NULL, &InOutSlider_OnActivate);
        _inOutSlider->RegisterDeactivate(NULL, &InOutSlider_OnDeactivate);
        _inOutSlider->RegisterPrimaryPointCreate(NULL, &InOutSlider_OnPrimaryCreate);
        _inOutSlider->RegisterPrimaryPointDestroy(NULL, &InOutSlider_OnPrimaryDestroy);
        _inOutSlider->RegisterValueChange(NULL, &InOutSlider_OnValueChange);
        _inOutSlider->SetValueChangeOnOffAxis(false);
    }

    // Feature TrackPad.
    if (_featureTrackPad)
    {
        // Track Pad.
        if (trackpad_columns > 0 && trackpad_rows > 0)
        {
            _trackPad = new XnVSelectableSlider2D(trackpad_columns, trackpad_rows);
        }
        else
        {
            _trackPad = new XnVSelectableSlider2D(4, 9);
        }

        _trackPad->RegisterItemHover(NULL, &TrackPad_ItemHover);
        _trackPad->RegisterItemSelect(NULL, &TrackPad_ItemSelect);
        _trackPad->RegisterPrimaryPointCreate(NULL, &TrackPad_PrimaryCreate);
        _trackPad->RegisterPrimaryPointDestroy(NULL, &TrackPad_PrimaryDestroy);
    }

    // Feature User Tracking.
    if (_featureUserTracking)
    {
        // Setup user generator callbacks.
        XnCallbackHandle hUserCallbacks, hCalibrationCallbacks, hPoseCallbacks;
        if (!_userGenerator.IsCapabilitySupported(XN_CAPABILITY_SKELETON))
        {
            printf("AS3OpenNI :: Supplied user generator doesn't support skeleton\n");
            return 1;
        }
        _userGenerator.RegisterUserCallbacks(User_NewUser, User_LostUser, NULL, hUserCallbacks);

        // Setup Skeleton detection.
        _userGenerator.GetSkeletonCap().RegisterCalibrationCallbacks(UserCalibration_CalibrationStart, UserCalibration_CalibrationEnd, NULL, hCalibrationCallbacks);
        if (_userGenerator.GetSkeletonCap().NeedPoseForCalibration())
        {
            _needPose = true;
            if (!_userGenerator.IsCapabilitySupported(XN_CAPABILITY_POSE_DETECTION))
            {
                printf("AS3OpenNI :: Pose required, but not supported\n");
                return 1;
            }
            _userGenerator.GetPoseDetectionCap().RegisterToPoseCallbacks(UserPose_PoseDetected, NULL, NULL, hPoseCallbacks);
            _userGenerator.GetSkeletonCap().GetCalibrationPose(_strPose);
        }
        _userGenerator.GetSkeletonCap().SetSkeletonProfile(XN_SKEL_PROFILE_ALL);
    }

    // Create the broadcaster manager.
    _broadcaster = new XnVBroadcaster();

    // Start generating all.
    _context.StartGeneratingAll();

    // Set the frame rate.
    _status = xnFPSInit(&xnFPS, 180);
    CHECK_RC(_status, "AS3OpenNI :: FPS Init");
Example 4: printf
/*
 * Function: KinectMonitor (Constructor)
 *
 * Initializes all the production nodes for the kinect to get and process data.
 * Sets the camera tilt to the specified (or default) angle.
 * Registers the OpenNI callbacks for user events.
 * Registers the system signal handlers (for terminating the program).
 * Initializes the global and member variables.
 *
 * Parameters:
 *     char* tilt - A string holding the desired tilt angle of the camera.
 *                  If it is NULL, the default value is used.
 */
KinectMonitor::KinectMonitor(char *tilt) {
    XnStatus status;
    EnumerationErrors errors;
    XnCallbackHandle userCallbacks;

    // Setup Context from an XML configuration file (the default supplied by OpenNI)
    // CONTEXT_XML is defined in monitor.h
    status = context.InitFromXmlFile(CONTEXT_XML, scriptNode, &errors);

    // Check to ensure that the context was initialized properly.
    if( status == XN_STATUS_NO_NODE_PRESENT ) {
        XnChar strError[1024];
        errors.ToString(strError, 1024);
        printf("%s\n", strError);
        return;
    } else if( status != XN_STATUS_OK ) {
        printf("Could not initialize Context: %s\n", xnGetStatusString(status));
        return;
    }

    // Setup Depth Generator production node from the context
    status = context.FindExistingNode(XN_NODE_TYPE_DEPTH, depthGenerator);
    // Setup User Generator production node from the context
    status = context.FindExistingNode(XN_NODE_TYPE_USER, userGenerator);

    // Check that the user generator is available
    if( status != XN_STATUS_OK ) {
        // If the context did not define a UserGenerator node, then try to create one
        status = userGenerator.Create(context);
        CHECK_RC(status, "Find user generator");
    }

    // Set FPS
    status = xnFPSInit(&xnFPS, 180);

    // Check for Skeletal Mapping
    if( !userGenerator.IsCapabilitySupported(XN_CAPABILITY_SKELETON) ) {
        printf("Skeletal mapping not supported.\n");
        return;
    }

    // Set the skeletal profile to only include the joints in the upper body.
    // Profile options are XN_SKEL_PROFILE_<option>
    // Where <option> could be: NONE, ALL, UPPER, LOWER, or HEAD_HANDS
    userGenerator.GetSkeletonCap().SetSkeletonProfile(XN_SKEL_PROFILE_UPPER);

    // Tilt camera - This feature requires the program to be run with root privilege,
    // because it requires writing to the usb device.
    XN_USB_DEV_HANDLE dev;
    // Sets the angle to either the DEFAULT_TILT (defined in monitor.h) or the given tilt.
    int angle = (tilt == nullptr) ? DEFAULT_TILT : atoi(tilt);

    // Open the kinect usb device
    status = xnUSBOpenDevice(VID_MICROSOFT, PID_NUI_MOTOR, NULL, NULL, &dev);
    // Send the proper code to the usb device to set the angle.
    uint8_t empty[0x1];
    status = xnUSBSendControl(
        dev, XN_USB_CONTROL_TYPE_VENDOR, 0x31, (XnUInt16)angle,
        0x0, empty, 0x0, 0
    );

    // Register Callbacks
    status = userGenerator.RegisterUserCallbacks(
        foundUser, lostUser, NULL, userCallbacks
    );

    // Register Handlers
    signal(SIGABRT, &stop);
    signal(SIGTERM, &stop);
    signal(SIGINT, &stop);

    // Initialize globals
    quit = false;
    out = true;
}
Example 5: init
bool CVKinectWrapper::init(string CalibFilePath)
{
    this->CalibFilePath = CalibFilePath;
    XnStatus rc;
    EnumerationErrors errors;

    rc = g_context.InitFromXmlFile(CalibFilePath.c_str(), g_scriptNode, &errors);
    if (rc == XN_STATUS_NO_NODE_PRESENT)
    {
        XnChar strError[1024];
        errors.ToString(strError, 1024);
        printf("%s\n", strError);
        error = 1;
        return started;
    }
    else if (rc != XN_STATUS_OK)
    {
        printf("Open failed: %s\n", xnGetStatusString(rc));
        error = 2;
        return started;
    }

    rc = g_context.FindExistingNode(XN_NODE_TYPE_DEPTH, g_depth);
    if (rc != XN_STATUS_OK)
    {
        printf("No depth node exists! Check your XML.");
        error = 3;
        return started;
    }

    rc = g_context.FindExistingNode(XN_NODE_TYPE_IMAGE, g_image);
    if (rc != XN_STATUS_OK)
    {
        printf("No image node exists! Check your XML.");
        error = 4;
        return started;
    }

    g_depth.GetMetaData(g_depthMD);
    g_image.GetMetaData(g_imageMD);

    // Hybrid mode isn't supported in this sample
    if (g_imageMD.FullXRes() != g_depthMD.FullXRes() || g_imageMD.FullYRes() != g_depthMD.FullYRes())
    {
        printf("The device depth and image resolution must be equal!\n");
        error = 5;
        return started;
    }

    // RGB is the only image format supported.
    if (g_imageMD.PixelFormat() != XN_PIXEL_FORMAT_RGB24)
    {
        printf("The device image format must be RGB24\n");
        error = 6;
        return started;
    }

    // skeleton
    rc = g_context.FindExistingNode(XN_NODE_TYPE_USER, g_UserGenerator);
    if (rc != XN_STATUS_OK)
    {
        rc = g_UserGenerator.Create(g_context);
        CHECK_RC(rc, "Find user generator");
    }

    XnCallbackHandle hUserCallbacks, hCalibrationStart, hCalibrationComplete, hPoseDetected, hCalibrationInProgress, hPoseInProgress;
    if (!g_UserGenerator.IsCapabilitySupported(XN_CAPABILITY_SKELETON))
    {
        printf("Supplied user generator doesn't support skeleton\n");
        error = 7;
        return started;
    }

    rc = g_UserGenerator.RegisterUserCallbacks(User_NewUser, User_LostUser, NULL, hUserCallbacks);
    CHECK_RC(rc, "Register to user callbacks");
    rc = g_UserGenerator.GetSkeletonCap().RegisterToCalibrationStart(UserCalibration_CalibrationStart, NULL, hCalibrationStart);
    CHECK_RC(rc, "Register to calibration start");
    rc = g_UserGenerator.GetSkeletonCap().RegisterToCalibrationComplete(UserCalibration_CalibrationComplete, NULL, hCalibrationComplete);
    CHECK_RC(rc, "Register to calibration complete");

    if (g_UserGenerator.GetSkeletonCap().NeedPoseForCalibration())
    {
        g_bNeedPose = TRUE;
        if (!g_UserGenerator.IsCapabilitySupported(XN_CAPABILITY_POSE_DETECTION))
        {
            printf("Pose required, but not supported\n");
            error = 7;
            return started;
        }
        rc = g_UserGenerator.GetPoseDetectionCap().RegisterToPoseDetected(UserPose_PoseDetected, NULL, hPoseDetected);
        CHECK_RC(rc, "Register to Pose Detected");
        g_UserGenerator.GetSkeletonCap().GetCalibrationPose(g_strPose);
    }

    g_UserGenerator.GetSkeletonCap().SetSkeletonProfile(XN_SKEL_PROFILE_ALL);
    rc = g_UserGenerator.GetSkeletonCap().RegisterToCalibrationInProgress(MyCalibrationInProgress, NULL, hCalibrationInProgress);
    CHECK_RC(rc, "Register to calibration in progress");
//......... part of the code omitted here .........