本文整理汇总了C++中EnumerationErrors类的典型用法代码示例。如果您正苦于以下问题:C++ EnumerationErrors类的具体用法?C++ EnumerationErrors怎么用?C++ EnumerationErrors使用的例子?那么, 这里精选的类代码示例或许可以为您提供帮助。
在下文中一共展示了EnumerationErrors类的15个代码示例,这些例子默认根据受欢迎程度排序。您可以为喜欢或者感觉有用的代码点赞,您的评价将有助于系统推荐出更棒的C++代码示例。
示例1: main
// Minimal OpenNI depth-reading sample: initialize from an XML config,
// grab the existing depth generator, then print the middle-pixel depth
// and FPS for every frame until a key is pressed.
// Returns 0 on success, the failing XnStatus otherwise.
int main()
{
	XnStatus nRetVal = XN_STATUS_OK;
	Context context;
	EnumerationErrors errors;

	// `errors` accumulates per-node enumeration failures so they can be
	// printed if node creation fails below.
	nRetVal = context.InitFromXmlFile(SAMPLE_XML_PATH, &errors);
	if (nRetVal == XN_STATUS_NO_NODE_PRESENT)
	{
		// A required production node could not be created; dump the
		// collected enumeration errors for diagnosis.
		XnChar strError[1024];
		errors.ToString(strError, 1024);
		printf("%s\n", strError);
		return (nRetVal);
	}
	else if (nRetVal != XN_STATUS_OK)
	{
		printf("Open failed: %s\n", xnGetStatusString(nRetVal));
		return (nRetVal);
	}

	DepthGenerator depth;
	nRetVal = context.FindExistingNode(XN_NODE_TYPE_DEPTH, depth);
	CHECK_RC(nRetVal, "Find depth generator");

	XnFPSData xnFPS;
	nRetVal = xnFPSInit(&xnFPS, 180);
	CHECK_RC(nRetVal, "FPS Init");

	DepthMetaData depthMD;
	while (!xnOSWasKeyboardHit())
	{
		// Block until the depth node has a new frame.
		nRetVal = context.WaitOneUpdateAll(depth);
		if (nRetVal != XN_STATUS_OK)
		{
			printf("UpdateData failed: %s\n", xnGetStatusString(nRetVal));
			continue;
		}
		xnFPSMarkFrame(&xnFPS);
		depth.GetMetaData(depthMD);
		// FIX: removed the unused local `pDepthMap` — depthMD.Data() was
		// fetched but never read.
		printf("Frame %d Middle point is: %u. FPS: %f\n", depthMD.FrameID(), depthMD(depthMD.XRes() / 2, depthMD.YRes() / 2), xnFPSCalc(&xnFPS));
	}

	context.Shutdown();
	return 0;
}
示例2: printf
// -----------------------------------------------------------------------------------------------------
// connect
// -----------------------------------------------------------------------------------------------------
// Open the Kinect through OpenNI using the XML config path from Config,
// allocate the point-cloud buffer, locate the depth/image generators and
// read the device's shadow / no-sample depth constants.
// Returns true on success, false if the context could not be initialized.
bool CameraDevice::connect()
{
	//Connect to kinect
	printf("Connecting to Kinect... ");
	fflush(stdout);
	XnStatus nRetVal = XN_STATUS_OK;
	EnumerationErrors errors;
	ScriptNode script;
	// `errors` collects per-node enumeration failures for reporting below.
	nRetVal = g_context.InitFromXmlFile(Config::_PathKinectXmlFile.c_str(), script, &errors);
	if (nRetVal == XN_STATUS_NO_NODE_PRESENT)
	{
		// A required production node could not be created - print details.
		XnChar strError[1024];
		errors.ToString(strError, 1024);
		printf("%s\n", strError);
		return false;
	}
	else if (nRetVal != XN_STATUS_OK)
	{
		printf("Open failed: %s\n", xnGetStatusString(nRetVal));
		return false;
	}
	printf("OK\n");

	// allocate the point cloud buffer
	g_cloudPointSave.width = NBPIXELS_WIDTH;
	g_cloudPointSave.height = NBPIXELS_HEIGHT;
	g_cloudPointSave.points.resize(NBPIXELS_WIDTH*NBPIXELS_HEIGHT);

	// Locate the generators declared in the XML and start the FPS counter.
	// NOTE(review): CHECK_RC presumably bails out on failure - confirm the
	// macro definition; otherwise nRetVal is simply overwritten below.
	nRetVal = g_context.FindExistingNode(XN_NODE_TYPE_DEPTH, g_depth);
	CHECK_RC(nRetVal, "Find depth generator");
	nRetVal = g_context.FindExistingNode(XN_NODE_TYPE_IMAGE, g_image);
	CHECK_RC(nRetVal, "Find image generator");
	nRetVal = xnFPSInit(&g_xnFPS, 180);
	CHECK_RC(nRetVal, "FPS Init");

	g_context.SetGlobalMirror(false); // disable global (horizontal) mirroring
	// Align the depth map to the RGB camera viewpoint (return value ignored).
	g_depth.GetAlternativeViewPointCap().SetViewPoint(g_image);

	// Read device-specific depth constants; failures are non-fatal.
	if (g_depth.GetIntProperty ("ShadowValue", g_shadowValue) != XN_STATUS_OK)
		printf ("[OpenNIDriver] Could not read shadow value!");
	if (g_depth.GetIntProperty ("NoSampleValue", g_noSampleValue) != XN_STATUS_OK)
		printf ("[OpenNIDriver] Could not read no sample value!");

	return (nRetVal == XN_STATUS_OK);
}
示例3: createStream
// Create any production node of the requested type in the global context
// and bind it to `generator`. Failures are reported through displayMessage;
// the function returns normally in every case.
void createStream(Generator& generator, XnProductionNodeType type)
{
	EnumerationErrors errors;
	const XnStatus status = g_Context.CreateAnyProductionTree(type, NULL, generator, &errors);

	switch (status)
	{
	case XN_STATUS_OK:
		break;
	case XN_STATUS_NO_NODE_PRESENT:
	{
		// No node could be created: show the accumulated enumeration errors.
		XnChar strError[1024];
		errors.ToString(strError, 1024);
		displayMessage("%s", strError);
		break;
	}
	default:
		displayMessage("Open failed: %s", xnGetStatusString(status));
		break;
	}
}
示例4: setup
//--------------------------------------------------------------
// Initialize OpenNI from the XML config, fetch the depth generator and
// allocate the texture / pixel buffers used for rendering. On any OpenNI
// failure the method prints a message and returns, leaving the buffers
// unallocated.
void testApp::setup(){
	XnStatus rc;

	EnumerationErrors errors;
	rc = g_context.InitFromXmlFile(SAMPLE_XML_PATH, g_scriptNode, &errors);
	if (rc == XN_STATUS_NO_NODE_PRESENT)
	{
		// A required node is missing - print the enumeration details.
		XnChar strError[1024];
		errors.ToString(strError, 1024);
		printf("%s\n", strError);
		return ;
	}
	else if (rc != XN_STATUS_OK)
	{
		printf("Open failed: %s\n", xnGetStatusString(rc));
		return;
	}

	rc = g_context.FindExistingNode(XN_NODE_TYPE_DEPTH, g_depth);
	if (rc != XN_STATUS_OK)
	{
		printf("No depth node exists! Check your XML.");
		return;
	}

	g_depth.GetMetaData(g_depthMD);

	// Texture map init
	// Round the texture dimensions up to the next multiple of 512.
	g_nTexMapX = (((unsigned short)(g_depthMD.FullXRes()-1) / 512) + 1) * 512;
	g_nTexMapY = (((unsigned short)(g_depthMD.FullYRes()-1) / 512) + 1) * 512;
	// NOTE(review): malloc results are unchecked, and nothing frees the old
	// buffers if setup() runs twice - confirm lifecycle with the caller.
	g_pTexMap = (XnRGB24Pixel*)malloc(g_nTexMapX * g_nTexMapY * sizeof(XnRGB24Pixel));

	std::cout << " w:" << g_depthMD.FullXRes() << " h:" << g_depthMD.FullYRes() << std::endl;

	// RGB pixel buffer and openFrameworks texture at fixed VGA size.
	pixels = (unsigned char*)malloc(640*480*3*sizeof(unsigned char));
	tex.allocate(640, 480, GL_RGB);
}
示例5: main
int main()
{
XnStatus nRetVal = XN_STATUS_OK;
Context context;
ScriptNode scriptNode;
EnumerationErrors errors;
const char *fn = NULL;
if (fileExists(SAMPLE_XML_PATH)) fn = SAMPLE_XML_PATH;
else if (fileExists(SAMPLE_XML_PATH_LOCAL)) fn = SAMPLE_XML_PATH_LOCAL;
else {
printf("Could not find '%s' nor '%s'. Aborting.\n" , SAMPLE_XML_PATH, SAMPLE_XML_PATH_LOCAL);
return XN_STATUS_ERROR;
}
printf("Reading config from: '%s'\n", fn);
nRetVal = context.InitFromXmlFile(fn, scriptNode, &errors);
if (nRetVal == XN_STATUS_NO_NODE_PRESENT)
{
XnChar strError[1024];
errors.ToString(strError, 1024);
printf("%s\n", strError);
return (nRetVal);
}
else if (nRetVal != XN_STATUS_OK)
{
printf("Open failed: %s\n", xnGetStatusString(nRetVal));
return (nRetVal);
}
DepthGenerator depth;
nRetVal = context.FindExistingNode(XN_NODE_TYPE_DEPTH, depth);
CHECK_RC(nRetVal, "Find depth generator");
XnFPSData xnFPS;
nRetVal = xnFPSInit(&xnFPS, 180);
CHECK_RC(nRetVal, "FPS Init");
DepthMetaData depthMD;
while (!xnOSWasKeyboardHit())
{
nRetVal = context.WaitOneUpdateAll(depth);
if (nRetVal != XN_STATUS_OK)
{
printf("UpdateData failed: %s\n", xnGetStatusString(nRetVal));
continue;
}
xnFPSMarkFrame(&xnFPS);
depth.GetMetaData(depthMD);
printf("Frame %d Middle point is: %u. FPS: %f\n", depthMD.FrameID(), depthMD(depthMD.XRes() / 2, depthMD.YRes() / 2), xnFPSCalc(&xnFPS));
}
depth.Release();
scriptNode.Release();
context.Release();
return 0;
}
示例6: main
int main(int argc, char **argv)
{
XnBool bChooseDevice = false;
const char* csRecordingName = NULL;
if (argc > 1)
{
if (strcmp(argv[1], "-devices") == 0)
{
bChooseDevice = TRUE;
}
else
{
csRecordingName = argv[1];
}
}
if (csRecordingName != NULL)
{
// check if running from a different directory. If so, we need to change directory
// to the real one, so that path to INI file will be OK (for log initialization, for example)
if (0 != changeDirectory(argv[0]))
{
return(ERR_DEVICE);
}
}
// Xiron Init
XnStatus rc = XN_STATUS_OK;
EnumerationErrors errors;
if (csRecordingName != NULL)
{
xnLogInitFromXmlFile(SAMPLE_XML_PATH);
rc = openDeviceFile(argv[1]);
}
else if (bChooseDevice)
{
rc = openDeviceFromXmlWithChoice(SAMPLE_XML_PATH, errors);
}
else
{
rc = openDeviceFromXml(SAMPLE_XML_PATH, errors);
}
if (rc == XN_STATUS_NO_NODE_PRESENT)
{
XnChar strError[1024];
errors.ToString(strError, 1024);
printf("%s\n", strError);
closeSample(ERR_DEVICE);
return (rc);
}
else if (rc != XN_STATUS_OK)
{
printf("Open failed: %s\n", xnGetStatusString(rc));
closeSample(ERR_DEVICE);
}
audioInit();
captureInit();
statisticsInit();
reshaper.zNear = 1;
reshaper.zFar = 100;
glut_add_interactor(&reshaper);
cb.mouse_function = MouseCallback;
cb.motion_function = MotionCallback;
cb.passive_motion_function = MotionCallback;
cb.keyboard_function = KeyboardCallback;
cb.reshape_function = ReshapeCallback;
glut_add_interactor(&cb);
glutInit(&argc, argv);
glutInitDisplayString("stencil double rgb");
glutInitWindowSize(WIN_SIZE_X, WIN_SIZE_Y);
glutCreateWindow("OpenNI Viewer");
glutFullScreen();
glutSetCursor(GLUT_CURSOR_NONE);
init_opengl();
glut_helpers_initialize();
camera.configure_buttons(0);
camera.set_camera_mode(true);
camera.set_parent_rotation( & camera.trackball.r);
camera.enable();
object.configure_buttons(1);
object.translator.t[2] = -1;
object.translator.scale *= .1f;
object.trackball.r = rotationf(vec3f(2.0,0.01,0.01), to_radians(180));
object.set_parent_rotation( & camera.trackball.r);
object.disable();
light.configure_buttons(0);
light.translator.t = vec3f(.5, .5, -1);
light.set_parent_rotation( & camera.trackball.r);
//.........这里部分代码省略.........
示例7: Init
// Initialize the OpenNI context and create the depth / image generators
// requested by the caller; if neither can be created from the device,
// fall back to reading them from an .oni recording. Sets hasDepthNode /
// hasImageNode flags and returns the last XnStatus.
XnStatus TYKinect :: Init(bool depthNode, bool imageNode){ //Kinect Routines (Copied From OpenNI Sample : Simple Viewer)
	cout << "Connecting to kinect..." << endl;
	status = context.Init();
	CHECK_RC(status, "context init");
	if(status != XN_STATUS_OK) return status;

	/* >>>>> Acquisition from device*/
	/* Depth Node Initialization */
	if(depthNode){
		status = depth.Create(context);
		CHECK_RC(status, "create depth generator");
		if(status == XN_STATUS_OK){
			status = depth.SetMapOutputMode(outputMode); //640, 480, 30fps
			CHECK_RC(status, "set output mode");
			if(status == XN_STATUS_OK) hasDepthNode = true;
		}
	}

	/* Image Node Initialization */
	if(imageNode){
		status = image.Create(context);
		CHECK_RC(status, "create image generator");
		if(status == XN_STATUS_OK){
			status = image.SetMapOutputMode(outputMode); //640, 480, 30fps
			CHECK_RC(status, "set output mode");
			if(status == XN_STATUS_OK) hasImageNode = true;
		}
	}

	/* >>>>> Acquisition from file*/
	// Neither node came from the device: try the .oni recording instead.
	if(!(hasDepthNode||hasImageNode)){
		EnumerationErrors errors;
		printf("Trying to create node from oni file\n");
		//status = xnContextOpenFileRecording( (XnContext*) &context, oniFile.c_str());
		status = context.OpenFileRecording(oniFile.c_str());
		CHECK_RC(status,"Open input file");
		if (status == XN_STATUS_NO_NODE_PRESENT)
		{
			// NOTE(review): `errors` is never passed to an OpenNI call here,
			// so ToString will likely print nothing - confirm.
			XnChar strError[1024];
			errors.ToString(strError, 1024);
			printf("%s\n", strError);
			return status;
		}
		else if (status != XN_STATUS_OK)
		{
			printf("Open failed: %s\n", xnGetStatusString(status));
			return status;
		}

		status = context.FindExistingNode(XN_NODE_TYPE_DEPTH, depth);
		CHECK_RC(status, "Find depth generator");
		if(status == XN_STATUS_OK) hasDepthNode = true;
		status = context.FindExistingNode(XN_NODE_TYPE_IMAGE, image);
		CHECK_RC(status, "Find image generator");
		if(status == XN_STATUS_OK) hasImageNode = true;
		if(status != XN_STATUS_OK)
		{
			cout << "Initial Failed!! " << endl;
			return status;
		}
	}

	// Optional post-setup: align depth to the RGB viewpoint and toggle mirroring.
	if(isSyncTwoView && hasImageNode && hasDepthNode) depth.GetAlternativeViewPointCap().SetViewPoint(image);
	if(isMirroring) context.SetGlobalMirror(!context.GetGlobalMirror());

	status = context.StartGeneratingAll();
	CHECK_RC(status, "context start generating all");
	return status;
}
示例8: main
// GLUT-based OpenNI viewer: initialize from the XML config, validate that
// depth and image streams match in resolution and format, allocate the
// texture map, then hand control to the GLUT main loop.
// Returns 0 only via exit paths inside GLUT; non-zero XnStatus on setup failure.
int main(int argc, char* argv[])
{
	XnStatus rc;

	EnumerationErrors errors;
	// `errors` records why node creation failed so the NO_NODE_PRESENT
	// branch can print something useful.
	rc = g_context.InitFromXmlFile(SAMPLE_XML_PATH, &errors);
	if (rc == XN_STATUS_NO_NODE_PRESENT)
	{
		XnChar strError[1024];
		errors.ToString(strError, 1024);
		printf("%s\n", strError);
		return (rc);
	}
	else if (rc != XN_STATUS_OK)
	{
		printf("Open failed: %s\n", xnGetStatusString(rc));
		return (rc);
	}

	// FIX: the original discarded both FindExistingNode return codes and
	// then dereferenced the (possibly invalid) nodes via GetMetaData.
	rc = g_context.FindExistingNode(XN_NODE_TYPE_DEPTH, g_depth);
	if (rc != XN_STATUS_OK)
	{
		printf("No depth node exists! Check your XML.\n");
		return (rc);
	}
	rc = g_context.FindExistingNode(XN_NODE_TYPE_IMAGE, g_image);
	if (rc != XN_STATUS_OK)
	{
		printf("No image node exists! Check your XML.\n");
		return (rc);
	}

	g_depth.GetMetaData(g_depthMD);
	g_image.GetMetaData(g_imageMD);

	// Hybrid mode isn't supported in this sample
	if (g_imageMD.FullXRes() != g_depthMD.FullXRes() || g_imageMD.FullYRes() != g_depthMD.FullYRes())
	{
		printf ("The device depth and image resolution must be equal!\n");
		return 1;
	}

	// RGB is the only image format supported.
	if (g_imageMD.PixelFormat() != XN_PIXEL_FORMAT_RGB24)
	{
		printf("The device image format must be RGB24\n");
		return 1;
	}

	// Texture map init: round dimensions up to the next multiple of 512.
	g_nTexMapX = (((unsigned short)(g_depthMD.FullXRes()-1) / 512) + 1) * 512;
	g_nTexMapY = (((unsigned short)(g_depthMD.FullYRes()-1) / 512) + 1) * 512;
	g_pTexMap = (XnRGB24Pixel*)malloc(g_nTexMapX * g_nTexMapY * sizeof(XnRGB24Pixel));

	// OpenGL init
	glutInit(&argc, argv);
	glutInitDisplayMode(GLUT_RGB | GLUT_DOUBLE | GLUT_DEPTH);
	glutInitWindowSize(GL_WIN_SIZE_X, GL_WIN_SIZE_Y);
	glutCreateWindow ("OpenNI Simple Viewer");
	glutFullScreen();
	glutSetCursor(GLUT_CURSOR_NONE);

	glutKeyboardFunc(glutKeyboard);
	glutDisplayFunc(glutDisplay);
	glutIdleFunc(glutIdle);

	glDisable(GL_DEPTH_TEST);
	glEnable(GL_TEXTURE_2D);

	// Per frame code is in glutDisplay
	glutMainLoop();
	return 0;
}
示例9: start
int OpenniWrapper::start()
{
XnStatus rc;
EnumerationErrors errors;
hasRGB = 0;
hasDepth = 0;
rc = g_context.InitFromXmlFile(SAMPLE_XML_PATH, g_scriptNode, &errors);
if (rc == XN_STATUS_NO_NODE_PRESENT)
{
XnChar strError[1024];
errors.ToString(strError, 1024);
sprintf(buf, "%s\n", strError);
__android_log_print(ANDROID_LOG_DEBUG, "OPENNI", buf);
return (rc);
}
else if (rc != XN_STATUS_OK)
{
sprintf(buf, "Open failed: %s\n", xnGetStatusString(rc));
__android_log_print(ANDROID_LOG_DEBUG, "OPENNI", buf);
return (rc);
}
rc = g_context.FindExistingNode(XN_NODE_TYPE_DEPTH, g_depth);
hasDepth = 1;
if (rc != XN_STATUS_OK)
{
sprintf(buf, "No depth node exists! Check your XML.");
__android_log_print(ANDROID_LOG_DEBUG, "OPENNI", buf);
hasDepth = 0;
//return 1;
}
rc = g_context.FindExistingNode(XN_NODE_TYPE_IMAGE, g_image);
hasRGB=1;
if (rc != XN_STATUS_OK)
{
sprintf(buf, "No image node exists! Check your XML.");
__android_log_print(ANDROID_LOG_DEBUG, "OPENNI", buf);
hasRGB=0;
//return 1;
}
// rc = g_depth.SetIntProperty ("OutputFormat", 0);
// if (rc != XN_STATUS_OK)
// {
// sprintf(buf, "Cannot set depth generator property");
// __android_log_print(ANDROID_LOG_DEBUG, "OPENNI", buf);
// return 1;
// }
//for ASUS XTION ONLY
//see: http://dev.pointclouds.org/projects/pcl/wiki/MacOSX
rc = g_depth.SetIntProperty ("RegistrationType", 1);
if (rc != XN_STATUS_OK)
{
sprintf(buf, "Cannot set depth generator property: RegistrationType");
__android_log_print(ANDROID_LOG_DEBUG, "OPENNI", buf);
return 1;
}
//obviously Kinect doesn't support anything else!?
// XnMapOutputMode outputMode;
//
// outputMode.nXRes = 640;
// outputMode.nYRes = 480;
// outputMode.nFPS = 30;
////
// rc = g_depth.SetMapOutputMode(outputMode);
// if (rc != XN_STATUS_OK)
// {
// sprintf(buf, "Cannot set depth generator property");
// __android_log_print(ANDROID_LOG_DEBUG, "OPENNI", buf);
// return 1;
// }
////
// rc = g_image.SetMapOutputMode(outputMode);
// if (rc != XN_STATUS_OK)
// {
// sprintf(buf, "Cannot set image generator property");
// __android_log_print(ANDROID_LOG_DEBUG, "OPENNI", buf);
// return 1;
// }
// TODO: check error code
if(hasDepth)
g_depth.GetMetaData(g_depthMD);
if(hasRGB)
g_image.GetMetaData(g_imageMD);
// Hybrid mode isn't supported in this sample
// if (g_imageMD.FullXRes() != g_depthMD.FullXRes() || g_imageMD.FullYRes() != g_depthMD.FullYRes())
// {
// sprintf (buf, "The device depth and image resolution must be equal!\n");
// __android_log_print(ANDROID_LOG_DEBUG, "OPENNI", buf);
// return 1;
// }
//show some info of the device...
// sprintf(buf, "Image Resolution: %d x %d", g_imageMD.FullXRes(), g_imageMD.FullYRes());
// __android_log_print(ANDROID_LOG_DEBUG, "OPENNI", buf);
//.........这里部分代码省略.........
示例10: main
// Audio record/playback sample: choose play or record mode (from argv or a
// platform default), initialize OpenNI, collect every existing audio node,
// and dispatch to play()/record(). Releases all nodes before returning.
int main(int argc, char* argv[])
{
	XnStatus nRetVal = XN_STATUS_OK;
	Context context;
	EnumerationErrors errors;
	Mode mode;

	// default mode
#if XN_PLATFORM == XN_PLATFORM_WIN32
	mode = MODE_PLAY;
#else
	mode = MODE_RECORD;
#endif

	// check if mode was provided by user
	if (argc > 1)
	{
		if (strcmp(argv[1], "play") == 0)
		{
			mode = MODE_PLAY;
		}
		else if (strcmp(argv[1], "record") == 0)
		{
			mode = MODE_RECORD;
		}
		else
		{
			printUsage(argv[0]);
			return -1;
		}
	}

	// make sure mode is valid
#if XN_PLATFORM != XN_PLATFORM_WIN32
	if (mode == MODE_PLAY)
	{
		printf("Playing is not supported on this platform!\n");
		return -1;
	}
#endif

	ScriptNode scriptNode;
	// FIX: pass &errors so the XN_STATUS_NO_NODE_PRESENT branch below can
	// report the actual enumeration failures — the original never handed
	// `errors` to OpenNI, so ToString printed an empty report.
	nRetVal = context.InitFromXmlFile(SAMPLE_XML_PATH, scriptNode, &errors);
	if (nRetVal == XN_STATUS_NO_NODE_PRESENT)
	{
		XnChar strError[1024];
		errors.ToString(strError, 1024);
		printf("%s\n", strError);
		return (nRetVal);
	}
	else if (nRetVal != XN_STATUS_OK)
	{
		printf("Open failed: %s\n", xnGetStatusString(nRetVal));
		return (nRetVal);
	}

	// find audio nodes
	AudioGenerator gens[nSupportedNodes];
	XnUInt32 nNodes = 0;
	NodeInfoList list;
	nRetVal = context.EnumerateExistingNodes(list, XN_NODE_TYPE_AUDIO);
	CHECK_RC(nRetVal, "Enumerate audio nodes");
	for (NodeInfoList::Iterator it = list.Begin(); it != list.End(); ++it)
	{
		NodeInfo info = *it;
		nRetVal = info.GetInstance(gens[nNodes]);
		CHECK_RC(nRetVal, "Get audio node");
		nNodes++;
	}

	if (nNodes == 0)
	{
		printf("No audio node was found!\n");
		return -1;
	}

	// Dispatch to the selected mode; both return the final status.
	if (mode == MODE_PLAY)
	{
		nRetVal = play(context, gens, nNodes);
	}
	else if (mode == MODE_RECORD)
	{
		nRetVal = record(context, gens, nNodes);
	}

	// Release every slot (not just nNodes) so unused generators are freed too.
	scriptNode.Release();
	for (int i = 0; i < nSupportedNodes; ++i)
		gens[i].Release();
	context.Release();
	return nRetVal;
}
示例11: main
int main(int argc, char* argv[])
{
EnumerationErrors errors;
//rc = context.Init();
rc = context.InitFromXmlFile(strPathToXML,&errors);
if (rc == XN_STATUS_NO_NODE_PRESENT)
{
XnChar strError[1024];
errors.ToString(strError, 1024);
printf("%s\n", strError);
return (rc);
}
else if (rc != XN_STATUS_OK)
{
printf("Open failed: %s\n", xnGetStatusString(rc));
return (rc);
}
/* UNCOMMENT TO GET FILE READING
//rc = context.OpenFileRecording(strInputFile);
//CHECK_RC(rc, "Open input file");
//rc = context.FindExistingNode(XN_NODE_TYPE_PLAYER, player);
//CHECK_RC(rc, "Get player node"); */
rc = context.FindExistingNode(XN_NODE_TYPE_DEPTH, depth);
CHECK_RC(rc, "Find depth generator");
rc = context.FindExistingNode(XN_NODE_TYPE_IMAGE, image);
CHECK_RC(rc, "Find image generator");
depth.GetMetaData(depthMD);
image.GetMetaData(imageMD);
//rc = player.SetRepeat(FALSE);
XN_IS_STATUS_OK(rc);
//rc = player.GetNumFrames(image.GetName(), nNumFrames);
//CHECK_RC(rc, "Get player number of frames");
//printf("%d\n",nNumFrames);
//rc = player.GetNumFrames(depth.GetName(), nNumFrames);
//CHECK_RC(rc, "Get player number of frames");
//printf("%d\n",nNumFrames);
// Hybrid mode isn't supported
if (imageMD.FullXRes() != depthMD.FullXRes() || imageMD.FullYRes() != depthMD.FullYRes())
{
printf ("The device depth and image resolution must be equal!\n");
return 1;
}
// RGB is the only image format supported.
if (imageMD.PixelFormat() != XN_PIXEL_FORMAT_RGB24)
{
printf("The device image format must be RGB24\n");
return 1;
}
avi = cvCreateVideoWriter(strOutputFile, 0, 30, cvSize(640,480), TRUE);
depthMetersMat = cvCreateMat(480, 640, CV_16UC1);
kinectDepthImage = cvCreateImage( cvSize(640,480),16,1 );
depthMetersMat2 = cvCreateMat(480, 640, CV_16UC1);
kinectDepthImage2 = cvCreateImage( cvSize(640,480),16,1 );
colorArr[0] = cv::Mat(imageMD.YRes(),imageMD.XRes(),CV_8U);
colorArr[1] = cv::Mat(imageMD.YRes(),imageMD.XRes(),CV_8U);
colorArr[2] = cv::Mat(imageMD.YRes(),imageMD.XRes(),CV_8U);
//prepare_for_face_detection();
int b;
int g;
int r;
while ((rc = image.WaitAndUpdateData()) != XN_STATUS_EOF && (rc = depth.WaitAndUpdateData()) != XN_STATUS_EOF) {
if (rc != XN_STATUS_OK) {
printf("Read failed: %s\n", xnGetStatusString(rc));
break;
}
depth.GetMetaData(depthMD);
image.GetMetaData(imageMD);
//XnUInt32 a;
//a = g_imageMD.FPS;
printf("%d\n",imageMD.FrameID());
//a = g_depthMD.DataSize();
//printf("%d\n",a);
pDepth = depthMD.Data();
pImageRow = imageMD.RGB24Data();
for (unsigned int y=0; y<imageMD.YRes(); y++) {
pPixel = pImageRow;
uchar* Bptr = colorArr[0].ptr<uchar>(y);
//.........这里部分代码省略.........
示例12: main
//////////////////// Entry point ////////////////////
int main(int argc, char* argv[])
{
depthmask_for_mesh = cvCreateImage(MESH_SIZE, IPL_DEPTH_8U, 1);
markerSize.width = -1;
markerSize.height = -1;
//init OpenNI
EnumerationErrors errors;
switch (XnStatus rc = niContext.InitFromXmlFile(KINECT_CONFIG_FILENAME, &errors)) {
case XN_STATUS_OK:
break;
case XN_STATUS_NO_NODE_PRESENT:
XnChar strError[1024]; errors.ToString(strError, 1024);
printf("%s\n", strError);
return rc; break;
default:
printf("Open failed: %s\n", xnGetStatusString(rc));
return rc;
}
//set camera parameter
capture = new Camera(0, CAPTURE_SIZE, CAMERA_PARAMS_FILENAME);
RegistrationParams = scaleParams(capture->getParameters(), double(REGISTRATION_SIZE.width)/double(CAPTURE_SIZE.width));
//init parameter for rendering
osg_init(calcProjection(RegistrationParams, capture->getDistortion(), REGISTRATION_SIZE));
//for Kinect view
loadKinectParams(KINECT_PARAMS_FILENAME, &kinectParams, &kinectDistort);
kinectDistort =0;
kinectParams->data.db[2]=320.0;
kinectParams->data.db[5]=240.0;
//setting kinect context
niContext.FindExistingNode(XN_NODE_TYPE_DEPTH, g_depth);
niContext.FindExistingNode(XN_NODE_TYPE_IMAGE, g_image);
g_depth.GetMirrorCap().SetMirror(false);
g_depth.GetAlternativeViewPointCap().SetViewPoint(g_image);
//registration
kinectReg = new RegistrationOPIRA(new OCVSurf());
kinectReg->addResizedMarker(MARKER_FILENAME, 400);
//physics
m_world = new bt_ARMM_world();
ground_grid = new float[GRID_SIZE];
for (int i =0;i < GRID_SIZE; i++) {
ground_grid[i] = 0;
}
#ifdef SIM_PARTICLES
voxel_grid = new float[1200];
for (int i =0;i < 1200; i++) {
voxel_grid[i] = 0;
}
#endif
//controls
KeyboardController *kc = new KeyboardController(m_world);
XboxController *xc = new XboxController(m_world);
loadKinectTransform(KINECT_TRANSFORM_FILENAME);
#ifdef USE_ARMM_VRPN
//----->Server part
m_Connection = new vrpn_Connection_IP();
ARMM_server = new ARMM_Communicator(m_Connection );
//Open the imager server and set up channel zero to send our data.
//if ( (ARMM_img_server = new vrpn_Imager_Server("ARMM_Image", m_Connection, MESH_SIZE.width, MESH_SIZE.height)) == NULL) {
// fprintf(stderr, "Could not open imager server\n");
// return -1;
//}
//if ( (channel_id = ARMM_img_server->add_channel("Grid")) == -1) {
// fprintf(stderr, "Could not add channel\n");
// return -1;
//}
ARMM_server->SetObjectsData(&(m_world->Objects_Body));
ARMM_server->SetHandsData(&(m_world->HandObjectsArray));
cout << "Created VRPN server." << endl;
//<-----
#ifdef USE_ARMM_VRPN_RECEIVER //----->Receiver part
ARMM_sever_receiver = new vrpn_Tracker_Remote (ARMM_CLIENT_IP);
ARMM_sever_receiver->register_change_handler(NULL, handle_object);
#endif //<-----
#endif
#ifdef USE_SKIN_SEGMENTATION //Skin color look up
_HandRegion.LoadSkinColorProbTable();
#endif
#ifdef USE_OPTICAL_FLOW
prev_gray = cvCreateImage(cvSize(OPFLOW_SIZE.width, OPFLOW_SIZE.height), IPL_DEPTH_8U, 1);
curr_gray = cvCreateImage(cvSize(OPFLOW_SIZE.width, OPFLOW_SIZE.height), IPL_DEPTH_8U, 1);
flow_capture = new FlowCapture();
flow_capture->Init();
#endif
//.........这里部分代码省略.........
示例13: xnInit
//----------------------------------------------------
// OpenNI-related initialization
//----------------------------------------------------
// Initialize the OpenNI context from the XML config, fetch the depth /
// image / user generators, register user callbacks, validate stream
// compatibility and allocate all rendering buffers. Exits the process on
// any fatal failure.
void xnInit(void){
	XnStatus rc;

	EnumerationErrors errors;
	rc = g_context.InitFromXmlFile(SAMPLE_XML_PATH, &errors);
	if (rc == XN_STATUS_NO_NODE_PRESENT){
		// A required node is missing: print the enumeration errors and abort.
		XnChar strError[1024];
		errors.ToString(strError, 1024);
		printf("%s\n", strError);
		exit(1);
	}else if (rc != XN_STATUS_OK){
		printf("Open failed: %s\n", xnGetStatusString(rc));
		exit(1);
	}

	//playerInit();

	rc = xnFPSInit(&g_xnFPS, 180); // initialize the FPS counter
	//CHECK_RC(rc, "FPS Init");

	// Fetch the depth / image / user generators created from the XML.
	rc = g_context.FindExistingNode(XN_NODE_TYPE_DEPTH, g_depth);
	errorCheck(rc, "g_depth"); // error check
	rc = g_context.FindExistingNode(XN_NODE_TYPE_IMAGE, g_image);
	errorCheck(rc, "g_image");
	rc = g_context.FindExistingNode(XN_NODE_TYPE_USER, g_user);
	//rc = g_user.Create(g_context);
	errorCheck(rc, "g_user");

	// Verify that the user generator supports skeleton tracking.
	if (!g_user.IsCapabilitySupported(XN_CAPABILITY_SKELETON)) {
		//throw std::runtime_error("ユーザー検出をサポートしてません");
		cout << "ユーザー検出をサポートしてません" << endl;
		exit(1);
	}

	// Recorder setup (disabled)
	//rc = setRecorder(g_recorder, rc);

	// Register user found / lost callbacks.
	XnCallbackHandle userCallbacks;
	g_user.RegisterUserCallbacks(UserDetected, UserLost, NULL, userCallbacks);

	// First grab of depth / image / user-pixel metadata.
	g_depth.GetMetaData(g_depthMD);
	g_image.GetMetaData(g_imageMD);
	g_user.GetUserPixels(0, g_sceneMD);

	// Hybrid mode isn't supported in this sample
	// Depth and image maps must have identical resolution.
	if (g_imageMD.FullXRes() != g_depthMD.FullXRes() || g_imageMD.FullYRes() != g_depthMD.FullYRes()){
		printf ("The device depth and image resolution must be equal!\n");
		exit(1);
	}

	// RGB is the only image format supported.
	if (g_imageMD.PixelFormat() != XN_PIXEL_FORMAT_RGB24){
		printf("The device image format must be RGB24\n");
		exit(1);
	}

	// Texture map init
	// Round the fullscreen texture dimensions up to the next multiple of 512.
	g_nTexMapX = (((unsigned short)(g_depthMD.FullXRes() - 1) / 512) + 1) * 512; // e.g. 640 -> 1024
	g_nTexMapY = (((unsigned short)(g_depthMD.FullYRes() - 1) / 512) + 1) * 512; // e.g. 480 -> 512
	g_pTexMap = (XnRGB24Pixel*)malloc(g_nTexMapX * g_nTexMapY * sizeof(XnRGB24Pixel)); // color buffer covering the whole texture

	// Working buffers sized to one Kinect frame.
	// NOTE(review): malloc results are unchecked - confirm acceptable for this sample.
	g_pPoint = (XnPoint3D*)malloc(KINECT_IMAGE_SIZE * sizeof(XnPoint3D)); // 3-D point buffer
	g_pBackTex = (XnRGB24Pixel*)malloc(KINECT_IMAGE_SIZE * sizeof(XnRGB24Pixel)); // background image buffer
	g_pBackPoint = (XnPoint3D*)malloc(KINECT_IMAGE_SIZE * sizeof(XnPoint3D)); // background 3-D point buffer
	g_pBackDepth = (XnDepthPixel*)malloc(KINECT_IMAGE_SIZE * sizeof(XnDepthPixel)); // background depth buffer
}
示例14: if
const Int32 NInput::init(void)
{
XnStatus xn_rs = XN_STATUS_OK;
EnumerationErrors errors;
// hardware initializing
xn_rs = context.Init();
if (xn_rs == XN_STATUS_NO_NODE_PRESENT)
{
XnChar strError[1024];
errors.ToString(strError, 1024);
lasterr_str = reinterpret_cast<TCHAR*>(strError);
return (xn_rs);
}
else if (xn_rs != XN_STATUS_OK)
{
lasterr_str = Str(_T("Open failed: "))
+ Str(reinterpret_cast<const TCHAR*>(xnGetStatusString(xn_rs)));
return (xn_rs);
}
// mumble
XnLicense license;
xnOSStrCopy(license.strVendor, "PrimeSense", XN_MAX_NAME_LENGTH);
xnOSStrCopy(license.strKey, "0KOIk2JeIBYClPWVnMoRKn5cdY4=",
XN_MAX_LICENSE_LENGTH);
CHECK_RC(context.AddLicense(license), "Added license");
#ifdef MOTOR
// motor initializing
#define VID_MICROSOFT 0x45e
#define PID_NUI_MOTOR 0x02b0
// CHECK_RC(xnUSBInit(), "USB init");
XN_USB_DEV_HANDLE dev;
CHECK_RC(xnUSBOpenDevice(VID_MICROSOFT, PID_NUI_MOTOR, NULL, NULL, &dev),
"USB Open");
uChar empty[0x1];
int angle = 1;
CHECK_RC(xnUSBSendControl(dev,
XN_USB_CONTROL_TYPE_VENDOR,
0x31, (XnUInt16)angle,
0x0, empty, 0x0, 0), "USB sendcontrol");
CHECK_RC(xnUSBCloseDevice(dev), "USB Close");
#endif
// base initializing
CHECK_RC(gen_depth.Create(context), "Find depth generator");
CHECK_RC(gen_image.Create(context), "Find image generator");
// recognition initializing
CHECK_RC(gen_user.Create(context), "Find user generator");
CHECK_RC(gen_gesture.Create(context), "Find gesture generator");
CHECK_RC(gen_hands.Create(context), "Find hand generator");
// user initializing
XnCallbackHandle usr_cbh;
gen_user.RegisterUserCallbacks(cb_usr_found, cb_usr_lost, this, usr_cbh);
// gestures initializing
XnCallbackHandle gesture_cb;
gen_gesture.RegisterGestureCallbacks(cb_gst_recognized, cb_gst_beginning,
this, gesture_cb);
// hands initializing
XnCallbackHandle hand_cb;
gen_hands.RegisterHandCallbacks(cb_hnd_begin, cb_hnd_continue,
cb_hnd_finish, this, hand_cb);
xn_rs = context.StartGeneratingAll();
CHECK_RC(xn_rs, "Start generating all");
// The first getting of metadatas
gen_depth.GetMetaData(depthMD);
gen_image.GetMetaData(imageMD);
gen_user.GetUserPixels(0, sceneMD);
// gesture
add_ordinal_gestures();
CHECK_RC(xn_rs, "Start generating gesture");
// Hybrid mode isn't supported in this sample
if (imageMD.FullXRes() != depthMD.FullXRes()
|| imageMD.FullYRes() != depthMD.FullYRes())
{
lasterr_str =
_T("The device depth and image resolution must be equal!\n");
return 1;
}
// RGB is the only image format supported.
if (imageMD.PixelFormat() != XN_PIXEL_FORMAT_RGB24)
{
lasterr_str = _T("The device image format must be RGB24\n");
return 1;
}
// Calibrate rgb and depth
gen_depth.GetAlternativeViewPointCap().SetViewPoint(gen_image);
//.........这里部分代码省略.........
示例15: main
int main(int argc, char* argv[])
{
XnBool bChooseDevice = false;
const char* csRecordingName = NULL;
if (argc > 1)
{
if (strcmp(argv[1], "-devices") == 0)
{
bChooseDevice = TRUE;
}
else
{
csRecordingName = argv[1];
}
}
if (csRecordingName != NULL)
{
// check if running from a different directory. If so, we need to change directory
// to the real one, so that path to INI file will be OK (for log initialization, for example)
if (0 != changeDirectory(argv[0]))
{
return(ERR_DEVICE);
}
}
// Xiron Init
XnStatus rc = XN_STATUS_OK;
EnumerationErrors errors;
if (csRecordingName != NULL)
{
xnLogInitFromXmlFile(SAMPLE_XML_PATH);
rc = openDeviceFile(argv[1]);
}
else if (bChooseDevice)
{
rc = openDeviceFromXmlWithChoice(SAMPLE_XML_PATH, errors);
}
else
{
rc = openDeviceFromXml(SAMPLE_XML_PATH, errors);
}
if (rc == XN_STATUS_NO_NODE_PRESENT)
{
XnChar strError[1024];
errors.ToString(strError, 1024);
printf("%s\n", strError);
closeSample(ERR_DEVICE);
return (rc);
}
else if (rc != XN_STATUS_OK)
{
printf("Open failed: %s\n", xnGetStatusString(rc));
closeSample(ERR_DEVICE);
}
captureInit();
statisticsInit();
//reshaper.zNear = 1;
//reshaper.zFar = 100;
//glut_add_interactor(&reshaper);
//cb.mouse_function = MouseCallback;
//cb.motion_function = MotionCallback;
//cb.passive_motion_function = MotionCallback;
//cb.keyboard_function = KeyboardCallback;
//cb.reshape_function = ReshapeCallback;
//glut_add_interactor(&cb);
glutInit(&argc, argv);
glutInitDisplayString("stencil double rgb");
glutInitWindowSize(WIN_SIZE_X, WIN_SIZE_Y);
glutCreateWindow("OpenNI Viewer");
//glutFullScreen();
glutSetCursor(GLUT_CURSOR_NONE);
glutMouseFunc(MouseCallback);
glutMotionFunc(MotionCallback);
init_opengl();
glutIdleFunc(IdleCallback);
glutDisplayFunc(drawFunctionMain);
glutPassiveMotionFunc(MotionCallback);
//createKeyboardMap();
//createMenu();
atexit(onExit);
//Use built in hand tracker class to handle all hand movements and gestures
HandTracker mainHandTracker(g_Context);
m_HandTracker = &mainHandTracker;
drawInit(m_HandTracker);
//.........这里部分代码省略.........