This article collects typical usage examples of the C++ method NodeInfoList::Begin. If you are wondering what NodeInfoList::Begin does, how to call it, or what real-world uses look like, the curated code samples here may help. You can also explore further usage examples of its containing class, NodeInfoList.
A total of 15 code examples of NodeInfoList::Begin are shown below, sorted by popularity by default. You can upvote the examples you like or find useful; your feedback helps the system recommend better C++ code samples.
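Before diving into the examples, here is a minimal, self-contained sketch of the common iteration pattern: enumerate production trees through the Context, then walk the returned NodeInfoList with Begin()/End(). This is an illustrative sketch rather than one of the examples below; it assumes the OpenNI 1.x C++ wrapper (XnCppWrapper.h) and at least one installed depth-capable module.
// Minimal sketch: list all enumerated depth production trees.
// Assumes OpenNI 1.x; error handling is reduced to simple status checks.
#include <XnCppWrapper.h>
#include <cstdio>

int main()
{
    xn::Context context;
    XnStatus status = context.Init();
    if (status != XN_STATUS_OK)
    {
        printf("Init failed: %s\n", xnGetStatusString(status));
        return 1;
    }
    // Enumerate all production trees that can produce depth data.
    xn::NodeInfoList list;
    status = context.EnumerateProductionTrees(XN_NODE_TYPE_DEPTH, NULL, list, NULL);
    if (status != XN_STATUS_OK)
    {
        printf("Enumerate failed: %s\n", xnGetStatusString(status));
        return 1;
    }
    // Begin() returns an iterator to the first NodeInfo; End() is one past the last.
    int i = 0;
    for (xn::NodeInfoList::Iterator it = list.Begin(); it != list.End(); ++it, ++i)
    {
        xn::NodeInfo info = *it;
        printf("[%d] %s (vendor: %s)\n", i,
               info.GetDescription().strName,
               info.GetDescription().strVendor);
    }
    context.Release();
    return 0;
}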
Example 1: ICLException
// Creates a DepthGenerator from Context
OpenNIDepthGenerator::OpenNIDepthGenerator(int num)
: m_DepthGenerator(NULL), m_Options(NULL)
{
XnStatus status = XN_STATUS_ERROR;
m_DepthGenerator = new DepthGenerator();
NodeInfoList l;
// enumerate depth generators
OpenNIContext::EnumerateProductionTrees(XN_NODE_TYPE_DEPTH, NULL , l);
int i = 0;
// look for generator according to number num
for (NodeInfoList::Iterator it = l.Begin(); it != l.End(); ++it, ++i){
if (i == num){
NodeInfo ni = *it;
status = OpenNIContext::CreateProductionTree(ni, *m_DepthGenerator);
}
}
if(i <= num){ // not enough generators
std::ostringstream s;
s << "Demanded depth generator nr " << num
<< " but only " << i << " available.";
DEBUG_LOG(s.str());
throw ICLException(s.str());
}
if (status != XN_STATUS_OK){ // error while creating depth generator
std::ostringstream s;
s << "Generator init error " << xnGetStatusString(status);
DEBUG_LOG(s.str());
throw ICLException(s.str());
}
// create GeneratorOptions for DepthGenerator
m_Options = new DepthGeneratorOptions(m_DepthGenerator);
m_DepthGenerator -> StartGenerating();
}
Example 2: setup
//----------------------------------------
bool ofxOpenNICapture::setup(ofxOpenNI & _context, string filename, XnCodecID depthFormat, XnCodecID imageFormat, XnCodecID irFormat, XnCodecID audioFormat){
context = &_context;
csFileName = ofToDataPath(filename);
nodes[CAPTURE_DEPTH_NODE].captureFormat = depthFormat;
nodes[CAPTURE_IMAGE_NODE].captureFormat = imageFormat;
nodes[CAPTURE_IR_NODE].captureFormat = irFormat;
nodes[CAPTURE_AUDIO_NODE].captureFormat = audioFormat;
XnStatus nRetVal = XN_STATUS_OK;
NodeInfoList recordersList;
nRetVal = context->getXnContext().EnumerateProductionTrees(XN_NODE_TYPE_RECORDER, NULL, recordersList);
START_CAPTURE_CHECK_RC(nRetVal, "Enumerate recorders");
// take first
NodeInfo chosen = *recordersList.Begin();
pRecorder = new Recorder;
nRetVal = context->getXnContext().CreateProductionTree(chosen, *pRecorder);
START_CAPTURE_CHECK_RC(nRetVal, "Create recorder");
nRetVal = pRecorder->SetDestination(XN_RECORD_MEDIUM_FILE, csFileName.c_str());
START_CAPTURE_CHECK_RC(nRetVal, "Set output file");
return true;
}
Example 3: OpenCommon
void SimKinect::OpenCommon()
{
XnStatus nRetVal = XN_STATUS_OK;
bIsDepthOn = false;
bIsImageOn = false;
bIsIROn = false;
bIsAudioOn = false;
bIsPlayerOn = false;
bIsUserOn = false;
NodeInfoList list;
nRetVal = context.EnumerateExistingNodes(list);
if (nRetVal == XN_STATUS_OK)
{
for (NodeInfoList::Iterator it = list.Begin(); it != list.End(); ++it)
{
switch ((*it).GetDescription().Type)
{
case XN_NODE_TYPE_DEVICE:
(*it).GetInstance(device);
break;
case XN_NODE_TYPE_DEPTH:
bIsDepthOn = true;
(*it).GetInstance(depth_generator);
break;
case XN_NODE_TYPE_IMAGE:
bIsImageOn = true;
(*it).GetInstance(color_generator);
break;
case XN_NODE_TYPE_IR:
bIsIROn = true;
(*it).GetInstance(ir_generator);
break;
case XN_NODE_TYPE_AUDIO:
bIsAudioOn = true;
(*it).GetInstance(audio_generator);
break;
case XN_NODE_TYPE_PLAYER:
bIsPlayerOn = true;
(*it).GetInstance(player);
break;
case XN_NODE_TYPE_USER :
bIsUserOn = true;
(*it).GetInstance(user_generator);
break;
}
}
}
XnCallbackHandle hDummy;
context.RegisterToErrorStateChange(onErrorStateChanged, NULL, hDummy);
}
Example 4: captureOpenWriteDevice
bool captureOpenWriteDevice()
{
XnStatus nRetVal = XN_STATUS_OK;
NodeInfoList recordersList;
nRetVal = g_Context.EnumerateProductionTrees(XN_NODE_TYPE_RECORDER, NULL, recordersList);
START_CAPTURE_CHECK_RC(nRetVal, "Enumerate recorders");
// take first
NodeInfo chosen = *recordersList.Begin();
nRetVal = g_Context.CreateProductionTree(chosen);
START_CAPTURE_CHECK_RC(nRetVal, "Create recorder");
g_Capture.pRecorder = new Recorder;
nRetVal = chosen.GetInstance(*g_Capture.pRecorder);
START_CAPTURE_CHECK_RC(nRetVal, "Get recorder instance");
nRetVal = g_Capture.pRecorder->SetDestination(XN_RECORD_MEDIUM_FILE, g_Capture.csFileName);
START_CAPTURE_CHECK_RC(nRetVal, "Set output file");
if (getDevice() != NULL)
{
nRetVal = g_Capture.pRecorder->AddNodeToRecording(*getDevice(), XN_CODEC_UNCOMPRESSED);
START_CAPTURE_CHECK_RC(nRetVal, "add device node");
}
if (isDepthOn() && (g_Capture.DepthFormat != CODEC_DONT_CAPTURE))
{
nRetVal = g_Capture.pRecorder->AddNodeToRecording(*getDepthGenerator(), g_Capture.DepthFormat);
START_CAPTURE_CHECK_RC(nRetVal, "add depth node");
}
if (isImageOn() && (g_Capture.ImageFormat != CODEC_DONT_CAPTURE))
{
nRetVal = g_Capture.pRecorder->AddNodeToRecording(*getImageGenerator(), g_Capture.ImageFormat);
START_CAPTURE_CHECK_RC(nRetVal, "add image node");
}
if (isIROn() && (g_Capture.IRFormat != CODEC_DONT_CAPTURE))
{
nRetVal = g_Capture.pRecorder->AddNodeToRecording(*getIRGenerator(), g_Capture.IRFormat);
START_CAPTURE_CHECK_RC(nRetVal, "add IR stream");
}
if (isAudioOn() && (g_Capture.AudioFormat != CODEC_DONT_CAPTURE))
{
nRetVal = g_Capture.pRecorder->AddNodeToRecording(*getAudioGenerator(), g_Capture.AudioFormat);
START_CAPTURE_CHECK_RC(nRetVal, "add Audio stream");
}
return true;
}
Example 5: openCommon
void openCommon()
{
XnStatus nRetVal = XN_STATUS_OK;
g_bIsDepthOn = false;
g_bIsImageOn = false;
g_bIsIROn = false;
g_bIsAudioOn = false;
g_bIsPlayerOn = false;
NodeInfoList list;
nRetVal = g_Context.EnumerateExistingNodes(list);
if (nRetVal == XN_STATUS_OK)
{
for (NodeInfoList::Iterator it = list.Begin(); it != list.End(); ++it)
{
switch ((*it).GetDescription().Type)
{
case XN_NODE_TYPE_DEVICE:
(*it).GetInstance(g_Device);
break;
case XN_NODE_TYPE_DEPTH:
g_bIsDepthOn = true;
(*it).GetInstance(g_Depth);
break;
case XN_NODE_TYPE_IMAGE:
g_bIsImageOn = true;
(*it).GetInstance(g_Image);
break;
case XN_NODE_TYPE_IR:
g_bIsIROn = true;
(*it).GetInstance(g_IR);
break;
case XN_NODE_TYPE_AUDIO:
g_bIsAudioOn = true;
(*it).GetInstance(g_Audio);
break;
case XN_NODE_TYPE_PLAYER:
g_bIsPlayerOn = true;
(*it).GetInstance(g_Player);
}
}
}
XnCallbackHandle hDummy;
g_Context.RegisterToErrorStateChange(onErrorStateChanged, NULL, hDummy);
initConstants();
readFrame();
}
Example 6: captureOpenWriteDevice
bool captureOpenWriteDevice()
{
XnStatus nRetVal = XN_STATUS_OK;
NodeInfoList recordersList;
nRetVal = g_Context.EnumerateProductionTrees(XN_NODE_TYPE_RECORDER, NULL, recordersList);
START_CAPTURE_CHECK_RC(nRetVal, "Enumerate recorders");
// take first
NodeInfo chosen = *recordersList.Begin();
g_Capture.pRecorder = new Recorder;
nRetVal = g_Context.CreateProductionTree(chosen, *g_Capture.pRecorder);
START_CAPTURE_CHECK_RC(nRetVal, "Create recorder");
nRetVal = g_Capture.pRecorder->SetDestination(XN_RECORD_MEDIUM_FILE, g_Capture.csFileName);
START_CAPTURE_CHECK_RC(nRetVal, "Set output file");
return true;
}
Example 7: Connect
bool KinectCamera::Connect(int index)
{
m_index = index;
XnStatus rc;
EnumerationErrors errors;
rc = context.Init();
NodeInfoList list;
rc = context.EnumerateProductionTrees(XN_NODE_TYPE_DEVICE, NULL, list, &errors);
int i = 0;
for (NodeInfoList::Iterator it = list.Begin(); it != list.End(); ++it, ++i)
{
if(i == index)
{
printf("%d\n", i);
NodeInfo deviceNodeInfo = *it;
context.CreateProductionTree(deviceNodeInfo);
rc |= depth.Create(context);
rc |= image.Create(context);
XnMapOutputMode m;
m.nXRes = 640;
m.nYRes = 480;
image.SetMapOutputMode(m);
rc |= context.StartGeneratingAll();
break;
}
}
if(i != index)
{
return false;
}
printf("Success: %d\n", rc);
return true;
}
Example 8: openCommon
/**
 * Initializes the information nodes coming from the Kinect
*
*/
void openCommon(){
XnStatus nRetVal = XN_STATUS_OK;
NodeInfoList list;
nRetVal = g_Context.EnumerateExistingNodes(list);
if (nRetVal == XN_STATUS_OK){
for (NodeInfoList::Iterator it = list.Begin(); it != list.End(); ++it){
switch ((*it).GetDescription().Type){
case XN_NODE_TYPE_DEPTH:
(*it).GetInstance(g_DepthGenerator);
break;
case XN_NODE_TYPE_IMAGE:
(*it).GetInstance(g_ImageGenerator);
break;
case XN_NODE_TYPE_USER:
(*it).GetInstance(g_UserGenerator);
break;
case XN_NODE_TYPE_PLAYER:
(*it).GetInstance(g_Player);
break;
}
}
}
}//openCommon
Example 9: UpdateFrameSync
XnStatus GeneratorWatcher::UpdateFrameSync()
{
// go over all nodes, and find the frame synced one
Context context;
m_generator.GetContext(context);
NodeInfoList nodes;
XnStatus nRetVal = context.EnumerateExistingNodes(nodes);
XN_IS_STATUS_OK(nRetVal);
for (NodeInfoList::Iterator it = nodes.Begin(); it != nodes.End(); ++it)
{
NodeInfo info = *it;
// make sure this is a generator
if (xnIsTypeDerivedFrom(info.GetDescription().Type, XN_NODE_TYPE_GENERATOR))
{
Generator otherGen;
nRetVal = info.GetInstance(otherGen);
XN_IS_STATUS_OK(nRetVal);
if (m_generator.GetFrameSyncCap().IsFrameSyncedWith(otherGen))
{
nRetVal = NotifyStringPropChanged(XN_PROP_FRAME_SYNCED_WITH, otherGen.GetName());
XN_IS_STATUS_OK(nRetVal);
return XN_STATUS_OK;
}
}
}
// if we got here, we're not frame synced
nRetVal = NotifyStringPropChanged(XN_PROP_FRAME_SYNCED_WITH, "");
XN_IS_STATUS_OK(nRetVal);
return XN_STATUS_OK;
}
Example 10: main
int main(int argc, char **argv)
{
XnStatus nRetVal = XN_STATUS_OK;
xn::EnumerationErrors errors;
if( USE_RECORED_DATA ){
g_Context.Init();
g_Context.OpenFileRecording(RECORD_FILE_PATH);
xn::Player player;
// Get the Player node
nRetVal = g_Context.FindExistingNode(XN_NODE_TYPE_PLAYER, player);
CHECK_RC(nRetVal, "Find player");
LOG_D("PlaybackSpeed: %d", player.GetPlaybackSpeed());
xn::NodeInfoList nodeList;
player.EnumerateNodes(nodeList);
for( xn::NodeInfoList::Iterator it = nodeList.Begin();
it != nodeList.End(); ++it){
if( (*it).GetDescription().Type == XN_NODE_TYPE_IMAGE ){
nRetVal = g_Context.FindExistingNode(XN_NODE_TYPE_IMAGE, g_ImageGenerator);
CHECK_RC(nRetVal, "Find image node");
LOG_D("%s", "ImageGenerator created.");
}
else if( (*it).GetDescription().Type == XN_NODE_TYPE_DEPTH ){
nRetVal = g_Context.FindExistingNode(XN_NODE_TYPE_DEPTH, g_DepthGenerator);
CHECK_RC(nRetVal, "Find depth node");
LOG_D("%s", "DepthGenerator created.");
}
else{
LOG_D("%s %s %s", ::xnProductionNodeTypeToString((*it).GetDescription().Type ),
(*it).GetInstanceName(),
(*it).GetDescription().strName);
}
}
}
else{
LOG_I("Reading config from: '%s'", CONFIG_XML_PATH);
nRetVal = g_Context.InitFromXmlFile(CONFIG_XML_PATH, g_scriptNode, &errors);
if (nRetVal == XN_STATUS_NO_NODE_PRESENT){
XnChar strError[1024];
errors.ToString(strError, 1024);
LOG_E("%s\n", strError);
return (nRetVal);
}
else if (nRetVal != XN_STATUS_OK){
LOG_E("Open failed: %s", xnGetStatusString(nRetVal));
return (nRetVal);
}
nRetVal = g_Context.FindExistingNode(XN_NODE_TYPE_DEPTH, g_DepthGenerator);
CHECK_RC(nRetVal,"No depth");
// Get the ImageGenerator
nRetVal = g_Context.FindExistingNode(XN_NODE_TYPE_IMAGE, g_ImageGenerator);
CHECK_RC(nRetVal, "Find image generator");
}
// Get the UserGenerator
nRetVal = g_Context.FindExistingNode(XN_NODE_TYPE_USER, g_UserGenerator);
if(nRetVal!=XN_STATUS_OK){
nRetVal = g_UserGenerator.Create(g_Context);
CHECK_RC(nRetVal, "Create user generator");
}
XnCallbackHandle hUserCallbacks, hCalibrationStart, hCalibrationComplete, hPoseDetected;
if (!g_UserGenerator.IsCapabilitySupported(XN_CAPABILITY_SKELETON)){
LOG_E("%s", "Supplied user generator doesn't support skeleton");
return 1;
}
nRetVal = g_UserGenerator.RegisterUserCallbacks(User_NewUser, User_LostUser, NULL, hUserCallbacks);
CHECK_RC(nRetVal, "Register to user callbacks");
g_SkeletonCap = g_UserGenerator.GetSkeletonCap();
nRetVal = g_SkeletonCap.RegisterToCalibrationStart(UserCalibration_CalibrationStart, NULL, hCalibrationStart);
CHECK_RC(nRetVal, "Register to calibration start");
nRetVal = g_SkeletonCap.RegisterToCalibrationComplete(UserCalibration_CalibrationComplete, NULL, hCalibrationComplete);
CHECK_RC(nRetVal, "Register to calibration complete");
if (g_SkeletonCap.NeedPoseForCalibration()){
g_bNeedPose = TRUE;
if (!g_UserGenerator.IsCapabilitySupported(XN_CAPABILITY_POSE_DETECTION)){
LOG_E("%s", "Pose required, but not supported");
return 1;
}
nRetVal = g_UserGenerator.GetPoseDetectionCap().RegisterToPoseDetected(UserPose_PoseDetected, NULL, hPoseDetected);
CHECK_RC(nRetVal, "Register to Pose Detected");
g_SkeletonCap.GetCalibrationPose(g_strPose);
}
g_SkeletonCap.SetSkeletonProfile(XN_SKEL_PROFILE_ALL);
nRetVal = g_Context.StartGeneratingAll();
CHECK_RC(nRetVal, "StartGenerating");
//......... part of the code omitted here .........
Example 11: main
int main(int argc, char* argv[])
{
XnStatus rc;
EnumerationErrors errors;
Context b_context;
rc = b_context.Init();
NodeInfoList list;
rc = b_context.EnumerateProductionTrees(XN_NODE_TYPE_DEVICE, NULL, list, &errors);
int i = 0;
for (NodeInfoList::Iterator it = list.Begin(); it != list.End(); ++it, ++i)
{
printf("making node %d\n", i);
Context l_context;
l_context.Init();
//ScriptNode node;
rc = l_context.Init();
NodeInfo deviceNodeInfo = *it;
l_context.CreateProductionTree(deviceNodeInfo);
DepthGenerator l_depth;
ImageGenerator l_image;
rc = l_depth.Create(l_context);
rc = l_image.Create(l_context);
DepthMetaData l_depthMD;
ImageMetaData l_imageMD;
l_depth.GetMetaData(l_depthMD);
l_image.GetMetaData(l_imageMD);
rc = l_context.StartGeneratingAll();
g_nTexMapX = (((unsigned short)(l_depthMD.FullXRes()-1) / 512) + 1) * 512;
g_nTexMapY = (((unsigned short)(l_imageMD.FullYRes()-1) / 512) + 1) * 512;
g_pTexMaps.push_back((XnRGB24Pixel*)malloc(g_nTexMapX * g_nTexMapY * sizeof(XnRGB24Pixel)));
g_images.push_back(l_image);
g_depths.push_back(l_depth);
g_contexts.push_back(l_context);
}
// OpenGL init
glutInit(&argc, argv);
glutInitDisplayMode(GLUT_RGB | GLUT_DOUBLE | GLUT_DEPTH);
glutInitWindowSize(GL_WIN_SIZE_X, GL_WIN_SIZE_Y);
glutCreateWindow ("OpenNI Simple Viewer");
//glutFullScreen();
glutSetCursor(GLUT_CURSOR_NONE);
glutKeyboardFunc(glutKeyboard);
glutDisplayFunc(glutDisplay);
glutIdleFunc(glutIdle);
glDisable(GL_DEPTH_TEST);
glEnable(GL_TEXTURE_2D);
// Per frame code is in glutDisplay
glutMainLoop();
return 0;
}
Example 12: openCommon
//----------------------------------------
void ofxOpenNI::openCommon() {
XnStatus nRetVal = XN_STATUS_OK;
g_bIsDepthOn = false;
g_bIsImageOn = false;
g_bIsIROn = false;
g_bIsAudioOn = false;
g_bIsPlayerOn = false;
g_bIsDepthRawOnOption = false;
NodeInfoList list;
nRetVal = g_Context.EnumerateExistingNodes(list);
if (nRetVal == XN_STATUS_OK)
{
for (NodeInfoList::Iterator it = list.Begin(); it != list.End(); ++it)
{
switch ((*it).GetDescription().Type)
{
case XN_NODE_TYPE_DEVICE:
ofLogVerbose(LOG_NAME) << "Creating device";
(*it).GetInstance(g_Device);
break;
case XN_NODE_TYPE_DEPTH:
ofLogVerbose(LOG_NAME) << "Creating depth generator";
g_bIsDepthOn = true;
g_bIsDepthRawOnOption = true;
(*it).GetInstance(g_Depth);
break;
case XN_NODE_TYPE_IMAGE:
ofLogVerbose(LOG_NAME) << "Creating image generator";
g_bIsImageOn = true;
(*it).GetInstance(g_Image);
break;
case XN_NODE_TYPE_IR:
ofLogVerbose(LOG_NAME) << "Creating ir generator";
g_bIsIROn = true;
(*it).GetInstance(g_IR);
break;
case XN_NODE_TYPE_AUDIO:
ofLogVerbose(LOG_NAME) << "Creating audio generator";
g_bIsAudioOn = true;
(*it).GetInstance(g_Audio);
break;
case XN_NODE_TYPE_PLAYER:
ofLogVerbose(LOG_NAME) << "Creating player";
g_bIsPlayerOn = true;
(*it).GetInstance(g_Player);
break;
}
}
}
XnCallbackHandle hDummy;
g_Context.RegisterToErrorStateChange(onErrorStateChanged, this, hDummy);
initConstants();
allocateDepthBuffers();
allocateDepthRawBuffers();
allocateRGBBuffers();
readFrame();
}
Example 13: openDeviceFromXmlWithChoice
XnStatus openDeviceFromXmlWithChoice(const char* csXmlFile, EnumerationErrors& errors)
{
XnStatus nRetVal = XN_STATUS_OK;
xnLogInitFromXmlFile(csXmlFile);
nRetVal = g_Context.Init();
XN_IS_STATUS_OK(nRetVal);
// find devices
NodeInfoList list;
nRetVal = g_Context.EnumerateProductionTrees(XN_NODE_TYPE_DEVICE, NULL, list, &errors);
XN_IS_STATUS_OK(nRetVal);
printf("The following devices were found:\n");
int i = 1;
for (NodeInfoList::Iterator it = list.Begin(); it != list.End(); ++it, ++i)
{
NodeInfo deviceNodeInfo = *it;
Device deviceNode;
deviceNodeInfo.GetInstance(deviceNode);
XnBool bExists = deviceNode.IsValid();
if (!bExists)
{
g_Context.CreateProductionTree(deviceNodeInfo, deviceNode);
// this might fail.
}
if (deviceNode.IsValid() && deviceNode.IsCapabilitySupported(XN_CAPABILITY_DEVICE_IDENTIFICATION))
{
const XnUInt32 nStringBufferSize = 200;
XnChar strDeviceName[nStringBufferSize];
XnChar strSerialNumber[nStringBufferSize];
XnUInt32 nLength = nStringBufferSize;
deviceNode.GetIdentificationCap().GetDeviceName(strDeviceName, nLength);
nLength = nStringBufferSize;
deviceNode.GetIdentificationCap().GetSerialNumber(strSerialNumber, nLength);
printf("[%d] %s (%s)\n", i, strDeviceName, strSerialNumber);
}
else
{
printf("[%d] %s\n", i, deviceNodeInfo.GetCreationInfo());
}
// release the device if we created it
if (!bExists && deviceNode.IsValid())
{
deviceNode.Release();
}
}
printf("\n");
printf("Choose device to open (1): ");
int chosen = 1;
scanf("%d", &chosen);
// create it
NodeInfoList::Iterator it = list.Begin();
for (i = 1; i < chosen; ++i)
{
it++;
}
NodeInfo deviceNode = *it;
nRetVal = g_Context.CreateProductionTree(deviceNode, g_Device);
XN_IS_STATUS_OK(nRetVal);
// now run the rest of the XML
nRetVal = g_Context.RunXmlScriptFromFile(csXmlFile, g_scriptNode, &errors);
XN_IS_STATUS_OK(nRetVal);
openCommon();
return (XN_STATUS_OK);
}
Example 14: fillProductionNodeMap
// fills a Map with available ProductionNodes. used for altern. viewpoint.
void fillProductionNodeMap(std::map<std::string, xn::ProductionNode> &pn_map)
{
ProductionNode n;
XnStatus status = XN_STATUS_OK;
NodeInfoList l;
// RGB
OpenNIContext::EnumerateProductionTrees(XN_NODE_TYPE_IMAGE, NULL , l);
int i = 0;
for (NodeInfoList::Iterator it = l.Begin(); it != l.End(); ++it, ++i){
std::ostringstream tmp;
tmp << "rgb";
if(i) tmp << i;
NodeInfo ni = *it;
status = OpenNIContext::CreateProductionTree(ni, n);
if(status == XN_STATUS_OK){
pn_map[tmp.str()] = n;
} else {
DEBUG_LOG("error while creating Production tree: " << xnGetStatusString(status));
}
}
// DEPTH
OpenNIContext::EnumerateProductionTrees(XN_NODE_TYPE_DEPTH, NULL , l);
i = 0;
for (NodeInfoList::Iterator it = l.Begin(); it != l.End(); ++it, ++i){
std::ostringstream tmp;
tmp << "depth";
if(i) tmp << i;
NodeInfo ni = *it;
status = OpenNIContext::CreateProductionTree(ni, n);
if(status == XN_STATUS_OK){
pn_map[tmp.str()] = n;
} else {
DEBUG_LOG("error while creating Production tree: " << xnGetStatusString(status));
}
}
// IR
OpenNIContext::EnumerateProductionTrees(XN_NODE_TYPE_IR, NULL , l);
i = 0;
for (NodeInfoList::Iterator it = l.Begin(); it != l.End(); ++it, ++i){
std::ostringstream tmp;
tmp << "ir";
if(i) tmp << i;
NodeInfo ni = *it;
status = OpenNIContext::CreateProductionTree(ni, n);
if(status == XN_STATUS_OK){
pn_map[tmp.str()] = n;
} else {
DEBUG_LOG("error while creating Production tree: " << xnGetStatusString(status));
}
}
// AUDIO
OpenNIContext::EnumerateProductionTrees(XN_NODE_TYPE_AUDIO, NULL , l);
i = 0;
for (NodeInfoList::Iterator it = l.Begin(); it != l.End(); ++it, ++i){
std::ostringstream tmp;
tmp << "audio";
if(i) tmp << i;
NodeInfo ni = *it;
status = OpenNIContext::CreateProductionTree(ni, n);
if(status == XN_STATUS_OK){
pn_map[tmp.str()] = n;
} else {
DEBUG_LOG("error while creating Production tree: " << xnGetStatusString(status));
}
}
}
Example 15: main
int main(int argc, char* argv[])
{
XnStatus nRetVal = XN_STATUS_OK;
Context context;
EnumerationErrors errors;
Mode mode;
// default mode
#if XN_PLATFORM == XN_PLATFORM_WIN32
mode = MODE_PLAY;
#else
mode = MODE_RECORD;
#endif
// check if mode was provided by user
if (argc > 1)
{
if (strcmp(argv[1], "play") == 0)
{
mode = MODE_PLAY;
}
else if (strcmp(argv[1], "record") == 0)
{
mode = MODE_RECORD;
}
else
{
printUsage(argv[0]);
return -1;
}
}
// make sure mode is valid
#if XN_PLATFORM != XN_PLATFORM_WIN32
if (mode == MODE_PLAY)
{
printf("Playing is not supported on this platform!\n");
return -1;
}
#endif
ScriptNode scriptNode;
nRetVal = context.InitFromXmlFile(SAMPLE_XML_PATH, scriptNode);
if (nRetVal == XN_STATUS_NO_NODE_PRESENT)
{
XnChar strError[1024];
errors.ToString(strError, 1024);
printf("%s\n", strError);
return (nRetVal);
}
else if (nRetVal != XN_STATUS_OK)
{
printf("Open failed: %s\n", xnGetStatusString(nRetVal));
return (nRetVal);
}
// find audio nodes
AudioGenerator gens[nSupportedNodes];
XnUInt32 nNodes = 0;
NodeInfoList list;
nRetVal = context.EnumerateExistingNodes(list, XN_NODE_TYPE_AUDIO);
CHECK_RC(nRetVal, "Enumerate audio nodes");
for (NodeInfoList::Iterator it = list.Begin(); it != list.End(); ++it)
{
NodeInfo info = *it;
nRetVal = info.GetInstance(gens[nNodes]);
CHECK_RC(nRetVal, "Get audio node");
nNodes++;
}
if (nNodes == 0)
{
printf("No audio node was found!\n");
return -1;
}
if (mode == MODE_PLAY)
{
nRetVal = play(context, gens, nNodes);
}
else if (mode == MODE_RECORD)
{
nRetVal = record(context, gens, nNodes);
}
scriptNode.Release();
for (int i = 0; i < nSupportedNodes; ++i)
gens[i].Release();
context.Release();
return nRetVal;
}