This page collects typical usage examples of the C++ method osgViewer::Viewer::run. If you are unsure what Viewer::run does, how to call it, or what it looks like in real code, the hand-picked examples below should help. You can also read further about the enclosing class, osgViewer::Viewer.
Five code examples of Viewer::run are shown below, sorted by popularity by default. You can upvote the examples you like or find useful; your feedback helps the system recommend better C++ samples.
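Before the gallery examples, here is a minimal, self-contained sketch (not taken from this page's examples) showing the typical call pattern; the model file name "cow.osg" is only a placeholder assumption:

#include <osgDB/ReadFile>
#include <osgViewer/Viewer>

int main()
{
    osgViewer::Viewer viewer;
    // Load any model into the scene graph; "cow.osg" is assumed to exist on the OSG data path.
    viewer.setSceneData(osgDB::readNodeFile("cow.osg"));
    // run() realizes the windows if needed, installs a default TrackballManipulator
    // when none is set, and loops frame() until the viewer is done.
    return viewer.run();
}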
Example 1: main
int main(int argc, char* argv[]) {
// Assumed context: the original file uses "using namespace osg;" and declares the
// viewer at file scope; a local declaration keeps this snippet self-contained.
osgViewer::Viewer viewer;
ref_ptr<Group> root = new Group;
viewer.setUpViewInWindow(0, 0, 640, 480);
viewer.realize();
ref_ptr<Camera> cam = viewer.getCamera();
ref_ptr<Geode> geode = new Geode;
root->addChild(geode.get());
for (int i=0; i < 10; i++) {
osg::Sphere* sphere = new osg::Sphere( Vec3f(i+1,10,0), .1*i);
osg::ShapeDrawable* sphereDrawable = new osg::ShapeDrawable(sphere);
geode->addDrawable(sphereDrawable);
}
// osg::Sphere* sphere = new osg::Sphere( Vec3f(10,10,0), .2);
// osg::ShapeDrawable* sphereDrawable = new osg::ShapeDrawable(sphere);
// sphereDrawable->setColor(osg::Vec4f(1,0,0,1));
// geode->addDrawable(sphereDrawable);
ref_ptr<osgGA::TrackballManipulator> manip = new osgGA::TrackballManipulator();
viewer.setCameraManipulator(manip);
viewer.setSceneData(root.get());
cam->setViewMatrixAsLookAt(Vec3f(10,0,0), Vec3f(10,1,0), Vec3f(0,0,1)); // note: once a camera manipulator is installed it overwrites this view matrix every frame; setHomePosition on the manipulator (commented out below) is the usual way to set the starting view
//manip->setHomePosition(Vec3f(10,0,0), Vec3f(11,1,0), Vec3f(10,0,1));
// cam->setProjectionMatrixAsPerspective(49,640/480., .1, 10);
viewer.run();
}
Example 2: main
int main(void){
// Assumed context: viewer, scene, terrain, LECHARRR, fumeeTank, rechercheTourelle and the
// posCanon* variables are globals defined elsewhere in the original file.
osg::DisplaySettings::instance()->setNumMultiSamples( 4 );
viewer.setUpViewInWindow( 100, 50, 800, 600 );
viewer.getCamera()->setClearColor( osg::Vec4( 0.5,0.5,0.5,1) );
viewer.addEventHandler(new osgViewer::StatsHandler);
scene = new osg::Group;
osg::ref_ptr<osg::LightSource> lumiere = new osg::LightSource;
lumiere->getLight()->setLightNum(0); // GL_LIGHT0
lumiere->getLight()->setPosition(osg::Vec4(1, -1, 10, 0)); // w = 0: directional light
lumiere->getLight()->setAmbient(osg::Vec4(0.5, 0.5, 0.5, 1.0));
lumiere->getLight()->setDiffuse(osg::Vec4(0.9, 0.9, 0.9, 1.0));
lumiere->getLight()->setSpecular(osg::Vec4(1.0, 1.0, 1.0, 1.0));
scene->addChild(lumiere.get());
terrain = creation_terrain();
scene->addChild(terrain.get());
scene->addChild(creation_foret(terrain, 500));
LECHARRR = creation_CHARRR(0,0,terrain);
scene->addChild(LECHARRR);
LECHARRR->accept(rechercheTourelle);
fumeeTank = new osgParticle::SmokeEffect;
fumeeTank->setTextureFileName("fumee.tga");
fumeeTank->setIntensity(2);
fumeeTank->setScale(4);
fumeeTank->setPosition(LECHARRR->getPosition());
scene->addChild(fumeeTank.get());
posCanonX = LECHARRR->getPosition().x();
posCanonY = LECHARRR->getPosition().y() + 3.5;
posCanonZ = LECHARRR->getPosition().z() + 4.0;
viewer.setSceneData(scene);
osg::ref_ptr<GestionEvenements> gestionnaire = new GestionEvenements();
viewer.addEventHandler(gestionnaire.get());
return viewer.run();
}
Example 3: initViewer
//......... part of the code omitted here .........
outlineGeom->setColorBinding(osg::Geometry::BIND_OVERALL);
originGeom->setColorBinding(osg::Geometry::BIND_OVERALL);
stopColors->push_back(osg::Vec4(1.0f,0.0f,0.0f,1.0f)); //red
brakeColors->push_back(osg::Vec4(1.0f,1.0f,0.0f,1.0f)); //yellow
slowColors->push_back(osg::Vec4(0.0f,1.0f,0.0f,1.0f)); //green
//outlineColors->push_back(osg::Vec4(1.0f,0.0f,0.0f,1.0f)); //red
outlineColors->push_back(osg::Vec4(0.0f,1.0f,0.0f,1.0f)); //green
originColors->push_back(osg::Vec4(1.0f,1.0f,1.0f,1.0f)); //white
// Set the normal in the same way as the color.
osg::Vec3Array* normals = new osg::Vec3Array;
normals->push_back(osg::Vec3(0.0f,0.0f,1.0f));
stopLinesGeom->setNormalArray(normals);
stopLinesGeom->setNormalBinding(osg::Geometry::BIND_OVERALL);
brakeLinesGeom->setNormalArray(normals);
brakeLinesGeom->setNormalBinding(osg::Geometry::BIND_OVERALL);
slowLinesGeom->setNormalArray(normals);
slowLinesGeom->setNormalBinding(osg::Geometry::BIND_OVERALL);
outlineGeom->setNormalArray(normals);
outlineGeom->setNormalBinding(osg::Geometry::BIND_OVERALL);
originGeom->setNormalArray(normals);
originGeom->setNormalBinding(osg::Geometry::BIND_OVERALL);
// This time we simply use a primitive set and hard-wire the number of coordinates to use,
// since we know it up front.
if (DRAW_OUTLINE) {
outlineGeom->addPrimitiveSet(new osg::DrawArrays(osg::PrimitiveSet::LINES,0,NUM_VERTS));
myshapegeode->addDrawable(outlineGeom);
}
if (DRAW_RAYS) {
stopLinesGeom->addPrimitiveSet(new osg::DrawArrays(osg::PrimitiveSet::LINES,0,NUM_VERTS));
brakeLinesGeom->addPrimitiveSet(new osg::DrawArrays(osg::PrimitiveSet::LINES,0,NUM_VERTS));
slowLinesGeom->addPrimitiveSet(new osg::DrawArrays(osg::PrimitiveSet::LINES,0,NUM_VERTS));
myshapegeode->addDrawable(stopLinesGeom);
myshapegeode->addDrawable(brakeLinesGeom);
myshapegeode->addDrawable(slowLinesGeom);
}
if (DRAW_ORIGIN) {
originGeom->addPrimitiveSet(new osg::DrawArrays(osg::PrimitiveSet::LINES,0,NUM_VERTS));
myshapegeode->addDrawable(originGeom);
}
// add the points geometry to the geode.
myshapegeode->setDataVariance(osg::Object::DYNAMIC);
root->addChild(myshapegeode.get());
//The geode containing our shape
osg::ref_ptr<osg::Geode> myTextGeode (new osg::Geode);
//osgText::Text* myText = new osgText::Text(); // myText is assumed to be created elsewhere in the original file
// Geode - Since osgText::Text is derived from osg::Drawable, we
// must add it to an osg::Geode before we can add it to our scene graph.
myTextGeode->addDrawable(myText);
//Set the screen alignment - always face the screen
myText->setAxisAlignment(osgText::Text::SCREEN);
//Set the text to our default text string
myText->setText("Default Text");
//myText->setPosition(osg::Vec3d(25, 75, 0));
myText->setPosition(osg::Vec3d(0, 0, 0));
myText->setColor(osg::Vec4d(1.0f, 1.0f, 1.0f, 1.0f));
myText->setCharacterSize(48);
//myText->setFont("./fonts/Vera.ttf");
char output[256] = "";
sprintf(output, "epoch: %d, scanNumber: %s, totalBytesRead: %d, stop: %d, closest_y: %d, speed: %f\n", (int)time(0), scanNumber, totalBytesRead, stop, closest_y_cm, speed);
myText->setText(output);
root->addChild(myTextGeode.get());
root->setUpdateCallback(new redrawCallback);
viewer.setSceneData( root.get() );
//viewer.setThreadingModel(osgViewer::Viewer::ThreadingModel::SingleThreaded);
//Stats Event Handler s key
//viewer.addEventHandler(new osgViewer::StatsHandler);
//Windows size handler
//viewer.addEventHandler(new osgViewer::WindowSizeHandler);
// add the state manipulator
//viewer.addEventHandler( new osgGA::StateSetManipulator(viewer.getCamera()->getOrCreateStateSet()) );
//The viewer.run() method starts the threads and the traversals.
return (viewer.run());
}
Example 4: run
int Metrics::run(osgViewer::Viewer& viewer)
{
if (Metrics::enabled())
{
if (!viewer.isRealized())
{
viewer.realize();
}
// If Metrics are enabled, enable stats on the Viewer so that we can report them for the Metrics
if (Metrics::enabled())
{
osgViewer::ViewerBase::Scenes scenes;
viewer.getScenes(scenes);
for (osgViewer::ViewerBase::Scenes::iterator itr = scenes.begin();
itr != scenes.end();
++itr)
{
osgViewer::Scene* scene = *itr;
osgDB::DatabasePager* dp = scene->getDatabasePager();
if (dp && dp->isRunning())
{
dp->resetStats();
}
}
viewer.getViewerStats()->collectStats("frame_rate", true);
viewer.getViewerStats()->collectStats("event", true);
viewer.getViewerStats()->collectStats("update", true);
viewer.getCamera()->getStats()->collectStats("rendering", true);
viewer.getCamera()->getStats()->collectStats("gpu", true);
}
// Report memory and fps every 10 frames.
unsigned int reportEvery = 10;
while (!viewer.done())
{
{
METRIC_SCOPED_EX("frame", 1, "number", toString<int>(viewer.getFrameStamp()->getFrameNumber()).c_str());
{
METRIC_SCOPED("advance");
viewer.advance();
}
{
METRIC_SCOPED("event");
viewer.eventTraversal();
}
{
METRIC_SCOPED("update");
viewer.updateTraversal();
}
{
METRIC_SCOPED("render");
viewer.renderingTraversals();
}
}
// Report memory and fps periodically.
if (viewer.getFrameStamp()->getFrameNumber() % reportEvery == 0)
{
// Only report the metrics if they are enabled to avoid computing the memory.
if (Metrics::enabled())
{
Metrics::counter("Memory::WorkingSet", "WorkingSet", Memory::getProcessPhysicalUsage() / 1048576);
Metrics::counter("Memory::PrivateBytes", "PrivateBytes", Memory::getProcessPrivateUsage() / 1048576);
Metrics::counter("Memory::PeakPrivateBytes", "PeakPrivateBytes", Memory::getProcessPeakPrivateUsage() / 1048576);
}
}
double eventTime = 0.0;
if (viewer.getViewerStats()->getAttribute(viewer.getViewerStats()->getLatestFrameNumber(), "Event traversal time taken", eventTime))
{
Metrics::counter("Viewer::Event", "Event", eventTime * 1000.0);
}
double updateTime = 0.0;
if (viewer.getViewerStats()->getAttribute(viewer.getViewerStats()->getLatestFrameNumber(), "Update traversal time taken", updateTime))
{
Metrics::counter("Viewer::Update", "Update", updateTime * 1000.0);
}
double cullTime = 0.0;
if (viewer.getCamera()->getStats()->getAttribute(viewer.getCamera()->getStats()->getLatestFrameNumber(), "Cull traversal time taken", cullTime))
{
Metrics::counter("Viewer::Cull", "Cull", cullTime * 1000.0);
}
double drawTime = 0.0;
if (viewer.getCamera()->getStats()->getAttribute(viewer.getCamera()->getStats()->getLatestFrameNumber(), "Draw traversal time taken", drawTime))
{
Metrics::counter("Viewer::Draw", "Draw", drawTime * 1000.0);
}
double gpuTime = 0.0;
//......... part of the code omitted here .........
Example 5: main
int main( int argc, char **argv )
{
// Assumed context: is, frame, imgW/imgH, videoFromWebcam, tracker, tagFamily, detector,
// HI/iHI, arscene, arvideo, manipMat, viewer and the log/helper utilities are globals
// declared elsewhere in the original file.
if(argc<4) {
usage(argc,argv);
return 1;
}
is = helper::createImageSource(argv[1]);
if(is.empty() || is->done()) {
loglne("[main] createImageSource failed or no valid imagesource!");
return -1;
}
is->pause(false);
is->reportInfo();
is->get(frame);
imgW = frame.cols; imgH = frame.rows;
videoFromWebcam = false;
if( is->classname() == "ImageSource_Camera" ) {
videoFromWebcam = true;
}
loglni("[main] loading K matrix from: "<<argv[2]);
double K[9];
std::ifstream kfile(argv[2]);
for(int i=0; i<9; ++i) kfile >> K[i];
tracker.loadK(K);
loglni("[main] K matrix loaded:");
loglni(helper::PrintMat<>(3,3,K));
loglni("[main] load template image from: "<<argv[3]);
tracker.loadTemplate(argv[3]);
//////////////// TagDetector /////////////////////////////////////////
int tagid = 0; //default tag16h5
if(argc>5) tagid = atoi(argv[5]);
tagFamily = TagFamilyFactory::create(tagid);
if(tagFamily.empty()) {
loglne("[main] create TagFamily fail!");
return -1;
}
detector = new TagDetector(tagFamily);
if(detector.empty()) {
loglne("[main] create TagDetector fail!");
return -1;
}
Mat temp = imread(argv[3]);
if( findAprilTag(temp, 0, HI, true) ) {
namedWindow("template");
imshow("template", temp);
iHI = HI.inv();
} else {
loglne("[main error] detector did not find any apriltag on template image!");
return -1;
}
//////////////// OSG ////////////////////////////////////////////////
osg::ref_ptr<osg::Group> root = new osg::Group;
string scenefilename = (argc>4?argv[4]:("cow.osg"));
osg::ref_ptr<osg::Node> cow = osgDB::readNodeFile(scenefilename);
arscene = new helper::ARSceneRoot;
helper::FixMat<3,double>::Type matK = helper::FixMat<3,double>::ConvertType(K);
CV2CG::cv2cg(matK,0.01,500,imgW,imgH,*arscene);
manipMat = new osg::MatrixTransform(osg::Matrix::identity());
manipMat->addChild(cow);
manipMat->getOrCreateStateSet()->setMode(GL_NORMALIZE, osg::StateAttribute::ON);
arscene->addChild(manipMat);
osg::ref_ptr<osg::Image> backgroundImage = new osg::Image;
helper::cvmat2osgimage(frame,backgroundImage);
arvideo = new helper::ARVideoBackground(backgroundImage);
root->setUpdateCallback(new ARUpdateCallback);
root->addChild(arvideo);
root->addChild(arscene);
viewer.setSceneData(root);
viewer.addEventHandler(new osgViewer::StatsHandler);
viewer.addEventHandler(new osgViewer::WindowSizeHandler);
viewer.addEventHandler(new QuitHandler);
//start tracking thread
OpenThreads::Thread::Init();
TrackThread* thr = new TrackThread;
thr->start();
viewer.run();
delete thr;
loglni("[main] DONE...exit!");
return 0;
}