

C++ UTimer::elapsed Method Code Examples

This article collects typical usage examples of the C++ method UTimer::elapsed. If you are wondering what UTimer::elapsed does, how to call it, or what real-world usage looks like, the curated code examples below should help. You can also explore further usage examples of the UTimer class to which the method belongs.


The following presents 6 code examples of the UTimer::elapsed method, drawn from open-source projects and listed in order of popularity.
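Before the full examples, here is a minimal sketch of the pattern they all share: construct a UTimer (or call start()), run the code to be measured, then read the elapsed seconds with elapsed(). The include path and the assumption that elapsed() reports seconds since construction or since the last start() are inferred from the samples below rather than from UTimer's own documentation, so treat this as a sketch only.

#include <rtabmap/utilite/UTimer.h> // assumed include path; adjust to your project
#include <cstdio>

// Placeholder workload standing in for whatever is being timed.
static void doSomeWork()
{
	volatile double x = 0.0;
	for(int i = 0; i < 1000000; ++i)
	{
		x += i * 0.5;
	}
}

int main()
{
	UTimer timer;                  // timing is assumed to start at construction (see Example 1)
	doSomeWork();
	printf("First pass  = %fs\n", timer.elapsed());

	timer.start();                 // restart before a new measurement, as in Example 3
	doSomeWork();
	printf("Second pass = %fs\n", timer.elapsed());
	return 0;
}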

Example 1: process

Transform Odometry::process(const SensorData & data, OdometryInfo * info)
{
	if(_pose.isNull())
	{
		_pose.setIdentity(); // initialized
	}

	UASSERT(!data.image().empty());
	if(dynamic_cast<OdometryMono*>(this) == 0)
	{
		UASSERT(!data.depthOrRightImage().empty());
	}

	if(data.fx() <= 0 || data.fyOrBaseline() <= 0)
	{
		UERROR("Rectified images required! Calibrate your camera. (fx=%f, fy/baseline=%f, cx=%f, cy=%f)",
				data.fx(), data.fyOrBaseline(), data.cx(), data.cy());
		return Transform();
	}

	UTimer time;
	Transform t = this->computeTransform(data, info);

	if(info)
	{
		info->time = time.elapsed();
		info->lost = t.isNull();
	}

	if(!t.isNull())
	{
		_resetCurrentCount = _resetCountdown;

		if(_force2D)
		{
			float x,y,z, roll,pitch,yaw;
			t.getTranslationAndEulerAngles(x, y, z, roll, pitch, yaw);
			t = Transform(x,y,0, 0,0,yaw);
		}

		return _pose *= t; // updated
	}
	else if(_resetCurrentCount > 0)
	{
		UWARN("Odometry lost! Odometry will be reset after next %d consecutive unsuccessful odometry updates...", _resetCurrentCount);

		--_resetCurrentCount;
		if(_resetCurrentCount == 0)
		{
			UWARN("Odometry automatically reset to latest pose!");
			this->reset(_pose);
		}
	}

	return Transform();
}
Developer: FNicolai, Project: rtabmap, Lines: 56, Source: Odometry.cpp

Example 2: computeTransform


//......... part of the code is omitted here .........
		}
	}
	else
	{
		UFATAL("Unknown depth format!");
	}

	if(camera_ == 0)
	{
		dvo::core::IntrinsicMatrix intrinsics = dvo::core::IntrinsicMatrix::create(
						data.cameraModels()[0].fx(),
						data.cameraModels()[0].fy(),
						data.cameraModels()[0].cx(),
						data.cameraModels()[0].cy());
		camera_ = new dvo::core::RgbdCameraPyramid(
				data.cameraModels()[0].imageWidth(),
				data.cameraModels()[0].imageHeight(),
				intrinsics);
	}

	dvo::core::RgbdImagePyramid * current = new dvo::core::RgbdImagePyramid(*camera_, grey_s16, depth_float);

	cv::Mat covariance;
	if(reference_ == 0)
	{
		reference_ = current;
		if(!lost_)
		{
			t.setIdentity();
		}
		covariance = cv::Mat::eye(6,6,CV_64FC1) * 9999.0;
	}
	else
	{
		dvo::DenseTracker::Result result;
		dvo_->match(*reference_, *current, result);
		t = Transform::fromEigen3d(result.Transformation);

		if(result.Information(0,0) > 0.0 && result.Information(0,0) != 1.0)
		{
			lost_ = false;
			cv::Mat information = cv::Mat::eye(6,6, CV_64FC1);
			memcpy(information.data, result.Information.data(), 36*sizeof(double));
			covariance = information.inv();
			covariance *= 100.0; // to be in the same scale as loop closure detection

			Transform currentMotion = t;
			t = motionFromKeyFrame_.inverse() * t;

			// TODO make parameters?
			if(currentMotion.getNorm() > 0.01 || currentMotion.getAngle() > 0.01)
			{
				if(info)
				{
					info->keyFrameAdded = true;
				}
				// new keyframe
				delete reference_;
				reference_ = current;
				motionFromKeyFrame_.setIdentity();
			}
			else
			{
				delete current;
				motionFromKeyFrame_ = currentMotion;
			}
		}
		else
		{
			lost_ = true;
			delete reference_;
			delete current;
			reference_ = 0; // this will make restart from the next frame
			motionFromKeyFrame_.setIdentity();
			t.setNull();
			covariance = cv::Mat::eye(6,6,CV_64FC1) * 9999.0;
			UWARN("dvo failed to estimate motion, tracking will be reinitialized on next frame.");
		}
	}

	const Transform & localTransform = data.cameraModels()[0].localTransform();
	if(!t.isNull() && !t.isIdentity() && !localTransform.isIdentity() && !localTransform.isNull())
	{
		// from camera frame to base frame
		t = localTransform * t * localTransform.inverse();
	}

	if(info)
	{
		info->type = (int)kTypeDVO;
		info->covariance = covariance;
	}

	UINFO("Odom update time = %fs", timer.elapsed());

#else
	UERROR("RTAB-Map is not built with DVO support! Select another visual odometry approach.");
#endif
	return t;
}
Developer: qinhuaping, Project: rtabmap, Lines: 101, Source: OdometryDVO.cpp

Example 3: main


//......... part of the code is omitted here .........
		printf(" Creating the ground truth matrix.\n");
	}
	printf(" INFO: All other parameters are set to defaults\n");
	if(pm.size()>1)
	{
		printf("   Overwritten parameters :\n");
		for(ParametersMap::iterator iter = pm.begin(); iter!=pm.end(); ++iter)
		{
			printf("    %s=%s\n",iter->first.c_str(), iter->second.c_str());
		}
	}
	if(rtabmap.getWM().size() || rtabmap.getSTM().size())
	{
		printf("[Warning] RTAB-Map database is not empty (%s)\n", (rtabmap.getWorkingDir()+Parameters::getDefaultDatabaseName()).c_str());
	}
	printf("\nProcessing images...\n");

	UTimer iterationTimer;
	UTimer rtabmapTimer;
	int imagesProcessed = 0;
	std::list<std::vector<float> > teleopActions;
	while(loopDataset <= repeat && g_forever)
	{
		cv::Mat img = camera->takeImage();
		int i=0;
		double maxIterationTime = 0.0;
		int maxIterationTimeId = 0;
		while(!img.empty() && g_forever)
		{
			++imagesProcessed;
			iterationTimer.start();
			rtabmapTimer.start();
			rtabmap.process(img);
			double rtabmapTime = rtabmapTimer.elapsed();
			loopClosureId = rtabmap.getLoopClosureId();
			if(rtabmap.getLoopClosureId())
			{
				++countLoopDetected;
			}
			img = camera->takeImage();
			if(++count % 100 == 0)
			{
				printf(" count = %d, loop closures = %d, max time (at %d) = %fs\n",
						count, countLoopDetected, maxIterationTimeId, maxIterationTime);
				maxIterationTime = 0.0;
				maxIterationTimeId = 0;
				std::map<int, int> wm = rtabmap.getWeights();
				printf(" WM(%d)=[", (int)wm.size());
				for(std::map<int, int>::iterator iter=wm.begin(); iter!=wm.end();++iter)
				{
					if(iter != wm.begin())
					{
						printf(";");
					}
					printf("%d,%d", iter->first, iter->second);
				}
				printf("]\n");
			}

			// Update generated ground truth matrix
			if(createGT)
			{
				if(loopClosureId > 0)
				{
					groundTruth.insert(std::make_pair(i, loopClosureId-1));
				}
Developer: abdullah38rcc, Project: rtabmap, Lines: 67, Source: main.cpp

Example 4: Render


//......... part of the code is omitted here .........
									LOGE("Not mesh could be created for node %d", id);
								}
							}
							totalPoints_+=indices->size();
						}
					}
				}
			}
		}

		//filter poses?
		if(poses.size() > 2)
		{
			if(nodesFiltering_)
			{
				for(std::multimap<int, rtabmap::Link>::const_iterator iter=links.begin(); iter!=links.end(); ++iter)
				{
					if(iter->second.type() != rtabmap::Link::kNeighbor)
					{
						int oldId = iter->second.to()>iter->second.from()?iter->second.from():iter->second.to();
						poses.erase(oldId);
					}
				}
			}
		}

		//update cloud visibility
		std::set<int> addedClouds = main_scene_.getAddedClouds();
		for(std::set<int>::const_iterator iter=addedClouds.begin();
			iter!=addedClouds.end();
			++iter)
		{
			if(*iter > 0 && poses.find(*iter) == poses.end())
			{
				main_scene_.setCloudVisible(*iter, false);
			}
		}
	}
	else
	{
		rtabmap::OdometryEvent event;
		bool set = false;
		{
			boost::mutex::scoped_lock  lock(odomMutex_);
			if(odomEvents_.size())
			{
				LOGI("Process odom events");
				event = odomEvents_.back();
				odomEvents_.clear();
				set = true;
			}
		}

		main_scene_.setCloudVisible(-1, odomCloudShown_ && !trajectoryMode_);

		//just process the last one
		if(set && !event.pose().isNull())
		{
			if(odomCloudShown_ && !trajectoryMode_)
			{
				if(!event.data().imageRaw().empty() && !event.data().depthRaw().empty())
				{
					pcl::PointCloud<pcl::PointXYZRGB>::Ptr cloud;
					cloud = rtabmap::util3d::cloudRGBFromSensorData(event.data(), event.data().imageRaw().rows/event.data().depthRaw().rows, maxCloudDepth_);
					if(cloud->size())
					{
						LOGI("Created odom cloud (rgb=%dx%d depth=%dx%d cloud=%dx%d)",
													event.data().imageRaw().cols, event.data().imageRaw().rows,
													event.data().depthRaw().cols, event.data().depthRaw().rows,
													(int)cloud->width, (int)cloud->height);
						std::vector<pcl::Vertices> polygons = rtabmap::util3d::organizedFastMesh(cloud, meshAngleToleranceDeg_*M_PI/180.0, false, meshTrianglePix_);
						main_scene_.addCloud(-1, cloud, polygons, opengl_world_T_rtabmap_world*event.pose(), event.data().imageRaw());
						main_scene_.setCloudVisible(-1, true);
					}
					else
					{
						LOGE("Generated cloud is empty!");
					}
				}
				else
				{
					LOGE("Odom data images are empty!");
				}
			}
		}
	}

	UTimer fpsTime;
    lastDrawnCloudsCount_ = main_scene_.Render();
    renderingFPS_ = 1.0/fpsTime.elapsed();

    if(rtabmapEvents.size())
    {
    	// send statistics to GUI
		LOGI("Posting PostRenderEvent!");
		UEventsManager::post(new PostRenderEvent(rtabmapEvents.back()));
    }

    return notifyDataLoaded?1:0;
}
Developer: KamalDSOberoi, Project: rtabmap, Lines: 101, Source: RTABMapApp.cpp

Example 5: newFrame


//......... part of the code is omitted here .........
			for(std::multimap<int, cv::Point3f>::const_iterator iter=tmpRefFrame.getWords3().begin(); iter!=tmpRefFrame.getWords3().end(); ++iter)
			{
				info->localMap.insert(std::make_pair(iter->first, util3d::transformPoint(iter->second, t)));
			}
			info->words = newFrame.getWords();
		}
	}
	else
	{
		//return Identity
		output = Transform::getIdentity();
		// a very high variance tells that the new pose is not linked with the previous one
		regInfo.variance = 9999;
	}

	if(!output.isNull())
	{
		output = motionSinceLastKeyFrame.inverse() * output;

		// new key-frame?
		if( (registrationPipeline_->isImageRequired() && (keyFrameThr_ == 0 || float(regInfo.inliers) <= keyFrameThr_*float(refFrame_.sensorData().keypoints().size()))) ||
			(registrationPipeline_->isScanRequired() && (scanKeyFrameThr_ == 0 || regInfo.icpInliersRatio <= scanKeyFrameThr_)))
		{
			UDEBUG("Update key frame");
			int features = newFrame.getWordsDescriptors().size();
			if(features == 0)
			{
				newFrame = Signature(data);
				// this will generate features only for the first frame or if optical flow was used (no 3d words)
				Signature dummy;
				registrationPipeline_->computeTransformationMod(
						newFrame,
						dummy);
				features = (int)newFrame.sensorData().keypoints().size();
			}

			if((features >= registrationPipeline_->getMinVisualCorrespondences()) &&
			   (registrationPipeline_->getMinGeometryCorrespondencesRatio()==0.0f ||
					   (newFrame.sensorData().laserScanRaw().cols &&
					   (newFrame.sensorData().laserScanMaxPts() == 0 || float(newFrame.sensorData().laserScanRaw().cols)/float(newFrame.sensorData().laserScanMaxPts())>=registrationPipeline_->getMinGeometryCorrespondencesRatio()))))
			{
				refFrame_ = newFrame;

				refFrame_.setWords(std::multimap<int, cv::KeyPoint>());
				refFrame_.setWords3(std::multimap<int, cv::Point3f>());
				refFrame_.setWordsDescriptors(std::multimap<int, cv::Mat>());

				//reset motion
				lastKeyFramePose_.setNull();
			}
			else
			{
				if (!refFrame_.sensorData().isValid())
				{
					// Don't send odometry if we don't have a keyframe yet
					output.setNull();
				}

				if(features < registrationPipeline_->getMinVisualCorrespondences())
				{
					UWARN("Too low 2D features (%d), keeping last key frame...", features);
				}

				if(registrationPipeline_->getMinGeometryCorrespondencesRatio()>0.0f && newFrame.sensorData().laserScanRaw().cols==0)
				{
					UWARN("Too low scan points (%d), keeping last key frame...", newFrame.sensorData().laserScanRaw().cols);
				}
				else if(registrationPipeline_->getMinGeometryCorrespondencesRatio()>0.0f && newFrame.sensorData().laserScanMaxPts() != 0 && float(newFrame.sensorData().laserScanRaw().cols)/float(newFrame.sensorData().laserScanMaxPts())<registrationPipeline_->getMinGeometryCorrespondencesRatio())
				{
					UWARN("Too low scan points ratio (%d < %d), keeping last key frame...", float(newFrame.sensorData().laserScanRaw().cols)/float(newFrame.sensorData().laserScanMaxPts()), registrationPipeline_->getMinGeometryCorrespondencesRatio());
				}
			}
		}
	}
	else if(!regInfo.rejectedMsg.empty())
	{
		UWARN("Registration failed: \"%s\"", regInfo.rejectedMsg.c_str());
	}

	data.setFeatures(newFrame.sensorData().keypoints(), newFrame.sensorData().descriptors());

	if(info)
	{
		info->type = 1;
		info->variance = regInfo.variance;
		info->inliers = regInfo.inliers;
		info->icpInliersRatio = regInfo.icpInliersRatio;
		info->matches = regInfo.matches;
		info->features = newFrame.sensorData().keypoints().size();
	}

	UINFO("Odom update time = %fs lost=%s inliers=%d, ref frame corners=%d, transform accepted=%s",
			timer.elapsed(),
			output.isNull()?"true":"false",
			(int)regInfo.inliers,
			(int)newFrame.sensorData().keypoints().size(),
			!output.isNull()?"true":"false");

	return output;
}
Developer: Aleem21, Project: rtabmap, Lines: 101, Source: OdometryF2F.cpp

Example 6: computeTransform


//......... part of the code is omitted here .........
						_samples,
						data.localTransform());

		if(_pointToPlane)
		{
			pcl::PointCloud<pcl::PointNormal>::Ptr newCloud = util3d::computeNormals(newCloudXYZ);

			std::vector<int> indices;
			newCloud = util3d::removeNaNNormalsFromPointCloud<pcl::PointNormal>(newCloud);
			if(newCloudXYZ->size() != newCloud->size())
			{
				UWARN("removed nan normals...");
			}

			if(_previousCloudNormal->size() > minPoints && newCloud->size() > minPoints)
			{
				int correspondences = 0;
				Transform transform = util3d::icpPointToPlane(newCloud,
						_previousCloudNormal,
						_maxCorrespondenceDistance,
						_maxIterations,
						&hasConverged,
						&variance,
						&correspondences);

				// verify if there are enough correspondences
				float correspondencesRatio = float(correspondences)/float(_previousCloudNormal->size()>newCloud->size()?_previousCloudNormal->size():newCloud->size());

				if(!transform.isNull() && hasConverged &&
				   correspondencesRatio >= _correspondenceRatio)
				{
					output = transform;
					_previousCloudNormal = newCloud;
				}
				else
				{
					UWARN("Transform not valid (hasConverged=%s variance = %f)",
							hasConverged?"true":"false", variance);
				}
			}
			else if(newCloud->size() > minPoints)
			{
				output.setIdentity();
				_previousCloudNormal = newCloud;
			}
		}
		else
		{
			//point to point
			if(_previousCloud->size() > minPoints && newCloudXYZ->size() > minPoints)
			{
				int correspondences = 0;
				Transform transform = util3d::icp(newCloudXYZ,
						_previousCloud,
						_maxCorrespondenceDistance,
						_maxIterations,
						&hasConverged,
						&variance,
						&correspondences);

				// verify if there are enough correspondences
				float correspondencesRatio = float(correspondences)/float(_previousCloud->size()>newCloudXYZ->size()?_previousCloud->size():newCloudXYZ->size());

				if(!transform.isNull() && hasConverged &&
				   correspondencesRatio >= _correspondenceRatio)
				{
					output = transform;
					_previousCloud = newCloudXYZ;
				}
				else
				{
					UWARN("Transform not valid (hasConverged=%s variance = %f)",
							hasConverged?"true":"false", variance);
				}
			}
			else if(newCloudXYZ->size() > minPoints)
			{
				output.setIdentity();
				_previousCloud = newCloudXYZ;
			}
		}
	}
	else
	{
		UERROR("Depth is empty?!?");
	}

	if(info)
	{
		info->variance = variance;
	}

	UINFO("Odom update time = %fs hasConverged=%s variance=%f cloud=%d",
			timer.elapsed(),
			hasConverged?"true":"false",
			variance,
			(int)(_pointToPlane?_previousCloudNormal->size():_previousCloud->size()));

	return output;
}
Developer: redater, Project: PAM-BATR, Lines: 101, Source: OdometryICP.cpp


Note: The UTimer::elapsed examples in this article were compiled by 纯净天空 from open-source code and documentation platforms such as GitHub/MSDocs. The snippets are selected from open-source projects contributed by their respective developers, and copyright of the source code remains with the original authors. Refer to each project's License before distributing or using the code; do not reproduce without permission.