本文整理汇总了C++中boost::circular_buffer::at方法的典型用法代码示例。如果您正苦于以下问题:C++ circular_buffer::at方法的具体用法?C++ circular_buffer::at怎么用?C++ circular_buffer::at使用的例子?那么,这里精选的方法代码示例或许可以为您提供帮助。您也可以进一步了解该方法所在类boost::circular_buffer的用法示例。
在下文中一共展示了circular_buffer::at方法的2个代码示例,这些例子默认根据受欢迎程度排序。您可以为喜欢或者感觉有用的代码点赞,您的评价将有助于系统推荐出更棒的C++代码示例。
示例1: find
ros::Time find(ros::Time sensor_time) {
    // Look up the position of sensor_time in the `sensor` buffer and
    // return the `execution` timestamp stored at the same index.
    int idx = 0;
    for (boost::circular_buffer<ros::Time>::iterator pos = sensor.begin();
         pos != sensor.end();
         ++pos, ++idx) {
        const bool same_stamp =
            (pos->sec == sensor_time.sec) && (pos->nsec == sensor_time.nsec);
        if (same_stamp) {
            return execution.at(idx); // matching pair found
        }
    }
    // No matching sensor timestamp: report and return a zero time as sentinel.
    ROS_ERROR("error:not found a pair");
    ros::Time failed;
    failed.sec = 0;
    failed.nsec = 0;
    return failed; // sentinel: not found
}
示例2: pointsCallback
void pointsCallback(const sensor_msgs::PointCloud2ConstPtr& input)
{
if (count >= 0)
{
counter++;
pcl::PointCloud<pcl::PointXYZRGB> input_cloud;
pcl::fromROSMsg (*input, input_cloud);
pcl::PointCloud<pcl::PointXYZRGB>::Ptr input_cloud_ptr (new pcl::PointCloud<pcl::PointXYZRGB>);
*input_cloud_ptr = input_cloud;
count = 0;
if (firstTime == true)
{
//Create a standard map object
m->setVerbose(true); //Set the map to give text output
m->loadCalibrationWords(bow_path,"orb", 500); //set bag of words to orb 500 orb features from bow_path
m->setFeatureExtractor(new OrbExtractor()); //Use orb features
int max_points = 300; //Number of keypoints used by matcher
int nr_iter = 8; //Number of iterations the matcher will run
float shrinking = 0.7; //The rate of convergence for the matcher
float bow_threshold = 0.15; //Bag of words threshold to avoid investigating bad matches
float distance_threshold = 0.015; //Distance threshold to discard bad matches using euclidean information.
float feature_threshold = 0.15; //Feature threshold to discard bad matches using feature information.
m->setMatcher(new BowAICK(max_points, nr_iter,shrinking,bow_threshold,distance_threshold,feature_threshold));//Create a new matcher
firstTime = false;
vector< RGBDFrame * > frames;
m->addFrame(input_cloud_ptr);
}
else
{
printf("----------------------%i-------------------\nadding a new frame\n",counter);
//Add frame to map
m->addFrame(input_cloud_ptr);
nrMatches = m->numberOfMatchesInLastFrame();
int hej = m->numberOfFrames();
/*float time = (input->header.stamp - lastCloudTime).toSec();
xSpeed = (lastTransformationMatrix.front()(0,3) - transformationMatrix.front()(0,3))/time;
ySpeed = (lastTransformationMatrix.front()(1,3) - transformationMatrix.front()(1,3))/time;
zSpeed = (lastTransformationMatrix.front()(2,3) - transformationMatrix.front()(2,3))/time;
//cout << time << " HEJ" << endl;*/
if ((nrMatches < 40 and hej > 2)) //or xSpeed > 1.3 or ySpeed > 1.3 or zSpeed > 1.3)
{
cout << "BAD MATCH" << endl << endl << endl;
m->removeLastFrame();
badcount ++;
//cout << lastPoses.front().pose.position.x << endl;
pose.header.stamp = ros::Time::now();
pose.header.frame_id = "local_origin";
pose.pose.position.x = lastPoses.at(2).pose.position.x + (lastPoses.at(2).pose.position.x - lastPoses.at(1).pose.position.x);
pose.pose.position.y = lastPoses.at(2).pose.position.y + (lastPoses.at(2).pose.position.y - lastPoses.at(1).pose.position.y);
pose.pose.position.z = lastPoses.at(2).pose.position.z + (lastPoses.at(2).pose.position.z - lastPoses.at(1).pose.position.z);
pose.pose.orientation.x = lastPoses.at(2).pose.orientation.x; //lastLocalPose.pose.orientation.x; //q.x();
pose.pose.orientation.y = lastPoses.at(2).pose.orientation.y; //lastLocalPose.pose.orientation.y; //q.y();
pose.pose.orientation.z = lastPoses.at(2).pose.orientation.z; //lastLocalPose.pose.orientation.z; //q.z();
pose.pose.orientation.w = lastPoses.at(2).pose.orientation.w; //lastLocalPose.pose.orientation.w; //q.w();
//pub_Pose.publish(pose);
pub_Pose.publish(pose);
}
else
{
//transformationMatrix = m->estimateCurrentPose(lastTransformationMatrix);
transformationMatrix = m->estimateCurrentPose(lastTransformationMatrix);
//cout << transformationMatrix.front() << endl << endl;
lastTransformationMatrix = transformationMatrix;
transformationMatrix.front() = frameConversionMat*transformationMatrix.front();
//Convert rotation matrix to quaternion
tf::Matrix3x3 rotationMatrix;
rotationMatrix.setValue(transformationMatrix.front()(0,0), transformationMatrix.front()(0,1),transformationMatrix.front()(0,2),
transformationMatrix.front()(1,0), transformationMatrix.front()(1,1),transformationMatrix.front()(1,2),
transformationMatrix.front()(2,0), transformationMatrix.front()(2,1),transformationMatrix.front()(2,2) );
tf::Quaternion q;
rotationMatrix.getRotation(q);
//tf::Transform transform;
//transform.setOrigin(tf::Vector3(transformationMatrix.front()(0,3), transformationMatrix.front()(1,3), transformationMatrix.front()(2,3)));
//publish pose
//geometry_msgs::PoseStamped pose;
pose.header.stamp = input->header.stamp;
pose.header.frame_id = "local_origin";
pose.pose.position.x = transformationMatrix.front()(0,3);
pose.pose.position.y = transformationMatrix.front()(1,3);
pose.pose.position.z = transformationMatrix.front()(2,3);
pose.pose.orientation.x = q.x(); //
pose.pose.orientation.y = q.y(); //
pose.pose.orientation.z = q.z(); //
pose.pose.orientation.w = q.w(); //
pub_Pose.publish(pose);
}
//pub_transform.sendTransform(tf::StampedTransform(transform, now, "map", "robot"));
//ros::Time now(0);
/*while (!tfl.waitForTransform("local_origin", "camera_link", now, ros::Duration(1)))
ROS_ERROR("Couldn't find transform from 'camera_link' to 'local_origin', retrying...");
geometry_msgs::PoseStamped local_origin_pose;
//.........这里部分代码省略.........