This article collects and summarizes typical usage examples of the C++ method tf::Pose::inverse. If you have been struggling with questions like: what exactly does C++ Pose::inverse do? How is Pose::inverse used? What does real code calling Pose::inverse look like? — then the hand-picked examples below may help. You can also explore further usage examples of the containing class, tf::Pose.
The following shows 9 code examples of the Pose::inverse method, sorted by popularity by default. You can upvote the examples you like or find useful; your ratings help the system recommend better C++ code samples.
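As a quick orientation before the examples (this sketch is ours, not taken from any of the projects below): tf::Pose is a typedef of tf::Transform, and inverse() returns the inverse rigid-body transform. Its most common use is expressing one pose relative to another:
#include <tf/transform_datatypes.h>
// Minimal sketch: the pose of 'b' expressed in the frame of 'a'.
// Both inputs are assumed to be poses in the same fixed frame.
tf::Pose relativePose(const tf::Pose& a, const tf::Pose& b)
{
  // a.inverse() maps fixed-frame coordinates into a's frame,
  // so composing it with b yields b as seen from a.
  return a.inverse() * b;
}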
Example 1: setOdomPoseCallback
bool OdometryRemap::setOdomPoseCallback(nao_remote::SetTransform::Request& req, nao_remote::SetTransform::Response& res){
ROS_INFO("New target for current odometry pose received");
tf::Transform targetPose;
tf::transformMsgToTF(req.offset, targetPose);
m_odomOffset = targetPose * m_odomPose.inverse();
return true;
}
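A note on the algebra (our annotation, not part of the original source): the offset is chosen so that composing it with the current odometry pose reproduces the requested target, since (targetPose * m_odomPose.inverse()) * m_odomPose == targetPose. A quick sanity check, assuming the variables above, could be:
// Sketch only: the offset should map the current odometry pose onto the target.
tf::Pose corrected = m_odomOffset * m_odomPose;
ROS_ASSERT(corrected.getOrigin().distance(targetPose.getOrigin()) < 1e-6);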
Example 2: headingDiff
geometry_msgs::Twist PoseFollower::diff2D(const tf::Pose& pose1, const tf::Pose& pose2)
{
geometry_msgs::Twist res;
tf::Pose diff = pose2.inverse() * pose1;
res.linear.x = diff.getOrigin().x();
res.linear.y = diff.getOrigin().y();
res.angular.z = tf::getYaw(diff.getRotation());
if(holonomic_ || (fabs(res.linear.x) <= tolerance_trans_ && fabs(res.linear.y) <= tolerance_trans_))
return res;
//in the case that we're not rotating to our goal position and we have a non-holonomic robot
//we'll need to command a rotational velocity that will help us reach our desired heading
//we want to compute a goal based on the heading difference between our pose and the target
double yaw_diff = headingDiff(pose1.getOrigin().x(), pose1.getOrigin().y(),
pose2.getOrigin().x(), pose2.getOrigin().y(), tf::getYaw(pose2.getRotation()));
//we'll also check if we can move more effectively backwards
if (allow_backwards_)
{
double neg_yaw_diff = headingDiff(pose1.getOrigin().x(), pose1.getOrigin().y(),
pose2.getOrigin().x(), pose2.getOrigin().y(), M_PI + tf::getYaw(pose2.getRotation()));
//check if it's faster to just back up
if(fabs(neg_yaw_diff) < fabs(yaw_diff)){
ROS_DEBUG("Negative is better: %.2f", neg_yaw_diff);
yaw_diff = neg_yaw_diff;
}
}
//compute the desired quaternion
tf::Quaternion rot_diff = tf::createQuaternionFromYaw(yaw_diff);
tf::Quaternion rot = pose2.getRotation() * rot_diff;
tf::Pose new_pose = pose1;
new_pose.setRotation(rot);
diff = pose2.inverse() * new_pose;
res.linear.x = diff.getOrigin().x();
res.linear.y = diff.getOrigin().y();
res.angular.z = tf::getYaw(diff.getRotation());
return res;
}
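headingDiff itself is not shown on this page. A hypothetical implementation, inferred by us from the call sites above (five doubles in, a signed angle out) and not necessarily matching the real PoseFollower method, would compute the signed angle between the vector from the target to the robot and the candidate heading:
// Hypothetical sketch of headingDiff; the actual implementation may differ.
double headingDiff(double x, double y, double pt_x, double pt_y, double heading)
{
  double v1_x = x - pt_x, v1_y = y - pt_y;           // vector from target to robot
  double v2_x = cos(heading), v2_y = sin(heading);   // candidate heading direction
  // atan2 of (perpendicular dot product, dot product) is the signed angle between them
  return -1.0 * atan2(v1_x * v2_y - v1_y * v2_x, v1_x * v2_x + v1_y * v2_y);
}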
Example 3: setMotion
void MotionModel::setMotion(const tf::Pose& pnew, const tf::Pose& pold)
{
tf::Pose delta_pose;
tf::Transform odom_to_base(pold.inverse().getRotation());
delta_pose.setOrigin(odom_to_base * (pnew.getOrigin() - pold.getOrigin()));
delta_pose.setRotation(pnew.getRotation() * pold.getRotation().inverse());
delta_x = delta_pose.getOrigin().x();
delta_y = delta_pose.getOrigin().y();
delta_yaw = atan2(sin(tf::getYaw(delta_pose.getRotation())), cos(tf::getYaw(delta_pose.getRotation())));
dxy=sqrt(delta_x*delta_x+delta_y*delta_y);
}
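Two details worth spelling out (our annotation): the rotation-only odom_to_base transform rotates the translation increment into the old base frame, so the deltas are relative to the robot rather than the odometry origin; and the atan2(sin(yaw), cos(yaw)) idiom wraps the yaw difference into (-pi, pi]. Assuming the ROS angles package is available, the wrapping line has a more explicit equivalent:
#include <angles/angles.h>
// Equivalent sketch of the yaw normalization above (planar motion assumed):
delta_yaw = angles::normalize_angle(tf::getYaw(delta_pose.getRotation()));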
Example 4:
pair<Nodes, PointVec>
nodesOnGrid (const unsigned g, const Roadmap& r,
const tmap::TopologicalMap& tmap)
{
pair<Nodes, PointVec> res;
// Add all nodes defined wrt this grid
BOOST_FOREACH (const unsigned n, r.gridNodes(g))
{
res.first.push_back(n);
res.second.push_back(r.nodeInfo(n).position);
}
const double x_size = tmap.nodeInfo(g).x_size;
const double y_size = tmap.nodeInfo(g).y_size;
// Also look at neighboring grids in topological graph
BOOST_FOREACH (const tmap::GraphEdge e,
out_edges(tmap.node(g), tmap))
{
const bool flip = (tmap[e].dest == g);
const tf::Pose offset = gmu::toPose(tmap[e].offset);
const tf::Transform tr = flip ? offset.inverse() : offset;
const unsigned g2 = flip ? tmap[e].src : tmap[e].dest;
// Now tr maps from the neighbor grid g2 to g
BOOST_FOREACH (const unsigned n, r.gridNodes(g2))
{
ROS_ASSERT(g2==r.nodeInfo(n).grid);
const gm::Point& pos = r.nodeInfo(n).position;
const gm::Point local_pos = gmu::transformPoint(tr, pos);
// Now local_pos is the position of the waypoint in frame g
ROS_DEBUG_STREAM_NAMED ("nodes_on_grid", "Node " << n << " position on " << g2 << " is "
<< gmu::toString(pos) << " and on " << g <<
" is " << gmu::toString(local_pos));
if (fabs(local_pos.x) < x_size/2 && fabs(local_pos.y) < y_size/2)
{
ROS_DEBUG_NAMED ("nodes_on_grid", "On grid");
res.first.push_back(n);
res.second.push_back(local_pos);
}
}
}
return res;
}
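A reading aid (our inference from the flip logic above, not stated in the original): tmap[e].offset evidently stores the transform mapping points from the edge's dest grid frame into its src grid frame, so crossing the edge toward src uses offset directly and crossing toward dest uses offset.inverse(). Factored into a helper, the convention might read:
// Hypothetical helper: transform mapping points from the far side of edge e
// into grid 'to', assuming offset maps dest-frame points into the src frame.
tf::Transform edgeTransform(const tmap::TopologicalMap& tmap,
                            const tmap::GraphEdge& e, const unsigned to)
{
  const tf::Pose offset = gmu::toPose(tmap[e].offset);
  return (tmap[e].dest == to) ? offset.inverse() : offset;
}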
Example 5: positionOnGrid
gm::Point positionOnGrid (const unsigned n, const unsigned g, const Roadmap& r,
const tmap::TopologicalMap& tmap)
{
const msg::RoadmapNode& info = r.nodeInfo(n);
const unsigned g2 = info.grid;
if (g2==g)
{
return info.position;
}
else
{
const tmap::GraphVertex v = tmap.node(g);
const tmap::GraphVertex v2 = tmap.node(g2);
pair<tmap::GraphEdge, bool> res = edge(v, v2, tmap);
ROS_ASSERT_MSG (res.second, "No edge between grid %u and grid %u "
"containing %u", g, g2, n);
const bool flip = (tmap[res.first].dest == g);
const tf::Pose offset = gmu::toPose(tmap[res.first].offset);
const tf::Transform tr = flip ? offset.inverse() : offset;
return gmu::transformPoint(tr, info.position);
}
}
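Using the hypothetical edgeTransform helper sketched after Example 4, the else branch above would reduce to a single expression (again a sketch, not the project's actual code):
return gmu::transformPoint(edgeTransform(tmap, res.first, g), info.position);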
Example 6: poseEstimationFunction
bool poseEstimationFunction(kuri_mbzirc_challenge_1_msgs::PES::Request &req,
kuri_mbzirc_challenge_1_msgs::PES::Response &res)
{
tf::Transform BaseToCamera;
double imageWidth = 1228;
double imageHeight = 1027;
BaseToCamera.setOrigin(tf::Vector3(0.0, 0.0, -0.045));
//BaseToCamera.setRotation(tf::Quaternion(0.707, -0.707, 0.000, -0.000));
BaseToCamera.setRotation(tf::Quaternion(0.9999060498015511, 0, 0, -0.013707354604664749));
tf::Transform extrinsic;
cv::Mat P_Mat_G(3, 4, CV_64FC1);
//cv::Mat P(3, 4, CV_64FC1);
tfScalar extrinsic_data[4 * 4];
pcl::PointCloud<pcl::PointXYZRGB> Pointcloud;
Pointcloud.header.frame_id = "/world";
Pointcloud.height = imageHeight;
Pointcloud.width = imageWidth;
Pointcloud.resize(imageHeight * imageWidth);
Pointcloud.is_dense = true;
// cv::Mat cvimg = cv_bridge::toCvShare(img, "bgr8")->image.clone();
//tf::poseMsgToTF(odom->pose.pose, tfpose);
// tfpose is assumed to be a file-scope pose updated elsewhere (cf. the commented-out poseMsgToTF call above)
extrinsic = BaseToCamera * tfpose.inverse();
//to test if the tf is correct, create testframe_to_camera
//br.sendTransform(tf::StampedTransform(extrinsic, ros::Time::now(), "/testframe_to_camera", "/world"));
//pinv of projection matrix...
/*for (int i = 0; i < 3; i++)
for (int j = 0; j < 4; j++)
{
P.at<double>(i, j) = cam_info.P.at(i * 4 + j);
// std::cout << "PP" << P << std::endl ;
}*/
//however, this P (the 3x4 projection matrix, presumably kept at file scope and filled from cam_info) is in camera coordinates..
extrinsic.getOpenGLMatrix(extrinsic_data);
cv::Mat E_MAT(4, 4, CV_64FC1, extrinsic_data);
P_Mat_G = P * (E_MAT.t());
// now is the ground, namely, world coordinate
double a[4], b[4], c[4];
a[0] = P_Mat_G.at<double>(0, 0);
a[1] = P_Mat_G.at<double>(0, 1);
a[2] = P_Mat_G.at<double>(0, 2);
a[3] = P_Mat_G.at<double>(0, 3);
b[0] = P_Mat_G.at<double>(1, 0);
b[1] = P_Mat_G.at<double>(1, 1);
b[2] = P_Mat_G.at<double>(1, 2);
b[3] = P_Mat_G.at<double>(1, 3);
c[0] = P_Mat_G.at<double>(2, 0);
c[1] = P_Mat_G.at<double>(2, 1);
c[2] = P_Mat_G.at<double>(2, 2);
c[3] = P_Mat_G.at<double>(2, 3);
std::clock_t start;
//double duration;
start = std::clock();
// ************************** find 3D point ******************** //
// just for the detected point
float B[2][2], bvu[2];
B[0][0] = req.A * c[0] - a[0];
B[0][1] = req.A * c[1] - a[1];
B[1][0] = req.B * c[0] - b[0];
B[1][1] = req.B * c[1] - b[1];
bvu[0] = a[2] * Ground_Z + a[3] - req.A * c[2] * Ground_Z - req.A * c[3];
bvu[1] = b[2] * Ground_Z + b[3] - req.B * c[2] * Ground_Z - req.B * c[3];
float DomB = B[1][1] * B[0][0] - B[0][1] * B[1][0];
//res.obj.pose.pose.position.x = (B[1][1] * bvu[0] - B[0][1] * bvu[1]) / DomB ;
//res.obj.pose.pose.position.y = (B[0][0] * bvu[1] - B[1][0] * bvu[0]) / DomB ;
res.X = ((B[1][1] * bvu[0] - B[0][1] * bvu[1]) / DomB) / 10000000 ;
res.Y = ((B[0][0] * bvu[1] - B[1][0] * bvu[0]) / DomB ) / 10000000;
res.Z = 2 ;
//res.obj.pose.pose.position.z = 0 ;
//ROS_INFO("request: x=%ld, y=%ld", (long int)req.A, (long int)req.B);
//ROS_INFO("sending back response: [%ld]", (long int)res.obj.pose.pose.position.x);
//std::cout << "request " << req.A << " " << req.B << std::endl ;
//std::cout << "response" << res.X << " " << res.Y << std::endl ;
ROS_INFO("request: x=%f, y=%f", (float)req.A, (float)req.B);
ROS_INFO("sending back response: x=%f, y=%f", (float)res.X , (float)res.Y);
return true;
}
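The system being solved (our summary of the code above): writing the rows of the world-frame projection matrix P_Mat_G as a, b, c, a pixel (req.A, req.B) observing a ground point (x, y, Ground_Z) satisfies req.A = (a0*x + a1*y + a2*Ground_Z + a3) / (c0*x + c1*y + c2*Ground_Z + c3), and the analogous equation with b for req.B. Clearing denominators yields the 2x2 linear system B * (x, y)^T = bvu, which the code solves by Cramer's rule; the division by 10000000 and the fixed res.Z = 2 are project-specific choices. A hedged alternative to the hand-rolled solve would be OpenCV's solver:
// Sketch: solve the same 2x2 system with cv::solve instead of Cramer's rule.
cv::Matx22f Bm(B[0][0], B[0][1],
               B[1][0], B[1][1]);
cv::Vec2f rhs(bvu[0], bvu[1]), xy;
if (cv::solve(Bm, rhs, xy))  // returns false if the system is singular
{
  // xy[0], xy[1] are the ground-plane coordinates before rescaling
}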
Example 7: run
//......... part of the code omitted here .........
/******************************************************************
* Odometry
*****************************************************************/
if (!m_paused) {
// apply offset transformation:
tf::Pose transformedPose;
if (odomData.size()!=6)
{
ROS_ERROR( "Error getting odom data. length is %zu",odomData.size() );
continue;
}
double dt = (stamp.toSec() - m_lastOdomTime);
odomX = odomData[0];
odomY = odomData[1];
odomZ = odomData[2];
odomWX = odomData[3];
odomWY = odomData[4];
odomWZ = odomData[5];
tf::Quaternion q;
// roll and pitch from IMU, yaw from odometry:
if (m_useIMUAngles)
q.setRPY(angleX, angleY, odomWZ);
else
q.setRPY(odomWX, odomWY, odomWZ);
m_odomPose.setOrigin(tf::Vector3(odomX, odomY, odomZ));
m_odomPose.setRotation(q);
if(m_mustUpdateOffset) {
if(!m_isInitialized) {
if(m_initializeFromIMU) {
// Initialization from IMU: Take x, y, z, yaw from odometry, roll and pitch from IMU
m_targetPose.setOrigin(m_odomPose.getOrigin());
m_targetPose.setRotation(tf::createQuaternionFromRPY(angleX, angleY, odomWZ));
} else if(m_initializeFromOdometry) {
m_targetPose.setOrigin(m_odomPose.getOrigin());
m_targetPose.setRotation(tf::createQuaternionFromRPY(odomWX, odomWY, odomWZ));
}
m_isInitialized = true;
} else {
// Overwrite target pitch and roll angles with IMU data
const double target_yaw = tf::getYaw(m_targetPose.getRotation());
if(m_initializeFromIMU) {
m_targetPose.setRotation(tf::createQuaternionFromRPY(angleX, angleY, target_yaw));
} else if(m_initializeFromOdometry){
m_targetPose.setRotation(tf::createQuaternionFromRPY(odomWX, odomWY, target_yaw));
}
}
m_odomOffset = m_targetPose * m_odomPose.inverse();
transformedPose = m_targetPose;
m_mustUpdateOffset = false;
} else {
transformedPose = m_odomOffset * m_odomPose;
}
//
// publish the transform over tf first
//
m_odomTransformMsg.header.stamp = stamp;
tf::transformTFToMsg(transformedPose, m_odomTransformMsg.transform);
m_transformBroadcaster.sendTransform(m_odomTransformMsg);
//
// Fill the Odometry msg
//
m_odom.header.stamp = stamp;
//set the velocity first (old values still valid)
m_odom.twist.twist.linear.x = (odomX - m_odom.pose.pose.position.x) / dt;
m_odom.twist.twist.linear.y = (odomY - m_odom.pose.pose.position.y) / dt;
m_odom.twist.twist.linear.z = (odomZ - m_odom.pose.pose.position.z) / dt;
// TODO: calc angular velocity!
// m_odom.twist.twist.angular.z = vth; ??
//set the position from the above calculated transform
m_odom.pose.pose.orientation = m_odomTransformMsg.transform.rotation;
m_odom.pose.pose.position.x = m_odomTransformMsg.transform.translation.x;
m_odom.pose.pose.position.y = m_odomTransformMsg.transform.translation.y;
m_odom.pose.pose.position.z = m_odomTransformMsg.transform.translation.z;
m_odomPub.publish(m_odom);
m_lastOdomTime = stamp.toSec();
}
}
ROS_INFO("naoqi_sensors stopped.");
}
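This loop maintains the same invariant as the torsoOdomCallback in Example 8 below: whenever a new target pose is requested, m_odomOffset = m_targetPose * m_odomPose.inverse() is recomputed so that m_odomOffset * m_odomPose equals the target from that moment on, and every subsequent iteration only applies the stored offset.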
Example 8: torsoOdomCallback
void OdometryRemap::torsoOdomCallback(const nao_msgs::TorsoOdometryConstPtr& msgOdom, const nao_msgs::TorsoIMUConstPtr& msgIMU){
if (m_paused){
ROS_DEBUG("Skipping odometry callback, paused");
return;
}
double odomTime = (msgOdom->header.stamp).toSec();
ROS_DEBUG("Received [%f %f %f %f (%f/%f) (%f/%f) %f]", odomTime, msgOdom->x, msgOdom->y, msgOdom->z, msgOdom->wx, msgIMU->angleX, msgOdom->wy, msgIMU->angleY, msgOdom->wz);
double dt = (odomTime - m_lastOdomTime);
tf::Quaternion q;
// roll and pitch from IMU, yaw from odometry:
if (m_useIMUAngles)
q.setRPY(msgIMU->angleX, msgIMU->angleY, msgOdom->wz);
else
q.setRPY(msgOdom->wx, msgOdom->wy, msgOdom->wz);
m_odomPose.setOrigin(tf::Vector3(msgOdom->x, msgOdom->y, msgOdom->z));
m_odomPose.setRotation(q);
// apply offset transformation:
tf::Pose transformedPose;
if(m_mustUpdateOffset) {
if(!m_isInitialized) {
if(m_initializeFromIMU) {
// Initialization from IMU: Take x, y, z, yaw from odometry, roll and pitch from IMU
m_targetPose.setOrigin(m_odomPose.getOrigin());
m_targetPose.setRotation(tf::createQuaternionFromRPY(msgIMU->angleX, msgIMU->angleY, msgOdom->wz));
} else if(m_initializeFromOdometry) {
m_targetPose.setOrigin(m_odomPose.getOrigin());
m_targetPose.setRotation(tf::createQuaternionFromRPY(msgOdom->wx, msgOdom->wy, msgOdom->wz));
}
m_isInitialized = true;
} else {
// Overwrite target pitch and roll angles with IMU data
const double target_yaw = tf::getYaw(m_targetPose.getRotation());
if(m_initializeFromIMU) {
m_targetPose.setRotation(tf::createQuaternionFromRPY(msgIMU->angleX, msgIMU->angleY, target_yaw));
} else if(m_initializeFromOdometry){
m_targetPose.setRotation(tf::createQuaternionFromRPY(msgOdom->wx, msgOdom->wy, target_yaw));
}
}
m_odomOffset = m_targetPose * m_odomPose.inverse();
transformedPose = m_targetPose;
m_mustUpdateOffset = false;
} else {
transformedPose = m_odomOffset * m_odomPose;
}
// publish the transform over tf first:
m_odomTransformMsg.header.stamp = msgOdom->header.stamp;
tf::transformTFToMsg(transformedPose, m_odomTransformMsg.transform);
m_transformBroadcaster.sendTransform(m_odomTransformMsg);
//next, publish the actual odometry message:
m_odom.header.stamp = msgOdom->header.stamp;
m_odom.header.frame_id = m_odomFrameId;
//set the velocity first (old values still valid)
m_odom.twist.twist.linear.x = (msgOdom->x - m_odom.pose.pose.position.x) / dt;
m_odom.twist.twist.linear.y = (msgOdom->y - m_odom.pose.pose.position.y) / dt;
m_odom.twist.twist.linear.z = (msgOdom->z - m_odom.pose.pose.position.z) / dt;
// TODO: calc angular velocity!
// m_odom.twist.twist.angular.z = vth; ??
//set the position from the above calculated transform
m_odom.pose.pose.orientation = m_odomTransformMsg.transform.rotation;
m_odom.pose.pose.position.x = m_odomTransformMsg.transform.translation.x;
m_odom.pose.pose.position.y = m_odomTransformMsg.transform.translation.y;
m_odom.pose.pose.position.z = m_odomTransformMsg.transform.translation.z;
//publish the message
m_odomPub.publish(m_odom);
m_lastOdomTime = odomTime;
// TODO: not used
// m_lastWx = msgOdom->wx;
// m_lastWy = msgOdom->wy;
// m_lastWz = msgOdom->wz;
}
Example 9: fabs
bool
FootstepNavigation::getFootstep(const tf::Pose& from,
const State& from_planned,
const State& to,
humanoid_nav_msgs::StepTarget* footstep)
{
// get footstep to reach 'to' from 'from'
tf::Transform step = from.inverse() *
tf::Pose(tf::createQuaternionFromYaw(to.getTheta()),
tf::Point(to.getX(), to.getY(), 0.0));
// set the footstep
footstep->pose.x = step.getOrigin().x();
footstep->pose.y = step.getOrigin().y();
footstep->pose.theta = tf::getYaw(step.getRotation());
if (to.getLeg() == LEFT)
footstep->leg = humanoid_nav_msgs::StepTarget::left;
else // to.leg == RIGHT
footstep->leg = humanoid_nav_msgs::StepTarget::right;
/* check if the footstep can be performed by the NAO robot ******************/
// check if the step lies within the executable range
if (performable(*footstep))
{
return true;
}
else
{
// check if there is only a minor divergence between the current support
// foot and the foot placement used during the planning task: in such a case
// perform the step that has been used during the planning
float step_diff_x = fabs(from.getOrigin().x() - from_planned.getX());
float step_diff_y = fabs(from.getOrigin().y() - from_planned.getY());
float step_diff_theta = fabs(
angles::shortest_angular_distance(
tf::getYaw(from.getRotation()), from_planned.getTheta()));
if (step_diff_x < ivAccuracyX && step_diff_y < ivAccuracyY &&
step_diff_theta < ivAccuracyTheta)
{
step = tf::Pose(tf::createQuaternionFromYaw(from_planned.getTheta()),
tf::Point(from_planned.getX(), from_planned.getY(), 0.0)
).inverse() *
tf::Pose(tf::createQuaternionFromYaw(to.getTheta()),
tf::Point(to.getX(), to.getY(), 0.0));
footstep->pose.x = step.getOrigin().x();
footstep->pose.y = step.getOrigin().y();
footstep->pose.theta = tf::getYaw(step.getRotation());
return true;
}
return false;
}
// // ALTERNATIVE: clip the footstep into the executable range; if nothing was
// // clipped: perform; if too much was clipped: do not perform
// humanoid_nav_msgs::ClipFootstep footstep_clip;
// footstep_clip.request.step = footstep;
// ivClipFootstepSrv.call(footstep_clip);
//
// if (performanceValid(footstep_clip))
// {
// footstep.pose.x = footstep_clip.response.step.pose.x;
// footstep.pose.y = footstep_clip.response.step.pose.y;
// footstep.pose.theta = footstep_clip.response.step.pose.theta;
// return true;
// }
// else
// {
// return false;
// }
}
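Both branches above instantiate the same pattern: support_foot.inverse() * target_foot expresses the target footstep in the support foot's frame, which is exactly the displacement that humanoid_nav_msgs::StepTarget encodes. Factored out as a sketch:
// The shared pattern of this example: a step, relative to the supporting foot.
tf::Transform relativeStep(const tf::Pose& support, const tf::Pose& target)
{
  return support.inverse() * target;
}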