本文整理汇总了C++中tf::TransformListener::transformPose方法的典型用法代码示例。如果您正苦于以下问题:C++ TransformListener::transformPose方法的具体用法?C++ TransformListener::transformPose怎么用?C++ TransformListener::transformPose使用的例子?那么, 这里精选的方法代码示例或许可以为您提供帮助。您也可以进一步了解该方法所在类tf::TransformListener
的用法示例。
在下文中一共展示了TransformListener::transformPose方法的15个代码示例,这些例子默认根据受欢迎程度排序。您可以为喜欢或者感觉有用的代码点赞,您的评价将有助于系统推荐出更棒的C++代码示例。
示例1: getLatestIdentityTransform
bool planning_environment::getLatestIdentityTransform(const std::string& to_frame,
const std::string& from_frame,
tf::TransformListener& tf,
tf::Transform& pose)
{
geometry_msgs::PoseStamped temp_pose;
temp_pose.pose.orientation.w = 1.0;
temp_pose.header.frame_id = from_frame;
geometry_msgs::PoseStamped trans_pose;
ros::Time tm;
std::string err_string;
if (tf.getLatestCommonTime(to_frame, temp_pose.header.frame_id, tm, &err_string) == tf::NO_ERROR) {
temp_pose.header.stamp = tm;
try {
tf.transformPose(to_frame, temp_pose, trans_pose);
} catch(tf::TransformException& ex) {
ROS_ERROR_STREAM("Unable to transform object from frame " << temp_pose.header.frame_id << " to " << to_frame << " error is " << ex.what());
return false;
}
} else {
ROS_ERROR_STREAM("No latest time for transforming " << from_frame << " to " << to_frame);
return false;
}
tf::poseMsgToTF(trans_pose.pose, pose);
return true;
}
示例2: commandForce
// Callback for a desired wrench.  Stores [force; torque] into F_des_ and, if
// the wrench was expressed in a frame other than the root frame, rotates both
// three-vectors into the root frame using tf.
void commandForce(const geometry_msgs::WrenchStamped::ConstPtr &msg)
{
  // Pack the incoming wrench as a 6-vector: force first, then torque.
  F_des_ << msg->wrench.force.x, msg->wrench.force.y, msg->wrench.force.z,
            msg->wrench.torque.x, msg->wrench.torque.y, msg->wrench.torque.z;

  // Already expressed in the root frame -- nothing to rotate.
  if (msg->header.frame_id == root_name_)
    return;

  // Identity pose stamped in the message frame; transforming it yields that
  // frame's pose (and hence its rotation) with respect to the root frame.
  geometry_msgs::PoseStamped frame_pose;
  frame_pose.header.frame_id = msg->header.frame_id;
  frame_pose.pose.orientation.w = 1.0;
  try {
    tf_.waitForTransform(root_name_, msg->header.frame_id, msg->header.stamp, ros::Duration(0.1));
    tf_.transformPose(root_name_, frame_pose, frame_pose);
  }
  catch (const tf::TransformException &ex)
  {
    ROS_ERROR("Failed to transform: %s", ex.what());
    return;
  }

  // Rotate force and torque components into the root frame (rotation only;
  // translation does not apply to free vectors).
  Eigen::Affine3d frame_in_root;
  tf::poseMsgToEigen(frame_pose.pose, frame_in_root);
  F_des_.head<3>() = frame_in_root.linear() * F_des_.head<3>();
  F_des_.tail<3>() = frame_in_root.linear() * F_des_.tail<3>();
}
示例3: pose_cb
// Pose callback: appends the incoming pose to the traced path -- converting
// it into the path's frame when it arrives in a different one -- and then
// republishes the whole path.
void PathTracer::pose_cb(const geometry_msgs::PoseStampedConstPtr& msg)
{
  std::string current_frame = msg->header.frame_id;
  geometry_msgs::PoseStamped p;
  if(current_frame.compare(path_frame) == 0)
  {
    // Already expressed in the path frame -- copy header and pose verbatim.
    p.header.frame_id = msg->header.frame_id;
    p.header.stamp = msg->header.stamp;
    p.pose = msg->pose;
    path.poses.push_back(p);
  }
  else
  {
    try
    {
      // BUG FIX: transform into the path's frame.  The original passed
      // current_frame (the pose's own frame), which is a no-op and appended
      // poses expressed in a foreign frame to the path.
      tf_listener.transformPose(path_frame, *msg, p);
      path.poses.push_back(p);
    }
    catch(const tf::TransformException& ex)  // catch by const ref, not by value
    {
      ROS_ERROR("transfrom exception : %s",ex.what());
    }
  }
  path_pub.publish(path);
}
示例4: updateAttachedObjectBodyPoses
// Refresh the poses of all tf-tracked attached objects in the kinematic
// state: for each link carrying attached bodies, look up the link's current
// pose in the world frame via tf, push it into `state`, and update the
// attached-body poses for that link.  The collision model's bodies lock is
// held for the duration of the update.
void planning_environment::updateAttachedObjectBodyPoses(planning_environment::CollisionModels* cm,
                                                         planning_models::KinematicState& state,
                                                         tf::TransformListener& tf)
{
  cm->bodiesLock();
  const std::map<std::string, std::map<std::string, bodies::BodyVector*> >& link_att_objects = cm->getLinkAttachedObjects();
  if(link_att_objects.empty()) {
    cm->bodiesUnlock();
    return;
  }
  //this gets all the attached bodies in their correct current positions according to tf
  geometry_msgs::PoseStamped ps;
  ps.pose.orientation.w = 1.0;  // identity pose in the link frame
  for(std::map<std::string, std::map<std::string, bodies::BodyVector*> >::const_iterator it = link_att_objects.begin();
      it != link_att_objects.end();
      it++) {
    ps.header.frame_id = it->first;  // it->first is the link name
    std::string es;
    if (tf.getLatestCommonTime(cm->getWorldFrameId(), it->first, ps.header.stamp, &es) != tf::NO_ERROR) {
      ROS_INFO_STREAM("Problem transforming into fixed frame from " << it->first << ". Error string " << es);
      continue;
    }
    geometry_msgs::PoseStamped psout;
    // BUG FIX: transformPose can throw tf::TransformException; previously an
    // exception would propagate out of this function with the bodies lock
    // still held (bodiesUnlock would never run).  Skip the link instead.
    try {
      tf.transformPose(cm->getWorldFrameId(), ps, psout);
    } catch (const tf::TransformException& ex) {
      ROS_INFO_STREAM("Problem transforming into fixed frame from " << it->first << ". Error string " << ex.what());
      continue;
    }
    tf::Transform link_trans;
    tf::poseMsgToTF(psout.pose, link_trans);
    state.updateKinematicStateWithLinkAt(it->first, link_trans);
    cm->updateAttachedBodyPosesForLink(state, it->first);
  }
  cm->bodiesUnlock();
}
示例5:
//Baxter-robot specific:
//IK is written for tool flange--called right_hand--w/rt torso frame
//need to convert gripper request, generic name "generic_gripper_frame", expressed w/rt some frame (e.g. system_ref_frame)
// into pose of flange w/rt torso frame; return this result as an Eigen::Affine3d (though frame id's are lost)
// Convert a desired gripper pose (in any reference frame) into the pose of
// the tool flange with respect to the torso frame, returned as an
// Eigen::Affine3d (frame ids are lost in the conversion, so the caller must
// remember the convention).
Eigen::Affine3d ArmMotionInterface::xform_gripper_pose_to_affine_flange_wrt_torso(geometry_msgs::PoseStamped des_pose_gripper) {
    Eigen::Affine3d affine_flange_wrt_torso;
    tf::StampedTransform flange_stf;
    geometry_msgs::PoseStamped flange_gmps, flange_wrt_torso_gmps;
    //convert des gripper pose to a stamped transform, so we can do more transforms
    ROS_WARN("xform_gripper_pose_to_affine_flange_wrt_torso: input pose-stamped: ");
    xformUtils.printStampedPose(des_pose_gripper);
    tf::StampedTransform gripper_stf = xformUtils.convert_poseStamped_to_stampedTransform(des_pose_gripper, "generic_gripper_frame");
    //convert to transform of corresponding tool flange w/rt whatever reference frame_id
    ROS_INFO("gripper_stf: ");
    xformUtils.printStampedTf(gripper_stf);
    bool mult_ok = xformUtils.multiply_stamped_tfs(gripper_stf, generic_toolflange_frame_wrt_gripper_frame_stf_, flange_stf);
    if (!mult_ok) { ROS_WARN("stf multiply not legal! "); } //should not happen
    // BUG FIX: print flange_stf only after it has been computed by the
    // multiply above; it was previously printed while still default-constructed.
    ROS_INFO("flange_stf");
    xformUtils.printStampedTf(flange_stf);
    //convert tf::StampedTransform to geometry_msgs::PoseStamped
    flange_gmps = xformUtils.get_pose_from_stamped_tf(flange_stf);
    //change the reference frame from whatever to "torso":
    tfListener_->transformPose("torso", flange_gmps, flange_wrt_torso_gmps);
    ROS_INFO("corresponding flange frame w/rt torso frame: ");
    xformUtils.printStampedPose(flange_wrt_torso_gmps);
    //convert this to an affine. parent and child frame id's are lost, so we'll have to remember what this means
    affine_flange_wrt_torso = xformUtils.transformPoseToEigenAffine3d(flange_wrt_torso_gmps);
    return affine_flange_wrt_torso;
}
示例6: publish_all
// Publish the robot odometry: look up the current pose of /base_link in the
// /odom frame via tf and combine it with the latest commanded speeds
// (xspeed/tspeed members) into the my_odom message, then publish it.
void TransformPose::publish_all(tf::TransformListener& listener)
{
    // Identity pose stamped in the base frame; transforming it into /odom
    // yields base_link's current pose with respect to /odom.
    geometry_msgs::PoseStamped odom_pose;
    odom_pose.header.frame_id = "/base_link";
    //we'll just use the most recent transform available for our simple example
    odom_pose.header.stamp = ros::Time();
    //just an arbitrary point in space
    odom_pose.pose.position.x = 0.0;
    odom_pose.pose.position.y = 0.0;
    odom_pose.pose.position.z = 0.0;
    odom_pose.pose.orientation.x = 0.0;
    odom_pose.pose.orientation.y = 0.0;
    odom_pose.pose.orientation.z = 0.0;
    odom_pose.pose.orientation.w = 1.0;  // identity orientation
    try{
        // NOTE(review): the wait is on time `now`, but the pose is stamped
        // ros::Time() (latest available), so transformPose below uses the most
        // recent transform regardless of `now` -- confirm this is intended.
        ros::Time now = ros::Time::now();
        listener.waitForTransform("/odom", "/base_link", now, ros::Duration(5.0));
        geometry_msgs::PoseStamped base_pose;
        listener.transformPose("/odom", odom_pose, base_pose);
        // Fill the odometry message (my_odom is a member) from the transformed pose.
        my_odom.header.stamp = base_pose.header.stamp;
        my_odom.pose.pose.position.x = base_pose.pose.position.x;
        my_odom.pose.pose.position.y = base_pose.pose.position.y;
        my_odom.pose.pose.position.z = base_pose.pose.position.z;
        my_odom.pose.pose.orientation = base_pose.pose.orientation;
        //my_maximus_odom.twist.twist.linear.x = sqrt(pow(base_pose.pose.position.x - old_x, 2) + pow(base_pose.pose.position.y - old_y, 2)) / (my_maximus_odom.header.stamp.toSec() - old_time.toSec());
        // The twist comes from externally supplied speed members, not from tf.
        my_odom.twist.twist.linear.x = xspeed.data;
        my_odom.twist.twist.linear.y = 0;
        my_odom.twist.twist.linear.z = 0;
        my_odom.twist.twist.angular.x = 0;
        my_odom.twist.twist.angular.y = 0;
        my_odom.twist.twist.angular.z = tspeed.data;
        //my_maximus_odom.twist.twist.angular.z = 1.1 * (tf::getYaw (my_maximus_odom.pose.pose.orientation) - old_theta) / (my_maximus_odom.header.stamp.toSec() - old_time.toSec());
        odom_pub.publish(my_odom);
        //old_x = base_pose.pose.position.x;
        //old_y = base_pose.pose.position.y;
        //old_theta = tf::getYaw (my_odom.pose.pose.orientation);
        //old_time = my_odom.header.stamp;
        /*
        ROS_INFO("base_laser: (%.2f, %.2f. %.2f) -----> base_link: (%.2f, %.2f, %.2f) at time %.2f",
        my_maximus_odom.pose.pose.position.x, my_maximus_odom.pose.pose.position.y, my_maximus_odom.pose.pose.position.z,
        my_maximus_odom.twist.twist.linear.x, my_maximus_odom.twist.twist.linear.y, my_maximus_odom.twist.twist.angular.z, base_pose.header.stamp.toSec());
        */
    }
    catch(tf::TransformException& ex){
        ROS_ERROR("Received an exception trying to transform a point from \"odom\" to \"base_link\": %s", ex.what());
    }
}
示例7: findPeople
// Simulated people detector: checks whether the fake person (stored in the
// map frame) falls inside the robot's forward sensor cone, injects detection
// noise (false negatives and false positives), and publishes a PeopleFinder
// message plus the current sensor-shadow marker.
void findPeople(const tf::TransformListener& tf_listener) {
    // initialize message
    hide_n_seek::PeopleFinder people_msg;
    people_msg.header.stamp = ros::Time();
    people_msg.header.frame_id = "base_link";
    people_msg.sensor_range = sensor_range;
    people_msg.sensor_fov = sensor_fov;
    bool foundPerson = false;
    try {
        fakePersonPoseMap.header.stamp = ros::Time(); // trust me, it is still there
        fakePersonPoseMap.header.frame_id = "/map";
        // transform the fake person's pose into the robot base frame
        geometry_msgs::PoseStamped fakePersonPoseBase;
        tf_listener.transformPose ("base_link", fakePersonPoseMap, fakePersonPoseBase);
        //ROS_INFO_STREAM( " fake person = "
        //	<< fakePersonPoseBase.pose.position.x
        //	<< ", " << fakePersonPoseBase.pose.position.y
        //	<< ", " << fakePersonPoseBase.pose.position.z);
        // search for people: must be in front of the robot (x > 0) and in range
        float range = sqrt(pow(fakePersonPoseBase.pose.position.x,2) + pow(fakePersonPoseBase.pose.position.y,2));
        if(fakePersonPoseBase.pose.position.x > 0 && range <= sensor_range) {
            if(fakePersonPoseBase.pose.position.y == 0) foundPerson = true;  // dead ahead
            else {
                // bearing off the forward axis; detected when inside half the FOV
                float bearing = PI/2 - atan(fakePersonPoseBase.pose.position.x/fabs(fakePersonPoseBase.pose.position.y));
                if(bearing < sensor_fov/2) foundPerson = true;
                //ROS_INFO_STREAM(bearing);
            }
        }
        // make sensor reading noisy: drop some true detections, fabricate some
        // false positives at a fixed point inside the cone
        int randNum = rand() % 100;
        if(foundPerson && randNum > percent_true_detected) foundPerson = false;
        else if(!foundPerson && randNum <= percent_true_undetected) {
            foundPerson = true;
            // NOTE(review): sin(tan(...))/cos(tan(...)) looks suspicious -- a
            // point at bearing sensor_fov/4 would be cos/sin of the angle
            // directly; confirm the intended placement of the fake positive.
            fakePersonPoseBase.pose.position.x = sensor_range / 2 * cos(tan(sensor_fov / 4));
            fakePersonPoseBase.pose.position.y = sensor_range / 2 * sin(tan(sensor_fov / 4));
        }
        if(foundPerson) people_msg.people.push_back(fakePersonPoseBase.pose); // this is more realistic (Kinect node would give relative position)
    }
    catch (const tf::TransformException& ex) {  // BUG FIX: catch by const reference, not by value
        ROS_ERROR("%s",ex.what());
    }
    // publish current sensor shadow
    pub_sensor_shadow.publish(sensor_shadow);
    // publish any people we found
    pub_people_finder.publish(people_msg);
    ROS_INFO_STREAM("found " << people_msg.people.size() << " people");
}
示例8: getPositionFK
// Service callback: compute forward kinematics for each requested link.
// Seeds the KDL solver from the request's joint_state, solves FK per link,
// transforms each result into the request's header frame, and fills the
// response poses / error codes.  Returns false only on malformed requests or
// transform failure; FK failures are reported via response.error_code while
// still returning true so the response is delivered.
bool ExcavaROBArmKinematics::getPositionFK(excavaROB_arm_kinematics_msgs::GetPositionFK::Request &request,
                                           excavaROB_arm_kinematics_msgs::GetPositionFK::Response &response) {
    KDL::Frame p_out;
    KDL::JntArray jnt_pos_in;
    geometry_msgs::PoseStamped pose;
    tf::Stamped<tf::Pose> tf_pose;
    // Reject requests whose link list does not match the chain size.
    if (num_joints_arm_ != request.fk_link_names.size()) {
        ROS_ERROR("The request has not the same dimension of the arm_chain joints.");
        return false;
    }
    // Seed the joint array.  Assumes joint_state.name/position hold at least
    // num_joints_arm_ entries -- TODO confirm callers guarantee this.
    jnt_pos_in.resize(num_joints_arm_);
    for (unsigned int i = 0; i < num_joints_arm_; i++) {
        int jnt_index = getJointIndex(request.joint_state.name[i]);
        if (jnt_index >= 0)
            jnt_pos_in(jnt_index) = request.joint_state.position[i];
    }
    response.pose_stamped.resize(num_joints_arm_);
    response.fk_link_names.resize(num_joints_arm_);
    for (unsigned int i = 0; i < num_joints_arm_; i++) {
        int segmentIndex = getKDLSegmentIndex(request.fk_link_names[i]);
        ROS_DEBUG("End effector index: %d", segmentIndex);
        ROS_DEBUG("Chain indices: %d", arm_chain_.getNrOfSegments());
        if (fk_solver_->JntToCart(jnt_pos_in, p_out, segmentIndex) >= 0) {
            tf_pose.frame_id_ = root_name_;
            tf_pose.stamp_ = ros::Time();
            tf::PoseKDLToTF(p_out, tf_pose);
            try {
                // Express the FK result in the frame the caller asked for.
                tf_listener_.transformPose(request.header.frame_id, tf_pose, tf_pose);
            } catch (...) {
                ROS_ERROR("ExcavaROB Kinematics: Could not transform FK pose to frame: %s", request.header.frame_id.c_str());
                response.error_code.val = response.error_code.FRAME_TRANSFORM_FAILURE;
                return false;
            }
            tf::poseStampedTFToMsg(tf_pose, pose);
            response.pose_stamped[i] = pose;
            response.fk_link_names[i] = request.fk_link_names[i];
            response.error_code.val = response.error_code.SUCCESS;
        } else {
            // FK failure: record the error code but keep processing the rest.
            // (Removed the `valid` local that was assigned here but never read.)
            ROS_ERROR("ExcavaROB Kinematics: Could not compute FK for %s", request.fk_link_names[i].c_str());
            response.error_code.val = response.error_code.NO_FK_SOLUTION;
        }
    }
    // Return true so the response (with any per-link error codes) is delivered.
    return true;
}
示例9: getPositionIK
// Service callback: solve inverse kinematics for the requested pose.
// Seeds the solver from the request's robot_state joint positions, transforms
// the goal pose into the solver's root frame, runs the KDL position IK
// solver, and fills the response solution / error_code.  Returns true even
// when no IK solution exists (error_code carries the result); returns false
// only when the goal pose cannot be transformed into the root frame.
bool Kinematics::getPositionIK(moveit_msgs::GetPositionIK::Request &request,
                               moveit_msgs::GetPositionIK::Response &response) {
    geometry_msgs::PoseStamped pose_msg_in = request.ik_request.pose_stamped;
    tf::Stamped<tf::Pose> transform;
    tf::Stamped<tf::Pose> transform_root;
    tf::poseStampedMsgToTF( pose_msg_in, transform );
    //Do the IK
    KDL::JntArray jnt_pos_in;
    KDL::JntArray jnt_pos_out;
    jnt_pos_in.resize(num_joints);
    // Seed the solver.  Assumes the request's joint_state lists at least
    // num_joints entries -- TODO confirm callers guarantee this.
    for (unsigned int i=0; i < num_joints; i++) {
        // nalt: mod for moveit msgs
        int tmp_index = getJointIndex(request.ik_request.robot_state.joint_state.name[i]);
        if (tmp_index >=0) {
            jnt_pos_in(tmp_index) = request.ik_request.robot_state.joint_state.position[i];
        } else {
            ROS_ERROR("i: %d, No joint index for %s",i,request.ik_request.robot_state.joint_state.name[i].c_str());
        }
    }
    //Convert F to our root_frame
    try {
        tf_listener.transformPose(root_name, transform, transform_root);
    } catch (...) {
        // NOTE(review): catch-all discards the tf error details; catching
        // tf::TransformException and logging ex.what() would aid debugging.
        ROS_ERROR("Could not transform IK pose to frame: %s", root_name.c_str());
        response.error_code.val = response.error_code.FRAME_TRANSFORM_FAILURE;
        return false;
    }
    KDL::Frame F_dest;
    tf::TransformTFToKDL(transform_root, F_dest);
    int ik_valid = ik_solver_pos->CartToJnt(jnt_pos_in, F_dest, jnt_pos_out);
    if (ik_valid >= 0) {
        // Solution found: copy the joint vector into the response.
        response.solution.joint_state.name = info.joint_names;
        response.solution.joint_state.position.resize(num_joints);
        for (unsigned int i=0; i < num_joints; i++) {
            response.solution.joint_state.position[i] = jnt_pos_out(i);
            ROS_DEBUG("IK Solution: %s %d: %f",response.solution.joint_state.name[i].c_str(),i,jnt_pos_out(i));
        }
        response.error_code.val = response.error_code.SUCCESS;
        return true;
    } else {
        // No solution: still return true so the error_code reaches the caller.
        ROS_DEBUG("An IK solution could not be found");
        response.error_code.val = response.error_code.NO_IK_SOLUTION;
        return true;
    }
}
示例10: obj_real
// Callback for recognized-object arrays: transforms each object's pose into
// target_frame_ and publishes it as a MoveIt collision object -- using the
// object database mesh when available, otherwise the default cylinder
// primitive (msg_cylinder_ member).
void obj_real(const object_recognition_msgs::RecognizedObjectArray::ConstPtr& msg)
{
  try
  {
    int obj_id = 0;
    object_recognition_msgs::RecognizedObjectArray::_objects_type::const_iterator it;
    for (it = msg->objects.begin(); it != msg->objects.end(); ++it)
    {
      msg_obj_cam_.header = msg->header;
      // stamp 0 = use the latest available transform
      msg_obj_cam_.header.stamp = ros::Time(0);
      msg_obj_cam_.pose = it->pose.pose.pose;
      listener_.transformPose(target_frame_, msg_obj_cam_, msg_obj_pose);
      moveit_msgs::CollisionObject msg_obj_collision;
      msg_obj_collision.header = msg->header;
      msg_obj_collision.header.frame_id = target_frame_;
      object_recognition_msgs::GetObjectInformation obj_info;
      obj_info.request.type = it->type;
      if (getMeshFromDB(obj_info))
      {
        // Database has a mesh for this object type -- publish it as a mesh.
        msg_obj_collision.meshes.push_back(obj_info.response.information.ground_truth_mesh);
        msg_obj_collision.mesh_poses.push_back(msg_obj_pose.pose);
      }
      else
      {
        // Fall back to the default cylinder primitive.
        msg_obj_collision.primitive_poses.push_back(msg_obj_pose.pose);
        msg_obj_collision.primitives.push_back(msg_cylinder_);
      }
      msg_obj_collision.operation = moveit_msgs::CollisionObject::ADD;
      msg_obj_collision.type = it->type;
      // Unique id: "<index>_<db type key>"
      std::stringstream ss;
      ss << obj_id << "_" << it->type.key;
      msg_obj_collision.id = ss.str();
      ++obj_id;
      object_moveit_pub_.publish(msg_obj_collision);
    }
  }
  catch (const tf::TransformException& ex)  // BUG FIX: catch by const reference, not by value
  {
    ROS_ERROR("%s",ex.what());
    ros::Duration(1.0).sleep();
  }
}
示例11: computeError
// Compute the 2D tracking error at time `now` for a trajectory element:
// express the element's pose (stamped in frame_id_) in the robot base frame;
// the resulting x/y/yaw ARE the error of the target relative to the robot.
geometry_msgs::Pose2D computeError(const ros::Time & now, const occgrid_planner::TrajectoryElement & te) {
    // (Removed an unused local tf::StampedTransform that was never read.)
    // NOTE(review): waitForTransform's result is ignored and transformPose can
    // still throw tf::TransformException -- presumably a caller catches it; verify.
    listener_.waitForTransform(base_frame_,frame_id_,now,ros::Duration(1.0));
    geometry_msgs::PoseStamped pose,error;
    pose.header.stamp = now;
    pose.header.frame_id = frame_id_;
    pose.pose = te.pose;
    listener_.transformPose(base_frame_,pose,error);
    geometry_msgs::Pose2D result;
    result.x = error.pose.position.x;
    result.y = error.pose.position.y;
    result.theta = tf::getYaw(error.pose.orientation);
    // printf("Current error: %+6.2f %+6.2f %+6.2f\n",result.x,result.y,result.theta*180./M_PI);
    return result;
}
示例12: commandPose
// Callback for a commanded pose: re-express it in the controller's root
// frame and store the result in the desired-pose member x_desi_.  On any tf
// failure the command is logged and dropped.
void commandPose(const geometry_msgs::PoseStamped::ConstPtr &command)
{
  geometry_msgs::PoseStamped pose_in_root;
  try {
    // Give tf a short grace period for the transform to become available.
    tf_.waitForTransform(root_name_, command->header.frame_id, command->header.stamp, ros::Duration(0.1));
    tf_.transformPose(root_name_, *command, pose_in_root);
  } catch (const tf::TransformException &ex) {
    ROS_ERROR("Failed to transform: %s", ex.what());
    return;
  }
  tf::poseMsgToEigen(pose_in_root.pose, x_desi_);
}
示例13: msgCallback
// Callback to register with tf::MessageFilter to be called when transforms are available
// Callback to register with tf::MessageFilter to be called when transforms are available.
// Transforms the incoming pose into the "odom" frame and logs its position.
void msgCallback(const boost::shared_ptr<const geometry_msgs::PoseStamped>& point_ptr)
{
  geometry_msgs::PoseStamped transformed;
  try
  {
    const std::string target_frame = "odom";
    tf_.transformPose(target_frame, *point_ptr, transformed);
    ROS_INFO("(x:%f y:%f z:%f)\n",
             transformed.pose.position.x,
             transformed.pose.position.y,
             transformed.pose.position.z);
  }
  catch (tf::TransformException &ex)
  {
    ROS_ERROR("Failure %s\n", ex.what()); //Print exception which was caught
  }
};
示例14: poseCallback
// Callback for a goal pose expressed in a world frame: transform it into
// base_link and republish it, logging both the incoming and transformed
// positions.
void poseCallback(const geometry_msgs::PoseStamped::ConstPtr& worldPose) {
    geometry_msgs::PoseStamped baseGoal;
    // NOTE(review): a function-local static listener starts with an empty tf
    // buffer, so the first few callbacks will typically fail to transform.
    static tf::TransformListener ls;
    try{
        // Pass the incoming pose directly; the intermediate copy was redundant.
        ls.transformPose("base_link", *worldPose, baseGoal);
        pubpose.publish(baseGoal);
        ROS_INFO("Pose Data Received: x=%f, y=%f, Transformed: x=%f, y=%f",
                 worldPose->pose.position.x, worldPose->pose.position.y,
                 baseGoal.pose.position.x, baseGoal.pose.position.y);
    }
    catch (const tf::TransformException& ex){  // BUG FIX: catch by const reference, not by value
        ROS_ERROR("%s",ex.what());
        // ros::Duration(1.0).sleep();
    }
}
示例15: getPoseInRobotFrame
// Convert the next desired robot condition (position, velocity, acceleration
// expressed in the "map" frame) into the "base_footprint" frame, writing the
// result into the _nextRobotCond_r member.
// Indexing convention appears to be [0]=x, [1]=y, [5]=yaw -- TODO confirm
// against the SM_COND producer.
void getPoseInRobotFrame(std::vector<SM_COND> nextRobotCond_w)
{
    //transforming position into robot frame
    geometry_msgs::PoseStamped goal;
    geometry_msgs::PoseStamped yawMapRobot;  // NOTE(review): declared but never used here
    geometry_msgs::PoseStamped goalRobotFrame;
    // position
    goal.header.stamp= ros::Time(0);  // time 0 = latest available transform
    goal.header.frame_id= "map";
    goal.pose.position.x= nextRobotCond_w[0].x;
    goal.pose.position.y= nextRobotCond_w[1].x;
    goal.pose.position.z= 0.0;
    // Encode the desired yaw as a quaternion on the goal pose.
    tf::Quaternion q = tf::createQuaternionFromRPY(0.0,0.0,nextRobotCond_w[5].x);
    tf::quaternionTFToMsg(q,goal.pose.orientation);
    // NOTE(review): waitForTransform's return value is ignored, and
    // transformPose/lookupTransform below can throw tf::TransformException;
    // presumably a caller catches it -- verify.
    _listener.waitForTransform("base_footprint", "map",
                               ros::Time(0), ros::Duration(1.0));
    _listener.transformPose("base_footprint", goal, goalRobotFrame);
    _nextRobotCond_r[0].x = goalRobotFrame.pose.position.x;
    _nextRobotCond_r[1].x = goalRobotFrame.pose.position.y;
    _nextRobotCond_r[5].x = tf::getYaw(goalRobotFrame.pose.orientation);
    // velocity and acceleration
    //record the starting transform from the odometry to the base frame
    _listener.lookupTransform("map", "base_footprint",
                              ros::Time(0), world_transform);
    double roll, pitch, yaw;
    q = world_transform.getRotation();
    // NOTE(review): btMatrix3x3 is the legacy Bullet alias; current tf code
    // uses tf::Matrix3x3 -- confirm which API version this codebase targets.
    btMatrix3x3(q).getRPY(roll, pitch, yaw);
    // Rotate world-frame velocities/accelerations into the robot frame
    // (planar rotation by -yaw); the yaw components are copied unchanged.
    _nextRobotCond_r[0].v = cos(yaw)*nextRobotCond_w[0].v + sin(yaw)*nextRobotCond_w[1].v;
    _nextRobotCond_r[1].v = -sin(yaw)*nextRobotCond_w[0].v + cos(yaw)*nextRobotCond_w[1].v;
    _nextRobotCond_r[5].v = nextRobotCond_w[5].v;
    _nextRobotCond_r[0].a = cos(yaw)*nextRobotCond_w[0].a + sin(yaw)*nextRobotCond_w[1].a;
    _nextRobotCond_r[1].a = -sin(yaw)*nextRobotCond_w[0].a + cos(yaw)*nextRobotCond_w[1].a;
    _nextRobotCond_r[5].a = nextRobotCond_w[5].a;
}