This article collects typical usage examples of the C++ method tf::TransformListener::lookupTransform. If you are unsure how to call TransformListener::lookupTransform in C++, or are looking for concrete examples of its use, the curated code samples below should help. You can also browse further usage examples of the containing class, tf::TransformListener.
The following presents 15 code examples of the TransformListener::lookupTransform method, sorted by popularity by default.
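Before the individual examples, here is a minimal, self-contained sketch of the typical call pattern (the node name and the frame names /map and /base_link are placeholders for illustration, not taken from any of the examples below): create a tf::TransformListener, wait until the transform is available with waitForTransform, then query it with lookupTransform inside a try/catch, since lookupTransform throws tf::TransformException on failure. Passing ros::Time(0) requests the latest available transform rather than one at a specific timestamp; most of the examples below use the same idiom.

#include <ros/ros.h>
#include <tf/transform_listener.h>

int main(int argc, char** argv)
{
  ros::init(argc, argv, "lookup_transform_demo");
  ros::NodeHandle nh;
  tf::TransformListener listener;

  ros::Rate rate(10.0);
  while (nh.ok())
  {
    tf::StampedTransform transform;
    try
    {
      // Block (up to 1 s) until the transform is available, then query the latest one.
      listener.waitForTransform("/map", "/base_link", ros::Time(0), ros::Duration(1.0));
      listener.lookupTransform("/map", "/base_link", ros::Time(0), transform);
      ROS_INFO("base_link in map: x=%.2f y=%.2f",
               transform.getOrigin().x(), transform.getOrigin().y());
    }
    catch (tf::TransformException& ex)
    {
      // lookupTransform throws when the transform is missing or too old.
      ROS_WARN("%s", ex.what());
    }
    rate.sleep();
  }
  return 0;
}

Each example that follows is a variation of this pattern embedded in a ROS callback or helper function.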
Example 1: lookForSpecificBodyTransform
std::string lookForSpecificBodyTransform(tf::TransformListener& listener, std::string frame_id, std::string body_to_track_frame, tf::StampedTransform& transform)
{
  // Look for the user's torso, or for the torso estimated by the Kalman filter
  try
  {
    if(skeleton_to_track == -1) // Kalman filter estimate
    {
      listener.lookupTransform(frame_id, "torso_k", ros::Time(0), transform);
    }
    else // Tracked user
    {
      listener.lookupTransform(frame_id, body_to_track_frame, ros::Time(0), transform);
    }
    if(transform.stamp_ != last_stamp[body_to_track_frame]) // Check whether this is a new detection or the same one found last time
    {
      last_stamp[body_to_track_frame] = transform.stamp_;
      return "found";
    }
    else // Same detection as the last one: we are running faster than the skeleton tracker, so skip a frame
    {
      return "skip";
    }
  }
  catch(tf::TransformException &ex) // Torso not found
  {
    return "not found";
  }
}
Example 2: lookForSpecificBodyTransform
std::string lookForSpecificBodyTransform(tf::TransformListener& listener, std::string frame_id, std::string body_to_track_frame, tf::StampedTransform& transform)
{
  try
  {
    if(skeleton_to_track == -1)
    {
      listener.lookupTransform(frame_id, "torso_k", ros::Time(0), transform);
    }
    else
    {
      listener.lookupTransform(frame_id, body_to_track_frame, ros::Time(0), transform);
    }
    if(transform.stamp_ != last_stamp[body_to_track_frame])
    {
      last_stamp[body_to_track_frame] = transform.stamp_;
      return "found";
    }
    else
    {
      return "skip";
    }
  }
  catch(tf::TransformException &ex)
  {
    return "not found";
  }
}
Example 3: jsCallback
void BaseFootprint::jsCallback(const sensor_msgs::JointState::ConstPtr & ptr)
{
  ros::Time time = ptr->header.stamp;
  tf::StampedTransform tf_odom_to_base, tf_odom_to_left_foot, tf_odom_to_right_foot;
  ROS_DEBUG("JointState callback function, computing frame %s", m_baseFootPrintID.c_str());
  try {
    m_listener.lookupTransform(m_odomFrameId, m_lfootFrameId, time, tf_odom_to_left_foot);
    m_listener.lookupTransform(m_odomFrameId, m_rfootFrameId, time, tf_odom_to_right_foot);
    m_listener.lookupTransform(m_odomFrameId, m_baseFrameId, time, tf_odom_to_base);
  } catch (const tf::TransformException& ex){
    ROS_ERROR("%s",ex.what());
    return;
  }
  tf::Vector3 new_origin = (tf_odom_to_right_foot.getOrigin() + tf_odom_to_left_foot.getOrigin())/2.0; // middle of both feet
  double height = std::min(tf_odom_to_left_foot.getOrigin().getZ(), tf_odom_to_right_foot.getOrigin().getZ()); // fix to lowest foot
  new_origin.setZ(height);
  // adjust yaw according to torso orientation, all other angles 0 (= in z-plane)
  double roll, pitch, yaw;
  tf_odom_to_base.getBasis().getRPY(roll, pitch, yaw);
  tf::Transform tf_odom_to_footprint(tf::createQuaternionFromYaw(yaw), new_origin);
  tf::Transform tf_base_to_footprint = tf_odom_to_base.inverse() * tf_odom_to_footprint;
  // publish transform with parent m_baseFrameId and new child m_baseFootPrintID
  // i.e. transform from m_baseFrameId to m_baseFootPrintID
  m_brBaseFootPrint.sendTransform(tf::StampedTransform(tf_base_to_footprint, time, m_baseFrameId, m_baseFootPrintID));
  ROS_DEBUG("Published Transform %s --> %s", m_baseFrameId.c_str(), m_baseFootPrintID.c_str());
  return;
}
Example 4: transfromRobotToMapPosition
void Explore::transfromRobotToMapPosition(float x_robot, float y_robot, float &x_map, float &y_map){
  ros::Time now = ros::Time::now();
  tf::StampedTransform tfRO;
  tf_listener_.waitForTransform("/base_link", "/odom", now, ros::Duration(1.0));
  tf_listener_.lookupTransform ("/base_link", "/odom", now, tfRO);
  tf::Transform tfOR = tfRO.inverse();
  tf::StampedTransform tfOM;
  tf_listener_.waitForTransform("/odom", "/map", now, ros::Duration(1.0));
  tf_listener_.lookupTransform ("/odom", "/map", now, tfOM);
  tf::Transform tfMO = tfOM.inverse();
  tf::Transform tfRM = tfMO*tfOR;
  float noused = 0.0; // z coordinate, unused for the planar case
  tf::Transform tfRD;
  tfRD.setOrigin(tf::Vector3(x_robot, y_robot, noused));
  tf::Transform tfOD = tfOR * tfRD;
  tf::Transform tfMD = tfMO * tfOD;
  x_map = tfMD.getOrigin ()[0]; // target coordinates in the map frame
  y_map = tfMD.getOrigin ()[1]; // target coordinates in the map frame
}
Example 5: DepthSensorCallback
void DepthSensorCallback(const sensor_msgs::PointCloud2ConstPtr msg)
{
  // Get the current world space tool position
  m_MsgHeaderStamp = msg->header.stamp;
  m_Listener.waitForTransform("/world", "VSV/Tool",
      m_MsgHeaderStamp, ros::Duration(1.0));
  tf::StampedTransform transform;
  m_Listener.lookupTransform("/world", "VSV/Tool",
      m_MsgHeaderStamp, transform);
  m_WorldSpaceToolPosition = transform * tf::Vector3(0.0, 0.0, 0.0);
  // Get the current world space robot position
  m_Listener.waitForTransform("/world", "VSV/base",
      m_MsgHeaderStamp, ros::Duration(1.0));
  m_Listener.lookupTransform("/world", "VSV/base",
      m_MsgHeaderStamp, transform);
  m_WorldSpaceRobotPosition = transform * tf::Vector3(0.0, 0.0, 0.0);
  //ROS_INFO("Base position %f %f %f",
  //         m_WorldSpaceRobotPosition.x(), m_WorldSpaceRobotPosition.y(), m_WorldSpaceRobotPosition.z());
  // Get the orientation of the robot
  auto Q = transform.getRotation();
  tf::Matrix3x3 M;
  M.setRotation(Q);
  double roll, pitch;
  M.getRPY(roll, pitch, m_Orientation);
}
Example 6: wallCB
void wallCB(const geometry_msgs::PoseStamped& poseMsg)
{
  if (!mocapOn)
  {
    std::cout << "Bebop pose is publishing. THE WALL IS UP!" << std::endl;
    mocapOn = true;
  }
  geometry_msgs::Pose pose = poseMsg.pose;
  // Center
  tf::Vector3 center(0,0,0);
  try
  {
    tf::StampedTransform tf1;
    tf::StampedTransform tf2;
    tf::StampedTransform tf3;
    tf::StampedTransform tf4;
    tfl.lookupTransform("world","ugv1",ros::Time(0),tf1);
    tfl.lookupTransform("world","ugv2",ros::Time(0),tf2);
    tfl.lookupTransform("world","ugv3",ros::Time(0),tf3);
    tfl.lookupTransform("world","ugv4",ros::Time(0),tf4);
    center = (tf1.getOrigin() + tf2.getOrigin() + tf3.getOrigin() + tf4.getOrigin())/4;
  }
  catch(tf::TransformException ex)
  {
  }
  // Boundary
  tf::Vector3 boundaryBottomLeft = center + boundaryOffsetBottomLeft;
  tf::Vector3 boundaryTopRight = center + boundaryOffsetTopRight;
  // Exceeding wall
  bool leftWall = pose.position.x <= boundaryBottomLeft.getX();
  bool rightWall = pose.position.x >= boundaryTopRight.getX();
  bool frontWall = pose.position.y <= boundaryBottomLeft.getY();
  bool backWall = pose.position.y >= boundaryTopRight.getY();
  bool bottomWall = pose.position.z <= boundaryBottomLeft.getZ();
  bool topWall = pose.position.z >= boundaryTopRight.getZ();
  if (leftWall or rightWall or frontWall or backWall or bottomWall or topWall)
  {
    wallOverride = true;
    // velocity command
    tf::Vector3 velCmd(leftWall - rightWall, frontWall - backWall, bottomWall - topWall);
    tf::Vector3 velCmdBody = tf::quatRotate(tf::Quaternion(pose.orientation.x, pose.orientation.y, pose.orientation.z, pose.orientation.w).inverse(),velCmd);
    // construct message and publish
    geometry_msgs::Twist twistMsg;
    twistMsg.linear.x = velCmdBody.getX();
    twistMsg.linear.y = velCmdBody.getY();
    twistMsg.linear.z = velCmdBody.getZ();
    velCmdPub.publish(twistMsg);
  }
  else
  {
    wallOverride = false;
  }
}
Example 7: tle
/** Callback: On new sensor data
 */
void onNewSensor_Laser2D(const sensor_msgs::LaserScanConstPtr & scan)
{
  mrpt::utils::CTimeLoggerEntry tle(m_profiler,"onNewSensor_Laser2D");
  // Get the relative position of the sensor wrt the robot:
  tf::StampedTransform sensorOnRobot;
  try {
    mrpt::utils::CTimeLoggerEntry tle2(m_profiler,"onNewSensor_Laser2D.lookupTransform_sensor");
    m_tf_listener.lookupTransform(m_frameid_robot,scan->header.frame_id, ros::Time(0), sensorOnRobot);
  }
  catch (tf::TransformException &ex) {
    ROS_ERROR("%s",ex.what());
    return;
  }
  // Convert data to MRPT format:
  mrpt::poses::CPose3D sensorOnRobot_mrpt;
  mrpt_bridge::convert(sensorOnRobot,sensorOnRobot_mrpt);
  // In MRPT, CObservation2DRangeScan holds both: sensor data + relative pose:
  CObservation2DRangeScanPtr obsScan = CObservation2DRangeScan::Create();
  mrpt_bridge::convert(*scan,sensorOnRobot_mrpt, *obsScan);
  ROS_DEBUG("[onNewSensor_Laser2D] %u rays, sensor pose on robot %s", static_cast<unsigned int>(obsScan->scan.size()), sensorOnRobot_mrpt.asString().c_str() );
  // Get sensor timestamp:
  const double timestamp = scan->header.stamp.toSec();
  // Get robot pose at that time in the reference frame, typically /odom -> /base_link
  mrpt::poses::CPose3D robotPose;
  try {
    mrpt::utils::CTimeLoggerEntry tle3(m_profiler,"onNewSensor_Laser2D.lookupTransform_robot");
    tf::StampedTransform tx;
    try {
      m_tf_listener.lookupTransform(m_frameid_reference,m_frameid_robot, scan->header.stamp, tx);
    }
    catch (tf::ExtrapolationException &) {
      // if the requested robot pose is too recent for tf, fall back to the latest available one:
      m_tf_listener.lookupTransform(m_frameid_reference,m_frameid_robot, ros::Time(0), tx);
    }
    mrpt_bridge::convert(tx,robotPose);
    ROS_DEBUG("[onNewSensor_Laser2D] robot pose %s", robotPose.asString().c_str() );
  }
  catch (tf::TransformException &ex) {
    ROS_ERROR("%s",ex.what());
    return;
  }
  // Insert into the observation history:
  TInfoPerTimeStep ipt;
  ipt.observation = obsScan;
  ipt.robot_pose = robotPose;
  m_hist_obs_mtx.lock();
  m_hist_obs.insert( m_hist_obs.end(), TListObservations::value_type(timestamp,ipt) );
  m_hist_obs_mtx.unlock();
} // end onNewSensor_Laser2D
Example 8: close
//Close the gripper
void close(){
  ROS_INFO("Closing Gripper");
  ros::Duration(0.5).sleep();
  //wait for the listener to get the first message
  listener_.waitForTransform("base_footprint", "r_gripper_l_finger_tip_frame",
                             ros::Time(0), ros::Duration(1.0));
  //we will record transforms here
  tf::StampedTransform start_transform;
  tf::StampedTransform current_transform;
  //record the starting transform from the gripper to the base frame
  listener_.lookupTransform("base_footprint", "r_gripper_l_finger_tip_frame",
                            ros::Time(0), start_transform);
  bool done = false;
  pr2_controllers_msgs::Pr2GripperCommand gripper_cmd;
  gripper_cmd.position = 0.0;
  gripper_cmd.max_effort = 50.0;
  ros::Rate rate(10.0);
  double dist_moved_before = 0.0;
  double dist_moved = 0.0;
  while (!done && nh_.ok())
  {
    r_gripper_.publish(gripper_cmd);
    rate.sleep();
    //get the current transform
    try
    {
      listener_.lookupTransform("base_footprint", "r_gripper_l_finger_tip_frame",
                                ros::Time(0), current_transform);
    }
    catch (tf::TransformException ex)
    {
      ROS_ERROR("%s",ex.what());
      break;
    }
    //check whether the fingertip has moved far enough (gripper closed) or has stopped moving (hit an object)
    tf::Transform relative_transform =
      start_transform.inverse() * current_transform;
    dist_moved_before = dist_moved;
    dist_moved = relative_transform.getOrigin().length();
    //ROS_INFO("%f",dist_moved);
    if(dist_moved > 0.03 || dist_moved < dist_moved_before) done = true;
  }
}
Example 9: callback
// void cloud_cb (const sensor_msgs::PointCloud2ConstPtr& pc)
void callback (const sensor_msgs::PointCloud2ConstPtr& pc, const sensor_msgs::ImageConstPtr& im)
{
  if (!cloud_and_image_received_)
  {
    //Write cloud
    //transform to target frame
    bool found_transform = tf_.waitForTransform(pc->header.frame_id, to_frame_,
        pc->header.stamp, ros::Duration(10.0));
    tf::StampedTransform transform;
    if (found_transform)
    {
      //ROS_ASSERT_MSG(found_transform, "Could not transform to camera frame");
      tf_.lookupTransform(to_frame_,pc->header.frame_id, pc->header.stamp, transform);
      ROS_DEBUG("[TransformPointcloudNode:] Point cloud published in frame %s", pc->header.frame_id.c_str());
    }
    else
    {
      ROS_ERROR("No transform for pointcloud found!!!");
      return;
    }
    bag_.write(input_cloud_topic_, pc->header.stamp, *pc);
    geometry_msgs::TransformStamped transform_msg;
    tf::transformStampedTFToMsg(transform, transform_msg);
    bag_.write(input_cloud_topic_ + "/transform", transform_msg.header.stamp, transform_msg);
    ROS_INFO("Wrote cloud to %s", bag_name_.c_str());
    //Write image
    found_transform = tf_.waitForTransform(im->header.frame_id, to_frame_,
        im->header.stamp, ros::Duration(10.0));
    if (found_transform)
    {
      //ROS_ASSERT_MSG(found_transform, "Could not transform to camera frame");
      tf_.lookupTransform(to_frame_,im->header.frame_id, im->header.stamp, transform);
      ROS_DEBUG("[TransformPointcloudNode:] Point cloud published in frame %s", im->header.frame_id.c_str());
    }
    else
    {
      ROS_ERROR("No transform for image found!!!");
      return;
    }
    bag_.write(input_image_topic_, im->header.stamp, im);
    tf::transformStampedTFToMsg(transform, transform_msg);
    bag_.write(input_image_topic_ + "/transform", transform_msg.header.stamp, transform_msg);
    ROS_INFO("Wrote image to %s", bag_name_.c_str());
    cam_info_ = ros::topic::waitForMessage<sensor_msgs::CameraInfo>(input_camera_info_topic_);
    bag_.write(input_camera_info_topic_, cam_info_->header.stamp, cam_info_);
    ROS_INFO("Wrote Camera Info to %s", bag_name_.c_str());
    cloud_and_image_received_ = true;
  }
}
Example 10: driveForwardOdom
//! Drive forward a specified distance based on odometry information
bool driveForwardOdom(double distance)
{
  //wait for the listener to get the first message
  listener_.waitForTransform("base_footprint", "odom_combined",
                             ros::Time(0), ros::Duration(1.0));
  //we will record transforms here
  tf::StampedTransform start_transform;
  tf::StampedTransform current_transform;
  //record the starting transform from the odometry to the base frame
  listener_.lookupTransform("base_footprint", "odom_combined",
                            ros::Time(0), start_transform);
  //we will be sending commands of type "twist"
  geometry_msgs::Twist base_cmd;
  //the command will be to go forward at 0.25 m/s
  base_cmd.linear.y = base_cmd.angular.z = 0;
  base_cmd.linear.x = 0.25;
  ros::Rate rate(10.0);
  bool done = false;
  while (!done && nh_.ok())
  {
    //send the drive command
    cmd_vel_pub_.publish(base_cmd);
    rate.sleep();
    //get the current transform
    try
    {
      listener_.lookupTransform("base_footprint", "odom_combined",
                                ros::Time(0), current_transform);
    }
    catch (tf::TransformException ex)
    {
      ROS_ERROR("%s",ex.what());
      break;
    }
    //see how far we've traveled
    tf::Transform relative_transform =
      start_transform.inverse() * current_transform;
    double dist_moved = relative_transform.getOrigin().length();
    if(dist_moved > distance) done = true;
  }
  if (done) return true;
  return false;
}
Example 11: doubleLaserCallback
void doubleLaserCallback(const sensor_msgs::LaserScan& scan1, const sensor_msgs::LaserScan& scan2)
{
  //transform both to the base frame, and republish this to the laser line extractor.
  tf::StampedTransform laser1_to_base;
  tf::StampedTransform laser2_to_base;
  try{
    tf_listener_->waitForTransform("/laser1", "/base_link",scan1.header.stamp, ros::Duration(0.6));
    tf_listener_->lookupTransform("/laser1","/base_link",scan1.header.stamp, laser1_to_base);
    tf_listener_->waitForTransform("/laser2", "/base_link",scan2.header.stamp, ros::Duration(0.6));
    tf_listener_->lookupTransform("/laser2","/base_link",scan2.header.stamp, laser2_to_base);
  } catch(tf::TransformException ex){
    ROS_ERROR("Laser Merge Callback failure: %s",ex.what());
  }
}
Example 12: getHumanLocation
static int getHumanLocation(tf::TransformListener& listener){
  tf::StampedTransform transform;
  try{
    ros::Time now = ros::Time::now();
    listener.waitForTransform("/map", "/human_base",
                              now, ros::Duration(3.0));
    listener.lookupTransform("/map", "/human_base",
                             now, transform);
    humanConf.dof[0] = transform.getOrigin().x();
    humanConf.dof[1] = transform.getOrigin().y();
    humanConf.dof[2] = transform.getOrigin().z()+1.0;
    humanConf.dof[3] = 0.0;
    humanConf.dof[4] = 0.0;
    humanConf.dof[5] = tf::getYaw(transform.getRotation());
    if(humanPose_DEBUG){
      printf("%f %f %f %f %f %f\n", transform.getOrigin().x(), transform.getOrigin().y(),
             transform.getOrigin().z(), humanConf.dof[3], humanConf.dof[4], humanConf.dof[5]);
    }
  }
  catch (tf::TransformException ex){
    ROS_ERROR("%s",ex.what());
  }
  return TRUE;
}
Example 13: poseCB
void poseCB(const geometry_msgs::PoseStamped& poseMsg)
{
  if (!mocapOn)
  {
    std::cout << "Bebop pose is publishing. THE WALL IS UP!" << std::endl;
    mocapOn = true;
  }
  tf::Vector3 desBodyLinVel, desBodyAngVel;
  if (autonomy)
  {
    // Bebop pose
    tf::Transform bebopPoseTF(tf::Quaternion(poseMsg.pose.orientation.x,poseMsg.pose.orientation.y,poseMsg.pose.orientation.z,poseMsg.pose.orientation.w),
                              tf::Vector3(poseMsg.pose.position.x,poseMsg.pose.position.y,poseMsg.pose.position.z));
    // Target pose
    tf::StampedTransform targetPoseTF;
    try { tfl.lookupTransform("world",target,ros::Time(0),targetPoseTF); }
    catch(tf::TransformException ex) {}
    // Desired pose
    tf::Vector3 desPos, desForward;
    if (lazy)
    {
      tf::Vector3 unitRelPos = (bebopPoseTF.getOrigin() - targetPoseTF.getOrigin());
      unitRelPos.setZ(0);
      unitRelPos.normalize();
      desPos = targetPoseTF.getOrigin() + radius*unitRelPos;
      desPos.setZ(desPos.getZ() + 1);
      desForward = -unitRelPos;
    }
    else
    {
      desPos = targetPoseTF.getOrigin();
      desPos.setZ(desPos.getZ() + 1);
      desForward = targetPoseTF.getBasis()*tf::Vector3(1,0,0);
    }
    // Desired velocity
    tf::Vector3 desLinVel = kp*(desPos - bebopPoseTF.getOrigin()) + kpd*(targetLinVel - bebopLinVel);
    tf::Vector3 desAngVel = kw*((bebopPoseTF.getBasis()*tf::Vector3(1,0,0)).cross(desForward));
    desBodyLinVel = sat(bebopPoseTF.getBasis().inverse()*desLinVel,0.4) + tf::Vector3(0,0.4*joyAngVel.getZ(),0);
    desBodyAngVel = sat(bebopPoseTF.getBasis().inverse()*desAngVel,0.4) + tf::Vector3(0,0,-0.5*joyAngVel.getZ());
  }
  else
  {
    desBodyLinVel = joyLinVel;
    desBodyAngVel = joyAngVel;
  }
  // Publish
  geometry_msgs::Twist twistMsg;
  twistMsg.linear.x = desBodyLinVel.getX();
  twistMsg.linear.y = desBodyLinVel.getY();
  twistMsg.linear.z = desBodyLinVel.getZ();
  twistMsg.angular.x = desBodyAngVel.getX();
  twistMsg.angular.y = desBodyAngVel.getY();
  twistMsg.angular.z = -1*desBodyAngVel.getZ();
  velCmdPub.publish(twistMsg);
}
Example 14: lookForEveryHeadTransform
//--------------------------------- Functions for the initial association and for the accompaniment phase -------------------------------------
void lookForEveryHeadTransform(tf::TransformListener& listener, std::vector<tf::StampedTransform>& transforms, std::string user_to_track)
{
  // For every skeleton that may be in the skeleton tracker's view, look up the coordinates of the corresponding head relative to the coordinates of the user's face
  for(int i = 1; i <= 15; i++)
  {
    std::ostringstream oss;
    oss << "head_" << i;
    try
    {
      tf::StampedTransform transform;
      listener.lookupTransform(user_to_track, oss.str(), ros::Time(0), transform);
      if(transform.stamp_ != last_stamp[oss.str()])
      {
        last_stamp[oss.str()] = transform.stamp_;
        transforms.push_back(transform);
      }
    }
    catch(tf::TransformException &ex)
    {
      continue;
    }
  }
}
Example 15: getTransform
bool Conversions::getTransform(tf::TransformListener &tfl, Eigen::Isometry3d &T, std::string from, std::string to, ros::Time stamp)
{
  geometry_msgs::Pose transform_msg;
  try {
    tf::StampedTransform t;
    tfl.lookupTransform(from, to, stamp, t);
    transform_msg.orientation.w = t.getRotation().w();
    transform_msg.orientation.x = t.getRotation().x();
    transform_msg.orientation.y = t.getRotation().y();
    transform_msg.orientation.z = t.getRotation().z();
    transform_msg.position.x = t.getOrigin().x();
    transform_msg.position.y = t.getOrigin().y();
    transform_msg.position.z = t.getOrigin().z();
  } catch (tf::LookupException le) {
    ROS_ERROR("Couldn't find transformation between %s and %s (%s)", from.c_str(), to.c_str(), le.what());
    return false;
  } catch (tf::ExtrapolationException ee) {
    ROS_ERROR("Transformation lookup time is in the future (%s)", ee.what());
    return false;
  } catch (tf::InvalidArgument ie) {
    ROS_ERROR("TF invalid argument between %s and %s (%s)", from.c_str(), to.c_str(), ie.what());
    return false;
  } catch (tf::ConnectivityException ce) {
    ROS_ERROR("TF no connectivity between %s and %s (%s)", from.c_str(), to.c_str(), ce.what());
    return false;
  }
  T = Conversions::fromMsg(transform_msg);
  return true;
}