本文整理汇总了C++中tf::TransformListener::transformPoint方法的典型用法代码示例。如果您正苦于以下问题:C++ TransformListener::transformPoint方法的具体用法?C++ TransformListener::transformPoint怎么用?C++ TransformListener::transformPoint使用的例子?那么, 这里精选的方法代码示例或许可以为您提供帮助。您也可以进一步了解该方法所在类tf::TransformListener
的用法示例。
在下文中一共展示了TransformListener::transformPoint方法的15个代码示例,这些例子默认根据受欢迎程度排序。您可以为喜欢或者感觉有用的代码点赞,您的评价将有助于系统推荐出更棒的C++代码示例。
示例1: transformPoint
// Transform the laser and camera sensor points into the base_link frame.
// Only the headers are initialised here; the point coordinates are member
// variables assumed to be populated elsewhere — TODO confirm against callers.
void TFListener::transformPoint(const tf::TransformListener& listener)
{
    _laser_point.header.frame_id = "laser";
    _camera_point.header.frame_id = "base_camera";
    _base_point.header.frame_id = "base_footprint";
    _odomPoint.header.frame_id = "odom";
    // ros::Time() (time 0) means "use the latest available transform".
    _laser_point.header.stamp = ros::Time();
    _camera_point.header.stamp = ros::Time();
    _base_point.header.stamp = ros::Time();
    _odomPoint.header.stamp = ros::Time();

    try //Try transforming the LRF
    {
        geometry_msgs::PointStamped baseLinkToBaseLaser;
        listener.transformPoint("base_link", _laser_point, baseLinkToBaseLaser);
    }
    catch(tf::TransformException& ex)
    {
        ROS_ERROR("Received an exception trying to transform a point from \"base_laser\" to \"base_link\": %s", ex.what());
    }

    try //Try Transforming the camera
    {
        geometry_msgs::PointStamped baseLinkToBaseCamera;
        listener.transformPoint("base_link", _camera_point, baseLinkToBaseCamera);
        //1. (Frame being applied offset)
        //2. (Point from sensor)
        //3. ("returned" Point with applied offset)
    }
    catch(tf::TransformException& ex)
    {
        // Fix: this message previously claimed the failure was "base_laser",
        // but this branch transforms the camera point.
        ROS_ERROR("Received an exception trying to transform a point from \"base_camera\" to \"base_link\": %s", ex.what());
    }
}
示例2: transformPoint
// Transform the camera origin into the map frame (result stored in the
// global pt0_tf).  Two extra points marking the edges of the field-of-view
// cone are prepared as well, but their transforms are currently disabled.
void transformPoint(const tf::TransformListener& listener) {
    geometry_msgs::PointStamped origin, fov_left, fov_right;
    const ros::Time stamp = ros::Time::now();

    origin.header.frame_id = "camera_link";
    fov_left.header.frame_id = "base_footprint";//"camera_link";
    fov_right.header.frame_id = "base_footprint";//"camera_link";
    origin.header.stamp = stamp;
    fov_left.header.stamp = stamp;
    fov_right.header.stamp = stamp;

    // Sensor origin.
    origin.point.x = 0;
    origin.point.y = 0;
    origin.point.z = 0;
    // Right/left edges of the FOV cone at distance `range` (globals).
    fov_left.point.x = range;
    fov_left.point.y = -range*tan(field_of_view/2);
    fov_left.point.z = 0;
    fov_right.point.x = range;
    fov_right.point.y = range*tan(field_of_view/2);
    fov_right.point.z = 0;

    try
    {
        listener.transformPoint("map", origin, pt0_tf);
        //listener.transformPoint("map", fov_left, pt1_tf);
        //listener.transformPoint("map", fov_right, pt2_tf);
        ROS_INFO("trying transform...");//debug
    }
    catch(tf::TransformException& ex)
    {
        ROS_ERROR("Received an exception trying to transform a point from \"camera_link\" to \"map\": %s", ex.what());
    }
}
示例3: transformPoint
// Classic tf tutorial example: transform a fixed point from the base_laser
// frame into the base_link frame and print both.
void transformPoint(const tf::TransformListener& listener){
    //we'll create a point in the base_laser frame that we'd like to transform to the base_link frame
    geometry_msgs::PointStamped laser_point;
    laser_point.header.frame_id = "base_laser";
    //we'll just use the most recent transform available for our simple example
    laser_point.header.stamp = ros::Time();
    //just an arbitrary point in space
    laser_point.point.x = 1.0;
    laser_point.point.y = 2.0;
    laser_point.point.z = 0.0;

    geometry_msgs::PointStamped base_point;
    try
    {
        //listener.waitForTransform(
        listener.transformPoint("base_link", laser_point, base_point);
        // listener.lookupTransform("base_link", "wheel_2", ros::Time(0), ros::Duration(10.0));
        ROS_INFO("base_laser: (%.2f, %.2f. %.2f) -----> base_link: (%.2f, %.2f, %.2f) at time %.2f",
                 laser_point.point.x, laser_point.point.y, laser_point.point.z,
                 base_point.point.x, base_point.point.y, base_point.point.z, base_point.header.stamp.toSec());
    }
    catch(tf::TransformException& ex)
    {
        // Fix: transformPoint throws when the transform is unavailable; the
        // original version let the exception escape and terminate the node.
        ROS_ERROR("Received an exception trying to transform a point from \"base_laser\" to \"base_link\": %s", ex.what());
    }
}
示例4: receiveInformation
// Callback: transform a detected person's position into the robot base
// frame and record the straight-line distance to the person.
void receiveInformation(const geometry_msgs::PointStampedConstPtr &person)
{
    notReceivedNumber = 0;
    hold = false;
    try
    {
        // Advanced (fixed-frame) overload: allows the person stamp and the
        // robot frame lookup to be bridged through fixed_frame_id.
        listener.waitForTransform(robot_frame, person->header.stamp, person->header.frame_id, person->header.stamp, fixed_frame_id, ros::Duration(10.0) );
        listener.transformPoint(robot_frame, person->header.stamp, *person, fixed_frame_id, personInRobotBaseFrame);
        cv::Point3d personPoint;
        personPoint.x = personInRobotBaseFrame.point.x;
        personPoint.y = personInRobotBaseFrame.point.y;
        personPoint.z = personInRobotBaseFrame.point.z;
        distanceToPerson = cv::norm(personPoint);
        // NOTE(review): logged at ERROR level although it is routine output —
        // consider ROS_DEBUG/ROS_INFO; kept as-is to preserve behavior.
        ROS_ERROR("Person distance: %f", distanceToPerson);
    }
    // Fix: catch by reference — catching tf::TransformException by value
    // slices derived exception types (e.g. ExtrapolationException).
    catch(tf::TransformException& ex)
    {
        ROS_WARN("%s",ex.what());
        //ros::Duration(1.0).sleep();
        return;
    }
    personInRobotBaseFrame.header.frame_id = robot_frame;
}
示例5: checkObjectInMap
// Service handler: report whether the requested object position (given in
// the camera frame) falls on a cell marked -106 in the occupancy grid.
bool checkObjectInMap(robot_msgs::checkObjectInMap::Request &req, robot_msgs::checkObjectInMap::Response &res) {
    const int width = 16;  // search window in cells, centred on the object
    geometry_msgs::PointStamped tf_object, obj;
    obj.point.x = req.point.x;
    obj.point.y = req.point.y;
    obj.header.frame_id = "camera";
    try
    {
        listener.waitForTransform("/camera", "/map", ros::Time(0), ros::Duration(1));
        listener.transformPoint("map", obj, tf_object);
    }
    catch (tf::TransformException& ex)
    {
        // Fix: transformPoint throws when no transform exists (the original
        // ignored waitForTransform's result and was unguarded).
        ROS_ERROR("checkObjectInMap: transform failed: %s", ex.what());
        return false;
    }
    int x = floor(tf_object.point.x/resolution);
    int y = floor(tf_object.point.y/resolution);
    //ROS_INFO("COORDINATE OBJECT x: %d y: %d", x, y);
    for(int i = x-width/2; i <= x+width/2; i++)
    {
        // Fix: clamp indices to the grid — the original indexed final_map
        // out of bounds for objects near the map edge.
        if (i < 0 || i >= width_map)
            continue;
        //ROS_INFO("CHECK COORDINATE OBJECT x: %d", i);
        for(int j = y-width/2; j <= y+width/2; j++)
        {
            // NOTE(review): the map height is not visible here, so only the
            // lower bound can be checked — confirm an upper bound exists.
            if (j < 0)
                continue;
            //ROS_INFO("Value %d", final_map[i + width_map*j]);
            if(final_map[i + width_map*j] == -106)
            {
                res.inMap = true;
                return true;
            }
        }
    }
    res.inMap = false;
    return true;
}
示例6:
// Transform `msg` into the filter frame in place.  Returns false (leaving
// `msg` unmodified) when the transform is not available at msg's stamp.
bool
KalmanDetectionFilter::transformPointToFilter(samplereturn_msgs::NamedPoint& msg)
{
    // Bail out early if tf cannot serve this transform.
    if (!listener_.canTransform(_filter_frame_id, msg.header.frame_id, msg.header.stamp))
        return false;

    tf::Point raw_pt;
    tf::pointMsgToTF(msg.point, raw_pt);
    tf::Stamped<tf::Point> stamped_in(raw_pt, msg.header.stamp, msg.header.frame_id);
    tf::Stamped<tf::Point> stamped_out;
    listener_.transformPoint(_filter_frame_id, stamped_in, stamped_out);

    // Write the transformed coordinates and the new frame back into msg.
    tf::pointTFToMsg(stamped_out, msg.point);
    msg.header.frame_id = _filter_frame_id;
    return true;
}
示例7: objectSrv
// Service: project a 3-D point (any frame) onto the camera image plane.
// The point is first transformed into the camera frame, then projected
// through the pinhole model; pixel coordinates are returned in res.x/res.y.
bool objectSrv(jsk_smart_gui::point2screenpoint::Request &req,
               jsk_smart_gui::point2screenpoint::Response &res)
{
    ROS_INFO("3dtopixel request:x=%lf,y=%lf,z=%lf",req.point.point.x,req.point.point.y,req.point.point.z);
    geometry_msgs::PointStamped point_transformed;
    try
    {
        tf_listener_.transformPoint(camera_topic, req.point, point_transformed);
    }
    catch (tf::TransformException& ex)
    {
        // Fix: an unavailable transform previously threw out of the service
        // handler; report failure to the caller instead.
        ROS_ERROR("objectSrv: transform to camera frame failed: %s", ex.what());
        return false;
    }
    cv::Point3d xyz;
    cv::Point2d uv_rect;
    xyz.x = point_transformed.point.x;
    xyz.y = point_transformed.point.y;
    xyz.z = point_transformed.point.z;
    uv_rect = cam_model_.project3dToPixel(xyz);
    res.x=uv_rect.x;
    res.y=uv_rect.y;
    return true;
}
示例8: addPhysicalPoint
void addPhysicalPoint()
{
geometry_msgs::PointStamped pt_out;
try
{
tf_listener_.transformPoint(fixed_frame, gripper_tip, pt_out);
}
catch (tf::TransformException& ex)
{
ROS_WARN("[calibrate] TF exception:\n%s", ex.what());
return;
}
physical_points_.push_back(pcl::PointXYZ(pt_out.point.x, pt_out.point.y, pt_out.point.z));
}
示例9: transform_coord
// Transform every position in p_array1 into `frameid` and append the
// results (with identity orientation) to p_array2.  Poses are processed in
// reverse order, matching the original behaviour.
void laser_comb::transform_coord(std::string frameid,const geometry_msgs::PoseArray::ConstPtr& p_array1,geometry_msgs::PoseArray *p_array2)
{
    geometry_msgs::PointStamped old_point,new_point;
    geometry_msgs::Quaternion HumanQuaternion;
    HumanQuaternion.x = 0;//|-> Orientation is ignored
    HumanQuaternion.y = 0;//|
    HumanQuaternion.z = 0;//|
    HumanQuaternion.w = 1;//|
    geometry_msgs::Pose HumanPose;
    HumanPose.orientation= HumanQuaternion;
    old_point.header = p_array1->header;
    int p = p_array1->poses.size();
    for (int i = p-1;i>=0;i--)
    {
        old_point.point = p_array1->poses[i].position;
        try{
            listener.waitForTransform(frameid,old_point.header.frame_id,old_point.header.stamp,ros::Duration(0.3));
            listener.transformPoint(frameid,old_point,new_point);
            HumanPose.position = new_point.point;
            p_array2->poses.push_back(HumanPose);
            p_array2->header = new_point.header;
        }
        catch(tf::TransformException& ex){
            // Fix: the message previously hard-coded "base_laser"/"base_link"
            // although the frames involved are dynamic; report the real ones.
            ROS_ERROR("Received an exception trying to transform a point from \"%s\" to \"%s\": %s",
                      old_point.header.frame_id.c_str(), frameid.c_str(), ex.what());
        }
    }
}
示例10: transformPoint
// Transform `src` into the frame named in dst->header.frame_id, writing the
// result into *dst.  On failure, *dst is left unchanged and an error is
// logged.  Uses the global/member listener `tl`.
void transformPoint(geometry_msgs::PointStamped src,
geometry_msgs::PointStamped *dst)
{
try
{
//notice ros::time!!
// Wait for the transform at the source's own timestamp...
tl.waitForTransform(dst->header.frame_id, src.header.frame_id,
src.header.stamp, ros::Duration(3.0));
// ...but perform the transform at ros::Time() (latest available), using
// the advanced fixed-frame overload with src's own frame as fixed frame.
// NOTE(review): the stamp mismatch with the wait above appears deliberate
// (see the comment) — confirm before changing either timestamp.
tl.transformPoint(dst->header.frame_id, ros::Time(),
src, src.header.frame_id, *dst);
}
catch(tf::TransformException& ex)
{
// Message assumes the destination frame is /map; the actual target is
// whatever dst->header.frame_id was set to by the caller.
ROS_ERROR("Received an exception trying to transform a point to /map: %s",
ex.what());
//++waittime;
}
}
示例11: mean
void
KalmanDetectionFilter::drawFilterStates()
{
if(!current_filter_)
return;
geometry_msgs::PointStamped temp_msg, temp_msg_base_link;
temp_msg.header.frame_id = "odom";
temp_msg.header.stamp = ros::Time(0);
cv::Mat img = cv::Mat::zeros(500, 500, CV_8UC3);
float px_per_meter = 50.0;
float offset = 250;
temp_msg.point.x = current_filter_->statePost.at<float>(0);
temp_msg.point.y = current_filter_->statePost.at<float>(1);
temp_msg.point.z = 0.0;
if( listener_.canTransform( "base_link", temp_msg.header.frame_id, temp_msg.header.stamp))
{
listener_.transformPoint("base_link",temp_msg,temp_msg_base_link);
}
else
{
ROS_ERROR_STREAM("cannot transform filter from odom to base_link");
return;
}
cv::Point mean(temp_msg_base_link.point.x * px_per_meter,
temp_msg_base_link.point.y * px_per_meter);
float rad_x = current_filter_->errorCovPost.at<float>(0,0) * px_per_meter;
float rad_y = current_filter_->errorCovPost.at<float>(1,1) * px_per_meter;
cv::circle(img, mean+cv::Point(0,offset), 5, cv::Scalar(255,0,0));
cv::ellipse(img, mean+cv::Point(0,offset), cv::Size(rad_x, rad_y), 0, 0, 360, cv::Scalar(0,255,0));
//printFilterState();
std_msgs::Header header;
sensor_msgs::ImagePtr debug_img_msg = cv_bridge::CvImage(header,"rgb8",img).toImageMsg();
pub_debug_img.publish(debug_img_msg);
}
示例12: atan
// Timer callback: for every tf frame named "obj_*", compute the object's
// bearing relative to the human head frame and log whether it is to the
// left, right, or centre of the human.
void SpatialPerspectiveLevel2::callbackTimer(const ros::TimerEvent&) {
    static tf::TransformListener listener;
    std::string target_frame = "head_origin1";
    std::vector<std::string> frames;
    listener.getFrameStrings(frames);
    for(size_t i=0; i<frames.size(); i++) {
        std::string obj_name = frames.at(i);
        if(obj_name.find("obj_")!=std::string::npos) {
            geometry_msgs::PointStamped object_head_frame;
            try {
                geometry_msgs::PointStamped object_object_frame;
                // The object's own origin: (0,0,0) in its own frame.  Needed
                // because the frames may carry an offset.
                object_object_frame.header.frame_id = obj_name;
                object_object_frame.point.x = 0;
                object_object_frame.point.y = 0;
                object_object_frame.point.z = 0;
                listener.waitForTransform(obj_name, target_frame, ros::Time::now(), ros::Duration(1.0));
                listener.transformPoint(target_frame, object_object_frame, object_head_frame);
                // Bearing of the object in the head frame, in degrees.
                // NOTE(review): atan(y/x) loses the quadrant for objects
                // behind the head (x < 0) — atan2(y, x) would handle that;
                // kept as-is since "behind" semantics are unspecified here.
                double angle = atan(object_head_frame.point.y / object_head_frame.point.x) * 180 / M_PI;
                if(angle > 5.0) {
                    ROS_INFO_STREAM("Object " << obj_name << " is left of human.");
                } else if(angle < -5.0){
                    ROS_INFO_STREAM("Object " << obj_name << " is right of human.");
                } else {
                    ROS_INFO_STREAM("Object " << obj_name << " is central.");
                }
            // Fix: catch by reference — catching by value slices derived
            // tf exception types.
            } catch (tf::TransformException& ex) {
                ROS_WARN_STREAM("Skip object " << obj_name << " as point could not be transformed!");
                continue;
            }
        }
    }
}
示例13: transform_goal_to_robot
// Transform a goal point (x, y) expressed in `frame` into the /robot frame.
// Returns the transformed point, or a default (zero) point when the
// transform cannot be obtained.
geometry_msgs::Point Trajectory_Tracker::transform_goal_to_robot(float x, float y, string frame)
{
    ROS_INFO("WAITING FOR TRANSFORM");
    geometry_msgs::PointStamped p;
    ros::Time now = ros::Time::now();
    if(listener.waitForTransform(frame,"/robot",now,ros::Duration(5.0)))
    {
        p.header.stamp = now;
        p.header.frame_id = frame;
        p.point.x = x;
        p.point.y = y;
        p.point.z = 0;
        ROS_INFO("Planned path to World: %f, %f",p.point.x,p.point.y);
        try
        {
            // Fix: transformPoint can still throw (e.g. extrapolation) even
            // after waitForTransform succeeds; the original was unguarded.
            listener.transformPoint("/robot",p,p);
            ROS_INFO("Planned path to Robot: %f, %f",p.point.x,p.point.y);
        }
        catch(tf::TransformException& ex)
        {
            ROS_ERROR("FAILED TO GET TRANSFORM BETWEEN Frame and /robot");
        }
        return p.point;
    }else{
        ROS_ERROR("FAILED TO GET TRANSFORM BETWEEN Frame and /robot");
        return p.point;
    }
}
示例14: objectCallback
void objectCallback(const robot_msgs::detectedObject &obj_msg)
{
//object_pose = obj_msg;
geometry_msgs::PointStamped tf_object;
geometry_msgs::PointStamped obj;
obj.point.x = obj_msg.position.x;
obj.point.y = obj_msg.position.y;
obj.header.frame_id = "camera";
listener.waitForTransform("/camera", "/map", ros::Time(0), ros::Duration(1));
listener.transformPoint("map", obj, tf_object);
ROS_INFO("Detected %s", obj_msg.object_id.c_str());
ROS_INFO("Coordinates x: %f y: %f", obj_msg.position.x, obj_msg.position.y);
ROS_INFO("Transformed coordinates x: %f y: %f", tf_object.point.x, tf_object.point.y);
int x = floor((tf_object.point.x)/resolution);
int y = floor((tf_object.point.y)/resolution);
ROS_INFO("In cells x: %d y: %d", x, y);
robot_msgs::detectedObject temp = obj_msg;
temp.position.x = tf_object.point.x;
temp.position.y = tf_object.point.y;
detected_objects.push_back(temp);
//allObjects_publisher.publish(detected_objects);
visualization_msgs::Marker object;
object.header.frame_id = "map";
object.header.stamp = ros::Time(0);
object.ns = obj_msg.object_id;
object.id = 0;
//object.type = visualization_msgs::Marker::CUBE;
object.action = visualization_msgs::Marker::ADD;
object.pose.position.x = tf_object.point.x;
object.pose.position.y = tf_object.point.y;
object.pose.position.z = 0;
object.pose.orientation.w = 0;
object.pose.orientation.x = 0;
object.pose.orientation.y = 0;
object.pose.orientation.z = 0;
object.scale.x = 0.1;
object.scale.y = 0.1;
object.scale.z = 0.1;
/*object.color.a = 1.0;
object.color.r = 1.0;
object.color.g = 1.0;
object.color.b = 0;*/
if (obj_msg.object_id == "greencube") {
object.type = visualization_msgs::Marker::CUBE;
object.color.a = 1.0;
object.color.r = 0;
object.color.g = 1.0;
object.color.b = 0;
} else if (obj_msg.object_id == "greencylinder") {
object.type = visualization_msgs::Marker::CYLINDER;
object.color.a = 1.0;
object.color.r = 0;
object.color.g = 1.0;
object.color.b = 0;
} else if (obj_msg.object_id == "redball") {
object.type = visualization_msgs::Marker::SPHERE;
object.color.a = 1.0;
object.color.r = 1.0;
object.color.g = 0;
object.color.b = 0;
} else if (obj_msg.object_id == "redcube") {
object.type = visualization_msgs::Marker::CUBE;
object.color.a = 1.0;
object.color.r = 1.0;
object.color.g = 0;
object.color.b = 0;
} else if (obj_msg.object_id == "bluecube") {
object.type = visualization_msgs::Marker::CUBE;
object.color.a = 1.0;
object.color.r = 0;
object.color.g = 0;
object.color.b = 1.0;
} else if (obj_msg.object_id == "yellowcube") {
object.type = visualization_msgs::Marker::CUBE;
object.color.a = 1.0;
object.color.r = 1.0;
object.color.g = 1.0;
object.color.b = 0;
} else if (obj_msg.object_id == "yellowball") {
object.type = visualization_msgs::Marker::SPHERE;
object.color.a = 1.0;
object.color.r = 1.0;
object.color.g = 1.0;
object.color.b = 0;
}
int x_object_cell = floor(tf_object.point.x/resolution);
int y_object_cell = floor(tf_object.point.y/resolution);
final_map[x_object_cell+width_map*y_object_cell] = 110;
//object_publisher.publish( object );
}
示例15: msgCallback
// Callback to register with tf::MessageFilter to be called when transforms are available
void msgCallback(const boost::shared_ptr<const sensor_msgs::Range>& msg)
{
geometry_msgs::Point ptg;
double xi, yi;
double range = msg->range;
double field_of_view = msg->field_of_view;
geometry_msgs::PointStamped point_in[3], point_out[3];
point_in[0].header.frame_id = "/thermocam_link";
point_in[0].point.x = 0.0;
point_in[0].point.y = 0.0;
point_in[0].point.z = 0.0;
point_in[1].header.frame_id = "/thermocam_link";
point_in[1].point.x = range;
point_in[1].point.y = -range*tan(field_of_view/2);
point_in[1].point.z = 0.0;
point_in[2].header.frame_id = "/thermocam_link";
point_in[2].point.x = range;
point_in[2].point.y = range*tan(field_of_view/2);
point_in[2].point.z = 0.0;
try
{
for(int i = 0; i < 3; i++)
tf_.transformPoint(target_frame_, point_in[i], point_out[i]);
}
catch (tf::TransformException &ex)
{
printf ("Failure %s\n", ex.what()); //Print exception which was caught
}
for(int i = 0; i < map.info.width * map.info.height; i++)
{
//pti.x = map.info.origin.position.x + (i%map.info.width)*map.info.resolution + map.info.resolution/2;
//pti.y = map.info.origin.position.y + (i/map.info.width)*map.info.resolution + map.info.resolution/2;
//pti.z = 0;
xi = (i%map.info.width)*map.info.resolution + map.info.resolution/2;
yi = (i/map.info.width)*map.info.resolution + map.info.resolution/2;
ptg.x = map.info.origin.position.x + xi*cos(yaw) - yi*sin(yaw);
ptg.y = map.info.origin.position.y + xi*sin(yaw) + yi*cos(yaw);
ptg.z = 0;
if(this->detectInOutTriangle(point_out[0].point, point_out[1].point, point_out[2].point, ptg)) // 観測済みかどうか判定
{
if(map.data[i] == OCCUPIED || map.data[i] == UNSIGHT)
{
map.data[i] = OBSERVED; // 熱カメラのrange内 かつ occupied → 観測済みとマーク
}
/*
else
{
map.data[i] = occu_data.data[i];
}
*/
is_observed.data[i] = 1;
}
}
pub_map.publish(map);
}