

C++ Transform::inverse Method Code Examples

This article collects typical usage examples of the C++ tf::Transform::inverse method. If you are wondering what tf::Transform::inverse does, how to call it, or what real-world uses look like, the curated examples below should help. You can also explore further usage examples of the enclosing class, tf::Transform.


A total of 12 code examples of the Transform::inverse method are shown below, sorted by popularity by default. You can upvote the examples you like or find useful; your feedback helps the system recommend better C++ code examples.
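Before the collected examples, here is a minimal self-contained sketch of the method's basic contract (this snippet is not taken from any of the projects below, and the frame names are invented for illustration): tf::Transform::inverse() returns the transform pointing in the opposite direction, so composing a transform with its inverse yields the identity, and the inverse maps points from the parent frame back into the child frame.

#include <cmath>
#include <cstdio>
#include <tf/transform_datatypes.h>

int main()
{
  // Hypothetical base-to-camera transform: rotate 90 degrees about Z, then translate.
  tf::Transform T_base_camera(tf::Quaternion(tf::Vector3(0, 0, 1), M_PI / 2),
                              tf::Vector3(0.5, 0.0, 0.2));

  // inverse() gives the camera-to-base transform.
  tf::Transform T_camera_base = T_base_camera.inverse();

  // Composing the two is (numerically) the identity transform.
  tf::Transform identity = T_base_camera * T_camera_base;
  tf::Vector3 o = identity.getOrigin();
  printf("identity origin: %f %f %f\n", o.x(), o.y(), o.z());

  // A point expressed in the camera frame, mapped to the base frame and back again.
  tf::Vector3 p_camera(1.0, 0.0, 0.0);
  tf::Vector3 p_base = T_base_camera * p_camera;
  tf::Vector3 p_round_trip = T_camera_base * p_base;
  printf("round trip: %f %f %f\n", p_round_trip.x(), p_round_trip.y(), p_round_trip.z());
  return 0;
}

Building this requires the ROS tf package; the same pattern (T_ab.inverse() giving T_ba, then chaining transforms with operator*) is what most of the examples below rely on when they convert poses between frames.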

Example 1: setMarker

void MarkerVis::setMarker()
  {
    tf::Transform T_cB = T_bC.inverse();
    tf::Transform T_gB = T_cB * T_gC;
    tf::Quaternion q = T_gB.getRotation();
    marker.header.frame_id = "/world";
    marker.header.stamp = ros::Time();
    marker.type = visualization_msgs::Marker::CUBE;
    marker.action = visualization_msgs::Marker::ADD;
    marker.pose.position.x = T_gB.getOrigin()[0];
    marker.pose.position.y = T_gB.getOrigin()[1];
    marker.pose.position.z = T_gB.getOrigin()[2];
    //std::cout<<T_gB.getOrigin()[2]<<std::endl;
    //printf("%f, %f, %f\n", T_gB.getOrigin()[0]*1000, T_gB.getOrigin()[1]*1000, T_gB.getOrigin()[2]*1000);
    marker.pose.orientation.x = q[0];
    marker.pose.orientation.y = q[1];
    marker.pose.orientation.z = q[2];
    marker.pose.orientation.w = q[3];
    printf("%f, %f, %f, %f\n", q[0], q[1], q[2], q[3]);
    marker.scale.x = 0.2;
    marker.scale.y = 0.4;
    marker.scale.z = 0.01;
    marker.color.a = 1.0;
    marker.color.r = 1.0;
    marker.color.g = 0.0;
    marker.color.b = 0.0;
    marker_vis_pub.publish( marker );
  }
Developer: mwegiere | Project: serwo | Lines: 28 | Source: conveyor_visualization.cpp

Example 2: setMarker

void MarkerVis::setMarker()
  {
    tf::Transform T_cB = T_bC.inverse();
    tf::Transform T_gB = T_cB * T_gC;
    //printTransform(T_gB);
    tf::Quaternion q = T_gB.getRotation();

    marker.header.frame_id = "/world";
    marker.header.stamp = ros::Time();
    //marker.ns = "my_namespace";
    //marker.id = 0;
    marker.type = visualization_msgs::Marker::CUBE;
    marker.action = visualization_msgs::Marker::ADD;
    marker.pose.position.x = T_gB.getOrigin()[0];
    marker.pose.position.y = T_gB.getOrigin()[1];
    marker.pose.position.z = T_gB.getOrigin()[2];
    marker.pose.orientation.x = q[0];
    marker.pose.orientation.y = q[1];
    marker.pose.orientation.z = q[2];
    marker.pose.orientation.w = q[3];
    marker.scale.x = 0.2;
    marker.scale.y = 0.4;
    marker.scale.z = 0.01;
    marker.color.a = 1.0;
    marker.color.r = 1.0;
    marker.color.g = 0.0;
    marker.color.b = 0.0;
    marker_vis_pub.publish( marker );
  }
Developer: mwegiere | Project: serwo | Lines: 29 | Source: grid_visualization.cpp

Example 3: OriginSlaveWSCallback

void OriginSlaveWSCallback(const geometry_msgs::PoseStamped& msg){
  
  Tso.setOrigin(tf::Vector3(msg.pose.position.x, msg.pose.position.y, msg.pose.position.z));
  Tso.setRotation(tf::Quaternion(msg.pose.orientation.x, msg.pose.orientation.y, msg.pose.orientation.z, msg.pose.orientation.w));
  
  // Absolute increments
  Toffset = Tmo.inverse() * Tso;
  Toffset.setOrigin(tf::Vector3(0., 0., 0.));
}
Developer: joseparnau | Project: rosjac | Lines: 9 | Source: workspace_transformation_node.cpp

Example 4: setError

void CheckerboardMeasurementEdge::setError(tf::Transform cameraToMarker) {
	double x, y;
	this->frameImageConverter->project(cameraToMarker.inverse(), x, y);

	double dist = cameraToMarker.getOrigin().length();

	// set error
	this->_error[0] = measurement.marker_data[0] - x;
	this->_error[1] = measurement.marker_data[1] - y;
}
Developer: Stefan210 | Project: kinematic_calibration | Lines: 10 | Source: CheckerboardMeasurementEdge.cpp

Example 5: savePoseToServer

void AmclNode::savePoseToServer()
{
  // We need to apply the last transform to the latest odom pose to get
  // the latest map pose to store.  We'll take the covariance from
  // last_published_pose.
  tf::Pose map_pose = latest_tf_.inverse() * latest_odom_pose_;
  double yaw,pitch,roll;
  map_pose.getBasis().getEulerYPR(yaw, pitch, roll);

  ROS_DEBUG("Saving pose to server. x: %.3f, y: %.3f", map_pose.getOrigin().x(), map_pose.getOrigin().y() );

  private_nh_.setParam("initial_pose_x", map_pose.getOrigin().x());
  private_nh_.setParam("initial_pose_y", map_pose.getOrigin().y());
  private_nh_.setParam("initial_pose_a", yaw);
  private_nh_.setParam("initial_cov_xx",
                                  last_published_pose.pose.covariance[6*0+0]);
  private_nh_.setParam("initial_cov_yy",
                                  last_published_pose.pose.covariance[6*1+1]);
  private_nh_.setParam("initial_cov_aa",
                                  last_published_pose.pose.covariance[6*5+5]);
}
Developer: Benson516 | Project: navigation_old_kinetic | Lines: 21 | Source: amcl_node.cpp

Example 6: transform

void transform( const tf2_msgs::TFMessage tfm )
{
//  ROS_INFO("Pa!");
  ROS_INFO( "Child frame: %s", tfm.transforms[0].child_frame_id.c_str() );
  static tf::TransformBroadcaster br;

  static tf::Transform TEC( tf::Quaternion(-0.5, 0.5, -0.5, 0.5), tf::Vector3(-10e-2, 0, 15e-2) );
  static tf::Transform TEM( tf::Quaternion(0, 0, 0, 1), tf::Vector3(15e-2, 0, 0) );
  static tf::Transform TTF( tf::Quaternion(-0.5, 0.5, 0.5, 0.5), tf::Vector3(20e-2, 5e-2, 0) );
  static tf::StampedTransform TCT, TWE;
  tf::Transform TWEpp, TWF;

  if( strcmp( "EndEffector", tfm.transforms[0].child_frame_id.c_str() ) == 0 )
  {

    tf::transformStampedMsgToTF( tfm.transforms[0], TWE );
//    ROS_INFO("Inside!");

  }
  if( strcmp( "ar_marker_6", tfm.transforms[0].child_frame_id.c_str() ) == 0 )
  {
    tf::transformStampedMsgToTF( tfm.transforms[0], TCT );

    TWF = TWE * TEC * TCT * TTF;
    TWEpp = TWF * TEM.inverse();

    br.sendTransform( tf::StampedTransform(TWEpp, ros::Time::now(), "World", "EndEffectorPP") );
    br.sendTransform( tf::StampedTransform(TEC, ros::Time::now(), "EndEffector", "Camera") );
    br.sendTransform( tf::StampedTransform(TEM, ros::Time::now(), "EndEffector", "MaleConn") );
    br.sendTransform( tf::StampedTransform(TCT, ros::Time::now(), "Camera", "Tag") );
    br.sendTransform( tf::StampedTransform(TTF, ros::Time::now(), "Tag", "FemaleConn") );
    br.sendTransform( tf::StampedTransform(TWF, ros::Time::now(), "World", "FemaleConn2") );

    geometry_msgs::Transform msg;
    tf::transformTFToMsg( TWEpp, msg );
    pubT.publish( msg );
  }
}
Developer: garamizo | Project: visser_power | Lines: 40 | Source: test.cpp

Example 7: getTfDifference

void getTfDifference(const tf::Transform& a, const tf::Transform b, double& dist, double& angle)
{
  tf::Transform motion = a.inverse() * b;
  getTfDifference(motion, dist, angle);
}
Developer: Elessog | Project: ccny_rgbd_tools | Lines: 5 | Source: util.cpp

Example 8: __attribute__

int __attribute__((optimize("0"))) inv_kin(tf::Transform in_T06, l_r in_arm, ik_solution iksol[8]) {
  dh_theta = robot_thetas[in_arm];
  dh_d = ds[in_arm];
  dh_alpha = alphas[in_arm];
  dh_a = aas[in_arm];
  for (int i = 0; i < 8; i++) iksol[i] = ik_zerosol;

  if (in_arm >= dh_l_r_last) {
    ROS_ERROR("BAD ARM IN IK!!!");
    return -1;
  }

  for (int i = 0; i < 8; i++) iksol[i].arm = in_arm;

  //  Step 1, Compute P5
  tf::Transform T60 = in_T06.inverse();
  tf::Vector3 p6rcm = T60.getOrigin();
  tf::Vector3 p05[8];

  p6rcm[2] = 0;  // take projection onto x-y plane
  for (int i = 0; i < 2; i++) {
    tf::Vector3 p65 = (-1 + 2 * i) * Lw * p6rcm.normalize();
    p05[4 * i] = p05[4 * i + 1] = p05[4 * i + 2] = p05[4 * i + 3] = in_T06 * p65;
  }

  //  Step 2, compute displacement of prismatic joint d3
  for (int i = 0; i < 2; i++) {
    double insertion = 0;
    insertion += p05[4 * i].length();  // Two step process avoids compiler
                                       // optimization problem. (Yeah, right. It
                                       // was the compiler's problem...)

    if (insertion <= Lw) {
      cerr << "WARNING: mechanism at RCM singularity (Lw:" << Lw << ", ins:" << insertion
           << "). IK failing.\n";
      iksol[4 * i + 0].invalid = iksol[4 * i + 1].invalid = ik_invalid;
      iksol[4 * i + 2].invalid = iksol[4 * i + 3].invalid = ik_invalid;
      return -2;
    }
    iksol[4 * i + 0].d3 = iksol[4 * i + 1].d3 = -d4 - insertion;
    iksol[4 * i + 2].d3 = iksol[4 * i + 3].d3 = -d4 + insertion;
  }

  //  Step 3, calculate theta 2
  for (int i = 0; i < 8; i += 2)  // p05 solutions
  {
    double z0p5 = p05[i][2];

    double d = iksol[i].d3 + d4;
    double cth2 = 0;

    if (in_arm == dh_left)
      cth2 = 1 / (GM1 * GM3) * ((-z0p5 / d) - GM2 * GM4);
    else
      cth2 = 1 / (GM1 * GM3) * ((z0p5 / d) + GM2 * GM4);

    // Smooth roundoff errors at +/- 1.
    if (cth2 > 1 && cth2 < 1 + eps)
      cth2 = 1;
    else if (cth2 < -1 && cth2 > -1 - eps)
      cth2 = -1;

    if (cth2 > 1 || cth2 < -1) {
      iksol[i].invalid = iksol[i + 1].invalid = ik_invalid;
    } else {
      iksol[i].th2 = acos(cth2);
      iksol[i + 1].th2 = -acos(cth2);
    }
  }

  //  Step 4: Compute theta 1
  for (int i = 0; i < 8; i++) {
    if (iksol[i].invalid == ik_invalid) continue;

    double cth2 = cos(iksol[i].th2);
    double sth2 = sin(iksol[i].th2);
    double d = iksol[i].d3 + d4;
    double BB1 = sth2 * GM3;
    double BB2 = 0;
    tf::Matrix3x3 Bmx;  // using 3 vector and matrix bullet types for convenience.
    tf::Vector3 xyp05(p05[i]);
    xyp05[2] = 0;

    if (in_arm == dh_left) {
      BB2 = cth2 * GM2 * GM3 - GM1 * GM4;
      Bmx.setValue(BB1, BB2, 0, -BB2, BB1, 0, 0, 0, 1);
    } else {
      BB2 = cth2 * GM2 * GM3 + GM1 * GM4;
      Bmx.setValue(BB1, BB2, 0, BB2, -BB1, 0, 0, 0, 1);
    }

    tf::Vector3 scth1 = Bmx.inverse() * xyp05 * (1 / d);
    iksol[i].th1 = atan2(scth1[1], scth1[0]);
  }

  //  Step 5: get theta 4, 5, 6
  for (int i = 0; i < 8; i++) {
    if (iksol[i].invalid == ik_invalid) continue;

    // compute T03:
//......... part of the code omitted here .........
Developer: uw-biorobotics | Project: raven2 | Lines: 101 | Source: r2_kinematics.cpp

Example 9: if


//......... part of the code omitted here .........
                }
            }
            // Report the overall filter covariance, rather than the
            // covariance for the highest-weight cluster
            //p.covariance[6*5+5] = hyps[max_weight_hyp].pf_pose_cov.m[2][2];
            p.pose.covariance[6*5+5] = set->cov.m[2][2];

            /*
               printf("cov:\n");
               for(int i=0; i<6; i++)
               {
               for(int j=0; j<6; j++)
               printf("%6.3f ", p.covariance[6*i+j]);
               puts("");
               }
             */

            pose_pub_.publish(p);
            last_published_pose = p;

            ROS_DEBUG("New pose: %6.3f %6.3f %6.3f",
                      hyps[max_weight_hyp].pf_pose_mean.v[0],
                      hyps[max_weight_hyp].pf_pose_mean.v[1],
                      hyps[max_weight_hyp].pf_pose_mean.v[2]);

            // subtracting base to odom from map to base and send map to odom instead
            tf::Stamped<tf::Pose> odom_to_map;
            try
            {
                tf::Transform tmp_tf(tf::createQuaternionFromYaw(hyps[max_weight_hyp].pf_pose_mean.v[2]),
                                     tf::Vector3(hyps[max_weight_hyp].pf_pose_mean.v[0],
                                                 hyps[max_weight_hyp].pf_pose_mean.v[1],
                                                 0.0));
                tf::Stamped<tf::Pose> tmp_tf_stamped (tmp_tf.inverse(),
                                                      laser_scan->header.stamp,
                                                      base_frame_id_);
                this->tf_->transformPose(odom_frame_id_,
                                         tmp_tf_stamped,
                                         odom_to_map);
            }
            catch(tf::TransformException)
            {
                ROS_DEBUG("Failed to subtract base to odom transform");
                return;
            }

            latest_tf_ = tf::Transform(tf::Quaternion(odom_to_map.getRotation()),
                                       tf::Point(odom_to_map.getOrigin()));
            latest_tf_valid_ = true;

            if (tf_broadcast_ == true)
            {
                // We want to send a transform that is good up until a
                // tolerance time so that odom can be used
                ros::Time transform_expiration = (laser_scan->header.stamp +
                                                  transform_tolerance_);
                tf::StampedTransform tmp_tf_stamped(latest_tf_.inverse(),
                                                    transform_expiration,
                                                    global_frame_id_, odom_frame_id_);
                this->tfb_->sendTransform(tmp_tf_stamped);
                sent_first_transform_ = true;
            }
        }
        else
        {
            ROS_ERROR("No pose!");
Developer: Renda110 | Project: AGVC | Lines: 67 | Source: amcl_node.cpp

Example 10: straightenPoints

void TabletopSegmentor::straightenPoints(PointCloudType &points, const tf::Transform& table_plane_trans, 
		      const tf::Transform& table_plane_trans_flat)
{
  tf::Transform trans = table_plane_trans_flat * table_plane_trans.inverse();
  pcl_ros::transformPointCloud(points, points, trans);
}
Developer: bit-pirate | Project: tabletop_segmentor | Lines: 6 | Source: tabletop_segmentation.cpp

Example 11: main

int main(int argc, char** argv)
{
	ros::init(argc, argv, "squirtle_localization_node");

	ROS_INFO("Squirtle Localization for ROS v0.1");

    ros::NodeHandle n;
	ros::NodeHandle pn("~");

	double rate;
	pn.param("rate", rate, 2.0);

    pn.param("true_north_compensation", true_north_compensation, 0.0499164166);
    
	pn.param<std::string>("global_frame_id", global_frame_id, "/world");
    pn.param<std::string>("odom_frame_id", odom_frame_id, "odom");
	
	tf_listener = new tf::TransformListener();

	message_filters::Subscriber<sensor_msgs::Imu> * imu_sub = new message_filters::Subscriber<sensor_msgs::Imu>();
	imu_sub->subscribe(n, "imu/data", 20);
    tf::MessageFilter<sensor_msgs::Imu> * tf_filter_imu = new tf::MessageFilter<sensor_msgs::Imu>(*imu_sub, *tf_listener, odom_frame_id, 20);
    tf_filter_imu->registerCallback( boost::bind(imuCallback, _1) );

	message_filters::Subscriber<sensor_msgs::NavSatFix> * gps_sub = new message_filters::Subscriber<sensor_msgs::NavSatFix>();
	gps_sub->subscribe(n, "gps/fix", 20);
    tf::MessageFilter<sensor_msgs::NavSatFix> * tf_filter_gps = new tf::MessageFilter<sensor_msgs::NavSatFix>(*gps_sub, *tf_listener, odom_frame_id, 20);
    tf_filter_gps->registerCallback( boost::bind(gpsCallback, _1) );

	/*message_filters::Subscriber<nav_msgs::Odometry> * odom_sub = new message_filters::Subscriber<nav_msgs::Odometry>();
	odom_sub->subscribe(n, "odom", 20);
    tf::MessageFilter<nav_msgs::Odometry> * tf_filter = new tf::MessageFilter<nav_msgs::Odometry>(*odom_sub, *tf_listener, base_footprint_frame_id, 20);
    tf_filter->registerCallback( boost::bind(odomCallback, _1) );*/

    ros::Publisher odom_pub = n.advertise<nav_msgs::Odometry>("global_odom", 10);

	static tf::TransformBroadcaster tf_broadcaster;

	ros::Rate r(rate);
	while(n.ok())
	{
        tf::Transform t_world_odom_rotation = t_world_imu * t_odom_imu.inverse();
        tf::Transform t_world_odom_translation = t_world_gps * t_odom_gps.inverse();

        tf::Transform t_world_odom;
        t_world_odom.setOrigin(t_world_odom_translation.getOrigin());
        t_world_odom.setRotation((t_world_odom_rotation.getRotation()).normalized());

        tf_broadcaster.sendTransform(tf::StampedTransform(t_world_odom, ros::Time::now(), global_frame_id, odom_frame_id));

        // Odometry header
        nav_msgs::Odometry odom_msg;
        odom_msg.header.frame_id = global_frame_id;
        odom_msg.header.stamp = ros::Time::now();

        // Odometry position
        tf::Vector3 position = t_world_odom_translation.getOrigin();
        odom_msg.pose.pose.position.x = position.getX();
        odom_msg.pose.pose.position.y = position.getY();
        odom_msg.pose.pose.position.z = position.getZ();

        // Odometry orientation
        tf::quaternionTFToMsg((t_world_odom_rotation.getRotation()).normalized(), odom_msg.pose.pose.orientation);

        // TODO: Finish this, add covariance...
        //odom_msg.pose.covariance...

        ros::Duration delta_t = odom_msg.header.stamp - last_odom.header.stamp;

        if(delta_t.toSec() < 1.0)
        {
            // Odometry linear velocity
            odom_msg.twist.twist.linear.x = (odom_msg.pose.pose.position.x - last_odom.pose.pose.position.x)/delta_t.toSec();
            odom_msg.twist.twist.linear.y = (odom_msg.pose.pose.position.y - last_odom.pose.pose.position.y)/delta_t.toSec();
            odom_msg.twist.twist.linear.z = (odom_msg.pose.pose.position.z - last_odom.pose.pose.position.z)/delta_t.toSec();

            // TODO: Fix this... We are assuming that the robot cannot rotate fast enough, so shortest_angular_distance might work.
            // Odometry angular velocity
            odom_msg.twist.twist.angular.x = 0.0;
            odom_msg.twist.twist.angular.y = 0.0;
            odom_msg.twist.twist.angular.z = (angles::shortest_angular_distance(tf::getYaw(odom_msg.pose.pose.orientation), tf::getYaw(last_odom.pose.pose.orientation)))/delta_t.toSec();

            // TODO: Finish this, add covariance...
            //odom_msg.twist.covariance...

            odom_pub.publish(odom_msg);
        }

        last_odom = odom_msg;

		ros::spinOnce();
		r.sleep();
	}
	
	return 0;
}
Developer: AlfaroP | Project: isr-uc-ros-pkg | Lines: 96 | Source: squirtle_localization_node.cpp

Example 12: imageCb

  void imageCb(const sensor_msgs::ImageConstPtr& l_image,
               const sensor_msgs::CameraInfoConstPtr& l_cam_info,
               const sensor_msgs::ImageConstPtr& r_image,
               const sensor_msgs::CameraInfoConstPtr& r_cam_info)
  {
    ROS_INFO("In callback, seq = %u", l_cam_info->header.seq);
    
    // Convert ROS messages for use with OpenCV
    cv::Mat left, right;
    try {
      left  = l_bridge_.imgMsgToCv(l_image, "mono8");
      right = r_bridge_.imgMsgToCv(r_image, "mono8");
    }
    catch (sensor_msgs::CvBridgeException& e) {
      ROS_ERROR("Conversion error: %s", e.what());
      return;
    }
    cam_model_.fromCameraInfo(l_cam_info, r_cam_info);

    frame_common::CamParams cam_params;
    cam_params.fx = cam_model_.left().fx();
    cam_params.fy = cam_model_.left().fy();
    cam_params.cx = cam_model_.left().cx();
    cam_params.cy = cam_model_.left().cy();
    cam_params.tx = cam_model_.baseline();

    if (vslam_system_.addFrame(cam_params, left, right)) {
      /// @todo Not rely on broken encapsulation of VslamSystem here
      int size = vslam_system_.sba_.nodes.size();
      if (size % 2 == 0) {
        // publish markers
        sba::drawGraph(vslam_system_.sba_, cam_marker_pub_, point_marker_pub_);
      }

      // Publish VO tracks
      if (vo_tracks_pub_.getNumSubscribers() > 0) {
        frame_common::drawVOtracks(left, vslam_system_.vo_.frames, vo_display_);
        IplImage ipl = vo_display_;
        sensor_msgs::ImagePtr msg = sensor_msgs::CvBridge::cvToImgMsg(&ipl);
        msg->header = l_cam_info->header;
        vo_tracks_pub_.publish(msg, l_cam_info);
      }
      
      // Refine large-scale SBA.
      const int LARGE_SBA_INTERVAL = 10;
      if (size > 4 && size % LARGE_SBA_INTERVAL == 0) {
        ROS_INFO("Running large SBA on %d nodes", size);
        vslam_system_.refine();
      }
      
      if (pointcloud_pub_.getNumSubscribers() > 0 && size % 2 == 0)
        publishRegisteredPointclouds(vslam_system_.sba_, vslam_system_.frames_, pointcloud_pub_);
      
      // Publish odometry data to tf.
      if (0) // TODO: Change this to parameter.
      {
        ros::Time stamp = l_cam_info->header.stamp;
        std::string image_frame = l_cam_info->header.frame_id;
        Eigen::Vector4d trans = -vslam_system_.sba_.nodes.back().trans;
        Eigen::Quaterniond rot = vslam_system_.sba_.nodes.back().qrot.conjugate();
        
        trans.head<3>() = rot.toRotationMatrix()*trans.head<3>(); 
        
        tf_transform_.setOrigin(tf::Vector3(trans(0), trans(1), trans(2)));
        tf_transform_.setRotation(tf::Quaternion(rot.x(), rot.y(), rot.z(), rot.w()) );
        
        tf::Transform simple_transform;
        simple_transform.setOrigin(tf::Vector3(0, 0, 0));
        simple_transform.setRotation(tf::Quaternion(.5, -.5, .5, .5));
        
        tf_broadcast_.sendTransform(tf::StampedTransform(tf_transform_, stamp, image_frame, "visual_odom"));
        tf_broadcast_.sendTransform(tf::StampedTransform(simple_transform, stamp, "visual_odom", "pgraph"));
      
      
        // Publish odometry data on topic.
        if (odom_pub_.getNumSubscribers() > 0)
        {
          tf::StampedTransform base_to_image;
          tf::Transform base_to_visodom;
         
          try
          {
            tf_listener_.lookupTransform(image_frame, "/base_footprint",
                                 stamp, base_to_image);
          }
          catch (tf::TransformException ex)
          {
              ROS_WARN("%s",ex.what());
              return;
          }
                                 
          base_to_visodom = tf_transform_.inverse() * base_to_image;
          
          geometry_msgs::PoseStamped pose;
          nav_msgs::Odometry odom;
          pose.header.frame_id = "/visual_odom";
          pose.pose.position.x = base_to_visodom.getOrigin().x();
          pose.pose.position.y = base_to_visodom.getOrigin().y();
          pose.pose.position.z = base_to_visodom.getOrigin().z();
          pose.pose.orientation.x = base_to_visodom.getRotation().x();
//......... part of the code omitted here .........
Developer: RoboWGT | Project: robo_groovy | Lines: 101 | Source: stereo_vslam_node.cpp


Note: The tf::Transform::inverse examples in this article were compiled by 纯净天空 from open-source code and documentation platforms such as GitHub and MSDocs. The code snippets were selected from open-source projects contributed by their respective developers, and copyright of the source code remains with the original authors. Please refer to each project's license before distributing or using the code; do not republish without permission.