

C++ StampedTransform::inverse Method Code Examples

This article collects typical usage examples of the C++ tf::StampedTransform::inverse method. If you are wondering what StampedTransform::inverse does, how to call it, or what it looks like in real code, the curated examples below should help. You can also explore further usage examples of the tf::StampedTransform class itself.


The following shows 6 code examples of the StampedTransform::inverse method, sorted by popularity by default.
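Before the examples, here is a minimal, self-contained sketch of the most common pattern: look up a tf::StampedTransform with a TransformListener and invert it to express the pose in the opposite direction. The node name and the frame names ("odom", "base_link") are placeholder assumptions, not taken from the examples below.

#include <ros/ros.h>
#include <tf/transform_listener.h>

int main(int argc, char** argv)
{
    ros::init(argc, argv, "inverse_demo"); // node name is an assumption
    ros::NodeHandle nh;
    tf::TransformListener listener;

    ros::Rate rate(10.0);
    while (nh.ok())
    {
        tf::StampedTransform base_in_odom;
        try
        {
            // pose of "base_link" expressed in "odom" (frame names are placeholders)
            listener.lookupTransform("odom", "base_link", ros::Time(0), base_in_odom);
        }
        catch (tf::TransformException& ex)
        {
            ROS_WARN("%s", ex.what());
            rate.sleep();
            continue;
        }

        // inverse() returns a plain tf::Transform going the other way: "odom" seen from "base_link"
        tf::Transform odom_in_base = base_in_odom.inverse();
        ROS_INFO("odom origin seen from base_link: %.2f %.2f %.2f",
                 odom_in_base.getOrigin().x(),
                 odom_in_base.getOrigin().y(),
                 odom_in_base.getOrigin().z());
        rate.sleep();
    }
    return 0;
}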

Example 1:

carmen_6d_point 
get_carmen_pose6D_from_matrix(Matrix visual_odometry_transformation_matrix)
{
	// See: http://www.ros.org/wiki/tf
	tf::Transform visual_odometry_pose;
	tf::Transform carmen_pose;
	carmen_6d_point carmen_pose_6d;

	// convert the visual odometry output matrix to the tf::Transform type.
	visual_odometry_pose = convert_matrix_to_tf_transform(visual_odometry_transformation_matrix);

	// compute the current visual odometry pose with respect to the carmen coordinate system
	carmen_pose = g_car_to_visual_odometry_transform * visual_odometry_pose * g_car_to_visual_odometry_transform.inverse();

	double yaw, pitch, roll;
	tf::Matrix3x3(carmen_pose.getRotation()).getRPY(roll, pitch, yaw);

	carmen_pose_6d.x = carmen_pose.getOrigin().x();
	carmen_pose_6d.y = carmen_pose.getOrigin().y();
	carmen_pose_6d.z = carmen_pose.getOrigin().z();

	carmen_pose_6d.yaw = yaw;
	carmen_pose_6d.pitch = pitch;
	carmen_pose_6d.roll = roll;

	return carmen_pose_6d;
}
Developer ID: ulyssesrr, Project: carmen_lcad, Lines of code: 27, Source: visual_odometry_main.cpp
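A brief aside on the pattern above (my reading, not a statement from the original project): multiplying by g_car_to_visual_odometry_transform on the left and by its inverse() on the right is a change of basis, re-expressing the motion measured in the visual-odometry sensor frame as the equivalent motion of the car frame. A minimal sketch with made-up numbers, assuming a sensor mounted with a 90-degree yaw offset:

#include <cmath>
#include <tf/transform_datatypes.h>

// Sketch: a sensor yawed 90 degrees relative to the car reports a motion of +1 m along
// its own x axis; the conjugation turns that into +1 m along the car's y axis.
void conjugation_demo()
{
    tf::Transform car_to_sensor(tf::createQuaternionFromYaw(M_PI / 2.0), tf::Vector3(0, 0, 0));
    tf::Transform sensor_motion(tf::Quaternion(0, 0, 0, 1), tf::Vector3(1, 0, 0));

    tf::Transform car_motion = car_to_sensor * sensor_motion * car_to_sensor.inverse();
    // car_motion.getOrigin() is approximately (0, 1, 0) and its rotation is the identity.
    (void)car_motion;
}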

Example 2: lock

void sptam::sptam_node::fixOdomFrame(const CameraPose& cameraPose, const tf::StampedTransform& camera_to_odom, const ros::Time& t)
{
  tf::Pose camera_to_map;
  CameraPose2TFPose( cameraPose, camera_to_map );

  // compute the new difference between map and odom
  tf::Transform odom_to_map = camera_to_map * camera_to_odom.inverse();

  std::lock_guard<std::mutex> lock( odom_to_map_mutex_ );
  odom_to_map_ = odom_to_map;
}
Developer ID: Aerobota, Project: sptam, Lines of code: 11, Source: sptam_node.cpp

Example 3: stamped_transform_inverse

tf::StampedTransform OdomTf::stamped_transform_inverse(tf::StampedTransform stf) {
    // instantiate stamped transform with constructor args
    // note: child_frame_id_ and frame_id_ are swapped so that they correspond to the inverted transform
    tf::StampedTransform stf_inv(stf.inverse(), stf.stamp_, stf.child_frame_id_, stf.frame_id_);
    /* long-winded equivalent:
    tf::StampedTransform stf_inv;    
    tf::Transform tf = get_tf_from_stamped_tf(stf);
    tf::Transform tf_inv = tf.inverse();
    
    stf_inv.stamp_ = stf.stamp_;
    stf_inv.frame_id_ = stf.child_frame_id_;
    stf_inv.child_frame_id_ = stf.frame_id_;
    stf_inv.setOrigin(tf_inv.getOrigin());
    stf_inv.setBasis(tf_inv.getBasis());
     * */
    return stf_inv;
}
Developer ID: TuZZiX, Project: coke_fetcher, Lines of code: 17, Source: odom_tf_fncs.cpp
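For readers who want to try the constructor-based inversion in isolation, here is a standalone sketch; the pose values, timestamp, and frame names are placeholders, not taken from the project above.

#include <tf/transform_datatypes.h>

// Build a StampedTransform by hand and invert it in one line;
// note how frame_id_ and child_frame_id_ swap roles.
tf::StampedTransform invert_stamped_demo()
{
    tf::Transform t(tf::Quaternion(0, 0, 0, 1), tf::Vector3(1.0, 2.0, 0.0));
    tf::StampedTransform stf(t, ros::Time(0), "odom", "base_link");

    tf::StampedTransform stf_inv(stf.inverse(), stf.stamp_,
                                 stf.child_frame_id_, stf.frame_id_);
    return stf_inv;
}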

Example 4: updateTFsThread


//......... part of the code is omitted here .........
                }else{
                        std::cout << "*** GRIPPER OFFLINE! ***" <<std::endl;
                }

                if(comau_sync) {
                        std::cout << "*** SYNC WITH COMAU ACTIVE! ***"<<std::endl;
                }

                /** MODE JOYSTICK CONTROL */
                if(current_mode==JOYSTICK_CONTROL) {
                        std::cout << "Mode: Joystick Control"<<std::endl;
                        std::cout << joystick_angular_speed<<std::endl;

                        joystick_set_point.position.x +=  joystick_speed.x;
                        joystick_set_point.position.y +=  joystick_speed.y;
                        joystick_set_point.position.z +=  joystick_speed.z;

                        KDL::Rotation joystick_rotation;
                        MathUtils::poseToKDLRotation(joystick_set_point,joystick_rotation);
                        joystick_rotation.DoRotX(joystick_angular_speed.x);
                        joystick_rotation.DoRotY(joystick_angular_speed.y);
                        joystick_rotation.DoRotZ(joystick_angular_speed.z);
                        MathUtils::poseToKDLRotation(joystick_set_point,joystick_rotation,true);

                        MathUtils::poseToTF(joystick_set_point,current_position);
                }

                /** MODE FOLLOW TARGET */
                if(current_mode==FOLLOW_TARGET) {
                        std::cout << "Mode: Follow Target"<<std::endl;

                        if(markers_current_list.size()<=0) {
                                std::cout << "NO TARGET ARE YOU CRAZY!!! "<<follow_target_id<<std::endl;
                        }else{
                                std::cout << "Following target: "<<follow_target_id<<std::endl;

                                follow_target_approach_pose.position.z +=  joystick_speed.z/100.0f;
                                if(follow_target_approach_pose.position.z<0.05f) {
                                        follow_target_approach_pose.position.z=0.025f;
                                }
                                KDL::Rotation approach_rotation;
                                MathUtils::poseToKDLRotation(follow_target_approach_pose,approach_rotation);
                                approach_rotation.DoRotZ(joystick_angular_speed.z/100.0f);
                                MathUtils::poseToKDLRotation(follow_target_approach_pose,approach_rotation,true);

                                current_position = target_approach;

                        }
                }




                if(GLOBAL_CALIBRATION_ACTIVE) {
                        try{
                                listener->lookupTransform("camera_rgb_frame", calibration_marker_name, ros::Time(0), t_cam_marker);
                                listener->lookupTransform("base", "comau_base_marker",ros::Time(0), t_0_marker);

                                t_6_0 = t_0_6.inverse();
                                t_marker_cam = t_cam_marker.inverse();
                                t_6_cam = t_6_0 * t_0_marker;
                                t_6_cam = t_6_cam * t_marker_cam;

                                tf_broadcaster->sendTransform(tf::StampedTransform(t_6_cam, ros::Time::now(), "comau_t0U", "comau_t0CAM"));

                                tf::Transform full = t_0_6*t_6_cam;
                                tf_broadcaster->sendTransform(tf::StampedTransform(full, ros::Time::now(), "base", "comau_t0CAM_full"));

                                //tf_broadcaster->sendTransform(tf::StampedTransform(t_0_6, ros::Time(0), "base", "camera_rgb_frame"));
                        }
                        catch (tf::TransformException& ex) {
                                ROS_ERROR("%s",ex.what());
                        }
                }else{
                        listener->lookupTransform("base", "comau_t0U",ros::Time(0), t_0_6);
                        //tf_broadcaster->sendTransform(tf::StampedTransform(t_0_6, ros::Time(0), "base", "camera_rgb_frame"));
                }

                /** TARGET SELECTION */
                lar_visionsystem::MarkerApproachCommand target_approach_command;
                target_approach_command.marker_id = follow_target_id;
                target_approach_command.update_target_only = true;
                marker_approach.publish(target_approach_command);
                std::cout << markers_current_list.size() <<std::endl;

                /** BROADCAST COMAU TF TARGET */
                tf_broadcaster->sendTransform(tf::StampedTransform(current_position, ros::Time(0), "base", "comau_tf_target"));
                std::cout << "Sending "<<current_position.getOrigin()[0]<<std::endl;

                /** SEND DATA TO COMAU */
                MathUtils::poseToTF(comau_command.pose,current_position,true);
                std::cout << comau_command<<std::endl;
                comau_cartesian_controller.publish(comau_command);

                ros::spinOnce();
                std::system("clear");
        }

        updateTFsThread.join();
}
Developer ID: lar-lab-unibo, Project: lar_visionsystem, Lines of code: 101, Source: comau_tf_follower.cpp

Example 5: processWithLock

  TyErrorId processWithLock(CAS &tcas, ResultSpecification const &res_spec)
  {
    MEASURE_TIME;
    outInfo("process begins");
    rs::SceneCas cas(tcas);
    rs::Scene scene = cas.getScene();

    cas.get(VIEW_CLOUD, *cloud);
    cas.get(VIEW_COLOR_IMAGE, color);
    cas.get(VIEW_DEPTH_IMAGE, depth);
    cas.get(VIEW_CAMERA_INFO, cameraInfo);

    indices->clear();
    indices->reserve(cloud->points.size());

    camToWorld.setIdentity();
    if(scene.viewPoint.has())
    {
      rs::conversion::from(scene.viewPoint.get(), camToWorld);
    }
    else
    {
      outWarn("No camera to world transformation, no further processing!");
      throw rs::FrameFilterException();
    }
    worldToCam = tf::StampedTransform(camToWorld.inverse(), camToWorld.stamp_, camToWorld.child_frame_id_, camToWorld.frame_id_);
    computeFrustum();

    // The default place to look for objects is counter tops, unless the query asked for a different place.
    // The message comes from the designator and is not the same as the entries in the semantic map,
    // so we need to transform them.
    rs::Query qs = rs::create<rs::Query>(tcas);
    std::vector<std::string> regionsToLookAt;
    regionsToLookAt.assign(defaultRegions.begin(),defaultRegions.end());
    regions.clear();
    if(cas.getFS("QUERY", qs))
    {
      outWarn("location set in query: " << qs.location());
      if(std::find(defaultRegions.begin(), defaultRegions.end(), qs.location()) == std::end(defaultRegions) && qs.location()!="")
      {
        regionsToLookAt.clear();
        regionsToLookAt.push_back(qs.location());
      }
    }

    if(regions.empty())
    {
      std::vector<rs::SemanticMapObject> semanticRegions;
      getSemanticMapEntries(cas, regionsToLookAt, semanticRegions);

      regions.resize(semanticRegions.size());
      for(size_t i = 0; i < semanticRegions.size(); ++i)
      {

        Region &region = regions[i];
        region.width = semanticRegions[i].width();
        region.depth = semanticRegions[i].depth();
        region.height = semanticRegions[i].height();
        region.name = semanticRegions[i].name();
        rs::conversion::from(semanticRegions[i].transform(), region.transform);
      }
    }

    for(size_t i = 0; i < regions.size(); ++i)
    {
      if(frustumCulling(regions[i]) || !frustumCulling_)
      {
        outInfo("region inside frustum: " << regions[i].name);
        filterRegion(regions[i]);
      }
      else
      {
        outInfo("region outside frustum: " << regions[i].name);
      }
    }

    pcl::ExtractIndices<PointT> ei;
    ei.setKeepOrganized(true);
    ei.setIndices(indices);
    ei.filterDirectly(cloud);

    cas.set(VIEW_CLOUD, *cloud);

    if(changeDetection && !indices->empty())
    {
      ++frames;
      if(lastImg.empty())
      {
        lastMask = cv::Mat::ones(color.rows, color.cols, CV_8U);
        lastImg = cv::Mat::zeros(color.rows, color.cols, CV_32FC4);
      }

      uint32_t secondsPassed = camToWorld.stamp_.sec - lastTime.sec;
      bool change = checkChange() || cas.has("QUERY") || secondsPassed > timeout;

      if(!change)
      {
        ++filtered;
      }
      else
//........ part of the code is omitted here .........
Developer ID: bbferka, Project: robosherlock, Lines of code: 101, Source: RegionFilter.cpp

Example 6: computeRelativeTransformation

void InputOutputLinControl::computeRelativeTransformation(tf::StampedTransform start, tf::StampedTransform current, tf::Transform& relative_transform)
{
	relative_transform = start.inverse() * current;
}
Developer ID: alcor-lab, Project: trajectory-control, Lines of code: 4, Source: InputOutputLinControl.cpp
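Below is a standalone sketch of what the relative transform represents; the poses, timestamps, and frame names are made up for illustration and are not part of the original project.

#include <tf/transform_datatypes.h>

// start.inverse() * current expresses the current pose relative to the start pose,
// i.e. how far the frame has moved since "start" (here: (2, 1, 0) with no rotation).
void relative_transform_demo()
{
    tf::StampedTransform start(tf::Transform(tf::Quaternion(0, 0, 0, 1), tf::Vector3(1, 0, 0)),
                               ros::Time(0), "odom", "base_link");
    tf::StampedTransform current(tf::Transform(tf::Quaternion(0, 0, 0, 1), tf::Vector3(3, 1, 0)),
                                 ros::Time(0), "odom", "base_link");

    tf::Transform relative = start.inverse() * current;
    (void)relative;
}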


Note: The tf::StampedTransform::inverse method examples in this article were compiled by 纯净天空 from open source code and documentation platforms such as GitHub and MSDocs. The code snippets were selected from open source projects contributed by various developers; copyright of the source code remains with the original authors. For distribution and use, please refer to the corresponding project's License; do not reproduce without permission.