This page collects typical usage examples of the C++ method eigen::Affine3d::inverse. If you have been wondering what Affine3d::inverse does, how to call it, or what real code using it looks like, the curated examples below should help. You can also read more about its containing class, eigen::Affine3d.
The following shows 15 code examples of the Affine3d::inverse method, sorted by popularity by default. You can upvote the examples you find useful; these ratings help rank the C++ samples shown on this site.
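Before the project-specific examples, a minimal self-contained sketch (not taken from any of the projects below) may help: for a transform T_a_b that maps points expressed in frame b into frame a, T_a_b.inverse() maps them back from a into b, and multiplying by an inverse is the usual way to change reference frames. The frame names and numeric values here are illustrative only.
#include <cmath>
#include <iostream>
#include <Eigen/Geometry>

int main() {
  // Pose of a "tool" frame expressed in a "base" frame (made-up values).
  Eigen::Affine3d T_base_tool = Eigen::Translation3d(0.5, 0.0, 0.2) *
                                Eigen::AngleAxisd(M_PI / 4.0, Eigen::Vector3d::UnitZ());

  // inverse() gives the pose of "base" expressed in "tool".
  Eigen::Affine3d T_tool_base = T_base_tool.inverse();

  // Round trip: a point mapped into base coordinates and back is unchanged.
  Eigen::Vector3d p_tool(0.1, 0.2, 0.3);
  Eigen::Vector3d p_base = T_base_tool * p_tool;
  std::cout << (T_tool_base * p_base - p_tool).norm() << std::endl;  // prints ~0

  // Frame-change idiom that recurs in the examples below:
  // given T_frame_world and T_frame_link, back out T_link_world.
  Eigen::Affine3d T_frame_world = T_base_tool;                  // stand-in value
  Eigen::Affine3d T_frame_link  = Eigen::Affine3d::Identity();  // stand-in value
  Eigen::Affine3d T_link_world  = T_frame_world * T_frame_link.inverse();
  (void)T_link_world;
  return 0;
}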
Example 1: snapStateToFrame
void MotionPlanningFrame::snapStateToFrame(robot_state::RobotState& state,
const Eigen::Affine3d& T_frame_world,
const std::string& link,
const Eigen::Affine3d& T_frame_link,
const std::string& group)
{
ROS_DEBUG_FUNCTION;
// std::cout << "T_frame_world:\n" << T_frame_world.matrix() << std::endl;
// std::cout << "T_frame_link:\n" << T_frame_link.matrix() << std::endl;
// Back out the desired link transform in global coordinates.
Eigen::Affine3d T_link_world = T_frame_world * T_frame_link.inverse();
// Snap to the frame using IK.
const moveit::core::JointModelGroup* jmg = state.getJointModelGroup(group);
// std::cout << "group: " << group << std::endl;
state.update();
trajectory_msgs::JointTrajectoryPoint point;
state.copyJointGroupPositions(jmg, point.positions);
ROS_DEBUG_STREAM("pre-ik positions:\n" << point);
geometry_msgs::Pose pose;
tf::poseEigenToMsg(T_link_world, pose);
state.setFromIK(jmg, pose, link);
state.update();
state.copyJointGroupPositions(jmg, point.positions);
ROS_DEBUG_STREAM("post-ik positions:\n" << point);
ROS_DEBUG_FUNCTION;
}
Example 2: rt_arm_plan_path_current_to_goal_pose
//this is a pretty general function:
// goal contains a desired tool pose;
// path is planned from current joint state to some joint state that achieves desired tool pose
bool ArmMotionInterface::rt_arm_plan_path_current_to_goal_pose() {
ROS_INFO("computing a cartesian trajectory to right-arm tool goal pose");
//unpack the goal pose:
goal_gripper_pose_right_ = cart_goal_.des_pose_gripper_right;
goal_gripper_affine_right_= transformPoseToEigenAffine3d(goal_gripper_pose_right_.pose);
ROS_INFO("tool frame goal: ");
display_affine(goal_gripper_affine_right_);
goal_flange_affine_right_ = goal_gripper_affine_right_ * A_tool_wrt_flange_.inverse();
ROS_INFO("flange goal");
display_affine(goal_flange_affine_right_);
Vectorq7x1 q_start;
q_start = get_jspace_start_right_arm_(); // choose last cmd, or current joint angles
path_is_valid_ = cartTrajPlanner_.cartesian_path_planner(q_start, goal_flange_affine_right_, optimal_path_);
if (path_is_valid_) {
baxter_traj_streamer_.stuff_trajectory(optimal_path_, des_trajectory_); //convert from vector of poses to trajectory message
computed_arrival_time_ = des_trajectory_.points.back().time_from_start.toSec();
cart_result_.return_code = cwru_action::cwru_baxter_cart_moveResult::SUCCESS;
cart_result_.computed_arrival_time = computed_arrival_time_;
cart_move_as_.setSucceeded(cart_result_);
}
else {
cart_result_.return_code = cwru_action::cwru_baxter_cart_moveResult::RT_ARM_PATH_NOT_VALID;
cart_result_.computed_arrival_time = -1.0; //impossible arrival time
cart_move_as_.setSucceeded(cart_result_); //the communication was a success, but not the computation
}
return path_is_valid_;
}
Example 3: if
void Irp6pInverseKinematic::updateHook() {
if (port_input_wrist_pose_.read(wrist_pose_) == RTT::NewData) {
Eigen::Affine3d trans;
tf::poseMsgToEigen(wrist_pose_, trans);
port_current_joint_position_.read(local_current_joints_);
inverse_kinematics_single_iteration(local_current_joints_, trans,
&local_desired_joints_);
port_output_joint_position_.write(local_desired_joints_);
} else if (port_input_end_effector_pose_.read(end_effector_pose_)
== RTT::NewData) {
port_tool_.read(tool_msgs_);
Eigen::Affine3d tool;
Eigen::Affine3d trans;
Eigen::Affine3d wrist_pose;
tf::poseMsgToEigen(end_effector_pose_, trans);
tf::poseMsgToEigen(tool_msgs_, tool);
wrist_pose = trans * tool.inverse();
port_current_joint_position_.read(local_current_joints_);
inverse_kinematics_single_iteration(local_current_joints_, wrist_pose,
&local_desired_joints_);
port_output_joint_position_.write(local_desired_joints_);
}
}
Example 4: cloud
template <typename PointT, typename NormalT> bool
cpu_tsdf::TSDFVolumeOctree::integrateCloud (
const pcl::PointCloud<PointT> &cloud,
const pcl::PointCloud<NormalT> &normals,
const Eigen::Affine3d &trans)
{
Eigen::Affine3f trans_inv = trans.inverse ().cast<float> ();
// First, sample a few points and force their containing voxels to split
int px_step = 1;
int nsplit = 0;
for (size_t u = 0; u < cloud.width; u += px_step)
{
for (size_t v = 0; v < cloud.height; v += px_step)
{
const PointT &pt_surface_orig = cloud (u, v);
if (pcl_isnan (pt_surface_orig.z))
continue;
// Look at surroundings
int nstep = 0;
Eigen::Vector3f ray = pt_surface_orig.getVector3fMap ().normalized ();
for (int perm = 0; perm < num_random_splits_; perm++)
{
// Get containing voxels
PointT pt_trans;
float scale = (float)rand () / (float)RAND_MAX * 0.03;
Eigen::Vector3f noise = Eigen::Vector3f::Random ().normalized () * scale;
if (perm == 0) noise *= 0;
pt_trans.getVector3fMap () = trans.cast<float> () * (pt_surface_orig.getVector3fMap ()+ noise);
OctreeNode* voxel = octree_->getContainingVoxel (pt_trans.x, pt_trans.y, pt_trans.z);
if (voxel != NULL)
{
while (voxel->getMinSize () > xsize_ / xres_)
{
nsplit++;
voxel->split ();
voxel = voxel->getContainingVoxel (pt_trans.x, pt_trans.y, pt_trans.z);
}
}
}
}
}
// Do Frustum Culling to get rid of unseen voxels
std::vector<cpu_tsdf::OctreeNode::Ptr> voxels_culled;
getFrustumCulledVoxels(trans, voxels_culled);
#pragma omp parallel for
for (size_t i = 0; i < voxels_culled.size (); i++)
{
updateVoxel (voxels_culled[i], cloud, normals, trans_inv);
}
// Cloud is no longer empty
is_empty_ = false;
return (true);
}
Example 5: addScanLineToPointCloud
void ICPLocalization::addScanLineToPointCloud(Eigen::Affine3d body2Odo, Eigen::Affine3d body2World, Eigen::Affine3d laser2Body, const ::base::samples::LaserScan &scan_reading)
{
if(scansWithTransforms.size() == 0)
{
addLaserScan(body2Odo,body2World,laser2Body,scan_reading);
return;
}
/*
double max_rotation = 1.5 * conf_point_cloud.lines_per_point_cloud * conf_point_cloud.min_rotation_for_new_line;
double max_translation = 1.5 * conf_point_cloud.lines_per_point_cloud * conf_point_cloud.min_distance_travelled_for_new_line;
double max_head_movement = 1.5 * conf_point_cloud.lines_per_point_cloud * conf_point_cloud.min_rotation_head_for_new_line;
*/
bool add_laser_scan = true;
for( uint i = 0; i < scansWithTransforms.size(); i++)
{
Eigen::Affine3d diference( body2Odo.inverse() * scansWithTransforms.at(i).body2Odo );
Vector3d Ylaser2Body = laser2Body * Vector3d::UnitY() - laser2Body.translation();
Ylaser2Body.normalize();
Vector3d YlastLaser2Body = scansWithTransforms.back().laser2Body * Vector3d::UnitY() - scansWithTransforms.at(i).laser2Body.translation();
YlastLaser2Body.normalize();
double laserChange = acos(Ylaser2Body.dot(YlastLaser2Body));
double translation = diference.translation().norm();
double rotation = fabs(Eigen::AngleAxisd( diference.rotation() ).angle()) ;
add_laser_scan = add_laser_scan && ( rotation > conf_point_cloud.min_rotation_for_new_line || translation > conf_point_cloud.min_distance_travelled_for_new_line || laserChange > conf_point_cloud.min_rotation_head_for_new_line);
//if the distance is too big, the old laser scan is no longer consistent.
// if( rotation > max_rotation|| translation > max_translation || laserChange > max_head_movement)
// {
// scansWithTransforms.erase(scansWithTransforms.begin() + i);
// scanCount--;
// std::cout << " IcpLocalization.cpp erasing old laser scan " << std::endl;
// }
/* std::cout <<" add new scan " << add_laser_scan << std::endl;
std::cout << "\t translation " << (translation > conf_point_cloud.min_distance_travelled_for_new_line)<< " " << translation << " > " << conf_point_cloud.min_distance_travelled_for_new_line << std::endl;
std::cout << "\t rotation " << (rotation > conf_point_cloud.min_rotation_for_new_line) << " " << rotation * 180 / M_PI << " > " << conf_point_cloud.min_rotation_for_new_line * 180 / M_PI << std::endl;
std::cout << "\t head " << (laserChange > conf_point_cloud.min_rotation_head_for_new_line) << " "<< laserChange * 180 / M_PI << " > " << conf_point_cloud.min_rotation_head_for_new_line * 180 / M_PI<< std::endl; */
if (!add_laser_scan)
break;
}
if ( add_laser_scan )
{
addLaserScan(body2Odo,body2World,laser2Body,scan_reading);
}
return;
}
Example 6: transformPolygon
void PolygonArrayTransformer::transformPolygon(const Eigen::Affine3d& transform,
const geometry_msgs::PolygonStamped& polygon,
geometry_msgs::PolygonStamped& result)
{
result.header = polygon.header;
result.header.frame_id = frame_id_;
for (size_t i = 0; i < polygon.polygon.points.size(); i++) {
Eigen::Vector4d point;
point[0] = polygon.polygon.points[i].x;
point[1] = polygon.polygon.points[i].y;
point[2] = polygon.polygon.points[i].z;
point[3] = 1; // homogeneous coordinate
Eigen::Vector4d transformed_point_eigen = transform.inverse() * point;
geometry_msgs::Point32 transformed_point;
transformed_point.x = transformed_point_eigen[0];
transformed_point.y = transformed_point_eigen[1];
transformed_point.z = transformed_point_eigen[2];
result.polygon.points.push_back(transformed_point);
}
}
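A side note on this example (an observation, not part of the original project): an Eigen::Affine3d can be applied directly to an Eigen::Vector3d, which performs the same rotation-plus-translation without building the homogeneous 4-vector by hand. A sketch of an equivalent loop body, assuming the same transform and polygon variables as above:
Eigen::Vector3d pt_in(polygon.polygon.points[i].x,
                      polygon.polygon.points[i].y,
                      polygon.polygon.points[i].z);
Eigen::Vector3d pt_out = transform.inverse() * pt_in;  // same math as the Vector4d version
geometry_msgs::Point32 transformed_point;
transformed_point.x = pt_out.x();
transformed_point.y = pt_out.y();
transformed_point.z = pt_out.z();
result.polygon.points.push_back(transformed_point);
In a real loop one would also hoist transform.inverse() out of the loop, since it does not change per point.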
Example 7: unpack_goal_pose
bool ArmMotionInterface::unpack_goal_pose(void) {
geometry_msgs::PoseStamped poseStamped_goal = request_.poseStamped_goal;
Eigen::Vector3d goal_origin;
goal_origin[0] = poseStamped_goal.pose.position.x;
goal_origin[1] = poseStamped_goal.pose.position.y;
goal_origin[2] = poseStamped_goal.pose.position.z;
a_tool_end_.translation() = goal_origin;
Eigen::Quaterniond quat;
quat.x() = poseStamped_goal.pose.orientation.x;
quat.y() = poseStamped_goal.pose.orientation.y;
quat.z() = poseStamped_goal.pose.orientation.z;
quat.w() = poseStamped_goal.pose.orientation.w;
Eigen::Matrix3d R_goal(quat);
a_tool_end_.linear() = R_goal;
a_flange_end_ = a_tool_end_ * A_tool_wrt_flange_.inverse();
return true;
}
Example 8: checkNearPose
bool CaptureStereoSynchronizer::checkNearPose(
const geometry_msgs::Pose& new_pose)
{
Eigen::Affine3d new_affine;
tf::poseMsgToEigen(new_pose, new_affine);
for (size_t i = 0; i < poses_.size(); i++) {
// convert pose into eigen
Eigen::Affine3d affine;
tf::poseMsgToEigen(poses_[i], affine);
// compute difference
Eigen::Affine3d diff = affine.inverse() * new_affine;
double positional_difference = diff.translation().norm();
if (positional_difference < positional_bin_size_) {
Eigen::AngleAxisd rotational_difference(diff.rotation());
if (rotational_difference.angle() < rotational_bin_size_) {
return true;
}
}
}
return false;
}
Example 9: inPointCallback
void inPointCallback(const geometry_msgs::Point& pt_msg) {
Eigen::Affine3d des_gripper1_wrt_base;
g_des_point(0) = pt_msg.x;
g_des_point(1) = pt_msg.y;
g_des_point(2) = pt_msg.z;
cout << "received des point = " << g_des_point.transpose() << endl;
g_des_gripper_affine1.translation() = g_des_point;
//convert this to base coords:
g_des_gripper1_wrt_base = g_affine_lcamera_to_psm_one.inverse() * g_des_gripper_affine1;
//try computing IK:
if (g_ik_solver.ik_solve(g_des_gripper1_wrt_base)) {
ROS_INFO("found IK soln");
g_got_new_pose = true;
g_q_vec1_start = g_q_vec1_goal;
g_q_vec1_goal = g_ik_solver.get_soln();
cout << g_q_vec1_goal.transpose() << endl;
g_trajectory_point1 = g_trajectory_point2; //former goal is new start
for (int i = 0; i < 6; i++) {
g_trajectory_point2.positions[i] = g_q_vec1_goal[i]; //leave psm2 angles alone; just update psm1 goal angles
}
//gripper_affines_wrt_camera.push_back(affine_gripper_frame_wrt_camera_frame_);
// des_trajectory.joint_names.push_back("right_s0"); //yada-yada should fill in names
g_des_trajectory.header.stamp = ros::Time::now();
g_des_trajectory.points[0] = g_des_trajectory.points[1]; //former goal is new start
g_des_trajectory.points[1] = g_trajectory_point2;
// copy traj to goal:
g_goal.trajectory = g_des_trajectory;
} else {
ROS_WARN("NO IK SOLN FOUND");
}
}
Example 10:
// static BITBOTS_INLINE Eigen::Matrix4d inverse_chain_matrix(const Eigen::Matrix4d chain_matrix) {
// Eigen::Matrix4d inverse;
// inverse.block<3, 3>(0, 0) = chain_matrix.block<3, 3>(0, 0).inverse();
// inverse.col(3).head<3>() = -(inverse.block<3, 3>(0, 0) * chain_matrix.col(3).head<3>());
// inverse.row(3) = Eigen::RowVector4d(0, 0, 0, 1);
// return inverse;
// }
static BITBOTS_INLINE Eigen::Affine3d inverse_chain_matrix(const Eigen::Affine3d& chain_matrix) {
return chain_matrix.inverse();
}
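The commented-out block above inverts a rigid 4x4 chain matrix by reusing the rotation block; with Eigen's Transform class the same shortcut can be requested explicitly through the TransformTraits hint. A sketch, valid only if chain_matrix is truly rigid (pure rotation plus translation):
static BITBOTS_INLINE Eigen::Affine3d inverse_chain_matrix(const Eigen::Affine3d& chain_matrix) {
  // Eigen::Isometry tells inverse() that the linear part is a rotation,
  // so it can use the transpose instead of a general matrix inverse.
  return chain_matrix.inverse(Eigen::Isometry);
}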
Example 11: callback
/**
* This is where the localization is done... not really callback in the offline version
*/
void callback(const sensor_msgs::LaserScan &scan, tf::Transform odo_pose, tf::Transform state_pose)
{
static int counter = 0;
counter++;
static tf::TransformListener tf_listener;
double looptime = TT.Tac();
TT.Tic();
fprintf(stderr,"Lt( %.1lfms %.1lfHz seq:%d) -",looptime*1000,1.0/looptime,scan.header.seq);
if(has_sensor_offset_set == false) return;
double gx,gy,gyaw,x,y,yaw;
///Get the ground truth
gyaw = tf::getYaw(state_pose.getRotation());
gx = state_pose.getOrigin().x();
gy = state_pose.getOrigin().y();
///Get the odometry
yaw = tf::getYaw(odo_pose.getRotation());
x = odo_pose.getOrigin().x();
y = odo_pose.getOrigin().y();
mrpt::utils::CTicTac tictac;
tictac.Tic();
int N =(scan.angle_max - scan.angle_min)/scan.angle_increment; ///< number of scan lines
/////
/// Pose conversions
////
Eigen::Affine3d T = getAsAffine(x,y,yaw);
Eigen::Affine3d Tgt = getAsAffine(gx,gy,gyaw);
/**
* We are now using the information from the ground truth to initialize the filter
*/
if(isFirstLoad){
fprintf(stderr,"Initializing to (%lf, %lf, %lf)\n",gx,gy,gyaw);
///Initialize the filter with 1m^2 variance in position and 20deg in heading
ndtmcl->initializeFilter(gx, gy,gyaw,1.0, 1.0, 20.0*M_PI/180.0, 450);
Told = T;
Todo = Tgt;
}
///Compute the differential motion between two time steps
Eigen::Affine3d Tmotion = Told.inverse() * T;
Todo = Todo*Tmotion;
Told = T;
///Get the laser pose with respect to the base
float dy =offy;
float dx = offx;
float alpha = atan2(dy,dx);
float L = sqrt(dx*dx+dy*dy);
///Laser pose in base frame
float lpx = L * cos(alpha);
float lpy = L * sin(alpha);
float lpa = offa;
///Laser scan to PointCloud transformed with respect to the base
pcl::PointCloud<pcl::PointXYZ>::Ptr cloud (new pcl::PointCloud<pcl::PointXYZ>);
for (int j=0;j<N;j++){
double r = scan.ranges[j];
if(r>=scan.range_min && r<scan.range_max && r>0.3 && r<20.0){
double a = scan.angle_min + j*scan.angle_increment;
pcl::PointXYZ pt;
pt.x = r*cos(a+lpa)+lpx;
pt.y = r*sin(a+lpa)+lpy;
pt.z = 0.1+0.02 * (double)rand()/(double)RAND_MAX;
cloud->push_back(pt);
}
}
//////////////////////////////////////////////////////////////////////////////////////////////////////////////////////
/// Now we have the differential motion and pointcloud -- Lets do MCL
//////////////////////////////////////////////////////////////////////////////////////////////////////////////////////
ndtmcl->updateAndPredict(Tmotion, *cloud); ///< does the prediction, update and resampling steps (see ndt_mcl.hpp)
Eigen::Vector3d dm = ndtmcl->getMean(); ///<Maximum aposteriori estimate of the pose
Eigen::Matrix3d cov = ndtmcl->pf.getDistributionVariances(); ///distribution variances
//////////////////////////////////////////////////////////////////////////////////////////////////////////////////////
double Time = tictac.Tac();
fprintf(stderr,"Time elapsed %.1lfms (%lf %lf %lf) \n",Time*1000,dm[0],dm[1],dm[2]);
isFirstLoad = false;
//////////////////////////////////////////////////////////////////////////////////////////////////////////////////////
//If visualization desired
#ifdef USE_VISUALIZATION_DEBUG
///The sensor pose in the global frame for visualization
Eigen::Vector3d origin(dm[0] + L * cos(dm[2]+alpha),dm[1] + L * sin(dm[2]+alpha),0.1);
//......... part of the code omitted here .........
Example 12: getTransform
//......... part of the code omitted here .........
refinement->viewer = viewer;
refinement->visualizationLvl = 0;
//for(int r = 0; r < 1000; r++){
// while(true){
// double rx = 2.0*M_PI*0.0001*double(rand()%10000);
// double ry = 2.0*M_PI*0.0001*double(rand()%10000);
// double rz = 2.0*M_PI*0.0001*double(rand()%10000);
//double stop = 0;
double step = 0.1+2.0*M_PI/5;
for(double rx = 0; rx < 2.0*M_PI; rx += step) {
for(double ry = 0; ry < 2.0*M_PI; ry += step)
for(double rz = 0; rz < 2.0*M_PI; rz += step) {
printf("rx: %f ry: %f rz: %f\n",rx,ry,rz);
double start = getTime();
double meantime = 999999999999;
if(r != 0) {
meantime = sumtimeSum/double(sumtimeOK+1.0);
}
refinement->maxtime = std::min(0.5,3*meantime);
Eigen::VectorXd startparam = Eigen::VectorXd(3);
startparam(0) = rx;
startparam(1) = ry;
startparam(2) = rz;
Eigen::Affine3d randomrot = Eigen::Affine3d::Identity();
randomrot = Eigen::AngleAxisd(rx, Eigen::Vector3d::UnitX()) *
Eigen::AngleAxisd(ry, Eigen::Vector3d::UnitY()) *
Eigen::AngleAxisd(rz, Eigen::Vector3d::UnitZ());
Eigen::Affine3d current_guess = Ymean*randomrot*Xmean.inverse();//*Ymean;
refinement->target_points = 250;
FusionResults fr = refinement->getTransform(current_guess.matrix());
//fr.timeout = timestopped;
double stoptime = getTime();
sumtime += stoptime-start;
if(!fr.timeout) {
sumtimeSum += stoptime-start;
sumtimeOK++;
}
//printf("sumtime: %f\n",sumtime);
stop = fr.score;
Eigen::Matrix4d m = fr.guess;
current_guess = m;//fr.guess;
float m00 = current_guess(0,0);
float m01 = current_guess(0,1);
float m02 = current_guess(0,2);
float m03 = current_guess(0,3);
float m10 = current_guess(1,0);
float m11 = current_guess(1,1);
float m12 = current_guess(1,2);
float m13 = current_guess(1,3);
float m20 = current_guess(2,0);
float m21 = current_guess(2,1);
float m22 = current_guess(2,2);
float m23 = current_guess(2,3);
Eigen::Matrix<double, 3, Eigen::Dynamic> Xsmall;
Xsmall.resize(Eigen::NoChange,s_nr_data/stepxsmall);
for(unsigned int i = 0; i < s_nr_data/stepxsmall; i++) {
float x = src->data(0,i*stepxsmall);
Example 13: transformationdiff
double transformationdiff(Eigen::Affine3d & A, Eigen::Affine3d & B, double rotationweight) {
Eigen::Affine3d C = A.inverse()*B;
double r = fabs(1-C(0,0))+fabs(C(0,1))+fabs(C(0,2)) + fabs(C(1,0))+fabs(1-C(1,1))+fabs(C(1,2)) + fabs(C(2,0))+fabs(C(2,1))+fabs(1-C(2,2));
double t = sqrt(C(0,3)*C(0,3)+C(1,3)*C(1,3)+C(2,3)*C(2,3));
return r*rotationweight+t;
}
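A hedged usage sketch of the helper above; the two poses and the rotation weight are made up, and a pair of nearly identical transforms should yield a small score:
Eigen::Affine3d A = Eigen::Translation3d(1.0, 0.0, 0.0) *
                    Eigen::AngleAxisd(0.10, Eigen::Vector3d::UnitZ());
Eigen::Affine3d B = Eigen::Translation3d(1.1, 0.0, 0.0) *
                    Eigen::AngleAxisd(0.15, Eigen::Vector3d::UnitZ());
double score = transformationdiff(A, B, 1.0);  // small value: the poses are close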
Example 14: callback
void callback(const PointPlusConstPtr& robot_point, const PointPlusConstPtr& mass_point)
{
ROS_DEBUG("Synchronized Callback triggered");
puppeteer_msgs::PointPlus r_pt, m_pt;
// if not in run or calibrate, just exit
if (run_system_logic())
return;
// let's first correct for the radii of the objects:
r_pt = *robot_point;
r_pt = correct_vals(r_pt, DEFAULT_ROBOT_RADIUS);
m_pt = *mass_point;
m_pt = correct_vals(m_pt, DEFAULT_MASS_RADIUS);
if (calibrated_flag == false)
{
if (calibrate_count == 0)
{
calibrate_count++;
robot_cal_pos << 0, 0, 0;
mass_cal_pos << 0, 0, 0;
robot_start_pos << 0, 0, 0;
mass_start_pos << 0, 0, 0;
return;
}
else if (calibrate_count <= NUM_CALIBRATES)
{
ROS_INFO_THROTTLE(1, "Calibrating...");
if (!m_pt.error && !r_pt.error)
{
// then we got both objects successfully:
robot_cal_pos(0) += r_pt.x;
robot_cal_pos(1) += r_pt.y;
robot_cal_pos(2) += r_pt.z;
mass_cal_pos(0) += m_pt.x;
mass_cal_pos(1) += m_pt.y;
mass_cal_pos(2) += m_pt.z;
calibrate_count++;
}
return;
}
else
{
ROS_INFO("Calibration completed successfully!");
// if here, calibration is done!
// first find averages:
robot_cal_pos = (robot_cal_pos/((double) NUM_CALIBRATES));
mass_cal_pos = (mass_cal_pos/((double) NUM_CALIBRATES));
// now set start positions:
mass_start_pos << q0.xm, q0.ym, 0;
robot_start_pos << q0.xr, H0, 0;
// now get the transform:
mass_cal_pos -= mass_start_pos;
robot_cal_pos -= robot_start_pos;
// let's check if there is an error here:
////////////////////////
// ERROR CHECK //
////////////////////////
// if no error:
// cal_pos = (mass_cal_pos + robot_cal_pos)/2.0;
cal_pos = mass_cal_pos;
// reset vars:
calibrate_count = 0;
calibrated_flag = true;
}
}
// send global frame transforms:
send_global_transforms(m_pt.header.stamp);
// transform both of the points into the
// optimization_frame, build the output message, and
// publish
tf::Transform transform;
PlanarSystemConfig qmeas;
Eigen::Affine3d gwo;
Eigen::Vector3d robot_trans, mass_trans;
transform.setOrigin(tf::Vector3(cal_pos(0),
cal_pos(1), cal_pos(2)));
transform.setRotation(tf::Quaternion(0,0,0,1));
tf::TransformTFToEigen(transform, gwo);
gwo = gwo.inverse();
// transform robot:
robot_trans << r_pt.x, r_pt.y, r_pt.z;
robot_trans = gwo*robot_trans;
// transform mass:
mass_trans << m_pt.x, m_pt.y, m_pt.z;
mass_trans = gwo*mass_trans;
// calculate string length:
mass_trans(2) = 0; robot_trans(2) = 0;
double rad = (mass_trans - robot_trans).norm();
qmeas.xm = mass_trans(0);
qmeas.ym = mass_trans(1);
qmeas.xr = robot_trans(0);
qmeas.r = rad;
qmeas.mass_err = m_pt.error;
qmeas.robot_err = r_pt.error;
qmeas.header.stamp = m_pt.header.stamp;
//......... part of the code omitted here .........
Example 15: executeCB
void CartMoveActionServer::executeCB(const actionlib::SimpleActionServer<cwru_action::cart_moveAction>::GoalConstPtr& goal) {
ROS_INFO("in executeCB of CartMoveActionServer");
cart_result_.err_code=0;
cart_move_as_.isActive();
//unpack the necessary info:
gripper_ang1_ = goal->gripper_jaw_angle1;
gripper_ang2_ = goal->gripper_jaw_angle2;
arrival_time_ = goal->move_time;
// interpret the desired gripper poses:
geometry_msgs::PoseStamped des_pose_gripper1 = goal->des_pose_gripper1;
geometry_msgs::PoseStamped des_pose_gripper2 = goal->des_pose_gripper2;
// convert the above to affine objects:
des_gripper1_affine_wrt_lcamera_ = transformPoseToEigenAffine3d(des_pose_gripper1.pose);
cout<<"gripper1 desired pose; "<<endl;
cout<<des_gripper1_affine_wrt_lcamera_.linear()<<endl;
cout<<"origin: "<<des_gripper1_affine_wrt_lcamera_.translation().transpose()<<endl;
des_gripper2_affine_wrt_lcamera_ = transformPoseToEigenAffine3d(des_pose_gripper2.pose);
cout<<"gripper2 desired pose; "<<endl;
cout<<des_gripper2_affine_wrt_lcamera_.linear()<<endl;
cout<<"origin: "<<des_gripper2_affine_wrt_lcamera_.translation().transpose()<<endl;
//do IK to convert these to joint angles:
//Eigen::VectorXd q_vec1,q_vec2;
Vectorq7x1 q_vec1,q_vec2;
q_vec1.resize(7);
q_vec2.resize(7);
trajectory_msgs::JointTrajectory des_trajectory; // an empty trajectory
des_trajectory.points.clear(); // can clear components, but not entire trajectory_msgs
des_trajectory.joint_names.clear(); //could put joint names in...but I assume a fixed order and fixed size, so this is unnecessary
// if using wsn's trajectory streamer action server
des_trajectory.header.stamp = ros::Time::now();
trajectory_msgs::JointTrajectoryPoint trajectory_point; //,trajectory_point2;
trajectory_point.positions.resize(14);
ROS_INFO("\n");
ROS_INFO("stored previous command to gripper one: ");
cout<<gripper1_affine_last_commanded_pose_.linear()<<endl;
cout<<"origin: "<<gripper1_affine_last_commanded_pose_.translation().transpose()<<endl;
// first, reiterate previous command:
// this could be easier, if saved previous joint-space trajectory point...
des_gripper_affine1_ = affine_lcamera_to_psm_one_.inverse()*gripper1_affine_last_commanded_pose_; //previous pose
ik_solver_.ik_solve(des_gripper_affine1_); //convert desired pose into equiv joint displacements
q_vec1 = ik_solver_.get_soln();
q_vec1(6) = last_gripper_ang1_; // include desired gripper opening angle
des_gripper_affine2_ = affine_lcamera_to_psm_two_.inverse()*gripper2_affine_last_commanded_pose_; //previous pose
ik_solver_.ik_solve(des_gripper_affine2_); //convert desired pose into equiv joint displacements
q_vec2 = ik_solver_.get_soln();
cout<<"q_vec1 of stored pose: "<<endl;
for (int i=0;i<6;i++) {
cout<<q_vec1[i]<<", ";
}
cout<<endl;
q_vec2(6) = last_gripper_ang2_; // include desired gripper opening angle
for (int i=0;i<7;i++) {
trajectory_point.positions[i] = q_vec1(i);
trajectory_point.positions[i+7] = q_vec2(i);
}
cout<<"start traj pt: "<<endl;
for (int i=0;i<14;i++) {
cout<<trajectory_point.positions[i]<<", ";
}
cout<<endl;
trajectory_point.time_from_start = ros::Duration(0.0); // start time set to 0
// PUSH IN THE START POINT:
des_trajectory.points.push_back(trajectory_point);
// compute and append the goal point, in joint space trajectory:
des_gripper_affine1_ = affine_lcamera_to_psm_one_.inverse()*des_gripper1_affine_wrt_lcamera_;
ROS_INFO("desired gripper one location in base frame: ");
cout<<"gripper1 desired pose; "<<endl;
cout<<des_gripper_affine1_.linear()<<endl;
cout<<"origin: "<<des_gripper_affine1_.translation().transpose()<<endl;
ik_solver_.ik_solve(des_gripper_affine1_); //convert desired pose into equiv joint displacements
q_vec1 = ik_solver_.get_soln();
q_vec1(6) = gripper_ang1_; // include desired gripper opening angle
des_gripper_affine2_ = affine_lcamera_to_psm_two_.inverse()*des_gripper2_affine_wrt_lcamera_;
ik_solver_.ik_solve(des_gripper_affine2_); //convert desired pose into equiv joint displacements
q_vec2 = ik_solver_.get_soln();
cout<<"q_vec1 of goal pose: "<<endl;
for (int i=0;i<6;i++) {
cout<<q_vec1[i]<<", ";
}
cout<<endl;
q_vec2(6) = gripper_ang2_;
for (int i=0;i<7;i++) {
trajectory_point.positions[i] = q_vec1(i);
trajectory_point.positions[i+7] = q_vec2(i);
}
//......... part of the code omitted here .........