This article collects typical usage examples of the C++ method Eigen::Affine3f::rotation. If you have been wondering what Affine3f::rotation does, how to call it, or what real code that uses it looks like, the curated method examples below may help. You can also read further about the enclosing class, Eigen::Affine3f.
Fifteen code examples of the Affine3f::rotation method follow, sorted by popularity by default. You can upvote the examples you find useful; your ratings help surface better C++ code examples.
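Before the examples, here is a minimal, self-contained sketch (written for this article, not taken from any of the projects below) of what Affine3f::rotation() returns: the 3x3 rotation part of the transform, as an Eigen::Matrix3f. For a rigid transform (rotation plus translation) this equals linear(); if the transform also contains scaling, Eigen extracts the rotation factor from the linear part. Points are mapped with the full transform, while direction vectors such as normals or axes are mapped with rotation() only, which is the pattern most of the examples below follow.

#include <iostream>
#include <Eigen/Geometry>

int main()
{
  // Build a transform: rotate 90 degrees about Z, then translate by (1, 2, 3).
  const float kHalfPi = 1.57079632679f;
  Eigen::Affine3f pose = Eigen::Translation3f(1.0f, 2.0f, 3.0f)
                       * Eigen::AngleAxisf(kHalfPi, Eigen::Vector3f::UnitZ());

  // rotation() returns the 3x3 rotation matrix of the transform.
  Eigen::Matrix3f R = pose.rotation();

  Eigen::Vector3f point(1.0f, 0.0f, 0.0f);
  Eigen::Vector3f direction(1.0f, 0.0f, 0.0f);

  // Points use the full transform (rotation + translation); directions use rotation() only.
  std::cout << "point mapped by the full transform:\n" << (pose * point) << std::endl;
  std::cout << "direction mapped by rotation() only:\n" << (R * direction) << std::endl;
  return 0;
}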
Example 1:
void
transform_cylinder(CylinderPtr & c_ptr, Eigen::Affine3f& trafo)
{
  Cylinder & c = *c_ptr;
  for (int i = 0; i < (int) c.contours.size(); ++i) {
    for (int j = 0; j < (int) c.contours[i].size(); ++j) {
      c.contours[i][j] = trafo * c.contours[i][j];
    }
  }
  c.origin_ = trafo * c.origin_;
  std::cout << "transformed origin\n" << c.origin_ << std::endl;
  for (int i = 0; i < 3; ++i) {
    c.axes_[i] = trafo.rotation() * c.axes_[i];
    // std::cout<<"axis -"<<i<<" \n"<<c.axes_[i]<<std::endl;
  }
  c.normal = trafo.rotation() * c.normal;
  float roll, pitch, yaw, x, y, z;
  pcl::getTranslationAndEulerAngles(trafo, x, y, z, roll, pitch, yaw);
  // std::cout<<" x= "<<x<<" y= "<<z<<" z= "<<z<<" roll= "<<roll<<" pitch= "<<pitch<<" yaw= "<<yaw<<std::endl;
  c.assignMembers(c.axes_[1], c.axes_[2], c.origin_); // configure unrolled polygon
}
Example 2: projectOnPlane
void ConvexPolygon::projectOnPlane(
  const Eigen::Affine3f& pose, Eigen::Affine3f& output)
{
  Eigen::Vector3f p(pose.translation());
  Eigen::Vector3f output_p;
  projectOnPlane(p, output_p);
  // ROS_INFO("[ConvexPolygon::projectOnPlane] p: [%f, %f, %f]",
  //          p[0], p[1], p[2]);
  // ROS_INFO("[ConvexPolygon::projectOnPlane] output_p: [%f, %f, %f]",
  //          output_p[0], output_p[1], output_p[2]);
  Eigen::Quaternionf rot;
  rot.setFromTwoVectors(pose.rotation() * Eigen::Vector3f::UnitZ(),
                        coordinates().rotation() * Eigen::Vector3f::UnitZ());
  Eigen::Quaternionf coords_rot(coordinates().rotation());
  Eigen::Quaternionf pose_rot(pose.rotation());
  // ROS_INFO("[ConvexPolygon::projectOnPlane] rot: [%f, %f, %f, %f]",
  //          rot.x(), rot.y(), rot.z(), rot.w());
  // ROS_INFO("[ConvexPolygon::projectOnPlane] coords_rot: [%f, %f, %f, %f]",
  //          coords_rot.x(), coords_rot.y(), coords_rot.z(), coords_rot.w());
  // ROS_INFO("[ConvexPolygon::projectOnPlane] pose_rot: [%f, %f, %f, %f]",
  //          pose_rot.x(), pose_rot.y(), pose_rot.z(), pose_rot.w());
  // ROS_INFO("[ConvexPolygon::projectOnPlane] normal: [%f, %f, %f]", normal_[0], normal_[1], normal_[2]);
  // Eigen::Affine3f::Identity() *
  // output.translation() = Eigen::Translation3f(output_p);
  // output.rotation() = rot * pose.rotation();
  // output = Eigen::Translation3f(output_p) * rot * pose.rotation();
  output = Eigen::Affine3f(rot * pose.rotation());
  output.pretranslate(output_p);
  // Eigen::Vector3f projected_point = output * Eigen::Vector3f(0, 0, 0);
  // ROS_INFO("[ConvexPolygon::projectOnPlane] output: [%f, %f, %f]",
  //          projected_point[0], projected_point[1], projected_point[2]);
}
Example 3: setViewerPose
void setViewerPose (pcl::visualization::PCLVisualizer& viewer, const Eigen::Affine3f& viewer_pose) {
  Eigen::Vector3f pos_vector = viewer_pose * Eigen::Vector3f(10, 10, 10);
  // Note: rotation() * (0, 0, 0) is always the zero vector, so the camera looks at the origin.
  Eigen::Vector3f look_at_vector = viewer_pose.rotation () * Eigen::Vector3f(0, 0, 0);
  Eigen::Vector3f up_vector = viewer_pose.rotation () * Eigen::Vector3f(0, -1, 0);
  viewer.setCameraPosition (pos_vector[0], pos_vector[1], pos_vector[2],
                            look_at_vector[0], look_at_vector[1], look_at_vector[2],
                            up_vector[0], up_vector[1], up_vector[2]);
}
Example 4: rotation_diff_mat
template <typename PointSource, typename PointTarget> bool
pcl::PPFRegistration<PointSource, PointTarget>::posesWithinErrorBounds (Eigen::Affine3f &pose1,
                                                                        Eigen::Affine3f &pose2)
{
  float position_diff = (pose1.translation () - pose2.translation ()).norm ();
  Eigen::AngleAxisf rotation_diff_mat (pose1.rotation ().inverse () * pose2.rotation ());
  float rotation_diff_angle = fabsf (rotation_diff_mat.angle ());

  if (position_diff < clustering_position_diff_threshold_ && rotation_diff_angle < clustering_rotation_diff_threshold_)
    return true;
  else return false;
}
Example 5: modelToScene
transformation objectModelSV::modelToScene (const int modelPointIndex, const Eigen::Affine3f & transformSceneToGlobal, const float alpha)
{
  Eigen::Vector3f modelPoint = modelCloud->points[modelPointIndex].getVector3fMap();
  Eigen::Vector3f modelNormal = modelCloud->points[modelPointIndex].getNormalVector3fMap ();

  // Get transformation from model local frame to scene local frame
  Eigen::Affine3f completeTransform = transformSceneToGlobal.inverse () * Eigen::AngleAxisf(alpha, Eigen::Vector3f::UnitX ()) * modelCloudTransformations[modelPointIndex];

  Eigen::Quaternion<float> rotationQ = Eigen::Quaternion<float>(completeTransform.rotation());

  // if object is symmetric remove yaw rotation (assume symmetry around z axis)
  if (symmetric)
  {
    Eigen::Vector3f eulerAngles;
    // first: [0] -> rot. around x (roll), [1] -> rot. around y (pitch), [2] -> rot. around z (yaw)
    quaternionToEuler(rotationQ, eulerAngles[0], eulerAngles[1], eulerAngles[2]);
    //pcl::getEulerAngles(completeTransform, eulerAngles[0], eulerAngles[1], eulerAngles[2]);
    //eulerAngles[2] = 0.0;
    eulerToQuaternion(rotationQ, eulerAngles[0], eulerAngles[1], eulerAngles[2]);
    //quaternionToEuler(rotationQ, eulerAngles[2], eulerAngles[1], eulerAngles[2]);
    //std::cout << "EULER ANGLES: " << eulerAngles << std::endl;
  }
  //std::cout << "translation: " << completeTransform.rotation().matrix() << std::endl;

  return transformation(rotationQ, Eigen::Translation3f(completeTransform.translation()));
}
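The quaternionToEuler / eulerToQuaternion helpers used above are project-specific, and the line that would actually zero the yaw angle is commented out. For reference, a roughly equivalent yaw-removal step can be written with Eigen alone; the sketch below is illustrative and is not part of objectModelSV:

// Illustrative sketch: strip the rotation about Z (yaw) from a quaternion,
// using a ZYX (yaw-pitch-roll) decomposition. Hypothetical helper, not from the project above.
Eigen::Quaternionf stripYaw (const Eigen::Quaternionf& q)
{
  Eigen::Vector3f ypr = q.toRotationMatrix().eulerAngles(2, 1, 0); // [yaw, pitch, roll]
  return Eigen::Quaternionf(
      Eigen::AngleAxisf(0.0f,   Eigen::Vector3f::UnitZ()) *  // yaw set to zero
      Eigen::AngleAxisf(ypr[1], Eigen::Vector3f::UnitY()) *
      Eigen::AngleAxisf(ypr[2], Eigen::Vector3f::UnitX()));
}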
Example 6:
void
pcl::gpu::kinfuLS::KinfuTracker::setInitialCameraPose (const Eigen::Affine3f& pose)
{
  init_Rcam_ = pose.rotation ();
  init_tcam_ = pose.translation ();
  //reset (); // (already called in constructor)
}
Example 7: setBasicSuccessors
void FootstepGraph::setBasicSuccessors(
  std::vector<Eigen::Affine3f> left_to_right_successors)
{
  successors_from_left_to_right_ = left_to_right_successors;
  for (size_t i = 0; i < successors_from_left_to_right_.size(); i++) {
    Eigen::Affine3f transform = successors_from_left_to_right_[i];
    float roll, pitch, yaw;
    pcl::getEulerAngles(transform, roll, pitch, yaw);
    Eigen::Vector3f translation = transform.translation();
    Eigen::Affine3f flipped_transform
      = Eigen::Translation3f(translation[0], -translation[1], translation[2])
      * Eigen::Quaternionf(Eigen::AngleAxisf(-yaw, Eigen::Vector3f::UnitZ()));
    successors_from_right_to_left_.push_back(flipped_transform);
  }

  // max_successor_distance_
  for (size_t i = 0; i < successors_from_left_to_right_.size(); i++) {
    Eigen::Affine3f transform = successors_from_left_to_right_[i];
    //double dist = transform.translation().norm();
    double dist = transform.translation()[0]; // Only consider x
    if (dist > max_successor_distance_) {
      max_successor_distance_ = dist;
    }
    double rot = Eigen::AngleAxisf(transform.rotation()).angle();
    if (rot > max_successor_rotation_) {
      max_successor_rotation_ = rot;
    }
  }
}
Example 8: isGoal
bool FootstepGraph::isGoal(StatePtr state)
{
  FootstepState::Ptr goal = getGoal(state->getLeg());
  if (publish_progress_) {
    jsk_footstep_msgs::FootstepArray msg;
    msg.header.frame_id = "odom"; // TODO fixed frame_id
    msg.header.stamp = ros::Time::now();
    msg.footsteps.push_back(*state->toROSMsg());
    pub_progress_.publish(msg);
  }
  Eigen::Affine3f pose = state->getPose();
  Eigen::Affine3f goal_pose = goal->getPose();
  Eigen::Affine3f transformation = pose.inverse() * goal_pose;

  if ((parameters_.goal_pos_thr > transformation.translation().norm()) &&
      (parameters_.goal_rot_thr > std::abs(Eigen::AngleAxisf(transformation.rotation()).angle()))) {
    // check collision
    if (state->getLeg() == jsk_footstep_msgs::Footstep::LEFT) {
      if (right_goal_state_->crossCheck(state)) {
        return true;
      }
    } else if (state->getLeg() == jsk_footstep_msgs::Footstep::RIGHT) {
      if (left_goal_state_->crossCheck(state)) {
        return true;
      }
    }
  }
  return false;
}
Example 9: saveAllPoses
void Evaluation::saveAllPoses(const pcl::gpu::KinfuTracker& kinfu, int frame_number, const std::string& logfile) const
{
  size_t total = accociations_.empty() ? depth_stamps_and_filenames_.size() : accociations_.size();

  if (frame_number < 0)
    frame_number = (int)total;
  frame_number = std::min(frame_number, (int)kinfu.getNumberOfPoses());

  cout << "Writing " << frame_number << " poses to " << logfile << endl;

  ofstream path_file_stream(logfile.c_str());
  path_file_stream.setf(ios::fixed, ios::floatfield);

  for(int i = 0; i < frame_number; ++i)
  {
    Eigen::Affine3f pose = kinfu.getCameraPose(i);
    Eigen::Quaternionf q(pose.rotation());
    Eigen::Vector3f t = pose.translation();

    double stamp = accociations_.empty() ? depth_stamps_and_filenames_[i].first : accociations_[i].time1;

    path_file_stream << stamp << " ";
    path_file_stream << t[0] << " " << t[1] << " " << t[2] << " ";
    path_file_stream << q.x() << " " << q.y() << " " << q.z() << " " << q.w() << endl;
  }
}
Example 10:
bool reduce_measurement_g2o::find_transform(const color_keyframe::Ptr & fi,
    const color_keyframe::Ptr & fj, Sophus::SE3f & t) {
  std::vector<cv::KeyPoint> keypoints_i, keypoints_j;
  pcl::PointCloud<pcl::PointXYZ> keypoints3d_i, keypoints3d_j;
  cv::Mat descriptors_i, descriptors_j;

  compute_features(fi->get_i(0), fi->get_d(0), fi->get_intrinsics(0), fd, de,
      keypoints_i, keypoints3d_i, descriptors_i);
  compute_features(fj->get_i(0), fj->get_d(0), fj->get_intrinsics(0), fd, de,
      keypoints_j, keypoints3d_j, descriptors_j);

  std::vector<cv::DMatch> matches, matches_filtered;
  dm->match(descriptors_j, descriptors_i, matches);

  Eigen::Affine3f transform;
  std::vector<bool> inliers;
  bool res = estimate_transform_ransac(keypoints3d_j, keypoints3d_i, matches,
      100, 0.03 * 0.03, 20, transform, inliers);

  t = Sophus::SE3f(transform.rotation(), transform.translation());
  return res;
}
Example 11: reset
void
pcl::gpu::KinfuTracker::setInitalCameraPose (const Eigen::Affine3f& pose)
{
  init_Rcam_ = pose.rotation ();
  init_tcam_ = pose.translation ();
  reset ();
}
Example 12:
template <typename PointT> void
pcl::transformPointCloudWithNormals (const pcl::PointCloud<PointT> &cloud_in,
                                     pcl::PointCloud<PointT> &cloud_out,
                                     const Eigen::Affine3f &transform)
{
  if (&cloud_in != &cloud_out)
  {
    // Note: could be replaced by cloud_out = cloud_in
    cloud_out.header   = cloud_in.header;
    cloud_out.width    = cloud_in.width;
    cloud_out.height   = cloud_in.height;
    cloud_out.is_dense = cloud_in.is_dense;
    cloud_out.points.reserve (cloud_out.points.size ());
    cloud_out.points.assign (cloud_in.points.begin (), cloud_in.points.end ());
  }

  // If the data is dense, we don't need to check for NaN
  if (cloud_in.is_dense)
  {
    for (size_t i = 0; i < cloud_out.points.size (); ++i)
    {
      cloud_out.points[i].getVector3fMap() = transform *
                                             cloud_in.points[i].getVector3fMap ();
      // Rotate normals
      cloud_out.points[i].getNormalVector3fMap() = transform.rotation () *
                                                   cloud_in.points[i].getNormalVector3fMap ();
    }
  }
  // Dataset might contain NaNs and Infs, so check for them first.
  else
  {
    for (size_t i = 0; i < cloud_out.points.size (); ++i)
    {
      if (!pcl_isfinite (cloud_in.points[i].x) ||
          !pcl_isfinite (cloud_in.points[i].y) ||
          !pcl_isfinite (cloud_in.points[i].z))
        continue;
      cloud_out.points[i].getVector3fMap() = transform *
                                             cloud_in.points[i].getVector3fMap ();
      // Rotate normals
      cloud_out.points[i].getNormalVector3fMap() = transform.rotation () *
                                                   cloud_in.points[i].getNormalVector3fMap ();
    }
  }
}
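A side note on the normal handling above: mapping normals with rotation() is exact for rigid transforms (rotation plus translation), which is the common case here. If an affine transform also contains non-uniform scaling or shear, normals should instead be mapped with the inverse transpose of the linear part and renormalized. A generic sketch of that variant (illustrative only, not part of PCL's implementation):

// Normal mapping for a general affine transform: use the inverse transpose
// of the linear part, then renormalize.
Eigen::Matrix3f normal_matrix = transform.linear().inverse().transpose();
Eigen::Vector3f n_out = (normal_matrix * n_in).normalized();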
示例13: q
template <typename PointT> void
pcl::registration::ELCH<PointT>::compute ()
{
  if (!initCompute ())
  {
    return;
  }

  LOAGraph grb[4];

  typename boost::graph_traits<LoopGraph>::edge_iterator edge_it, edge_it_end;
  for (boost::tuples::tie (edge_it, edge_it_end) = edges (*loop_graph_); edge_it != edge_it_end; edge_it++)
  {
    for (int j = 0; j < 4; j++)
      add_edge (source (*edge_it, *loop_graph_), target (*edge_it, *loop_graph_), 1, grb[j]); //TODO add variance
  }

  double *weights[4];
  for (int i = 0; i < 4; i++)
  {
    weights[i] = new double[num_vertices (*loop_graph_)];
    loopOptimizerAlgorithm (grb[i], weights[i]);
  }

  //TODO use pose
  //Eigen::Vector4f cend;
  //pcl::compute3DCentroid (*((*loop_graph_)[loop_end_].cloud), cend);
  //Eigen::Translation3f tend (cend.head (3));
  //Eigen::Affine3f aend (tend);
  //Eigen::Affine3f aendI = aend.inverse ();

  //TODO iterate over loop_graph_
  //typename boost::graph_traits<LoopGraph>::vertex_iterator vertex_it, vertex_it_end;
  //for (boost::tuples::tie (vertex_it, vertex_it_end) = vertices (*loop_graph_); vertex_it != vertex_it_end; vertex_it++)
  for (size_t i = 0; i < num_vertices (*loop_graph_); i++)
  {
    Eigen::Vector3f t2;
    t2[0] = loop_transform_ (0, 3) * static_cast<float> (weights[0][i]);
    t2[1] = loop_transform_ (1, 3) * static_cast<float> (weights[1][i]);
    t2[2] = loop_transform_ (2, 3) * static_cast<float> (weights[2][i]);

    Eigen::Affine3f bl (loop_transform_);
    Eigen::Quaternionf q (bl.rotation ());
    Eigen::Quaternionf q2;
    q2 = Eigen::Quaternionf::Identity ().slerp (static_cast<float> (weights[3][i]), q);

    //TODO use rotation from branch start
    Eigen::Translation3f t3 (t2);
    Eigen::Affine3f a (t3 * q2);
    //a = aend * a * aendI;
    pcl::transformPointCloud (*(*loop_graph_)[i].cloud, *(*loop_graph_)[i].cloud, a);

    (*loop_graph_)[i].transform = a;
  }

  add_edge (loop_start_, loop_end_, *loop_graph_);

  deinitCompute ();
}
Example 14:
void
setViewerPose (pcl::visualization::PCLVisualizer& viewer, const Eigen::Affine3f& viewer_pose)
{
  Eigen::Vector3f pos_vector = viewer_pose * Eigen::Vector3f (0, 0, 0);
  Eigen::Vector3f look_at_vector = viewer_pose.rotation () * Eigen::Vector3f (0, 0, 1) + pos_vector;
  Eigen::Vector3f up_vector = viewer_pose.rotation () * Eigen::Vector3f (0, -1, 0);
  viewer.camera_.pos[0] = pos_vector[0];
  viewer.camera_.pos[1] = pos_vector[1];
  viewer.camera_.pos[2] = pos_vector[2];
  viewer.camera_.focal[0] = look_at_vector[0];
  viewer.camera_.focal[1] = look_at_vector[1];
  viewer.camera_.focal[2] = look_at_vector[2];
  viewer.camera_.view[0] = up_vector[0];
  viewer.camera_.view[1] = up_vector[1];
  viewer.camera_.view[2] = up_vector[2];
  viewer.updateCamera ();
}
Example 15: footstepHeuristicStraightRotation
double footstepHeuristicStraightRotation(
  SolverNode<FootstepState, FootstepGraph>::Ptr node, FootstepGraph::Ptr graph)
{
  FootstepState::Ptr state = node->getState();
  FootstepState::Ptr goal = graph->getGoal(state->getLeg());
  Eigen::Affine3f transform = state->getPose().inverse() * goal->getPose();
  return (std::abs(transform.translation().norm() / graph->maxSuccessorDistance()) +
          std::abs(Eigen::AngleAxisf(transform.rotation()).angle()) / graph->maxSuccessorRotation());
}