This article collects typical usage examples of the C++ method Eigen::Affine3f::inverse. If you are unsure what Affine3f::inverse does, how to call it, or where it appears in real code, the hand-picked examples below should help. You can also read further about the enclosing class, Eigen::Affine3f.
Fifteen code examples of Affine3f::inverse are shown below, sorted by popularity by default. You can upvote the examples you like or find useful; your ratings help the system recommend better C++ code examples.
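Before the project examples, here is a minimal, self-contained sketch (not taken from any of the projects below) of the pattern they all share: build an Eigen::Affine3f, call inverse(), and use the inverse to map a point back into the original frame. The transform values are arbitrary and chosen only for illustration.

#include <iostream>
#include <Eigen/Geometry>

int main()
{
  // Compose a rigid transform: rotate 90 degrees about Z, then translate.
  // (Values are arbitrary, for illustration only.)
  const float half_pi = 1.57079632679f;
  Eigen::Affine3f t = Eigen::Translation3f(1.0f, 2.0f, 3.0f) *
                      Eigen::AngleAxisf(half_pi, Eigen::Vector3f::UnitZ());

  // Map a point into the target frame, then back again with the inverse.
  Eigen::Vector3f p(0.5f, -0.25f, 1.0f);
  Eigen::Vector3f p_mapped = t * p;
  Eigen::Vector3f p_back   = t.inverse() * p_mapped;

  std::cout << "round-trip error: " << (p_back - p).norm() << std::endl;
  return 0;
}

By default, inverse() treats the transform as a general affine matrix. If the transform is known to be rigid (rotation plus translation), calling t.inverse(Eigen::Isometry) lets Eigen use the transpose of the rotation block instead of a general matrix inverse, which is cheaper.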
Example 1: render
void ViewController::render(const Eigen::Affine3f& parentTrans)
{
  Eigen::Affine3f trans = mCamera * parentTrans;

  // camera position, position + size
  Eigen::Vector3f viewStart = trans.inverse().translation();
  Eigen::Vector3f viewEnd = trans.inverse() * Eigen::Vector3f((float)Renderer::getScreenWidth(), (float)Renderer::getScreenHeight(), 0);

  // draw systemview
  getSystemListView()->render(trans);

  // draw gamelists
  for(auto it = mGameListViews.begin(); it != mGameListViews.end(); it++)
  {
    // clipping
    Eigen::Vector3f guiStart = it->second->getPosition();
    Eigen::Vector3f guiEnd = it->second->getPosition() + Eigen::Vector3f(it->second->getSize().x(), it->second->getSize().y(), 0);

    if(guiEnd.x() >= viewStart.x() && guiEnd.y() >= viewStart.y() &&
       guiStart.x() <= viewEnd.x() && guiStart.y() <= viewEnd.y())
      it->second->render(trans);
  }

  if(mWindow->peekGui() == this)
    mWindow->renderHelpPromptsEarly();

  // fade out
  if(mFadeOpacity)
  {
    Renderer::setMatrix(parentTrans);
    Renderer::drawRect(0, 0, Renderer::getScreenWidth(), Renderer::getScreenHeight(), 0x00000000 | (unsigned char)(mFadeOpacity * 255));
  }
}
Example 2: modelToScene
transformation objectModelSV::modelToScene(const int modelPointIndex, const Eigen::Affine3f & transformSceneToGlobal, const float alpha)
{
  Eigen::Vector3f modelPoint = modelCloud->points[modelPointIndex].getVector3fMap();
  Eigen::Vector3f modelNormal = modelCloud->points[modelPointIndex].getNormalVector3fMap();

  // Get transformation from model local frame to scene local frame
  Eigen::Affine3f completeTransform = transformSceneToGlobal.inverse() * Eigen::AngleAxisf(alpha, Eigen::Vector3f::UnitX()) * modelCloudTransformations[modelPointIndex];

  Eigen::Quaternion<float> rotationQ = Eigen::Quaternion<float>(completeTransform.rotation());

  // If the object is symmetric, remove the yaw rotation (assumes symmetry around the z axis)
  if(symmetric)
  {
    Eigen::Vector3f eulerAngles;
    // first: [0] -> rot. around x (roll), [1] -> rot. around y (pitch), [2] -> rot. around z (yaw)
    quaternionToEuler(rotationQ, eulerAngles[0], eulerAngles[1], eulerAngles[2]);
    //pcl::getEulerAngles(completeTransform, eulerAngles[0], eulerAngles[1], eulerAngles[2]);
    //eulerAngles[2] = 0.0;
    eulerToQuaternion(rotationQ, eulerAngles[0], eulerAngles[1], eulerAngles[2]);
    //quaternionToEuler(rotationQ, eulerAngles[2], eulerAngles[1], eulerAngles[2]);
    //std::cout << "EULER ANGLES: " << eulerAngles << std::endl;
  }
  //std::cout << "translation: " << completeTransform.rotation().matrix() << std::endl;

  return transformation(rotationQ, Eigen::Translation3f(completeTransform.translation()));
}
Example 3: isGoal
bool FootstepGraph::isGoal(StatePtr state)
{
  FootstepState::Ptr goal = getGoal(state->getLeg());
  if (publish_progress_) {
    jsk_footstep_msgs::FootstepArray msg;
    msg.header.frame_id = "odom"; // TODO fixed frame_id
    msg.header.stamp = ros::Time::now();
    msg.footsteps.push_back(*state->toROSMsg());
    pub_progress_.publish(msg);
  }
  Eigen::Affine3f pose = state->getPose();
  Eigen::Affine3f goal_pose = goal->getPose();
  Eigen::Affine3f transformation = pose.inverse() * goal_pose;

  if ((parameters_.goal_pos_thr > transformation.translation().norm()) &&
      (parameters_.goal_rot_thr > std::abs(Eigen::AngleAxisf(transformation.rotation()).angle()))) {
    // check collision
    if (state->getLeg() == jsk_footstep_msgs::Footstep::LEFT) {
      if (right_goal_state_->crossCheck(state)) {
        return true;
      }
    }
    else if (state->getLeg() == jsk_footstep_msgs::Footstep::RIGHT) {
      if (left_goal_state_->crossCheck(state)) {
        return true;
      }
    }
  }
  return false;
}
Example 4: acosf
template<typename PointT> void
pcl16::approximatePolygon (const PlanarPolygon<PointT>& polygon, PlanarPolygon<PointT>& approx_polygon, float threshold, bool refine, bool closed)
{
  const Eigen::Vector4f& coefficients = polygon.getCoefficients ();
  const typename pcl16::PointCloud<PointT>::VectorType &contour = polygon.getContour ();

  Eigen::Vector3f rotation_axis (coefficients[1], -coefficients[0], 0.0f);
  rotation_axis.normalize ();

  float rotation_angle = acosf (coefficients [2]);
  Eigen::Affine3f transformation = Eigen::Translation3f (0, 0, coefficients [3]) * Eigen::AngleAxisf (rotation_angle, rotation_axis);

  typename pcl16::PointCloud<PointT>::VectorType polygon2D (contour.size ());
  for (unsigned pIdx = 0; pIdx < polygon2D.size (); ++pIdx)
    polygon2D [pIdx].getVector3fMap () = transformation * contour [pIdx].getVector3fMap ();

  typename pcl16::PointCloud<PointT>::VectorType approx_polygon2D;
  approximatePolygon2D<PointT> (polygon2D, approx_polygon2D, threshold, refine, closed);

  typename pcl16::PointCloud<PointT>::VectorType &approx_contour = approx_polygon.getContour ();
  approx_contour.resize (approx_polygon2D.size ());

  Eigen::Affine3f inv_transformation = transformation.inverse ();
  for (unsigned pIdx = 0; pIdx < approx_polygon2D.size (); ++pIdx)
    approx_contour [pIdx].getVector3fMap () = inv_transformation * approx_polygon2D [pIdx].getVector3fMap ();
}
Example 5:
void
Cylinder::computePose(Eigen::Vector3f origin, Eigen::Vector3f z_axis)
{
  Eigen::Affine3f t;
  pcl::getTransformationFromTwoUnitVectorsAndOrigin(sym_axis_, z_axis, origin, t);
  pose_ = t.inverse();
}
Example 6:
void
pcl::CropBox<PointT>::applyFilter (PointCloud &output)
{
  output.resize (input_->points.size ());
  int indice_count = 0;

  // We filter out invalid points
  output.is_dense = true;

  Eigen::Affine3f transform = Eigen::Affine3f::Identity ();
  Eigen::Affine3f inverse_transform = Eigen::Affine3f::Identity ();

  if (rotation_ != Eigen::Vector3f::Zero ())
  {
    pcl::getTransformation (0, 0, 0,
                            rotation_ (0), rotation_ (1), rotation_ (2),
                            transform);
    inverse_transform = transform.inverse ();
  }

  for (size_t index = 0; index < indices_->size (); ++index)
  {
    if (!input_->is_dense)
      // Check if the point is invalid
      if (!isFinite (input_->points[(*indices_)[index]]))
        continue;

    // Get local point
    PointT local_pt = input_->points[(*indices_)[index]];

    // Transform point to world space
    if (!(transform_.matrix ().isIdentity ()))
      local_pt = pcl::transformPoint<PointT> (local_pt, transform_);

    if (translation_ != Eigen::Vector3f::Zero ())
    {
      local_pt.x -= translation_ (0);
      local_pt.y -= translation_ (1);
      local_pt.z -= translation_ (2);
    }

    // Transform point to local space of crop box
    if (!(inverse_transform.matrix ().isIdentity ()))
      local_pt = pcl::transformPoint<PointT> (local_pt, inverse_transform);

    if (local_pt.x < min_pt_[0] || local_pt.y < min_pt_[1] || local_pt.z < min_pt_[2])
      continue;
    if (local_pt.x > max_pt_[0] || local_pt.y > max_pt_[1] || local_pt.z > max_pt_[2])
      continue;

    output.points[indice_count++] = input_->points[(*indices_)[index]];
  }
  output.width = indice_count;
  output.height = 1;
  output.resize (output.width * output.height);
}
Example 7: if
void MsgToPoint3D(const TPPLPoint &pt, const cob_3d_mapping_msgs::Shape::ConstPtr& new_message, Eigen::Vector3f &pos, Eigen::Vector3f &normal) {
  if(new_message->params.size() == 4) {
    Eigen::Vector3f u, v, origin;
    Eigen::Affine3f transformation;
    normal(0) = new_message->params[0];
    normal(1) = new_message->params[1];
    normal(2) = new_message->params[2];
    origin(0) = new_message->centroid.x;
    origin(1) = new_message->centroid.y;
    origin(2) = new_message->centroid.z;
    //std::cout << "normal: " << normal << std::endl;
    //std::cout << "centroid: " << origin << std::endl;
    v = normal.unitOrthogonal ();
    u = normal.cross (v);
    pcl::getTransformationFromTwoUnitVectorsAndOrigin(v, normal, origin, transformation);

    transformation = transformation.inverse();

    Eigen::Vector3f p3;
    p3(0) = pt.x;
    p3(1) = pt.y;
    p3(2) = 0;
    pos = transformation * p3;
  }
  else if(new_message->params.size() == 5) {
    Eigen::Vector2f v, v2, n2;
    v(0) = pt.x;
    v(1) = pt.y;
    v2 = v;
    v2(0) *= v2(0);
    v2(1) *= v2(1);

    n2(0) = new_message->params[3];
    n2(1) = new_message->params[4];

    //dummy normal
    normal(0) = new_message->params[0];
    normal(1) = new_message->params[1];
    normal(2) = new_message->params[2];

    Eigen::Vector3f x, y, origin;
    x(0) = 1.f;
    y(1) = 1.f;
    x(1) = x(2) = y(0) = y(2) = 0.f;

    Eigen::Matrix<float,3,2> proj2plane_;
    proj2plane_.col(0) = normal.cross(y);
    proj2plane_.col(1) = normal.cross(x);

    origin(0) = new_message->centroid.x;
    origin(1) = new_message->centroid.y;
    origin(2) = new_message->centroid.z;

    pos = origin + proj2plane_ * v + normal * (v2.dot(n2));
    normal += normal * (v).dot(n2);
  }
}
Example 8: computePose
void
Cylinder::computePose(Eigen::Vector3f origin, std::vector<std::vector<Eigen::Vector3f> >& contours_3d)
{
  Eigen::Affine3f t;
  pcl::getTransformationFromTwoUnitVectorsAndOrigin(sym_axis_, sym_axis_.unitOrthogonal(), origin, t);
  Eigen::Vector3f z_cyl = t * contours_3d[0][0];
  z_cyl(1) = 0;
  Eigen::Vector3f z_axis = t.inverse().rotation() * z_cyl;
  computePose(origin, z_axis.normalized());
}
Example 9: isGoal
bool FootstepGraph::isGoal(StatePtr state)
{
  FootstepState::Ptr goal = getGoal(state->getLeg());
  if (publish_progress_) {
    jsk_footstep_msgs::FootstepArray msg;
    msg.header.frame_id = "odom";
    msg.header.stamp = ros::Time::now();
    msg.footsteps.push_back(*state->toROSMsg());
    pub_progress_.publish(msg);
  }
  Eigen::Affine3f pose = state->getPose();
  Eigen::Affine3f goal_pose = goal->getPose();
  Eigen::Affine3f transformation = pose.inverse() * goal_pose;
  return (pos_goal_thr_ > transformation.translation().norm()) &&
         (rot_goal_thr_ > std::abs(Eigen::AngleAxisf(transformation.rotation()).angle()));
}
Example 10: isCollidingBox
bool FootstepGraph::isCollidingBox(const Eigen::Affine3f& c, pcl::PointIndices::Ptr candidates) const
{
  const pcl::PointCloud<pcl::PointXYZ>::ConstPtr input_cloud = obstacle_tree_model_->getInputCloud();
  Eigen::Affine3f inv_c = c.inverse();
  for (size_t i = 0; i < candidates->indices.size(); i++) {
    int index = candidates->indices[i];
    const pcl::PointXYZ candidate_point = input_cloud->points[index];
    // convert candidate_point into `c' local representation.
    const Eigen::Vector3f local_p = inv_c * candidate_point.getVector3fMap();
    if (std::abs(local_p[0]) < collision_bbox_size_[0] / 2 &&
        std::abs(local_p[1]) < collision_bbox_size_[1] / 2 &&
        std::abs(local_p[2]) < collision_bbox_size_[2] / 2) {
      return true;
    }
  }
  return false;
}
Example 11: setTrackerTarget
void setTrackerTarget(){
  initTracking();
  Eigen::Vector4f c;
  Eigen::Affine3f trans = Eigen::Affine3f::Identity ();
  pcl::compute3DCentroid<PointT>(*object_to_track, c);
  trans.translation().matrix() = Eigen::Vector3f(c[0], c[1], c[2]);
  tracker_->setTrans (trans);

  pcl::PointCloud<PointT>::Ptr tmp_pc(new pcl::PointCloud<PointT>);
  pcl::transformPointCloud<PointT> (*object_to_track, *tmp_pc, trans.inverse());

  tracker_->setReferenceCloud(tmp_pc);
  tracker_->setInputCloud(cloud);

  tracked_object_centroid->clear();
  tracked_object_centroid->push_back(PointT(c[0], c[1], c[2]));
}
Example 12: radiusAndOriginFromCloud
double
radiusAndOriginFromCloud(pcl::PointCloud<pcl::PointXYZRGB>::ConstPtr in_cloud,
                         std::vector<int>& indices,
                         Eigen::Vector3f& origin,
                         const Eigen::Vector3f& sym_axis)
{
  // Transform into cylinder coordinate frame
  Eigen::Affine3f t;
  pcl::getTransformationFromTwoUnitVectorsAndOrigin(sym_axis.unitOrthogonal(), sym_axis, origin, t);
  pcl::PointCloud<pcl::PointXYZRGB>::Ptr pc_trans( new pcl::PointCloud<pcl::PointXYZRGB>() );
  pcl::transformPointCloud(*in_cloud, indices, *pc_trans, t);

  // Inliers of circle model
  pcl::PointIndices inliers;
  // Coefficients of circle model
  pcl::ModelCoefficients coeff;
  // Create the segmentation object
  pcl::SACSegmentation<pcl::PointXYZRGB> seg;
  // Optimize coefficients
  seg.setOptimizeCoefficients (true);
  // Set type of method
  //seg.setMethodType (pcl::SAC_MLESAC);
  seg.setMethodType (pcl::SAC_RANSAC);
  // Set number of maximum iterations
  seg.setMaxIterations (10);
  // Set type of model
  seg.setModelType (pcl::SACMODEL_CIRCLE2D);
  // Set threshold of model
  seg.setDistanceThreshold (0.010);
  // Give as input the filtered point cloud
  seg.setInputCloud (pc_trans/*in_cloud*/);
  //boost::shared_ptr<std::vector<int> > indices_ptr(&indices);
  //seg.setIndices(indices_ptr);
  // Call the segmenting method
  seg.segment(inliers, coeff);

  // origin in cylinder coordinates
  Eigen::Vector3f l_origin;
  l_origin << coeff.values[0], coeff.values[1], 0;
  origin = t.inverse() * l_origin;

  return coeff.values[2];
}
Example 13: updateCalibration
void SLPointCloudWidget::updateCalibration(){
  CalibrationData calibration;
  bool load_result = calibration.load("calibration.xml");
  if (!load_result)
    return;

  // Camera coordinate system
  visualizer->addCoordinateSystem(50, "camera", 0);

  // Projector coordinate system
  cv::Mat TransformPCV(4, 4, CV_32F, 0.0);
  cv::Mat(calibration.Rp).copyTo(TransformPCV.colRange(0, 3).rowRange(0, 3));
  cv::Mat(calibration.Tp).copyTo(TransformPCV.col(3).rowRange(0, 3));
  TransformPCV.at<float>(3, 3) = 1.0; // make it homogeneous

  Eigen::Affine3f TransformP;
  cv::cv2eigen(TransformPCV, TransformP.matrix());

  visualizer->addCoordinateSystem(50, TransformP.inverse(), "projector", 0);
}
Example 14: normalizeSize
bool normalizeSize(std::vector<Eigen::Vector3f, Eigen::aligned_allocator<Eigen::Vector3f> > &points, std::vector<Eigen::Vector3f, Eigen::aligned_allocator<Eigen::Vector3f> > &normals, Eigen::Affine3f &invTransform)
{
  // Assumes normalized rotation/translation
  Eigen::AlignedBox3f aabb;
  for (size_t i = 0; i < points.size(); ++i) {
    aabb.extend(points[i]);
  }

  // Calculate the isotropic scaling so that the longest side becomes unit length
  const float s = 1.f / aabb.diagonal().maxCoeff();

  for (size_t i = 0; i < points.size(); ++i) {
    points[i] *= s;
  }

  // Assemble the inverse transform.
  invTransform = Eigen::Affine3f::Identity();
  invTransform = invTransform.scale(s);
  invTransform = invTransform.inverse();

  return true;
}
Example 15: gridSampleApprox
void
ParticleFilterTracking::resetTrackingTargetModel(const pcl::PointCloud<pcl::PointXYZRGBA>::ConstPtr &new_target_cloud)
{
  // prepare the model of tracker's target
  Eigen::Vector4f c;
  Eigen::Affine3f trans = Eigen::Affine3f::Identity ();
  pcl::PointCloud<pcl::PointXYZRGBA>::Ptr transed_ref (new pcl::PointCloud<pcl::PointXYZRGBA>);
  pcl::PointCloud<pcl::PointXYZRGBA>::Ptr transed_ref_downsampled (new pcl::PointCloud<pcl::PointXYZRGBA>);
  pcl::compute3DCentroid (*new_target_cloud, c);
  trans.translation ().matrix () = Eigen::Vector3f (c[0], c[1], c[2]);
  pcl::transformPointCloud(*new_target_cloud, *transed_ref, trans.inverse());
  gridSampleApprox (transed_ref, *transed_ref_downsampled, downsampling_grid_size_);

  // set reference model and trans
  {
    boost::mutex::scoped_lock lock(mtx_);
    tracker_->setReferenceCloud (transed_ref_downsampled);
    tracker_->setTrans (trans);
    tracker_->resetTracking();
  }

  // Reset target Model
  ROS_INFO("RESET TARGET MODEL");
}