This page collects typical usage examples of the C++ method Eigen::Matrix3f::inverse. If you have been wondering how Matrix3f::inverse works in C++, how to call it, and what real usage looks like, the hand-picked code examples below may help. You can also explore further usage examples of the containing class Eigen::Matrix3f.
The following shows 5 code examples of Matrix3f::inverse, sorted by popularity by default. You can upvote the examples you like or find useful; your ratings help the system recommend better C++ code examples.
Example 1: mahalanobis_distance
// Returns the Mahalanobis distance of pt from the distribution (mean, cov);
// the commented-out line instead returns the squared distance.
float Reco::mahalanobis_distance (Eigen::Matrix3f cov, Eigen::Vector3f mean, pcl::PointXYZ pt)
{
    Eigen::Vector3f diff (pt.x - mean[0], pt.y - mean[1], pt.z - mean[2]);
    //return diff.dot(cov.inverse() * diff); // squared Mahalanobis distance
    return std::sqrt(diff.dot(cov.inverse() * diff));
}
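As a usage sketch (not part of the original example): the covariance and mean would typically come from a point-cloud cluster, e.g. via pcl::computeMeanAndCovarianceMatrix. The wrapper name distance_to_cluster below is ours.

#include <cmath>
#include <pcl/common/centroid.h>
#include <pcl/point_cloud.h>
#include <pcl/point_types.h>

// Hypothetical caller: estimate mean and covariance of a cloud with PCL,
// then measure how far a query point lies from that distribution.
float distance_to_cluster (const pcl::PointCloud<pcl::PointXYZ> &cloud,
                           const pcl::PointXYZ &query)
{
    Eigen::Matrix3f cov;
    Eigen::Vector4f centroid; // PCL returns a homogeneous 4-vector; only xyz is used
    pcl::computeMeanAndCovarianceMatrix (cloud, cov, centroid);
    Eigen::Vector3f diff (query.x - centroid[0], query.y - centroid[1], query.z - centroid[2]);
    return std::sqrt (diff.dot (cov.inverse() * diff));
}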
Example 2: getRansacInliers
void fi::VPDetectionWrapper::validateVanishingPoint(const std::vector<std::vector<Eigen::Vector2f> > &computed_vp_hypothesis, const Eigen::Matrix3f &cam_calib, Eigen::Vector3f &final_robust_vp_x, Eigen::Vector3f &final_robust_vp_y)
{
    Eigen::Matrix3f inv_cam_calib = cam_calib.inverse();
    // Transform the vanishing points into rays through the camera center;
    // see Hartley & Zisserman, chapter 8, for more on single-view geometry.
    unsigned int num_vps = computed_vp_hypothesis.size();
    std::vector<Eigen::Vector3f> computed_vp_hypothesis_x;
    std::vector<Eigen::Vector3f> computed_vp_hypothesis_y;
    std::vector<Eigen::Vector3f> computed_vp_hypothesis_z;
    for (unsigned int i = 0; i < num_vps; i++)
    {
        std::vector<Eigen::Vector2f> a_vp = computed_vp_hypothesis.at(i);
        Eigen::Vector2f a_x = a_vp.at(0);
        Eigen::Vector3f x_h, n_x;
        x_h(0) = a_x(0);
        x_h(1) = a_x(1);
        x_h(2) = 1;
        n_x = inv_cam_calib * x_h;
        n_x = n_x.normalized();
        computed_vp_hypothesis_x.push_back(n_x);
        Eigen::Vector2f a_y = a_vp.at(1);
        Eigen::Vector3f y_h, n_y;
        y_h(0) = a_y(0);
        y_h(1) = a_y(1);
        y_h(2) = 1;
        n_y = inv_cam_calib * y_h;
        n_y = n_y.normalized();
        computed_vp_hypothesis_y.push_back(n_y);
        Eigen::Vector2f a_z = a_vp.at(2);
        Eigen::Vector3f z_h, n_z;
        z_h(0) = a_z(0);
        z_h(1) = a_z(1);
        z_h(2) = 1;
        n_z = inv_cam_calib * z_h;
        n_z = n_z.normalized();
        computed_vp_hypothesis_z.push_back(n_z);
    }
    std::vector<Eigen::Vector3f> in_liers_x;
    std::vector<Eigen::Vector3f> in_liers_y;
    std::vector<Eigen::Vector3f> in_liers_z;
    bool found_inliers_x = getRansacInliers(computed_vp_hypothesis_x, in_liers_x);
    bool found_inliers_y = getRansacInliers(computed_vp_hypothesis_y, in_liers_y);
    bool found_inliers_z = getRansacInliers(computed_vp_hypothesis_z, in_liers_z);
    Eigen::VectorXf optimized_vp_x;
    Eigen::VectorXf optimized_vp_y;
    Eigen::VectorXf optimized_vp_z;
    leastQuaresVPFitting(in_liers_x, optimized_vp_x);
    leastQuaresVPFitting(in_liers_y, optimized_vp_y);
    leastQuaresVPFitting(in_liers_z, optimized_vp_z);
    std::cout << "Vanishing Points Validated" << std::endl;
    // Check the pairwise angles to verify the directions are truly orthogonal.
    Eigen::Vector3f vp_x (optimized_vp_x[3], optimized_vp_x[4], optimized_vp_x[5]);
    Eigen::Vector3f vp_y (optimized_vp_y[3], optimized_vp_y[4], optimized_vp_y[5]);
    Eigen::Vector3f vp_z (optimized_vp_z[3], optimized_vp_z[4], optimized_vp_z[5]);
    Eigen::Vector3f vp_x_centroid (optimized_vp_x[0], optimized_vp_x[1], optimized_vp_x[2]);
    Eigen::Vector3f vp_y_centroid (optimized_vp_y[0], optimized_vp_y[1], optimized_vp_y[2]);
    Eigen::Vector3f vp_z_centroid (optimized_vp_z[0], optimized_vp_z[1], optimized_vp_z[2]);
    float angle_value_radiens_cxy = angleBetweenVectors(vp_x_centroid, vp_y_centroid);
    float angle_value_degrees_cxy = pcl::rad2deg(angle_value_radiens_cxy);
    float angle_value_radiens_cxz = angleBetweenVectors(vp_x_centroid, vp_z_centroid);
    float angle_value_degrees_cxz = pcl::rad2deg(angle_value_radiens_cxz);
    float angle_value_radiens_cyz = angleBetweenVectors(vp_y_centroid, vp_z_centroid);
    float angle_value_degrees_cyz = pcl::rad2deg(angle_value_radiens_cyz);
    float angle_value_radiens_xy = angleBetweenVectors(vp_x, vp_y);
    float angle_value_degrees_xy = pcl::rad2deg(angle_value_radiens_xy);
    float angle_value_radiens_xz = angleBetweenVectors(vp_x, vp_z);
    float angle_value_degrees_xz = pcl::rad2deg(angle_value_radiens_xz);
    float angle_value_radiens_yz = angleBetweenVectors(vp_y, vp_z);
    float angle_value_degrees_yz = pcl::rad2deg(angle_value_radiens_yz);
    // Collect only the mean vanishing points.
    final_robust_vp_x = optimized_vp_x.tail<3> ();
    final_robust_vp_y = optimized_vp_y.tail<3> ();
    //final_robust_vp_x = vp_x_centroid;
    //final_robust_vp_y = vp_y_centroid;
}
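The Matrix3f::inverse call above implements the standard back-projection of an image point: ray = K⁻¹ · (u, v, 1)ᵀ, where K is the camera calibration matrix. A minimal self-contained sketch of just that step (the intrinsics values here are made up):

#include <Eigen/Dense>
#include <iostream>

int main ()
{
    // Hypothetical pinhole intrinsics: focal lengths on the diagonal,
    // principal point in the last column.
    Eigen::Matrix3f K;
    K << 800.0f,   0.0f, 320.0f,
           0.0f, 800.0f, 240.0f,
           0.0f,   0.0f,   1.0f;
    Eigen::Vector3f pixel (400.0f, 260.0f, 1.0f);              // homogeneous image point
    Eigen::Vector3f ray = (K.inverse() * pixel).normalized();  // unit ray through the camera center
    std::cout << "ray: " << ray.transpose() << std::endl;
    return 0;
}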
Example 3: worldBallPosFromImgCoords
std::pair<float, float> worldBallPosFromImgCoords(AL::ALMotionProxy motionProxy,
                                                  std::pair<int, int> ballPosCam,
                                                  int imgWidth, int imgHeight,
                                                  int camera)
{
    std::string cameraName = "CameraTop";
    if (camera == 1)
    {
        cameraName = "CameraBottom";
    }
    // Image coordinates of the ball
    int u = ballPosCam.first;
    int v = ballPosCam.second;
    // Angles of observation of the ball
    float phi = ((float)u - ((float)imgWidth)/2) / (float)imgWidth * img_WFOV;
    float theta = ((float)v - ((float)imgHeight)/2) / (float)imgHeight * img_HFOV;
    // Select the right coordinates for the NAO system:
    // x outward from the camera, y to the left and z vertically upwards.
    // Coefficients of the line equation going from the NAO camera to the ball
    float b_c = -sin(phi);
    float c_c = -sin(theta);
    float a_c = sqrt((cos(phi)*cos(phi) + cos(theta)*cos(theta))/2);
    int space = 2; // FRAME_ROBOT
    bool useSensorValues = true;
    std::vector<float> transVec =
        motionProxy.getTransform(cameraName, space, useSensorValues); // Transform camera -> FRAME_ROBOT
    std::vector<float> cameraPos =
        motionProxy.getPosition(cameraName, space, useSensorValues);  // Camera position in FRAME_ROBOT
    // std::cout << "Position of bottom camera: " << std::endl;
    // std::cout << cameraPos.at(0) << " " << cameraPos.at(1) << " " << cameraPos.at(2) << std::endl;
    // std::cout << cameraPos.at(3) << " " << cameraPos.at(4) << " " << cameraPos.at(5) << std::endl;
    // Put the camera transform into an Eigen matrix for easy multiplication
    Eigen::Matrix4f trans;
    trans <<
        transVec[0] , transVec[1] , transVec[2] , transVec[3] ,
        transVec[4] , transVec[5] , transVec[6] , transVec[7] ,
        transVec[8] , transVec[9] , transVec[10], transVec[11],
        transVec[12], transVec[13], transVec[14], transVec[15];
    Eigen::Vector4f vec(a_c, b_c, c_c, 1);
    // Transform the line equation from the NAO camera coordinate system into FRAME_ROBOT
    Eigen::Vector4f transformedLine = trans*vec;
    // std::cout << "trans*vec = " << transformedLine << std::endl;
    // Solve the line-plane intersection with the floor plane at height z,
    // following the Wikipedia article on line-plane intersection.
    float z = 0.00;
    Eigen::Matrix3f lineMat;
    lineMat <<
        cameraPos.at(0)-transformedLine[0], 1.0-0.0, 0.0-0.0,
        cameraPos.at(1)-transformedLine[1], 0.0-0.0, 1.0-0.0,
        cameraPos.at(2)-transformedLine[2], z - z,   z - z;
    Eigen::Vector3f lineVec;
    lineVec << cameraPos.at(0)-0.0, cameraPos.at(1)-0.0, cameraPos.at(2)-z;
    Eigen::Vector3f txy = lineMat.inverse()*lineVec;
    std::cout << "Ball is at (x, y): (" << txy[1] << ", " << txy[2] << ")" << std::endl;
    std::pair<float, float> return_value(txy[1], txy[2]);
    return return_value; // Return ball position (x, y)
}
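For reference, the 3×3 system above is the parametric line-plane intersection: with the line through la = cameraPos and lb = transformedLine, and the floor plane through p0 = (0, 0, z) spanned by p1 − p0 and p2 − p0, it solves [la − lb | p1 − p0 | p2 − p0] · (t, u, v)ᵀ = la − p0. A generic restatement of that computation (the function name is ours):

#include <Eigen/Dense>

// Intersection of the line through la, lb with the plane through p0, p1, p2.
// Solves [la - lb | p1 - p0 | p2 - p0] * (t, u, v)^T = la - p0,
// then evaluates the line at parameter t.
Eigen::Vector3f intersectLinePlane (const Eigen::Vector3f &la, const Eigen::Vector3f &lb,
                                    const Eigen::Vector3f &p0, const Eigen::Vector3f &p1,
                                    const Eigen::Vector3f &p2)
{
    Eigen::Matrix3f M;
    M.col(0) = la - lb;
    M.col(1) = p1 - p0;
    M.col(2) = p2 - p0;
    Eigen::Vector3f tuv = M.inverse() * (la - p0);
    return la + tuv[0] * (lb - la);
}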
Example 4: main
//......... part of the code is omitted here .........
    cv::imshow("Depth", depthImg);
    char key = cv::waitKey(1);
    if(key == 'q')
    {
        break;
    }
    else if(key == ' ')
    {
        key = cv::waitKey(0);
    }
    if(key == 'q')
    {
        break;
    }
    Eigen::Matrix3f Rcurr = Eigen::Matrix3f::Identity();
    Eigen::Vector3f tcurr = Eigen::Vector3f::Zero();
    // #1
    odom->getIncrementalTransformation(tcurr,
                                       Rcurr,
                                       logReader.timestamp,
                                       (unsigned char *)logReader.deCompImage->imageData,
                                       (unsigned short *)&decompressionBuffer[0]);
    fout1 << tcurr[0] << " " << tcurr[1] << " " << tcurr[2] << " "
          << Rcurr(0,0) << " " << Rcurr(0,1) << " " << Rcurr(0,2) << " "
          << Rcurr(1,0) << " " << Rcurr(1,1) << " " << Rcurr(1,2) << " "
          << Rcurr(2,0) << " " << Rcurr(2,1) << " " << Rcurr(2,2) << endl;
    Eigen::Matrix3f Rdelta = Rcurr.inverse() * lastPlaceRecognitionRot;
    Eigen::Vector3f tdelta = tcurr - lastPlaceRecognitionTrans;
    //Eigen::MatrixXd covariance = odom->getCovariance();
    //Eigen::MatrixXd covariance = Eigen::Matrix<double, 6, 6>::Identity() * 1e-3;
    if((Projection::rodrigues2(Rdelta).norm() + tdelta.norm()) >= 0.1)
    {
        Eigen::MatrixXd covariance = odom->getCovariance();
        iSAM.addCameraCameraConstraint(lastTime,
                                       logReader.timestamp,
                                       lastPlaceRecognitionRot,
                                       lastPlaceRecognitionTrans,
                                       Rcurr,
                                       tcurr);
                                       //covariance);
        printCovariance(fout5, covariance);
        lastTime = logReader.timestamp;
        lastPlaceRecognitionRot = Rcurr;
        lastPlaceRecognitionTrans = tcurr;
        cout << "before add keyframe" << endl;
        // #2
        map.addKeyframe((unsigned char *)logReader.deCompImage->imageData,
                        (unsigned short *)&decompressionBuffer[0],
                        Rcurr,
                        tcurr,
                        logReader.timestamp);
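One detail worth noting in this example: Rcurr is a pure rotation, so Rcurr.inverse() equals Rcurr.transpose(), and the transpose is cheaper and numerically safer than a general 3×3 inversion. A minimal sketch of the equivalence (the angles are made up):

#include <Eigen/Dense>
#include <Eigen/Geometry>
#include <iostream>

int main ()
{
    // Two example camera orientations built from axis-angle rotations.
    Eigen::Matrix3f Rcurr = Eigen::AngleAxisf(0.5f, Eigen::Vector3f::UnitZ()).toRotationMatrix();
    Eigen::Matrix3f Rlast = Eigen::AngleAxisf(0.3f, Eigen::Vector3f::UnitZ()).toRotationMatrix();
    // For an orthonormal rotation matrix, inverse() and transpose() coincide.
    Eigen::Matrix3f Rdelta = Rcurr.transpose() * Rlast;
    std::cout << (Rdelta - Rcurr.inverse() * Rlast).norm() << std::endl; // prints ~0
    return 0;
}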
Example 5: if
//......... part of the code is omitted here .........
    else
        lift_motor->set_pos(-293720);
    sleep(10);
    printf("Start Test\n");
    // Pose-correction code inserted here. First, make sure the tag is attached vertically;
    // second, the camera must have no pitch angle relative to the vehicle.
    if ((fork_count % 2) == 1)
    {
        printf("Start Correction!\n");
        int count_detect = 0;
        while(ros::ok())
        {
            if(abs_pose1)
            {
                Eigen::Quaternion<float> quat;
                quat.w() = abs_pose1->pose.pose.orientation.w;
                quat.x() = abs_pose1->pose.pose.orientation.x;
                quat.y() = abs_pose1->pose.pose.orientation.y;
                quat.z() = abs_pose1->pose.pose.orientation.z;
                Eigen::Matrix3f Rotation;
                Rotation = qToRotation(quat);
                Eigen::Matrix3f FixTF;
                FixTF << 1, 0, 0,
                         0, 0, -1,
                         0, 1, 0;
                Rotation = FixTF * Rotation.inverse();
                float yaw_error;
                yaw_error = getYawFromMat(Rotation);
                gain = -3265 * (k_angle * yaw_error) / 3.1415926;
                if(fabs(gain) > 150)
                {
                    gain = boost::math::sign(gain) * 150;
                }
                locomotor->set_vel(floor(gain), floor(gain));
                if (fabs(yaw_error * 180 / 3.1415926) < 0.5)
                {
                    locomotor->set_vel(0, 0);
                    printf("Yaw %f corrected!\n", yaw_error * 180 / 3.1415926);
                    break;
                }
            }
            else
            {
                locomotor->set_vel(0, 0);
                usleep(10000);
                count_detect++;
            }
            ros::spinOnce();
            if (count_detect > 1000)
            {
                ROS_WARN("No Tag detected when stopped");
                ros::shutdown();
                exit(1);
            }
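The qToRotation and getYawFromMat helpers above are project-specific and not shown here; Eigen can do the quaternion-to-matrix conversion directly. A sketch of the same step using only Eigen, assuming a standard ZYX yaw extraction (the quaternion values are made up):

#include <cmath>
#include <Eigen/Dense>
#include <Eigen/Geometry>
#include <iostream>

int main ()
{
    // Example quaternion; the Eigen constructor takes (w, x, y, z).
    Eigen::Quaternionf quat(0.9659f, 0.0f, 0.0f, 0.2588f); // ~30 deg about Z
    Eigen::Matrix3f rotation = quat.normalized().toRotationMatrix(); // built-in conversion
    // Yaw of a ZYX (yaw-pitch-roll) decomposition, read off the rotation matrix.
    float yaw = std::atan2(rotation(1, 0), rotation(0, 0));
    std::cout << "yaw [rad]: " << yaw << std::endl;
    return 0;
}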