本文整理汇总了C++中Eigen::Matrix::bottomRightCorner方法的典型用法代码示例。如果您正苦于以下问题:C++ Matrix::bottomRightCorner方法的具体用法?C++ Matrix::bottomRightCorner怎么用?C++ Matrix::bottomRightCorner使用的例子?那么恭喜您, 这里精选的方法代码示例或许可以为您提供帮助。您也可以进一步了解该方法所在类Eigen::Matrix
的用法示例。
在下文中一共展示了Matrix::bottomRightCorner方法的2个代码示例,这些例子默认根据受欢迎程度排序。您可以为喜欢或者感觉有用的代码点赞,您的评价将有助于系统推荐出更棒的C++代码示例。
示例1: computeVariances
// Computes the height-measurement variance (sigma_p) for every point of a stereo
// point cloud via the error propagation law:
//   sigma_p = J_q * Sigma_q * J_q^T + J_s * Sigma_S * J_s^T
// where Sigma_q is the robot rotation covariance and Sigma_S the stereo sensor
// noise model evaluated per point.
//
// @param pointCloud          Input cloud in the sensor frame (S_r_SP per point).
// @param robotPoseCovariance 6x6 pose covariance; only the lower-right 3x3
//                            (rotation) block is used.
// @param variances           Output: one height variance per input point.
// @return true (always; kept for interface compatibility).
bool StereoSensorProcessor::computeVariances(
    const pcl::PointCloud<pcl::PointXYZRGB>::ConstPtr pointCloud,
    const Eigen::Matrix<double, 6, 6>& robotPoseCovariance,
    Eigen::VectorXf& variances)
{
  variances.resize(pointCloud->size());

  // Projection vector (P): selects the map-frame z-component (height).
  const Eigen::RowVector3f projectionVector = Eigen::RowVector3f::UnitZ();

  // Sensor Jacobian (J_s): maps sensor-frame noise into map-frame height noise.
  const Eigen::RowVector3f sensorJacobian =
      projectionVector
      * (rotationMapToBase_.transposed() * rotationBaseToSensor_.transposed())
            .toImplementation().cast<float>();

  // Robot rotation covariance matrix (Sigma_q).
  const Eigen::Matrix3f rotationVariance =
      robotPoseCovariance.bottomRightCorner(3, 3).cast<float>();

  // Preparations for the robot rotation Jacobian (J_q) to minimize computation
  // for every point in the point cloud.
  const Eigen::Matrix3f C_BM_transpose =
      rotationMapToBase_.transposed().toImplementation().cast<float>();
  const Eigen::RowVector3f P_mul_C_BM_transpose = projectionVector * C_BM_transpose;
  const Eigen::Matrix3f C_SB_transpose =
      rotationBaseToSensor_.transposed().toImplementation().cast<float>();
  const Eigen::Matrix3f B_r_BS_skew = kindr::getSkewMatrixFromVector(
      Eigen::Vector3f(translationBaseToSensorInBaseFrame_.toImplementation().cast<float>()));

  // Hoist loop-invariant sensor-model parameters out of the per-point loop:
  // std::map::at() is an O(log n) tree lookup that was previously repeated
  // for every parameter on every point.
  const double depthToDisparityFactor = sensorParameters_.at("depth_to_disparity_factor");
  const double p1 = sensorParameters_.at("p_1");
  const double p2 = sensorParameters_.at("p_2");
  const double p3 = sensorParameters_.at("p_3");
  const double p4 = sensorParameters_.at("p_4");
  const double p5 = sensorParameters_.at("p_5");
  const double lateralFactor = sensorParameters_.at("lateral_factor");

  for (unsigned int i = 0; i < pointCloud->size(); ++i)
  {
    // For every point in the point cloud.
    // Take a const reference instead of copying the whole point.
    const pcl::PointXYZRGB& point = pointCloud->points[i];
    const double disparity = depthToDisparityFactor / point.z;
    const Eigen::Vector3f pointVector(point.x, point.y, point.z); // S_r_SP

    // Measurement distance.
    const float measurementDistance = pointVector.norm();

    // Compute sensor covariance matrix (Sigma_S) with the stereo sensor model.
    // NOTE(review): 240 looks like the image center row (half of a 480 px image
    // height) used together with getI(i)/getJ(i) pixel coordinates — confirm.
    const float varianceNormal =
        pow(depthToDisparityFactor / (disparity * disparity), 2)
        * ((p5 * disparity + p2)
               * sqrt(pow(p3 * disparity + p4 - getJ(i), 2)
                      + pow(240 - getI(i), 2))
           + p1);
    const float varianceLateral = pow(lateralFactor * measurementDistance, 2);
    Eigen::Matrix3f sensorVariance = Eigen::Matrix3f::Zero();
    sensorVariance.diagonal() << varianceLateral, varianceLateral, varianceNormal;

    // Robot rotation Jacobian (J_q).
    const Eigen::Matrix3f C_SB_transpose_times_S_r_SP_skew =
        kindr::getSkewMatrixFromVector(Eigen::Vector3f(C_SB_transpose * pointVector));
    const Eigen::RowVector3f rotationJacobian =
        P_mul_C_BM_transpose * (C_SB_transpose_times_S_r_SP_skew + B_r_BS_skew);

    // Measurement variance for the map (error propagation law): sigma_p.
    float heightVariance = rotationJacobian * rotationVariance * rotationJacobian.transpose();
    heightVariance += sensorJacobian * sensorVariance * sensorJacobian.transpose();

    // Copy to the output list.
    variances(i) = heightVariance;
  }
  return true;
}
示例2: computeVariances
// Computes the height-measurement variance (sigma_p) for every point of a laser
// point cloud via the error propagation law:
//   sigma_p = J_q * Sigma_q * J_q^T + J_s * Sigma_S * J_s^T
// where Sigma_q is the robot rotation covariance and Sigma_S the laser sensor
// noise model (constant normal variance, distance-dependent lateral variance).
//
// @param pointCloud          Input cloud in the sensor frame (S_r_SP per point).
// @param robotPoseCovariance 6x6 pose covariance; only the lower-right 3x3
//                            (rotation) block is used.
// @param variances           Output: one height variance per input point.
// @return true (always; kept for interface compatibility).
bool LaserSensorProcessor::computeVariances(
    const pcl::PointCloud<pcl::PointXYZRGB>::ConstPtr pointCloud,
    const Eigen::Matrix<double, 6, 6>& robotPoseCovariance,
    Eigen::VectorXf& variances)
{
  variances.resize(pointCloud->size());

  // Projection vector (P): selects the map-frame z-component (height).
  const Eigen::RowVector3f projectionVector = Eigen::RowVector3f::UnitZ();

  // Sensor Jacobian (J_s): maps sensor-frame noise into map-frame height noise.
  const Eigen::RowVector3f sensorJacobian =
      projectionVector
      * (rotationMapToBase_.transposed() * rotationBaseToSensor_.transposed())
            .toImplementation().cast<float>();

  // Robot rotation covariance matrix (Sigma_q).
  const Eigen::Matrix3f rotationVariance =
      robotPoseCovariance.bottomRightCorner(3, 3).cast<float>();

  // Preparations for the robot rotation Jacobian (J_q) to minimize computation
  // for every point in the point cloud.
  const Eigen::Matrix3f C_BM_transpose =
      rotationMapToBase_.transposed().toImplementation().cast<float>();
  const Eigen::RowVector3f P_mul_C_BM_transpose = projectionVector * C_BM_transpose;
  const Eigen::Matrix3f C_SB_transpose =
      rotationBaseToSensor_.transposed().toImplementation().cast<float>();
  const Eigen::Matrix3f B_r_BS_skew = kindr::linear_algebra::getSkewMatrixFromVector(
      Eigen::Vector3f(translationBaseToSensorInBaseFrame_.toImplementation().cast<float>()));

  // Hoist loop-invariant work out of the per-point loop: varianceNormal does not
  // depend on the point at all, and std::map::at() is an O(log n) lookup that was
  // previously repeated for every parameter on every point.
  const float varianceNormal = pow(sensorParameters_.at("min_radius"), 2);
  const double beamConstant = sensorParameters_.at("beam_constant");
  const double beamAngle = sensorParameters_.at("beam_angle");

  for (unsigned int i = 0; i < pointCloud->size(); ++i)
  {
    // For every point in the point cloud.
    const auto& point = pointCloud->points[i];
    const Eigen::Vector3f pointVector(point.x, point.y, point.z); // S_r_SP

    // Measurement distance.
    const float measurementDistance = pointVector.norm();

    // Compute sensor covariance matrix (Sigma_S) with the laser sensor model:
    // lateral noise grows with the beam opening over distance.
    const float varianceLateral = pow(beamConstant + beamAngle * measurementDistance, 2);
    Eigen::Matrix3f sensorVariance = Eigen::Matrix3f::Zero();
    sensorVariance.diagonal() << varianceLateral, varianceLateral, varianceNormal;

    // Robot rotation Jacobian (J_q).
    const Eigen::Matrix3f C_SB_transpose_times_S_r_SP_skew =
        kindr::linear_algebra::getSkewMatrixFromVector(
            Eigen::Vector3f(C_SB_transpose * pointVector));
    const Eigen::RowVector3f rotationJacobian =
        P_mul_C_BM_transpose * (C_SB_transpose_times_S_r_SP_skew + B_r_BS_skew);

    // Measurement variance for the map (error propagation law): sigma_p.
    float heightVariance = rotationJacobian * rotationVariance * rotationJacobian.transpose();
    heightVariance += sensorJacobian * sensorVariance * sensorJacobian.transpose();

    // Copy to the output list.
    variances(i) = heightVariance;
  }
  return true;
}