This article collects typical usage examples of the Eigen::Matrix3f class in C++. If you are wondering what Matrix3f is for, how it is used, or want concrete examples, the selected class examples below may help.
The following shows 15 code examples of the Matrix3f class, sorted by popularity by default.
Example 1: CalculateStiffnessMatrix
void Element::CalculateStiffnessMatrix(const Eigen::Matrix3f& D, std::vector<Eigen::Triplet<float> >& triplets)
{
    Eigen::Vector3f x, y;
    x << nodesX[nodesIds[0]], nodesX[nodesIds[1]], nodesX[nodesIds[2]];
    y << nodesY[nodesIds[0]], nodesY[nodesIds[1]], nodesY[nodesIds[2]];

    // C holds a column of ones plus the nodal coordinates; its inverse gives the
    // shape-function gradients, and det(C) is twice the triangle area.
    Eigen::Matrix3f C;
    C << Eigen::Vector3f(1.0f, 1.0f, 1.0f), x, y;
    Eigen::Matrix3f IC = C.inverse();

    // B is the 3x6 strain-displacement matrix (a class member).
    for (int i = 0; i < 3; i++)
    {
        B(0, 2 * i + 0) = IC(1, i);
        B(0, 2 * i + 1) = 0.0f;
        B(1, 2 * i + 0) = 0.0f;
        B(1, 2 * i + 1) = IC(2, i);
        B(2, 2 * i + 0) = IC(2, i);
        B(2, 2 * i + 1) = IC(1, i);
    }

    // Element stiffness: K = B^T * D * B * area, with area = det(C) / 2.
    Eigen::Matrix<float, 6, 6> K = B.transpose() * D * B * C.determinant() / 2.0f;

    for (int i = 0; i < 3; i++)
    {
        for (int j = 0; j < 3; j++)
        {
            Eigen::Triplet<float> trplt11(2 * nodesIds[i] + 0, 2 * nodesIds[j] + 0, K(2 * i + 0, 2 * j + 0));
            Eigen::Triplet<float> trplt12(2 * nodesIds[i] + 0, 2 * nodesIds[j] + 1, K(2 * i + 0, 2 * j + 1));
            Eigen::Triplet<float> trplt21(2 * nodesIds[i] + 1, 2 * nodesIds[j] + 0, K(2 * i + 1, 2 * j + 0));
            Eigen::Triplet<float> trplt22(2 * nodesIds[i] + 1, 2 * nodesIds[j] + 1, K(2 * i + 1, 2 * j + 1));

            triplets.push_back(trplt11);
            triplets.push_back(trplt12);
            triplets.push_back(trplt21);
            triplets.push_back(trplt22);
        }
    }
}
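CalculateStiffnessMatrix expects D to be the 3x3 constitutive (elasticity) matrix and only accumulates triplets; the caller assembles them into a global sparse stiffness matrix. Below is a minimal sketch of that surrounding setup, assuming a linear-elastic plane-stress material and two degrees of freedom per node; E, nu, elements, nodesCount and globalK are illustrative names not taken from the original source, and the assembly needs <Eigen/Sparse>:

// Sketch only: plane-stress elasticity matrix and global assembly (illustrative names).
float E  = 2000.0f;   // Young's modulus (assumed value and units)
float nu = 0.3f;      // Poisson's ratio (assumed)

Eigen::Matrix3f D;
D << 1.0f, nu,   0.0f,
     nu,   1.0f, 0.0f,
     0.0f, 0.0f, (1.0f - nu) / 2.0f;
D *= E / (1.0f - nu * nu);

std::vector<Eigen::Triplet<float> > triplets;
for (Element& element : elements)              // hypothetical container of Element objects
    element.CalculateStiffnessMatrix(D, triplets);

Eigen::SparseMatrix<float> globalK(2 * nodesCount, 2 * nodesCount);  // 2 DOF per node
globalK.setFromTriplets(triplets.begin(), triplets.end());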
Example 2: ComputeDarbouxVector
bool PositionBasedElasticRod::ComputeDarbouxVector(const Eigen::Matrix3f& dA, const Eigen::Matrix3f& dB, const float mid_edge_length, Eigen::Vector3f& darboux_vector)
{
    float factor = 1.0f + dA.col(0).dot(dB.col(0)) + dA.col(1).dot(dB.col(1)) + dA.col(2).dot(dB.col(2));
    factor = 2.0f / (mid_edge_length * factor);

    for (int c = 0; c < 3; ++c)
    {
        const int i = permutation[c][0];
        const int j = permutation[c][1];
        const int k = permutation[c][2];
        darboux_vector[i] = dA.col(j).dot(dB.col(k)) - dA.col(k).dot(dB.col(j));
    }

    darboux_vector *= factor;
    return true;
}
Example 3: transformOrientationToGroundFrame
void transformOrientationToGroundFrame(const VirtualRobot::RobotPtr& robot,
                                       const Eigen::Matrix6Xf& leftFootTrajectory,
                                       const VirtualRobot::RobotNodePtr& leftFoot,
                                       const VirtualRobot::RobotNodePtr& rightFoot,
                                       const VirtualRobot::RobotNodeSetPtr& bodyJoints,
                                       const Eigen::MatrixXf& bodyTrajectory,
                                       const Eigen::Matrix3Xf& trajectory,
                                       const std::vector<SupportInterval>& intervals,
                                       Eigen::Matrix3Xf& relativeTrajectory)
{
    Eigen::Matrix4f leftInitialPose = bodyJoints->getKinematicRoot()->getGlobalPose();
    int N = trajectory.cols();
    int M = trajectory.rows();
    relativeTrajectory.resize(M, N);
    BOOST_ASSERT(M > 0 && M <= 3);

    auto intervalIter = intervals.begin();
    for (int i = 0; i < N; i++)
    {
        while (i >= intervalIter->endIdx)
        {
            intervalIter = std::next(intervalIter);
        }

        // Move basis along with the left foot
        Eigen::Matrix4f leftFootPose = Eigen::Matrix4f::Identity();
        VirtualRobot::MathTools::posrpy2eigen4f(1000 * leftFootTrajectory.block(0, i, 3, 1),
                                                leftFootTrajectory.block(3, i, 3, 1), leftFootPose);
        robot->setGlobalPose(leftFootPose);
        bodyJoints->setJointValues(bodyTrajectory.col(i));

        Eigen::Matrix3f worldToRef = computeGroundFrame(leftFoot->getGlobalPose(),
                                                        rightFoot->getGlobalPose(),
                                                        intervalIter->phase).block(0, 0, 3, 3);
        relativeTrajectory.block(0, i, M, 1) = worldToRef.colPivHouseholderQr().solve(trajectory.col(i)).block(0, 0, M, 1);
    }
}
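Since computeGroundFrame returns a rigid pose, its 3x3 block worldToRef is (up to numerical noise) a pure rotation, so the QR solve above is just a robust way of applying its inverse. A one-line sketch of the equivalent form, using the same variables and assuming the block really is orthogonal:

// Sketch only: for an orthogonal matrix, solve() reduces to multiplying by the transpose.
relativeTrajectory.block(0, i, M, 1) = (worldToRef.transpose() * trajectory.col(i)).block(0, 0, M, 1);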
Example 4: demeanPointCloud
template <typename PointT> void
pcl::SampleConsensusModelRegistration<PointT>::estimateRigidTransformationSVD (
    const pcl::PointCloud<PointT> &cloud_src,
    const std::vector<int> &indices_src,
    const pcl::PointCloud<PointT> &cloud_tgt,
    const std::vector<int> &indices_tgt,
    Eigen::VectorXf &transform)
{
  transform.resize (16);

  Eigen::Vector4f centroid_src, centroid_tgt;
  // Estimate the centroids of source, target
  compute3DCentroid (cloud_src, indices_src, centroid_src);
  compute3DCentroid (cloud_tgt, indices_tgt, centroid_tgt);

  // Subtract the centroids from source, target
  Eigen::MatrixXf cloud_src_demean;
  demeanPointCloud (cloud_src, indices_src, centroid_src, cloud_src_demean);

  Eigen::MatrixXf cloud_tgt_demean;
  demeanPointCloud (cloud_tgt, indices_tgt, centroid_tgt, cloud_tgt_demean);

  // Assemble the correlation matrix H = source * target'
  Eigen::Matrix3f H = (cloud_src_demean * cloud_tgt_demean.transpose ()).topLeftCorner<3, 3>();

  // Compute the Singular Value Decomposition
  Eigen::JacobiSVD<Eigen::Matrix3f> svd (H, Eigen::ComputeFullU | Eigen::ComputeFullV);
  Eigen::Matrix3f u = svd.matrixU ();
  Eigen::Matrix3f v = svd.matrixV ();

  // Compute R = V * U'
  if (u.determinant () * v.determinant () < 0)
  {
    for (int x = 0; x < 3; ++x)
      v (x, 2) *= -1;
  }

  Eigen::Matrix3f R = v * u.transpose ();

  // Return the correct transformation
  transform.segment<3> (0) = R.row (0); transform[12] = 0;
  transform.segment<3> (4) = R.row (1); transform[13] = 0;
  transform.segment<3> (8) = R.row (2); transform[14] = 0;

  Eigen::Vector3f t = centroid_tgt.head<3> () - R * centroid_src.head<3> ();
  transform[3] = t[0]; transform[7] = t[1]; transform[11] = t[2]; transform[15] = 1.0;
}
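The output vector packs the 4x4 homogeneous transform row by row: indices 0 to 3 hold the first row, 4 to 7 the second, and so on, which is why the translation ends up at indices 3, 7 and 11. A minimal sketch of recovering an Eigen::Matrix4f from it (the name T is illustrative):

// Sketch only: the 16-element vector is a row-major flattening of the 4x4 transform.
Eigen::Matrix4f T = Eigen::Map<const Eigen::Matrix<float, 4, 4, Eigen::RowMajor> >(transform.data());
Eigen::Matrix3f R = T.topLeftCorner<3, 3>();   // rotation estimated from the SVD
Eigen::Vector3f t = T.topRightCorner<3, 1>();  // translation between the centroids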
Example 5: ComputeMaterialFrame
// ----------------------------------------------------------------------------------------------
bool PositionBasedElasticRod::ComputeMaterialFrame(
    const Eigen::Vector3f& pA,
    const Eigen::Vector3f& pB,
    const Eigen::Vector3f& pG,
    Eigen::Matrix3f& frame)
{
    frame.col(2) = (pB - pA);
    frame.col(2).normalize();

    frame.col(1) = (frame.col(2).cross(pG - pA));
    frame.col(1).normalize();

    frame.col(0) = frame.col(1).cross(frame.col(2));
    // frame.col(0).normalize();
    return true;
}
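A minimal sketch of how Examples 2 and 5 fit together for one rod segment, assuming the two routines are callable as static members (their bodies use no instance state) and assuming three consecutive particle positions p0, p1, p2 with ghost points g0, g1; all names are illustrative:

// Sketch only: a material frame per edge, then the Darboux (bend/twist) vector of the pair.
Eigen::Matrix3f dA, dB;
PositionBasedElasticRod::ComputeMaterialFrame(p0, p1, g0, dA);
PositionBasedElasticRod::ComputeMaterialFrame(p1, p2, g1, dB);

float midEdgeLength = 0.5f * ((p1 - p0).norm() + (p2 - p1).norm());  // one common choice
Eigen::Vector3f darboux;
PositionBasedElasticRod::ComputeDarbouxVector(dA, dB, midEdgeLength, darboux);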
Example 6: enhanceAlignment
bool FruitDetector::enhanceAlignment(
    const Eigen::Matrix3f& new_rotation, const boost::shared_ptr<Fruit>& f) {
  // get euler angles. (Z-Y-X) convention --> (Yaw, Pitch, Roll)
  const Eigen::Vector3f euler_angles = new_rotation.eulerAngles(2, 1, 0);
  const float new_yaw = euler_angles[0];
  const float new_pitch = euler_angles[1];
  const float new_roll = euler_angles[2];

  const float old_roll = f->get_RPY().roll;
  const float old_pitch = f->get_RPY().pitch;

  // check if the new aligned model is less tilted
  if (new_roll < old_roll && new_pitch < old_pitch)
    return true;

  return false;
}
Example 7: find_toy_block
bool ObjectFinder::find_toy_block(float surface_height, geometry_msgs::PoseStamped &object_pose) {
    Eigen::Vector3f plane_normal;
    double plane_dist;
    //bool valid_plane;
    Eigen::Vector3f major_axis;
    Eigen::Vector3f centroid;
    bool found_object = true; //should verify this
    double block_height = 0.035; //this height is specific to the TOY_BLOCK model

    //if insufficient points in plane, find_plane_fit returns "false"
    //should do more sanity testing on found_object status
    //hard-coded search bounds based on a block of width 0.035
    found_object = pclUtils_.find_plane_fit(0.4, 1, -0.5, 0.5, surface_height + 0.025, surface_height + 0.045, 0.001,
                                            plane_normal, plane_dist, major_axis, centroid);
    //should have put a return value on find_plane_fit;

    if (plane_normal(2) < 0) plane_normal(2) *= -1.0; //in world frame, normal must point UP

    //create R from normal and major axis, then convert R to quaternion
    Eigen::Matrix3f R;
    Eigen::Vector3f y_vec;
    R.col(0) = major_axis;
    R.col(2) = plane_normal;
    R.col(1) = plane_normal.cross(major_axis);
    Eigen::Quaternionf quat(R);

    object_pose.header.frame_id = "base_link";
    object_pose.pose.position.x = centroid(0);
    object_pose.pose.position.y = centroid(1);
    //the TOY_BLOCK model has its origin in the middle of the block, not the top surface
    //so lower the block model origin by half the block height from upper surface
    object_pose.pose.position.z = centroid(2) - 0.5 * block_height;

    object_pose.pose.orientation.x = quat.x();
    object_pose.pose.orientation.y = quat.y();
    object_pose.pose.orientation.z = quat.z();
    object_pose.pose.orientation.w = quat.w();
    return found_object;
}
Example 8: hkp
std::vector<Point3f> PnPUtil::BackprojectPts(const std::vector<Point2f>& pts, const Eigen::Matrix4f& camTf, const Eigen::Matrix3f& K, const Mat& depth)
{
  std::vector<Point3f> pts3d;
  for(std::size_t i = 0; i < pts.size(); i++)
  {
    Point2f pt = pts[i];
    Eigen::Vector3f hkp(pt.x, pt.y, 1);
    Eigen::Vector3f backproj = K.inverse()*hkp;
    backproj /= backproj(2);
    backproj *= depth.at<float>(pt.y, pt.x);
    if(depth.at<float>(pt.y, pt.x) == 0)
      std::cout << "Bad depth (0)" << std::endl;

    Eigen::Vector4f backproj_h(backproj(0), backproj(1), backproj(2), 1);
    backproj_h = camTf*backproj_h;
    pts3d.push_back(Point3f(backproj_h(0), backproj_h(1), backproj_h(2)));
  }
  return pts3d;
}
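BackprojectPts assumes K is the usual 3x3 pinhole intrinsic matrix. A minimal sketch of filling it from focal lengths and a principal point; fx, fy, cx, cy are assumed values, not taken from the original source:

// Sketch only: pinhole intrinsics consumed by the backprojection above.
float fx = 525.0f, fy = 525.0f;   // focal lengths in pixels (assumed)
float cx = 319.5f, cy = 239.5f;   // principal point (assumed)
Eigen::Matrix3f K;
K << fx,   0.0f, cx,
     0.0f, fy,   cy,
     0.0f, 0.0f, 1.0f;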
Example 9: cloud_temp
template<typename PointT>
void SQ_fitter<PointT>::getBoundingBox(const PointCloudPtr &_cloud,
                                       double _dim[3],
                                       double _trans[3],
                                       double _rot[3]) {
  // 1. Compute the bounding box center
  Eigen::Vector4d centroid;
  pcl::compute3DCentroid( *_cloud, centroid );
  _trans[0] = centroid(0);
  _trans[1] = centroid(1);
  _trans[2] = centroid(2);

  // 2. Compute main axis orientations
  pcl::PCA<PointT> pca;
  pca.setInputCloud( _cloud );
  Eigen::Vector3f eigVal = pca.getEigenValues();
  Eigen::Matrix3f eigVec = pca.getEigenVectors();

  // Make sure the 3 axes are mutually orthogonal (right-handed basis)
  Eigen::Vector3f temp;
  eigVec.col(2) = eigVec.col(0); // Z
  Eigen::Vector3f v3 = (eigVec.col(1)).cross( eigVec.col(2) );
  eigVec.col(0) = v3;

  Eigen::Vector3f rpy = eigVec.eulerAngles(2,1,0);
  _rot[0] = (double)rpy(2);
  _rot[1] = (double)rpy(1);
  _rot[2] = (double)rpy(0);

  // Transform _cloud into the centered, axis-aligned frame
  Eigen::Matrix4f transf = Eigen::Matrix4f::Identity();
  transf.block(0,3,3,1) << (float)centroid(0), (float)centroid(1), (float)centroid(2);
  transf.block(0,0,3,3) = eigVec;

  Eigen::Matrix4f tinv; tinv = transf.inverse();
  PointCloudPtr cloud_temp( new pcl::PointCloud<PointT>() );
  pcl::transformPointCloud( *_cloud, *cloud_temp, tinv );

  // Get maximum and minimum
  PointT minPt; PointT maxPt;
  pcl::getMinMax3D( *cloud_temp, minPt, maxPt );
  _dim[0] = ( maxPt.x - minPt.x ) / 2.0;
  _dim[1] = ( maxPt.y - minPt.y ) / 2.0;
  _dim[2] = ( maxPt.z - minPt.z ) / 2.0;
}
Example 10: createPointCloud
pcl::PointCloud<pcl::PointXYZRGB> createPointCloud(
    const Eigen::Matrix3f K, const cv::Mat &depth, const cv::Mat &color
) {
    pcl::PointCloud<pcl::PointXYZRGB> pointCloud;
    int w = depth.cols;
    int h = depth.rows;
    float *pDepth = (float*)depth.data;
    float *pColor = (float*)color.data;

    for (int y = 0; y < h; ++y) {
        for (int x = 0; x < w; ++x) {
            size_t off = (y*w + x);
            size_t off3 = off * 3;

            // Check depth value validity
            float d = pDepth[x + y*w];
            if (d == 0.0f || std::isnan(d)) { continue; }

            // RGBXYZ point
            pcl::PointXYZRGB point;

            // Color
            point.b = std::max(0, std::min(255, (int)(pColor[off3    ]*255)));
            point.g = std::max(0, std::min(255, (int)(pColor[off3 + 1]*255)));
            point.r = std::max(0, std::min(255, (int)(pColor[off3 + 2]*255)));

            // Position: back-project the pixel with the inverse intrinsics
            Eigen::Vector3f pos = K.inverse() * Eigen::Vector3f(x*d, y*d, d);
            point.x = pos[0];
            point.y = pos[1];
            point.z = pos[2];

            pointCloud.push_back(point);
        }
    }
    return pointCloud;
}
Example 11: computeSensorOffsetAndK
void computeSensorOffsetAndK(Eigen::Isometry3f &sensorOffset, Eigen::Matrix3f &cameraMatrix, ParameterCamera *cameraParam, int reduction) {
  sensorOffset = Isometry3f::Identity();
  cameraMatrix.setZero();

  int cmax = 4;
  int rmax = 3;
  for (int c=0; c<cmax; c++){
    for (int r=0; r<rmax; r++){
      sensorOffset.matrix()(r, c) = cameraParam->offset()(r, c);
      if (c<3)
        cameraMatrix(r,c) = cameraParam->Kcam()(r, c);
    }
  }

  sensorOffset.translation() = Vector3f(0.15f, 0.0f, 0.05f);
  Quaternionf quat = Quaternionf(0.5, -0.5, 0.5, -0.5);
  sensorOffset.linear() = quat.toRotationMatrix();
  sensorOffset.matrix().block<1, 4>(3, 0) << 0.0f, 0.0f, 0.0f, 1.0f;

  float scale = 1./reduction;
  cameraMatrix *= scale;
  cameraMatrix(2,2) = 1;
}
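A minimal sketch of what the resulting cameraMatrix is for: projecting a point expressed in the camera frame to pixel coordinates. The point value is an assumption for illustration:

// Sketch only: pinhole projection with the (scaled) intrinsic matrix computed above.
Eigen::Vector3f pCam(0.1f, 0.2f, 1.5f);          // a point in the camera frame (assumed)
Eigen::Vector3f uvw = cameraMatrix * pCam;
Eigen::Vector2f pixel = uvw.head<2>() / uvw(2);  // divide by depth to obtain (u, v)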
Example 12: computeCovarianceMatrix
/**
 * ComputeCovarianceMatrix
 */
void ZAdaptiveNormals::computeCovarianceMatrix (const v4r::DataMatrix2D<Eigen::Vector3f> &cloud, const std::vector<int> &indices, const Eigen::Vector3f &mean, Eigen::Matrix3f &cov)
{
  Eigen::Vector3f pt;
  cov.setZero ();

  for (unsigned i = 0; i < indices.size (); ++i)
  {
    pt = cloud.data[indices[i]] - mean;

    cov(1,1) += pt[1] * pt[1];
    cov(1,2) += pt[1] * pt[2];
    cov(2,2) += pt[2] * pt[2];

    pt *= pt[0];
    cov(0,0) += pt[0];
    cov(0,1) += pt[1];
    cov(0,2) += pt[2];
  }

  // Copy the upper triangle into the lower triangle (the matrix is symmetric).
  cov(1,0) = cov(0,1);
  cov(2,0) = cov(0,2);
  cov(2,1) = cov(1,2);
}
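A covariance matrix built this way is typically handed to an eigen-decomposition, with the eigenvector of the smallest eigenvalue taken as the local surface normal. A minimal sketch, assuming cov and mean were filled as above and the viewpoint sits at the origin (names are illustrative):

// Sketch only: surface normal from the covariance of a local neighbourhood.
Eigen::SelfAdjointEigenSolver<Eigen::Matrix3f> solver(cov);
Eigen::Vector3f normal = solver.eigenvectors().col(0);  // eigenvalues come sorted ascending
if (normal.dot(-mean) < 0.0f)                           // orient towards the viewpoint
  normal = -normal;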
Example 13: computeCovarianceMatrix
/**
 * computeCovarianceMatrix
 */
void PlaneEstimationRANSAC::computeCovarianceMatrix (const std::vector<Eigen::Vector3f> &pts, const std::vector<int> &indices, const Eigen::Vector3f &mean, Eigen::Matrix3f &cov)
{
  Eigen::Vector3f pt;
  cov.setZero ();

  for (unsigned i = 0; i < indices.size (); ++i)
  {
    pt = pts[indices[i]] - mean;

    cov(1,1) += pt[1] * pt[1];
    cov(1,2) += pt[1] * pt[2];
    cov(2,2) += pt[2] * pt[2];

    pt *= pt[0];
    cov(0,0) += pt[0];
    cov(0,1) += pt[1];
    cov(0,2) += pt[2];
  }

  cov(1,0) = cov(0,1);
  cov(2,0) = cov(0,2);
  cov(2,1) = cov(1,2);
}
Example 14: main
int main(int argc, char **argv){
#if 0
  DOF6::TFLinkvf rot1,rot2;
  Eigen::Matrix4f tf1 = build_random_tflink(rot1,3);
  Eigen::Matrix4f tf2 = build_random_tflink(rot2,3);

  DOF6::DOF6_Source<DOF6::TFLinkvf,DOF6::TFLinkvf,float> abc(rot1.makeShared(), rot2.makeShared());

  abc.getRotation();
  abc.getTranslation();

  std::cout<<"tf1\n"<<tf1<<"\n";
  std::cout<<"tf2\n"<<tf2<<"\n";
  std::cout<<"tf1*2\n"<<tf1*tf2<<"\n";
  std::cout<<"tf2*1\n"<<tf2*tf1<<"\n";

  std::cout<<"tf1\n"<<rot1.getTransformation()<<"\n";
  std::cout<<"tf2\n"<<rot2.getTransformation()<<"\n";
  std::cout<<"tf1*2\n"<<(rot1+rot2).getTransformation()<<"\n";

  rot1.check();
  rot2.check();

  return 0;

  pcl::RotationFromCorrespondences rfc;
  Eigen::Vector3f n, nn, v,n2,n3,z,y,tv;
  float a = 0.1f;

  z.fill(0);y.fill(0);
  z(2)=1;y(1)=1;

  nn.fill(0);
  nn(0) = 1;
  Eigen::AngleAxisf aa(a,nn);
  nn.fill(100);

  n.fill(0);
  n(0) = 1;

  n2.fill(0);
  n2=n;
  n2(1) = 0.2;

  n3.fill(0);
  n3=n;
  n3(2) = 0.2;

  n2.normalize();
  n3.normalize();

  tv.fill(1);
  tv.normalize();

#if 0
#if 0
  rfc.add(n,aa.toRotationMatrix()*n+nn,
          1*n.cross(y),1*n.cross(z),
          1*(aa.toRotationMatrix()*n).cross(y),1*(aa.toRotationMatrix()*n).cross(z),
          1,1/sqrtf(3));
#else
  rfc.add(n,aa.toRotationMatrix()*n,
          0*n.cross(y),0*n.cross(z),
          0*(aa.toRotationMatrix()*n).cross(y),0*(aa.toRotationMatrix()*n).cross(z),
          1,0);
#endif

#if 1
  rfc.add(n2,aa.toRotationMatrix()*n2+nn,
          tv,tv,
          tv,tv,
          1,1);
#else
  rfc.add(n2,aa.toRotationMatrix()*n2+nn,
          1*n2.cross(y),1*n2.cross(z),
          1*(aa.toRotationMatrix()*n2).cross(y),1*(aa.toRotationMatrix()*n2).cross(z),
          1,1/sqrtf(3));
#endif
#else
  float f=1;
  Eigen::Vector3f cyl;
  cyl.fill(1);
  cyl(0)=1;
  Eigen::Matrix3f cylM;
  cylM.fill(0);
  cylM.diagonal() = cyl;

  rfc.add(n,aa.toRotationMatrix()*n,
          f*n.cross(y),f*n.cross(z),
          f*(aa.toRotationMatrix()*n).cross(y),f*(aa.toRotationMatrix()*n).cross(z),
          1,0);

  rfc.add(n2,aa.toRotationMatrix()*n2+nn,
          1*n2.cross(y),1*n2.cross(z),
          1*(aa.toRotationMatrix()*n2).cross(y),1*(aa.toRotationMatrix()*n2).cross(z),
          1,1);
#endif

  rfc.add(n3,aa.toRotationMatrix()*n3+nn,
          //tv,tv,
          //tv,tv,
          n3.cross(y),n3.cross(z),
          1*(aa.toRotationMatrix()*n3).cross(y),1*(aa.toRotationMatrix()*n3).cross(z),
//......... rest of the code omitted .........
Example 15: onGMM
void onGMM(const gaussian_mixture_model::GaussianMixture & mix)
{
  visualization_msgs::MarkerArray msg;

  ROS_INFO("gmm_rviz_converter: Received message.");

  uint i;
  for (i = 0; i < mix.gaussians.size(); i++)
  {
    visualization_msgs::Marker marker;

    marker.header.frame_id = m_frame_id;
    marker.header.stamp = ros::Time::now();
    marker.ns = m_rviz_namespace;
    marker.id = i;
    marker.type = visualization_msgs::Marker::SPHERE;
    marker.action = visualization_msgs::Marker::ADD;
    marker.lifetime = ros::Duration();

    Eigen::Vector3f coords;
    for (uint ir = 0; ir < NUM_OUTPUT_COORDINATES; ir++)
      coords[ir] = m_conversion_mask[ir]->GetMean(m_conversion_mask,mix.gaussians[i]);

    marker.pose.position.x = coords.x();
    marker.pose.position.y = coords.y();
    marker.pose.position.z = coords.z();

    Eigen::Matrix3f covmat;
    for (uint ir = 0; ir < NUM_OUTPUT_COORDINATES; ir++)
      for (uint ic = 0; ic < NUM_OUTPUT_COORDINATES; ic++)
        covmat(ir,ic) = m_conversion_mask[ir]->GetCov(m_conversion_mask,mix.gaussians[i],ic);

    Eigen::EigenSolver<Eigen::Matrix3f> evsolver(covmat);

    Eigen::Matrix3f eigenvectors = evsolver.eigenvectors().real();
    // Ensure a right-handed basis so the matrix is a proper rotation.
    if (eigenvectors.determinant() < 0.0)
      eigenvectors.col(0) = - eigenvectors.col(0);
    Eigen::Matrix3f rotation = eigenvectors;
    Eigen::Quaternionf quat = Eigen::Quaternionf(Eigen::AngleAxisf(rotation));

    marker.pose.orientation.x = quat.x();
    marker.pose.orientation.y = quat.y();
    marker.pose.orientation.z = quat.z();
    marker.pose.orientation.w = quat.w();

    Eigen::Vector3f eigenvalues = evsolver.eigenvalues().real();
    Eigen::Vector3f scale = Eigen::Vector3f(eigenvalues.array().abs().sqrt());
    if (m_normalize)
      scale.normalize();

    marker.scale.x = mix.weights[i] * scale.x() * m_scale;
    marker.scale.y = mix.weights[i] * scale.y() * m_scale;
    marker.scale.z = mix.weights[i] * scale.z() * m_scale;

    marker.color.a = 1.0;
    rainbow(float(i) / float(mix.gaussians.size()),marker.color.r,marker.color.g,marker.color.b);

    msg.markers.push_back(marker);
  }

  // this is a waste of resources, but we need to delete old markers in some way
  // someone should add a "clear all" command to rviz
  // (using expiration time is not an option, here)
  for ( ; i < m_max_markers; i++)
  {
    visualization_msgs::Marker marker;
    marker.id = i;
    marker.action = visualization_msgs::Marker::DELETE;
    marker.lifetime = ros::Duration();
    marker.header.frame_id = m_frame_id;
    marker.header.stamp = ros::Time::now();
    marker.ns = m_rviz_namespace;
    msg.markers.push_back(marker);
  }

  m_pub.publish(msg);

  ROS_INFO("gmm_rviz_converter: Sent message.");
}
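Because covmat is symmetric, Eigen::SelfAdjointEigenSolver is the more common choice for this step: it returns real eigenvalues sorted in ascending order and orthonormal eigenvectors, so the .real() extraction is unnecessary. A minimal alternative sketch for the decomposition part above (not the original author's code):

// Sketch only: symmetric eigen-decomposition of the covariance.
Eigen::SelfAdjointEigenSolver<Eigen::Matrix3f> solver(covmat);
Eigen::Matrix3f eigenvectors = solver.eigenvectors();
if (eigenvectors.determinant() < 0.0f)          // keep a proper (right-handed) rotation
  eigenvectors.col(0) = -eigenvectors.col(0);
Eigen::Quaternionf quat(eigenvectors);
Eigen::Vector3f scale = solver.eigenvalues().cwiseMax(0.0f).cwiseSqrt();  // ellipsoid semi-axes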