This article collects typical usage examples of the Eigen::Affine3d::matrix method in C++. If you are wondering what Affine3d::matrix does, how to call it, or what real code that uses it looks like, the curated examples below may help. You can also explore further usage examples of its containing class, Eigen::Affine3d.
The following shows 15 code examples of the Affine3d::matrix method, sorted by popularity by default. You can upvote the examples you like or find useful; your feedback helps the system recommend better C++ code examples.
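Before the examples, here is a minimal, self-contained sketch (not taken from any of the projects below) of what matrix() exposes: a writable view of the underlying 4x4 homogeneous matrix, where the upper-left 3x3 block is the rotation/linear part and the last column holds the translation.

#include <iostream>
#include <Eigen/Geometry>

int main()
{
  // Build a transform from a rotation about Z and a translation.
  const double half_pi = 1.5707963267948966;
  Eigen::Affine3d T = Eigen::Affine3d::Identity();
  T.rotate(Eigen::AngleAxisd(half_pi, Eigen::Vector3d::UnitZ()));
  T.translate(Eigen::Vector3d(1.0, 2.0, 3.0));

  // matrix() returns the full 4x4 homogeneous matrix: the upper-left 3x3 block is the
  // rotation/linear part, the last column is the translation, the last row is 0 0 0 1.
  std::cout << T.matrix() << std::endl;

  // The view is writable, so entries can be read or assigned directly.
  double tx = T.matrix()(0, 3);   // same value as T.translation().x()
  T.matrix()(1, 3) = 5.0;         // overwrite the y translation
  std::cout << "tx = " << tx << std::endl;
  return 0;
}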
Example 1: transformTFToEigen
TEST(TFEigenConversions, tf_eigen_transform)
{
  tf::Transform t;
  tf::Quaternion tq;
  tq[0] = gen_rand(-1.0,1.0);
  tq[1] = gen_rand(-1.0,1.0);
  tq[2] = gen_rand(-1.0,1.0);
  tq[3] = gen_rand(-1.0,1.0);
  tq.normalize();
  t.setOrigin(tf::Vector3(gen_rand(-10,10),gen_rand(-10,10),gen_rand(-10,10)));
  t.setRotation(tq);

  Eigen::Affine3d k;
  transformTFToEigen(t,k);

  for(int i=0; i < 3; i++)
  {
    ASSERT_NEAR(t.getOrigin()[i], k.matrix()(i,3), 1e-6);
    for(int j=0; j < 3; j++)
    {
      ASSERT_NEAR(t.getBasis()[i][j], k.matrix()(i,j), 1e-6);
    }
  }
  for (int col = 0; col < 3; col++)
    ASSERT_NEAR(k.matrix()(3, col), 0, 1e-6);
  ASSERT_NEAR(k.matrix()(3,3), 1, 1e-6);
}
Example 2: transformTFToEigen
Eigen::Affine3d transformTFToEigen(const tf::Transform &t) {
  Eigen::Affine3d e;
  for (int i = 0; i < 3; i++) {
    e.matrix()(i, 3) = t.getOrigin()[i];
    for (int j = 0; j < 3; j++) {
      e.matrix()(i, j) = t.getBasis()[i][j];
    }
  }
  // Fill in identity in last row
  for (int col = 0; col < 3; col++)
    e.matrix()(3, col) = 0;
  e.matrix()(3, 3) = 1;
  return e;
}
Example 3: transformTFToEigen
Eigen::Affine3d transformTFToEigen(const tf::Transform &t) {
  Eigen::Affine3d e;
  // treat the Eigen::Affine as a 4x4 matrix:
  for (int i = 0; i < 3; i++) {
    e.matrix()(i, 3) = t.getOrigin()[i]; // copy the origin from tf to Eigen
    for (int j = 0; j < 3; j++) {
      e.matrix()(i, j) = t.getBasis()[i][j]; // and copy 3x3 rotation matrix
    }
  }
  // Fill in identity in last row
  for (int col = 0; col < 3; col++)
    e.matrix()(3, col) = 0;
  e.matrix()(3, 3) = 1;
  return e;
}
Example 4: getFrustumCulledVoxels
void TSDFVolumeOctree::getFrustumCulledVoxels (const Eigen::Affine3d& trans,
                                               std::vector<OctreeNode::Ptr> &voxels) const {
  std::vector<OctreeNode::Ptr> voxels_all;
  octree_->getLeaves (voxels_all, max_cell_size_x_, max_cell_size_y_, max_cell_size_z_);
  pcl::PointCloud<pcl::PointXYZ>::Ptr voxel_cloud(new pcl::PointCloud<pcl::PointXYZ> (voxels_all.size(), 1));
  std::vector<int> indices;
  for (size_t i = 0; i < voxel_cloud->size(); ++i) {
    pcl::PointXYZ &pt = voxel_cloud->at (i);
    voxels_all[i]->getCenter (pt.x, pt.y, pt.z);
  }
  pcl::FrustumCulling<pcl::PointXYZ> fc (false);
  // pcl::FrustumCulling expects a camera pose with x forward, y up, z right;
  // cam2robot remaps the optical frame (z forward, y down) into that convention.
  Eigen::Matrix4f cam2robot;
  cam2robot << 0,  0, 1, 0,
               0, -1, 0, 0,
               1,  0, 0, 0,
               0,  0, 0, 1;
  Eigen::Matrix4f trans_robot = trans.matrix().cast<float> () * cam2robot;
  fc.setCameraPose (trans_robot);
  fc.setHorizontalFOV (70);
  fc.setVerticalFOV (70);
  fc.setNearPlaneDistance (min_sensor_dist_);
  fc.setFarPlaneDistance (max_sensor_dist_);
  fc.setInputCloud (voxel_cloud);
  fc.filter (indices);
  voxels.resize (indices.size());
  for (size_t i = 0; i < indices.size(); ++i) {
    voxels[i] = voxels_all[indices[i]];
  }
}
Example 5: srvSetHuboObjectPose
bool srvSetHuboObjectPose(HuboApplication::SetHuboObjectPose::Request &req,
                          HuboApplication::SetHuboObjectPose::Response &res)
{
  bool response = true;
  Eigen::Affine3d tempPose;
  Eigen::Isometry3d armPose;
  tf::poseMsgToEigen(req.Target, tempPose);
  if (tempPose(0,3) != tempPose(0,3)) // NaN check: NaN is the only value that compares unequal to itself
  {
    res.Success = false;
    return false;
  }
  armPose = tempPose.matrix();
  //std::cerr << tempPose.matrix() << std::endl;
  m_Manip.setControlMode(OBJECT_POSE);
  m_Manip.setAngleMode(QUATERNION);
  m_Manip.setPose(armPose, req.ObjectIndex);
  m_Manip.sendCommand();
  //std::cerr << armPose.matrix() << std::endl;
  res.Success = response;
  return response;
}
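A side note on the tempPose(0,3) != tempPose(0,3) guard used in this example and the next: it relies on NaN being the only floating-point value that compares unequal to itself. A clearer equivalent could look like the sketch below; this is not part of the original service code, and the helper name is made up for illustration.

#include <cmath>
#include <Eigen/Geometry>

// Hypothetical helper: true when the pose translation contains no NaNs.
// std::isnan expresses the same intent as the self-comparison trick above.
static bool hasValidTranslation(const Eigen::Affine3d &pose)
{
  return !std::isnan(pose(0, 3)) && !std::isnan(pose(1, 3)) && !std::isnan(pose(2, 3));
}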
Example 6: srvSetHuboArmPose
bool srvSetHuboArmPose(HuboApplication::SetHuboArmPose::Request &req,
                       HuboApplication::SetHuboArmPose::Response &res)
{
  bool response = true;
  Eigen::Affine3d tempPose;
  Eigen::Isometry3d armPose;
  tf::poseMsgToEigen(req.Target, tempPose);
  if (tempPose(0,3) != tempPose(0,3)) // NaN check: NaN is the only value that compares unequal to itself
  {
    res.Success = false;
    return false;
  }
  //std::cerr << tempPose.matrix() << std::endl;
  armPose = tempPose.matrix();
  //armPose.translation() = tempPose.translation();
  //armPose.linear() = tempPose.rotation();
  //std::cerr << armPose.matrix() << std::endl;
  m_Manip.setControlMode(END_EFFECTOR);
  m_Manip.setAngleMode(QUATERNION);
  m_Manip.setPose(armPose, req.ArmIndex);
  m_Manip.sendCommand();
  //std::cerr << armPose.matrix() << std::endl;
  res.Success = response;
  return response;
}
Example 7: transformEigenToTF
void transformEigenToTF(const Eigen::Affine3d &e, tf::Transform &t)
{
  t.setOrigin(tf::Vector3(e.matrix()(0,3), e.matrix()(1,3), e.matrix()(2,3)));
  t.setBasis(tf::Matrix3x3(e.matrix()(0,0), e.matrix()(0,1), e.matrix()(0,2),
                           e.matrix()(1,0), e.matrix()(1,1), e.matrix()(1,2),
                           e.matrix()(2,0), e.matrix()(2,1), e.matrix()(2,2)));
}
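Examples 2, 3, and 7 are natural inverses of one another. Below is a minimal round-trip sketch, assuming the two free functions shown above are available along with ROS tf and Eigen; it converts an Eigen transform to tf and back, then compares the 4x4 matrices.

#include <Eigen/Geometry>
#include <tf/transform_datatypes.h>

// Round-trip an Eigen transform through tf and check that matrix() survives the conversion.
// Relies on the transformEigenToTF / transformTFToEigen functions shown above.
void roundTripCheck()
{
  Eigen::Affine3d original = Eigen::Affine3d::Identity();
  original.translate(Eigen::Vector3d(0.1, 0.2, 0.3));
  original.rotate(Eigen::AngleAxisd(0.5, Eigen::Vector3d::UnitY()));

  tf::Transform as_tf;
  transformEigenToTF(original, as_tf);
  Eigen::Affine3d back = transformTFToEigen(as_tf);

  // Both 4x4 matrices should agree up to floating-point noise.
  bool ok = original.matrix().isApprox(back.matrix(), 1e-9);
  (void)ok;
}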
Example 8: rgb
void Data4Viewer::drawGlobalView()
{
  _pKinect->_pRGBCamera->setGLProjectionMatrix(0.1f, 100.f);
  glMatrixMode(GL_MODELVIEW);
  Eigen::Affine3d tmp; tmp.setIdentity();
  Eigen::Matrix4d mMat;
  mMat.row(0) =  tmp.matrix().row(0);
  mMat.row(1) = -tmp.matrix().row(1);
  mMat.row(2) = -tmp.matrix().row(2);
  mMat.row(3) =  tmp.matrix().row(3);
  glLoadMatrixd(mMat.data());
  GpuMat rgb(_pKinect->_cvmRGB);
  _pKinect->_pRGBCamera->renderCameraInLocal(rgb, _pGL.get(), false, NULL, 0.2f, true); //render in model coordinate
  //cout<<("drawRGBView");
  return;
}
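Negating rows 1 and 2 of the modelview matrix, as Example 8 does, is the usual flip between a computer-vision style camera (+Z forward, +Y down) and OpenGL's camera (-Z forward, +Y up); it is equivalent to premultiplying by diag(1, -1, -1, 1). Here is a minimal sketch of the same idea using only Eigen; the function name is illustrative and not part of the original code.

#include <Eigen/Geometry>

// Turn a camera pose in a computer-vision convention (+Z forward, +Y down)
// into an OpenGL modelview matrix (-Z forward, +Y up) by negating the Y and Z rows.
Eigen::Matrix4d cvToGlModelview(const Eigen::Affine3d &pose_cv)
{
  Eigen::Matrix4d flip = Eigen::Matrix4d::Identity();
  flip(1, 1) = -1.0;  // flip the Y row
  flip(2, 2) = -1.0;  // flip the Z row
  return flip * pose_cv.matrix();  // same as negating rows 1 and 2 of pose_cv.matrix()
}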
Example 9: mainloop
void visualOdometry::mainloop(){
  FRAME lastFrame;
  std::string file_name_ground = pd_.getData( "file_name_ground");
  readFromFile( file_name_ground, ground_truth_list_);
  lastFrame = readFrameFromGroundTruthList(start_index_, ground_truth_list_);
  // we always compare currFrame against lastFrame
  string detector = pd_.getData( "detector" );
  string descriptor = pd_.getData( "descriptor" );
  CAMERA_INTRINSIC_PARAMETERS camera = getDefaultCamera();
  computeKeyPointsAndDesp( lastFrame, detector, descriptor );
  // whether to visualize the point cloud
  bool visualize = pd_.getData("visualize_pointcloud")==string("true");
  int min_inliers = atoi( pd_.getData("min_inliers").c_str() );
  double max_norm = atof( pd_.getData("max_norm").c_str() );
  for (int currIndex=start_index_+1; currIndex<end_index_; currIndex++ )
  {
    current_index_ = currIndex;
    cout<<"Reading files "<<currIndex<<endl;
    FRAME currFrame = readFrameFromGroundTruthList(currIndex, ground_truth_list_); // read currFrame
    computeKeyPointsAndDesp( currFrame, detector, descriptor );
    // compare currFrame against lastFrame
    RESULT_OF_PNP result = estimateMotion( lastFrame, currFrame, camera );
    if ( result.inliers < min_inliers ){ // not enough inliers, skip this frame
      std::cout<<"not enough inliers: "<<result.inliers;
      continue;
    }
    //ROS_INFO_STREAM("trans: "<< result.tvec <<" rot: "<< result.rvec);
    cv::Mat rotation_matrix;
    cv::Rodrigues(result.rvec, rotation_matrix);
    Eigen::Matrix3d rotation_eigen_temp;
    cv::cv2eigen(rotation_matrix, rotation_eigen_temp);
    Eigen::AngleAxisd rotation_eigen(rotation_eigen_temp);
    Eigen::Affine3d incrementalAffine = Eigen::Affine3d::Identity();
    incrementalAffine = rotation_eigen;
    incrementalAffine(0,3) = result.tvec.ptr<double>(0)[0];
    incrementalAffine(1,3) = result.tvec.ptr<double>(1)[0];
    incrementalAffine(2,3) = result.tvec.ptr<double>(2)[0];
    //pose_camera_ = incrementalAffine * pose_camera_;
    publishTrajectory();
    publishGroundTruth();
    ROS_INFO_STREAM("RT: "<< incrementalAffine.matrix());
    lastFrame = currFrame;
  }
}
Example 10: transform
Plane Plane::transform(const Eigen::Affine3d& transform)
{
  Eigen::Vector4d n;
  n[0] = normal_[0];
  n[1] = normal_[1];
  n[2] = normal_[2];
  n[3] = d_;
  Eigen::Matrix4d m = transform.matrix();
  Eigen::Vector4d n_d = m.transpose() * n;
  Eigen::Vector4d n_dd = n_d.normalized();
  return Plane(Eigen::Vector3d(n_dd[0], n_dd[1], n_dd[2]), n_dd[3]);
}
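A note on the math in Example 10: a plane with coefficients p = (n, d), satisfying n . x + d = 0, transforms under a point transform x' = M x into p' = (M^-1)^T p. The example's m.transpose() * n matches that rule when its transform argument is the inverse of the point transform, i.e. it maps the destination frame back into the source frame; which convention applies depends on the surrounding code. Below is a minimal Eigen sketch of the general rule; the helper name is illustrative and not from the original class.

#include <Eigen/Geometry>

// General rule: if points move as x' = M * x, plane coefficients (nx, ny, nz, d)
// move as p' = (M^{-1})^T * p.
Eigen::Vector4d transformPlaneCoefficients(const Eigen::Affine3d &point_transform,
                                           const Eigen::Vector4d &plane)
{
  return point_transform.inverse().matrix().transpose() * plane;
}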
Example 11: transformEigenToTF
TEST(TFEigenConversions, eigen_tf_transform)
{
  tf::Transform t;
  Eigen::Affine3d k;
  Eigen::Quaterniond kq;
  kq.coeffs()(0) = gen_rand(-1.0,1.0);
  kq.coeffs()(1) = gen_rand(-1.0,1.0);
  kq.coeffs()(2) = gen_rand(-1.0,1.0);
  kq.coeffs()(3) = gen_rand(-1.0,1.0);
  kq.normalize();
  k.translate(Eigen::Vector3d(gen_rand(-10,10),gen_rand(-10,10),gen_rand(-10,10)));
  k.rotate(kq);
  transformEigenToTF(k,t);

  for(int i=0; i < 3; i++)
  {
    ASSERT_NEAR(t.getOrigin()[i], k.matrix()(i,3), 1e-6);
    for(int j=0; j < 3; j++)
    {
      ASSERT_NEAR(t.getBasis()[i][j], k.matrix()(i,j), 1e-6);
    }
  }
}
Example 12: idle
void idle()
{
  Timer frameTimer; frameTimer.start();
  // Timing things
  timeFrame+=1;

  double oscillationPeriod = factors.at("StimulusDuration")*TIMER_MS;

  switch (stimMotion)
  {
  case SAWTOOTH_MOTION:
    periodicValue = oscillationAmplitude*mathcommon::sawtooth(timeFrame,oscillationPeriod);
    break;
  case TRIANGLE_MOTION:
    periodicValue = oscillationAmplitude*mathcommon::trianglewave(timeFrame,oscillationPeriod);
    break;
  case SINUSOIDAL_MOTION:
    periodicValue = oscillationAmplitude*sin(3.14*timeFrame/(oscillationPeriod));
    break;
  default:
    SAWTOOTH_MOTION; // note: a bare enum constant has no effect, so periodicValue keeps its previous value here
  }

  timingFile << totalTimer.getElapsedTimeInMilliSec() << " " << periodicValue << endl;

  // Simulate head translation
  // Coordinates picker
  markers[1] = Vector3d(0,0,0);
  markers[2] = Vector3d(0,10,0);
  markers[3] = Vector3d(0,0,10);
  headEyeCoords.update(markers[1],markers[2],markers[3]);
  eyeLeft = headEyeCoords.getLeftEye();
  eyeRight = headEyeCoords.getRightEye();

  Vector3d fixationPoint = (headEyeCoords.getRigidStart().getFullTransformation() * ( Vector3d(0,0,focalDistance) ) );
  // Projection of view normal on the focal plane
  Eigen::ParametrizedLine<double,3> pline = Eigen::ParametrizedLine<double,3>::Through(eyeRight,fixationPoint);
  projPoint = pline.intersection(focalPlane)*((fixationPoint - eyeRight).normalized()) + eyeRight;

  stimTransformation.matrix().setIdentity();
  stimTransformation.translation() << 0,0,focalDistance;

  Timer sleepTimer;
  sleepTimer.sleep((TIMER_MS - frameTimer.getElapsedTimeInMilliSec())/2);
}
Example 13: ts
Eigen::Matrix4d getTranformationMatrix(const string &frameName, const string &coordSys ){
  tf::TransformListener listener;
  ros::Time ts(0);
  tf::StampedTransform transform;
  Eigen::Affine3d pose = Eigen::Affine3d::Identity(); // start from identity so a failed lookup still returns a valid matrix
  try {
    listener.waitForTransform( coordSys, frameName, ts, ros::Duration(1) );
    listener.lookupTransform( coordSys, frameName, ts, transform);
    tf::TransformTFToEigen(transform, pose);
  }
  catch (tf::TransformException ex){
    ROS_ERROR("%s",ex.what());
  }
  return pose.matrix();
}
Example 14: cos
TEST(TfEigen, ConvertTransform)
{
  Eigen::Matrix4d tm;

  double alpha = M_PI/4.0;
  double theta = M_PI/6.0;
  double gamma = M_PI/12.0;

  tm << cos(theta)*cos(gamma), -cos(theta)*sin(gamma), sin(theta), 1, //
        cos(alpha)*sin(gamma)+sin(alpha)*sin(theta)*cos(gamma), cos(alpha)*cos(gamma)-sin(alpha)*sin(theta)*sin(gamma), -sin(alpha)*cos(theta), 2, //
        sin(alpha)*sin(gamma)-cos(alpha)*sin(theta)*cos(gamma), cos(alpha)*sin(theta)*sin(gamma)+sin(alpha)*cos(gamma), cos(alpha)*cos(theta), 3, //
        0, 0, 0, 1;

  Eigen::Affine3d T(tm);

  geometry_msgs::TransformStamped msg = tf2::eigenToTransform(T);
  Eigen::Affine3d Tback = tf2::transformToEigen(msg);

  EXPECT_TRUE(T.isApprox(Tback));
  EXPECT_TRUE(tm.isApprox(Tback.matrix()));
}
Example 15: colorize
static bool colorize(const drc::map_image_t& iDepthMap,
                     const Eigen::Affine3d& iLocalToCamera,
                     const bot_core::image_t& iImage,
                     const BotCamTrans* iCamTrans,
                     bot_core::image_t& oImage) {
  oImage.utime = iDepthMap.utime;
  oImage.width = iDepthMap.width;
  oImage.height = iDepthMap.height;
  oImage.row_stride = 3*iDepthMap.width;
  oImage.pixelformat = PIXEL_FORMAT_RGB;
  oImage.size = oImage.row_stride * oImage.height;
  oImage.nmetadata = 0;
  oImage.data.resize(oImage.size);

  Eigen::Matrix4d xform;
  for (int i = 0; i < 4; ++i) {
    for (int j = 0; j < 4; ++j) {
      xform(i,j) = iDepthMap.transform[i][j];
    }
  }
  xform = iLocalToCamera.matrix()*xform.inverse();

  for (int i = 0; i < iDepthMap.height; ++i) {
    for (int j = 0; j < iDepthMap.width; ++j) {
      double z;
      if (iDepthMap.format == drc::map_image_t::FORMAT_GRAY_FLOAT32) {
        z = ((float*)(&iDepthMap.data[0] + i*iDepthMap.row_bytes))[j];
        if (z < -1e10) {
          continue;
        }
      }
      else if (iDepthMap.format == drc::map_image_t::FORMAT_GRAY_UINT8) {
        uint8_t val = iDepthMap.data[i*iDepthMap.row_bytes + j];
        if (val == 0) {
          continue;
        }
        z = val;
      }
      else {
        continue;
      }
      Eigen::Vector4d pt = xform*Eigen::Vector4d(j,i,z,1);
      double p[3] = {pt(0)/pt(3), pt(1)/pt(3), pt(2)/pt(3)};
      double pix[3];
      bot_camtrans_project_point(iCamTrans, p, pix);
      if (pix[2] < 0) {
        continue;
      }
      uint8_t r,g,b;
      if (!interpolate(pix[0], pix[1], iImage, r, g, b)) {
        continue;
      }
      int outImageIndex = i*oImage.row_stride + 3*j;
      oImage.data[outImageIndex+0] = r;
      oImage.data[outImageIndex+1] = g;
      oImage.data[outImageIndex+2] = b;
    }
  }
  return true;
}