This page collects typical usage examples of the C++ method eigen::Transform::matrix. If you are wondering what Transform::matrix does, how to call it, or where to find working examples, the curated code samples below may help. You can also read further about the enclosing class eigen::Transform.
The following presents 9 code examples of Transform::matrix, sorted by popularity by default. You can upvote the examples you like or find useful; your feedback helps the system recommend better C++ code samples.
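As background for the examples: in Eigen, Transform<Scalar, Dim, Mode>::matrix() returns a reference to the transform's underlying homogeneous matrix ((Dim+1) x (Dim+1) for the Affine and Projective modes, Dim x (Dim+1) for AffineCompact), and that reference is writable. A minimal sketch of both uses, not taken from any of the examples below:
#include <Eigen/Geometry>
#include <iostream>

int main()
{
    // A 3D affine transform: scale by 2, then translate by (1, 2, 3).
    Eigen::Transform<float, 3, Eigen::Affine> t =
        Eigen::Translation3f(1.0f, 2.0f, 3.0f) * Eigen::Scaling(2.0f);

    // matrix() exposes the underlying 4x4 homogeneous matrix by reference.
    std::cout << t.matrix() << "\n";

    // The reference is writable as well, e.g. zeroing the translation column.
    t.matrix().block<3, 1>(0, 3).setZero();
    std::cout << t.matrix() << "\n";
    return 0;
}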
Example 1: perspective
template <typename Scalar>
Eigen::Matrix<Scalar,4,4> perspective(Scalar fovy, Scalar aspect, Scalar zNear, Scalar zFar){
    Eigen::Transform<Scalar,3,Eigen::Projective> tr;
    tr.matrix().setZero();
    assert(aspect > 0);
    assert(zFar > zNear);
    Scalar radf = M_PI * fovy / 180.0;
    Scalar tan_half_fovy = std::tan(radf / 2.0);
    tr(0,0) = 1.0 / (aspect * tan_half_fovy);
    tr(1,1) = 1.0 / (tan_half_fovy);
    tr(2,2) = - (zFar + zNear) / (zFar - zNear);
    tr(3,2) = - 1.0;
    tr(2,3) = - (2.0 * zFar * zNear) / (zFar - zNear);
    return tr.matrix();
}
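A hedged usage sketch (the GL call is left as a comment; projLocation and the concrete scalar type are assumptions, not part of the example): the returned matrix is column-major, so its data() pointer can be handed to OpenGL without transposing.
// Minimal sketch assuming the perspective<Scalar>() helper above is in scope.
Eigen::Matrix4f proj = perspective<float>(60.0f, 800.0f / 600.0f, 0.1f, 100.0f);
// Eigen matrices are column-major by default, which is what OpenGL expects:
// glUniformMatrix4fv(projLocation, 1, GL_FALSE, proj.data());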
Example 2: applyTransformationGround
template <typename PointT> void
pcl::people::GroundBasedPeopleDetectionApp<PointT>::applyTransformationGround ()
{
    if (transformation_set_ && ground_coeffs_set_)
    {
        Eigen::Transform<float, 3, Eigen::Affine> transform;
        transform = transformation_;
        ground_coeffs_transformed_ = transform.matrix() * ground_coeffs_;
    }
    else
    {
        ground_coeffs_transformed_ = ground_coeffs_;
    }
}
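The same pattern in standalone form (the function name and the Matrix4f parameter type are illustrative, not PCL's actual members): a stored matrix is assigned to an Affine transform, and matrix() turns it back into a plain 4x4 for ordinary matrix-vector algebra.
#include <Eigen/Geometry>

// Hypothetical free function mirroring the member function above.
Eigen::Vector4f applyTransformation(const Eigen::Matrix4f& transformation,
                                    const Eigen::Vector4f& ground_coeffs)
{
    Eigen::Transform<float, 3, Eigen::Affine> transform;
    transform = transformation;                 // wrap the raw 4x4 in a Transform
    return transform.matrix() * ground_coeffs;  // back to plain matrix algebra
}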
Example 3: renderSprites
void renderSprites() {
    glUseProgram(spriteShaderProgramId);
    entityManagerRef->visitEntitiesWithTypeMask(componentMask, [&](Entity<EntityManagerTypes...> &entity){
        auto &aabbComponent = entity.template getComponent<AABBComponent>();
        auto &transformComponent = entity.template getComponent<TransformComponent>();
        Eigen::Translation<GLfloat, 3> translationMat((transformComponent.x - HALF_SCREEN_WIDTH) / HALF_SCREEN_WIDTH,
                                                      (transformComponent.y - HALF_SCREEN_HEIGHT) / HALF_SCREEN_HEIGHT,
                                                      0);
        Eigen::DiagonalMatrix<GLfloat, 3> scaleMat(aabbComponent.width / SCREEN_WIDTH,
                                                   aabbComponent.height / SCREEN_HEIGHT,
                                                   1);
        Eigen::Transform<GLfloat, 3, Eigen::Affine> transformMatrix = translationMat * scaleMat;
        boundsSprite.render(transformMatrix.matrix());
    });
}
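The composition above in isolation (names and values are illustrative): multiplying an Eigen::Translation by an Eigen::DiagonalMatrix yields an Affine transform, and matrix() produces the 4x4 a sprite renderer typically consumes.
#include <Eigen/Geometry>

// Hypothetical helper isolating the Translation * DiagonalMatrix composition.
Eigen::Matrix4f buildSpriteTransform(float tx, float ty, float sx, float sy)
{
    Eigen::Translation<float, 3> move(tx, ty, 0.0f);
    Eigen::DiagonalMatrix<float, 3> scale(sx, sy, 1.0f);
    Eigen::Transform<float, 3, Eigen::Affine> model = move * scale;
    // matrix() yields the column-major 4x4 that e.g. a GL uniform upload expects.
    return model.matrix();
}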
Example 4: CalibrateKinectCheckerboard
CalibrateKinectCheckerboard()
    : nh_("~"), it_(nh_), calibrated(false)
{
    // Load parameters from the server.
    nh_.param<std::string>("fixed_frame", fixed_frame, "/base_link");
    nh_.param<std::string>("camera_frame", camera_frame, "/camera_link");
    nh_.param<std::string>("target_frame", target_frame, "/calibration_pattern");
    nh_.param<std::string>("tip_frame", tip_frame, "/gripper_link");
    nh_.param<int>("checkerboard_width", checkerboard_width, 6);
    nh_.param<int>("checkerboard_height", checkerboard_height, 7);
    nh_.param<double>("checkerboard_grid", checkerboard_grid, 0.027);
    // Set pattern detector sizes
    pattern_detector_.setPattern(cv::Size(checkerboard_width, checkerboard_height), checkerboard_grid, CHESSBOARD);
    transform_.translation().setZero();
    transform_.matrix().topLeftCorner<3, 3>() = Quaternionf().setIdentity().toRotationMatrix();
    // Create subscriptions
    info_sub_ = nh_.subscribe("/camera/rgb/camera_info", 1, &CalibrateKinectCheckerboard::infoCallback, this);
    // Also publishers
    pub_ = it_.advertise("calibration_pattern_out", 1);
    detector_pub_ = nh_.advertise<pcl::PointCloud<pcl::PointXYZ> >("detector_cloud", 1);
    physical_pub_ = nh_.advertise<pcl::PointCloud<pcl::PointXYZ> >("physical_points_cloud", 1);
    // Create ideal points
    ideal_points_.push_back( pcl::PointXYZ(0, 0, 0) );
    ideal_points_.push_back( pcl::PointXYZ((checkerboard_width-1)*checkerboard_grid, 0, 0) );
    ideal_points_.push_back( pcl::PointXYZ(0, (checkerboard_height-1)*checkerboard_grid, 0) );
    ideal_points_.push_back( pcl::PointXYZ((checkerboard_width-1)*checkerboard_grid, (checkerboard_height-1)*checkerboard_grid, 0) );
    // Create proper gripper tip point
    nh_.param<double>("gripper_tip_x", gripper_tip.point.x, 0.0);
    nh_.param<double>("gripper_tip_y", gripper_tip.point.y, 0.0);
    nh_.param<double>("gripper_tip_z", gripper_tip.point.z, 0.0);
    gripper_tip.header.frame_id = tip_frame;
    ROS_INFO("[calibrate] Initialized.");
}
Example 5: asyncUpdate
//TODO: This is ugly and hacky and needs to get refactored
void OverlayManager::asyncUpdate()
{
    boost::lock_guard<boost::mutex> guard(mtx_);
    vr::TrackedDeviceIndex_t controller1 = -1;
    vr::TrackedDeviceIndex_t controller2 = -1;
    vr::VRControllerState_t hmdState;
    vr::VRControllerState_t controller1State;
    vr::VRControllerState_t controller2State;
    vr::TrackedDevicePose_t hmdPose;
    vr::TrackedDevicePose_t controller1Pose;
    vr::TrackedDevicePose_t controller2Pose;
    vr::HmdMatrix34_t overlayTransform;
    vr::HmdVector2_t overlayCenter;
    vr::ETrackingUniverseOrigin origin;
    unsigned int width, height;
    //Find the controllers
    vr::IVRSystem* vrSys = vr::VRSystem();
    vr::IVRCompositor* vrComp = vr::VRCompositor();
    vr::IVROverlay* vrOvrly = vr::VROverlay();
    for (int i = 0; i < vr::k_unMaxTrackedDeviceCount; i++)
    {
        switch (vrSys->GetTrackedDeviceClass(i))
        {
        case vr::TrackedDeviceClass_Controller:
            if (controller1 == -1)
            {
                controller1 = i;
            }
            if (controller1 >= 0 && i != controller1)
            {
                controller2 = i;
            }
            if (controller2 >= 0)
            {
                break;
            }
        }
    }
    int count = 0;
    for (std::vector<std::shared_ptr<Overlay>>::iterator it = m_overlayVec.begin(); it != m_overlayVec.end(); ++it)
    {
        if (vrSys && controller1 >= 0 && (*it)->isVisible())
        {
            //Set padding of the overlay based on scale
            float padding = 0.5f * ((float)(*it)->getScale() / 100.0f);
            float z_padding = 0.1f;
            //Get the controller pose information relative to tracking space
            vrSys->GetControllerStateWithPose(vrComp->GetTrackingSpace(), controller1, &controller1State, sizeof(controller1State), &controller1Pose);
            vrSys->GetControllerStateWithPose(vrComp->GetTrackingSpace(), controller2, &controller2State, sizeof(controller2State), &controller2Pose);
            vrSys->GetControllerStateWithPose(vrComp->GetTrackingSpace(), vr::k_unTrackedDeviceIndex_Hmd, &hmdState, sizeof(hmdState), &hmdPose);
            //Center of the overlay adjusted for scale
            overlayCenter.v[0] = 0.5f;// * ((float)(*it)->getScale() / 100.0f);
            overlayCenter.v[1] = 0.5f;// * ((float)(*it)->getScale() / 100.0f);
            //Get the overlay transform relative to tracking space
            vr::EVROverlayError err = vr::VROverlayError_None;
            if (err = vrOvrly->GetTransformForOverlayCoordinates((*it)->getOverlayHandle(), vrComp->GetTrackingSpace(), overlayCenter, &overlayTransform))
            {
                DBOUT("Error with overlay!!" << err << std::endl);
            }
            //Converts Controller world tracking transform matrix to a transform matrix relative to the overlay
            Eigen::Transform<float, 3, Eigen::Affine> controller1Transform;
            Eigen::Transform<float, 3, Eigen::Affine> controller2Transform;
            Eigen::Transform<float, 3, Eigen::Affine> overlayTrans;
            for (int i = 0; i < 3; ++i)
            {
                for (int j = 0; j < 4; ++j)
                {
                    controller1Transform(i, j) = controller1Pose.mDeviceToAbsoluteTracking.m[i][j];
                    controller2Transform(i, j) = controller2Pose.mDeviceToAbsoluteTracking.m[i][j];
                    overlayTrans(i, j) = overlayTransform.m[i][j];
                }
            }
            Eigen::Matrix<float, 4, 4> overlayInverse = overlayTrans.matrix().inverse();
            Eigen::Matrix<float, 4, 4> controller1OverlayTransform = overlayInverse * controller1Transform.matrix();
            Eigen::Matrix<float, 4, 4> controller2OverlayTransform = overlayInverse * controller2Transform.matrix();
            //Boolean values for if the controller is within the bounds of the overlay based on the padding
            //z-padding is used for the depth across the face of the overlay
            bool controller1InOverlay = (controller1OverlayTransform(0, 3) < padding && controller1OverlayTransform(0, 3) > -padding) &&
                                        (controller1OverlayTransform(1, 3) < padding && controller1OverlayTransform(1, 3) > -padding) &&
//......... the rest of the code is omitted here .........
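Both this example and the next copy the row-major 3x4 vr::HmdMatrix34_t into an Eigen transform element by element. A small helper of this shape (hypothetical, not part of OverlayManager) keeps that conversion in one place; calling setIdentity() first guarantees the bottom row of the 4x4 is [0 0 0 1] before the affine part is overwritten:
#include <Eigen/Geometry>
#include <openvr.h>

// Hypothetical helper: convert OpenVR's row-major 3x4 pose matrix into an
// Eigen affine transform whose matrix() can be inverted and composed.
static Eigen::Transform<float, 3, Eigen::Affine> toEigen(const vr::HmdMatrix34_t& m)
{
    Eigen::Transform<float, 3, Eigen::Affine> t;
    t.setIdentity();                      // ensure row 3 is [0 0 0 1]
    for (int i = 0; i < 3; ++i)
        for (int j = 0; j < 4; ++j)
            t(i, j) = m.m[i][j];
    return t;
}
// Usage, mirroring the code above:
//   Eigen::Matrix4f overlayInverse = toEigen(overlayTransform).matrix().inverse();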
Example 6: TrackingUpdate
void OverlayManager::TrackingUpdate(std::vector<std::shared_ptr<Overlay>>::iterator it, vr::VRControllerState_t controllerState, vr::TrackedDevicePose_t controllerPose, bool controllerInOverlay, vr::IVRSystem* vrSys, vr::IVRCompositor* vrComp)
{
    vr::TrackedDeviceIndex_t controller1 = -1;
    vr::TrackedDeviceIndex_t controller2 = -1;
    vr::VRControllerState_t hmdState;
    vr::VRControllerState_t controller1State;
    vr::VRControllerState_t controller2State;
    vr::TrackedDevicePose_t hmdPose;
    vr::TrackedDevicePose_t controller1Pose;
    vr::TrackedDevicePose_t controller2Pose;
    for (int i = 0; i < vr::k_unMaxTrackedDeviceCount; i++)
    {
        switch (vrSys->GetTrackedDeviceClass(i))
        {
        case vr::TrackedDeviceClass_Controller:
            if (controller1 == -1)
            {
                controller1 = i;
            }
            if (controller1 >= 0 && i != controller1)
            {
                controller2 = i;
            }
            if (controller2 >= 0)
            {
                break;
            }
        }
    }
    vrSys->GetControllerStateWithPose(vrComp->GetTrackingSpace(), controller1, &controller1State, sizeof(controller1State), &controller1Pose);
    vrSys->GetControllerStateWithPose(vrComp->GetTrackingSpace(), controller2, &controller2State, sizeof(controller2State), &controller2Pose);
    vrSys->GetControllerStateWithPose(vrComp->GetTrackingSpace(), vr::k_unTrackedDeviceIndex_Hmd, &hmdState, sizeof(hmdState), &hmdPose);
    if (controllerInOverlay) //controller trigger squeezed, in overlay and not being tracked to controller1
    {
        if ((*it)->getTracking() == 0)
        {
            (*it)->setOverlayMatrix(controllerPose.mDeviceToAbsoluteTracking);
        }
        else
        {
            //Must be same sized for matrix inverse calculation
            Eigen::Transform<float, 3, Eigen::Affine> trackedSource;
            Eigen::Transform<float, 3, Eigen::Affine> invertedSource;
            Eigen::Transform<float, 3, Eigen::Affine> controllerTransform;
            //Eigen::Transform<float, 3, Eigen::Affine> newTransform;
            vr::HmdMatrix34_t newPosition;
            memset(&newPosition, 0, sizeof(vr::HmdMatrix34_t));
            //HMD Calculation
            //Populate the Eigen matrices
            for (unsigned i = 0; i < 3; ++i)
            {
                for (unsigned j = 0; j < 4; ++j)
                {
                    if ((*it)->getTracking() == 1)
                    {
                        trackedSource(i, j) = hmdPose.mDeviceToAbsoluteTracking.m[i][j];
                    }
                    if ((*it)->getTracking() == 2)
                    {
                        trackedSource(i, j) = controller1Pose.mDeviceToAbsoluteTracking.m[i][j];
                    }
                    if ((*it)->getTracking() == 3)
                    {
                        trackedSource(i, j) = controller2Pose.mDeviceToAbsoluteTracking.m[i][j];
                    }
                    controllerTransform(i, j) = controllerPose.mDeviceToAbsoluteTracking.m[i][j];
                } //End Column loop
            } //End Row loop
            Eigen::Matrix<float, 4, 4> invMatrix = trackedSource.matrix().inverse();
            Eigen::Matrix<float, 4, 4> newTransform;
            newTransform = invMatrix * controllerTransform.matrix();
            //Copy values from the new matrix into the openVR matrix
            for (unsigned i = 0; i < 3; ++i)
            {
                for (unsigned j = 0; j < 4; ++j)
                {
                    if (i < 3)
                    {
                        newPosition.m[i][j] = newTransform(i, j);
                    }
                } // end column loop
//......... the rest of the code is omitted here .........
Example 7: shapes
#include <iostream>
TEST_CASE("views")
{
    aam::MatrixX shapes(2, 4);
    shapes << 1, 2, 3, 4,
              5, 6, 7, 8;
    aam::MatrixX shapesSeparated(2, 4);
    shapesSeparated << 1, 2, 5, 6,
                       3, 4, 7, 8;
    REQUIRE(shapesSeparated.block(0, 0, 2, 2).isApprox(aam::toSeparatedView<aam::Scalar>(shapes.row(0))));
    REQUIRE(shapesSeparated.block(0, 2, 2, 2).isApprox(aam::toSeparatedView<aam::Scalar>(shapes.row(1))));
    // Test with affine transforms
    Eigen::Transform<aam::Scalar, 2, Eigen::AffineCompact> t;
    t = Eigen::Translation<aam::Scalar, 2>(0.5, 0.5) * Eigen::Scaling(aam::Scalar(2));
    auto x = aam::toSeparatedView<aam::Scalar>(shapes.row(0)).rowwise().homogeneous();
    aam::MatrixX r = x * t.matrix().transpose();
    aam::MatrixX shouldBe(2, 2);
    shouldBe << 2.5, 4.5,
                6.5, 8.5;
    REQUIRE(r.isApprox(shouldBe));
    aam::MatrixX shouldBeArray(1, 4);
    shouldBeArray << 2.5, 4.5, 6.5, 8.5;
    REQUIRE(aam::toInterleavedView<aam::Scalar>(r).isApprox(shouldBeArray));
}
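A sketch of the shape bookkeeping in this test, with the aam helpers stripped away (values illustrative): for a 2D AffineCompact transform, matrix() is 2x3, so N x 3 homogeneous row vectors times matrix().transpose() (3x2) give N x 2 transformed points.
#include <Eigen/Geometry>

void affineCompactShapesSketch()
{
    Eigen::Transform<double, 2, Eigen::AffineCompact> t;
    t = Eigen::Translation2d(0.5, 0.5) * Eigen::Scaling(2.0);
    Eigen::MatrixXd pts(2, 2);
    pts << 1, 2,
           3, 4;
    // Each row (x, y) becomes (2x + 0.5, 2y + 0.5).
    Eigen::MatrixXd out = pts.rowwise().homogeneous() * t.matrix().transpose();
    // out is [[2.5, 4.5], [6.5, 8.5]], matching the expected values in the test.
}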
Example 8: BOOST_AUTO
template<typename T>
typename PointMatcher<T>::TransformationParameters ErrorMinimizersImpl<T>::PointToPlaneWithCovErrorMinimizer::compute(
    const DataPoints& filteredReading,
    const DataPoints& filteredReference,
    const OutlierWeights& outlierWeights,
    const Matches& matches)
{
    assert(matches.ids.rows() > 0);
    // Fetch paired points
    typename ErrorMinimizer::ErrorElements& mPts = this->getMatchedPoints(filteredReading, filteredReference, matches, outlierWeights);
    const int dim = mPts.reading.features.rows();
    const int nbPts = mPts.reading.features.cols();
    // Adjust if the user forces 2D minimization on XY-plane
    int forcedDim = dim - 1;
    if(force2D && dim == 4)
    {
        mPts.reading.features.conservativeResize(3, Eigen::NoChange);
        mPts.reading.features.row(2) = Matrix::Ones(1, nbPts);
        mPts.reference.features.conservativeResize(3, Eigen::NoChange);
        mPts.reference.features.row(2) = Matrix::Ones(1, nbPts);
        forcedDim = dim - 2;
    }
    // Fetch normal vectors of the reference point cloud (with adjustment if needed)
    const BOOST_AUTO(normalRef, mPts.reference.getDescriptorViewByName("normals").topRows(forcedDim));
    // Note: Normal vector must be precalculated to use this error. Use appropriate input filter.
    assert(normalRef.rows() > 0);
    // Compute cross product: cross = cross(reading, normalRef)
    const Matrix cross = this->crossProduct(mPts.reading.features, normalRef);
    // wF = [weights*cross, weights*normals]
    // F  = [cross, normals]
    Matrix wF(normalRef.rows() + cross.rows(), normalRef.cols());
    Matrix F(normalRef.rows() + cross.rows(), normalRef.cols());
    for(int i = 0; i < cross.rows(); i++)
    {
        wF.row(i) = mPts.weights.array() * cross.row(i).array();
        F.row(i) = cross.row(i);
    }
    for(int i = 0; i < normalRef.rows(); i++)
    {
        wF.row(i + cross.rows()) = mPts.weights.array() * normalRef.row(i).array();
        F.row(i + cross.rows()) = normalRef.row(i);
    }
    // Unadjusted covariance A = wF * F'
    const Matrix A = wF * F.transpose();
    if (A.fullPivHouseholderQr().rank() != A.rows())
    {
        // TODO: handle that properly
        //throw ConvergenceError("encountered singular while minimizing point to plane distance");
    }
    const Matrix deltas = mPts.reading.features - mPts.reference.features;
    // Dot product: dot = dot(deltas, normals)
    Matrix dotProd = Matrix::Zero(1, normalRef.cols());
    for(int i = 0; i < normalRef.rows(); i++)
    {
        dotProd += (deltas.row(i).array() * normalRef.row(i).array()).matrix();
    }
    // b = -(wF' * dot)
    const Vector b = -(wF * dotProd.transpose());
    // Cholesky decomposition
    Vector x(A.rows());
    x = A.llt().solve(b);
    // Transform parameters to matrix
    Matrix mOut;
    if(dim == 4 && !force2D)
    {
        Eigen::Transform<T, 3, Eigen::Affine> transform;
        // IT IS NOT CORRECT TO USE EULER ANGLES!
        // Rotation in Euler angles follows the roll-pitch-yaw (1-2-3) rule
        /*transform = Eigen::AngleAxis<T>(x(0), Eigen::Matrix<T,1,3>::UnitX())
            * Eigen::AngleAxis<T>(x(1), Eigen::Matrix<T,1,3>::UnitY())
            * Eigen::AngleAxis<T>(x(2), Eigen::Matrix<T,1,3>::UnitZ()); */
        // Reverse roll-pitch-yaw conversion, very useful piece of knowledge, keep it with you all time!
        /*const T pitch = -asin(transform(2,0));
        const T roll = atan2(transform(2,1), transform(2,2));
        const T yaw = atan2(transform(1,0) / cos(pitch), transform(0,0) / cos(pitch));
        std::cerr << "d angles" << x(0) - roll << ", " << x(1) - pitch << "," << x(2) - yaw << std::endl;*/
        transform = Eigen::AngleAxis<T>(x.head(3).norm(), x.head(3).normalized());
        transform.translation() = x.segment(3, 3);
        mOut = transform.matrix();
    }
    else
    {
        Eigen::Transform<T, 2, Eigen::Affine> transform;
        transform = Eigen::Rotation2D<T>(x(0));
        transform.translation() = x.segment(1, 2);
//......... the rest of the code is omitted here .........
Example 9: shapes
TEST_CASE("transform")
{
aam::MatrixX shapes(2, 4);
shapes << 1, 2, 3, 4,
5, 6, 7, 8;
aam::MatrixX shapesResult(2, 4);
shapesResult << 2.5, 4.5, 6.5, 8.5,
10.5, 12.5, 14.5, 16.5;
// Test with affine transforms
Eigen::Transform<aam::Scalar, 2, Eigen::AffineCompact> t;
t = Eigen::Translation<aam::Scalar, 2>(0.5, 0.5) * Eigen::Scaling(aam::Scalar(2));
aam::Affine2 m = t.matrix().transpose();
// Version 1
// Can work in place (rather slow) but output needs to be pre-allocated.
aam::RowVectorX r1(4);
aam::transformShape(m, shapes.row(0), r1);
REQUIRE(r1.isApprox(shapesResult.row(0)));
// Version 2
// In place
aam::RowVectorX r2 = shapes.row(1);
aam::transformShapeInPlace(m, r2);
REQUIRE(r2.isApprox(shapesResult.row(1)));