This article collects typical usage examples of the C++ method Eigen::Affine3f::translation. If you are wondering how Affine3f::translation works and how to call it in practice, the curated code examples below may help. You can also explore further usage examples of the enclosing class Eigen::Affine3f.
Below are 15 code examples of the Affine3f::translation method, sorted by popularity by default. You can upvote the examples you like or find useful; your ratings help the system recommend better C++ examples.
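Before the examples, here is a minimal editorial sketch (not taken from any of the examples below) showing the two ways the method is typically used: Affine3f::translation() exposes the translation part of the transform as a 3x1 block, which can be copied into an Eigen::Vector3f or assigned to directly as an lvalue.
#include <Eigen/Geometry>
#include <iostream>

int main()
{
    Eigen::Affine3f T = Eigen::Affine3f::Identity();
    T.translation() = Eigen::Vector3f(1.0f, 2.0f, 3.0f); // write access: set the translation part
    Eigen::Vector3f t = T.translation();                 // read access: copy the translation part
    std::cout << t.transpose() << std::endl;             // prints "1 2 3"
    return 0;
}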
Example 1: l
Eigen::Affine3f WorldDownloadManager::toEigenAffine(const T1 & linear, const T2 & translation)
{
  Eigen::Matrix3f l;
  Eigen::Vector3f t;

  for (uint r = 0; r < 3; r++)
    for (uint c = 0; c < 3; c++)
      l(r, c) = linear[r * 3 + c];

  for (uint r = 0; r < 3; r++)
    t[r] = translation[r];

  Eigen::Affine3f result;
  result.linear() = l;
  result.translation() = t;
  return result;
}
Example 2: trackPointCloud
void SLTrackerWorker::trackPointCloud(PointCloudConstPtr pointCloud){

    // Recursively call self until latest event is hit
    busy = true;
    QCoreApplication::sendPostedEvents(this, QEvent::MetaCall);
    bool result = busy;
    busy = false;
    if(!result){
        std::cerr << "SLTrackerWorker: dropped point cloud!" << std::endl;
        return;
    }

    if(!referenceSet){
        tracker->setReference(pointCloud);
        referenceSet = true;
        return;
    }

    performanceTime.start();

    Eigen::Affine3f T;
    bool converged;
    float RMS;
    tracker->determineTransformation(pointCloud, T, converged, RMS);

    // Emit result
    if(converged)
        emit newPoseEstimate(T);

    //std::cout << "Pose: " << T.matrix() << std::endl;
    std::cout << "Tracker: " << performanceTime.elapsed() << "ms" << std::endl;

    if(writeToDisk){
        Eigen::Vector3f t(T.translation());
        Eigen::Quaternionf q(T.rotation());
        (*ofStream) << trackingTime.elapsed() << ",";
        (*ofStream) << t.x() << "," << t.y() << "," << t.z() << ",";
        (*ofStream) << q.w() << "," << q.x() << "," << q.y() << "," << q.z() << "," << RMS;
        (*ofStream) << std::endl;
    }
}
Example 3: setTrackerTarget
void setTrackerTarget(){
    initTracking();
    Eigen::Vector4f c;
    Eigen::Affine3f trans = Eigen::Affine3f::Identity();
    pcl::compute3DCentroid<PointT>(*object_to_track, c);
    trans.translation().matrix() = Eigen::Vector3f(c[0], c[1], c[2]);
    tracker_->setTrans(trans);

    pcl::PointCloud<PointT>::Ptr tmp_pc(new pcl::PointCloud<PointT>);
    pcl::transformPointCloud<PointT>(*object_to_track, *tmp_pc, trans.inverse());

    tracker_->setReferenceCloud(tmp_pc);
    tracker_->setInputCloud(cloud);

    tracked_object_centroid->clear();
    tracked_object_centroid->push_back(PointT(c[0], c[1], c[2]));
}
Example 4: printPose
void
printPose( Eigen::Affine3f const& pose )
{
    // debug
    std::cout << pose.linear() << std::endl
              << pose.translation().transpose() << std::endl;

    float alpha = atan2(  pose.linear()(1,0), pose.linear()(0,0) );
    float beta  = atan2( -pose.linear()(2,0),
                         sqrt( pose.linear()(2,1) * pose.linear()(2,1) +
                               pose.linear()(2,2) * pose.linear()(2,2) ) );
    float gamma = atan2(  pose.linear()(2,1), pose.linear()(2,2) );

    std::cout << "alpha: " << alpha << " " << alpha * 180.f / M_PI << std::endl;
    std::cout << "beta: "  << beta  << " " << beta  * 180.f / M_PI << std::endl;
    std::cout << "gamma: " << gamma << " " << gamma * 180.f / M_PI << std::endl;
}
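The three atan2 calls in printPose implement the standard ZYX (yaw-pitch-roll) decomposition of the rotation matrix: alpha is the rotation about z, beta about y, and gamma about x. If PCL is available, as in several of the other examples here, the same angles can be obtained with pcl::getEulerAngles; a minimal sketch, assuming the PCL common headers are on the include path:
#include <Eigen/Geometry>
#include <pcl/common/eigen.h>
#include <iostream>

void printPoseAngles(const Eigen::Affine3f &pose)
{
    float roll, pitch, yaw;
    // roll corresponds to gamma, pitch to beta, and yaw to alpha in printPose above
    pcl::getEulerAngles(pose, roll, pitch, yaw);
    std::cout << "roll: " << roll << " pitch: " << pitch << " yaw: " << yaw << std::endl;
}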
Example 5: modelToScene
transformation objectModelSV::modelToScene(const pcl::PointNormal & pointModel, const Eigen::Affine3f & transformSceneToGlobal, const float alpha)
{
    Eigen::Vector3f modelPoint = pointModel.getVector3fMap();
    Eigen::Vector3f modelNormal = pointModel.getNormalVector3fMap();

    // Get transformation from model local frame to global frame
    Eigen::Vector3f cross = modelNormal.cross(Eigen::Vector3f::UnitX()).normalized();
    Eigen::AngleAxisf rotationModelToGlobal(acosf(modelNormal.dot(Eigen::Vector3f::UnitX())), cross);
    if (isnan(cross[0]))
    {
        rotationModelToGlobal = Eigen::AngleAxisf(0.0, Eigen::Vector3f::UnitX());
    }
    //std::cout << "ola:" << acosf(modelNormal.dot(Eigen::Vector3f::UnitX())) << std::endl;
    //std::cout << "X:" << Eigen::Translation3f(rotationModelToGlobal * ((-1) * modelPoint)).x() << std::endl;
    //std::cout << "Y:" << Eigen::Translation3f(rotationModelToGlobal * ((-1) * modelPoint)).y() << std::endl;
    //std::cout << "Z:" << Eigen::Translation3f(rotationModelToGlobal * ((-1) * modelPoint)).z() << std::endl;
    Eigen::Affine3f transformModelToGlobal = Eigen::Translation3f(rotationModelToGlobal * ((-1) * modelPoint)) * rotationModelToGlobal;

    // Get transformation from model local frame to scene local frame
    Eigen::Affine3f completeTransform = transformSceneToGlobal.inverse() * Eigen::AngleAxisf(alpha, Eigen::Vector3f::UnitX()) * transformModelToGlobal;
    //std::cout << Eigen::AngleAxisf(alpha, Eigen::Vector3f::UnitX()).matrix() << std::endl;

    Eigen::Quaternion<float> rotationQ = Eigen::Quaternion<float>(completeTransform.rotation());

    // If the object is symmetric, remove the yaw rotation (assume symmetry around the z axis)
    if(symmetric)
    {
        Eigen::Vector3f eulerAngles;
        // first: [0] -> rot. around x (roll), [1] -> rot. around y (pitch), [2] -> rot. around z (yaw)
        quaternionToEuler(rotationQ, eulerAngles[0], eulerAngles[1], eulerAngles[2]);
        //pcl::getEulerAngles(completeTransform, eulerAngles[0], eulerAngles[1], eulerAngles[2]);
        //eulerAngles[2] = 0.0;
        eulerToQuaternion(rotationQ, eulerAngles[0], eulerAngles[1], eulerAngles[2]);
        //quaternionToEuler(rotationQ, eulerAngles[2], eulerAngles[1], eulerAngles[2]);
        //std::cout << "EULER ANGLES: " << eulerAngles << std::endl;
    }

    //std::cout << "rotation: " << rotationQ << std::endl;
    return transformation(rotationQ, Eigen::Translation3f(completeTransform.translation()));
}
Example 6: result_cloud
//Draw model reference point cloud
void
drawResult (pcl::visualization::PCLVisualizer& viz)
{
  ParticleXYZRPY result = tracker_->getResult ();
  Eigen::Affine3f transformation = tracker_->toEigenMatrix (result);

  //move close to camera a little for better visualization
  transformation.translation () += Eigen::Vector3f (0.0f, 0.0f, -0.005f);
  CloudPtr result_cloud (new Cloud ());
  pcl::transformPointCloud<RefPointType> (*(tracker_->getReferenceCloud ()), *result_cloud, transformation);

  //Draw blue model reference point cloud
  {
    pcl::visualization::PointCloudColorHandlerCustom<RefPointType> blue_color (result_cloud, 0, 0, 255);
    if (!viz.updatePointCloud (result_cloud, blue_color, "resultcloud"))
      viz.addPointCloud (result_cloud, blue_color, "resultcloud");
  }
}
Example 7: check
bool TransitionLimitXYZRPY::check(FootstepState::Ptr from,
                                  FootstepState::Ptr to) const
{
  // from * trans = to
  const Eigen::Affine3f diff = to->getPose() * from->getPose().inverse();
  const Eigen::Vector3f diff_pos(diff.translation());
  if (std::abs(diff_pos[0]) < x_max_ &&
      std::abs(diff_pos[1]) < y_max_ &&
      std::abs(diff_pos[2]) < z_max_) {
    float roll, pitch, yaw;
    pcl::getEulerAngles(diff, roll, pitch, yaw);
    return (std::abs(roll) < roll_max_ &&
            std::abs(pitch) < pitch_max_ &&
            std::abs(yaw) < yaw_max_);
  }
  else {
    return false;
  }
}
Example 8: q
geometry_msgs::Pose transformEigenAffine3fToPose(Eigen::Affine3f e) {
    Eigen::Vector3f Oe;
    Eigen::Matrix3f Re;
    geometry_msgs::Pose pose;
    Oe = e.translation();
    Re = e.linear();

    Eigen::Quaternionf q(Re); // convert rotation matrix Re to a quaternion, q
    pose.position.x = Oe(0);
    pose.position.y = Oe(1);
    pose.position.z = Oe(2);

    pose.orientation.x = q.x();
    pose.orientation.y = q.y();
    pose.orientation.z = q.z();
    pose.orientation.w = q.w();

    return pose;
}
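Going the other way is just as direct, because translation() is also writable. A minimal sketch of a hypothetical inverse helper (the name transformPoseToEigenAffine3f and the use of ROS geometry_msgs types are assumptions, not part of the example above):
#include <Eigen/Geometry>
#include <geometry_msgs/Pose.h>

Eigen::Affine3f transformPoseToEigenAffine3f(const geometry_msgs::Pose &pose)
{
    Eigen::Affine3f e = Eigen::Affine3f::Identity();
    // translation() is an lvalue block, so it can be assigned directly
    e.translation() = Eigen::Vector3f(pose.position.x, pose.position.y, pose.position.z);
    Eigen::Quaternionf q(pose.orientation.w, pose.orientation.x,
                         pose.orientation.y, pose.orientation.z);
    e.linear() = q.normalized().toRotationMatrix();
    return e;
}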
Example 9: paintGL
void GLWidget::paintGL()
{
    makeCurrent();

    glClearColor(0.7, 0.7, 0.7, 1.0);
    glClear(GL_COLOR_BUFFER_BIT | GL_DEPTH_BUFFER_BIT);
    glEnable(GL_DEPTH_TEST);
    glEnable(GL_BLEND);

    if(Interface::showBackgroundImage)
    {
        //renderTexture.imageAlpha = Interface::alphaBackgroundImage;
        //renderTexture.renderTexture(*mTextManagerObj.getBaseTexture(), Eigen::Vector2i(this->width(), this->height()));
    }

    //phong.render(*mTextManagerObj.getMesh(), *currentCamera, light_trackball);
    //multi.render(mTextManagerObj, *currentCamera, light_trackball);
    multiMask.useWeights = Interface::useWeights;
    multiMask.render(mTextManagerObj, *currentCamera, light_trackball);
    //depthMap.render(*mTextManagerObj.getMesh(), calibrationCamera, light_trackball);

    if(Interface::camera == Interface::CAMERA_TYPES::FREE)
    {
        //camera.render();
        if(Interface::ShowCameras){
            for(int i = 0; i < mTextManagerObj.getNumPhotos(); i++)
            {
                mTextManagerObj.changePhotoReferenceTo(i);
                camRep.resetModelMatrix();
                Eigen::Affine3f m = (*(mTextManagerObj.getViewMatrix())).inverse();
                m.translation() -= mTextManagerObj.getMesh()->getCentroid();
                m.translation() *= mTextManagerObj.getMesh()->getScale();
                m.scale(0.08);
                camRep.setModelMatrix(m);
                camRep.render(*currentCamera, light_trackball);
            }
            mTextManagerObj.changePhotoReferenceTo(0);
        }
    }
}
Example 10: main
int main(int argc, char** argv) {
    ros::init(argc, argv, "object_finder_node"); // name this node
    ROS_INFO("instantiating the object finder action server: ");
    ObjectFinder object_finder_as; // create an instance of the class "ObjectFinder"
    tf::TransformListener tfListener;
    ROS_INFO("listening for kinect-to-base transform:");
    tf::StampedTransform stf_kinect_wrt_base;
    bool tferr = true;
    ROS_INFO("waiting for tf between kinect_pc_frame and base_link...");
    while (tferr) {
        tferr = false;
        try {
            //The direction of the transform returned will be from the target_frame to the source_frame.
            //Which if applied to data, will transform data in the source_frame into the target_frame.
            //See tf/CoordinateFrameConventions#Transform_Direction
            tfListener.lookupTransform("base_link", "kinect_pc_frame", ros::Time(0), stf_kinect_wrt_base);
        } catch (tf::TransformException &exception) {
            ROS_WARN("%s; retrying...", exception.what());
            tferr = true;
            ros::Duration(0.5).sleep(); // sleep for half a second
            ros::spinOnce();
        }
    }
    ROS_INFO("kinect to base_link tf is good");
    object_finder_as.xformUtils_.printStampedTf(stf_kinect_wrt_base);

    tf::Transform tf_kinect_wrt_base = object_finder_as.xformUtils_.get_tf_from_stamped_tf(stf_kinect_wrt_base);
    g_affine_kinect_wrt_base = object_finder_as.xformUtils_.transformTFToAffine3f(tf_kinect_wrt_base);
    cout << "affine rotation: " << endl;
    cout << g_affine_kinect_wrt_base.linear() << endl;
    cout << "affine offset: " << g_affine_kinect_wrt_base.translation().transpose() << endl;

    ROS_INFO("going into spin");
    // from here, all the work is done in the action server, with the interesting stuff done within "executeCB()"
    while (ros::ok()) {
        ros::spinOnce(); //normally, can simply do: ros::spin();
        ros::Duration(0.1).sleep();
    }
    return 0;
}
Example 11: launch
void ViewController::launch(FileData* game, Eigen::Vector3f center)
{
    if(game->getType() != GAME)
    {
        LOG(LogError) << "tried to launch something that isn't a game";
        return;
    }

    Eigen::Affine3f origCamera = mCamera;
    origCamera.translation() = -mCurrentView->getPosition();

    center += mCurrentView->getPosition();
    stopAnimation(1); // make sure the fade in isn't still playing
    mLockInput = true;

    if(Settings::getInstance()->getString("TransitionStyle") == "fade")
    {
        // fade out, launch game, fade back in
        auto fadeFunc = [this](float t) {
            //t -= 1;
            //mFadeOpacity = lerp<float>(0.0f, 1.0f, t*t*t + 1);
            mFadeOpacity = lerp<float>(0.0f, 1.0f, t);
        };
        setAnimation(new LambdaAnimation(fadeFunc, 800), 0, [this, game, fadeFunc]
        {
            game->getSystem()->launchGame(mWindow, game);
            mLockInput = false;
            setAnimation(new LambdaAnimation(fadeFunc, 800), 0, nullptr, true);
            this->onFileChanged(game, FILE_METADATA_CHANGED);
        });
    }else{
        // move camera to zoom in on center + fade out, launch game, come back in
        setAnimation(new LaunchAnimation(mCamera, mFadeOpacity, center, 1500), 0, [this, origCamera, center, game]
        {
            game->getSystem()->launchGame(mWindow, game);
            mCamera = origCamera;
            mLockInput = false;
            setAnimation(new LaunchAnimation(mCamera, mFadeOpacity, center, 600), 0, nullptr, true);
            this->onFileChanged(game, FILE_METADATA_CHANGED);
        });
    }
}
Example 12: stereoToVec
gsl_vector* VelStereoMatcher::stereoToVec(const StereoProperties stereo)
{
  Eigen::Affine3f tform = stereo.getLeftCamera().tform;
  Eigen::Matrix4f intrinsics = stereo.getLeftCamera().intrinsics.matrix();

  Eigen::Vector3f tran;
  tran = tform.translation();
  float x = tran[0];
  float y = tran[1];
  float z = tran[2];

  Eigen::Matrix3f mat = tform.rotation();
  Eigen::AngleAxisf axis;
  axis = mat;
  float ax = axis.axis()[0] * axis.angle();
  float ay = axis.axis()[1] * axis.angle();
  float az = axis.axis()[2] * axis.angle();

  float fx = intrinsics(0,0);
  float fy = intrinsics(1,1);
  float cx = intrinsics(0,2);
  float cy = intrinsics(1,2);

  float baseline = stereo.baseline;

  gsl_vector* vec = gsl_vector_alloc(11);
  gsl_vector_set(vec, 0, x);
  gsl_vector_set(vec, 1, y);
  gsl_vector_set(vec, 2, z);
  gsl_vector_set(vec, 3, ax);
  gsl_vector_set(vec, 4, ay);
  gsl_vector_set(vec, 5, az);
  gsl_vector_set(vec, 6, fx);
  gsl_vector_set(vec, 7, fy);
  gsl_vector_set(vec, 8, cx);
  gsl_vector_set(vec, 9, cy);
  gsl_vector_set(vec, 10, baseline);

  return vec;
}
Example 13: showPoseEstimate
void SLTrackerDialog::showPoseEstimate(const Eigen::Affine3f & T){
    if(ui->poseTab->isVisible()){
        ui->poseWidget->showPoseEstimate(T);
    } else if(ui->traceTab->isVisible()){
        Eigen::Vector3f t(T.translation());
        Eigen::Quaternionf q(T.rotation());
        ui->translationTraceWidget->addMeasurement("tx", t(0));
        ui->translationTraceWidget->addMeasurement("ty", t(1));
        ui->translationTraceWidget->addMeasurement("tz", t(2));
        ui->translationTraceWidget->draw();
        ui->rotationTraceWidget->addMeasurement("qw", q.w());
        ui->rotationTraceWidget->addMeasurement("qx", q.x());
        ui->rotationTraceWidget->addMeasurement("qy", q.y());
        ui->rotationTraceWidget->addMeasurement("qz", q.z());
        ui->rotationTraceWidget->draw();
    }
}
Example 14: result_cloud
void
drawResult (pcl::visualization::PCLVisualizer& viz)
{
  ParticleXYZRPY result = tracker_->getResult ();
  Eigen::Affine3f transformation = tracker_->toEigenMatrix (result);

  // move a little bit for better visualization
  transformation.translation () += Eigen::Vector3f (0.0f, 0.0f, -0.005f);
  RefCloudPtr result_cloud (new RefCloud ());

  if (!visualize_non_downsample_)
    pcl::transformPointCloud<RefPointType> (*(tracker_->getReferenceCloud ()), *result_cloud, transformation);
  else
    pcl::transformPointCloud<RefPointType> (*reference_, *result_cloud, transformation);

  {
    // note: despite the handler's name, RGB (0, 0, 255) renders the result cloud in blue
    pcl::visualization::PointCloudColorHandlerCustom<RefPointType> red_color (result_cloud, 0, 0, 255);
    if (!viz.updatePointCloud (result_cloud, red_color, "resultcloud"))
      viz.addPointCloud (result_cloud, red_color, "resultcloud");
  }
}
Example 15
void CGLUtil::getRTFromWorld2CamCV(Eigen::Matrix3f* pRw_, Eigen::Vector3f* pTw_) {
    //only the matrix in the top of the modelview matrix stack works
    Eigen::Affine3f M;
    glGetFloatv(GL_MODELVIEW_MATRIX, M.matrix().data());
    Eigen::Matrix3f S;
    *pTw_ = M.translation();
    *pRw_ = M.linear();
    //M.computeRotationScaling(pRw_, &S);
    //*pTw_ = (*pTw_)/S(0,0);

    //negate rows 1 and 2, to switch from the GL to the CV convention
    for (int r = 1; r < 3; r++){
        for (int c = 0; c < 3; c++){
            (*pRw_)(r,c) = -(*pRw_)(r,c);
        }
        (*pTw_)(r) = -(*pTw_)(r);
    }
    //PRINT(S);
    //PRINT(*pRw_);
    //PRINT(*pTw_);
    return;
}