This article collects typical usage examples of the C++ method Quaterniond::setFromTwoVectors. If you are wondering how Quaterniond::setFromTwoVectors is used in C++, or looking for concrete examples of calling it, the hand-picked code examples below may help. You can also explore further usage examples of the containing class, Quaterniond.
Four code examples of the Quaterniond::setFromTwoVectors method are shown below, sorted by popularity by default. You can upvote the examples you like or find useful; your feedback helps the system recommend better C++ code examples.
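Before the examples, here is a minimal, self-contained sketch (not taken from any of the examples below) of what setFromTwoVectors does: given two vectors a and b, it sets the quaternion to a rotation that maps the direction of a onto the direction of b. The inputs do not need to be normalized.

#include <iostream>
#include <Eigen/Geometry>

int main()
{
    // Rotation that maps +X onto an arbitrary (not necessarily unit) vector.
    Eigen::Vector3d from = Eigen::Vector3d::UnitX();
    Eigen::Vector3d to(1.0, 2.0, 2.0);

    Eigen::Quaterniond q;
    q.setFromTwoVectors(from, to);

    // q * from is now the unit vector pointing along "to".
    std::cout << (q * from).transpose() << "\n"
              << to.normalized().transpose() << std::endl;
    return 0;
}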
Example 1: direction
Quaterniond
ArrowVisualizer::orientation(const Entity* parent, double t) const
{
    // The subclass computes the direction
    Vector3d targetDirection = direction(parent, t);

    Quaterniond rotation = Quaterniond::Identity();

    // The arrow geometry points in the +x direction, so calculate the rotation
    // required to make the arrow point in the target direction.
    rotation.setFromTwoVectors(Vector3d::UnitX(), targetDirection);

    return rotation;
}
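A small aside on Example 1: Eigen 3 also offers a static factory, Quaterniond::FromTwoVectors, so the Identity() initialization followed by setFromTwoVectors could be collapsed into a single expression. A sketch under that assumption; the helper name and its parameter are hypothetical stand-ins for the code above.

#include <Eigen/Geometry>

// Hypothetical helper: the same rotation as in Example 1, written with the
// static factory. "targetDirection" stands in for direction(parent, t).
Eigen::Quaterniond orientationFromDirection(const Eigen::Vector3d& targetDirection)
{
    return Eigen::Quaterniond::FromTwoVectors(Eigen::Vector3d::UnitX(), targetDirection);
}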
Example 2: body
ArrowReferenceMark::ArrowReferenceMark(const Body& _body) :
    body(_body),
    size(1.0),
    color(1.0f, 1.0f, 1.0f),
#ifdef USE_HDR
    opacity(0.0f)
#else
    opacity(1.0f)
#endif
{
}

void
ArrowReferenceMark::setSize(float _size)
{
    size = _size;
}

void
ArrowReferenceMark::setColor(Color _color)
{
    color = _color;
}

void
ArrowReferenceMark::render(Renderer* /* renderer */,
                           const Vector3f& /* position */,
                           float /* discSize */,
                           double tdb) const
{
    Vector3d v = getDirection(tdb);
    if (v.norm() < 1.0e-12)
    {
        // Skip rendering of zero-length vectors
        return;
    }

    v.normalize();
    Quaterniond q;
    q.setFromTwoVectors(Vector3d::UnitZ(), v);

    if (opacity == 1.0f)
    {
        // Enable depth buffering
        glEnable(GL_DEPTH_TEST);
        glDepthMask(GL_TRUE);
        glDisable(GL_BLEND);
    }
    else
    {
        glEnable(GL_DEPTH_TEST);
        glDepthMask(GL_FALSE);
        glEnable(GL_BLEND);
#ifdef USE_HDR
        glBlendFunc(GL_ONE_MINUS_SRC_ALPHA, GL_SRC_ALPHA);
#else
        glBlendFunc(GL_SRC_ALPHA, GL_ONE_MINUS_SRC_ALPHA);
#endif
    }

    glPushMatrix();
    glRotate(q.cast<float>());
    glScalef(size, size, size);

    glDisable(GL_LIGHTING);
    glDisable(GL_TEXTURE_2D);

    float shaftLength = 0.85f;
    float headLength = 0.10f;
    float shaftRadius = 0.010f;
    float headRadius = 0.025f;
    unsigned int nSections = 30;

    glColor4f(color.red(), color.green(), color.blue(), opacity);
    RenderArrow(shaftLength, headLength, shaftRadius, headRadius, nSections);

    glPopMatrix();

    glDisable(GL_DEPTH_TEST);
    glDepthMask(GL_FALSE);
    glEnable(GL_TEXTURE_2D);
    glEnable(GL_BLEND);
    glBlendFunc(GL_SRC_ALPHA, GL_ONE);
}
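Example 2 relies on a Celestia-side glRotate(Quaternionf) overload to push the quaternion onto the OpenGL matrix stack. For readers without that helper, here is a hedged sketch of one way to do the same thing with plain legacy OpenGL calls, assuming a fixed-function context; the function name and header path are assumptions, not part of the original code.

#include <Eigen/Geometry>
#include <GL/gl.h>   // assumption: a legacy fixed-function OpenGL header/context is available

// Hypothetical helper: apply an Eigen quaternion to the fixed-function
// modelview stack via an angle/axis pair. glRotatef expects degrees.
void applyRotation(const Eigen::Quaterniond& q)
{
    const Eigen::AngleAxisd aa(q);
    const double radToDeg = 180.0 / 3.14159265358979323846;
    glRotatef(static_cast<float>(aa.angle() * radToDeg),
              static_cast<float>(aa.axis().x()),
              static_cast<float>(aa.axis().y()),
              static_cast<float>(aa.axis().z()));
}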
Example 3: match
void PointcloudProc::match(const Frame& frame0, const Frame& frame1,
                           const Eigen::Vector3d& trans, const Eigen::Quaterniond& rot,
                           std::vector<cv::DMatch>& matches) const
{
    PointCloud<PointXYZRGBNormal> transformed_cloud;

    // First, transform the current frame. (Is this inverse? Or should we just transform the other cloud?)
    //transformPointCloudWithNormals<PointXYZRGBNormal>(frame1.cloud, transformed_cloud, -trans.cast<float>(), rot.cast<float>().conjugate());
    //transformPointCloudWithNormals<PointXYZRGBNormal>(frame0.pointcloud, transformed_cloud, -trans.cast<float>(), rot.cast<float>().conjugate());
    transformPointCloudWithNormals<PointXYZRGBNormal>(frame0.pointcloud, transformed_cloud, Vector3f(0, 0, 0), rot.cast<float>().conjugate());
    transformPointCloudWithNormals<PointXYZRGBNormal>(transformed_cloud, transformed_cloud, -trans.cast<float>(), Quaternionf(1, 0, 0, 0));
    //pcl::io::savePCDFileASCII ("cloud0.pcd", transformed_cloud);
    //pcl::io::savePCDFileASCII ("cloud1.pcd", frame1.pointcloud);

    // Optional/TODO: Perform ICP to further refine the estimate.
    /*PointCloud<PointXYZRGBNormal> cloud_reg;
    IterativeClosestPointNonLinear<PointXYZRGBNormal, PointXYZRGBNormal> reg;
    reg.setInputCloud (boost::make_shared<const PointCloud<PointXYZRGBNormal> > (transformed_cloud));
    reg.setInputTarget (boost::make_shared<const PointCloud<PointXYZRGBNormal> > (frame1.pointcloud));
    reg.setMaximumIterations(50);
    reg.setTransformationEpsilon (1e-8);
    reg.align(cloud_reg); */

    // Find matches between the point clouds of the two frames. (TODO: also compare normals)
    std::vector<int> f0_indices, f1_indices;
    getMatchingIndices(transformed_cloud, frame1.pointcloud, f0_indices, f1_indices);

    // Fill in keypoints and projections of relevant features.
    // Currently just done when setting the pointcloud.

    // Convert matches into the correct format.
    matches.clear();
    // Starting at i=1 as a hack to not let through (0,0,0) matches (why is this in the ptcloud?).
#pragma omp parallel for shared( transformed_cloud, f0_indices, f1_indices )
    for (unsigned int i = 1; i < f0_indices.size(); i++)
    {
        const PointXYZRGBNormal& pt0 = transformed_cloud.points[f0_indices[i]];
        const PointXYZRGBNormal& pt1 = frame1.pointcloud.points[f1_indices[i]];

        // Figure out the distance and the angle between the normals.
        Quaterniond normalquat;
        Vector3d norm0(pt0.normal[0], pt0.normal[1], pt0.normal[2]),
                 norm1(pt1.normal[0], pt1.normal[1], pt1.normal[2]);
        normalquat.setFromTwoVectors(norm0, norm1);
        //double angledist = normalquat.angularDistance(normalquat.Identity());
        double dist = (Vector3d(pt0.x, pt0.y, pt0.z) - Vector3d(pt1.x, pt1.y, pt1.z)).norm();

        /* Vector4d p0_pt = Vector4d(pt0.x, pt0.y, pt0.z, 1.0);
        Vector3d expected_proj = projectPoint(p0_pt, frame0.cam);
        Vector3d diff = expected_proj - frame1.pl_kpts[f1_indices[i]].head<3>();
        diff(2) = diff(2) - diff(0);
        printf("[Proj difference] %f %f %f\n", diff(0), diff(1), diff(2)); */

        if ((norm0 - norm1).norm() < 0.5 && dist < 0.2)
#pragma omp critical
            matches.push_back(cv::DMatch(f0_indices[i], f1_indices[i], dist));
    }

    printf("[Frame] Found %d matches, then converted %d matches.\n", (int)f0_indices.size(), (int)matches.size());
}
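Example 3 builds normalquat from the two surface normals but then gates the match on the Euclidean distance between the normal vectors; the commented-out angularDistance line suggests an angle-based test was also considered. A minimal sketch of such a test follows, assuming the same Eigen types; the function name and threshold are illustrative, not taken from the source.

#include <Eigen/Geometry>

// Gate a match on the angle between two surface normals (in radians),
// using the quaternion produced by setFromTwoVectors.
bool normalsAgree(const Eigen::Vector3d& n0, const Eigen::Vector3d& n1,
                  double maxAngleRad = 0.5)   // ~29 degrees, illustrative only
{
    Eigen::Quaterniond q;
    q.setFromTwoVectors(n0, n1);
    return q.angularDistance(Eigen::Quaterniond::Identity()) < maxAngleRad;
}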
Example 4: setupSBA
//......... part of the code is omitted here .........
    double pointnoise = 1.0;

    // Add points into the system, and add noise.
    for (i = 0; i < points.size(); i++)
    {
        // Add up to 0.5 units of noise to each coordinate.
        Vector4d temppoint = points[i];
        temppoint.x() += pointnoise*(drand48() - 0.5);
        temppoint.y() += pointnoise*(drand48() - 0.5);
        temppoint.z() += pointnoise*(drand48() - 0.5);
        sys.addPoint(temppoint);
    }

    Vector2d proj2d;
    Vector3d proj, pc, baseline;
    Vector3d imagenormal(0, 0, 1);

    Matrix3d covar0;
    covar0 << sqrt(imagenormal(0)), 0, 0,
              0, sqrt(imagenormal(1)), 0,
              0, 0, sqrt(imagenormal(2));
    Matrix3d covar;

    Quaterniond rotation;
    Matrix3d rotmat;

    unsigned int midindex = middleplane.points.size();
    unsigned int leftindex = midindex + leftplane.points.size();
    unsigned int rightindex = leftindex + rightplane.points.size();
    printf("Normal for Middle Plane: [%f %f %f], index %d -> %d\n", middleplane.normal.x(), middleplane.normal.y(), middleplane.normal.z(), 0, midindex-1);
    printf("Normal for Left Plane: [%f %f %f], index %d -> %d\n", leftplane.normal.x(), leftplane.normal.y(), leftplane.normal.z(), midindex, leftindex-1);
    printf("Normal for Right Plane: [%f %f %f], index %d -> %d\n", rightplane.normal.x(), rightplane.normal.y(), rightplane.normal.z(), leftindex, rightindex-1);

    // Project points into nodes.
    for (i = 0; i < points.size(); i++)
    {
        for (j = 0; j < sys.nodes.size(); j++)
        {
            // Project the point into the node's image coordinate system.
            sys.nodes[j].setProjection();
            sys.nodes[j].project2im(proj2d, points[i]);

            // Camera coordinates for the right camera.
            baseline << sys.nodes[j].baseline, 0, 0;
            pc = sys.nodes[j].Kcam * (sys.nodes[j].w2n*points[i] - baseline);
            proj.head<2>() = proj2d;
            proj(2) = pc(0)/pc(2);

            // If valid (within the range of the image size), add the stereo
            // projection to SBA.
            if (proj.x() > 0 && proj.x() < maxx && proj.y() > 0 && proj.y() < maxy)
            {
                sys.addStereoProj(j, i, proj);

                // Create the covariance matrix:
                // image plane normal = [0 0 1]
                // wall normal = [0 0 -1]
                // covar = (R)T*[0 0 0;0 0 0;0 0 1]*R
                rotation.setFromTwoVectors(imagenormal, normals[i]);
                rotmat = rotation.toRotationMatrix();
                covar = rotmat.transpose()*covar0*rotmat;

                // if (!(i % sys.nodes.size() == j))
                //   sys.setProjCovariance(j, i, covar);
            }
        }
    }

    // Add noise to the node poses (translation and rotation).
    double transscale = 2.0;
    double rotscale = 0.2;

    // Don't actually add noise to the first node, since it's fixed.
    for (i = 1; i < sys.nodes.size(); i++)
    {
        Vector4d temptrans = sys.nodes[i].trans;
        Quaterniond tempqrot = sys.nodes[i].qrot;

        // Add error to both translation and rotation.
        temptrans.x() += transscale*(drand48() - 0.5);
        temptrans.y() += transscale*(drand48() - 0.5);
        temptrans.z() += transscale*(drand48() - 0.5);
        tempqrot.x() += rotscale*(drand48() - 0.5);
        tempqrot.y() += rotscale*(drand48() - 0.5);
        tempqrot.z() += rotscale*(drand48() - 0.5);
        tempqrot.normalize();

        sys.nodes[i].trans = temptrans;
        sys.nodes[i].qrot = tempqrot;

        // These methods should be called to update the node.
        sys.nodes[i].normRot();
        sys.nodes[i].setTransform();
        sys.nodes[i].setProjection();
        sys.nodes[i].setDr(true);
    }
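Example 4 perturbs each node's orientation by adding noise directly to the quaternion's x, y and z components and then renormalizing. An alternative worth noting is to compose the original rotation with a small random rotation, which makes the angular magnitude of the perturbation explicit. A sketch under the assumption that POSIX drand48 is available (as in the example); the function name is hypothetical.

#include <cstdlib>            // drand48 (POSIX)
#include <Eigen/Geometry>

// Hypothetical helper: compose a rotation with a small random rotation whose
// angle is up to rotscale/2 radians about a random unit axis.
Eigen::Quaterniond perturbRotation(const Eigen::Quaterniond& q, double rotscale)
{
    Eigen::Vector3d axis = Eigen::Vector3d::Random().normalized();  // random unit axis
    double angle = rotscale * (drand48() - 0.5);                    // small random angle
    return (q * Eigen::Quaterniond(Eigen::AngleAxisd(angle, axis))).normalized();
}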
}