This article collects typical usage examples of the C++ method eigen::Vector3d::data. If you have been wondering what Vector3d::data does, how to call it, or what it looks like in real code, the curated examples below should help. You can also explore the containing class eigen::Vector3d
for further usage examples.
The following shows 11 code examples of Vector3d::data, sorted by popularity by default. You can upvote the examples you like or find useful; your feedback helps the system recommend better C++ code examples.
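Before diving into the collected snippets, here is a minimal standalone sketch (written for this article, not taken from any project below) of what Vector3d::data actually provides: a raw double* to the vector's three contiguous coefficients, which is what lets Eigen vectors be handed directly to C-style APIs such as VTK, LAPACK, Ceres, or MATLAB mex.

#include <Eigen/Dense>
#include <algorithm>
#include <cstdio>

int main()
{
  Eigen::Vector3d v(1.0, 2.0, 3.0);

  // data() returns a pointer to the underlying double[3] storage.
  const double* p = v.data();
  std::printf("%f %f %f\n", p[0], p[1], p[2]);

  // On a non-const vector the pointer is writable, so raw buffers can be
  // copied straight into the vector.
  double raw[3] = {4.0, 5.0, 6.0};
  std::copy(raw, raw + 3, v.data());     // v is now (4, 5, 6)
  std::printf("norm = %f\n", v.norm());  // sqrt(16 + 25 + 36)
  return 0;
}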
Example 1: updatePickLines
//--------------------------------------------------------------------------------------------------
void Camera::updatePickLines()
{
  // Now draw lines from the pick points
  double halfVerticalAngle = 0.5*Utilities::degToRad( mpVtkCamera->GetViewAngle() );
  double verticalLength = tan( halfVerticalAngle );
  double horizontalLength = verticalLength*(mImageWidth/mImageHeight);

  Eigen::Vector3d cameraPos;
  Eigen::Vector3d cameraAxisX;
  Eigen::Vector3d cameraAxisY;
  Eigen::Vector3d cameraAxisZ;
  mpVtkCamera->GetPosition( cameraPos[ 0 ], cameraPos[ 1 ], cameraPos[ 2 ] );
  mpVtkCamera->GetDirectionOfProjection( cameraAxisZ[ 0 ], cameraAxisZ[ 1 ], cameraAxisZ[ 2 ] );
  mpVtkCamera->GetViewUp( cameraAxisY[ 0 ], cameraAxisY[ 1 ], cameraAxisY[ 2 ] );
  cameraAxisX = cameraAxisY.cross( cameraAxisZ );

  vtkSmartPointer<vtkPoints> points = vtkSmartPointer<vtkPoints>::New();
  vtkSmartPointer<vtkCellArray> lines = vtkSmartPointer<vtkCellArray>::New();

  for ( uint32_t pickPointIdx = 0; pickPointIdx < mPickPoints.size(); pickPointIdx++ )
  {
    const Eigen::Vector2d& p = mPickPoints[ pickPointIdx ];

    Eigen::Vector3d startPos = cameraPos;
    Eigen::Vector3d rayDir = cameraAxisZ
      - p[ 0 ]*horizontalLength*cameraAxisX
      - p[ 1 ]*verticalLength*cameraAxisY;
    rayDir.normalize();
    Eigen::Vector3d endPos = startPos + 2.0*rayDir;

    // Create the line
    points->InsertNextPoint( startPos.data() );
    points->InsertNextPoint( endPos.data() );

    vtkSmartPointer<vtkLine> line = vtkSmartPointer<vtkLine>::New();
    line->GetPointIds()->SetId( 0, 2*pickPointIdx );
    line->GetPointIds()->SetId( 1, 2*pickPointIdx + 1 );

    // Store the line
    lines->InsertNextCell( line );
  }

  // Create a polydata to store everything in
  vtkSmartPointer<vtkPolyData> linesPolyData = vtkSmartPointer<vtkPolyData>::New();

  // Add the points and lines to the dataset
  linesPolyData->SetPoints( points );
  linesPolyData->SetLines( lines );

  mpPickLinesMapper->SetInput( linesPolyData );
}
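A note for readers adapting this snippet: mpPickLinesMapper->SetInput() is the VTK 5 mapper API. From VTK 6 onward the equivalent call for passing a data object directly is SetInputData( linesPolyData ), while pipeline connections use SetInputConnection.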
Example 2: unproject
template <typename Derivedwin, typename Derivedobj>
IGL_INLINE int igl::unproject(
  const Eigen::PlainObjectBase<Derivedwin> & win,
  Eigen::PlainObjectBase<Derivedobj> & obj)
{
  Eigen::Vector3d dwin(win(0),win(1),win(2));
  Eigen::Vector3d dobj;
  int ret = unproject(dwin(0),dwin(1),dwin(2),
                      &dobj.data()[0],
                      &dobj.data()[1],
                      &dobj.data()[2]);
  obj(0) = dobj(0);
  obj(1) = dobj(1);
  obj(2) = dobj(2);
  return ret;
}
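A hypothetical call site for the overload above (all values below are illustrative, and this assumes the legacy libigl variant that reads the current OpenGL modelview/projection/viewport state):

double mouse_x = 320, mouse_y = 240, depth = 0.5, viewport_height = 480;  // illustrative values
Eigen::Vector3d win(mouse_x, viewport_height - mouse_y, depth);  // window coordinates, GL convention (y up)
Eigen::Vector3d scene;
int ok = igl::unproject(win, scene);  // return value presumably mirrors gluUnProject (nonzero on success)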
Example 3: q
void
ExtendedHandEyeCalibration::refine(const std::vector<Eigen::Matrix4d, Eigen::aligned_allocator<Eigen::Matrix4d> >& H1,
                                   const std::vector<Eigen::Matrix4d, Eigen::aligned_allocator<Eigen::Matrix4d> >& H2,
                                   Eigen::Matrix4d& H_12,
                                   double& s) const
{
  Eigen::Quaterniond q(H_12.block<3,3>(0,0));
  Eigen::Vector3d t = H_12.block<3,1>(0,3);

  ceres::Problem problem;
  for (size_t i = 0; i < H1.size(); i++)
  {
    ceres::CostFunction* costFunction =
      new ceres::AutoDiffCostFunction<PoseError, 6, 4, 3, 1>(
        new PoseError(H1.at(i), H2.at(i)));

    problem.AddResidualBlock(costFunction, 0, q.coeffs().data(), t.data(), &s);
  }

  ceres::LocalParameterization* quaternionParameterization =
    new EigenQuaternionParameterization;
  problem.SetParameterization(q.coeffs().data(), quaternionParameterization);

  ceres::Solver::Options options;
  options.gradient_tolerance = 1e-12;
  options.linear_solver_type = ceres::DENSE_QR;
  options.max_num_iterations = 500;

  ceres::Solver::Summary summary;
  ceres::Solve(options, &problem, &summary);

  H_12.block<3,3>(0,0) = q.toRotationMatrix();
  H_12.block<3,1>(0,3) = t;
}
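Note that this snippet uses the Ceres 1.x parameterization API (LocalParameterization plus Problem::SetParameterization). In Ceres 2.1 and later that API is deprecated (and removed in 2.2); assuming EigenQuaternionParameterization here plays the role of Ceres' built-in Eigen quaternion parameterization, the rough modern equivalent would be problem.SetManifold(q.coeffs().data(), new ceres::EigenQuaternionManifold()).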
Example 4: assert
template <typename Derivedobj, typename Derivedwin>
IGL_INLINE int igl::opengl2::project(
  const Eigen::PlainObjectBase<Derivedobj> & obj,
  Eigen::PlainObjectBase<Derivedwin> & win)
{
  assert(obj.size() >= 3);
  Eigen::Vector3d dobj(obj(0),obj(1),obj(2));
  Eigen::Vector3d dwin;
  int ret = igl::opengl2::project(dobj(0),dobj(1),dobj(2),
                                  &dwin.data()[0],
                                  &dwin.data()[1],
                                  &dwin.data()[2]);
  win(0) = dwin(0);
  win(1) = dwin(1);
  win(2) = dwin(2);
  return ret;
}
Example 5: return
/** \brief Compute the 3 eigenvalues and eigenvectors of a 3x3 covariance matrix.
 * \param covariance_matrix a 3x3 covariance matrix (Eigen::Matrix3d)
 * \param eigen_values the resulting eigenvalues (Eigen::Vector3d), in ascending order
 * \param eigen_vectors a 3x3 Eigen::Matrix3d receiving the eigenvectors, stored as columns (LAPACK column-major convention)
 */
bool
eigen_cov (Eigen::Matrix3d covariance_matrix, Eigen::Vector3d &eigen_values, Eigen::Matrix3d &eigen_vectors)
{
  char jobz = 'V'; // 'V': Compute eigenvalues and eigenvectors
  char uplo = 'U'; // 'U': Upper triangle of A is stored
  int n = 3, lda = 3, info = -1;
  int lwork = 3 * n - 1;
  double *work = new double[lwork];
  for (int i = 0; i < 3; i++)
    for (int j = 0; j < 3; j++)
      eigen_vectors (i, j) = covariance_matrix (i, j);
  dsyev_ (&jobz, &uplo, &n, eigen_vectors.data (), &lda, eigen_values.data (), work, &lwork, &info);
  delete [] work;   // allocated with new[], so delete[] is required
  return (info == 0);
}
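For comparison, the same decomposition can be done without a LAPACK binding by using Eigen's own solver. The sketch below (the function name eigen_cov_selfadjoint is made up for this article, not part of the original snippet) is a rough equivalent:

#include <Eigen/Dense>

bool
eigen_cov_selfadjoint (const Eigen::Matrix3d &covariance_matrix, Eigen::Vector3d &eigen_values, Eigen::Matrix3d &eigen_vectors)
{
  // SelfAdjointEigenSolver handles symmetric matrices and, like dsyev_,
  // returns the eigenvalues in ascending order.
  Eigen::SelfAdjointEigenSolver<Eigen::Matrix3d> solver (covariance_matrix);
  if (solver.info () != Eigen::Success)
    return (false);
  eigen_values  = solver.eigenvalues ();
  eigen_vectors = solver.eigenvectors ();  // eigenvectors stored as columns
  return (true);
}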
Example 6: return
Eigen::Vector3d zsw::AdVectorIntegrator::operator()(const double* pos) const
{
  if(vfs_.size() < 3) {
    Eigen::Vector3d vel;
    vfs_[vfs_.size()-1]->val(pos, vel.data());
    return vel*h_;
  }
  Eigen::Vector3d ori_pos, tmp_pos;
  std::copy(pos, pos+3, ori_pos.data());
  Eigen::Vector3d k1;
  vfs_[vfs_.size()-3]->val(pos, k1.data());
  Eigen::Vector3d k2;
  tmp_pos = ori_pos + h_*k1;
  vfs_[vfs_.size()-2]->val(tmp_pos.data(), k2.data());
  Eigen::Vector3d k3;
  tmp_pos = ori_pos + 2*h_*(-k1+2*k2);
  vfs_[vfs_.size()-1]->val(tmp_pos.data(), k3.data());
  return (k1+4*k2+k3)/6*h_;
}
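The branch structure here is a Runge-Kutta-style step: once at least three velocity fields have accumulated, the three most recent fields supply k1, k2, and k3 and the displacement is assembled with the classic RK3 weighting (k1+4*k2+k3)/6*h_; before that, a simple forward-Euler step with the latest field is used. In both branches Vector3d::data() is what lets the Eigen vectors be passed to the val(const double*, double*) callback without copies.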
Example 7:
Eigen::Vector3d Quaternion2Euler(float x, float y, float z, float w) {
  KDL::Rotation kdl_rotation = KDL::Rotation::Quaternion(x, y, z, w);
  Eigen::Vector3d result;
  // GetRPY(roll, pitch, yaw) writes through data(): roll lands in result[2],
  // pitch in result[1], yaw in result[0], so the returned vector is (yaw, pitch, roll).
  kdl_rotation.GetRPY(result.data()[2], result.data()[1], result.data()[0]);
  return result;
}
Example 8: EstimateConductionVelocity
void EstimateConductionVelocity(vtkDataSet* mesh, const char* isoch_array)
{
  vtkDataArray* isoc = mesh->GetPointData()->GetArray(isoch_array);

  vtkSmartPointer<vtkFloatArray> velocity = vtkSmartPointer<vtkFloatArray>::New();
  velocity->SetName("Velocity");
  velocity->SetNumberOfComponents(1);
  velocity->SetNumberOfTuples(mesh->GetNumberOfCells());

  for(vtkIdType cellid=0; cellid<mesh->GetNumberOfCells(); cellid++)
  {
    if( cellid % 100000 == 0 )
    {
      std::cout<<"Processing cell "<<cellid<<"/"<<mesh->GetNumberOfCells()<<"\r"<<std::flush;
    }

    vtkCell *cell = mesh->GetCell(cellid);

    Eigen::Vector3d direction;

    //calculate the gradient within the cell
    double pcenter[3];
    int subId = cell->GetParametricCenter(pcenter);

    //save the isochrone values in the same order as the vertices
    Eigen::VectorXd iso_values(cell->GetNumberOfPoints());
    for(int i=0; i<iso_values.size(); i++) iso_values[i] = isoc->GetTuple1(cell->GetPointId(i));

    cell->Derivatives(0, pcenter, iso_values.data(), 1, direction.data());
    direction.normalize();

    //std::cout<<"Cell: "<<cellid<<std::endl;
    //std::cout<<"Gradient: "<<direction<<std::endl;

    //project all the vertices onto the gradient direction
    //get cell center
    Eigen::Vector3d center;
    Eigen::VectorXd weights(cell->GetNumberOfPoints());
    cell->EvaluateLocation(subId, pcenter, center.data(), weights.data());

    //prepare storage for the projection coordinates.
    //center of the cell will be the origin
    Eigen::VectorXd vertex_projections( cell->GetNumberOfPoints() );

    //find the longest projection of the direction on the edges
    for(int ptid = 0; ptid < cell->GetNumberOfPoints(); ptid++)
    {
      Eigen::Vector3d vertex;
      mesh->GetPoint(cell->GetPointId(ptid), vertex.data());

      Eigen::Vector3d vertex_vector = vertex-center;
      vertex_projections[ptid] = vertex_vector.dot(direction);
    }

    //find the projections with the smallest and largest coordinate
    int max_vertex_id = 0;
    int min_vertex_id = 1;
    double max_time = iso_values[max_vertex_id];
    double min_time = iso_values[min_vertex_id];

    //std::cout<<"Projection Coordinates: "<<vertex_projections<<std::endl;
    //std::cout<<"Time values: "<<iso_values<<std::endl;

    for(int i=0; i<iso_values.size(); i++)
    {
      if( max_time<iso_values[i] )
      {
        max_time = iso_values[i];
        max_vertex_id = i;
      }
      if( min_time>iso_values[i] )
      {
        min_time = iso_values[i];
        min_vertex_id = i;
      }
    }

    //std::cout<<"Min id: "<<min_vertex_id<<", max id: "<<max_vertex_id<<std::endl;
    //std::cout<<"Min time:"<<min_time<<", max time: "<<max_time<<std::endl;

    double v=0;
    double max_time_diff = max_time-min_time;
    double coordinate_diff = fabs(vertex_projections[max_vertex_id] - vertex_projections[min_vertex_id]);

    if(max_time_diff>0)
    {
      v = coordinate_diff/max_time_diff;
      //std::cout<<"Velocity:"<<v<<std::endl;
    }

    velocity->SetTuple1(cellid, v);
  }

  std::cout<<std::endl;
  mesh->GetCellData()->AddArray(velocity);
//......... the rest of this code is omitted .........
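In this example Vector3d::data() bridges Eigen and VTK in both directions: cell->Derivatives() writes the three gradient components straight into direction.data(), and EvaluateLocation() and GetPoint() fill Eigen vectors the same way, so fixed-size Eigen types stand in for the double[3] buffers VTK expects.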
Example 9: setPosition
void Light::setPosition(const Eigen::Vector3d& position)
{
  setPosition(position.data());
}
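This one-line overload is the bridging pattern in its purest form: the Eigen-typed overload forwards to a raw-pointer overload (presumably setPosition(const double*)) by exposing the vector's storage through data().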
Example 10: mexFunction
void mexFunction(int nlhs, mxArray* plhs[], int nrhs, const mxArray* prhs[]) {
  // DEBUG
  // mexPrintf("constructModelmex: START\n");
  // END_DEBUG
  mxArray* pm;

  if (nrhs != 1) {
    mexErrMsgIdAndTxt("Drake:constructModelmex:BadInputs",
                      "Usage model_ptr = constructModelmex(obj)");
  }

  if (isa(prhs[0], "DrakeMexPointer")) {  // then it's calling the destructor
    destroyDrakeMexPointer<RigidBodyTree*>(prhs[0]);
    return;
  }

  const mxArray* pRBM = prhs[0];
  RigidBodyTree* model = new RigidBodyTree();
  model->bodies.clear();  // a little gross: the default constructor makes a
                          // body "world". zap it because we will construct one
                          // again below

  // model->robot_name = get_strings(mxGetPropertySafe(pRBM, 0,"name"));

  const mxArray* pBodies = mxGetPropertySafe(pRBM, 0, "body");
  if (!pBodies)
    mexErrMsgIdAndTxt("Drake:constructModelmex:BadInputs",
                      "the body array is invalid");
  int num_bodies = static_cast<int>(mxGetNumberOfElements(pBodies));

  const mxArray* pFrames = mxGetPropertySafe(pRBM, 0, "frame");
  if (!pFrames)
    mexErrMsgIdAndTxt("Drake:constructModelmex:BadInputs",
                      "the frame array is invalid");
  int num_frames = static_cast<int>(mxGetNumberOfElements(pFrames));

  for (int i = 0; i < num_bodies; i++) {
    // DEBUG
    // mexPrintf("constructModelmex: body %d\n", i);
    // END_DEBUG
    std::unique_ptr<RigidBody> b(new RigidBody());
    b->set_body_index(i);
    b->set_name(mxGetStdString(mxGetPropertySafe(pBodies, i, "linkname")));

    pm = mxGetPropertySafe(pBodies, i, "robotnum");
    b->set_model_instance_id((int)mxGetScalar(pm) - 1);

    pm = mxGetPropertySafe(pBodies, i, "mass");
    b->set_mass(mxGetScalar(pm));

    pm = mxGetPropertySafe(pBodies, i, "com");
    Eigen::Vector3d com;
    if (!mxIsEmpty(pm)) {
      memcpy(com.data(), mxGetPrSafe(pm), sizeof(double) * 3);
      b->set_center_of_mass(com);
    }

    pm = mxGetPropertySafe(pBodies, i, "I");
    if (!mxIsEmpty(pm)) {
      drake::SquareTwistMatrix<double> I;
      memcpy(I.data(), mxGetPrSafe(pm), sizeof(double) * 6 * 6);
      b->set_spatial_inertia(I);
    }

    pm = mxGetPropertySafe(pBodies, i, "position_num");
    b->set_position_start_index((int)mxGetScalar(pm) - 1);  // zero-indexed

    pm = mxGetPropertySafe(pBodies, i, "velocity_num");
    b->set_velocity_start_index((int)mxGetScalar(pm) - 1);  // zero-indexed

    pm = mxGetPropertySafe(pBodies, i, "parent");
    if (!pm || mxIsEmpty(pm)) {
      b->set_parent(nullptr);
    } else {
      int parent_ind = static_cast<int>(mxGetScalar(pm)) - 1;
      if (parent_ind >= static_cast<int>(model->bodies.size()))
        mexErrMsgIdAndTxt("Drake:constructModelmex:BadInputs",
                          "bad body.parent %d (only have %d bodies)",
                          parent_ind, model->bodies.size());
      if (parent_ind >= 0) b->set_parent(model->bodies[parent_ind].get());
    }

    if (b->has_parent_body()) {
      string joint_name =
          mxGetStdString(mxGetPropertySafe(pBodies, i, "jointname"));
      // mexPrintf("adding joint %s\n", joint_name.c_str());

      pm = mxGetPropertySafe(pBodies, i, "Ttree");
      // todo: check that the size is 4x4
      Isometry3d transform_to_parent_body;
      memcpy(transform_to_parent_body.data(), mxGetPrSafe(pm),
             sizeof(double) * 4 * 4);

      int floating =
          (int)mxGetScalar(mxGetPropertySafe(pBodies, i, "floating"));

      Eigen::Vector3d joint_axis;
      pm = mxGetPropertySafe(pBodies, i, "joint_axis");
      memcpy(joint_axis.data(), mxGetPrSafe(pm), sizeof(double) * 3);
//......... the rest of this code is omitted .........
Example 11: setChannels
bool setChannels(const std::string& iInputChannel,
                 const std::string& iOutputChannel) {
  if (mSubscription != NULL) {
    mLcm->unsubscribe(mSubscription);
  }

  mInputChannel = iInputChannel;
  mOutputChannel = iOutputChannel;

  BotCamTrans* inputCamTrans =
    bot_param_get_new_camtrans(mBotWrapper->getBotParam(),
                               mInputChannel.c_str());
  if (inputCamTrans == NULL) {
    std::cout << "error: cannot find camera " << mInputChannel << std::endl;
    return false;
  }
  BotCamTrans* outputCamTrans =
    bot_param_get_new_camtrans(mBotWrapper->getBotParam(),
                               mOutputChannel.c_str());
  if (outputCamTrans == NULL) {
    std::cout << "error: cannot find camera " << mOutputChannel << std::endl;
    return false;
  }

  int inputWidth = bot_camtrans_get_width(inputCamTrans);
  int inputHeight = bot_camtrans_get_height(inputCamTrans);
  mOutputWidth = bot_camtrans_get_width(outputCamTrans);
  mOutputHeight = bot_camtrans_get_height(outputCamTrans);

  // precompute warp field
  mWarpFieldIntX.resize(mOutputWidth*mOutputHeight);
  mWarpFieldIntY.resize(mWarpFieldIntX.size());
  mWarpFieldFrac00.resize(mWarpFieldIntX.size());
  mWarpFieldFrac01.resize(mWarpFieldIntX.size());
  mWarpFieldFrac10.resize(mWarpFieldIntX.size());
  mWarpFieldFrac11.resize(mWarpFieldIntX.size());

  Eigen::Isometry3d outputToInput;
  if (!mBotWrapper->getTransform(mOutputChannel, mInputChannel,
                                 outputToInput)) {
    std::cout << "error: cannot get transform from " << mOutputChannel <<
      " to " << mInputChannel << std::endl;
    return false;
  }
  Eigen::Matrix3d rotation = outputToInput.rotation();

  Eigen::Vector3d ray;
  for (int i = 0, pos = 0; i < mOutputHeight; ++i) {
    for (int j = 0; j < mOutputWidth; ++j, ++pos) {
      mWarpFieldIntX[pos] = -1;
      if (0 != bot_camtrans_unproject_pixel(outputCamTrans, j, i,
                                            ray.data())) {
        continue;
      }
      ray = rotation*ray;
      double pix[3];
      if (0 != bot_camtrans_project_point(inputCamTrans, ray.data(), pix)) {
        continue;
      }
      if ((pix[2] < 0) ||
          (pix[0] < 0) || (pix[0] >= inputWidth-1) ||
          (pix[1] < 0) || (pix[1] >= inputHeight-1)) {
        continue;
      }
      mWarpFieldIntX[pos] = (int)pix[0];
      mWarpFieldIntY[pos] = (int)pix[1];
      double fracX = pix[0] - mWarpFieldIntX[pos];
      double fracY = pix[1] - mWarpFieldIntY[pos];
      mWarpFieldFrac00[pos] = (1-fracX)*(1-fracY);
      mWarpFieldFrac01[pos] = (1-fracX)*fracY;
      mWarpFieldFrac10[pos] = fracX*(1-fracY);
      mWarpFieldFrac11[pos] = fracX*fracY;
    }
  }

  mLcm->subscribe(mInputChannel, &ImageWarper::onImage, this);
  return true;
}
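Here data() feeds Eigen::Vector3d directly into the libbot camtrans C API: bot_camtrans_unproject_pixel writes the unprojected ray into ray.data(), and bot_camtrans_project_point reads the same storage back, so no intermediate double[3] copies are needed while the warp field is precomputed.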