This article collects typical usage examples of the C++ method NonlinearFactorGraph::linearize. If you are wondering what NonlinearFactorGraph::linearize does, how to call it, or where to find real-world uses of it, the curated code examples below should help. You can also read further about the enclosing class, NonlinearFactorGraph.
The following shows 7 code examples of NonlinearFactorGraph::linearize, sorted by popularity by default. You can upvote the examples you like or find useful; your feedback helps the system recommend better C++ code examples.
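Before the examples, here is a minimal, self-contained sketch of typical NonlinearFactorGraph::linearize usage (not taken from the examples below; the graph contents, keys, and noise values are illustrative assumptions, and header paths differ slightly between GTSAM 3.x and 4.x). linearize evaluates the Jacobians of every nonlinear factor at a given Values linearization point and returns a GaussianFactorGraph, which can then be solved for a Gauss-Newton update:
#include <gtsam/geometry/Pose2.h>
#include <gtsam/nonlinear/NonlinearFactorGraph.h>
#include <gtsam/nonlinear/PriorFactor.h>
#include <gtsam/nonlinear/Values.h>
#include <gtsam/slam/BetweenFactor.h>
#include <gtsam/linear/GaussianFactorGraph.h>
#include <gtsam/linear/VectorValues.h>

using namespace gtsam;

int main() {
  // Build a tiny nonlinear graph: a prior on pose 0 and one odometry factor.
  NonlinearFactorGraph graph;
  auto model = noiseModel::Isotropic::Sigma(3, 0.1);
  graph.add(PriorFactor<Pose2>(0, Pose2(0, 0, 0), model));
  graph.add(BetweenFactor<Pose2>(0, 1, Pose2(1, 0, 0), model));

  // Linearization point, deliberately off the minimum.
  Values initial;
  initial.insert(0, Pose2(0.1, 0.0, 0.0));
  initial.insert(1, Pose2(1.2, 0.1, 0.0));

  // linearize returns a GaussianFactorGraph; solving it gives one Gauss-Newton step.
  GaussianFactorGraph::shared_ptr linear = graph.linearize(initial);
  VectorValues delta = linear->optimize();
  Values updated = initial.retract(delta);
  updated.print("one Gauss-Newton step:\n");
  return 0;
}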
Example 1: update
/* ************************************************************************* */
void NonlinearISAM::update(const NonlinearFactorGraph& newFactors,
    const Values& initialValues) {

  if(newFactors.size() > 0) {

    // Reorder and relinearize every reorderInterval updates
    if(reorderInterval_ > 0 && ++reorderCounter_ >= reorderInterval_) {
      reorder_relinearize();
      reorderCounter_ = 0;
    }

    factors_.push_back(newFactors);

    // Linearize new factors and insert them
    // TODO: optimize for whole config?
    linPoint_.insert(initialValues);

    // Augment ordering
    // TODO: allow for ordering constraints within the new variables
    // FIXME: should just loop over new values
    BOOST_FOREACH(const NonlinearFactorGraph::sharedFactor& factor, newFactors)
      BOOST_FOREACH(Key key, factor->keys())
        ordering_.tryInsert(key, ordering_.nVars()); // will do nothing if already present

    boost::shared_ptr<GaussianFactorGraph> linearizedNewFactors =
        newFactors.linearize(linPoint_, ordering_);

    // Update ISAM
    isam_.update(*linearizedNewFactors);
  }
}
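For context, a hedged sketch (not part of the collected example) of how an update() method like the one above is typically driven from user code; the keys, odometry factor, and noise values here are illustrative assumptions only:
#include <gtsam/geometry/Pose2.h>
#include <gtsam/nonlinear/NonlinearFactorGraph.h>
#include <gtsam/nonlinear/NonlinearISAM.h>
#include <gtsam/slam/BetweenFactor.h>

using namespace gtsam;

void stepISAM(NonlinearISAM& isam, const Pose2& odom, Key prevKey, Key newKey) {
  NonlinearFactorGraph newFactors;
  Values newValues;
  newFactors.add(BetweenFactor<Pose2>(prevKey, newKey, odom,
                                      noiseModel::Isotropic::Sigma(3, 0.1)));
  // Initial guess for the new variable only; update() linearizes just the new factors.
  newValues.insert(newKey, isam.estimate().at<Pose2>(prevKey).compose(odom));
  isam.update(newFactors, newValues);
}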
Example 2: newConfig
/* ************************************************************************* */
TEST(DoglegOptimizer, Iterate) {
  // really non-linear factor graph
  NonlinearFactorGraph fg = example::createReallyNonlinearFactorGraph();

  // config far from minimum
  Point2 x0(3,0);
  Values config;
  config.insert(X(1), x0);

  double Delta = 1.0;
  for(size_t it=0; it<10; ++it) {
    GaussianBayesNet gbn = *fg.linearize(config)->eliminateSequential();

    // Iterate assumes that linear error = nonlinear error at the linearization point, and this should be true
    double nonlinearError = fg.error(config);
    double linearError = GaussianFactorGraph(gbn).error(config.zeroVectors());
    DOUBLES_EQUAL(nonlinearError, linearError, 1e-5);

    // cout << "it " << it << ", Delta = " << Delta << ", error = " << fg->error(*config) << endl;
    VectorValues dx_u = gbn.optimizeGradientSearch();
    VectorValues dx_n = gbn.optimize();
    DoglegOptimizerImpl::IterationResult result = DoglegOptimizerImpl::Iterate(Delta,
        DoglegOptimizerImpl::SEARCH_EACH_ITERATION, dx_u, dx_n, gbn, fg, config, fg.error(config));
    Delta = result.Delta;
    EXPECT(result.f_error < fg.error(config)); // Check that error decreases
    Values newConfig(config.retract(result.dx_d));
    config = newConfig;
    DOUBLES_EQUAL(fg.error(config), result.f_error, 1e-5); // Check that error is correctly filled in
  }
}
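The DOUBLES_EQUAL check on nonlinearError and linearError in this example relies on a basic property of linearization, spelled out below in generic notation (not specific to GTSAM; the sign convention on b can differ between implementations). Each whitened factor contributes error $\tfrac{1}{2}\lVert r_i(x)\rVert^2$; linearizing at $x_0$ replaces it by $\tfrac{1}{2}\lVert A_i\,\delta - b_i\rVert^2$ with $b_i = -r_i(x_0)$, so evaluating the linear graph at $\delta = 0$ (config.zeroVectors()) gives

$$E_{\text{lin}}(0) \;=\; \tfrac{1}{2}\sum_i \lVert b_i\rVert^2 \;=\; \tfrac{1}{2}\sum_i \lVert r_i(x_0)\rVert^2 \;=\; E_{\text{nl}}(x_0).$$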
Example 3: check_smoother
/* ************************************************************************* */
bool check_smoother(const NonlinearFactorGraph& fullgraph, const Values& fullinit,
    const IncrementalFixedLagSmoother& smoother, const Key& key) {

  GaussianFactorGraph linearized = *fullgraph.linearize(fullinit);
  VectorValues delta = linearized.optimize();
  Values fullfinal = fullinit.retract(delta);

  Point2 expected = fullfinal.at<Point2>(key);
  Point2 actual = smoother.calculateEstimate<Point2>(key);

  return assert_equal(expected, actual);
}
Example 4: SmartFactor
/* *************************************************************************/
TEST( SmartProjectionCameraFactor, comparisonGeneralSfMFactor1 ) {
  using namespace bundler;

  Values values;
  values.insert(c1, level_camera);
  values.insert(c2, level_camera_right);

  NonlinearFactorGraph smartGraph;
  SmartFactor::shared_ptr factor1(new SmartFactor(unit2));
  factor1->add(level_uv, c1);
  factor1->add(level_uv_right, c2);
  smartGraph.push_back(factor1);
  Matrix expectedHessian = smartGraph.linearize(values)->hessian().first;
  Vector expectedInfoVector = smartGraph.linearize(values)->hessian().second;
  Point3 expectedPoint;
  if (factor1->point())
    expectedPoint = *(factor1->point());

  // COMMENTS:
  // 1) triangulation introduces small errors, then for a fair comparison we use expectedPoint as
  //    value in the generalGraph
  NonlinearFactorGraph generalGraph;
  SFMFactor sfm1(level_uv, unit2, c1, l1);
  SFMFactor sfm2(level_uv_right, unit2, c2, l1);
  generalGraph.push_back(sfm1);
  generalGraph.push_back(sfm2);
  values.insert(l1, expectedPoint); // note: we get rid of possible errors in the triangulation
  Matrix actualFullHessian = generalGraph.linearize(values)->hessian().first;
  Matrix actualFullInfoVector = generalGraph.linearize(values)->hessian().second;
  Matrix actualHessian = actualFullHessian.block(0, 0, 18, 18)
      - actualFullHessian.block(0, 18, 18, 3)
      * (actualFullHessian.block(18, 18, 3, 3)).inverse()
      * actualFullHessian.block(18, 0, 3, 18);
  Vector actualInfoVector = actualFullInfoVector.block(0, 0, 18, 1)
      - actualFullHessian.block(0, 18, 18, 3)
      * (actualFullHessian.block(18, 18, 3, 3)).inverse()
      * actualFullInfoVector.block(18, 0, 3, 1);

  EXPECT(assert_equal(expectedHessian, actualHessian, 1e-7));
  EXPECT(assert_equal(expectedInfoVector, actualInfoVector, 1e-7));
}
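The block arithmetic at the end of Example 4 is a Schur complement: it marginalizes the 3-dof landmark l1 out of the full 21x21 system (18 camera dofs + 3 point dofs) obtained from generalGraph.linearize(values)->hessian(), so that it can be compared with the smart factor, which eliminates the point internally. In generic notation, partitioning the full Hessian system into camera (c) and landmark (l) blocks,

$$\begin{bmatrix} H_{cc} & H_{cl} \\ H_{lc} & H_{ll} \end{bmatrix}
\begin{bmatrix} \delta_c \\ \delta_l \end{bmatrix} =
\begin{bmatrix} g_c \\ g_l \end{bmatrix}
\;\;\Longrightarrow\;\;
\big(H_{cc} - H_{cl} H_{ll}^{-1} H_{lc}\big)\,\delta_c \;=\; g_c - H_{cl} H_{ll}^{-1} g_l.$$

The 18x18 actualHessian and actualInfoVector are exactly these reduced quantities, which is why the assertions expect them to match the smart factor's Hessian and information vector to numerical tolerance.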
Example 5: update
/* ************************************************************************* */
void NonlinearISAM::update(const NonlinearFactorGraph& newFactors, const Values& initialValues) {

  if(newFactors.size() > 0) {

    // Reorder and relinearize every reorderInterval updates
    if(reorderInterval_ > 0 && ++reorderCounter_ >= reorderInterval_) {
      reorder_relinearize();
      reorderCounter_ = 0;
    }

    factors_.push_back(newFactors);

    // Linearize new factors and insert them
    // TODO: optimize for whole config?
    linPoint_.insert(initialValues);
    boost::shared_ptr<GaussianFactorGraph> linearizedNewFactors = newFactors.linearize(linPoint_);

    // Update ISAM
    isam_.update(*linearizedNewFactors, eliminationFunction_);
  }
}
Example 6: poseModel
/* ************************************************************************* */
TEST( inference, marginals2)
{
  NonlinearFactorGraph fg;
  SharedDiagonal poseModel(noiseModel::Isotropic::Sigma(3, 0.1));
  SharedDiagonal pointModel(noiseModel::Isotropic::Sigma(2, 0.1));

  fg.add(PriorFactor<Pose2>(X(0), Pose2(), poseModel));
  fg.add(BetweenFactor<Pose2>(X(0), X(1), Pose2(1.0,0.0,0.0), poseModel));
  fg.add(BetweenFactor<Pose2>(X(1), X(2), Pose2(1.0,0.0,0.0), poseModel));
  fg.add(BearingRangeFactor<Pose2, Point2>(X(0), L(0), Rot2(), 1.0, pointModel));
  fg.add(BearingRangeFactor<Pose2, Point2>(X(1), L(0), Rot2(), 1.0, pointModel));
  fg.add(BearingRangeFactor<Pose2, Point2>(X(2), L(0), Rot2(), 1.0, pointModel));

  Values init;
  init.insert(X(0), Pose2(0.0,0.0,0.0));
  init.insert(X(1), Pose2(1.0,0.0,0.0));
  init.insert(X(2), Pose2(2.0,0.0,0.0));
  init.insert(L(0), Point2(1.0,1.0));

  Ordering ordering(*fg.orderingCOLAMD(init));
  FactorGraph<GaussianFactor>::shared_ptr gfg(fg.linearize(init, ordering));
  GaussianMultifrontalSolver solver(*gfg);
  solver.marginalFactor(ordering[L(0)]);
}
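Note that GaussianMultifrontalSolver and the two-argument linearize(init, ordering) overload belong to an older GTSAM API. In recent releases the same computation is usually reached through the Marginals class, which linearizes the graph internally; a minimal hedged sketch (usage below is an assumption about the current API, not part of this example):
#include <gtsam/nonlinear/Marginals.h>
// fg and init as built in the example above
Marginals marginals(fg, init, Marginals::CHOLESKY);
Matrix landmarkCov  = marginals.marginalCovariance(L(0));   // 2x2 covariance of the point
Matrix landmarkInfo = marginals.marginalInformation(L(0));  // its inverse (information matrix)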
Example 7: etree
/* ************************************************************************* *
 * Bayes tree for smoother with "nested dissection" ordering:
 *   C1: x5 x6 x4
 *   C2: x3 x2 : x4
 *   C3: x1 : x2
 *   C4: x7 : x6
 */
TEST( GaussianJunctionTreeB, constructor2 ) {
  // create a graph
  NonlinearFactorGraph nlfg;
  Values values;
  boost::tie(nlfg, values) = createNonlinearSmoother(7);
  SymbolicFactorGraph::shared_ptr symbolic = nlfg.symbolic();

  // linearize
  GaussianFactorGraph::shared_ptr fg = nlfg.linearize(values);

  Ordering ordering;
  ordering += X(1), X(3), X(5), X(7), X(2), X(6), X(4);

  // create an ordering
  GaussianEliminationTree etree(*fg, ordering);
  SymbolicEliminationTree stree(*symbolic, ordering);
  GaussianJunctionTree actual(etree);

  Ordering o324;
  o324 += X(3), X(2), X(4);
  Ordering o56;
  o56 += X(5), X(6);
  Ordering o7;
  o7 += X(7);
  Ordering o1;
  o1 += X(1);

  // Can no longer test these:
  // Ordering sep1;
  // Ordering sep2; sep2 += X(4);
  // Ordering sep3; sep3 += X(6);
  // Ordering sep4; sep4 += X(2);

  GaussianJunctionTree::sharedNode x324 = actual.roots().front();
  LONGS_EQUAL(2, x324->children.size());
  GaussianJunctionTree::sharedNode x1 = x324->children.front();
  GaussianJunctionTree::sharedNode x56 = x324->children.back();
  if (x1->children.size() > 0)
    x1.swap(x56); // makes it work with different tie-breakers

  LONGS_EQUAL(0, x1->children.size());
  LONGS_EQUAL(1, x56->children.size());
  GaussianJunctionTree::sharedNode x7 = x56->children[0];
  LONGS_EQUAL(0, x7->children.size());

  EXPECT(assert_equal(o324, x324->orderedFrontalKeys));
  EXPECT_LONGS_EQUAL(5, x324->factors.size());
  EXPECT_LONGS_EQUAL(9, x324->problemSize_);
  EXPECT(assert_equal(o56, x56->orderedFrontalKeys));
  EXPECT_LONGS_EQUAL(4, x56->factors.size());
  EXPECT_LONGS_EQUAL(9, x56->problemSize_);
  EXPECT(assert_equal(o7, x7->orderedFrontalKeys));
  EXPECT_LONGS_EQUAL(2, x7->factors.size());
  EXPECT_LONGS_EQUAL(4, x7->problemSize_);
  EXPECT(assert_equal(o1, x1->orderedFrontalKeys));
  EXPECT_LONGS_EQUAL(2, x1->factors.size());
  EXPECT_LONGS_EQUAL(4, x1->problemSize_);
}
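A related note on Example 7: outside of tests that inspect the tree structure itself, the linearized graph is usually eliminated directly into a Bayes tree rather than by constructing the elimination and junction trees by hand. A small hedged sketch, assuming the fg and ordering variables from the example above:
// Multifrontal elimination of the linearized graph into a Bayes tree,
// followed by back-substitution over the tree to recover the update.
GaussianBayesTree::shared_ptr bayesTree = fg->eliminateMultifrontal(ordering);
VectorValues solution = bayesTree->optimize();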