This article collects typical usage examples of the C++ GaussianFactorGraph class (from the GTSAM library): what the class is for, how it is used, and what real code that calls it looks like. The curated examples below may help answer those questions.
Fifteen GaussianFactorGraph code examples are shown, ordered by popularity by default. Rating the examples you find useful helps the site recommend better C++ samples.
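Before the examples, here is a minimal, self-contained sketch of the class in action. It is an assumption of typical GTSAM usage (it is not taken from the examples below): build a small linear least-squares problem from Jacobian factors, then solve it for a VectorValues.

#include <gtsam/linear/GaussianFactorGraph.h>
#include <gtsam/linear/NoiseModel.h>
using namespace gtsam;

int main() {
  GaussianFactorGraph graph;
  // Unary factor on key 0: ||x0 - [1, 2]||^2 with sigma 0.1
  graph.add(0, I_2x2, Vector2(1.0, 2.0), noiseModel::Isotropic::Sigma(2, 0.1));
  // Binary factor relating keys 0 and 1: ||x1 - x0 - [0.5, 0.5]||^2 with sigma 0.2
  graph.add(0, -I_2x2, 1, I_2x2, Vector2(0.5, 0.5), noiseModel::Isotropic::Sigma(2, 0.2));
  VectorValues solution = graph.optimize();  // solve the resulting normal equations
  solution.print("solution");
  return 0;
}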
Example 1: TEST
/* ************************************************************************* */
TEST (Serialization, gaussian_factor_graph) {
  GaussianFactorGraph graph;
  {
    Matrix A1 = (Matrix(2, 2) << 1., 2., 3., 4.);
    Matrix A2 = (Matrix(2, 2) << 6., 0.2, 8., 0.4);
    Matrix R = (Matrix(2, 2) << 0.1, 0.3, 0.0, 0.34);
    Vector d(2); d << 0.2, 0.5;
    GaussianConditional cg(0, d, R, 1, A1, 2, A2);
    graph.push_back(cg);
  }
  {
    Key i1 = 4, i2 = 7;
    Matrix A1 = eye(3), A2 = -1.0 * eye(3);
    Vector b = ones(3);
    SharedDiagonal model = noiseModel::Diagonal::Sigmas((Vector(3) << 1.0, 2.0, 3.0));
    JacobianFactor jacobianfactor(i1, A1, i2, A2, b, model);
    HessianFactor hessianfactor(jacobianfactor);
    graph.push_back(jacobianfactor);
    graph.push_back(hessianfactor);
  }
  EXPECT(equalsObj(graph));
  EXPECT(equalsXML(graph));
  EXPECT(equalsBinary(graph));
}
Example 2: TEST
/* ************************************************************************* */
TEST(GaussianFactorGraph, multiplyHessianAdd2) {
  GaussianFactorGraph gfg = createGaussianFactorGraphWithHessianFactor();

  // brute force
  Matrix AtA;
  Vector eta;
  boost::tie(AtA, eta) = gfg.hessian();
  Vector X(6);
  X << 1, 2, 3, 4, 5, 6;
  Vector Y(6);
  Y << -450, -450, 300, 400, 2950, 3450;
  EXPECT(assert_equal(Y, AtA * X));

  VectorValues x = map_list_of<Key, Vector>(0, Vector2(1, 2))(1, Vector2(3, 4))(2, Vector2(5, 6));

  VectorValues expected;
  expected.insert(0, Vector2(-450, -450));
  expected.insert(1, Vector2(300, 400));
  expected.insert(2, Vector2(2950, 3450));

  VectorValues actual;
  gfg.multiplyHessianAdd(1.0, x, actual);
  EXPECT(assert_equal(expected, actual));

  // now, do it with non-zero y
  gfg.multiplyHessianAdd(1.0, x, actual);
  EXPECT(assert_equal(2 * expected, actual));
}
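The test relies on the identity y += alpha * (A^T A) * x that multiplyHessianAdd computes; because actual is reused without being reset, the second call accumulates on top of the first, which is why 2 * expected is checked. A minimal sketch of the same identity against the dense Hessian, assuming the default key ordering is used consistently (gfg and x as set up in the test above):

  // Sketch: verify multiplyHessianAdd against the dense A^T A from hessian().
  Matrix AtA; Vector eta;
  boost::tie(AtA, eta) = gfg.hessian();   // dense A^T A and A^T b
  VectorValues y;                         // empty, i.e. zero
  gfg.multiplyHessianAdd(1.0, x, y);      // y += 1.0 * (A^T A) * x
  EXPECT(assert_equal(Vector(AtA * x.vector()), y.vector()));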
Example 3: TEST
/* ************************************************************************* */
TEST( ISAM, iSAM_smoother )
{
  Ordering ordering;
  for (int t = 1; t <= 7; t++) ordering += X(t);

  // Create smoother with 7 nodes
  GaussianFactorGraph smoother = createSmoother(7);

  // run iSAM for every factor
  GaussianISAM actual;
  for(boost::shared_ptr<GaussianFactor> factor: smoother) {
    GaussianFactorGraph factorGraph;
    factorGraph.push_back(factor);
    actual.update(factorGraph);
  }

  // Create expected Bayes Tree by solving smoother with "natural" ordering
  GaussianBayesTree expected = *smoother.eliminateMultifrontal(ordering);

  // Verify sigmas in the bayes tree
  for(const GaussianBayesTree::sharedClique& clique: expected.nodes() | br::map_values) {
    GaussianConditional::shared_ptr conditional = clique->conditional();
    EXPECT(!conditional->get_model());
  }

  // Check whether BayesTree is correct
  EXPECT(assert_equal(GaussianFactorGraph(expected).augmentedHessian(), GaussianFactorGraph(actual).augmentedHessian()));

  // obtain solution
  VectorValues e; // expected solution
  for (int t = 1; t <= 7; t++) e.insert(X(t), Vector::Zero(2));
  VectorValues optimized = actual.optimize(); // actual solution
  EXPECT(assert_equal(e, optimized));
}
Example 4: TEST
/* ************************************************************************* */
TEST( GaussianBayesTree, balanced_smoother_shortcuts )
{
  // Create smoother with 7 nodes
  GaussianFactorGraph smoother = createSmoother(7);

  // Create the Bayes tree
  Ordering ordering;
  ordering += X(1),X(3),X(5),X(7),X(2),X(6),X(4);
  GaussianBayesTree bayesTree = *smoother.eliminateMultifrontal(ordering);

  // Check the conditional P(Root|Root)
  GaussianBayesNet empty;
  GaussianBayesTree::sharedClique R = bayesTree.roots().front();
  GaussianBayesNet actual1 = R->shortcut(R);
  EXPECT(assert_equal(empty, actual1, tol));

  // Check the conditional P(C2|Root)
  GaussianBayesTree::sharedClique C2 = bayesTree[X(3)];
  GaussianBayesNet actual2 = C2->shortcut(R);
  EXPECT(assert_equal(empty, actual2, tol));

  // Check the conditional P(C3|Root), which should be equal to P(x2|x4)
  /** TODO: Note for multifrontal conditional:
   * p_x2_x4 is now an element conditional of the multifrontal conditional bayesTree[ordering[X(2)]]->conditional()
   * We don't know yet how to take it out.
   */
//  GaussianConditional::shared_ptr p_x2_x4 = bayesTree[ordering[X(2)]]->conditional();
//  p_x2_x4->print("Conditional p_x2_x4: ");
//  GaussianBayesNet expected3(p_x2_x4);
//  GaussianISAM::sharedClique C3 = isamTree[ordering[X(1)]];
//  GaussianBayesNet actual3 = GaussianISAM::shortcut(C3,R);
//  EXPECT(assert_equal(expected3,actual3,tol));
}
Example 5: main
/* ************************************************************************* */
int main() {
  gtsam::Key PoseKey1(11);
  gtsam::Key PoseKey2(12);
  gtsam::Key VelKey1(21);
  gtsam::Key VelKey2(22);
  gtsam::Key BiasKey1(31);

  double measurement_dt(0.1);

  Vector world_g(Vector3(0.0, 0.0, 9.81));
  Vector world_rho(Vector3(0.0, -1.5724e-05, 0.0)); // NED system
  gtsam::Vector ECEF_omega_earth(Vector3(0.0, 0.0, 7.292115e-5));
  // world_R_ECEF is a file-scope Rot3 constant in the original test; it is not shown in this excerpt.
  gtsam::Vector world_omega_earth(world_R_ECEF.matrix() * ECEF_omega_earth);

  SharedGaussian model(noiseModel::Isotropic::Sigma(9, 0.1));

  // Second test: zero angular motion, some acceleration - generated in matlab
  Vector measurement_acc(Vector3(6.501390843381716, -6.763926150509185, -2.300389940090343));
  Vector measurement_gyro(Vector3(0.1, 0.2, 0.3));

  InertialNavFactor_GlobalVelocity<Pose3, Vector3, imuBias::ConstantBias> f(
      PoseKey1, VelKey1, BiasKey1, PoseKey2, VelKey2,
      measurement_acc, measurement_gyro, measurement_dt,
      world_g, world_rho, world_omega_earth, model);

  Rot3 R1(0.487316618, 0.125253866, 0.86419557,
          0.580273724, 0.693095498, -0.427669306,
          -0.652537293, 0.709880342, 0.265075427);
  Point3 t1(2.0, 1.0, 3.0);
  Pose3 Pose1(R1, t1);
  Vector3 Vel1 = Vector(Vector3(0.5, -0.5, 0.4));
  Rot3 R2(0.473618898, 0.119523052, 0.872582019,
          0.609241153, 0.67099888, -0.422594037,
          -0.636011287, 0.731761397, 0.244979388);
  Point3 t2 = t1.compose(Point3(Vel1 * measurement_dt));
  Pose3 Pose2(R2, t2);
  Vector dv = measurement_dt * (R1.matrix() * measurement_acc + world_g);
  Vector3 Vel2 = Vel1 + dv;
  imuBias::ConstantBias Bias1;

  Values values;
  values.insert(PoseKey1, Pose1);
  values.insert(PoseKey2, Pose2);
  values.insert(VelKey1, Vel1);
  values.insert(VelKey2, Vel2);
  values.insert(BiasKey1, Bias1);

  Ordering ordering;
  ordering.push_back(PoseKey1);
  ordering.push_back(VelKey1);
  ordering.push_back(BiasKey1);
  ordering.push_back(PoseKey2);
  ordering.push_back(VelKey2);

  GaussianFactorGraph graph;

  gttic_(LinearizeTiming);
  for(size_t i = 0; i < 100000; ++i) {
    GaussianFactor::shared_ptr g = f.linearize(values);
    graph.push_back(g);
  }
  gttoc_(LinearizeTiming);
  tictoc_finishedIteration_();
  tictoc_print_();
}
Example 6: TEST
/* ************************************************************************* */
TEST(HessianFactor, CombineAndEliminate2) {
  Matrix A01 = I_3x3;
  Vector3 b0(1.5, 1.5, 1.5);
  Vector3 s0(1.6, 1.6, 1.6);

  Matrix A10 = 2.0 * I_3x3;
  Matrix A11 = -2.0 * I_3x3;
  Vector3 b1(2.5, 2.5, 2.5);
  Vector3 s1(2.6, 2.6, 2.6);

  Matrix A21 = 3.0 * I_3x3;
  Vector3 b2(3.5, 3.5, 3.5);
  Vector3 s2(3.6, 3.6, 3.6);

  GaussianFactorGraph gfg;
  gfg.add(1, A01, b0, noiseModel::Diagonal::Sigmas(s0, true));
  gfg.add(0, A10, 1, A11, b1, noiseModel::Diagonal::Sigmas(s1, true));
  gfg.add(1, A21, b2, noiseModel::Diagonal::Sigmas(s2, true));

  Matrix93 A0, A1;
  A0 << A10, Z_3x3, Z_3x3;
  A1 << A11, A01, A21;
  Vector9 b, sigmas;
  b << b1, b0, b2;
  sigmas << s1, s0, s2;

  // create a full, uneliminated version of the factor
  JacobianFactor jacobian(0, A0, 1, A1, b,
      noiseModel::Diagonal::Sigmas(sigmas, true));

  // Make sure combining works
  HessianFactor hessian(gfg);
  EXPECT(assert_equal(HessianFactor(jacobian), hessian, 1e-6));
  EXPECT(assert_equal(jacobian.augmentedInformation(),
      hessian.augmentedInformation(), 1e-9));

  // perform elimination on jacobian
  Ordering ordering = list_of(0);
  GaussianConditional::shared_ptr expectedConditional;
  JacobianFactor::shared_ptr expectedFactor;
  boost::tie(expectedConditional, expectedFactor) =
      jacobian.eliminate(ordering);

  // Eliminate
  GaussianConditional::shared_ptr actualConditional;
  HessianFactor::shared_ptr actualHessian;
  boost::tie(actualConditional, actualHessian) =
      EliminateCholesky(gfg, ordering);

  EXPECT(assert_equal(*expectedConditional, *actualConditional, 1e-6));
  VectorValues v;
  v.insert(1, Vector3(1, 2, 3));
  EXPECT_DOUBLES_EQUAL(expectedFactor->error(v), actualHessian->error(v), 1e-9);
  EXPECT(assert_equal(expectedFactor->augmentedInformation(),
      actualHessian->augmentedInformation(), 1e-9));
  EXPECT(assert_equal(HessianFactor(*expectedFactor), *actualHessian, 1e-6));
}
Example 7: init
/* ************************************************************************* */
KalmanFilter::State KalmanFilter::init(const Vector& x0,
    const SharedDiagonal& P0) {
  // Create a factor graph f(x0), eliminate it into P(x0)
  GaussianFactorGraph factorGraph;
  factorGraph.add(0, I_, x0, P0); // |x-x0|^2_diagSigma
  return solve(factorGraph, useQR());
}
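This is the library-side implementation; from the caller's side the prior above is created through KalmanFilter::init and then refined with predict/update. The following is a minimal sketch assuming the usual gtsam::KalmanFilter interface (the dynamics, measurement matrices, and noise sigmas are made up for illustration):

#include <gtsam/linear/KalmanFilter.h>
using namespace gtsam;

void kalmanDemo() {
  KalmanFilter kf(2);                                    // 2-dimensional state
  Vector2 x0(0.0, 0.0);
  SharedDiagonal P0 = noiseModel::Isotropic::Sigma(2, 0.1);
  KalmanFilter::State state = kf.init(x0, P0);           // calls the function shown above

  // One predict/update cycle with identity dynamics and a direct measurement.
  Matrix F = I_2x2, B = I_2x2, H = I_2x2;
  Vector2 u(1.0, 0.0), z(1.0, 0.1);
  state = kf.predict(state, F, B, u, noiseModel::Isotropic::Sigma(2, 0.1));
  state = kf.update(state, H, z, noiseModel::Isotropic::Sigma(2, 0.5));
  state->print("posterior");
}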
Example 8: timePlanarSmootherEliminate
// Create a planar factor graph and eliminate
double timePlanarSmootherEliminate(int N, bool old = true) {
  GaussianFactorGraph fg = planarGraph(N).get<0>();
  clock_t start = clock();
  fg.eliminateMultifrontal();
  clock_t end = clock();
  double dif = (double)(end - start) / CLOCKS_PER_SEC;
  return dif;
}
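Note that clock() measures CPU time rather than elapsed wall-clock time. If wall-clock timing is preferred, a minimal variant (an assumption for illustration, not part of the original benchmark) could use std::chrono:

#include <chrono>

double timePlanarSmootherEliminateWallClock(int N) {  // hypothetical variant
  GaussianFactorGraph fg = planarGraph(N).get<0>();
  auto start = std::chrono::steady_clock::now();
  fg.eliminateMultifrontal();
  auto end = std::chrono::steady_clock::now();
  return std::chrono::duration<double>(end - start).count();  // seconds
}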
Example 9: timeKalmanSmoother
// Create a Kalman smoother for t=1:T and optimize
double timeKalmanSmoother(int T) {
  GaussianFactorGraph smoother = createSmoother(T);
  clock_t start = clock();
  // Keys will come out sorted since keys() returns a set
  smoother.optimize(Ordering(smoother.keys()));
  clock_t end = clock();
  double dif = (double)(end - start) / CLOCKS_PER_SEC;
  return dif;
}
Example 10: clone
/* ************************************************************************* */
GaussianFactorGraph GaussianFactorGraph::clone() const {
  GaussianFactorGraph result;
  BOOST_FOREACH(const sharedFactor& f, *this) {
    if (f)
      result.push_back(f->clone());
    else
      result.push_back(sharedFactor()); // Passes on null factors so indices remain valid
  }
  return result;
}
Example 11: check_smoother
/* ************************************************************************* */
bool check_smoother(const NonlinearFactorGraph& fullgraph, const Values& fullinit,
                    const IncrementalFixedLagSmoother& smoother, const Key& key) {
  GaussianFactorGraph linearized = *fullgraph.linearize(fullinit);
  VectorValues delta = linearized.optimize();
  Values fullfinal = fullinit.retract(delta);
  Point2 expected = fullfinal.at<Point2>(key);
  Point2 actual = smoother.calculateEstimate<Point2>(key);
  return assert_equal(expected, actual);
}
Example 12: TEST
/* ************************************************************************* */
TEST(HessianFactor, CombineAndEliminate)
{
  Matrix A01 = (Matrix(3,3) <<
      1.0, 0.0, 0.0,
      0.0, 1.0, 0.0,
      0.0, 0.0, 1.0);
  Vector b0 = (Vector(3) << 1.5, 1.5, 1.5);
  Vector s0 = (Vector(3) << 1.6, 1.6, 1.6);

  Matrix A10 = (Matrix(3,3) <<
      2.0, 0.0, 0.0,
      0.0, 2.0, 0.0,
      0.0, 0.0, 2.0);
  Matrix A11 = (Matrix(3,3) <<
      -2.0, 0.0, 0.0,
      0.0, -2.0, 0.0,
      0.0, 0.0, -2.0);
  Vector b1 = (Vector(3) << 2.5, 2.5, 2.5);
  Vector s1 = (Vector(3) << 2.6, 2.6, 2.6);

  Matrix A21 = (Matrix(3,3) <<
      3.0, 0.0, 0.0,
      0.0, 3.0, 0.0,
      0.0, 0.0, 3.0);
  Vector b2 = (Vector(3) << 3.5, 3.5, 3.5);
  Vector s2 = (Vector(3) << 3.6, 3.6, 3.6);

  GaussianFactorGraph gfg;
  gfg.add(1, A01, b0, noiseModel::Diagonal::Sigmas(s0, true));
  gfg.add(0, A10, 1, A11, b1, noiseModel::Diagonal::Sigmas(s1, true));
  gfg.add(1, A21, b2, noiseModel::Diagonal::Sigmas(s2, true));

  Matrix zero3x3 = zeros(3,3);
  Matrix A0 = gtsam::stack(3, &A10, &zero3x3, &zero3x3);
  Matrix A1 = gtsam::stack(3, &A11, &A01, &A21);
  Vector b = gtsam::concatVectors(3, &b1, &b0, &b2);
  Vector sigmas = gtsam::concatVectors(3, &s1, &s0, &s2);

  // create a full, uneliminated version of the factor
  JacobianFactor expectedFactor(0, A0, 1, A1, b, noiseModel::Diagonal::Sigmas(sigmas, true));

  // perform elimination on jacobian
  GaussianConditional::shared_ptr expectedConditional;
  JacobianFactor::shared_ptr expectedRemainingFactor;
  boost::tie(expectedConditional, expectedRemainingFactor) =
      expectedFactor.eliminate(Ordering(list_of(0)));

  // Eliminate
  GaussianConditional::shared_ptr actualConditional;
  HessianFactor::shared_ptr actualCholeskyFactor;
  boost::tie(actualConditional, actualCholeskyFactor) =
      EliminateCholesky(gfg, Ordering(list_of(0)));

  EXPECT(assert_equal(*expectedConditional, *actualConditional, 1e-6));
  EXPECT(assert_equal(HessianFactor(*expectedRemainingFactor), *actualCholeskyFactor, 1e-6));
}
Example 13: TEST
/**
 * TEST gtsam solver with an over-constrained system
 *   x + y = 1
 *   x - y = 5
 *   x + 2y = 6
 */
TEST(LPSolver, overConstrainedLinearSystem) {
  GaussianFactorGraph graph;
  Matrix A1 = Vector3(1, 1, 1);
  Matrix A2 = Vector3(1, -1, 2);
  Vector b = Vector3(1, 5, 6);
  JacobianFactor factor(1, A1, 2, A2, b, noiseModel::Constrained::All(3));
  graph.push_back(factor);

  VectorValues x = graph.optimize();

  // This check confirms that gtsam linear constraint solver can't handle
  // over-constrained system
  CHECK(factor.error(x) != 0.0);
}
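The three equations have no exact solution, so a solver that treats them as hard constraints cannot drive the factor error to zero, which is what the final CHECK documents. With a soft (unconstrained) noise model the same Jacobian yields an ordinary least-squares fit instead; a minimal sketch under that assumption, reusing A1, A2, and b from the test above:

  // Sketch: solve the same over-determined system in the least-squares sense.
  GaussianFactorGraph lsq;
  lsq.push_back(JacobianFactor(1, A1, 2, A2, b, noiseModel::Unit::Create(3)));
  VectorValues xLsq = lsq.optimize();  // minimizes ||A1*x1 + A2*x2 - b||^2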
Example 14: fuse
/* ************************************************************************* */
KalmanFilter::State fuse(const KalmanFilter::State& p,
    GaussianFactor* newFactor, bool useQR) {
  // Create a factor graph
  GaussianFactorGraph factorGraph;

  // push back previous solution and new factor
  factorGraph.push_back(p->toFactor());
  factorGraph.push_back(GaussianFactor::shared_ptr(newFactor));

  // Eliminate graph in order x0, x1, to get Bayes net P(x0|x1)P(x1)
  return solve(factorGraph, useQR);
}
示例15: P
/* ************************************************************************* *
 Bayes tree for smoother with "nested dissection" ordering:
 Node[x1] P(x1 | x2)
 Node[x3] P(x3 | x2 x4)
 Node[x5] P(x5 | x4 x6)
 Node[x7] P(x7 | x6)
 Node[x2] P(x2 | x4)
 Node[x6] P(x6 | x4)
 Node[x4] P(x4)
 becomes
 C1 x5 x6 x4
 C2 x3 x2 : x4
 C3 x1 : x2
 C4 x7 : x6
 ************************************************************************* */
TEST( GaussianBayesTree, balanced_smoother_marginals )
{
  // Create smoother with 7 nodes
  GaussianFactorGraph smoother = createSmoother(7);

  // Create the Bayes tree
  Ordering ordering;
  ordering += X(1),X(3),X(5),X(7),X(2),X(6),X(4);
  GaussianBayesTree bayesTree = *smoother.eliminateMultifrontal(ordering);

  VectorValues actualSolution = bayesTree.optimize();
  VectorValues expectedSolution = VectorValues::Zero(actualSolution);
  EXPECT(assert_equal(expectedSolution, actualSolution, tol));

  LONGS_EQUAL(4, (long)bayesTree.size());

  // sigmax1, sigmax3, sigmax4, sigmax7 (and the tol used above) are file-scope
  // constants in the original test file; they are not shown in this excerpt.
  double tol = 1e-5;

  // Check marginal on x1
  JacobianFactor expected1 = GaussianDensity::FromMeanAndStddev(X(1), zero(2), sigmax1);
  JacobianFactor actual1 = *bayesTree.marginalFactor(X(1));
  Matrix expectedCovarianceX1 = eye(2,2) * (sigmax1 * sigmax1);
  Matrix actualCovarianceX1;
  GaussianFactor::shared_ptr m = bayesTree.marginalFactor(X(1), EliminateCholesky);
  actualCovarianceX1 = bayesTree.marginalFactor(X(1), EliminateCholesky)->information().inverse();
  EXPECT(assert_equal(expectedCovarianceX1, actualCovarianceX1, tol));
  EXPECT(assert_equal(expected1, actual1, tol));

  // Check marginal on x2
  double sigx2 = 0.68712938; // FIXME: this should be corrected analytically
  JacobianFactor expected2 = GaussianDensity::FromMeanAndStddev(X(2), zero(2), sigx2);
  JacobianFactor actual2 = *bayesTree.marginalFactor(X(2));
  EXPECT(assert_equal(expected2, actual2, tol));

  // Check marginal on x3
  JacobianFactor expected3 = GaussianDensity::FromMeanAndStddev(X(3), zero(2), sigmax3);
  JacobianFactor actual3 = *bayesTree.marginalFactor(X(3));
  EXPECT(assert_equal(expected3, actual3, tol));

  // Check marginal on x4
  JacobianFactor expected4 = GaussianDensity::FromMeanAndStddev(X(4), zero(2), sigmax4);
  JacobianFactor actual4 = *bayesTree.marginalFactor(X(4));
  EXPECT(assert_equal(expected4, actual4, tol));

  // Check marginal on x7 (should be equal to x1)
  JacobianFactor expected7 = GaussianDensity::FromMeanAndStddev(X(7), zero(2), sigmax7);
  JacobianFactor actual7 = *bayesTree.marginalFactor(X(7));
  EXPECT(assert_equal(expected7, actual7, tol));
}