本文整理汇总了C++中OptimizableGraph::Vertex::copyB方法的典型用法代码示例。如果您正苦于以下问题：C++ Vertex::copyB方法的具体用法？C++ Vertex::copyB怎么用？C++ Vertex::copyB使用的例子？那么，这里精选的方法代码示例或许可以为您提供帮助。您也可以进一步了解该方法所在类OptimizableGraph::Vertex的用法示例。
在下文中一共展示了Vertex::copyB方法的1个代码示例,这些例子默认根据受欢迎程度排序。您可以为喜欢或者感觉有用的代码点赞,您的评价将有助于系统推荐出更棒的C++代码示例。
示例1: optimize
// Performs exactly one optimization step of the incremental sparse optimizer.
//
// Two modes:
//  - Batch step (!online, or an explicit batchStep): (re)builds the linear
//    system structure if needed, adopts the updated estimates as the new
//    linearization point, rebuilds and solves the full system, and caches the
//    Cholesky factor and its permutation for later incremental updates.
//  - Online step: only refreshes the right-hand side b for the vertices
//    touched since the last step and re-solves through the incremental
//    solver interface, without relinearizing.
//
// @param iterations ignored -- exactly one iteration is performed (see cast below).
// @param online     when true, prefer the cheap incremental update path.
// @return 1 on success, 0 if structure building or solving failed.
int SparseOptimizerIncremental::optimize(int iterations, bool online)
{
(void) iterations; // we only do one iteration anyhow
OptimizationAlgorithm* solver = _algorithm;
solver->init(online);
int cjIterations=0;
bool ok=true;
// Batch path: taken for a full (non-online) solve, or when an online step
// has been flagged to do a periodic batch relinearization.
if (! online || batchStep) {
//cerr << "performing batch step" << endl;
if (! online) {
// Build the column-compressed structure of the Hessian from scratch.
ok = _underlyingSolver->buildStructure();
if (! ok) {
cerr << __PRETTY_FUNCTION__ << ": Failure while building CCS structure" << endl;
return 0;
}
}
// copy over the updated estimate as new linearization point
// (Online* vertex types keep the incrementally updated estimate in
// `updatedEstimate`; here it becomes the official estimate.)
if (slamDimension == 3) {
for (size_t i = 0; i < indexMapping().size(); ++i) {
OnlineVertexSE2* v = static_cast<OnlineVertexSE2*>(indexMapping()[i]);
v->setEstimate(v->updatedEstimate);
}
}
else if (slamDimension == 6) {
for (size_t i = 0; i < indexMapping().size(); ++i) {
OnlineVertexSE3* v= static_cast<OnlineVertexSE3*>(indexMapping()[i]);
v->setEstimate(v->updatedEstimate);
}
}
// Relinearize at the new estimate and assemble H and b.
SparseOptimizer::computeActiveErrors();
SparseOptimizer::linearizeSystem();
_underlyingSolver->buildSystem();
// Prepare the constraint-membership vector (one entry per Hessian block).
// Capacity is grown geometrically (2x) to amortize resizes; only the
// first numBlocksRequired entries are actually used, hence the memset
// below clears exactly that many.
// NOTE(review): signed/unsigned comparison -- _cmember.size() is likely
// an Eigen Index while numBlocksRequired is int; confirm no warning/UB.
int numBlocksRequired = _ivMap.size();
if (_cmember.size() < numBlocksRequired) {
_cmember.resize(2 * numBlocksRequired);
}
memset(_cmember.data(), 0, numBlocksRequired * sizeof(int));
// For larger problems, put every vertex connected to one of the 20 most
// recently added vertices into group 1 -- presumably to steer a
// constrained fill-reducing ordering (e.g. CAMD/CCOLAMD) so that recent
// variables end up last in the factor; TODO confirm against the solver
// interface.
if (_ivMap.size() > 100) {
for (size_t i = _ivMap.size() - 20; i < _ivMap.size(); ++i) {
const HyperGraph::EdgeSet& eset = _ivMap[i]->edges();
for (HyperGraph::EdgeSet::const_iterator it = eset.begin(); it != eset.end(); ++it) {
OptimizableGraph::Edge* e = static_cast<OptimizableGraph::Edge*>(*it);
OptimizableGraph::Vertex* v1 = static_cast<OptimizableGraph::Vertex*>(e->vertices()[0]);
OptimizableGraph::Vertex* v2 = static_cast<OptimizableGraph::Vertex*>(e->vertices()[1]);
// hessianIndex() < 0 means the vertex is fixed / not part of the system.
if (v1->hessianIndex() >= 0)
_cmember(v1->hessianIndex()) = 1;
if (v2->hessianIndex() >= 0)
_cmember(v2->hessianIndex()) = 1;
}
}
}
ok = _underlyingSolver->solve();
// get the current cholesky factor along with the permutation
// (_L looks like a CHOLMOD factor: `n` is its dimension, `Perm` the
// fill-reducing permutation -- confirm against the solver interface.)
_L = _solverInterface->L();
if (_perm.size() < (int)_L->n)
_perm.resize(2*_L->n);
int* p = (int*)_L->Perm;
// Store the inverse permutation: _perm[original index] = position in factor.
for (size_t i = 0; i < _L->n; ++i)
_perm[p[i]] = i;
}
else {
// update the b vector
// Pure online step: only the right-hand side changes for vertices
// touched by recently added edges; copy their b segments into the
// solver's b vector at their column offsets.
for (HyperGraph::VertexSet::iterator it = _touchedVertices.begin(); it != _touchedVertices.end(); ++it) {
OptimizableGraph::Vertex* v = static_cast<OptimizableGraph::Vertex*>(*it);
int iBase = v->colInHessian();
v->copyB(_underlyingSolver->b() + iBase);
}
// NOTE(review): the return value of this solve is not checked, unlike
// the batch-path solve above -- confirm whether failure is possible here.
_solverInterface->solve(_underlyingSolver->x(), _underlyingSolver->b());
}
// Apply the computed increment to the vertex estimates.
update(_underlyingSolver->x());
++cjIterations;
if (verbose()){
computeActiveErrors();
cerr
<< "nodes = " << vertices().size()
<< "\t edges= " << _activeEdges.size()
<< "\t chi2= " << FIXED(activeChi2())
<< endl;
}
if (vizWithGnuplot)
gnuplotVisualization();
// `ok` reflects buildStructure()/solve() of the batch path only.
if (! ok)
return 0;
return 1;
}