g2o
Loading...
Searching...
No Matches
sparse_optimizer.cpp
Go to the documentation of this file.
1// g2o - General Graph Optimization
2// Copyright (C) 2011 R. Kuemmerle, G. Grisetti, W. Burgard
3// All rights reserved.
4//
5// Redistribution and use in source and binary forms, with or without
6// modification, are permitted provided that the following conditions are
7// met:
8//
9// * Redistributions of source code must retain the above copyright notice,
10// this list of conditions and the following disclaimer.
11// * Redistributions in binary form must reproduce the above copyright
12// notice, this list of conditions and the following disclaimer in the
13// documentation and/or other materials provided with the distribution.
14//
15// THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS "AS
16// IS" AND ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT LIMITED
17// TO, THE IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS FOR A
18// PARTICULAR PURPOSE ARE DISCLAIMED. IN NO EVENT SHALL THE COPYRIGHT
19// HOLDER OR CONTRIBUTORS BE LIABLE FOR ANY DIRECT, INDIRECT, INCIDENTAL,
20// SPECIAL, EXEMPLARY, OR CONSEQUENTIAL DAMAGES (INCLUDING, BUT NOT LIMITED
21// TO, PROCUREMENT OF SUBSTITUTE GOODS OR SERVICES; LOSS OF USE, DATA, OR
22// PROFITS; OR BUSINESS INTERRUPTION) HOWEVER CAUSED AND ON ANY THEORY OF
23// LIABILITY, WHETHER IN CONTRACT, STRICT LIABILITY, OR TORT (INCLUDING
24// NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY OUT OF THE USE OF THIS
25// SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE.
26
27#include "sparse_optimizer.h"
28
29#include <algorithm>
30#include <cassert>
31#include <iomanip>
32#include <iostream>
33#include <iterator>
34#include <utility>
35
36#include "batch_stats.h"
37#include "estimate_propagator.h"
38#include "g2o/config.h"
40#include "g2o/core/ownership.h"
41#include "g2o/stuff/logger.h"
42#include "g2o/stuff/macros.h"
43#include "g2o/stuff/misc.h"
44#include "g2o/stuff/timeutil.h"
45#include "hyper_graph_action.h"
47#include "robust_kernel.h"
48
49namespace g2o {
50using namespace std;
51
53 : _forceStopFlag(0),
54 _verbose(false),
55 _algorithm(nullptr),
56 _computeBatchStatistics(false) {
58}
59
64
66 // call the callbacks in case there is something registered
68 if (actions.size() > 0) {
69 for (HyperGraphActionSet::iterator it = actions.begin();
70 it != actions.end(); ++it)
71 (*(*it))(this);
72 }
73
74#ifdef G2O_OPENMP
75#pragma omp parallel for default(shared) if (_activeEdges.size() > 50)
76#endif
77 for (int k = 0; k < static_cast<int>(_activeEdges.size()); ++k) {
79 e->computeError();
80 }
81
82#ifndef NDEBUG
83 for (int k = 0; k < static_cast<int>(_activeEdges.size()); ++k) {
85 bool hasNan = arrayHasNaN(e->errorData(), e->dimension());
86 if (hasNan) {
87 G2O_WARN("computeActiveErrors(): found NaN in error for edge {}",
88 static_cast<void*>(e));
89 }
90 }
91#endif
92}
93
95 double chi = 0.0;
96 for (EdgeContainer::const_iterator it = _activeEdges.begin();
97 it != _activeEdges.end(); ++it) {
98 const OptimizableGraph::Edge* e = *it;
99 chi += e->chi2();
100 }
101 return chi;
102}
103
105 Vector3 rho;
106 double chi = 0.0;
107 for (EdgeContainer::const_iterator it = _activeEdges.begin();
108 it != _activeEdges.end(); ++it) {
109 const OptimizableGraph::Edge* e = *it;
110 if (e->robustKernel()) {
111 e->robustKernel()->robustify(e->chi2(), rho);
112 chi += rho[0];
113 } else
114 chi += e->chi2();
115 }
116 return chi;
117}
118
120 if (vertices().empty()) return nullptr;
121
122 int maxDim = maxDimension();
123
125 for (HyperGraph::VertexIDMap::iterator it = vertices().begin();
126 it != vertices().end(); ++it) {
128 static_cast<OptimizableGraph::Vertex*>(it->second);
129 if (v->dimension() == maxDim) {
130 rut = v;
131 break;
132 }
133 }
134 return rut;
135}
136
138 if (vertices().empty()) return false;
139
140 int maxDim = maxDimension();
141
142 for (HyperGraph::VertexIDMap::iterator it = vertices().begin();
143 it != vertices().end(); ++it) {
145 static_cast<OptimizableGraph::Vertex*>(it->second);
146 if (v->dimension() == maxDim) {
147 // test for fixed vertex
148 if (v->fixed()) {
149 return false;
150 }
151 // test for full dimension prior
152 for (HyperGraph::EdgeSet::const_iterator eit = v->edges().begin();
153 eit != v->edges().end(); ++eit) {
154 OptimizableGraph::Edge* e = static_cast<OptimizableGraph::Edge*>(*eit);
155 if (e->vertices().size() == 1 && e->dimension() == maxDim) return false;
156 }
157 }
158 }
159 return true;
160}
161
164 if (!vlist.size()) {
165 _ivMap.clear();
166 return false;
167 }
168
169 _ivMap.resize(vlist.size());
170 size_t i = 0;
171 for (int k = 0; k < 2; k++)
172 for (VertexContainer::iterator it = vlist.begin(); it != vlist.end();
173 ++it) {
175 if (!v->fixed()) {
176 if (static_cast<int>(v->marginalized()) == k) {
177 v->setHessianIndex(i);
178 _ivMap[i] = v;
179 i++;
180 }
181 } else {
182 v->setHessianIndex(-1);
183 }
184 }
185 _ivMap.resize(i);
186 return true;
187}
188
190 for (size_t i = 0; i < _ivMap.size(); ++i) {
191 _ivMap[i]->setHessianIndex(-1);
192 _ivMap[i] = 0;
193 }
194}
195
198 for (VertexIDMap::iterator it = vertices().begin(); it != vertices().end();
199 ++it)
200 vset.insert(it->second);
201 return initializeOptimization(vset, level);
202}
203
205 int level) {
206 if (edges().size() == 0) {
207 G2O_WARN("Attempt to initialize an empty graph");
208 return false;
209 }
210 preIteration(-1);
211 bool workspaceAllocated = _jacobianWorkspace.allocate();
212 (void)workspaceAllocated;
213 assert(workspaceAllocated &&
214 "Error while allocating memory for the Jacobians");
216 _activeVertices.clear();
217 _activeVertices.reserve(vset.size());
218 _activeEdges.clear();
219 set<Edge*> auxEdgeSet; // temporary structure to avoid duplicates
220 for (HyperGraph::VertexSet::iterator it = vset.begin(); it != vset.end();
221 ++it) {
223 const OptimizableGraph::EdgeSet& vEdges = v->edges();
224 // count if there are edges in that level. If not remove from the pool
225 int levelEdges = 0;
226 for (OptimizableGraph::EdgeSet::const_iterator it = vEdges.begin();
227 it != vEdges.end(); ++it) {
229 reinterpret_cast<OptimizableGraph::Edge*>(*it);
230 if (level < 0 || e->level() == level) {
231 bool allVerticesOK = true;
232 for (vector<HyperGraph::Vertex*>::const_iterator vit =
233 e->vertices().begin();
234 vit != e->vertices().end(); ++vit) {
235 if (vset.find(*vit) == vset.end()) {
236 allVerticesOK = false;
237 break;
238 }
239 }
240 if (allVerticesOK && !e->allVerticesFixed()) {
241 auxEdgeSet.insert(e);
242 levelEdges++;
243 }
244 }
245 }
246 if (levelEdges) {
247 _activeVertices.push_back(v);
248
249 // test for NANs in the current estimate if we are debugging
250#ifndef NDEBUG
251 int estimateDim = v->estimateDimension();
252 if (estimateDim > 0) {
253 VectorX estimateData(estimateDim);
254 if (v->getEstimateData(estimateData.data()) == true) {
255 int k;
256 bool hasNan = arrayHasNaN(estimateData.data(), estimateDim, &k);
257 if (hasNan)
258 G2O_WARN("{}: Vertex {} contains a nan entry at index {}",
259 __PRETTY_FUNCTION__, v->id(), k);
260 }
261 }
262#endif
263 }
264 }
265
266 _activeEdges.reserve(auxEdgeSet.size());
267 for (set<Edge*>::iterator it = auxEdgeSet.begin(); it != auxEdgeSet.end();
268 ++it)
269 _activeEdges.push_back(*it);
270
272 bool indexMappingStatus = buildIndexMapping(_activeVertices);
273 postIteration(-1);
274 return indexMappingStatus;
275}
276
278 preIteration(-1);
279 bool workspaceAllocated = _jacobianWorkspace.allocate();
280 (void)workspaceAllocated;
281 assert(workspaceAllocated &&
282 "Error while allocating memory for the Jacobians");
284 _activeVertices.clear();
285 _activeEdges.clear();
286 _activeEdges.reserve(eset.size());
287 set<Vertex*> auxVertexSet; // temporary structure to avoid duplicates
288 for (HyperGraph::EdgeSet::iterator it = eset.begin(); it != eset.end();
289 ++it) {
291 if (e->numUndefinedVertices()) continue;
292 for (vector<HyperGraph::Vertex*>::const_iterator vit =
293 e->vertices().begin();
294 vit != e->vertices().end(); ++vit) {
295 auxVertexSet.insert(static_cast<OptimizableGraph::Vertex*>(*vit));
296 }
297 _activeEdges.push_back(reinterpret_cast<OptimizableGraph::Edge*>(*it));
298 }
299
300 _activeVertices.reserve(auxVertexSet.size());
301 for (set<Vertex*>::iterator it = auxVertexSet.begin();
302 it != auxVertexSet.end(); ++it)
303 _activeVertices.push_back(*it);
304
306 bool indexMappingStatus = buildIndexMapping(_activeVertices);
307 postIteration(-1);
308 return indexMappingStatus;
309}
310
312 for (VertexIDMap::iterator it = vertices().begin(); it != vertices().end();
313 ++it) {
315 static_cast<OptimizableGraph::Vertex*>(it->second);
316 v->setToOrigin();
317 }
318}
319
324
// NOTE(review): this listing is a garbled documentation export — several
// source lines, including the function signature and some declarations
// (e.g. the definitions of `v` and `vedge`), were dropped. The code below
// is kept verbatim; only comments are added.
//
// computeInitialGuess(costFunction): initializes the non-fixed active
// vertices by propagating estimates outward from the "fixed" set. The
// fixed set contains every fixed() vertex plus any vertex that a unary
// edge (a prior) can fully initialize. Vertices not taking part in the
// optimization (hessianIndex == -1) are pushed first and popped at the
// end so the propagation does not clobber their estimates.
326 EstimatePropagatorCost& costFunction) {
328 std::set<Vertex*> backupVertices;
329 HyperGraph::VertexSet fixedVertices; // these are the root nodes where to
330 // start the initialization
331 for (EdgeContainer::iterator it = _activeEdges.begin();
332 it != _activeEdges.end(); ++it) {
333 OptimizableGraph::Edge* e = *it;
334 for (size_t i = 0; i < e->vertices().size(); ++i) {
336 static_cast<OptimizableGraph::Vertex*>(e->vertex(i));
337 if (!v) continue;
338 if (v->fixed())
339 fixedVertices.insert(v);
340 else { // check for having a prior which is able to fully initialize a
341 // vertex
342 for (EdgeSet::const_iterator vedgeIt = v->edges().begin();
343 vedgeIt != v->edges().end(); ++vedgeIt) {
345 static_cast<OptimizableGraph::Edge*>(*vedgeIt);
346 if (vedge->vertices().size() == 1 &&
347 vedge->initialEstimatePossible(emptySet, v) > 0.) {
348 vedge->initialEstimate(emptySet, v);
349 fixedVertices.insert(v);
350 }
351 }
352 }
// vertices outside the index mapping must be restored afterwards
353 if (v->hessianIndex() == -1) {
354 std::set<Vertex*>::const_iterator foundIt = backupVertices.find(v);
355 if (foundIt == backupVertices.end()) {
356 v->push();
357 backupVertices.insert(v);
358 }
359 }
360 }
361 }
362
// breadth-first propagation of the initial guess from the fixed set
363 EstimatePropagator estimatePropagator(this);
364 estimatePropagator.propagate(fixedVertices, costFunction);
365
366 // restoring the vertices that should not be initialized
367 for (std::set<Vertex*>::iterator it = backupVertices.begin();
368 it != backupVertices.end(); ++it) {
369 Vertex* v = *it;
370 v->pop();
371 }
372 if (verbose()) {
374 cerr << "iteration= -1\t chi2= " << activeChi2() << "\t time= 0.0"
375 << "\t cumTime= 0.0"
376 << "\t (using initial guess from " << costFunction.name() << ")"
377 << endl;
378 }
379 }
380
// NOTE(review): garbled documentation export — some lines are missing
// (the batch-statistics setup around the loop head and parts of the
// timing/statistics branches). The code below is kept verbatim; only
// comments are added.
//
// optimize(): runs up to `iterations` solver iterations on the active
// portion of the graph. Returns the number of iterations performed, 0 if
// the solver reported Fail, or -1 when the optimizer was not initialized
// or the algorithm failed to init.
381int SparseOptimizer::optimize(int iterations, bool online) {
382 if (_ivMap.size() == 0) {
383 G2O_WARN(
384 "{}: 0 vertices to optimize, maybe forgot to call "
385 "initializeOptimization()",
387 return -1;
388 }
389
390 int cjIterations = 0;
391 double cumTime = 0;
392 bool ok = true;
393
394 ok = _algorithm->init(online);
395 if (!ok) {
396 G2O_ERROR("{}: Error while initializing", __PRETTY_FUNCTION__);
397 return -1;
398 }
399
// per-iteration statistics are only collected when explicitly enabled
400 _batchStatistics.clear();
401 if (_computeBatchStatistics) _batchStatistics.resize(iterations);
402
403 OptimizationAlgorithm::SolverResult result = OptimizationAlgorithm::OK;
404 for (int i = 0; i < iterations && !terminate() && ok; i++) {
405 preIteration(i);
406
410 cstat.iteration = i;
411 cstat.numEdges = _activeEdges.size();
412 cstat.numVertices = _activeVertices.size();
413 }
414
// wall-clock time of the solver step, measured with a monotonic clock
415 double ts = get_monotonic_time();
416 result = _algorithm->solve(i, online);
417 ok = (result == OptimizationAlgorithm::OK);
418
419 bool errorComputed = false;
422 errorComputed = true;
424 _batchStatistics[i].timeIteration = get_monotonic_time() - ts;
425 }
426
427 if (verbose()) {
428 double dts = get_monotonic_time() - ts;
429 cumTime += dts;
// avoid recomputing the errors when the statistics path already did
430 if (!errorComputed) computeActiveErrors();
431 cerr << "iteration= " << i << "\t chi2= " << FIXED(activeRobustChi2())
432 << "\t time= " << dts << "\t cumTime= " << cumTime
433 << "\t edges= " << _activeEdges.size();
435 cerr << endl;
436 }
437 ++cjIterations;
438 postIteration(i);
439 }
440 if (result == OptimizationAlgorithm::Fail) {
441 return 0;
442 }
443 return cjIterations;
444}
445
446void SparseOptimizer::update(const double* update) {
447 // update the graph by calling oplus on the vertices
448 for (size_t i = 0; i < _ivMap.size(); ++i) {
450#ifndef NDEBUG
451 bool hasNan = arrayHasNaN(update, v->dimension());
452 if (hasNan)
453 G2O_WARN("{}: Update contains a nan for vertex {}", __PRETTY_FUNCTION__,
454 v->id());
455#endif
456 v->oplus(update);
457 update += v->dimension();
458 }
459}
460
468
470 HyperGraph::EdgeSet& eset) {
471 std::vector<HyperGraph::Vertex*> newVertices;
472 newVertices.reserve(vset.size());
473 _activeVertices.reserve(_activeVertices.size() + vset.size());
474 _activeEdges.reserve(_activeEdges.size() + eset.size());
475 for (HyperGraph::EdgeSet::iterator it = eset.begin(); it != eset.end();
476 ++it) {
477 OptimizableGraph::Edge* e = static_cast<OptimizableGraph::Edge*>(*it);
478 if (!e->allVerticesFixed()) _activeEdges.push_back(e);
479 }
480
481 // update the index mapping
482 size_t next = _ivMap.size();
483 for (HyperGraph::VertexSet::iterator it = vset.begin(); it != vset.end();
484 ++it) {
486 if (!v->fixed()) {
487 if (!v->marginalized()) {
488 v->setHessianIndex(next);
489 _ivMap.push_back(v);
490 newVertices.push_back(v);
491 _activeVertices.push_back(v);
492 next++;
493 } else // not supported right now
494 abort();
495 } else {
496 v->setHessianIndex(-1);
497 }
498 }
499
500 if (newVertices.size() != vset.size()) {
501 G2O_ERROR("{}: something went wrong, size mismatch {} != {}", vset.size(),
502 newVertices.size());
503 }
504 return _algorithm->updateStructure(newVertices, eset);
505}
506
508 // sort vector structures to get deterministic ordering based on IDs
509 sort(_activeVertices.begin(), _activeVertices.end(), VertexIDCompare());
510 sort(_activeEdges.begin(), _activeEdges.end(), EdgeIDCompare());
511}
512
514 _ivMap.clear();
515 _activeVertices.clear();
516 _activeEdges.clear();
518}
519
520SparseOptimizer::VertexContainer::const_iterator
522 VertexContainer::const_iterator lower = lower_bound(
523 _activeVertices.begin(), _activeVertices.end(), v, VertexIDCompare());
524 if (lower == _activeVertices.end()) return _activeVertices.end();
525 if ((*lower) == v) return lower;
526 return _activeVertices.end();
527}
528
529SparseOptimizer::EdgeContainer::const_iterator SparseOptimizer::findActiveEdge(
530 const OptimizableGraph::Edge* e) const {
531 EdgeContainer::const_iterator lower =
532 lower_bound(_activeEdges.begin(), _activeEdges.end(), e, EdgeIDCompare());
533 if (lower == _activeEdges.end()) return _activeEdges.end();
534 if ((*lower) == e) return lower;
535 return _activeEdges.end();
536}
537
539 for (VertexContainer::iterator it = vlist.begin(); it != vlist.end(); ++it)
540 (*it)->push();
541}
542
544 for (VertexContainer::iterator it = vlist.begin(); it != vlist.end(); ++it)
545 (*it)->pop();
546}
547
551
555
557 for (VertexContainer::iterator it = vlist.begin(); it != vlist.end(); ++it)
558 (*it)->discardTop();
559}
560
562
564 if (_algorithm) {
565 // reset the optimizer for the formerly used solver in case release() won't
566 // delete it
567 _algorithm->setOptimizer(nullptr);
569 }
570
572
574}
575
578 const std::vector<std::pair<int, int> >& blockIndices) {
579 return _algorithm->computeMarginals(spinv, blockIndices);
580}
581
583 const Vertex* vertex) {
584 if (vertex->hessianIndex() < 0) {
585 return false;
586 }
587 std::vector<std::pair<int, int> > index{
588 std::make_pair<int, int>(vertex->hessianIndex(), vertex->hessianIndex())};
589 return computeMarginals(spinv, index);
590}
591
593 const VertexContainer& vertices) {
594 std::vector<std::pair<int, int> > indices;
595 indices.reserve(vertices.size());
596 for (VertexContainer::const_iterator it = vertices.begin();
597 it != vertices.end(); ++it) {
598 indices.emplace_back((*it)->hessianIndex(), (*it)->hessianIndex());
599 }
600 return computeMarginals(spinv, indices);
601}
602
604
607 if (vv->hessianIndex() >= 0) {
609 _ivMap.clear();
610 }
611 return HyperGraph::removeVertex(v, detach);
612}
613
615 std::pair<HyperGraphActionSet::iterator, bool> insertResult =
616 _graphActions[AT_COMPUTEACTIVERROR].insert(action);
617 return insertResult.second;
618}
619
623
625
627
629
630} // namespace g2o
cost for traversing along active edges in the optimizer
virtual std::string_view name() const
propagation of an initial guess
void propagate(OptimizableGraph::Vertex *v, const EstimatePropagator::PropagateCost &cost, const EstimatePropagator::PropagateAction &action=PropagateAction(), double maxDistance=std::numeric_limits< double >::max(), double maxEdgeCost=std::numeric_limits< double >::max())
Abstract action that operates on an entire graph.
int numUndefinedVertices() const
const VertexContainer & vertices() const
const Vertex * vertex(size_t i) const
virtual void resize(size_t size)
Abstract Vertex; your vertex types must derive from this one.
int id() const
returns the id
const EdgeSet & edges() const
returns the set of hyper-edges that are leaving/entering in this vertex
virtual bool removeVertex(Vertex *v, bool detach=false)
std::set< Edge * > EdgeSet
virtual void clear()
clears the graph and empties all structures.
std::set< Vertex * > VertexSet
std::vector< Vertex * > VertexContainer
const EdgeSet & edges() const
const VertexIDMap & vertices() const
int dimension() const
returns the dimensions of the error function
virtual double chi2() const =0
virtual double initialEstimatePossible(const OptimizableGraph::VertexSet &from, OptimizableGraph::Vertex *to)
virtual void computeError()=0
virtual bool allVerticesFixed() const =0
RobustKernel * robustKernel() const
if NOT NULL, error of this edge will be robustifed with the kernel
virtual const double * errorData() const =0
returns the error vector cached after calling computeError()
virtual void initialEstimate(const OptimizableGraph::VertexSet &from, OptimizableGraph::Vertex *to)=0
A general case Vertex for optimization.
bool marginalized() const
true => this node is marginalized out during the optimization
void setHessianIndex(int ti)
set the temporary index of the vertex in the parameter blocks
virtual bool getEstimateData(double *estimate) const
int dimension() const
dimension of the estimated state belonging to this node
bool fixed() const
true => this node is fixed during the optimization
virtual void push()=0
backup the position of the vertex to a stack
void setToOrigin()
sets the node to the origin (used in the multilevel stuff)
virtual int estimateDimension() const
Generic interface for a non-linear solver operating on a graph.
virtual bool computeMarginals(SparseBlockMatrix< MatrixX > &spinv, const std::vector< std::pair< int, int > > &blockIndices)=0
virtual void printVerbose(std::ostream &os) const
virtual bool updateStructure(const std::vector< HyperGraph::Vertex * > &vset, const HyperGraph::EdgeSet &edges)=0
virtual SolverResult solve(int iteration, bool online=false)=0
virtual bool init(bool online=false)=0
void setOptimizer(SparseOptimizer *optimizer)
virtual void robustify(double squaredError, Vector3 &rho) const =0
Sparse matrix which uses blocks.
int optimize(int iterations, bool online=false)
const OptimizationAlgorithm * algorithm() const
the solver used by the optimizer
void discardTop()
same as above, but for the active vertices
BatchStatisticsContainer _batchStatistics
virtual bool removeVertex(HyperGraph::Vertex *v, bool detach=false)
EdgeContainer _activeEdges
sorted according to EdgeIDCompare
void setForceStopFlag(bool *flag)
EdgeContainer::const_iterator findActiveEdge(const OptimizableGraph::Edge *e) const
void setVerbose(bool verbose)
bool addComputeErrorAction(HyperGraphAction *action)
add an action to be executed before the error vectors are computed
void push()
push all the active vertices onto a stack
virtual bool initializeOptimization(HyperGraph::EdgeSet &eset)
virtual void computeInitialGuess()
void setAlgorithm(OptimizationAlgorithm *algorithm)
OptimizationAlgorithm * _algorithm
void setComputeBatchStatistics(bool computeBatchStatistics)
VertexContainer::const_iterator findActiveVertex(const OptimizableGraph::Vertex *v) const
VertexContainer _activeVertices
sorted according to VertexIDCompare
void update(const double *update)
bool buildIndexMapping(SparseOptimizer::VertexContainer &vlist)
bool verbose() const
verbose information during optimization
double activeRobustChi2() const
virtual Vertex * findGauge()
finds a gauge in the graph to remove the undefined dof.
bool removeComputeErrorAction(HyperGraphAction *action)
void pop()
pop (restore) the estimate of the active vertices from the stack
bool computeBatchStatistics() const
bool terminate()
if external stop flag is given, return its state. False otherwise
virtual bool updateInitialization(HyperGraph::VertexSet &vset, HyperGraph::EdgeSet &eset)
bool computeMarginals(SparseBlockMatrix< MatrixX > &spinv, const std::vector< std::pair< int, int > > &blockIndices)
#define G2O_ERROR(...)
Definition logger.h:89
#define G2O_WARN(...)
Definition logger.h:88
#define __PRETTY_FUNCTION__
Definition macros.h:90
some general case utility functions
bool arrayHasNaN(const double *array, int size, int *nanIndex=0)
Definition misc.h:168
VectorN< 3 > Vector3
Definition eigen_types.h:51
void release(T *obj)
Definition ownership.h:8
double get_monotonic_time()
Definition timeutil.cpp:43
VectorN< Eigen::Dynamic > VectorX
Definition eigen_types.h:55
Definition jet.h:876
statistics about the optimization
Definition batch_stats.h:40
int numVertices
how many vertices are involved
Definition batch_stats.h:43
int iteration
which iteration
Definition batch_stats.h:42
static void setGlobalStats(G2OBatchStatistics *b)
int numEdges
how many edges
Definition batch_stats.h:44
order edges based on the internal ID, which is assigned to the edge in addEdge()
order vertices based on their ID
JacobianWorkspace _jacobianWorkspace
virtual void push()
push the estimate of all variables onto a stack
std::vector< OptimizableGraph::Vertex * > VertexContainer
vector container for vertices
std::vector< HyperGraphActionSet > _graphActions
virtual void pop()
pop (restore) the estimate of all variables from the stack
std::set< HyperGraphAction * > HyperGraphActionSet
virtual void postIteration(int)
virtual void preIteration(int)
int maxDimension() const
return the maximum dimension of all vertices in the graph
Vertex * vertex(int id)
returns the vertex number id appropriately casted
utility functions for handling time related stuff