g2o
Loading...
Searching...
No Matches
solver_slam2d_linear.cpp
Go to the documentation of this file.
1// g2o - General Graph Optimization
2// Copyright (C) 2011 R. Kuemmerle, G. Grisetti, W. Burgard
3// All rights reserved.
4//
5// Redistribution and use in source and binary forms, with or without
6// modification, are permitted provided that the following conditions are
7// met:
8//
9// * Redistributions of source code must retain the above copyright notice,
10// this list of conditions and the following disclaimer.
11// * Redistributions in binary form must reproduce the above copyright
12// notice, this list of conditions and the following disclaimer in the
13// documentation and/or other materials provided with the distribution.
14//
15// THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS "AS
16// IS" AND ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT LIMITED
17// TO, THE IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS FOR A
18// PARTICULAR PURPOSE ARE DISCLAIMED. IN NO EVENT SHALL THE COPYRIGHT
19// HOLDER OR CONTRIBUTORS BE LIABLE FOR ANY DIRECT, INDIRECT, INCIDENTAL,
20// SPECIAL, EXEMPLARY, OR CONSEQUENTIAL DAMAGES (INCLUDING, BUT NOT LIMITED
21// TO, PROCUREMENT OF SUBSTITUTE GOODS OR SERVICES; LOSS OF USE, DATA, OR
22// PROFITS; OR BUSINESS INTERRUPTION) HOWEVER CAUSED AND ON ANY THEORY OF
23// LIABILITY, WHETHER IN CONTRACT, STRICT LIABILITY, OR TORT (INCLUDING
24// NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY OUT OF THE USE OF THIS
25// SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE.
26
28
29#include <Eigen/Core>
30#include <cassert>
31
33#include "g2o/core/solver.h"
37#include "g2o/stuff/logger.h"
38#include "g2o/stuff/misc.h"
40
// NOTE(review): file-scope `using namespace std;` is discouraged; every std
// name visible in this chunk is already explicitly qualified (std::vector,
// std::swap, std::unique_ptr, ...) — consider removing, but TODO confirm no
// unqualified uses exist in the parts of the file dropped by extraction.
41using namespace std;
42
43namespace g2o {
44
// Tree action run while walking the spanning tree produced by HyperDijkstra:
// propagates an initial orientation guess (theta) from each parent vertex to
// its child along the connecting SE2 odometry edge (see the Doxygen brief:
// "compute the initial guess of theta while traveling along the MST").
// NOTE(review): extraction artifact — the class declaration line (orig. line
// 48, presumably `class ThetaTreeAction : public HyperDijkstra::TreeAction {`
// given the base-initializer below) is missing from this view; TODO confirm
// against the repository.
49 public:
// Stores a reference to the caller-owned theta vector that perform() fills
// in; the vector must outlive this action object.
50 explicit ThetaTreeAction(VectorX& theta)
51 : HyperDijkstra::TreeAction(), _thetaGuess(theta) {}
// Called once per tree edge (parent -> child). Returns 0. for the root
// (no parent, nothing to propagate) and 1. otherwise.
52 virtual double perform(HyperGraph::Vertex* v, HyperGraph::Vertex* vParent,
54 if (!vParent) return 0.;
// The tree edge is assumed to be an SE2 odometry edge (static_cast,
// unchecked here; the caller asserts this elsewhere with dynamic_cast).
55 EdgeSE2* odom = static_cast<EdgeSE2*>(e);
56 VertexSE2* from = static_cast<VertexSE2*>(vParent);
57 VertexSE2* to = static_cast<VertexSE2*>(v);
58 assert(to->hessianIndex() >= 0);
// A negative hessianIndex marks a fixed vertex; its theta guess is 0.
59 double fromTheta =
60 from->hessianIndex() < 0 ? 0. : _thetaGuess[from->hessianIndex()];
// `direct` is true when the edge is traversed in its measurement
// direction (parent is the edge's first vertex); otherwise the measured
// rotation must be applied with opposite sign.
61 bool direct = odom->vertices()[0] == from;
62 if (direct)
// NOTE(review): the assignment targets (orig. lines 63 and 66,
// presumably `_thetaGuess[to->hessianIndex()] =`) were dropped by the
// extraction — the lines below are only the right-hand sides.
64 fromTheta + odom->measurement().rotation().angle();
65 else
67 fromTheta - odom->measurement().rotation().angle();
68 return 1.;
69 }
70
71 protected:
// NOTE(review): the member declaration (orig. line 72, presumably
// `VectorX& _thetaGuess;`) is missing from this view.
73};
74
75SolverSLAM2DLinear::SolverSLAM2DLinear(std::unique_ptr<Solver> solver)
76 : OptimizationAlgorithmGaussNewton(std::move(solver)) {}
77
79
80OptimizationAlgorithm::SolverResult SolverSLAM2DLinear::solve(int iteration,
81 bool online) {
82 if (iteration == 0) {
83 bool status = solveOrientation();
84 if (!status) return OptimizationAlgorithm::Fail;
85 }
86
87 return OptimizationAlgorithmGaussNewton::solve(iteration, online);
88}
89
// Computes an initial guess for all robot orientations by solving a single
// scalar-per-vertex linear system (one theta per non-fixed pose), then writes
// the result back into the vertices with translation reset to zero — the
// subsequent Gauss-Newton iterations recover x/y (see comment near the end).
// NOTE(review): extraction artifact — the function signature line (orig. line
// 90, presumably `bool SolverSLAM2DLinear::solveOrientation() {` given the
// call `bool status = solveOrientation();` above and the `return true;`/`
// return false;` statements below) is missing from this view; TODO confirm.
// Preconditions (asserted): the index mapping covers all vertices except
// exactly one, and vertex 0 is the fixed gauge.
91 assert(_optimizer->indexMapping().size() + 1 ==
92 _optimizer->vertices().size() &&
93 "Needs to operate on full graph");
94 assert(_optimizer->vertex(0)->fixed() && "Graph is not fixed by vertex 0");
95 VectorX b, x; // will be used for theta and x/y update
96 b.setZero(_optimizer->indexMapping().size());
97 x.setZero(_optimizer->indexMapping().size());
98
// 1x1 blocks: the system has exactly one scalar unknown (theta) per vertex.
99 typedef Eigen::Matrix<double, 1, 1, Eigen::ColMajor> ScalarMatrix;
100
// Block layout for the sparse matrix: cumulative end indices, i.e. block i
// ends at scalar index i+1.
101 std::vector<int> blockIndices(_optimizer->indexMapping().size());
102 for (size_t i = 0; i < _optimizer->indexMapping().size(); ++i)
103 blockIndices[i] = i + 1;
104
105 SparseBlockMatrix<ScalarMatrix> H(blockIndices.data(), blockIndices.data(),
106 _optimizer->indexMapping().size(),
107 _optimizer->indexMapping().size());
108
109 // building the structure, diagonal for each active vertex
110 for (size_t i = 0; i < _optimizer->indexMapping().size(); ++i) {
// NOTE(review): the loop variable declaration (orig. line 111, presumably
// `OptimizableGraph::Vertex* v = _optimizer->indexMapping()[i];`) was
// dropped by the extraction; `v` below is otherwise undeclared.
112 int poseIdx = v->hessianIndex();
113 ScalarMatrix* m = H.block(poseIdx, poseIdx, true);
114 m->setZero();
115 }
116
117 HyperGraph::VertexSet fixedSet;
118
119 // off diagonal for each edge
120 for (SparseOptimizer::EdgeContainer::const_iterator it =
121 _optimizer->activeEdges().begin();
122 it != _optimizer->activeEdges().end(); ++it) {
// In debug builds verify the edge really is an SE2 odometry edge; release
// builds use an unchecked static_cast for speed.
123#ifndef NDEBUG
124 EdgeSE2* e = dynamic_cast<EdgeSE2*>(*it);
125 assert(e && "Active edges contain non-odometry edge"); //
126#else
127 EdgeSE2* e = static_cast<EdgeSE2*>(*it);
128#endif
// NOTE(review): the declaration parts of these two statements (orig.
// lines 129 and 131, presumably `OptimizableGraph::Vertex* from =` and
// `OptimizableGraph::Vertex* to =`) are missing from this view.
130 static_cast<OptimizableGraph::Vertex*>(e->vertices()[0]);
132 static_cast<OptimizableGraph::Vertex*>(e->vertices()[1]);
133
134 int ind1 = from->hessianIndex();
135 int ind2 = to->hessianIndex();
// hessianIndex == -1 marks a fixed vertex: it contributes no unknown,
// but remember it — the single fixed vertex becomes the tree root below.
136 if (ind1 == -1 || ind2 == -1) {
137 if (ind1 == -1) fixedSet.insert(from); // collect the fixed vertices
138 if (ind2 == -1) fixedSet.insert(to);
139 continue;
140 }
141
142 bool transposedBlock = ind1 > ind2;
143 if (transposedBlock) { // make sure, we allocate the upper triangle block
144 std::swap(ind1, ind2);
145 }
146
147 ScalarMatrix* m = H.block(ind1, ind2, true);
148 m->setZero();
149 }
150
151 // walk along the Minimal Spanning Tree to compute the guess for the robot
152 // orientation
153 assert(fixedSet.size() == 1);
154 VertexSE2* root = static_cast<VertexSE2*>(*fixedSet.begin());
155 VectorX thetaGuess;
156 thetaGuess.setZero(_optimizer->indexMapping().size());
157 UniformCostFunction uniformCost;
158 HyperDijkstra hyperDijkstra(_optimizer);
// With uniform edge costs Dijkstra yields a breadth-first spanning tree
// rooted at the fixed vertex.
159 hyperDijkstra.shortestPaths(root, &uniformCost);
160
// NOTE(review): the statements invoking the tree computation and traversal
// (orig. lines 161 and 163, presumably `HyperDijkstra::computeTree(...)`
// and `HyperDijkstra::visitAdjacencyMap(hyperDijkstra.adjacencyMap(),`
// per the static member declarations in the class reference) are missing
// from this view; line 164 below is the trailing argument of that call.
162 ThetaTreeAction thetaTreeAction(thetaGuess);
164 &thetaTreeAction);
165
166 // construct for the orientation
167 for (SparseOptimizer::EdgeContainer::const_iterator it =
168 _optimizer->activeEdges().begin();
169 it != _optimizer->activeEdges().end(); ++it) {
170 EdgeSE2* e = static_cast<EdgeSE2*>(*it);
171 VertexSE2* from = static_cast<VertexSE2*>(e->vertices()[0]);
172 VertexSE2* to = static_cast<VertexSE2*>(e->vertices()[1]);
173
// Information (inverse covariance) of the rotational component only.
174 double omega = e->information()(2, 2);
175
176 double fromThetaGuess =
177 from->hessianIndex() < 0 ? 0. : thetaGuess[from->hessianIndex()];
178 double toThetaGuess =
179 to->hessianIndex() < 0 ? 0. : thetaGuess[to->hessianIndex()];
// Residual: predicted relative rotation minus measured, wrapped to
// (-pi, pi].
180 double error = normalize_theta(-e->measurement().rotation().angle() +
181 toThetaGuess - fromThetaGuess);
182
183 bool fromNotFixed = !(from->fixed());
184 bool toNotFixed = !(to->fixed());
185
// Accumulate the Gauss-Newton normal equations H * dx = b for this
// scalar constraint; only non-fixed vertices contribute unknowns.
186 if (fromNotFixed || toNotFixed) {
187 double omega_r = -omega * error;
188 if (fromNotFixed) {
189 b(from->hessianIndex()) -= omega_r;
190 (*H.block(from->hessianIndex(), from->hessianIndex()))(0, 0) += omega;
191 if (toNotFixed) {
// Only the upper-triangular block was allocated above, so pick
// the (smaller, larger) index order for the off-diagonal term.
192 if (from->hessianIndex() > to->hessianIndex())
193 (*H.block(to->hessianIndex(), from->hessianIndex()))(0, 0) -= omega;
194 else
195 (*H.block(from->hessianIndex(), to->hessianIndex()))(0, 0) -= omega;
196 }
197 }
198 if (toNotFixed) {
199 b(to->hessianIndex()) += omega_r;
200 (*H.block(to->hessianIndex(), to->hessianIndex()))(0, 0) += omega;
201 }
202 }
203 }
204
205 // solve orientation
206 typedef LinearSolverEigen<ScalarMatrix> SystemSolver;
207 SystemSolver linearSystemSolver;
208 linearSystemSolver.init();
209 bool ok = linearSystemSolver.solve(H, x.data(), b.data());
210 if (!ok) {
211 G2O_ERROR("{} Failure while solving linear system", __PRETTY_FUNCTION__);
212 return false;
213 }
214
215 // update the orientation of the 2D poses and set translation to 0, GN shall
216 // solve that
217 root->setToOrigin();
218 for (size_t i = 0; i < _optimizer->indexMapping().size(); ++i) {
219 VertexSE2* v = static_cast<VertexSE2*>(_optimizer->indexMapping()[i]);
220 int poseIdx = v->hessianIndex();
// Final theta = MST-propagated guess + linear correction, re-wrapped.
221 SE2 poseUpdate(0, 0, normalize_theta(thetaGuess(poseIdx) + x(poseIdx)));
222 v->setEstimate(poseUpdate);
223 }
224
225 return true;
226}
227
228} // namespace g2o
EIGEN_STRONG_INLINE const Measurement & measurement() const
accessor functions for the measurement represented by the edge
Definition base_edge.h:119
EIGEN_STRONG_INLINE const InformationType & information() const
information matrix of the constraint
Definition base_edge.h:107
void setEstimate(const EstimateType &et)
set the estimate for the vertex; also calls updateCache()
2D edge between two Vertex2
Definition edge_se2.h:41
const VertexContainer & vertices() const
abstract Vertex, your types must derive from that one
std::set< Vertex * > VertexSet
const VertexIDMap & vertices() const
linear solver which uses the sparse Cholesky solver from Eigen
A general case Vertex for optimization.
bool fixed() const
true => this node is fixed during the optimization
void setToOrigin()
sets the node to the origin (used in the multilevel stuff)
Implementation of the Gauss Newton Algorithm.
virtual SolverResult solve(int iteration, bool online=false)
SparseOptimizer * _optimizer
the optimizer the solver is working on
represent SE2
Definition se2.h:43
virtual OptimizationAlgorithm::SolverResult solve(int iteration, bool online=false)
SolverSLAM2DLinear(std::unique_ptr< Solver > solver)
Sparse matrix which uses blocks.
SparseMatrixBlock * block(int r, int c, bool alloc=false)
const EdgeContainer & activeEdges() const
the edges active in the current optimization
const VertexContainer & indexMapping() const
the index mapping of the vertices
compute the initial guess of theta while traveling along the MST
virtual double perform(HyperGraph::Vertex *v, HyperGraph::Vertex *vParent, HyperGraph::Edge *e)
2D pose Vertex, (x,y,theta)
Definition vertex_se2.h:41
#define G2O_ERROR(...)
Definition logger.h:89
#define __PRETTY_FUNCTION__
Definition macros.h:90
some general case utility functions
double normalize_theta(double theta)
Definition misc.h:103
VectorN< Eigen::Dynamic > VectorX
Definition eigen_types.h:55
Definition jet.h:876
static void computeTree(AdjacencyMap &amap)
static void visitAdjacencyMap(AdjacencyMap &amap, TreeAction *action, bool useDistance=false)
void shortestPaths(HyperGraph::Vertex *v, HyperDijkstra::CostFunction *cost, double maxDistance=std::numeric_limits< double >::max(), double comparisonConditioner=1e-3, bool directed=false, double maxEdgeCost=std::numeric_limits< double >::max())
AdjacencyMap & adjacencyMap()
Vertex * vertex(int id)
returns the vertex number id appropriately casted