g2o
Loading...
Searching...
No Matches
Public Member Functions | Protected Member Functions | Protected Attributes | List of all members
g2o::EdgeLabeler Struct Reference

#include <edge_labeler.h>

Collaboration diagram for g2o::EdgeLabeler:
Collaboration graph
[legend]

Public Member Functions

 EdgeLabeler (SparseOptimizer *optimizer)
 
int labelEdges (std::set< OptimizableGraph::Edge * > &edges)
 

Protected Member Functions

void augmentSparsePattern (std::set< std::pair< int, int > > &pattern, OptimizableGraph::Edge *e)
 
bool computePartialInverse (SparseBlockMatrix< Eigen::MatrixXd > &spinv, const std::set< std::pair< int, int > > &pattern)
 
bool labelEdge (const SparseBlockMatrix< Eigen::MatrixXd > &spinv, OptimizableGraph::Edge *e)
 

Protected Attributes

SparseOptimizer * _optimizer
 

Detailed Description

This class implements the functions to label an edge (measurement) based on the actual configuration of the nodes. It does so by

Definition at line 46 of file edge_labeler.h.

Constructor & Destructor Documentation

◆ EdgeLabeler()

g2o::EdgeLabeler::EdgeLabeler ( SparseOptimizer *  optimizer)

constructs an edge labeler that operates on the optimizer passed as argument

Parameters
optimizer — the optimizer

Definition at line 41 of file edge_labeler.cpp.

41{ _optimizer = optimizer; }
SparseOptimizer * _optimizer

References _optimizer.

Member Function Documentation

◆ augmentSparsePattern()

void g2o::EdgeLabeler::augmentSparsePattern ( std::set< std::pair< int, int > > &  pattern,
OptimizableGraph::Edge *  e
)
protected

helper function that augments the sparse pattern of the inverse based on an edge

Parameters
pattern — the blocks of the inverse covered by the edge
e — the edge

Definition at line 69 of file edge_labeler.cpp.

70 {
71 for (size_t i = 0; i < e->vertices().size(); i++) {
72 const OptimizableGraph::Vertex* v =
73 (const OptimizableGraph::Vertex*)e->vertices()[i];
74 int ti = v->hessianIndex();
75 if (ti == -1) continue;
76 for (size_t j = i; j < e->vertices().size(); j++) {
77 const OptimizableGraph::Vertex* v =
78 (const OptimizableGraph::Vertex*)e->vertices()[j];
79 int tj = v->hessianIndex();
80 if (tj == -1) continue;
81 if (tj < ti) swap(ti, tj);
82 pattern.insert(std::make_pair(ti, tj));
83 }
84 }
85}

References g2o::OptimizableGraph::Vertex::hessianIndex(), and g2o::HyperGraph::Edge::vertices().

Referenced by labelEdges().

◆ computePartialInverse()

bool g2o::EdgeLabeler::computePartialInverse ( SparseBlockMatrix< Eigen::MatrixXd > &  spinv,
const std::set< std::pair< int, int > > &  pattern 
)
protected

helper function that computes the inverse based on the sparse pattern

Parameters
spinv — the output block inverse
pattern — the blocks of the inverse covered by the edge
Returns
true on success, false on failure.

Definition at line 87 of file edge_labeler.cpp.

89 {
90 std::vector<std::pair<int, int> > blockIndices(pattern.size());
91 // Why this does not work???
92 // std::copy(pattern.begin(),pattern.end(),blockIndices.begin());
93
94 int k = 0;
95 for (std::set<std::pair<int, int> >::const_iterator it = pattern.begin();
96 it != pattern.end(); ++it) {
97 blockIndices[k++] = *it;
98 }
99
100 // cerr << "sparse pattern contains " << blockIndices.size() << " blocks" <<
101 // endl;
102 return _optimizer->computeMarginals(spinv, blockIndices);
103}
bool computeMarginals(SparseBlockMatrix< MatrixX > &spinv, const std::vector< std::pair< int, int > > &blockIndices)

References _optimizer, and g2o::SparseOptimizer::computeMarginals().

Referenced by labelEdges().

◆ labelEdge()

bool g2o::EdgeLabeler::labelEdge ( const SparseBlockMatrix< Eigen::MatrixXd > &  spinv,
OptimizableGraph::Edge *  e
)
protected

helper function that labels a specific edge based on the marginals in the sparse block inverse

Returns
true on success, false on failure

Definition at line 105 of file edge_labeler.cpp.

// Relabels edge e from the current vertex estimates: assembles the joint
// marginal covariance of the edge's free vertices out of the blocks of spinv,
// draws unscented sigma points of the state increment, applies each increment
// to the vertices (push/oplus/pop), re-evaluates the edge error, and
// reconstructs the Gaussian of the error.
// NOTE(review): the final write-back `info = errorCov.inverse()` is commented
// out below, so the edge's information matrix is never actually modified and
// the function only reports sampling failure — confirm whether the write-back
// should be re-enabled.
106 {
107 Eigen::Map<MatrixXd> info(e->informationData(), e->dimension(),
108 e->dimension());
109 // cerr << "original information matrix" << endl;
110 // cerr << info << endl;
111
// --- pass 1: accumulate the total minimal dimension of the free
// (hessianIndex != -1) vertices; fixed vertices contribute nothing
112 int maxDim = 0;
113 for (size_t i = 0; i < e->vertices().size(); i++) {
114 const OptimizableGraph::Vertex* v =
115 (const OptimizableGraph::Vertex*)e->vertices()[i];
116 int ti = v->hessianIndex();
117 if (ti == -1) continue;
118 maxDim += v->minimalEstimateDimension();
119 }
120
121 // cerr << "maxDim= " << maxDim << endl;
// --- pass 2: assemble the dense joint covariance; spinv stores only the
// upper triangle, so the (tj, ti) block is transposed for ti > tj
122 MatrixXd cov(maxDim, maxDim);
123 int cumRow = 0;
124 for (size_t i = 0; i < e->vertices().size(); i++) {
125 const OptimizableGraph::Vertex* vr =
126 (const OptimizableGraph::Vertex*)e->vertices()[i];
127 int ti = vr->hessianIndex();
128 if (ti > -1) {
129 int cumCol = 0;
130 for (size_t j = 0; j < e->vertices().size(); j++) {
131 const OptimizableGraph::Vertex* vc =
132 (const OptimizableGraph::Vertex*)e->vertices()[j];
133 int tj = vc->hessianIndex();
134 if (tj > -1) {
135 // cerr << "ti=" << ti << " tj=" << tj
136 // << " cumRow=" << cumRow << " cumCol=" << cumCol << endl;
137 if (ti <= tj) {
138 assert(spinv.block(ti, tj));
139 // cerr << "cblock_ptr" << spinv.block(ti, tj) << endl;
140 // cerr << "cblock.size=" << spinv.block(ti, tj)->rows() << "," <<
141 // spinv.block(ti, tj)->cols() << endl; cerr << "cblock" << endl;
142 // cerr << *spinv.block(ti, tj) << endl;
143 cov.block(cumRow, cumCol, vr->minimalEstimateDimension(),
144 vc->minimalEstimateDimension()) = *spinv.block(ti, tj);
145 } else {
146 assert(spinv.block(tj, ti));
147 // cerr << "cblock.size=" << spinv.block(tj, ti)->cols() << "," <<
148 // spinv.block(tj, ti)->rows() << endl; cerr << "cblock" << endl;
149 // cerr << spinv.block(tj, ti)->transpose() << endl;
150 cov.block(cumRow, cumCol, vr->minimalEstimateDimension(),
151 vc->minimalEstimateDimension()) =
152 spinv.block(tj, ti)->transpose();
153 }
154 cumCol += vc->minimalEstimateDimension();
155 }
156 }
157 cumRow += vr->minimalEstimateDimension();
158 }
159 }
160
161 // cerr << "covariance assembled" << endl;
162 // cerr << cov << endl;
163 // now cov contains the aggregate marginals of the state variables in the edge
// --- draw sigma points of the increment around a zero mean
164 VectorXd incMean(maxDim);
165 incMean.fill(0);
166 std::vector<MySigmaPoint> incrementPoints;
167 if (!sampleUnscented(incrementPoints, incMean, cov)) {
168 cerr << "sampleUnscented fail" << endl;
169 return false;
170 }
171 // now determine the zero-error measure by applying the error function of the
172 // edge with a zero measurement
173 // TODO!!!
174 bool smss = e->setMeasurementFromState();
175 if (!smss) {
176 cerr << "FATAL: Edge::setMeasurementFromState() not implemented" << endl;
177 }
178 assert(smss && "Edge::setMeasurementFromState() not implemented");
179
180 // std::vector<MySigmaPoint> globalPoints(incrementPoints.size());
181 std::vector<MySigmaPoint> errorPoints(incrementPoints.size());
182
183 // for each sigma point, project it to the global space, by considering those
184 // variables that are involved
185 // cerr << "sigma points are extracted, remapping to measurement space" <<
186 // endl;
// --- for each sigma point: save vertex states (push), apply the increment
// (oplus), evaluate the edge error, then restore the states (pop)
187 for (size_t i = 0; i < incrementPoints.size(); i++) {
188 int cumPos = 0;
189 // VectorXd globalPoint(maxDim);
190
191 // push all the "active" state variables
192 for (size_t j = 0; j < e->vertices().size(); j++) {
193 OptimizableGraph::Vertex* vr =
194 (OptimizableGraph::Vertex*)e->vertices()[j];
195 int tj = vr->hessianIndex();
196 if (tj == -1) continue;
197 vr->push();
198 }
199
200 for (size_t j = 0; j < e->vertices().size(); j++) {
201 OptimizableGraph::Vertex* vr =
202 (OptimizableGraph::Vertex*)e->vertices()[j];
203 int tj = vr->hessianIndex();
204 if (tj == -1) continue;
205 vr->oplus(&incrementPoints[i]._sample[cumPos]);
206 // assert(vr->getMinimalEstimateData(&globalPoint[cumPos]) &&
207 // "Vertex::getMinimalEstimateData(...) not implemented");
208 cumPos += vr->minimalEstimateDimension();
209 }
210
211 // construct the sigma point in the global space
212 // globalPoints[i]._sample=globalPoint;
213 // globalPoints[i]._wi=incrementPoints[i]._wi;
214 // globalPoints[i]._wp=incrementPoints[i]._wp;
215
216 // construct the sigma point in the error space
217 e->computeError();
218 Map<VectorXd> errorPoint(e->errorData(), e->dimension());
219
220 errorPoints[i]._sample = errorPoint;
221 errorPoints[i]._wi = incrementPoints[i]._wi;
222 errorPoints[i]._wp = incrementPoints[i]._wp;
223
224 // pop all the "active" state variables
225 for (size_t j = 0; j < e->vertices().size(); j++) {
226 OptimizableGraph::Vertex* vr =
227 (OptimizableGraph::Vertex*)e->vertices()[j];
228 int tj = vr->hessianIndex();
229 if (tj == -1) continue;
230 vr->pop();
231 }
232 }
233
234 // reconstruct the covariance of the error by the sigma points
235 MatrixXd errorCov(e->dimension(), e->dimension());
236 VectorXd errorMean(e->dimension());
237 reconstructGaussian(errorMean, errorCov, errorPoints);
238
// NOTE(review): write-back of the remapped information matrix is disabled —
// `info` (which maps the edge's informationData) is left unchanged.
239 // info=errorCov.inverse();
240 // cerr << "remapped information matrix" << endl;
241 // cerr << info << endl;
242 return true;
243}
SparseMatrixBlock * block(int r, int c, bool alloc=false)
void reconstructGaussian(SampleType &mean, CovarianceType &covariance, const std::vector< SigmaPoint< SampleType > > &sigmaPoints)
Definition unscented.h:77
bool sampleUnscented(std::vector< SigmaPoint< SampleType > > &sigmaPoints, const SampleType &mean, const CovarianceType &covariance)
Definition unscented.h:48

References g2o::SparseBlockMatrix< MatrixType >::block(), g2o::OptimizableGraph::Edge::computeError(), g2o::OptimizableGraph::Edge::dimension(), g2o::OptimizableGraph::Edge::errorData(), g2o::OptimizableGraph::Vertex::hessianIndex(), g2o::OptimizableGraph::Edge::informationData(), g2o::OptimizableGraph::Vertex::minimalEstimateDimension(), g2o::OptimizableGraph::Vertex::oplus(), g2o::OptimizableGraph::Vertex::pop(), g2o::OptimizableGraph::Vertex::push(), g2o::reconstructGaussian(), g2o::sampleUnscented(), g2o::OptimizableGraph::Edge::setMeasurementFromState(), and g2o::HyperGraph::Edge::vertices().

Referenced by labelEdges().

◆ labelEdges()

int g2o::EdgeLabeler::labelEdges ( std::set< OptimizableGraph::Edge * > &  edges)

Labels the set of edges passed as argument. It computes the Cholesky information matrix. This method only works after having called optimize(...) on the connected optimizer. The labeling is performed based on the actual configuration of the nodes in the optimized subgraph.

Parameters
edges — the edges to label
Returns
-1 if the inverse cholesky cannot be computed, otherwise the number of edges where the labeling was successful

Definition at line 43 of file edge_labeler.cpp.

43 {
44 // assume the system is "solved"
45 // compute the sparse pattern of the inverse
46 std::set<std::pair<int, int> > pattern;
47 for (std::set<OptimizableGraph::Edge*>::iterator it = edges.begin();
48 it != edges.end(); ++it) {
49 augmentSparsePattern(pattern, *it);
50 }
51
52 SparseBlockMatrix<MatrixXd> spInv;
53
54 bool result = computePartialInverse(spInv, pattern);
55 // cerr << "partial inverse computed = " << result << endl;
56 // cerr << "non zero blocks" << spInv.nonZeroBlocks() << endl;
57
58 if (!result) {
59 return -1;
60 }
61 int count = 0;
62 for (std::set<OptimizableGraph::Edge*>::iterator it = edges.begin();
63 it != edges.end(); ++it) {
64 count += labelEdge(spInv, *it) ? 1 : 0;
65 }
66 return count;
67}
void augmentSparsePattern(std::set< std::pair< int, int > > &pattern, OptimizableGraph::Edge *e)
bool computePartialInverse(SparseBlockMatrix< Eigen::MatrixXd > &spinv, const std::set< std::pair< int, int > > &pattern)
bool labelEdge(const SparseBlockMatrix< Eigen::MatrixXd > &spinv, OptimizableGraph::Edge *e)

References augmentSparsePattern(), computePartialInverse(), and labelEdge().

Referenced by g2o::Star::labelStarEdges().

Member Data Documentation

◆ _optimizer

SparseOptimizer* g2o::EdgeLabeler::_optimizer
protected

Definition at line 81 of file edge_labeler.h.

Referenced by computePartialInverse(), and EdgeLabeler().


The documentation for this struct was generated from the following files: