g2o
Loading...
Searching...
No Matches
sparse_optimizer_terminate_action.cpp
Go to the documentation of this file.
1// g2o - General Graph Optimization
2// Copyright (C) 2011 R. Kuemmerle, G. Grisetti, H. Strasdat, W. Burgard
3// All rights reserved.
4//
5// Redistribution and use in source and binary forms, with or without
6// modification, are permitted provided that the following conditions are
7// met:
8//
9// * Redistributions of source code must retain the above copyright notice,
10// this list of conditions and the following disclaimer.
11// * Redistributions in binary form must reproduce the above copyright
12// notice, this list of conditions and the following disclaimer in the
13// documentation and/or other materials provided with the distribution.
14//
15// THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS "AS
16// IS" AND ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT LIMITED
17// TO, THE IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS FOR A
18// PARTICULAR PURPOSE ARE DISCLAIMED. IN NO EVENT SHALL THE COPYRIGHT
19// HOLDER OR CONTRIBUTORS BE LIABLE FOR ANY DIRECT, INDIRECT, INCIDENTAL,
20// SPECIAL, EXEMPLARY, OR CONSEQUENTIAL DAMAGES (INCLUDING, BUT NOT LIMITED
21// TO, PROCUREMENT OF SUBSTITUTE GOODS OR SERVICES; LOSS OF USE, DATA, OR
22// PROFITS; OR BUSINESS INTERRUPTION) HOWEVER CAUSED AND ON ANY THEORY OF
23// LIABILITY, WHETHER IN CONTRACT, STRICT LIABILITY, OR TORT (INCLUDING
24// NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY OUT OF THE USE OF THIS
25// SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE.
26
28
29#include <cassert>
30#include <limits>
31
32#include "sparse_optimizer.h"
33
34namespace g2o {
35
// Constructor member-initializer list. NOTE(review): the constructor's
// signature line is hidden in this Doxygen source dump (doc lines 36-37);
// only the initializers are visible here.
// Defaults: stop when the relative chi^2 gain drops below 1e-6, no chi^2
// history yet (_lastChi = 0), auxiliary stop flag cleared, and effectively
// unbounded iterations (INT_MAX) until the caller sets a limit.
 38 _gainThreshold(cst(1e-6)),
 39 _lastChi(0),
 40 _auxTerminateFlag(false),
 41 _maxIterations(std::numeric_limits<int>::max()) {}
42
46
// Per-iteration hook invoked by the optimizer. Computes the relative chi^2
// gain between consecutive iterations and raises the optimizer's stop flag
// once the gain falls below _gainThreshold or _maxIterations is reached.
// NOTE(review): the opening line of this operator() definition is hidden in
// this Doxygen dump (doc lines 43-47); only the parameter list and body are
// visible here. Returns `this` unconditionally.
 48 const HyperGraph* graph, Parameters* parameters) {
// The down-casts are verified in debug builds only; release builds rely on
// the caller passing a SparseOptimizer and a ParametersIteration.
 49 assert(dynamic_cast<const SparseOptimizer*>(graph) &&
 50 "graph is not a SparseOptimizer");
 51 assert(dynamic_cast<HyperGraphAction::ParametersIteration*>(parameters) &&
 52 "error casting parameters");
 53
 54 const SparseOptimizer* optimizer = static_cast<const SparseOptimizer*>(graph);
// NOTE(review): the declaration of `params` (doc line 55) is hidden in this
// dump; this line is the right-hand side of that assignment.
 56 static_cast<HyperGraphAction::ParametersIteration*>(parameters);
 57
// Refresh the active error vector so activeRobustChi2() below reflects the
// current state. const_cast is needed because the action only receives a
// const graph pointer.
 58 const_cast<SparseOptimizer*>(optimizer)->computeActiveErrors();
 59 if (params->iteration < 0) {
 60 // let the optimizer run for at least one iteration
 61 // Hence, we reset the stop flag
 62 setOptimizerStopFlag(optimizer, false);
 63 } else if (params->iteration == 0) {
 64 // first iteration, just store the chi2 value
 65 _lastChi = optimizer->activeRobustChi2();
 66 } else {
 67 // compute the gain and stop the optimizer in case the
 68 // gain is below the threshold or we reached the max
 69 // number of iterations
 70 bool stopOptimizer = false;
 71 if (params->iteration < _maxIterations) {
 72 double currentChi = optimizer->activeRobustChi2();
// Relative improvement of the robust chi^2; a negative gain means the error
// increased, in which case the optimizer is allowed to continue.
 73 double gain = (_lastChi - currentChi) / currentChi;
 74 _lastChi = currentChi;
 75 if (gain >= 0 && gain < _gainThreshold) stopOptimizer = true;
 76 } else {
 77 stopOptimizer = true;
 78 }
 79 if (stopOptimizer) { // tell the optimizer to stop
 80 setOptimizerStopFlag(optimizer, true);
 81 }
 82 }
 83 return this;
 84}
85
// Writes `stop` into the optimizer's force-stop flag. If the optimizer has
// an external stop flag installed, that flag is toggled directly; otherwise
// the member _auxTerminateFlag serves as backing storage and is registered
// with the optimizer (const_cast because we only hold a const pointer).
// NOTE(review): the signature line (doc line 90) and the argument passed to
// setForceStopFlag (doc line 97, presumably &_auxTerminateFlag) are hidden
// in this Doxygen dump — confirm against the repository source.
 89
 91 const SparseOptimizer* optimizer, bool stop) {
 92 if (optimizer->forceStopFlag()) {
 93 *(optimizer->forceStopFlag()) = stop;
 94 } else {
 95 _auxTerminateFlag = stop;
 96 const_cast<SparseOptimizer*>(optimizer)->setForceStopFlag(
 98 }
 99}
100
101} // namespace g2o
Abstract action that operates on an entire graph.
virtual HyperGraphAction * operator()(const HyperGraph *graph, Parameters *parameters=0)
void setOptimizerStopFlag(const SparseOptimizer *optimizer, bool stop)
double activeRobustChi2() const
bool * forceStopFlag() const
constexpr double cst(long double v)
Definition misc.h:60
Definition jet.h:876