// NOTE(review): this chunk appears to be a garbled extraction of a g2o
// 3D-simulator example.  The leading integers on most lines are the original
// file's own line numbers fused into the text, statements are split
// mid-expression, and several declarations the code relies on (arg, world,
// graph, landmark, the sensor objects, sampled, x/y/z, the generator seed)
// live on lines missing from this view.  The comments below describe intent
// only; confirm details against the upstream source before editing.
43 {
// --- Simulation parameters; filled in by the command-line parsing below. ---
45 int nlandmarks;
46 int simSteps;
47 double worldSize;
48 bool hasOdom;
49 bool hasPoseSensor;
50 bool hasPointSensor;
51 bool hasPointDepthSensor;
52 bool hasPointDisparitySensor;
53 bool hasCompass;
54 bool hasGPS;
55
56 std::string outputFilename;
// Register command-line options: (name, destination, default, description).
// 'arg' is presumably a g2o CommandArgs instance declared above this view
// — TODO confirm.
57 arg.
param(
"simSteps", simSteps, 100,
"number of simulation steps");
58 arg.
param(
"nLandmarks", nlandmarks, 1000,
"number of landmarks");
59 arg.
param(
"worldSize", worldSize, 25.0,
"size of the world");
60 arg.
param(
"hasOdom", hasOdom,
false,
"the robot has an odometry");
61 arg.
param(
"hasPointSensor", hasPointSensor,
false,
62 "the robot has a point sensor");
63 arg.
param(
"hasPointDepthSensor", hasPointDepthSensor,
false,
64 "the robot has a point sensor");
65 arg.
param(
"hasPointDisparitySensor", hasPointDisparitySensor,
false,
66 "the robot has a point sensor");
67 arg.
param(
"hasPoseSensor", hasPoseSensor,
false,
68 "the robot has a pose sensor");
// NOTE(review): hasCompass and hasGPS are parsed here but never read again
// in the visible lines — either used on missing lines or dead options.
69 arg.
param(
"hasCompass", hasCompass,
false,
"the robot has a compass");
70 arg.
param(
"hasGPS", hasGPS,
false,
"the robot has a GPS");
// Trailing positional argument: the output graph file (optional, with a
// default name).
71 arg.
paramLeftOver(
"graph-output", outputFilename,
"simulator_out.g2o",
72 "graph file which will be written", true);
73
75
// RNG used for landmark placement / motion sampling.  Seeding (if any) is
// not visible in this view.
76 std::mt19937 generator;
// Scatter nlandmarks landmarks into the world.  The lines computing x, y, z
// (presumably uniform samples over the world extent) and constructing
// 'landmark' are missing from this view.
79 for (int i = 0; i < nlandmarks; i++) {
84 landmark->vertex()->setEstimate(Vector3d(x, y, z));
85 world.addWorldObject(landmark);
86 }
// Create the simulated robot and register it with the world.
87 Robot3D robot(&world,
"myRobot");
88 world.addRobot(&robot);
89
// Encode the simulation settings into a tag string; NOTE(review): 'ss' is
// appended to below but never read in the visible lines (perhaps used for a
// filename on a missing line).
90 stringstream ss;
91 ss << "-ws" << worldSize;
92 ss << "-nl" << nlandmarks;
93 ss << "-steps" << simSteps;
94
// Optionally attach each sensor requested on the command line.  For the
// camera-like sensors the code builds a mounting pose 0.3 m above the base
// with R mapping camera axes onto the robot frame (optical z axis along the
// robot's forward x axis).
95 if (hasOdom) {
97 robot.addSensor(odometrySensor);
98 ss << "-odom";
99 }
100
101 if (hasPointSensor) {
103 pointSensor->
setFov(M_PI / 4);
104 robot.addSensor(pointSensor);
105 Eigen::Isometry3d cameraPose;
106 Eigen::Matrix3d R;
107 R << 0, 0, 1, -1, 0, 0, 0, -1, 0;
// Assigning a Matrix3d to an Isometry3d sets its rotation part.
109 cameraPose = R;
110 cameraPose.translation() = Vector3d(0., 0., 0.3);
// NOTE(review): cameraPose is never applied to pointSensor in the visible
// lines — the setOffset call was probably lost in extraction; verify
// upstream.
112 ss << "-pointXYZ";
113 }
114
115 if (hasPointDisparitySensor) {
118 disparitySensor->
setFov(M_PI / 4);
121 robot.addSensor(disparitySensor);
122 Eigen::Isometry3d cameraPose;
123 Eigen::Matrix3d R;
124 R << 0, 0, 1, -1, 0, 0, 0, -1, 0;
125 cameraPose = R;
126 cameraPose.translation() = Vector3d(0., 0., 0.3);
// NOTE(review): as above, cameraPose is unused in the visible lines.
128 ss << "-disparity";
129 }
130
131 if (hasPointDepthSensor) {
133 depthSensor->
setFov(M_PI / 4);
136 robot.addSensor(depthSensor);
137 Eigen::Isometry3d cameraPose;
138 Eigen::Matrix3d R;
139 R << 0, 0, 1, -1, 0, 0, 0, -1, 0;
140 cameraPose = R;
141 cameraPose.translation() = Vector3d(0., 0., 0.3);
// NOTE(review): as above, cameraPose is unused in the visible lines.
143 ss << "-depth";
144 }
145
146 if (hasPoseSensor) {
148 robot.addSensor(poseSensor);
150 ss << "-pose";
151 }
152
// Compile-time optional sensors; the macro definitions are not visible here.
153#ifdef _POSE_PRIOR_SENSOR
155 robot.addSensor(&posePriorSensor);
156 {
157 Eigen::Isometry3d cameraPose;
158 Eigen::Matrix3d R;
159 R << 0, 0, 1, -1, 0, 0, 0, -1, 0;
160 cameraPose = R;
161 cameraPose.translation() = Vector3d(0., 0., 0.3);
// Here the mounting pose IS applied, via the sensor's offset parameter.
162 posePriorSensor.offsetParam()->setOffset(cameraPose);
163 }
164#endif
165
166#ifdef _POSE_SENSOR_OFFSET
168 poseSensor.
setFov(M_PI / 4);
171 robot.addSensor(&poseSensor);
// Dead code in the original ('if (0)'): the offset assignment is disabled.
172 if (0) {
173 Eigen::Isometry3d cameraPose;
174 Eigen::Matrix3d R;
175 R << 0, 0, 1, -1, 0, 0, 0, -1, 0;
176 cameraPose = R;
177 cameraPose.translation() = Vector3d(0., 0., 0.3);
178 poseSensor.offsetParam1()->setOffset(cameraPose);
179 poseSensor.offsetParam2()->setOffset(cameraPose);
180 }
181#endif
182
// --- Random-walk simulation. ---
// Start at the origin, then at each step move straight with probability
// pStraight, turn left (+90 deg about z) with probability pLeft, otherwise
// turn right (-90 deg about z).
183 robot.move(Eigen::Isometry3d::Identity());
184 double pStraight = 0.7;
185 Eigen::Isometry3d moveStraight = Eigen::Isometry3d::Identity();
186 moveStraight.translation() = Vector3d(1., 0., 0.);
187 double pLeft = 0.15;
188 Eigen::Isometry3d moveLeft = Eigen::Isometry3d::Identity();
// Assigning an AngleAxisd to an Isometry3d sets its rotation part.
189 moveLeft = AngleAxisd(M_PI / 2, Vector3d::UnitZ());
190
191 Eigen::Isometry3d moveRight = Eigen::Isometry3d::Identity();
192 moveRight = AngleAxisd(-M_PI / 2, Vector3d::UnitZ());
193
// dtheta holds the desired absolute heading used to steer the robot back
// inside the world when it crosses a boundary.
194 Eigen::Matrix3d dtheta = Eigen::Matrix3d::Identity();
195 for (int i = 0; i < simSteps; i++) {
196 bool boundariesReached = true;
// Progress markers on stderr: 'm' = move, 's' = sense.
197 cerr << "m";
198 Vector3d dt;
199 const Eigen::Isometry3d& pose = robot.pose();
// Pick a heading that points back toward the interior of the
// [-worldSize/2, worldSize/2] square, depending on which edge was crossed.
200 if (pose.translation().x() < -.5 * worldSize) {
201 dtheta = AngleAxisd(0, Vector3d::UnitZ());
202 } else if (pose.translation().x() > .5 * worldSize) {
203 dtheta = AngleAxisd(-M_PI, Vector3d::UnitZ());
204 } else if (pose.translation().y() < -.5 * worldSize) {
205 dtheta = AngleAxisd(M_PI / 2, Vector3d::UnitZ());
206 } else if (pose.translation().y() > .5 * worldSize) {
207 dtheta = AngleAxisd(-M_PI / 2, Vector3d::UnitZ());
208 } else {
209 boundariesReached = false;
210 }
211
212 Eigen::Isometry3d move = Eigen::Isometry3d::Identity();
213 if (boundariesReached) {
// Relative rotation from the current heading to the target heading;
// if already aligned (angle ~ 0), just step forward instead.
214 Eigen::Matrix3d mTheta = pose.rotation().inverse() * dtheta;
215 move = mTheta;
216 AngleAxisd aa(mTheta);
217 if (aa.angle() < std::numeric_limits<double>::epsilon()) {
218 move.translation() = Vector3d(1., 0., 0.);
219 }
220 } else {
// 'sampled' is presumably a uniform draw in [0,1) made on a line
// missing from this view — TODO confirm.
222 if (sampled < pStraight)
223 move = moveStraight;
224 else if (sampled < pStraight + pLeft)
225 move = moveLeft;
226 else
227 move = moveRight;
228 }
229
230
231 robot.relativeMove(move);
232
233 cerr << "s";
// Let every attached sensor generate measurements at the new pose.
234 robot.sense();
235 }
236
237
// Serialize the accumulated graph to the requested output file.  'graph' is
// declared outside this view (presumably filled by the world/robot during
// sense()) — TODO confirm.
238 ofstream testStream(outputFilename.c_str());
239 graph.
save(testStream);
240}
Command-line parsing of argc and argv.
bool parseArgs(int argc, char **argv, bool exitOnError=true)
void paramLeftOver(const std::string &name, std::string &p, const std::string &defValue, const std::string &desc, bool optional=false)
void param(const std::string &name, bool &p, bool defValue, const std::string &desc)
void setOffset(const Isometry3 &offset_=Isometry3::Identity())
void setMinRange(double minRange_)
void setMaxRange(double maxRange_)
ParameterSE3Offset * offsetParam()
ParameterSE3Offset * offsetParam()
ParameterSE3Offset * offsetParam()
double sampleUniform(double min, double max, std::mt19937 *generator)
WorldObject< VertexPointXYZ > WorldObjectTrackXYZ
Robot< WorldObjectSE3 > Robot3D
virtual bool save(std::ostream &os, int level=0) const
Save the graph to a stream; again uses the Factory system.