/****************************************************************************
 *
 * ViSP, open source Visual Servoing Platform software.
 * Copyright (C) 2005 - 2019 by Inria. All rights reserved.
 *
 * This software is free software; you can redistribute it and/or modify
 * it under the terms of the GNU General Public License as published by
 * the Free Software Foundation; either version 2 of the License, or
 * (at your option) any later version.
 * See the file LICENSE.txt at the root directory of this source
 * distribution for additional information about the GNU GPL.
 *
 * For using ViSP with software that can not be combined with the GNU
 * GPL, please contact Inria about acquiring a ViSP Professional
 * Edition License.
 *
 * See http://visp.inria.fr for more information.
 *
 * This software was developed at:
 * Inria Rennes - Bretagne Atlantique
 * Campus Universitaire de Beaulieu
 * 35042 Rennes Cedex
 * France
 *
 * If you have questions regarding the use of this file, please contact
 * Inria at visp@inria.fr
 *
 * This file is provided AS IS with NO WARRANTY OF ANY KIND, INCLUDING THE
 * WARRANTY OF DESIGN, MERCHANTABILITY AND FITNESS FOR A PARTICULAR PURPOSE.
 *
 * Description:
 * Tests the control law
 * eye-in-hand control
 * velocity computed in the camera frame
 *
 * Authors:
 * Nicolas Melchior
 *
 *****************************************************************************/
40
/*!

  \file servoAfma6Cylinder2DCamVelocitySecondaryTask.cpp

  \example servoAfma6Cylinder2DCamVelocitySecondaryTask.cpp

  Example of eye-in-hand control law. We control here a real robot, the
  Afma6 robot (a Cartesian robot with 6 degrees of freedom). The velocity
  is computed in the camera frame. The visual features are the two lines
  corresponding to the edges of a cylinder.

  On the one hand, this example illustrates a classical visual servoing
  with a cylinder. On the other hand, it illustrates the behaviour of the
  robot when a secondary task is added.

*/
57
#include <cmath>    // std::fabs
#include <iostream> // std::cout
#include <limits>   // numeric_limits
#include <stdlib.h>

#include <visp3/core/vpConfig.h>
#include <visp3/core/vpDebug.h> // Debug trace
63 #if (defined(VISP_HAVE_AFMA6) && defined(VISP_HAVE_DC1394))
64
65 #include <visp3/core/vpDisplay.h>
66 #include <visp3/core/vpImage.h>
67 #include <visp3/gui/vpDisplayGTK.h>
68 #include <visp3/gui/vpDisplayOpenCV.h>
69 #include <visp3/gui/vpDisplayX.h>
70 #include <visp3/io/vpImageIo.h>
71 #include <visp3/sensor/vp1394TwoGrabber.h>
72
73 #include <visp3/core/vpCylinder.h>
74 #include <visp3/core/vpHomogeneousMatrix.h>
75 #include <visp3/core/vpMath.h>
76 #include <visp3/me/vpMeLine.h>
77 #include <visp3/visual_features/vpFeatureBuilder.h>
78 #include <visp3/visual_features/vpFeatureLine.h>
79 #include <visp3/vs/vpServo.h>
80
81 #include <visp3/robot/vpRobotAfma6.h>
82
83 // Exception
84 #include <visp3/core/vpException.h>
85 #include <visp3/vs/vpServoDisplay.h>
86
main()87 int main()
88 {
89 try {
90 vpImage<unsigned char> I;
91
92 vp1394TwoGrabber g;
93 g.setVideoMode(vp1394TwoGrabber::vpVIDEO_MODE_640x480_MONO8);
94 g.setFramerate(vp1394TwoGrabber::vpFRAMERATE_60);
95 g.open(I);
96
97 g.acquire(I);
98
99 #ifdef VISP_HAVE_X11
100 vpDisplayX display(I, 100, 100, "Current image");
101 #elif defined(VISP_HAVE_OPENCV)
102 vpDisplayOpenCV display(I, 100, 100, "Current image");
103 #elif defined(VISP_HAVE_GTK)
104 vpDisplayGTK display(I, 100, 100, "Current image");
105 #endif
106
107 vpDisplay::display(I);
108 vpDisplay::flush(I);
109
110 vpServo task;
111
112 std::cout << std::endl;
113 std::cout << "-------------------------------------------------------" << std::endl;
114 std::cout << " Test program for vpServo " << std::endl;
115 std::cout << " Eye-in-hand task control, velocity computed in the camera frame" << std::endl;
116 std::cout << " Simulation " << std::endl;
117 std::cout << " task : servo a point " << std::endl;
118 std::cout << "-------------------------------------------------------" << std::endl;
119 std::cout << std::endl;
120
121 int i;
122 int nbline = 2;
123 vpMeLine line[nbline];
124
125 vpMe me;
126 me.setRange(20);
127 me.setPointsToTrack(100);
128 me.setThreshold(2000);
129 me.setSampleStep(10);
130
131 // Initialize the tracking of the two edges of the cylinder
132 for (i = 0; i < nbline; i++) {
133 line[i].setDisplay(vpMeSite::RANGE_RESULT);
134 line[i].setMe(&me);
135
136 line[i].initTracking(I);
137 line[i].track(I);
138 }
139
140 vpRobotAfma6 robot;
141 // robot.move("zero.pos") ;
142
143 vpCameraParameters cam;
144 // Update camera parameters
145 robot.getCameraParameters(cam, I);
146
147 vpTRACE("sets the current position of the visual feature ");
148 vpFeatureLine p[nbline];
149 for (i = 0; i < nbline; i++)
150 vpFeatureBuilder::create(p[i], cam, line[i]);
151
152 vpTRACE("sets the desired position of the visual feature ");
153 vpCylinder cyld(0, 1, 0, 0, 0, 0, 0.04);
154
155 vpHomogeneousMatrix cMo(0, 0, 0.5, 0, 0, vpMath::rad(0));
156
157 cyld.project(cMo);
158
159 vpFeatureLine pd[nbline];
160 vpFeatureBuilder::create(pd[0], cyld, vpCylinder::line1);
161 vpFeatureBuilder::create(pd[1], cyld, vpCylinder::line2);
162
163 // Those lines are needed to keep the conventions define in vpMeLine
164 // (Those in vpLine are less restrictive) Another way to have the
165 // coordinates of the desired features is to learn them before executing
166 // the program.
167 pd[0].setRhoTheta(-fabs(pd[0].getRho()), 0);
168 pd[1].setRhoTheta(-fabs(pd[1].getRho()), M_PI);
169
170 vpTRACE("define the task");
171 vpTRACE("\t we want an eye-in-hand control law");
172 vpTRACE("\t robot is controlled in the camera frame");
173 task.setServo(vpServo::EYEINHAND_CAMERA);
174 task.setInteractionMatrixType(vpServo::DESIRED, vpServo::PSEUDO_INVERSE);
175
176 vpTRACE("\t we want to see a point on a point..");
177 std::cout << std::endl;
178 for (i = 0; i < nbline; i++)
179 task.addFeature(p[i], pd[i]);
180
181 vpTRACE("\t set the gain");
182 task.setLambda(0.3);
183
184 vpTRACE("Display task information ");
185 task.print();
186
187 robot.setRobotState(vpRobot::STATE_VELOCITY_CONTROL);
188
189 unsigned int iter = 0;
190 vpTRACE("\t loop");
191 vpColVector v;
192 vpImage<vpRGBa> Ic;
193 double lambda_av = 0.05;
194 double alpha = 0.02;
195 double beta = 3;
196 double erreur = 1;
197
198 // First loop to reach the convergence position
199 while (erreur > 0.00001) {
200 std::cout << "---------------------------------------------" << iter << std::endl;
201
202 try {
203 g.acquire(I);
204 vpDisplay::display(I);
205
206 // Track the two edges and update the features
207 for (i = 0; i < nbline; i++) {
208 line[i].track(I);
209 line[i].display(I, vpColor::red);
210
211 vpFeatureBuilder::create(p[i], cam, line[i]);
212
213 p[i].display(cam, I, vpColor::red);
214 pd[i].display(cam, I, vpColor::green);
215 }
216
217 vpDisplay::flush(I);
218
219 // Adaptative gain
220 double gain;
221 {
222 if (std::fabs(alpha) <= std::numeric_limits<double>::epsilon())
223 gain = lambda_av;
224 else {
225 gain = alpha * exp(-beta * (task.getError()).sumSquare()) + lambda_av;
226 }
227 }
228 task.setLambda(gain);
229
230 v = task.computeControlLaw();
231
232 if (iter == 0)
233 vpDisplay::getClick(I);
234 } catch (...) {
235 v = 0;
236 robot.setVelocity(vpRobot::CAMERA_FRAME, v);
237 robot.stopMotion();
238 exit(1);
239 }
240
241 robot.setVelocity(vpRobot::CAMERA_FRAME, v);
242 erreur = (task.getError()).sumSquare();
243 vpTRACE("\t\t || s - s* || = %f ", (task.getError()).sumSquare());
244 iter++;
245 }
246
247 /**********************************************************************************************/
248
249 // Second loop is to compute the control while taking into account the
250 // secondary task.
251 vpColVector e1(6);
252 e1 = 0;
253 vpColVector e2(6);
254 e2 = 0;
255 vpColVector proj_e1;
256 vpColVector proj_e2;
257 iter = 0;
258 double rapport = 0;
259 double vitesse = 0.02;
260 unsigned int tempo = 1200;
261
262 for (;;) {
263 std::cout << "---------------------------------------------" << iter << std::endl;
264
265 try {
266 g.acquire(I);
267 vpDisplay::display(I);
268
269 // Track the two edges and update the features
270 for (i = 0; i < nbline; i++) {
271 line[i].track(I);
272 line[i].display(I, vpColor::red);
273
274 vpFeatureBuilder::create(p[i], cam, line[i]);
275
276 p[i].display(cam, I, vpColor::red);
277 pd[i].display(cam, I, vpColor::green);
278 }
279
280 vpDisplay::flush(I);
281
282 v = task.computeControlLaw();
283
284 // Compute the new control law corresponding to the secondary task
285 if (iter % tempo < 400 /*&& iter%tempo >= 0*/) {
286 e2 = 0;
287 e1[0] = fabs(vitesse);
288 proj_e1 = task.secondaryTask(e1);
289 rapport = vitesse / proj_e1[0];
290 proj_e1 *= rapport;
291 v += proj_e1;
292 if (iter == 199)
293 iter += 200; // This line is needed to make on ly an half turn
294 // during the first cycle
295 }
296
297 if (iter % tempo < 600 && iter % tempo >= 400) {
298 e1 = 0;
299 e2[1] = fabs(vitesse);
300 proj_e2 = task.secondaryTask(e2);
301 rapport = vitesse / proj_e2[1];
302 proj_e2 *= rapport;
303 v += proj_e2;
304 }
305
306 if (iter % tempo < 1000 && iter % tempo >= 600) {
307 e2 = 0;
308 e1[0] = -fabs(vitesse);
309 proj_e1 = task.secondaryTask(e1);
310 rapport = -vitesse / proj_e1[0];
311 proj_e1 *= rapport;
312 v += proj_e1;
313 }
314
315 if (iter % tempo < 1200 && iter % tempo >= 1000) {
316 e1 = 0;
317 e2[1] = -fabs(vitesse);
318 proj_e2 = task.secondaryTask(e2);
319 rapport = -vitesse / proj_e2[1];
320 proj_e2 *= rapport;
321 v += proj_e2;
322 }
323
324 robot.setVelocity(vpRobot::CAMERA_FRAME, v);
325 } catch (...) {
326 v = 0;
327 robot.setVelocity(vpRobot::CAMERA_FRAME, v);
328 robot.stopMotion();
329 exit(1);
330 }
331
332 vpTRACE("\t\t || s - s* || = %f ", (task.getError()).sumSquare());
333 iter++;
334 }
335
336 vpTRACE("Display task information ");
337 task.print();
338 return EXIT_SUCCESS;
339 }
340 catch (const vpException &e) {
341 std::cout << "Test failed with exception: " << e << std::endl;
342 return EXIT_FAILURE;
343 }
344 }
345
346 #else
int main()
{
  // ViSP was built without Afma6 robot and/or libdc1394 support: there is
  // nothing to demonstrate, so just report it and exit cleanly.
  const char *msg = "You do not have an afma6 robot connected to your computer...";
  std::cout << msg << std::endl;
  return EXIT_SUCCESS;
}
352
353 #endif
354