/****************************************************************************
 *
 * ViSP, open source Visual Servoing Platform software.
 * Copyright (C) 2005 - 2019 by Inria. All rights reserved.
 *
 * This software is free software; you can redistribute it and/or modify
 * it under the terms of the GNU General Public License as published by
 * the Free Software Foundation; either version 2 of the License, or
 * (at your option) any later version.
 * See the file LICENSE.txt at the root directory of this source
 * distribution for additional information about the GNU GPL.
 *
 * For using ViSP with software that can not be combined with the GNU
 * GPL, please contact Inria about acquiring a ViSP Professional
 * Edition License.
 *
 * See http://visp.inria.fr for more information.
 *
 * This software was developed at:
 * Inria Rennes - Bretagne Atlantique
 * Campus Universitaire de Beaulieu
 * 35042 Rennes Cedex
 * France
 *
 * If you have questions regarding the use of this file, please contact
 * Inria at visp@inria.fr
 *
 * This file is provided AS IS with NO WARRANTY OF ANY KIND, INCLUDING THE
 * WARRANTY OF DESIGN, MERCHANTABILITY AND FITNESS FOR A PARTICULAR PURPOSE.
 *
 * Description:
 *   tests the control law
 *   eye-in-hand control
 *   velocity computed in the camera frame
 *
 * Authors:
 * Eric Marchand
 * Fabien Spindler
 *
 *****************************************************************************/

/*!
  \example servoAfma4Point2DCamVelocityKalman.cpp

  \brief Example of eye-in-hand control law. We control here a real
  robot, the Afma4 robot (cylindrical robot, with 4 degrees of
  freedom). The velocity is computed in the camera frame. The visual
  feature is the center of gravity of a point.

  In this example we estimate the velocity of the target in order to reduce
  the tracking error when the target is moving. The velocity of the target is
  filtered by a Kalman filter with a constant velocity state model, or a
  constant acceleration state model.
*/
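
/*
  Sketch of the control scheme implemented in the servo loop below. The
  notations only summarize the code found in main(); J1 denotes the task
  Jacobian (interaction matrix) and J1^+ its pseudo-inverse:

    v1        = task.computeControlLaw()                // ~ -lambda * J1^+ * (s - s*)
    dedt_mes  = (e(t) - e(t-1)) / Dt  -  J1 * v_camera  // measured error drift due to target motion
    dedt_filt = Kalman(dedt_mes)                        // optional filtering, selected with -K
    v2        = -J1^+ * dedt_filt                       // feed-forward compensation term
    v         = v1 + v2                                 // camera velocity sent to the robot
*/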

#include <stdlib.h>
#include <visp3/core/vpConfig.h>
#include <visp3/core/vpDebug.h> // Debug trace
#if (defined(VISP_HAVE_AFMA4) && defined(VISP_HAVE_DC1394))

#include <visp3/core/vpDisplay.h>
#include <visp3/core/vpImage.h>
#include <visp3/gui/vpDisplayGTK.h>
#include <visp3/gui/vpDisplayOpenCV.h>
#include <visp3/gui/vpDisplayX.h>
#include <visp3/sensor/vp1394TwoGrabber.h>

#include <visp3/blob/vpDot2.h>
#include <visp3/core/vpException.h>
#include <visp3/core/vpHomogeneousMatrix.h>
#include <visp3/core/vpIoTools.h>
#include <visp3/core/vpLinearKalmanFilterInstantiation.h>
#include <visp3/core/vpMath.h>
#include <visp3/core/vpPoint.h>
#include <visp3/io/vpParseArgv.h>
#include <visp3/robot/vpRobotAfma4.h>
#include <visp3/visual_features/vpFeatureBuilder.h>
#include <visp3/visual_features/vpFeaturePoint.h>
#include <visp3/vs/vpAdaptiveGain.h>
#include <visp3/vs/vpServo.h>
#include <visp3/vs/vpServoDisplay.h>

// List of allowed command line options
#define GETOPTARGS "hK:l:"
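// In this getopt-like option string, "h" takes no argument while "K:" and
// "l:" each expect a value, as handled in getOptions() below.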

typedef enum { K_NONE, K_VELOCITY, K_ACCELERATION } KalmanType;

/*!

  Print the program options.

  \param name : Program name.
  \param badparam : Bad parameter name.
  \param kalman : Kalman state model selector.

*/
void usage(const char *name, const char *badparam, KalmanType &kalman)
{
  fprintf(stdout, "\n\
Tests a control law with the following characteristics:\n\
- eye-in-hand control\n\
- camera velocities are computed\n\
- servo on 1 point\n\
- Kalman filtering\n\
          \n\
SYNOPSIS\n\
  %s [-l <gain>] [-K <0|1|2>] [-h]\n", name);

  fprintf(stdout, "\n\
OPTIONS:                                               Default\n\
  -l <%%f>                                               \n\
     Set the constant gain. By default adaptive gain. \n\
                  \n\
  -K <0|1|2>                                             %d\n\
     Kalman filtering:\n\
       0: none\n\
       1: velocity model\n\
       2: acceleration model\n\
                  \n\
  -h\n\
     Print the help.\n", (int)kalman);

  if (badparam) {
    fprintf(stderr, "ERROR: \n");
    fprintf(stderr, "\nBad parameter [%s]\n", badparam);
  }
}

/*!

  Set the program options.

  \param argc : Command line number of parameters.
  \param argv : Array of command line parameters.
  \param kalman : Kalman state model selector.
  \param doAdaptativeGain : If true, use an adaptive gain. Otherwise,
  the gain is constant.
  \param lambda : Constant visual servo gain.

  \return false if the program has to be stopped, true otherwise.

*/
bool getOptions(int argc, const char **argv, KalmanType &kalman, bool &doAdaptativeGain,
                vpAdaptiveGain &lambda) // gain lambda
{
  const char *optarg;
  int c;
  while ((c = vpParseArgv::parse(argc, argv, GETOPTARGS, &optarg)) > 1) {

    switch (c) {
    case 'K':
      kalman = (KalmanType)atoi(optarg);
      break;
    case 'l':
      doAdaptativeGain = false;
      lambda.initFromConstant(atof(optarg));
      break;
    case 'h':
      usage(argv[0], NULL, kalman);
      return false;
      break;

    default:
      usage(argv[0], optarg, kalman);
      return false;
      break;
    }
  }

  if ((c == 1) || (c == -1)) {
    // standalone param or error
    usage(argv[0], NULL, kalman);
    std::cerr << "ERROR: " << std::endl;
    std::cerr << "  Bad argument " << optarg << std::endl << std::endl;
    return false;
  }

  return true;
}
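
/*
  Typical command lines, assuming the binary is built under the same name as
  this example file:

    ./servoAfma4Point2DCamVelocityKalman             # no target motion compensation
    ./servoAfma4Point2DCamVelocityKalman -K 1        # constant velocity Kalman model
    ./servoAfma4Point2DCamVelocityKalman -K 2 -l 0.6 # constant acceleration model, constant gain
*/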

int main(int argc, const char **argv)
{
  try {
    KalmanType opt_kalman = K_NONE;
    vpAdaptiveGain lambda;        // Control law gain
    bool doAdaptativeGain = true; // Compute adaptive gain
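    // Assumption on the vpAdaptiveGain::initStandard(gain_at_zero,
    // gain_at_infinity, slope_at_zero) parameterization: the gain tends to 4
    // for a small error and to 0.2 for a large one, with a slope of 40 at zero.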
    lambda.initStandard(4, 0.2, 40);
    int opt_cam_frequency = 60; // 60 Hz

    // Read the command line options
    if (getOptions(argc, argv, opt_kalman, doAdaptativeGain, lambda) == false) {
      return (-1);
    }

    // Log file creation in /tmp/$USERNAME/log.dat
    // This file contains by line:
    // - the servo loop duration (s)
    // - the 6 computed camera velocities (m/s, rad/s) applied to the robot
    // - the 2 values of s - s* expressed in meters in the image plane
    // - the 2 values of s - s* expressed in pixels in the image
    // - the 2 values of the measured de/dt
    // - the 2 values of the filtered de/dt
    std::string username;
    // Get the user login name
    vpIoTools::getUserName(username);

    // Create a log filename to save velocities...
    std::string logdirname;
    logdirname = "/tmp/" + username;

    // Test if the output path exists. If not, try to create it
    if (vpIoTools::checkDirectory(logdirname) == false) {
      try {
        // Create the dirname
        vpIoTools::makeDirectory(logdirname);
      } catch (...) {
        std::cerr << std::endl << "ERROR:" << std::endl;
        std::cerr << "  Cannot create " << logdirname << std::endl;
        exit(-1);
      }
    }
    std::string logfilename;
    logfilename = logdirname + "/log.dat";

    // Open the log file name
    std::ofstream flog(logfilename.c_str());

    vpServo task;

    vpImage<unsigned char> I;
    vp1394TwoGrabber g(false);
    g.setVideoMode(vp1394TwoGrabber::vpVIDEO_MODE_640x480_MONO8);
    switch (opt_cam_frequency) {
    case 15:
      g.setFramerate(vp1394TwoGrabber::vpFRAMERATE_15);
      break;
    case 30:
      g.setFramerate(vp1394TwoGrabber::vpFRAMERATE_30);
      break;
    case 60:
      g.setFramerate(vp1394TwoGrabber::vpFRAMERATE_60);
      break;
    }
    g.open(I);

    for (int i = 0; i < 10; i++) //  10 acquisitions to warm up the camera
      g.acquire(I);

#ifdef VISP_HAVE_X11
    vpDisplayX display(I, 100, 100, "Current image");
#elif defined(VISP_HAVE_OPENCV)
    vpDisplayOpenCV display(I, 100, 100, "Current image");
#elif defined(VISP_HAVE_GTK)
    vpDisplayGTK display(I, 100, 100, "Current image");
#endif

    vpDisplay::display(I);
    vpDisplay::flush(I);

    std::cout << std::endl;
    std::cout << "-------------------------------------------------------" << std::endl;
    std::cout << "Test program for target motion compensation using a Kalman "
                 "filter "
              << std::endl;
    std::cout << "Eye-in-hand task control, velocity computed in the camera frame" << std::endl;
    std::cout << "Task : servo a point \n" << std::endl;

    // Kalman filtering
    switch (opt_kalman) {
    case K_NONE:
      std::cout << "Servo with no target motion compensation (see -K option)\n";
      break;
    case K_VELOCITY:
      std::cout << "Servo with target motion compensation using a Kalman filter\n"
                << "with a constant velocity state model (see -K option)\n";
      break;
    case K_ACCELERATION:
      std::cout << "Servo with target motion compensation using a Kalman filter\n"
                << "with a constant acceleration state model (see -K option)\n";
      break;
    }
    std::cout << "-------------------------------------------------------" << std::endl;
    std::cout << std::endl;

    vpDot2 dot;

    std::cout << "Click on the dot..." << std::endl;
    dot.setGraphics(true);
    dot.initTracking(I);
    vpImagePoint cog;
    cog = dot.getCog();
    vpDisplay::displayCross(I, cog, 10, vpColor::blue);
    vpDisplay::flush(I);

    vpRobotAfma4 robot;

    double px = 1000;
    double py = 1000;
    double u0 = I.getWidth() / 2.;
    double v0 = I.getHeight() / 2.;

    vpCameraParameters cam(px, py, u0, v0);

    // Sets the current position of the visual feature
    vpFeaturePoint p;
    vpFeatureBuilder::create(p, cam, dot);

    // Sets the desired position of the visual feature
    vpFeaturePoint pd;
    pd.buildFrom(0, 0, 1);
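    // vpFeaturePoint::buildFrom(x, y, Z) takes normalized image coordinates
    // (in meters) and the point depth. With x = y = 0 the desired position is
    // the principal point, i.e. the dot is driven to the image center; Z = 1 m
    // is only a rough depth used to evaluate the interaction matrix.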

    // Define the task
    // - we want an eye-in-hand control law
    // - robot is controlled in the camera frame
    task.setServo(vpServo::EYEINHAND_CAMERA);

    // - we want to see a point on a point
    std::cout << std::endl;
    task.addFeature(p, pd);

    // - set the gain
    task.setLambda(lambda);

    // Display task information
    // task.print() ;

    //--------------------------------------------------------------------------
    //!---------------------------Init Kalman Filter--------------------------
    //--------------------------------------------------------------------------

    //! Initialize filter
    vpLinearKalmanFilterInstantiation kalman;

    // Initialize the Kalman filter
    unsigned int nsignal = 2; // The two values of dedt
    double rho = 0.3;
    vpColVector sigma_state;
    vpColVector sigma_measure(nsignal);
    unsigned int state_size = 0; // Kalman state vector size

    switch (opt_kalman) {
    case K_VELOCITY: {
      // Set the constant velocity state model used for the filtering
      kalman.setStateModel(vpLinearKalmanFilterInstantiation::stateConstVelWithColoredNoise_MeasureVel);
      state_size = kalman.getStateSize();
      sigma_state.resize(state_size * nsignal);
      sigma_state = 0.00001; // Same state variance for all signals
      sigma_measure = 0.05;  // Same measure variance for all the signals
      double dummy = 0;      // unused parameter dt for the velocity state model
      kalman.initFilter(nsignal, sigma_state, sigma_measure, rho, dummy);

      break;
    }
    case K_ACCELERATION: {
      // Set the constant acceleration state model used for the filtering
      kalman.setStateModel(vpLinearKalmanFilterInstantiation::stateConstAccWithColoredNoise_MeasureVel);
      state_size = kalman.getStateSize();
      sigma_state.resize(state_size * nsignal);
      sigma_state = 0.00001; // Same variance for all the signals
      sigma_measure = 0.05;  // Same measure variance for all the signals
      double dt = 1. / opt_cam_frequency;
      kalman.initFilter(nsignal, sigma_state, sigma_measure, rho, dt);
      break;
    }
    default:
      break;
    }
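
    // With both state models the filter stacks the state of each signal in
    // kalman.Xest; the filtered value of signal i is the first component of
    // its block, i.e. Xest[i * state_size]. This is how the filtered de/dt
    // is read back in the servo loop below.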

    robot.setRobotState(vpRobot::STATE_VELOCITY_CONTROL);

    int iter = 0;

    double t_1, Tv_0;
    vpColVector vm(6), vm_0(6);
    vpColVector v(6), v1(6), v2(6); // robot velocities
    // task error
    vpColVector err(2), err_0(2), err_1(2);
    vpColVector dedt_filt(2), dedt_mes(2);

    t_1 = vpTime::measureTimeMs(); // t_1: time at previous iter

    Tv_0 = 0;

    //
    // Warning: In all variable names,
    //   _0 means the value for the current iteration (t=0)
    //   _1 means the value for the previous iteration (t=-1)
    //   _2 means the value for the previous previous iteration (t=-2)
    //
    std::cout << "\nHit CTRL-C to stop the loop...\n" << std::flush;
    for (;;) {
      double t_0 = vpTime::measureTimeMs(); // t_0: current time
      // Servo loop duration
      double Tv = (double)(t_0 - t_1) / 1000.0; // duration of one iteration, in seconds
      //     std::cout << "time iter : " << Tv << std::endl;

      // Update time for next iteration
      t_1 = t_0;

      vm = robot.getVelocity(vpRobot::CAMERA_FRAME);

      // Acquire a new image from the camera
      g.acquire(I);

      // Display this image
      vpDisplay::display(I);

      // Achieve the tracking of the dot in the image
      dot.track(I);
      vpImagePoint cog = dot.getCog();

      // Display a green cross at the center of gravity position in the image
      vpDisplay::displayCross(I, cog, 10, vpColor::green);

      // Update the point feature from the dot location
      vpFeatureBuilder::create(p, cam, dot);

      //----------------------------------------------------------------------
      //!-------------------- Update displacements and time ------------------
      //----------------------------------------------------------------------
      vm_0 = vm;

      // Update current loop time and previous one
      double Tv_1 = Tv_0;
      Tv_0 = Tv;

      // Compute the visual servoing skew vector
      v1 = task.computeControlLaw();

      err = task.error;

      //! Correction term: de/dt = Delta s / Delta t - L * vc
      if (iter == 0) {
        err_0 = 0;
        err_1 = 0;
        dedt_mes = 0;
        dedt_filt = 0;
      } else {
        err_1 = err_0;
        err_0 = err;

        dedt_mes = (err_0 - err_1) / Tv_1 - task.J1 * vm_0;
      }
      //! Workaround for the iter = 1 case
      if (iter <= 1) {
        dedt_mes = 0;
      }

      //----------------------------------------------------------------------
      //----------------------- Kalman Filter Equations ----------------------
      //----------------------------------------------------------------------
      // Kalman filtering
      switch (opt_kalman) {
      case K_NONE:
        dedt_filt = 0;
        break;
      case K_VELOCITY:
      case K_ACCELERATION:
        kalman.filter(dedt_mes);
        for (unsigned int i = 0; i < nsignal; i++) {
          dedt_filt[i] = kalman.Xest[i * state_size];
        }
        break;
      }

      //! Modified control law
      vpMatrix J1p = task.getTaskJacobianPseudoInverse();
      v2 = -J1p * dedt_filt;
      //   std::cout << "task J1p: " <<  J1p.t() << std::endl  ;
      //   std::cout << "dedt_filt: " <<  dedt_filt.t() << std::endl  ;

      v = v1 + v2;

      // Display the current and desired feature points in the image display
      vpServoDisplay::display(task, cam, I);

      //    std::cout << "v2 : " << v2.t() << std::endl  ;
      //   std::cout << "v1 : " << v1.t() << std::endl  ;

      //   std::cout << "v : " << v.t();

      // Apply the camera velocities to the robot
      robot.setVelocity(vpRobot::CAMERA_FRAME, v);

      // Save loop time
      flog << Tv_0 << " ";

      // Save velocities applied to the robot in the log file
      // v[0], v[1], v[2] correspond to camera translation velocities in m/s
      // v[3], v[4], v[5] correspond to camera rotation velocities in rad/s
      flog << v[0] << " " << v[1] << " " << v[2] << " " << v[3] << " " << v[4] << " " << v[5] << " ";

      // Save feature error (s-s*) for the feature point. For this feature
      // point, we have 2 errors (along x and y axis).  This error is
      // expressed in meters in the camera frame
      flog << task.error[0] << " " << task.error[1] << " ";

      // Save feature error (s-s*) in pixels in the image.
      flog << cog.get_u() - cam.get_u0() << " " << cog.get_v() - cam.get_v0() << " ";

      // Save de/dt
      flog << dedt_mes[0] << " " << dedt_mes[1] << " ";

      // Save de/dt filtered
      flog << dedt_filt[0] << " " << dedt_filt[1] << " ";

      flog << std::endl;

      // Flush the display
      vpDisplay::flush(I);

      iter++;
    }

    flog.close(); // Close the log file

    // Display task information
    task.print();

    return EXIT_SUCCESS;
  } catch (const vpException &e) {
    std::cout << "Catch a ViSP exception: " << e << std::endl;
    return EXIT_FAILURE;
  }
}

#else
int main()
{
  std::cout << "You do not have an Afma4 robot connected to your computer, or libdc1394 is not available..." << std::endl;
  return EXIT_SUCCESS;
}
#endif