/****************************************************************************
 *
 * ViSP, open source Visual Servoing Platform software.
 * Copyright (C) 2005 - 2019 by Inria. All rights reserved.
 *
 * This software is free software; you can redistribute it and/or modify
 * it under the terms of the GNU General Public License as published by
 * the Free Software Foundation; either version 2 of the License, or
 * (at your option) any later version.
 * See the file LICENSE.txt at the root directory of this source
 * distribution for additional information about the GNU GPL.
 *
 * For using ViSP with software that can not be combined with the GNU
 * GPL, please contact Inria about acquiring a ViSP Professional
 * Edition License.
 *
 * See http://visp.inria.fr for more information.
 *
 * This software was developed at:
 * Inria Rennes - Bretagne Atlantique
 * Campus Universitaire de Beaulieu
 * 35042 Rennes Cedex
 * France
 *
 * If you have questions regarding the use of this file, please contact
 * Inria at visp@inria.fr
 *
 * This file is provided AS IS with NO WARRANTY OF ANY KIND, INCLUDING THE
 * WARRANTY OF DESIGN, MERCHANTABILITY AND FITNESS FOR A PARTICULAR PURPOSE.
 *
 * Description:
 *   tests the control law
 *   eye-in-hand control
 *   velocity computed in the camera frame
 *
 * Authors:
 * Fabien Spindler
 *
 *****************************************************************************/
/*!
  \example servoViper850FourPoints2DCamVelocityLs_cur.cpp

  \brief Example of an eye-in-hand control law. We control here a real robot,
  the Viper S850 robot (an arm with 6 degrees of freedom). The velocity is
  computed in the camera frame. The inverse Jacobian that converts Cartesian
  velocities into joint velocities is implemented in the robot low-level
  controller. Visual features are the image coordinates of 4 points. The
  target is made of 4 dots arranged as a 10cm by 10cm square.
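
  The control law implemented below is the classical IBVS law
  \f$ v = -\lambda \, {\widehat{L_s}}^{+} \, (s - s^*) \f$, where the
  interaction matrix \f$ L_s \f$ is re-estimated at each iteration from the
  current features (vpServo::CURRENT).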

*/

#include <visp3/core/vpConfig.h>
#include <visp3/core/vpDebug.h> // Debug trace

#include <fstream>
#include <iostream>
#include <sstream>
#include <stdio.h>
#include <stdlib.h>
#if (defined(VISP_HAVE_VIPER850) && defined(VISP_HAVE_DC1394))

#include <visp3/blob/vpDot2.h>
#include <visp3/core/vpDisplay.h>
#include <visp3/core/vpHomogeneousMatrix.h>
#include <visp3/core/vpImage.h>
#include <visp3/core/vpIoTools.h>
#include <visp3/core/vpMath.h>
#include <visp3/core/vpPixelMeterConversion.h>
#include <visp3/core/vpPoint.h>
#include <visp3/gui/vpDisplayGTK.h>
#include <visp3/gui/vpDisplayOpenCV.h>
#include <visp3/gui/vpDisplayX.h>
#include <visp3/robot/vpRobotViper850.h>
#include <visp3/sensor/vp1394TwoGrabber.h>
#include <visp3/vision/vpPose.h>
#include <visp3/visual_features/vpFeatureBuilder.h>
#include <visp3/visual_features/vpFeaturePoint.h>
#include <visp3/vs/vpServo.h>
#include <visp3/vs/vpServoDisplay.h>

#define L 0.05 // to deal with a 10cm by 10cm square

/*!

  Compute the pose \e cMo from the 3D coordinates of the points \e point and
  their corresponding 2D coordinates \e dot. The pose is computed using
  Lowe's nonlinear method.

  \param point : 3D coordinates of the points.

  \param dot : 2D coordinates of the points.

  \param ndot : Number of points or dots used for the pose estimation.

  \param cam : Intrinsic camera parameters.

  \param cMo : Homogeneous matrix in output describing the transformation
  between the camera and object frame.

  \param cto : Translation in output extracted from \e cMo.

  \param cro : Rotation in output extracted from \e cMo.

  \param init : Indicates if we have to estimate an initial pose with the
  Lagrange or Dementhon methods.

*/
void compute_pose(vpPoint point[], vpDot2 dot[], int ndot, vpCameraParameters cam, vpHomogeneousMatrix &cMo,
                  vpTranslationVector &cto, vpRxyzVector &cro, bool init)
{
  vpHomogeneousMatrix cMo_dementhon; // computed pose with Dementhon
  vpHomogeneousMatrix cMo_lagrange;  // computed pose with Lagrange
  vpRotationMatrix cRo;
  vpPose pose;
  vpImagePoint cog;
  for (int i = 0; i < ndot; i++) {

    double x = 0, y = 0;
    cog = dot[i].getCog();
    vpPixelMeterConversion::convertPoint(cam, cog, x,
                                         y); // pixel to meter conversion
    point[i].set_x(x);                       // perspective projection
    point[i].set_y(y);
    pose.addPoint(point[i]);
  }

  if (init == true) {
    pose.computePose(vpPose::DEMENTHON, cMo_dementhon);
    // Compute and return the residual expressed in meter for the pose matrix
    // 'cMo'
    double residual_dementhon = pose.computeResidual(cMo_dementhon);
    pose.computePose(vpPose::LAGRANGE, cMo_lagrange);
    double residual_lagrange = pose.computeResidual(cMo_lagrange);

    // Select the best pose to initialize the Lowe pose computation
    if (residual_lagrange < residual_dementhon)
      cMo = cMo_lagrange;
    else
      cMo = cMo_dementhon;

  } else { // init = false; use of the previous pose to initialise LOWE
    cRo.buildFrom(cro);
    cMo.buildFrom(cto, cRo);
  }
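  // Refine the initial guess with Lowe's nonlinear minimisation of the
  // reprojection error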
  pose.computePose(vpPose::LOWE, cMo);
  cMo.extract(cto);
  cMo.extract(cRo);
  cro.buildFrom(cRo);
}

int main()
{
  // Log file creation in /tmp/$USERNAME/log.dat
  // This file contains by line:
  // - the 6 computed camera velocities (m/s, rad/s) to achieve the task
  // - the 6 measured joint velocities (rad/s)
  // - the 6 measured joint positions (rad)
  // - the 8 values of s - s*
  std::string username;
  // Get the user login name
  vpIoTools::getUserName(username);

  // Create a log filename to save velocities...
  std::string logdirname;
  logdirname = "/tmp/" + username;

  // Test if the output path exists. If not, try to create it
  if (vpIoTools::checkDirectory(logdirname) == false) {
    try {
      // Create the dirname
      vpIoTools::makeDirectory(logdirname);
    } catch (...) {
      std::cerr << std::endl << "ERROR:" << std::endl;
      std::cerr << "  Cannot create " << logdirname << std::endl;
      return (-1);
    }
  }
  std::string logfilename;
  logfilename = logdirname + "/log.dat";

  // Open the log file name
  std::ofstream flog(logfilename.c_str());

  try {
    vpRobotViper850 robot;
    // Load the end-effector to camera frame transformation obtained
    // using a camera intrinsic model with distortion
    vpCameraParameters::vpCameraParametersProjType projModel = vpCameraParameters::perspectiveProjWithDistortion;
    robot.init(vpRobotViper850::TOOL_PTGREY_FLEA2_CAMERA, projModel);

    vpServo task;

    vpImage<unsigned char> I;
    int i;

    bool reset = false;
    vp1394TwoGrabber g(reset);
    g.setVideoMode(vp1394TwoGrabber::vpVIDEO_MODE_640x480_MONO8);
    g.setFramerate(vp1394TwoGrabber::vpFRAMERATE_60);
    g.open(I);

    g.acquire(I);

#ifdef VISP_HAVE_X11
    vpDisplayX display(I, 100, 100, "Current image");
#elif defined(VISP_HAVE_OPENCV)
    vpDisplayOpenCV display(I, 100, 100, "Current image");
#elif defined(VISP_HAVE_GTK)
    vpDisplayGTK display(I, 100, 100, "Current image");
#endif

    vpDisplay::display(I);
    vpDisplay::flush(I);

    std::cout << std::endl;
    std::cout << "-------------------------------------------------------" << std::endl;
    std::cout << " Test program for vpServo " << std::endl;
    std::cout << " Eye-in-hand task control, velocity computed in the camera space" << std::endl;
    std::cout << " Use of the Viper850 robot " << std::endl;
    std::cout << " task : servo 4 points on a square with dimension " << L << " meters" << std::endl;
    std::cout << "-------------------------------------------------------" << std::endl;
    std::cout << std::endl;

    vpDot2 dot[4];
    vpImagePoint cog;

    std::cout << "Click on the 4 dots clockwise starting from upper/left dot..." << std::endl;

    for (i = 0; i < 4; i++) {
      dot[i].setGraphics(true);
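      // Without initial coordinates, initTracking() waits for a user click
      // on the dot to initialise the tracker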
      dot[i].initTracking(I);
      cog = dot[i].getCog();
      vpDisplay::displayCross(I, cog, 10, vpColor::blue);
      vpDisplay::flush(I);
    }

    vpCameraParameters cam;

    // Update camera parameters
    robot.getCameraParameters(cam, I);

    cam.printParameters();

    // Set the current position of the visual features
    vpFeaturePoint p[4];
    for (i = 0; i < 4; i++)
      vpFeatureBuilder::create(p[i], cam, dot[i]); // retrieve x, y of the vpFeaturePoint structure

    // Set the position of the square target in a frame whose origin is
    // centered in the middle of the square
    vpPoint point[4];
    point[0].setWorldCoordinates(-L, -L, 0);
    point[1].setWorldCoordinates(L, -L, 0);
    point[2].setWorldCoordinates(L, L, 0);
    point[3].setWorldCoordinates(-L, L, 0);

    // Initialise a desired pose to compute s*, the desired 2D point features
    vpHomogeneousMatrix cMo;
    vpTranslationVector cto(0, 0, 0.5); // tz = 0.5 meter
    vpRxyzVector cro(vpMath::rad(10), vpMath::rad(30), vpMath::rad(20));
    vpRotationMatrix cRo(cro); // Build the rotation matrix
    cMo.buildFrom(cto, cRo);   // Build the homogeneous matrix

    // Set the desired position of the 2D visual features
    vpFeaturePoint pd[4];
    // Compute the desired position of the features from the desired pose
    for (int i = 0; i < 4; i++) {
      vpColVector cP, p;
      point[i].changeFrame(cMo, cP);
      point[i].projection(cP, p);
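      // projection() applies the perspective model: x = X/Z, y = Y/Z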

      pd[i].set_x(p[0]);
      pd[i].set_y(p[1]);
      pd[i].set_Z(cP[2]);
    }

    // We want to see a point on a point
    for (i = 0; i < 4; i++)
      task.addFeature(p[i], pd[i]);

    // Set the proportional gain
    task.setLambda(0.3);

    // Display task information
    task.print();

    // Define the task
    // - we want an eye-in-hand control law
    // - the velocity is computed in the camera frame
    task.setServo(vpServo::EYEINHAND_CAMERA);
    task.setInteractionMatrixType(vpServo::CURRENT, vpServo::PSEUDO_INVERSE);
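    // With vpServo::CURRENT, the interaction matrix is re-estimated at each
    // iteration from the current features and their depth Z (hence the
    // set_Z() updates in the servo loop below)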
    task.print();

    // Initialise the velocity control of the robot
    robot.setRobotState(vpRobot::STATE_VELOCITY_CONTROL);

    std::cout << "\nHit CTRL-C to stop the loop...\n" << std::flush;
    for (;;) {
      // Acquire a new image from the camera
      g.acquire(I);

      // Display this image
      vpDisplay::display(I);

      try {
        // For each point...
        for (i = 0; i < 4; i++) {
          // Achieve the tracking of the dot in the image
          dot[i].track(I);
          // Display a green cross at the center of gravity position in the
          // image
          cog = dot[i].getCog();
          vpDisplay::displayCross(I, cog, 10, vpColor::green);
        }
      } catch (...) {
        flog.close(); // Close the log file
        vpTRACE("Error detected while tracking visual features");
        robot.stopMotion();
        return (1);
      }

      // During the servo, we compute the pose using the LOWE method. For the
      // initial pose used in the nonlinear minimisation we use the pose
      // computed at the previous iteration.
      compute_pose(point, dot, 4, cam, cMo, cto, cro, false);

      for (i = 0; i < 4; i++) {
        // Update the point feature from the dot location
        vpFeatureBuilder::create(p[i], cam, dot[i]);
        // Set the feature Z coordinate from the pose
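        // (the interaction matrix of a point feature depends on its depth;
        // e.g. the row associated with x is [-1/Z, 0, x/Z, x*y, -(1+x^2), y])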
        vpColVector cP;
        point[i].changeFrame(cMo, cP);

        p[i].set_Z(cP[2]);
      }

      vpColVector v;
      // Compute the visual servoing skew vector
      v = task.computeControlLaw();
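      // With vpServo::EYEINHAND_CAMERA and vpServo::CURRENT set above, this
      // yields v = -lambda * Ls^+ * (s - s*), expressed in the camera frame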

      // Display the current and desired feature points in the image display
      vpServoDisplay::display(task, cam, I);

      // Apply the computed camera velocities to the robot
      robot.setVelocity(vpRobot::CAMERA_FRAME, v);

      // Save velocities applied to the robot in the log file
      // v[0], v[1], v[2] correspond to camera translation velocities in m/s
      // v[3], v[4], v[5] correspond to camera rotation velocities in rad/s
      flog << v[0] << " " << v[1] << " " << v[2] << " " << v[3] << " " << v[4] << " " << v[5] << " ";

      // Get the measured joint velocities of the robot
      vpColVector qvel;
      robot.getVelocity(vpRobot::ARTICULAR_FRAME, qvel);
      // Save measured joint velocities of the robot in the log file:
      // - qvel[0] to qvel[5] correspond to the measured velocities of the
      //   6 revolute joints in rad/s
      flog << qvel[0] << " " << qvel[1] << " " << qvel[2] << " " << qvel[3] << " " << qvel[4] << " " << qvel[5] << " ";

      // Get the measured joint positions of the robot
      vpColVector q;
      robot.getPosition(vpRobot::ARTICULAR_FRAME, q);
      // Save measured joint positions of the robot in the log file
      // - q[0] to q[5] correspond to the measured positions of the
      //   6 revolute joints in rad
      flog << q[0] << " " << q[1] << " " << q[2] << " " << q[3] << " " << q[4] << " " << q[5] << " ";

      // Save feature error (s-s*) for the 4 feature points. For each feature
      // point, we have 2 errors (along the x and y axes). This error is
      // expressed in meters in the camera frame
      flog << task.getError() << std::endl;

      // Flush the display
      vpDisplay::flush(I);

      // std::cout << "|| s - s* || = "  << ( task.getError() ).sumSquare() <<
      // std::endl;
    }

    std::cout << "Display task information: " << std::endl;
    task.print();
    flog.close(); // Close the log file
    return EXIT_SUCCESS;
  }
  catch (const vpException &e) {
    flog.close(); // Close the log file
    std::cout << "Catch an exception: " << e.getMessage() << std::endl;
    return EXIT_FAILURE;
  }
}

#else
int main()
{
  std::cout << "You do not have a Viper 850 robot connected to your computer..." << std::endl;
  return EXIT_SUCCESS;
}
#endif