/****************************************************************************
 *
 * ViSP, open source Visual Servoing Platform software.
 * Copyright (C) 2005 - 2019 by Inria. All rights reserved.
 *
 * This software is free software; you can redistribute it and/or modify
 * it under the terms of the GNU General Public License as published by
 * the Free Software Foundation; either version 2 of the License, or
 * (at your option) any later version.
 * See the file LICENSE.txt at the root directory of this source
 * distribution for additional information about the GNU GPL.
 *
 * For using ViSP with software that can not be combined with the GNU
 * GPL, please contact Inria about acquiring a ViSP Professional
 * Edition License.
 *
 * See http://visp.inria.fr for more information.
 *
 * This software was developed at:
 * Inria Rennes - Bretagne Atlantique
 * Campus Universitaire de Beaulieu
 * 35042 Rennes Cedex
 * France
 *
 * If you have questions regarding the use of this file, please contact
 * Inria at visp@inria.fr
 *
 * This file is provided AS IS with NO WARRANTY OF ANY KIND, INCLUDING THE
 * WARRANTY OF DESIGN, MERCHANTABILITY AND FITNESS FOR A PARTICULAR PURPOSE.
 *
 * Description:
 * IBVS on Pioneer P3DX mobile platform
 *
 * Authors:
 * Fabien Spindler
 *
 *****************************************************************************/
#include <iostream>

#include <visp3/robot/vpRobotPioneer.h> // Include first to avoid build issues with Status, None, isfinite

#include <visp3/blob/vpDot2.h>
#include <visp3/core/vpCameraParameters.h>
#include <visp3/core/vpConfig.h>
#include <visp3/core/vpHomogeneousMatrix.h>
#include <visp3/core/vpImage.h>
#include <visp3/core/vpImageConvert.h>
#include <visp3/core/vpVelocityTwistMatrix.h>
#include <visp3/gui/vpDisplayGDI.h>
#include <visp3/gui/vpDisplayX.h>
#include <visp3/sensor/vp1394CMUGrabber.h>
#include <visp3/sensor/vp1394TwoGrabber.h>
#include <visp3/sensor/vpOpenCVGrabber.h>
#include <visp3/sensor/vpV4l2Grabber.h>
#include <visp3/visual_features/vpFeatureBuilder.h>
#include <visp3/visual_features/vpFeatureDepth.h>
#include <visp3/visual_features/vpFeaturePoint.h>
57 
// The test can only be achieved if a framegrabber, a display and the
// Pioneer/Aria third party libraries are all available.
#if defined(VISP_HAVE_DC1394) || defined(VISP_HAVE_V4L2) || defined(VISP_HAVE_CMU1394) ||                              \
    (VISP_HAVE_OPENCV_VERSION >= 0x020100)
#if defined(VISP_HAVE_X11) || defined(VISP_HAVE_GDI)
#if defined(VISP_HAVE_PIONEER)
#define TEST_COULD_BE_ACHIEVED
#endif
#endif
#endif

#undef VISP_HAVE_OPENCV // To use a firewire camera
#undef VISP_HAVE_V4L2   // To use a firewire camera

/*!
  \example servoPioneerPoint2DDepthWithoutVpServo.cpp

  Example that shows how to control the Pioneer mobile robot by IBVS visual
  servoing with respect to a blob. The current visual features that are used
  are s = (x, log(Z/Z*)). The desired ones are s* = (x*, 0), with:
  - x the abscissa of the point corresponding to the blob center of gravity
  measured at each iteration,
  - x* the desired abscissa position of the point (x* = 0)
  - Z the depth of the point measured at each iteration
  - Z* the desired depth of the point equal to the initial one.

  The degrees of freedom that are controlled are (vx, wz), where wz is the
  rotational velocity and vx the translational velocity of the mobile platform
  at point M located at the middle between the two wheels.

  The feature x allows to control wy, while log(Z/Z*) allows to control vz.
  The value of x is measured thanks to a blob tracker.
  The value of Z is estimated from the surface of the blob that is
  proportional to the depth Z.

  */
#ifdef TEST_COULD_BE_ACHIEVED
int main(int argc, char **argv)
{
  try {
    vpImage<unsigned char> I; // Create a gray level image container
    double depth = 1.;        // Depth (meter) used to build the desired x* feature
    double lambda = 0.6;      // Gain of the visual servo control law
    double coef = 1. / 6.77;  // Scale parameter used to estimate the depth Z
                              // of the blob from its surface

    vpRobotPioneer robot;
    ArArgumentParser parser(&argc, argv);
    parser.loadDefaultArguments();

    // ArRobotConnector connects to the robot, gets some initial data from it
    // such as type and name, and then loads parameter files for this robot.
    ArRobotConnector robotConnector(&parser, &robot);
    if (!robotConnector.connectRobot()) {
      ArLog::log(ArLog::Terse, "Could not connect to the robot.");
      if (parser.checkHelpAndWarnUnparsed()) {
        Aria::logOptions();
        Aria::exit(1);
      }
    }
    if (!Aria::parseArgs()) {
      Aria::logOptions();
      Aria::shutdown();
      // Fix: the original `return false;` evaluated to 0, reporting success
      // to the shell even though argument parsing failed.
      return EXIT_FAILURE;
    }

    // Wait 3 sec to be sure that the low level Aria thread used to control
    // the robot is started. Without this delay we experienced a delay
    // (around 2.2 sec) between the velocity sent to the robot and the
    // velocity that is really applied to the wheels.
    vpTime::sleepMs(3000);

    std::cout << "Robot connected" << std::endl;

    // Camera parameters. In this experiment we don't need a precise
    // calibration of the camera
    vpCameraParameters cam;

// Create the camera framegrabber
#if defined(VISP_HAVE_OPENCV)
    int device = 1;
    std::cout << "Use device: " << device << std::endl;
    cv::VideoCapture g(device); // open the default camera
    g.set(CV_CAP_PROP_FRAME_WIDTH, 640);
    g.set(CV_CAP_PROP_FRAME_HEIGHT, 480);
    if (!g.isOpened()) // check if we succeeded
      return EXIT_FAILURE;
    cv::Mat frame;
    g >> frame; // get a new frame from camera
    vpImageConvert::convert(frame, I);

    // Logitec sphere parameters
    cam.initPersProjWithoutDistortion(558, 555, 312, 210);
#elif defined(VISP_HAVE_V4L2)
    // Create a grabber based on v4l2 third party lib (for usb cameras under
    // Linux)
    vpV4l2Grabber g;
    g.setScale(1);
    g.setInput(0);
    g.setDevice("/dev/video1");
    g.open(I);
    // Logitec sphere parameters
    cam.initPersProjWithoutDistortion(558, 555, 312, 210);
#elif defined(VISP_HAVE_DC1394)
    // Create a grabber based on libdc1394-2.x third party lib (for firewire
    // cameras under Linux)
    vp1394TwoGrabber g(false);
    g.setVideoMode(vp1394TwoGrabber::vpVIDEO_MODE_640x480_MONO8);
    g.setFramerate(vp1394TwoGrabber::vpFRAMERATE_30);
    // AVT Pike 032C parameters
    cam.initPersProjWithoutDistortion(800, 795, 320, 216);
#elif defined(VISP_HAVE_CMU1394)
    // Create a grabber based on CMU 1394 third party lib (for firewire
    // cameras under windows)
    vp1394CMUGrabber g;
    g.setVideoMode(0, 5); // 640x480 MONO8
    g.setFramerate(4);    // 30 Hz
    g.open(I);
    // AVT Pike 032C parameters
    cam.initPersProjWithoutDistortion(800, 795, 320, 216);
#endif

// Acquire an image from the grabber
#if defined(VISP_HAVE_OPENCV)
    g >> frame; // get a new frame from camera
    vpImageConvert::convert(frame, I);
#else
    g.acquire(I);
#endif

// Create an image viewer
#if defined(VISP_HAVE_X11)
    vpDisplayX d(I, 10, 10, "Current frame");
#elif defined(VISP_HAVE_GDI)
    vpDisplayGDI d(I, 10, 10, "Current frame");
#endif
    vpDisplay::display(I);
    vpDisplay::flush(I);

    // Create a blob tracker
    vpDot2 dot;
    dot.setGraphics(true);
    dot.setComputeMoments(true);
    dot.setEllipsoidShapePrecision(0.);       // to track a blob without any constraint on the shape
    dot.setGrayLevelPrecision(0.9);           // to set the blob gray level bounds for binarisation
    dot.setEllipsoidBadPointsPercentage(0.5); // to accept 50% of bad inner
                                              // and outside points with bad
                                              // gray level
    dot.initTracking(I);
    vpDisplay::flush(I);

    // Current and desired visual feature associated to the x coordinate of
    // the point
    vpFeaturePoint s_x, s_xd;

    // Create the current x visual feature
    vpFeatureBuilder::create(s_x, cam, dot);

    // Create the desired x* visual feature
    s_xd.buildFrom(0, 0, depth);
    // Interaction matrix of the x feature; constant since it is built from
    // the desired feature
    vpMatrix L_x = s_xd.interaction(vpFeaturePoint::selectX());

    // Create the current log(Z/Z*) visual feature
    vpFeatureDepth s_Z;
    // Surface of the blob estimated from the image moment m00 and converted
    // in meters
    double surface = 1. / sqrt(dot.m00 / (cam.get_px() * cam.get_py()));
    double Z, Zd;
    // Initial depth of the blob in front of the camera
    Z = coef * surface;
    // Desired depth Z* of the blob. This depth is learned and equal to the
    // initial depth
    Zd = Z;
    s_Z.buildFrom(s_x.get_x(), s_x.get_y(), Z,
                  0); // log(Z/Z*) = 0 that's why the last parameter is 0
    // Interaction matrix of the log(Z/Z*) feature; not constant, updated in
    // the loop since Z evolves at each iteration
    vpMatrix L_Z = s_Z.interaction();

    vpVelocityTwistMatrix cVe = robot.get_cVe();
    vpMatrix eJe; // pioneer jacobian
    robot.get_eJe(eJe);

    // Computed robot velocities: v[0] = vx (m/s), v[1] = wz (rad/s), sent in
    // the mobile robot reference frame below
    vpColVector v;

    vpFeatureDepth s_Zd;
    s_Zd.buildFrom(0, 0, 1, 0); // The value of s* is 0 with Z=1 meter.

    while (1) {
// Acquire a new image
#if defined(VISP_HAVE_OPENCV) && (VISP_HAVE_OPENCV_VERSION >= 0x020100)
      g >> frame; // get a new frame from camera
      vpImageConvert::convert(frame, I);
#else
      g.acquire(I);
#endif
      // Set the image as background of the viewer
      vpDisplay::display(I);

      // Does the blob tracking
      dot.track(I);
      // Update the current x feature
      vpFeatureBuilder::create(s_x, cam, dot);

      // Update log(Z/Z*) feature. Since the depth Z changes, we need to
      // update the interaction matrix
      surface = 1. / sqrt(dot.m00 / (cam.get_px() * cam.get_py()));
      Z = coef * surface;
      s_Z.buildFrom(s_x.get_x(), s_x.get_y(), Z, log(Z / Zd));
      L_Z = s_Z.interaction();

      // Build the global interaction matrix from the constant x part and the
      // freshly updated log(Z/Z*) part. (The original also stacked a second,
      // identically named matrix before the loop that was shadowed here and
      // never used; that dead work is removed.)
      vpMatrix L;
      L.stack(L_x);
      L.stack(L_Z);

      // Update the global error s-s*
      vpColVector error;
      error.stack(s_x.error(s_xd, vpFeaturePoint::selectX()));
      error.stack(s_Z.error(s_Zd));

      // Compute the control law. Velocities are computed in the mobile robot
      // reference frame
      v = -lambda * (L * cVe * eJe).pseudoInverse() * error;

      std::cout << "Send velocity to the Pioneer: " << v[0] << " m/s " << vpMath::deg(v[1]) << " deg/s" << std::endl;

      // Send the velocity to the robot
      robot.setVelocity(vpRobot::REFERENCE_FRAME, v);

      // Draw a vertical line which corresponds to the desired x coordinate of
      // the dot cog
      vpDisplay::displayLine(I, 0, 320, 479, 320, vpColor::red);
      vpDisplay::flush(I);

      // A click in the viewer to exit
      if (vpDisplay::getClick(I, false))
        break;
    }

    std::cout << "Ending robot thread..." << std::endl;
    robot.stopRunning();

    // wait for the thread to stop
    robot.waitForRunExit();
    return EXIT_SUCCESS;
  } catch (const vpException &e) {
    std::cout << "Catch an exception: " << e << std::endl;
    return EXIT_FAILURE;
  }
}
#else
// Fallback entry point when the required third party libraries (framegrabber,
// display, Pioneer/Aria) are not all available.
int main()
{
  std::cout << "You don't have the right 3rd party libraries to run this example..." << std::endl;
  return EXIT_SUCCESS;
}
#endif