/****************************************************************************
 *
 * ViSP, open source Visual Servoing Platform software.
 * Copyright (C) 2005 - 2019 by Inria. All rights reserved.
 *
 * This software is free software; you can redistribute it and/or modify
 * it under the terms of the GNU General Public License as published by
 * the Free Software Foundation; either version 2 of the License, or
 * (at your option) any later version.
 * See the file LICENSE.txt at the root directory of this source
 * distribution for additional information about the GNU GPL.
 *
 * For using ViSP with software that can not be combined with the GNU
 * GPL, please contact Inria about acquiring a ViSP Professional
 * Edition License.
 *
 * See http://visp.inria.fr for more information.
 *
 * This software was developed at:
 * Inria Rennes - Bretagne Atlantique
 * Campus Universitaire de Beaulieu
 * 35042 Rennes Cedex
 * France
 *
 * If you have questions regarding the use of this file, please contact
 * Inria at visp@inria.fr
 *
 * This file is provided AS IS with NO WARRANTY OF ANY KIND, INCLUDING THE
 * WARRANTY OF DESIGN, MERCHANTABILITY AND FITNESS FOR A PARTICULAR PURPOSE.
 *
 * Description:
 * Make the complete tracking of an object by using its CAD model
 *
 * Authors:
 * Nicolas Melchior
 * Romain Tallonneau
 * Eric Marchand
 *
 *****************************************************************************/

/*!
 \file vpMbEdgeTracker.h
 \brief Make the complete tracking of an object by using its CAD model.
*/

#ifndef vpMbEdgeTracker_HH
#define vpMbEdgeTracker_HH

#include <visp3/core/vpPoint.h>
#include <visp3/mbt/vpMbTracker.h>
#include <visp3/mbt/vpMbtDistanceCircle.h>
#include <visp3/mbt/vpMbtDistanceCylinder.h>
#include <visp3/mbt/vpMbtDistanceLine.h>
#include <visp3/mbt/vpMbtMeLine.h>
#include <visp3/me/vpMe.h>

#include <fstream>
#include <iostream>
#include <list>
#include <vector>

#if defined(VISP_HAVE_COIN3D)
// Inventor includes
#include <Inventor/VRMLnodes/SoVRMLCoordinate.h>
#include <Inventor/VRMLnodes/SoVRMLGroup.h>
#include <Inventor/VRMLnodes/SoVRMLIndexedFaceSet.h>
#include <Inventor/VRMLnodes/SoVRMLIndexedLineSet.h>
#include <Inventor/VRMLnodes/SoVRMLShape.h>
#include <Inventor/actions/SoGetMatrixAction.h>
#include <Inventor/actions/SoGetPrimitiveCountAction.h>
#include <Inventor/actions/SoSearchAction.h>
#include <Inventor/actions/SoToVRML2Action.h>
#include <Inventor/actions/SoWriteAction.h>
#include <Inventor/misc/SoChildList.h>
#include <Inventor/nodes/SoSeparator.h>
#endif

#ifdef VISP_HAVE_OPENCV
#if VISP_HAVE_OPENCV_VERSION >= 0x020101
#include <opencv2/core/core.hpp>
#include <opencv2/imgproc/imgproc.hpp>
#include <opencv2/imgproc/imgproc_c.h>
#else
#include <cv.h>
#endif
#endif

/*!
  \class vpMbEdgeTracker
  \ingroup group_mbt_trackers
  \brief Make the complete tracking of an object by using its CAD model.
  \warning This class is deprecated for user usage. You should rather use the high-level
  vpMbGenericTracker class.

  This class allows the tracking of an object or a scene given its 3D model. A
  video can be found on YouTube \e https://www.youtube.com/watch?v=UK10KMMJFCI.
  The \ref tutorial-tracking-mb-deprecated is also a good starting point to use
  this class.

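  Since this class is deprecated, here is a minimal sketch of the recommended replacement, a
  vpMbGenericTracker configured to use moving-edge features only (assuming
  <visp3/mbt/vpMbGenericTracker.h> is included):
  \code
  vpMbGenericTracker generic_tracker;
  generic_tracker.setTrackerType(vpMbGenericTracker::EDGE_TRACKER); // Edge features only
  \endcode
  The generic tracker then exposes the same loadConfigFile(), loadModel(), initClick() and
  track() workflow illustrated below.
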
  The tracker requires the knowledge of the 3D model, which can be provided in
  a vrml or in a cao file. The cao format is described in loadCAOModel(). The
  tracker may also use an xml file to tune its behavior and an init file used
  to compute the pose at the very first image.

  The following code shows the simplest way to use the tracker.

\code
#include <visp3/core/vpCameraParameters.h>
#include <visp3/core/vpHomogeneousMatrix.h>
#include <visp3/core/vpImage.h>
#include <visp3/gui/vpDisplayX.h>
#include <visp3/io/vpImageIo.h>
#include <visp3/mbt/vpMbEdgeTracker.h>

int main()
{
  vpMbEdgeTracker tracker; // Create a model based tracker.
  vpImage<unsigned char> I;
  vpHomogeneousMatrix cMo; // Pose computed using the tracker.
  vpCameraParameters cam;

  // Acquire an image
  vpImageIo::read(I, "cube.pgm");

#if defined VISP_HAVE_X11
  vpDisplayX display;
  display.init(I,100,100,"Mb Edge Tracker");
#endif

  tracker.loadConfigFile("cube.xml"); // Load the configuration of the tracker
  tracker.getCameraParameters(cam);   // Get the camera parameters used by the tracker (from the configuration file).
  tracker.loadModel("cube.cao");      // Load the 3d model in cao format. No 3rd party library is required
  // Initialise manually the pose by clicking on the image points associated to the 3d points contained in the
  // cube.init file.
  tracker.initClick(I, "cube.init");

  while(true){
    // Acquire a new image
    vpDisplay::display(I);
    tracker.track(I);     // Track the object on this image
    tracker.getPose(cMo); // Get the pose

    tracker.display(I, cMo, cam, vpColor::darkRed, 1); // Display the model at the computed pose.
    vpDisplay::flush(I);
  }

  return 0;
}
\endcode

  For applications with large inter-image displacements, multi-scale tracking
  is also possible. The number of scales to use is set and each scale is
  activated (or not) with a vector of booleans, as presented in the following
  code:

\code
  ...
  vpHomogeneousMatrix cMo; // Pose computed using the tracker.
  vpCameraParameters cam;

  std::vector<bool> scales;  // Three scales used
  scales.push_back(true);    // First scale : active
  scales.push_back(false);   // Second scale (/2) : not active
  scales.push_back(true);    // Third scale (/4) : active
  tracker.setScales(scales); // Set active scales for multi-scale tracking

  tracker.loadConfigFile("cube.xml"); // Load the configuration of the tracker
  tracker.getCameraParameters(cam);   // Get the camera parameters used by the tracker (from the configuration file).
  ...
\endcode

  The tracker can also be used without display. In that case the initial pose
  must be known (for example if the object is always at the same initial pose)
  or computed using another method:

\code
#include <visp3/core/vpCameraParameters.h>
#include <visp3/core/vpHomogeneousMatrix.h>
#include <visp3/core/vpImage.h>
#include <visp3/io/vpImageIo.h>
#include <visp3/mbt/vpMbEdgeTracker.h>

int main()
{
  vpMbEdgeTracker tracker; // Create a model based tracker.
  vpImage<unsigned char> I;
  vpHomogeneousMatrix cMo; // Pose used in entry (has to be defined), then computed using the tracker.

  // Acquire an image
  vpImageIo::read(I, "cube.pgm"); // Example of acquisition

  tracker.loadConfigFile("cube.xml"); // Load the configuration of the tracker
  // Load the 3d model. To read a .wrl model Coin is required; if Coin is not installed a .cao file can be used.
  tracker.loadModel("cube.cao");
  tracker.initFromPose(I, cMo); // Initialize the tracker with the given pose.

  while(true){
    // Acquire a new image
    tracker.track(I);     // Track the object on this image
    tracker.getPose(cMo); // Get the pose
  }

  return 0;
}
\endcode
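
  In that case the initial pose stored in cMo has to be set before calling initFromPose(). A
  possible way to build it is sketched below with purely illustrative values (translation in
  meters, axis-angle rotation in radians):
  \code
  vpTranslationVector t(0, 0, 0.5); // Object assumed 0.5 m in front of the camera
  vpThetaUVector tu(0, 0, 0);       // No rotation
  cMo.buildFrom(t, tu);
  \endcode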

  Finally, it can also be used not to track an object but simply to display a
  model at a given pose:

\code
#include <visp3/core/vpCameraParameters.h>
#include <visp3/core/vpHomogeneousMatrix.h>
#include <visp3/core/vpImage.h>
#include <visp3/gui/vpDisplayX.h>
#include <visp3/io/vpImageIo.h>
#include <visp3/mbt/vpMbEdgeTracker.h>

int main()
{
  vpMbEdgeTracker tracker; // Create a model based tracker.
  vpImage<unsigned char> I;
  vpHomogeneousMatrix cMo; // Pose used to display the model.
  vpCameraParameters cam;

  // Acquire an image
  vpImageIo::read(I, "cube.pgm");

#if defined VISP_HAVE_X11
  vpDisplayX display;
  display.init(I,100,100,"Mb Edge Tracker");
#endif

  tracker.loadConfigFile("cube.xml"); // Load the configuration of the tracker
  tracker.getCameraParameters(cam);   // Get the camera parameters used by the tracker (from the configuration file).
  // Load the 3d model. To read a .wrl model Coin is required; if Coin is not installed
  // a .cao file can be used.
  tracker.loadModel("cube.cao");

  while(true){
    // Acquire a new image
    // Get the pose using any method
    vpDisplay::display(I);
    tracker.display(I, cMo, cam, vpColor::darkRed, 1, true); // Display the model at the given pose.
    vpDisplay::flush(I);
  }

  return 0;
}
\endcode

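  Instead of (or in addition to) the xml configuration file, the moving edge parameters can also
  be tuned programmatically through a vpMe object passed to setMovingEdge(). The following is a
  sketch with purely illustrative values:
  \code
  vpMe me;
  me.setMaskSize(5);      // Size of the convolution mask used to detect an edge
  me.setSampleStep(4);    // Distance in pixels between two moving edge samples along a line
  me.setRange(8);         // Search range on each side of the edge, along the normal
  me.setThreshold(10000); // Contrast threshold used to validate a moving edge
  tracker.setMovingEdge(me);
  \endcode
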
*/

class VISP_EXPORT vpMbEdgeTracker : public virtual vpMbTracker
{
protected:
  //! The moving edge parameters.
  vpMe me;
  //! Vector of lists of all the lines tracked (each line is linked to a list
  //! of moving edges). Each element of the vector is for a scale (element 0 =
  //! level 0 = no subsampling).
  std::vector<std::list<vpMbtDistanceLine *> > lines;

  //! Vector of the tracked circles.
  std::vector<std::list<vpMbtDistanceCircle *> > circles;

  //! Vector of the tracked cylinders.
  std::vector<std::list<vpMbtDistanceCylinder *> > cylinders;

  //! Index of the line to add, and total number of lines extracted so far.
  unsigned int nline;

  //! Index of the circle to add, and total number of circles extracted so
  //! far.
  unsigned int ncircle;

  //! Index of the cylinder to add, and total number of cylinders extracted so
  //! far.
  unsigned int ncylinder;

  //! Number of polygons (faces) currently visible.
  unsigned int nbvisiblepolygone;

  //! Percentage of good points over the total number of points below which
  //! tracking is considered to have failed.
  double percentageGdPt;

  //! Vector of scale levels to use for the multi-scale tracking.
  std::vector<bool> scales;

  //! Pyramid of images associated to the current image. This pyramid is
  //! computed in the init() and in the track() methods.
  std::vector<const vpImage<unsigned char> *> Ipyramid;

  //! Current scale level used. This attribute must not be modified outside of
  //! the downScale() and upScale() methods, as it is used to specify to some
  //! methods which set of distanceLine to use.
  unsigned int scaleLevel;

  //! Number of features used in the computation of the projection error.
  unsigned int nbFeaturesForProjErrorComputation;

  /// Edge VVS variables
  vpColVector m_factor;
  vpRobust m_robustLines;
  vpRobust m_robustCylinders;
  vpRobust m_robustCircles;
  vpColVector m_wLines;
  vpColVector m_wCylinders;
  vpColVector m_wCircles;
  vpColVector m_errorLines;
  vpColVector m_errorCylinders;
  vpColVector m_errorCircles;
  //! Interaction matrix
  vpMatrix m_L_edge;
  //! (s - s*)
  vpColVector m_error_edge;
  //! Robust weights
  vpColVector m_w_edge;
  //! Weighted error
  vpColVector m_weightedError_edge;
  //! Robust
  vpRobust m_robust_edge;
  //! Display features
  std::vector<std::vector<double> > m_featuresToBeDisplayedEdge;

public:
  vpMbEdgeTracker();
  virtual ~vpMbEdgeTracker();

  /** @name Inherited functionalities from vpMbEdgeTracker */
  //@{

  virtual void display(const vpImage<unsigned char> &I, const vpHomogeneousMatrix &cMo, const vpCameraParameters &cam,
                       const vpColor &col, unsigned int thickness = 1, bool displayFullModel = false);
  virtual void display(const vpImage<vpRGBa> &I, const vpHomogeneousMatrix &cMo, const vpCameraParameters &cam,
                       const vpColor &col, unsigned int thickness = 1, bool displayFullModel = false);

  void getLline(std::list<vpMbtDistanceLine *> &linesList, unsigned int level = 0) const;
  void getLcircle(std::list<vpMbtDistanceCircle *> &circlesList, unsigned int level = 0) const;
  void getLcylinder(std::list<vpMbtDistanceCylinder *> &cylindersList, unsigned int level = 0) const;

  virtual std::vector<std::vector<double> > getModelForDisplay(unsigned int width, unsigned int height,
                                                               const vpHomogeneousMatrix &cMo,
                                                               const vpCameraParameters &cam,
                                                               bool displayFullModel = false);

  /*!
    Get the moving edge parameters.

    \param p_me [out] : an instance of the moving edge parameters used by the
    tracker.
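
    A minimal usage sketch (the range value is only illustrative):
    \code
    vpMe moving_edge;
    tracker.getMovingEdge(moving_edge);
    moving_edge.setRange(8); // Enlarge the search range along the edge normal
    tracker.setMovingEdge(moving_edge);
    \endcode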
  */
  virtual inline void getMovingEdge(vpMe &p_me) const { p_me = this->me; }
  /*!
    Get the moving edge parameters.

    \return an instance of the moving edge parameters used by the tracker.
  */
  virtual inline vpMe getMovingEdge() const { return this->me; }

  virtual unsigned int getNbPoints(unsigned int level = 0) const;

  /*!
    Return the scale levels used for the tracking.

    \return The scale levels used for the tracking.
  */
  std::vector<bool> getScales() const { return scales; }
  /*!
     \return The threshold value between 0 and 1 over the good moving edges ratio.
     It is used to decide if the tracker has enough valid moving edges to
     compute a pose. 1 means that all moving edges should be considered as
     good to have a valid pose, while 0.1 means that 10% of the moving edges
     are enough to declare a pose valid.

     \sa setGoodMovingEdgesRatioThreshold()
   */
  inline double getGoodMovingEdgesRatioThreshold() const { return percentageGdPt; }

  virtual inline vpColVector getError() const { return m_error_edge; }

  virtual inline vpColVector getRobustWeights() const { return m_w_edge; }

  virtual void loadConfigFile(const std::string &configFile, bool verbose = true);

  virtual void reInitModel(const vpImage<unsigned char> &I, const std::string &cad_name,
                           const vpHomogeneousMatrix &cMo, bool verbose = false,
                           const vpHomogeneousMatrix &T = vpHomogeneousMatrix());
  void resetTracker();

  /*!
    Set the camera parameters.

    \param cam : The new camera parameters.
  */
  virtual void setCameraParameters(const vpCameraParameters &cam)
  {
    m_cam = cam;

    for (unsigned int i = 0; i < scales.size(); i += 1) {
      if (scales[i]) {
        for (std::list<vpMbtDistanceLine *>::const_iterator it = lines[i].begin(); it != lines[i].end(); ++it) {
          (*it)->setCameraParameters(m_cam);
        }

        for (std::list<vpMbtDistanceCylinder *>::const_iterator it = cylinders[i].begin(); it != cylinders[i].end();
             ++it) {
          (*it)->setCameraParameters(m_cam);
        }

        for (std::list<vpMbtDistanceCircle *>::const_iterator it = circles[i].begin(); it != circles[i].end(); ++it) {
          (*it)->setCameraParameters(m_cam);
        }
      }
    }
  }

  virtual void setClipping(const unsigned int &flags);

  virtual void setFarClippingDistance(const double &dist);

  virtual void setNearClippingDistance(const double &dist);

  /*!
    Use Ogre3D for visibility tests

    \warning This function has to be called before the initialization of the
    tracker.

    \param v : True to use it, False otherwise
  */
  virtual void setOgreVisibilityTest(const bool &v)
  {
    vpMbTracker::setOgreVisibilityTest(v);
#ifdef VISP_HAVE_OGRE
    faces.getOgreContext()->setWindowName("MBT Edge");
#endif
  }

  /*!
    Use Scanline algorithm for visibility tests

    \param v : True to use it, False otherwise
  */
  virtual void setScanLineVisibilityTest(const bool &v)
  {
    vpMbTracker::setScanLineVisibilityTest(v);

    for (unsigned int i = 0; i < scales.size(); i += 1) {
      if (scales[i]) {
        for (std::list<vpMbtDistanceLine *>::const_iterator it = lines[i].begin(); it != lines[i].end(); ++it) {
          (*it)->useScanLine = v;
        }
      }
    }
  }

  /*!
     Set the threshold value between 0 and 1 over the good moving edges ratio. It
     is used to decide if the tracker has enough valid moving edges to compute
     a pose. 1 means that all moving edges should be considered as good to
     have a valid pose, while 0.1 means that 10% of the moving edges are enough
     to declare a pose valid.

     \param threshold : Value between 0 and 1 that corresponds to the ratio of
     good moving edges that is necessary to consider that the estimated pose
     is valid. Default value is 0.4.

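     A typical call (the value is only illustrative):
     \code
     tracker.setGoodMovingEdgesRatioThreshold(0.4); // Require at least 40% of good moving edges
     \endcode
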
     \sa getGoodMovingEdgesRatioThreshold()
   */
  void setGoodMovingEdgesRatioThreshold(double threshold) { percentageGdPt = threshold; }

  void setMovingEdge(const vpMe &me);

  virtual void setPose(const vpImage<unsigned char> &I, const vpHomogeneousMatrix &cdMo);
  virtual void setPose(const vpImage<vpRGBa> &I_color, const vpHomogeneousMatrix &cdMo);

  void setScales(const std::vector<bool> &_scales);

  void setUseEdgeTracking(const std::string &name, const bool &useEdgeTracking);

  virtual void track(const vpImage<unsigned char> &I);
  virtual void track(const vpImage<vpRGBa> &I);
  //@}

protected:
  /** @name Protected Member Functions Inherited from vpMbEdgeTracker */
  //@{
  void addCircle(const vpPoint &P1, const vpPoint &P2, const vpPoint &P3, double r, int idFace = -1,
                 const std::string &name = "");
  void addCylinder(const vpPoint &P1, const vpPoint &P2, double r, int idFace = -1, const std::string &name = "");
  void addLine(vpPoint &p1, vpPoint &p2, int polygon = -1, std::string name = "");
  void addPolygon(vpMbtPolygon &p);

  void cleanPyramid(std::vector<const vpImage<unsigned char> *> &_pyramid);
  void computeProjectionError(const vpImage<unsigned char> &_I);

  void computeVVS(const vpImage<unsigned char> &_I, unsigned int lvl);
  void computeVVSFirstPhase(const vpImage<unsigned char> &I, unsigned int iter, double &count,
                            unsigned int lvl = 0);
  void computeVVSFirstPhaseFactor(const vpImage<unsigned char> &I, unsigned int lvl = 0);
  void computeVVSFirstPhasePoseEstimation(unsigned int iter, bool &isoJoIdentity_);
  virtual void computeVVSInit();
  virtual void computeVVSInteractionMatrixAndResidu();
  virtual void computeVVSInteractionMatrixAndResidu(const vpImage<unsigned char> &I);
  virtual void computeVVSWeights();
  using vpMbTracker::computeVVSWeights;

  void displayFeaturesOnImage(const vpImage<unsigned char> &I);
  void displayFeaturesOnImage(const vpImage<vpRGBa> &I);
  void downScale(const unsigned int _scale);
  virtual std::vector<std::vector<double> > getFeaturesForDisplayEdge();
  virtual void init(const vpImage<unsigned char> &I);
  virtual void initCircle(const vpPoint &p1, const vpPoint &p2, const vpPoint &p3, double radius,
                          int idFace = 0, const std::string &name = "");
  virtual void initCylinder(const vpPoint &p1, const vpPoint &p2, double radius, int idFace = 0,
                            const std::string &name = "");
  virtual void initFaceFromCorners(vpMbtPolygon &polygon);
  virtual void initFaceFromLines(vpMbtPolygon &polygon);
  unsigned int initMbtTracking(unsigned int &nberrors_lines, unsigned int &nberrors_cylinders,
                               unsigned int &nberrors_circles);
  void initMovingEdge(const vpImage<unsigned char> &I, const vpHomogeneousMatrix &_cMo);
  void initPyramid(const vpImage<unsigned char> &_I, std::vector<const vpImage<unsigned char> *> &_pyramid);
  void reInitLevel(const unsigned int _lvl);
  void reinitMovingEdge(const vpImage<unsigned char> &I, const vpHomogeneousMatrix &_cMo);
  void removeCircle(const std::string &name);
  void removeCylinder(const std::string &name);
  void removeLine(const std::string &name);
  void resetMovingEdge();
  virtual void testTracking();
  void trackMovingEdge(const vpImage<unsigned char> &I);
  void updateMovingEdge(const vpImage<unsigned char> &I);
  void updateMovingEdgeWeights();
  void upScale(const unsigned int _scale);
  void visibleFace(const vpImage<unsigned char> &_I, const vpHomogeneousMatrix &_cMo, bool &newvisibleline);
  //@}
};

#endif