/****************************************************************************
 *
 * ViSP, open source Visual Servoing Platform software.
 * Copyright (C) 2005 - 2019 by Inria. All rights reserved.
 *
 * This software is free software; you can redistribute it and/or modify
 * it under the terms of the GNU General Public License as published by
 * the Free Software Foundation; either version 2 of the License, or
 * (at your option) any later version.
 * See the file LICENSE.txt at the root directory of this source
 * distribution for additional information about the GNU GPL.
 *
 * For using ViSP with software that can not be combined with the GNU
 * GPL, please contact Inria about acquiring a ViSP Professional
 * Edition License.
 *
 * See http://visp.inria.fr for more information.
 *
 * This software was developed at:
 * Inria Rennes - Bretagne Atlantique
 * Campus Universitaire de Beaulieu
 * 35042 Rennes Cedex
 * France
 *
 * If you have questions regarding the use of this file, please contact
 * Inria at visp@inria.fr
 *
 * This file is provided AS IS with NO WARRANTY OF ANY KIND, INCLUDING THE
 * WARRANTY OF DESIGN, MERCHANTABILITY AND FITNESS FOR A PARTICULAR PURPOSE.
 *
 * Description:
 * Implementation of a simple augmented reality application using the vpAROgre
 * class.
 *
 * Authors:
 * Bertrand Delabarre
 *
 *****************************************************************************/

/*!
  \example AROgre.cpp
  Example of augmented reality based on Ogre3D.
*/

#include <iostream>
#include <visp3/core/vpConfig.h>

//#if defined(VISP_HAVE_OGRE) && (defined(VISP_HAVE_OPENCV) ||
// defined(VISP_HAVE_GDI) || defined(VISP_HAVE_D3D9) || defined(VISP_HAVE_GTK)
//|| (defined(VISP_HAVE_X11) && ! defined(APPLE)))
#if defined(VISP_HAVE_OGRE) &&                                                                                         \
    (defined(VISP_HAVE_OPENCV) || defined(VISP_HAVE_GDI) || defined(VISP_HAVE_D3D9) || defined(VISP_HAVE_GTK) ||       \
     (defined(VISP_HAVE_X11) && !(defined(__APPLE__) && defined(__MACH__))))

//#if defined(VISP_HAVE_X11) && ! defined(APPLE)
#if defined(VISP_HAVE_X11) && !(defined(__APPLE__) && defined(__MACH__))
// Including vpDisplayX.h produces an error on OSX: ‘typedef int Cursor’
// /usr/X11R6/include/X11/X.h:108: error: ‘Cursor’ has a previous
// declaration as ‘typedef XID Cursor’. That's why it should not be
// used on APPLE platforms
#include <visp3/gui/vpDisplayX.h>
#endif
#include <visp3/ar/vpAROgre.h>
#include <visp3/blob/vpDot2.h>
#include <visp3/core/vpDebug.h>
#include <visp3/core/vpImagePoint.h>
#include <visp3/core/vpIoTools.h>
#include <visp3/core/vpPixelMeterConversion.h>
#include <visp3/core/vpPoint.h>
#include <visp3/gui/vpDisplayD3D.h>
#include <visp3/gui/vpDisplayGDI.h>
#include <visp3/gui/vpDisplayGTK.h>
#include <visp3/gui/vpDisplayOpenCV.h>
#include <visp3/io/vpParseArgv.h>
#include <visp3/io/vpVideoReader.h>
#include <visp3/vision/vpPose.h>

// List of allowed command line options
#define GETOPTARGS "ci:p:h"

/*!

  Print the program options.

  \param name : Program name.
  \param badparam : Bad parameter name.
  \param ipath : Input image path.
  \param ppath : Personal image path.

*/
void usage(const char *name, const char *badparam, std::string ipath, std::string ppath)
{
  fprintf(stdout, "\n\
Test augmented reality using the vpAROgre class.\n\
\n\
SYNOPSIS\n\
  %s [-i <test image path>] [-p <personal image path>]\n\
     [-c] [-h]\n", name);

  fprintf(stdout, "\n\
OPTIONS:                                               Default\n\
  -i <input image path>                                %s\n\
     Set image input path.\n\
     From this path read images \n\
     \"mire-2/image.%%04d.pgm\". These \n\
     images come from ViSP-images-x.y.z.tar.gz available \n\
     on the ViSP website.\n\
     Setting the VISP_INPUT_IMAGE_PATH environment\n\
     variable produces the same behaviour as using\n\
     this option.\n\
\n\
  -p <personal image path>                             %s\n\
     Specify a personal sequence containing images \n\
     to process.\n\
     By image sequence, we mean one file per image.\n\
     The following image file formats are supported:\n\
     PNM (PGM P5, PPM P6). The format is selected by\n\
     analysing the filename extension.\n\
     Example : \"/Temp/ViSP-images/cube/image.%%04d.pgm\"\n\
     %%04d is for the image numbering.\n\
\n\
  -c\n\
     Disable the mouse click. Useful to automate the \n\
     execution of this program without human intervention.\n\
\n\
  -h\n\
     Print the help.\n", ipath.c_str(), ppath.c_str());

  if (badparam)
    fprintf(stdout, "\nERROR: Bad parameter [%s]\n", badparam);
}
/*!

  Set the program options.

  \param argc : Number of command line parameters.
  \param argv : Array of command line parameters.
  \param ipath : Input image path.
  \param ppath : Personal image path.
  \param click_allowed : Mouse click activation.

  \return false if the program has to be stopped, true otherwise.

*/
bool getOptions(int argc, const char **argv, std::string &ipath, std::string &ppath, bool &click_allowed)
{
  const char *optarg_;
  int c;
  while ((c = vpParseArgv::parse(argc, argv, GETOPTARGS, &optarg_)) > 1) {

    switch (c) {
    case 'c':
      click_allowed = false;
      break;
    case 'i':
      ipath = optarg_;
      break;
    case 'p':
      ppath = optarg_;
      break;
    case 'h':
      usage(argv[0], NULL, ipath, ppath);
      return false;
      break;

    default:
      usage(argv[0], optarg_, ipath, ppath);
      return false;
      break;
    }
  }

  if ((c == 1) || (c == -1)) {
    // standalone param or error
    usage(argv[0], NULL, ipath, ppath);
    std::cerr << "ERROR: " << std::endl;
    std::cerr << "  Bad argument " << optarg_ << std::endl << std::endl;
    return false;
  }

  return true;
}

#ifndef DOXYGEN_SHOULD_SKIP_THIS

class vpAROgreExample : public vpAROgre
{
public:
  // The constructor simply forwards its parameters to vpAROgre and initializes the members
  vpAROgreExample(const vpCameraParameters &cam = vpCameraParameters(), unsigned int width = 640,
                  unsigned int height = 480, const char *resourcePath = NULL)
    : vpAROgre(cam, width, height)
  {
    if (resourcePath)
      mResourcePath = resourcePath;
    std::cout << "mResourcePath: " << mResourcePath << std::endl;
    // Forward direction vector
    vecDevant = Ogre::Vector3(0, -1, 0);
    robot = NULL;
    mAnimationState = NULL;
  }
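
  // Note: mResourcePath is inherited from vpAROgre and is assumed to point to the
  // directory holding Ogre's resource configuration, so that the sample media used
  // below ("robot.mesh", the "Examples/GrassFloor" material) can be located at startup.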

protected:
  // Attributes
  // Forward direction vector used to move the robot ("vecDevant" means "forward vector")
  Ogre::Vector3 vecDevant;
  // Animation attribute
  Ogre::AnimationState *mAnimationState;
  // The entity representing the robot
  Ogre::Entity *robot;

  // Our scene: an animated robot standing on a textured ground plane
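  // createScene() overrides the virtual hook of the same name declared in vpAROgre;
  // it is expected to be called once by vpAROgre::init(), so that the light, the
  // robot and the ground are created before the rendering loop starts.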
  void createScene()
  {
    // Lights
    mSceneMgr->setAmbientLight(Ogre::ColourValue((float)0.6, (float)0.6, (float)0.6)); // Default ambient lighting
    Ogre::Light *light = mSceneMgr->createLight();
    light->setDiffuseColour(1.0, 1.0, 1.0);  // scaled RGB values
    light->setSpecularColour(1.0, 1.0, 1.0); // scaled RGB values
    // Point light
    light->setPosition(-5, -5, 10);
    light->setType(Ogre::Light::LT_POINT);
    light->setAttenuation((Ogre::Real)100, (Ogre::Real)1.0, (Ogre::Real)0.045, (Ogre::Real)0.0075);
    // Shadows
    light->setCastShadows(true);

    // Create the Entity
    robot = mSceneMgr->createEntity("Robot", "robot.mesh");
    // Attach robot to scene graph
    Ogre::SceneNode *RobotNode = mSceneMgr->getRootSceneNode()->createChildSceneNode("Robot");
    RobotNode->attachObject(robot);
    RobotNode->scale((Ogre::Real)0.001, (Ogre::Real)0.001, (Ogre::Real)0.001);
    RobotNode->pitch(Ogre::Degree(90));
    RobotNode->yaw(Ogre::Degree(-90));
    robot->setCastShadows(true);
    mSceneMgr->setShadowTechnique(Ogre::SHADOWTYPE_STENCIL_MODULATIVE);

    // Add an animation
    // Select the "Idle" animation
    mAnimationState = robot->getAnimationState("Idle");
    // Start over when finished
    mAnimationState->setLoop(true);
    // Animation enabled
    mAnimationState->setEnabled(true);

    // Add a ground plane
    Ogre::Plane plan;
    plan.d = 0;
    plan.normal = Ogre::Vector3::UNIT_Z;
    Ogre::MeshManager::getSingleton().createPlane("sol", Ogre::ResourceGroupManager::DEFAULT_RESOURCE_GROUP_NAME, plan,
                                                  (Ogre::Real)0.22, (Ogre::Real)0.16, 10, 10, true, 1, 1, 1);
    Ogre::Entity *ent = mSceneMgr->createEntity("Entitesol", "sol");
    Ogre::SceneNode *PlaneNode = mSceneMgr->getRootSceneNode()->createChildSceneNode("Entitesol");
    PlaneNode->attachObject(ent);
    ent->setMaterialName("Examples/GrassFloor");
  }

  bool customframeEnded(const Ogre::FrameEvent &evt)
  {
    // Update the animation: advance it by the time elapsed since the last frame
    mAnimationState->addTime(evt.timeSinceLastFrame);
    return true;
  }

#ifdef VISP_HAVE_OIS
  bool processInputEvent(const Ogre::FrameEvent & /*evt*/)
  {
    mKeyboard->capture();
    Ogre::Matrix3 rotmy;
    double angle = -M_PI / 8;
    if (mKeyboard->isKeyDown(OIS::KC_ESCAPE))
      return false;

    // Flag telling that the robot has to move; when true the animation is set to
    // "Walk", otherwise it goes back to "Idle"
    bool event = false;
    // Check keyboard inputs
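    // Note: KC_Z/KC_Q are checked alongside KC_UP/KC_LEFT, presumably to support
    // French AZERTY keyboards, where Z/Q/S/D sit where W/A/S/D are on QWERTY.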
    if (mKeyboard->isKeyDown(OIS::KC_Z) || mKeyboard->isKeyDown(OIS::KC_UP)) {
      mSceneMgr->getSceneNode("Robot")->setPosition(mSceneMgr->getSceneNode("Robot")->getPosition() +
                                                    (Ogre::Real)0.003 * vecDevant);
      event = true;
    }
    if (mKeyboard->isKeyDown(OIS::KC_S) || mKeyboard->isKeyDown(OIS::KC_DOWN)) {
      mSceneMgr->getSceneNode("Robot")->setPosition(mSceneMgr->getSceneNode("Robot")->getPosition() -
                                                    (Ogre::Real)0.003 * vecDevant);
      event = true;
    }
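    // For the left/right keys below: build an in-plane rotation (about the scene
    // z axis), rotate the forward vector with it (Ogre::Matrix3 takes its entries
    // row by row and vecDevant is multiplied as a row vector), and yaw the robot
    // node by the same amount so the mesh keeps facing its walking direction.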
    if (mKeyboard->isKeyDown(OIS::KC_Q) || mKeyboard->isKeyDown(OIS::KC_LEFT)) {
      rotmy = Ogre::Matrix3((Ogre::Real)cos(-angle), (Ogre::Real)sin(-angle), 0, (Ogre::Real)(-sin(-angle)),
                            (Ogre::Real)cos(-angle), 0, 0, 0, 1);
      vecDevant = vecDevant * rotmy;
      mSceneMgr->getSceneNode("Robot")->yaw(Ogre::Radian((Ogre::Real)(-angle)));
      event = true;
    }
    if (mKeyboard->isKeyDown(OIS::KC_D) || mKeyboard->isKeyDown(OIS::KC_RIGHT)) {
      rotmy = Ogre::Matrix3((Ogre::Real)cos(angle), (Ogre::Real)sin(angle), 0, (Ogre::Real)(-sin(angle)),
                            (Ogre::Real)cos(angle), 0, 0, 0, 1);
      vecDevant = vecDevant * rotmy;
      mSceneMgr->getSceneNode("Robot")->yaw(Ogre::Radian((Ogre::Real)angle));
      event = true;
    }

    // Play the right animation
    if (event) {
      mAnimationState = robot->getAnimationState("Walk");
    } else
      mAnimationState = robot->getAnimationState("Idle");

    // Start over when finished
    mAnimationState->setLoop(true);
    // Animation enabled
    mAnimationState->setEnabled(true);

    return true;
  }
#endif
};

/*!
  This function computes a pose from four black dots.
  To keep the dimensions coherent, the four dots must be located at
  (-7,6,0), (7,6,0), (7,-6,0), (-7,-6,0) (in cm) in the real world.
*/
void computeInitialPose(vpCameraParameters *mcam, vpImage<unsigned char> &I, vpPose *mPose, vpDot2 *md,
                        vpImagePoint *mcog, vpHomogeneousMatrix *cMo, vpPoint *mP, const bool &opt_click_allowed)
{
  // ---------------------------------------------------
  //    Code inspired by the ViSP camera pose example
  // ---------------------------------------------------
  bool opt_display = true;

//#if defined(VISP_HAVE_X11) && ! defined(APPLE)
#if defined(VISP_HAVE_X11) && !(defined(__APPLE__) && defined(__MACH__))
  // Including vpDisplayX.h produces an error on OSX: ‘typedef int Cursor’
  // /usr/X11R6/include/X11/X.h:108: error: ‘Cursor’ has a previous
  // declaration as ‘typedef XID Cursor’. That's why it should not be
  // used on APPLE platforms
  vpDisplayX display;
#elif defined VISP_HAVE_GTK
  vpDisplayGTK display;
#elif defined VISP_HAVE_GDI
  vpDisplayGDI display;
#elif defined VISP_HAVE_OPENCV
  vpDisplayOpenCV display;
#elif defined VISP_HAVE_D3D9
  vpDisplayD3D display;
#endif
  for (unsigned int i = 0; i < 4; i++) {
    if (opt_display) {
      md[i].setGraphics(true);
    } else {
      md[i].setGraphics(false);
    }
  }

  if (opt_display) {
    try {
      // Display size is automatically defined by the image (I) size
      display.init(I, 100, 100, "Preliminary Pose Calculation");
      // display the image
      // The image class has a member that specifies a pointer toward
      // the display that has been initialized in the display declaration;
      // therefore it is no longer necessary to make a reference to the
      // display variable.
      vpDisplay::display(I);
      // Flush the display
      vpDisplay::flush(I);

    } catch (...) {
      vpERROR_TRACE("Error while displaying the image");
      return;
    }
  }

  std::cout << "*************************************************************"
               "***********************"
            << std::endl;
  std::cout << "*************************** Preliminary Pose Calculation "
               "***************************"
            << std::endl;
  std::cout << "******************************  Click on the 4 dots  "
               "*******************************"
            << std::endl;
  std::cout << "********Dot1 : (-x,-y,0), Dot2 : (x,-y,0), Dot3 : (x,y,0), "
               "Dot4 : (-x,y,0)**********"
            << std::endl;
  std::cout << "*************************************************************"
               "***********************"
            << std::endl;

  try {
    vpImagePoint ip[4];
    if (!opt_click_allowed) {
      ip[0].set_i(265);
      ip[0].set_j(93);
      ip[1].set_i(248);
      ip[1].set_j(242);
      ip[2].set_i(166);
      ip[2].set_j(215);
      ip[3].set_i(178);
      ip[3].set_j(85);
    }
    for (unsigned int i = 0; i < 4; i++) {
      // by using setGraphics, we request to see the edges of the dot
      // in red on the screen.
      // It uses the overlay image plane.
      // The drawback of this setting is that it is time consuming

      md[i].setGraphics(true);
      md[i].setGrayLevelPrecision(0.7);
      md[i].setSizePrecision(0.5);

      for (unsigned int j = 0; j < i; j++)
        md[j].display(I);

      // flush the display buffer
      vpDisplay::flush(I);
      try {
        if (opt_click_allowed)
          md[i].initTracking(I);
        else
          md[i].initTracking(I, ip[i]);
      } catch (...) {
      }

      mcog[i] = md[i].getCog();
      // an exception is thrown by the track method if
      //  - the dot is lost
      //  - the number of pixels is too small
      //  - too many pixels are detected (this is usual when a "big" specularity
      //    occurs); the threshold can be modified using the
      //    setNbMaxPoint(int) method
      if (opt_display) {
        md[i].display(I);
        // flush the display buffer
        vpDisplay::flush(I);
      }
    }
  } catch (const vpException &e) {
    vpERROR_TRACE("Error while tracking dots");
    vpCTRACE << e;
    return;
  }

  if (opt_display) {
    // display a red cross (size 10) in the image at the dot center
    // of gravity location
    //
    // WARNING
    // in the vpDisplay class members, when pixel coordinates
    // are considered, the first element is the row index and the second
    // is the column index:
    //   vpDisplay::displayCross(Image, row index, column index, size, color)
    //   therefore u and v are inverted w.r.t. the vpDot specification
    // Alternatively, to avoid this problem another set of members has
    // been defined in the vpDisplay class.
    // If the method name is postfixed with _uv the specification is:
    //   vpDisplay::displayCross_uv(Image, column index, row index, size,
    //   color)

    for (unsigned int i = 0; i < 4; i++)
      vpDisplay::displayCross(I, mcog[i], 10, vpColor::red);

    // flush the display buffer
    vpDisplay::flush(I);
  }

  // --------------------------------------------------------
  //             Now we will compute the pose
  // --------------------------------------------------------

  //  the list of points is cleared (in case it was not done before)
  mPose->clearPoint();

  // we set the 3D point coordinates (in meters!) in the object/world frame
  double l = 0.06;
  double L = 0.07;
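  // These half-dimensions (in meters) match the dot layout documented above: the
  // four dots form a 2L x 2l (14 cm x 12 cm) rectangle centered on the object
  // frame origin and lying in the Z = 0 plane.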
  mP[0].setWorldCoordinates(-L, -l, 0); // (X,Y,Z)
  mP[1].setWorldCoordinates(L, -l, 0);
  mP[2].setWorldCoordinates(L, l, 0);
  mP[3].setWorldCoordinates(-L, l, 0);

  // pixel -> meter conversion
  for (unsigned int i = 0; i < 4; i++) {
    // u[i], v[i] are expressed in pixels
    // conversion to meters is achieved using
    // x = (u-u0)/px
    // y = (v-v0)/py
    // where px, py, u0, v0 are the intrinsic camera parameters
    double x = 0, y = 0;
    vpPixelMeterConversion::convertPoint(*mcam, mcog[i], x, y);
    mP[i].set_x(x);
    mP[i].set_y(y);
  }

  // The pose structure is built: we add to the point list the set of points for
  // which both the 2D image coordinates and the 3D world coordinates are known
  for (unsigned int i = 0; i < 4; i++) {
    mPose->addPoint(mP[i]); // each point is added to the pose computation point list
  }

  // compute the initial pose using a linear method (Lagrange) followed by a
  // non-linear minimisation method

  // Pose by Lagrange: it provides an initialization of the pose
  mPose->computePose(vpPose::LAGRANGE, *cMo);
  // the pose is now refined using the virtual visual servoing approach
  // Warning: cMo needs to be initialized, otherwise it may diverge
  mPose->computePose(vpPose::VIRTUAL_VS, *cMo);

  // Display briefly just to have a glimpse at the ViSP pose
  if (opt_display) {
    // Display the computed pose
    mPose->display(I, *cMo, *mcam, 0.05, vpColor::red);
    vpDisplay::flush(I);
    vpTime::wait(800);
  }
}

#endif

int main(int argc, const char **argv)
{
  try {
    std::string env_ipath;
    std::string opt_ipath;
    std::string ipath;
    std::string opt_ppath;
    std::string dirname;
    std::string filename;
    bool opt_click_allowed = true;

    // Get the visp-images-data package path or VISP_INPUT_IMAGE_PATH
    // environment variable value
    env_ipath = vpIoTools::getViSPImagesDataPath();

    // Set the default input path
    if (!env_ipath.empty())
      ipath = env_ipath;

    // Read the command line options
    if (getOptions(argc, argv, opt_ipath, opt_ppath, opt_click_allowed) == false) {
      exit(-1);
    }

    // Get the option values
    if (!opt_ipath.empty())
      ipath = opt_ipath;

    // Compare ipath and env_ipath. If they differ, we take into account
    // the input path coming from the command line option
    if (!opt_ipath.empty() && !env_ipath.empty() && opt_ppath.empty()) {
      if (ipath != env_ipath) {
        std::cout << std::endl << "WARNING: " << std::endl;
        std::cout << "  Since -i <visp image path=" << ipath << "> "
                  << "  is different from VISP_INPUT_IMAGE_PATH=" << env_ipath << std::endl
                  << "  we skip the environment variable." << std::endl;
      }
    }

    // Test if an input path is set
    if (opt_ipath.empty() && env_ipath.empty() && opt_ppath.empty()) {
      usage(argv[0], NULL, ipath, opt_ppath);
      std::cerr << std::endl << "ERROR:" << std::endl;
      std::cerr << "  Use -i <visp image path> option or set VISP_INPUT_IMAGE_PATH " << std::endl
                << "  environment variable to specify the location of the " << std::endl
                << "  path where the test images are located." << std::endl
                << "  Use -p <personal image path> option if you want to " << std::endl
                << "  use personal images." << std::endl
                << std::endl;

      exit(-1);
    }

    std::ostringstream s;

    if (opt_ppath.empty()) {
      // Set the path location of the image sequence
      dirname = vpIoTools::createFilePath(ipath, "mire-2");

      // Build the name of the image file

      s.setf(std::ios::right, std::ios::adjustfield);
      s << "image.%04d.pgm";
      filename = vpIoTools::createFilePath(dirname, s.str());
    } else {
      filename = opt_ppath;
    }

    // We will read a sequence of images
    vpVideoReader grabber;
    grabber.setFirstFrameIndex(1);
    grabber.setFileName(filename.c_str());
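    // With a printf-like pattern such as "image.%04d.pgm", vpVideoReader is
    // expected to iterate over the files as an image sequence, starting here at
    // frame index 1.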
    // Grey level image associated with a display during the initial pose
    // computation
    vpImage<unsigned char> Idisplay;
    // Grey level image used to track the points
    vpImage<unsigned char> I;
    // RGBa image used as the background
    vpImage<vpRGBa> IC;
    // Homogeneous matrix representing the pose of the object frame in the camera frame
    vpHomogeneousMatrix cMo;

    // Variables used for pose computation purposes
    vpPose mPose;
    vpDot2 md[4];
    vpImagePoint mcog[4];
    vpPoint mP[4];

    // Camera parameters obtained from calibration
    // Keep u0 and v0 at the center of the image
    vpCameraParameters mcam;

    // Read the PGM image named "filename" from the disk, and put the
    // bitmap into the image structure Idisplay, which is initialized to
    // the correct size
    //
    // the reader may throw various exceptions if, for example,
    // the file does not exist or the memory cannot be allocated
    try {
      vpCTRACE << "Load: " << filename << std::endl;
      grabber.open(Idisplay);
      grabber.acquire(Idisplay);
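      // vpCameraParameters(px, py, u0, v0): the pixel ratios px = 592 and py = 570
      // come from a prior calibration of the camera used for this sequence (see the
      // comment on mcam above), and the principal point (u0, v0) is simply taken at
      // the image center.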
      vpCameraParameters mcamTmp(592, 570, grabber.getWidth() / 2, grabber.getHeight() / 2);
      // Compute the initial pose of the camera
      computeInitialPose(&mcamTmp, Idisplay, &mPose, md, mcog, &cMo, mP, opt_click_allowed);
      // Close the framegrabber
      grabber.close();

      // Associate the grabber with the RGBa image
      grabber.open(IC);
      mcam.init(mcamTmp);
    } catch (...) {
      // we end up here if an exception was thrown while reading the images;
      // this results in the end of the program. Note that another error
      // message has already been printed by the reader to give more
      // information about the error
      std::cerr << std::endl << "ERROR:" << std::endl;
      std::cerr << "  Cannot read " << filename << std::endl;
      std::cerr << "  Check your -i " << ipath << " option " << std::endl
                << "  or VISP_INPUT_IMAGE_PATH environment variable." << std::endl;
      exit(-1);
    }

    // Create a vpAROgreExample object with color background
    vpAROgreExample ogre(mcam, (unsigned int)grabber.getWidth(), (unsigned int)grabber.getHeight());
    // Initialize it
    ogre.init(IC);
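    // ogre.init(IC) is expected to open the Ogre render window sized to the video
    // frames and to use IC as the background; ogre.display(IC, cMo) in the loop
    // below then renders the virtual scene on top of each new frame with the
    // current estimated pose.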

    double t0 = vpTime::measureTimeMs();

    // Rendering loop
    while (ogre.continueRendering() && !grabber.end()) {
      // Acquire a frame
      grabber.acquire(IC);

      // Convert it to a grey level image for tracking purposes
      vpImageConvert::convert(IC, I);

      // clear the point list
      mPose.clearPoint();

      // track the dots
      for (int i = 0; i < 4; i++) {
        // track the point
        md[i].track(I, mcog[i]);
        md[i].setGrayLevelPrecision(0.90);
        // pixel -> meter conversion
        {
          double x = 0, y = 0;
          vpPixelMeterConversion::convertPoint(mcam, mcog[i], x, y);
          mP[i].set_x(x);
          mP[i].set_y(y);
        }

        // and add the point to the pose computation point list
        mPose.addPoint(mP[i]);
      }
      // the pose structure has been updated

      // the pose is now updated using the virtual visual servoing approach;
      // Dementhon or Lagrange is no longer necessary, the pose estimated at the
      // previous iteration is sufficient as initialization
      mPose.computePose(vpPose::VIRTUAL_VS, cMo);

      // Display with Ogre
      ogre.display(IC, cMo);

      // Compute and display the framerate
      double t1 = vpTime::measureTimeMs();
      std::cout << "\r> " << 1000 / (t1 - t0) << " fps";
      t0 = t1;
    }

    // Close the grabber
    grabber.close();

    return EXIT_SUCCESS;
  } catch (const vpException &e) {
    std::cout << "Caught a ViSP exception: " << e << std::endl;
    return EXIT_FAILURE;
  } catch (Ogre::Exception &e) {
    std::cout << "Caught an Ogre exception: " << e.getDescription() << std::endl;
    return EXIT_FAILURE;
  } catch (...) {
    std::cout << "Caught an exception" << std::endl;
    return EXIT_FAILURE;
  }
}
#else // VISP_HAVE_OGRE && VISP_HAVE_DISPLAY
int main()
{
#if (!(defined(VISP_HAVE_X11) || defined(VISP_HAVE_GTK) || defined(VISP_HAVE_GDI)))
  std::cout << "You do not have X11, or GTK, or GDI (Graphical Device Interface) functionalities to display images..." << std::endl;
  std::cout << "Tip if you are on a unix-like system:" << std::endl;
  std::cout << "- Install X11, configure ViSP again using cmake and rebuild this example" << std::endl;
  std::cout << "Tip if you are on a windows-like system:" << std::endl;
  std::cout << "- Install GDI, configure ViSP again using cmake and rebuild this example" << std::endl;
#else
  std::cout << "You do not have Ogre functionalities" << std::endl;
  std::cout << "Tip:" << std::endl;
  std::cout << "- Install Ogre3D, configure ViSP again using cmake and rebuild this example" << std::endl;
#endif
  return EXIT_SUCCESS;
}
#endif