1 /****************************************************************************
2 *
3 * ViSP, open source Visual Servoing Platform software.
4 * Copyright (C) 2005 - 2019 by Inria. All rights reserved.
5 *
6 * This software is free software; you can redistribute it and/or modify
7 * it under the terms of the GNU General Public License as published by
8 * the Free Software Foundation; either version 2 of the License, or
9 * (at your option) any later version.
10 * See the file LICENSE.txt at the root directory of this source
11 * distribution for additional information about the GNU GPL.
12 *
13 * For using ViSP with software that can not be combined with the GNU
14 * GPL, please contact Inria about acquiring a ViSP Professional
15 * Edition License.
16 *
17 * See http://visp.inria.fr for more information.
18 *
19 * This software was developed at:
20 * Inria Rennes - Bretagne Atlantique
21 * Campus Universitaire de Beaulieu
22 * 35042 Rennes Cedex
23 * France
24 *
25 * If you have questions regarding the use of this file, please contact
26 * Inria at visp@inria.fr
27 *
28 * This file is provided AS IS with NO WARRANTY OF ANY KIND, INCLUDING THE
29 * WARRANTY OF DESIGN, MERCHANTABILITY AND FITNESS FOR A PARTICULAR PURPOSE.
30 *
31 * Description:
32 * Klt polygon, containing points of interest.
33 *
34 * Authors:
35 * Romain Tallonneau
36 * Aurelien Yol
37 *
38 *****************************************************************************/
39
40 #include <visp3/core/vpPolygon.h>
41 #include <visp3/mbt/vpMbtDistanceKltPoints.h>
42 #include <visp3/me/vpMeTracker.h>
43
44 #if defined(VISP_HAVE_MODULE_KLT) && (defined(VISP_HAVE_OPENCV) && (VISP_HAVE_OPENCV_VERSION >= 0x020100))
45
46 #if defined(VISP_HAVE_CLIPPER)
47 #include <clipper.hpp> // clipper private library
48 #endif
49
50 #if defined(__APPLE__) && defined(__MACH__) // Apple OSX and iOS (Darwin)
51 #include <TargetConditionals.h> // To detect OSX or IOS using TARGET_OS_IPHONE or TARGET_OS_IOS macro
52 #endif
53
54 /*!
55 Basic constructor.
56
57 */
// Default-construct the tracking state: homography H, plane normals and the
// point maps start empty. minNbPoint = 4 is the minimum number of KLT
// features required for the face to be considered usable (enoughPoints).
// polygon and hiddenface are borrowed pointers set later by the owner.
vpMbtDistanceKltPoints::vpMbtDistanceKltPoints()
  : H(), N(), N_cur(), invd0(1.), cRc0_0n(), initPoints(std::map<int, vpImagePoint>()),
    curPoints(std::map<int, vpImagePoint>()), curPointsInd(std::map<int, int>()), nbPointsCur(0), nbPointsInit(0),
    minNbPoint(4), enoughPoints(false), dt(1.), d0(1.), cam(), isTrackedKltPoints(true), polygon(NULL),
    hiddenface(NULL), useScanLine(false)
{
}
65
66 /*!
67 Basic destructor.
68
69 */
~vpMbtDistanceKltPoints()70 vpMbtDistanceKltPoints::~vpMbtDistanceKltPoints() {}
71
/*!
  Initialise the face to track. All the points detected in the image and
  stored in the tracker are parsed in order to extract the ids of the
  points that actually lie inside the face.

  \param _tracker : ViSP OpenCV KLT Tracker.
  \param mask : Mask image or NULL if not wanted. Mask values that are set to true are considered in the tracking. To disable a pixel, set false.
*/
void vpMbtDistanceKltPoints::init(const vpKltOpencv &_tracker, const vpImage<bool> *mask)
{
  // extract ids of the points in the face
  nbPointsInit = 0;
  nbPointsCur = 0;
  initPoints = std::map<int, vpImagePoint>();
  curPoints = std::map<int, vpImagePoint>();
  curPointsInd = std::map<int, int>();
  std::vector<vpImagePoint> roi;
  polygon->getRoiClipped(cam, roi);

  for (unsigned int i = 0; i < static_cast<unsigned int>(_tracker.getNbFeatures()); i++) {
    long id;
    float x_tmp, y_tmp;
    _tracker.getFeature((int)i, id, x_tmp, y_tmp);

    bool add = false;

    // Add points inside visibility mask only
    if (vpMeTracker::inMask(mask, (unsigned int) y_tmp, (unsigned int) x_tmp)) {
      if (useScanLine) {
        // With scan-line visibility: the point belongs to this face when the
        // scan-line renderer assigned this polygon's index to its pixel
        // (bounds-checked against the primitive-ID buffer size).
        if ((unsigned int)y_tmp < hiddenface->getMbScanLineRenderer().getPrimitiveIDs().getHeight() &&
            (unsigned int)x_tmp < hiddenface->getMbScanLineRenderer().getPrimitiveIDs().getWidth() &&
            hiddenface->getMbScanLineRenderer().getPrimitiveIDs()[(unsigned int)y_tmp][(unsigned int)x_tmp] ==
                polygon->getIndex())
          add = true;
      }
      else if (vpPolygon::isInside(roi, y_tmp, x_tmp)) {
        // Otherwise: simple point-in-polygon test against the clipped roi.
        add = true;
      }
    }

    if (add) {
      // On iOS the KLT id (long) is explicitly narrowed to int, the map key
      // type; elsewhere the implicit conversion is used.
#if TARGET_OS_IPHONE
      initPoints[(int)id] = vpImagePoint(y_tmp, x_tmp);
      curPoints[(int)id] = vpImagePoint(y_tmp, x_tmp);
      curPointsInd[(int)id] = (int)i;
#else
      initPoints[id] = vpImagePoint(y_tmp, x_tmp);
      curPoints[id] = vpImagePoint(y_tmp, x_tmp);
      curPointsInd[id] = (int)i;
#endif
    }
  }

  nbPointsInit = (unsigned int)initPoints.size();
  nbPointsCur = (unsigned int)curPoints.size();

  if (nbPointsCur >= minNbPoint)
    enoughPoints = true;
  else
    enoughPoints = false;

  // initialisation of the value for the computation in SE3:
  // plane supporting the face, built from its first three 3D points.
  vpPlane plan(polygon->getPoint(0), polygon->getPoint(1), polygon->getPoint(2));

  d0 = plan.getD();
  N = plan.getNormal();

  N.normalize();
  N_cur = N;
  invd0 = 1.0 / d0;
}
143
/*!
  Compute the number of points in this instantiation of the tracker that
  correspond to the points of the face.

  \param _tracker : the KLT tracker
  \return the number of points that are tracked in this face and in this
  instantiation of the tracker
  \param mask : Mask image or NULL if not wanted. Mask values that are set to true are considered in the tracking. To disable a pixel, set false.
*/
computeNbDetectedCurrent(const vpKltOpencv & _tracker,const vpImage<bool> * mask)153 unsigned int vpMbtDistanceKltPoints::computeNbDetectedCurrent(const vpKltOpencv &_tracker, const vpImage<bool> *mask)
154 {
155 long id;
156 float x, y;
157 nbPointsCur = 0;
158 curPoints = std::map<int, vpImagePoint>();
159 curPointsInd = std::map<int, int>();
160
161 for (unsigned int i = 0; i < static_cast<unsigned int>(_tracker.getNbFeatures()); i++) {
162 _tracker.getFeature((int)i, id, x, y);
163 if (isTrackedFeature((int)id) && vpMeTracker::inMask(mask, (unsigned int) y, (unsigned int) x)) {
164 #if TARGET_OS_IPHONE
165 curPoints[(int)id] = vpImagePoint(static_cast<double>(y), static_cast<double>(x));
166 curPointsInd[(int)id] = (int)i;
167 #else
168 curPoints[id] = vpImagePoint(static_cast<double>(y), static_cast<double>(x));
169 curPointsInd[id] = (int)i;
170 #endif
171 }
172 }
173
174 nbPointsCur = (unsigned int)curPoints.size();
175
176 if (nbPointsCur >= minNbPoint)
177 enoughPoints = true;
178 else
179 enoughPoints = false;
180
181 return nbPointsCur;
182 }
183
/*!
  Compute the interaction matrix and the residu vector for the face.
  The method assumes that these two objects are properly sized in order to be
  able to improve the speed with the use of SubCoVector and subMatrix.

  \warning The function preCompute must be called before this method.

  \param _R : the residu vector
  \param _J : the interaction matrix
*/
void vpMbtDistanceKltPoints::computeInteractionMatrixAndResidu(vpColVector &_R, vpMatrix &_J)
{
  unsigned int index_ = 0;

  // Each tracked point contributes two rows (x then y) to _J and _R.
  std::map<int, vpImagePoint>::const_iterator iter = curPoints.begin();
  for (; iter != curPoints.end(); ++iter) {
    int id(iter->first);
    double i_cur(iter->second.get_i()), j_cur(iter->second.get_j());

    // Current point converted to normalized (meter) coordinates.
    double x_cur(0), y_cur(0);
    vpPixelMeterConversion::convertPoint(cam, j_cur, i_cur, x_cur, y_cur);

    // Matching point in the reference (initial) image.
    vpImagePoint iP0 = initPoints[id];
    double x0(0), y0(0);
    vpPixelMeterConversion::convertPoint(cam, iP0, x0, y0);

    double x0_transform,
        y0_transform; // equivalent x and y in the first image (reference)
    // Transfer the reference point into the current image through the
    // homography H (set by computeHomography()).
    computeP_mu_t(x0, y0, x0_transform, y0_transform, H);

    double invZ = compute_1_over_Z(x_cur, y_cur);

    // Classical interaction matrix of an image point (x, y) with inverse
    // depth invZ: [-1/Z 0 x/Z xy -(1+x^2) y] and [0 -1/Z y/Z 1+y^2 -xy -x].
    _J[2 * index_][0] = -invZ;
    _J[2 * index_][1] = 0;
    _J[2 * index_][2] = x_cur * invZ;
    _J[2 * index_][3] = x_cur * y_cur;
    _J[2 * index_][4] = -(1 + x_cur * x_cur);
    _J[2 * index_][5] = y_cur;

    _J[2 * index_ + 1][0] = 0;
    _J[2 * index_ + 1][1] = -invZ;
    _J[2 * index_ + 1][2] = y_cur * invZ;
    _J[2 * index_ + 1][3] = (1 + y_cur * y_cur);
    _J[2 * index_ + 1][4] = -y_cur * x_cur;
    _J[2 * index_ + 1][5] = -x_cur;

    // Residual: transferred reference point minus current measurement.
    _R[2 * index_] = (x0_transform - x_cur);
    _R[2 * index_ + 1] = (y0_transform - y_cur);
    index_++;
  }
}
235
compute_1_over_Z(double x,double y)236 double vpMbtDistanceKltPoints::compute_1_over_Z(double x, double y)
237 {
238 double num = cRc0_0n[0] * x + cRc0_0n[1] * y + cRc0_0n[2];
239 double den = -(d0 - dt);
240 return num / den;
241 }
242
243 /*!
244 Compute the new coordinates of a point given by its \f$(x,y)\f$ coordinates
245 after the homography.
246
247 \f$ P_t = {}^{c_t}H_{c_0} . P_0 \f$
248
249 \param x_in : the x coordinates of the input point
250 \param y_in : the y coordinates of the input point
251 \param x_out : the x coordinates of the output point
252 \param y_out : the y coordinates of the output point
253 \param _cHc0 : the homography used to transfer the point
254 */
computeP_mu_t(double x_in,double y_in,double & x_out,double & y_out,const vpMatrix & _cHc0)255 inline void vpMbtDistanceKltPoints::computeP_mu_t(double x_in, double y_in, double &x_out, double &y_out,
256 const vpMatrix &_cHc0)
257 {
258 double p_mu_t_2 = x_in * _cHc0[2][0] + y_in * _cHc0[2][1] + _cHc0[2][2];
259
260 if (fabs(p_mu_t_2) < std::numeric_limits<double>::epsilon()) {
261 x_out = 0.0;
262 y_out = 0.0;
263 throw vpException(vpException::divideByZeroError, "the depth of the point is calculated to zero");
264 }
265
266 x_out = (x_in * _cHc0[0][0] + y_in * _cHc0[0][1] + _cHc0[0][2]) / p_mu_t_2;
267 y_out = (x_in * _cHc0[1][0] + y_in * _cHc0[1][1] + _cHc0[1][2]) / p_mu_t_2;
268 }
269
270 /*!
271 Compute the homography using a displacement matrix.
272
273 the homography is given by:
274
275 \f$ {}^cH_{c_0} = {}^cR_{c_0} + \frac{{}^cT_{c_0} . {}^tN}{d_0} \f$
276
277 Several internal variables are computed (dt, cRc0_0n).
278
279 \param _cTc0 : The displacement matrix of the camera between the initial
280 position of the camera and the current camera position
281 \param _cHc0 : The homography of the plane.
282 */
void vpMbtDistanceKltPoints::computeHomography(const vpHomogeneousMatrix &_cTc0, vpHomography &_cHc0)
{
  vpRotationMatrix cRc0;
  vpTranslationVector ctransc0;

  // Split the displacement into its rotation and translation parts.
  _cTc0.extract(cRc0);
  _cTc0.extract(ctransc0);
  vpMatrix cHc0 = _cHc0.convert();

  //   vpGEMM(cRc0, 1.0, invd0, cRc0, -1.0, _cHc0, VP_GEMM_A_T);
  // Plane-induced homography: cHc0 = 1.0 * cRc0 + (-invd0) * ctransc0 * N^T
  // (VP_GEMM_B_T transposes N), matching the documented formula up to the
  // sign convention of the plane equation.
  vpGEMM(ctransc0, N, -invd0, cRc0, 1.0, cHc0, VP_GEMM_B_T);
  cHc0 /= cHc0[2][2]; // normalize: a homography is defined up to scale

  H = cHc0;

  //   vpQuaternionVector NQuat(N[0], N[1], N[2], 0.0);
  //   vpQuaternionVector RotQuat(cRc0);
  //   vpQuaternionVector RotQuatConj(-RotQuat.x(), -RotQuat.y(),
  //   -RotQuat.z(), RotQuat.w()); vpQuaternionVector partial = RotQuat *
  //   NQuat; vpQuaternionVector resQuat = (partial * RotQuatConj);
  //
  //   cRc0_0n = vpColVector(3);
  //   cRc0_0n[0] = resQuat.x();
  //   cRc0_0n[1] = resQuat.y();
  //   cRc0_0n[2] = resQuat.z();

  // Reference plane normal expressed in the current camera frame.
  cRc0_0n = cRc0 * N;

  //   vpPlane p(corners[0], corners[1], corners[2]);
  //   vpColVector Ncur = p.getNormal();
  //   Ncur.normalize();
  N_cur = cRc0_0n;
  // dt: projection of the translation onto the current normal; consumed by
  // compute_1_over_Z() together with d0.
  dt = 0.0;
  for (unsigned int i = 0; i < 3; i += 1) {
    dt += ctransc0[i] * (N_cur[i]);
  }
}
320
/*!
  Test whether the feature with the identifier given in parameter is in the
  list of tracked features.

  \param _id : the id of the current feature to test
  \return true if the id is in the list of tracked features
*/
isTrackedFeature(int _id)328 bool vpMbtDistanceKltPoints::isTrackedFeature(int _id)
329 {
330 // std::map<int, vpImagePoint>::const_iterator iter = initPoints.begin();
331 // while(iter != initPoints.end()){
332 // if(iter->first == _id){
333 // return true;
334 // }
335 // iter++;
336 // }
337
338 std::map<int, vpImagePoint>::iterator iter = initPoints.find(_id);
339 if (iter != initPoints.end())
340 return true;
341
342 return false;
343 }
344
/*!
  Modification of all the pixels that are in the roi to the value of _nb (
  default is 255).

  \param mask : the mask to update (0, not in the object, _nb otherwise).
  \param nb : Optional value to set to the pixels included in the face.
  \param shiftBorder : Optional shift for the border in pixel (sort of
  built-in erosion) to avoid to consider pixels near the limits of the face.
*/
void vpMbtDistanceKltPoints::updateMask(
#if (VISP_HAVE_OPENCV_VERSION >= 0x020408)
    cv::Mat &mask,
#else
    IplImage *mask,
#endif
    unsigned char nb, unsigned int shiftBorder)
{
#if (VISP_HAVE_OPENCV_VERSION >= 0x020408)
  int width = mask.cols;
  int height = mask.rows;
#else
  int width = mask->width;
  int height = mask->height;
#endif

  int i_min, i_max, j_min, j_max;
  std::vector<vpImagePoint> roi;
  polygon->getRoiClipped(cam, roi);

  double shiftBorder_d = (double)shiftBorder;

#if defined(VISP_HAVE_CLIPPER)
  // With Clipper available, the "erosion" is implemented by offsetting the
  // roi polygon inwards by shiftBorder pixels (negative delta below).
  std::vector<vpImagePoint> roi_offset;

  ClipperLib::Path path;
  for (std::vector<vpImagePoint>::const_iterator it = roi.begin(); it != roi.end(); ++it) {
    path.push_back(ClipperLib::IntPoint((ClipperLib::cInt)it->get_u(), (ClipperLib::cInt)it->get_v()));
  }

  ClipperLib::Paths solution;
  ClipperLib::ClipperOffset co;
  co.AddPath(path, ClipperLib::jtRound, ClipperLib::etClosedPolygon);
  co.Execute(solution, -shiftBorder_d);

  // Keep biggest polygon by area (the offset can split the roi into several
  // disjoint polygons).
  if (!solution.empty()) {
    size_t index_max = 0;

    if (solution.size() > 1) {
      double max_area = 0;
      vpPolygon polygon_area;

      for (size_t i = 0; i < solution.size(); i++) {
        std::vector<vpImagePoint> corners;

        // Clipper stores (X, Y) = (u, v); vpImagePoint takes (i, j) = (v, u).
        for (size_t j = 0; j < solution[i].size(); j++) {
          corners.push_back(vpImagePoint((double)(solution[i][j].Y), (double)(solution[i][j].X)));
        }

        polygon_area.buildFrom(corners);
        if (polygon_area.getArea() > max_area) {
          max_area = polygon_area.getArea();
          index_max = i;
        }
      }
    }

    for (size_t i = 0; i < solution[index_max].size(); i++) {
      roi_offset.push_back(vpImagePoint((double)(solution[index_max][i].Y), (double)(solution[index_max][i].X)));
    }
  } else {
    // Offsetting removed everything: fall back to the original roi.
    roi_offset = roi;
  }

  vpPolygon polygon_test(roi_offset);
  vpImagePoint imPt;
#endif

#if defined(VISP_HAVE_CLIPPER)
  vpPolygon3D::getMinMaxRoi(roi_offset, i_min, i_max, j_min, j_max);
#else
  vpPolygon3D::getMinMaxRoi(roi, i_min, i_max, j_min, j_max);
#endif

  /* check image boundaries */
  // NOTE(review): the "underflow" tests assume a negative bound shows up as a
  // value larger than the image dimension — confirm getMinMaxRoi's behavior.
  if (i_min > height) { // underflow
    i_min = 0;
  }
  if (i_max > height) {
    i_max = height;
  }
  if (j_min > width) { // underflow
    j_min = 0;
  }
  if (j_max > width) {
    j_max = width;
  }

#if (VISP_HAVE_OPENCV_VERSION >= 0x020408)
  for (int i = i_min; i < i_max; i++) {
    double i_d = (double)i;

    for (int j = j_min; j < j_max; j++) {
      double j_d = (double)j;

#if defined(VISP_HAVE_CLIPPER)
      // Clipper branch: the erosion is already baked into polygon_test.
      imPt.set_ij(i_d, j_d);
      if (polygon_test.isInside(imPt)) {
        mask.ptr<uchar>(i)[j] = nb;
      }
#else
      // No Clipper: emulate the erosion by requiring the point and its four
      // diagonal neighbours at distance shiftBorder to all be inside the roi.
      if (shiftBorder != 0) {
        if (vpPolygon::isInside(roi, i_d, j_d) && vpPolygon::isInside(roi, i_d + shiftBorder_d, j_d + shiftBorder_d) &&
            vpPolygon::isInside(roi, i_d - shiftBorder_d, j_d + shiftBorder_d) &&
            vpPolygon::isInside(roi, i_d + shiftBorder_d, j_d - shiftBorder_d) &&
            vpPolygon::isInside(roi, i_d - shiftBorder_d, j_d - shiftBorder_d)) {
          mask.at<unsigned char>(i, j) = nb;
        }
      } else {
        if (vpPolygon::isInside(roi, i, j)) {
          mask.at<unsigned char>(i, j) = nb;
        }
      }
#endif
    }
  }
#else
  // Legacy IplImage path: walk the raw pixel buffer row by row.
  unsigned char *ptrData = (unsigned char *)mask->imageData + i_min * mask->widthStep + j_min;
  for (int i = i_min; i < i_max; i++) {
    double i_d = (double)i;
    for (int j = j_min; j < j_max; j++) {
      double j_d = (double)j;
      if (shiftBorder != 0) {
        if (vpPolygon::isInside(roi, i_d, j_d) && vpPolygon::isInside(roi, i_d + shiftBorder_d, j_d + shiftBorder_d) &&
            vpPolygon::isInside(roi, i_d - shiftBorder_d, j_d + shiftBorder_d) &&
            vpPolygon::isInside(roi, i_d + shiftBorder_d, j_d - shiftBorder_d) &&
            vpPolygon::isInside(roi, i_d - shiftBorder_d, j_d - shiftBorder_d)) {
          *(ptrData++) = nb;
        } else {
          ptrData++;
        }
      } else {
        if (vpPolygon::isInside(roi, i, j)) {
          *(ptrData++) = nb;
        } else {
          ptrData++;
        }
      }
    }
    // Jump to the start of the scanned window on the next row.
    ptrData += mask->widthStep - j_max + j_min;
  }
#endif
}
498
/*!
  This method removes the outliers. A point is considered as outlier when its
  associated weight is below a given threshold (threshold_outlier).

  \param _w : Vector containing the weight of all the tracked points.
  \param threshold_outlier : Threshold to specify whether or not a point has
  to be deleted.
*/
removeOutliers(const vpColVector & _w,const double & threshold_outlier)507 void vpMbtDistanceKltPoints::removeOutliers(const vpColVector &_w, const double &threshold_outlier)
508 {
509 std::map<int, vpImagePoint> tmp;
510 std::map<int, int> tmp2;
511 unsigned int nbSupp = 0;
512 unsigned int k = 0;
513
514 nbPointsCur = 0;
515 std::map<int, vpImagePoint>::const_iterator iter = curPoints.begin();
516 for (; iter != curPoints.end(); ++iter) {
517 if (_w[k] > threshold_outlier && _w[k + 1] > threshold_outlier) {
518 // if(_w[k] > threshold_outlier || _w[k+1] > threshold_outlier){
519 tmp[iter->first] = vpImagePoint(iter->second.get_i(), iter->second.get_j());
520 tmp2[iter->first] = curPointsInd[iter->first];
521 nbPointsCur++;
522 } else {
523 nbSupp++;
524 initPoints.erase(iter->first);
525 }
526
527 k += 2;
528 }
529
530 if (nbSupp != 0) {
531 curPoints = tmp;
532 curPointsInd = tmp2;
533 if (nbPointsCur >= minNbPoint)
534 enoughPoints = true;
535 else
536 enoughPoints = false;
537 }
538 }
539
540 /*!
541 Display the primitives tracked for the face.
542
543 \param _I : The image where to display.
544 */
displayPrimitive(const vpImage<unsigned char> & _I)545 void vpMbtDistanceKltPoints::displayPrimitive(const vpImage<unsigned char> &_I)
546 {
547 std::map<int, vpImagePoint>::const_iterator iter = curPoints.begin();
548 for (; iter != curPoints.end(); ++iter) {
549 int id(iter->first);
550 vpImagePoint iP;
551 iP.set_i(static_cast<double>(iter->second.get_i()));
552 iP.set_j(static_cast<double>(iter->second.get_j()));
553
554 vpDisplay::displayCross(_I, iP, 10, vpColor::red);
555
556 iP.set_i(vpMath::round(iP.get_i() + 7));
557 iP.set_j(vpMath::round(iP.get_j() + 7));
558 std::stringstream ss;
559 ss << id;
560 vpDisplay::displayText(_I, iP, ss.str(), vpColor::red);
561 }
562 }
563
564 /*!
565 Display the primitives tracked for the face.
566
567 \param _I : The image where to display.
568 */
displayPrimitive(const vpImage<vpRGBa> & _I)569 void vpMbtDistanceKltPoints::displayPrimitive(const vpImage<vpRGBa> &_I)
570 {
571 std::map<int, vpImagePoint>::const_iterator iter = curPoints.begin();
572 for (; iter != curPoints.end(); ++iter) {
573 int id(iter->first);
574 vpImagePoint iP;
575 iP.set_i(static_cast<double>(iter->second.get_i()));
576 iP.set_j(static_cast<double>(iter->second.get_j()));
577
578 vpDisplay::displayCross(_I, iP, 10, vpColor::red);
579
580 iP.set_i(vpMath::round(iP.get_i() + 7));
581 iP.set_j(vpMath::round(iP.get_j() + 7));
582 std::stringstream ss;
583 ss << id;
584 vpDisplay::displayText(_I, iP, ss.str(), vpColor::red);
585 }
586 }
587
display(const vpImage<unsigned char> & I,const vpHomogeneousMatrix &,const vpCameraParameters & camera,const vpColor & col,unsigned int thickness,bool displayFullModel)588 void vpMbtDistanceKltPoints::display(const vpImage<unsigned char> &I, const vpHomogeneousMatrix & /*cMo*/,
589 const vpCameraParameters &camera, const vpColor &col, unsigned int thickness,
590 bool displayFullModel)
591 {
592 std::vector<std::vector<double> > models = getModelForDisplay(camera, displayFullModel);
593
594 for (size_t i = 0; i < models.size(); i++) {
595 vpImagePoint ip1(models[i][1], models[i][2]);
596 vpImagePoint ip2(models[i][3], models[i][4]);
597
598 vpDisplay::displayLine(I, ip1, ip2, col, thickness);
599 }
600 }
601
display(const vpImage<vpRGBa> & I,const vpHomogeneousMatrix &,const vpCameraParameters & camera,const vpColor & col,unsigned int thickness,bool displayFullModel)602 void vpMbtDistanceKltPoints::display(const vpImage<vpRGBa> &I, const vpHomogeneousMatrix & /*cMo*/,
603 const vpCameraParameters &camera, const vpColor &col, unsigned int thickness,
604 bool displayFullModel)
605 {
606 std::vector<std::vector<double> > models = getModelForDisplay(camera, displayFullModel);
607
608 for (size_t i = 0; i < models.size(); i++) {
609 vpImagePoint ip1(models[i][1], models[i][2]);
610 vpImagePoint ip2(models[i][3], models[i][4]);
611
612 vpDisplay::displayLine(I, ip1, ip2, col, thickness);
613 }
614 }
615
616 /*!
617 Return a list of features parameters for display.
618 - Parameters are: `<feature id (here 1 for KLT)>`, `<pt.i()>`, `<pt.j()>`,
619 `<klt_id.i()>`, `<klt_id.j()>`, `<klt_id.id>`
620 */
getFeaturesForDisplay()621 std::vector<std::vector<double> > vpMbtDistanceKltPoints::getFeaturesForDisplay()
622 {
623 std::vector<std::vector<double> > features;
624
625 std::map<int, vpImagePoint>::const_iterator iter = curPoints.begin();
626 for (; iter != curPoints.end(); ++iter) {
627 int id(iter->first);
628 vpImagePoint iP;
629 iP.set_i(static_cast<double>(iter->second.get_i()));
630 iP.set_j(static_cast<double>(iter->second.get_j()));
631
632 vpImagePoint iP2;
633 iP2.set_i(vpMath::round(iP.get_i() + 7));
634 iP2.set_j(vpMath::round(iP.get_j() + 7));
635
636 #if (VISP_CXX_STANDARD >= VISP_CXX_STANDARD_11)
637 std::vector<double> params = {1, //KLT
638 iP.get_i(),
639 iP.get_j(),
640 iP2.get_i(),
641 iP2.get_j(),
642 static_cast<double>(id)};
643 #else
644 std::vector<double> params;
645 params.push_back(1); //KLT
646 params.push_back(iP.get_i());
647 params.push_back(iP.get_j());
648 params.push_back(iP2.get_i());
649 params.push_back(iP2.get_j());
650 params.push_back(static_cast<double>(id));
651 #endif
652 features.push_back(params);
653 }
654
655 return features;
656 }
657
/*!
  Return a list of line parameters to display the primitive at a given pose and camera parameters.
  Parameters are: <primitive id (here 0 for line)>, <pt_start.i()>, <pt_start.j()>
  <pt_end.i()>, <pt_end.j()>

  \param camera : The camera parameters.
  \param displayFullModel : If true, the line is displayed even if it is not
  visible.
*/
std::vector<std::vector<double> > vpMbtDistanceKltPoints::getModelForDisplay(const vpCameraParameters &camera,
                                                                             bool displayFullModel)
{
  std::vector<std::vector<double> > models;

  // Only build the model when the face is visible and tracked, unless the
  // caller asked for the full model.
  if ((polygon->isVisible() && isTrackedKltPoints) || displayFullModel) {
    std::vector<std::pair<vpPoint, unsigned int> > roi;
    polygon->getPolygonClipped(roi);

    for (unsigned int j = 0; j < roi.size(); j += 1) {
      // Skip the edge joining vertex j and j+1 when BOTH endpoints carry the
      // same clipping flag (near/far/down/up/left/right): the whole segment
      // was clipped away by that plane.
      if (((roi[(j + 1) % roi.size()].second & roi[j].second & vpPolygon3D::NEAR_CLIPPING) == 0) &&
          ((roi[(j + 1) % roi.size()].second & roi[j].second & vpPolygon3D::FAR_CLIPPING) == 0) &&
          ((roi[(j + 1) % roi.size()].second & roi[j].second & vpPolygon3D::DOWN_CLIPPING) == 0) &&
          ((roi[(j + 1) % roi.size()].second & roi[j].second & vpPolygon3D::UP_CLIPPING) == 0) &&
          ((roi[(j + 1) % roi.size()].second & roi[j].second & vpPolygon3D::LEFT_CLIPPING) == 0) &&
          ((roi[(j + 1) % roi.size()].second & roi[j].second & vpPolygon3D::RIGHT_CLIPPING) == 0)) {

        vpImagePoint ip1, ip2;
        std::vector<std::pair<vpPoint, vpPoint> > linesLst;

        // With scan-line visibility, the edge may be split into several
        // visible sub-segments; otherwise keep it as one segment.
        if (useScanLine && !displayFullModel)
          hiddenface->computeScanLineQuery(roi[j].first, roi[(j + 1) % roi.size()].first, linesLst, true);
        else
          linesLst.push_back(std::make_pair(roi[j].first, roi[(j + 1) % roi.size()].first));

        for (unsigned int i = 0; i < linesLst.size(); i++) {
          // Project both 3D endpoints and convert to pixel coordinates.
          linesLst[i].first.project();
          linesLst[i].second.project();
          vpMeterPixelConversion::convertPoint(camera, linesLst[i].first.get_x(), linesLst[i].first.get_y(), ip1);
          vpMeterPixelConversion::convertPoint(camera, linesLst[i].second.get_x(), linesLst[i].second.get_y(), ip2);

#if (VISP_CXX_STANDARD >= VISP_CXX_STANDARD_11)
          std::vector<double> params = {0, //0 for line parameters
                                        ip1.get_i(),
                                        ip1.get_j(),
                                        ip2.get_i(),
                                        ip2.get_j()};
#else
          std::vector<double> params;
          params.push_back(0); //0 for line parameters
          params.push_back(ip1.get_i());
          params.push_back(ip1.get_j());
          params.push_back(ip2.get_i());
          params.push_back(ip2.get_j());
#endif
          models.push_back(params);
        }
      }
    }
  }

  return models;
}
719
720 #elif !defined(VISP_BUILD_SHARED_LIBS)
// Work around to avoid warning: libvisp_mbt.a(vpMbtDistanceKltPoints.cpp.o)
// has no symbols — emit one dummy symbol when the KLT/OpenCV prerequisites
// are not available and the library is built statically.
void dummy_vpMbtDistanceKltPoints() { }
724 #endif
725