/*M///////////////////////////////////////////////////////////////////////////////////////
//
//  IMPORTANT: READ BEFORE DOWNLOADING, COPYING, INSTALLING OR USING.
//
//  By downloading, copying, installing or using the software you agree to this license.
//  If you do not agree to this license, do not download, install,
//  copy or use the software.
//
//
//                          License Agreement
//                For Open Source Computer Vision Library
//
// Copyright (C) 2000-2008, Intel Corporation, all rights reserved.
// Copyright (C) 2009, Willow Garage Inc., all rights reserved.
// Copyright (C) 2013, OpenCV Foundation, all rights reserved.
// Third party copyrights are property of their respective owners.
//
// Redistribution and use in source and binary forms, with or without modification,
// are permitted provided that the following conditions are met:
//
//   * Redistribution's of source code must retain the above copyright notice,
//     this list of conditions and the following disclaimer.
//
//   * Redistribution's in binary form must reproduce the above copyright notice,
//     this list of conditions and the following disclaimer in the documentation
//     and/or other materials provided with the distribution.
//
//   * The name of the copyright holders may not be used to endorse or promote products
//     derived from this software without specific prior written permission.
//
// This software is provided by the copyright holders and contributors "as is" and
// any express or implied warranties, including, but not limited to, the implied
// warranties of merchantability and fitness for a particular purpose are disclaimed.
// In no event shall the Intel Corporation or contributors be liable for any direct,
// indirect, incidental, special, exemplary, or consequential damages
// (including, but not limited to, procurement of substitute goods or services;
// loss of use, data, or profits; or business interruption) however caused
// and on any theory of liability, whether in contract, strict liability,
// or tort (including negligence or otherwise) arising in any way out of
// the use of this software, even if advised of the possibility of such damage.
//
//M*/

#include "precomp.hpp"

#include "opencv2/core/utility.hpp"

#include <thread>
#include <mutex>
#include <condition_variable>

#if defined(DEBUG) || defined(_DEBUG)
#undef DEBUGLOGS
#define DEBUGLOGS 1
#endif

#ifndef DEBUGLOGS
#define DEBUGLOGS 0
#endif

#ifdef __ANDROID__
#include <android/log.h>
#define LOG_TAG "OBJECT_DETECTOR"
#define LOGD0(...) ((void)__android_log_print(ANDROID_LOG_DEBUG, LOG_TAG, __VA_ARGS__))
#define LOGI0(...) ((void)__android_log_print(ANDROID_LOG_INFO, LOG_TAG, __VA_ARGS__))
#define LOGW0(...) ((void)__android_log_print(ANDROID_LOG_WARN, LOG_TAG, __VA_ARGS__))
#define LOGE0(...) ((void)__android_log_print(ANDROID_LOG_ERROR, LOG_TAG, __VA_ARGS__))
#else

#include <stdio.h>

#define LOGD0(_str, ...) (printf(_str , ## __VA_ARGS__), printf("\n"), fflush(stdout))
#define LOGI0(_str, ...) (printf(_str , ## __VA_ARGS__), printf("\n"), fflush(stdout))
#define LOGW0(_str, ...) (printf(_str , ## __VA_ARGS__), printf("\n"), fflush(stdout))
#define LOGE0(_str, ...) (printf(_str , ## __VA_ARGS__), printf("\n"), fflush(stdout))
#endif //__ANDROID__

#if DEBUGLOGS
#define LOGD(_str, ...) LOGD0(_str , ## __VA_ARGS__)
#define LOGI(_str, ...) LOGI0(_str , ## __VA_ARGS__)
#define LOGW(_str, ...) LOGW0(_str , ## __VA_ARGS__)
#define LOGE(_str, ...) LOGE0(_str , ## __VA_ARGS__)
#else
#define LOGD(...)
#define LOGI(...)
#define LOGW(...)
#define LOGE(...)
#endif //DEBUGLOGS


using namespace cv;

static inline cv::Point2f centerRect(const cv::Rect& r)
{
    return cv::Point2f(r.x + ((float)r.width)/2, r.y + ((float)r.height)/2);
}

static inline cv::Rect scale_rect(const cv::Rect& r, float scale)
{
    cv::Point2f m = centerRect(r);
    float width  = r.width  * scale;
    float height = r.height * scale;
    int x = cvRound(m.x - width/2);
    int y = cvRound(m.y - height/2);

    return cv::Rect(x, y, cvRound(width), cvRound(height));
}

namespace cv
{
    void* workcycleObjectDetectorFunction(void* p);
}

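// SeparateDetectionWork runs the "big" detector on its own thread so that
// DetectionBasedTracker::process() never blocks on a full-frame detection.
// The protocol, as implemented below:
//   - stateThread is a small state machine (STOPPED -> WORKING_SLEEPING <->
//     WORKING_WITH_IMAGE -> STOPPING) guarded by mtx;
//   - objectDetectorRun wakes the worker when a new frame has been copied into
//     imageSeparateDetecting, while objectDetectorThreadStartStop is used for the
//     start/stop handshake between run()/stop() and the worker;
//   - results are published into resultDetect and flagged with
//     isObjectDetectingReady, to be picked up by communicateWithDetectingThread().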
class cv::DetectionBasedTracker::SeparateDetectionWork
{
    public:
        SeparateDetectionWork(cv::DetectionBasedTracker& _detectionBasedTracker, cv::Ptr<DetectionBasedTracker::IDetector> _detector,
                              const cv::DetectionBasedTracker::Parameters& params);
        virtual ~SeparateDetectionWork();
        bool communicateWithDetectingThread(const Mat& imageGray, std::vector<Rect>& rectsWhereRegions);
        bool run();
        void stop();
        void resetTracking();

        inline bool isWorking()
        {
            return (stateThread==STATE_THREAD_WORKING_SLEEPING) || (stateThread==STATE_THREAD_WORKING_WITH_IMAGE);
        }
        void setParameters(const cv::DetectionBasedTracker::Parameters& params)
        {
            std::unique_lock<std::mutex> mtx_lock(mtx);
            parameters = params;
        }

        inline void init()
        {
            std::unique_lock<std::mutex> mtx_lock(mtx);
            stateThread = STATE_THREAD_STOPPED;
            isObjectDetectingReady = false;
            shouldObjectDetectingResultsBeForgot = false;
            objectDetectorThreadStartStop.notify_one();
        }
    protected:

        DetectionBasedTracker& detectionBasedTracker;
        cv::Ptr<DetectionBasedTracker::IDetector> cascadeInThread;
        std::thread second_workthread;
        std::mutex mtx;
        std::condition_variable objectDetectorRun;
        std::condition_variable objectDetectorThreadStartStop;
        std::vector<cv::Rect> resultDetect;
        volatile bool isObjectDetectingReady;
        volatile bool shouldObjectDetectingResultsBeForgot;

        enum StateSeparatedThread {
            STATE_THREAD_STOPPED=0,
            STATE_THREAD_WORKING_SLEEPING,
            STATE_THREAD_WORKING_WITH_IMAGE,
            STATE_THREAD_WORKING,
            STATE_THREAD_STOPPING
        };
        volatile StateSeparatedThread stateThread;

        cv::Mat imageSeparateDetecting;

        void workcycleObjectDetector();
        friend void* workcycleObjectDetectorFunction(void* p);

        long long  timeWhenDetectingThreadStartedWork;
        cv::DetectionBasedTracker::Parameters parameters;
};

cv::DetectionBasedTracker::SeparateDetectionWork::SeparateDetectionWork(DetectionBasedTracker& _detectionBasedTracker, cv::Ptr<DetectionBasedTracker::IDetector> _detector,
                                                                        const cv::DetectionBasedTracker::Parameters& params)
    :detectionBasedTracker(_detectionBasedTracker),
    cascadeInThread(),
    isObjectDetectingReady(false),
    shouldObjectDetectingResultsBeForgot(false),
    stateThread(STATE_THREAD_STOPPED),
    timeWhenDetectingThreadStartedWork(-1),
    parameters(params)
{
    CV_Assert(_detector);

    cascadeInThread = _detector;
}

cv::DetectionBasedTracker::SeparateDetectionWork::~SeparateDetectionWork()
{
    if(stateThread!=STATE_THREAD_STOPPED) {
        LOGE("\n\n\nATTENTION!!! dangerous algorithm error: destructor DetectionBasedTracker::SeparateDetectionWork::~SeparateDetectionWork is called before stopping the workthread");
    }
    if (second_workthread.joinable()) { // join() on a never-started thread would throw
        second_workthread.join();
    }
}
bool cv::DetectionBasedTracker::SeparateDetectionWork::run()
{
    LOGD("DetectionBasedTracker::SeparateDetectionWork::run() --- start");
    std::unique_lock<std::mutex> mtx_lock(mtx);
    // unlocked when leaving scope
    if (stateThread != STATE_THREAD_STOPPED) {
        LOGE("DetectionBasedTracker::SeparateDetectionWork::run is called while the previous run is not stopped");
        return false;
    }
    stateThread=STATE_THREAD_WORKING_SLEEPING;
    second_workthread = std::thread(workcycleObjectDetectorFunction, (void*)this); //TODO: add attributes?
    objectDetectorThreadStartStop.wait(mtx_lock);
    LOGD("DetectionBasedTracker::SeparateDetectionWork::run --- end");
    return true;
}

#define CATCH_ALL_AND_LOG(_block)                                                           \
    try {                                                                                   \
        _block;                                                                             \
    }                                                                                       \
    catch(const cv::Exception& e) {                                                         \
        LOGE0("\n %s: ERROR: OpenCV Exception caught: \n'%s'\n\n", CV_Func, e.what());      \
    } catch(const std::exception& e) {                                                      \
        LOGE0("\n %s: ERROR: Exception caught: \n'%s'\n\n", CV_Func, e.what());             \
    } catch(...) {                                                                          \
        LOGE0("\n %s: ERROR: UNKNOWN Exception caught\n\n", CV_Func);                       \
    }

void* cv::workcycleObjectDetectorFunction(void* p)
{
    CATCH_ALL_AND_LOG({ ((cv::DetectionBasedTracker::SeparateDetectionWork*)p)->workcycleObjectDetector(); });
    try {
        ((cv::DetectionBasedTracker::SeparateDetectionWork*)p)->init();
    } catch(...) {
        LOGE0("DetectionBasedTracker: workcycleObjectDetectorFunction: ERROR concerning pointer, received as the function parameter");
    }
    return NULL;
}

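// Detection-thread work cycle: after the start handshake with run(), the loop
// sleeps on objectDetectorRun until communicateWithDetectingThread() has copied a
// fresh frame into imageSeparateDetecting, runs cascadeInThread->detect() on it,
// and publishes the rectangles into resultDetect (unless resetTracking() asked for
// the results to be forgotten via shouldObjectDetectingResultsBeForgot).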
void cv::DetectionBasedTracker::SeparateDetectionWork::workcycleObjectDetector()
{
    static double freq = getTickFrequency();
    LOGD("DetectionBasedTracker::SeparateDetectionWork::workcycleObjectDetector() --- start");
    std::vector<Rect> objects;

    CV_Assert(stateThread==STATE_THREAD_WORKING_SLEEPING);
    std::unique_lock<std::mutex> mtx_lock(mtx);
    {
        objectDetectorThreadStartStop.notify_one();
        LOGD("DetectionBasedTracker::SeparateDetectionWork::workcycleObjectDetector() --- before waiting");
        CV_Assert(stateThread==STATE_THREAD_WORKING_SLEEPING);
        objectDetectorRun.wait(mtx_lock);
        if (isWorking()) {
            stateThread=STATE_THREAD_WORKING_WITH_IMAGE;
        }
        LOGD("DetectionBasedTracker::SeparateDetectionWork::workcycleObjectDetector() --- after waiting");
    }
    mtx_lock.unlock();

    bool isFirstStep=true;

    isObjectDetectingReady=false;

    while(isWorking())
    {
        LOGD("DetectionBasedTracker::SeparateDetectionWork::workcycleObjectDetector() --- next step");

        if (! isFirstStep) {
            LOGD("DetectionBasedTracker::SeparateDetectionWork::workcycleObjectDetector() --- before waiting");
            CV_Assert(stateThread==STATE_THREAD_WORKING_SLEEPING);
            mtx_lock.lock();
            if (!isWorking()) {//it is a rare case, but may cause a crash
                LOGD("DetectionBasedTracker::SeparateDetectionWork::workcycleObjectDetector() --- go out from the workcycle from inner part of lock just before waiting");
                mtx_lock.unlock();
                break;
            }
            CV_Assert(stateThread==STATE_THREAD_WORKING_SLEEPING);
            objectDetectorRun.wait(mtx_lock);
            if (isWorking()) {
                stateThread=STATE_THREAD_WORKING_WITH_IMAGE;
            }
            mtx_lock.unlock();

            LOGD("DetectionBasedTracker::SeparateDetectionWork::workcycleObjectDetector() --- after waiting");
        } else {
            isFirstStep=false;
        }

        if (!isWorking()) {
            LOGD("DetectionBasedTracker::SeparateDetectionWork::workcycleObjectDetector() --- go out from the workcycle just after waiting");
            break;
        }


        if (imageSeparateDetecting.empty()) {
            LOGD("DetectionBasedTracker::SeparateDetectionWork::workcycleObjectDetector() --- imageSeparateDetecting is empty, continue");
            continue;
        }
        LOGD("DetectionBasedTracker::SeparateDetectionWork::workcycleObjectDetector() --- start handling imageSeparateDetecting, img.size=%dx%d, img.data=0x%p",
                imageSeparateDetecting.size().width, imageSeparateDetecting.size().height, (void*)imageSeparateDetecting.data);


        int64 t1_detect=getTickCount();

        cascadeInThread->detect(imageSeparateDetecting, objects);

        /*cascadeInThread.detectMultiScale( imageSeparateDetecting, objects,
                detectionBasedTracker.parameters.scaleFactor, detectionBasedTracker.parameters.minNeighbors, 0
                |CV_HAAR_SCALE_IMAGE
                ,
                min_objectSize,
                max_objectSize
                );
        */

        LOGD("DetectionBasedTracker::SeparateDetectionWork::workcycleObjectDetector() --- end handling imageSeparateDetecting");

        if (!isWorking()) {
            LOGD("DetectionBasedTracker::SeparateDetectionWork::workcycleObjectDetector() --- go out from the workcycle just after detecting");
            break;
        }

        int64 t2_detect = getTickCount();
        int64 dt_detect = t2_detect-t1_detect;
        double dt_detect_ms=((double)dt_detect)/freq * 1000.0;
        (void)(dt_detect_ms);

        LOGI("DetectionBasedTracker::SeparateDetectionWork::workcycleObjectDetector() --- objects num==%d, t_ms=%.4f", (int)objects.size(), dt_detect_ms);
        mtx_lock.lock();
        if (!shouldObjectDetectingResultsBeForgot) {
            resultDetect=objects;
            isObjectDetectingReady=true;
        } else { //shouldObjectDetectingResultsBeForgot==true
            resultDetect.clear();
            isObjectDetectingReady=false;
            shouldObjectDetectingResultsBeForgot=false;
        }
        if(isWorking()) {
            stateThread=STATE_THREAD_WORKING_SLEEPING;
        }
        mtx_lock.unlock();

        objects.clear();
    }// while(isWorking())

    LOGI("DetectionBasedTracker::SeparateDetectionWork::workcycleObjectDetector: Returning");
}

void cv::DetectionBasedTracker::SeparateDetectionWork::stop()
{
    //FIXME: TODO: should add quickStop functionality
    std::unique_lock<std::mutex> mtx_lock(mtx);
    if (!isWorking()) {
        mtx_lock.unlock();
        LOGE("DetectionBasedTracker::SeparateDetectionWork::stop is called but the detecting thread is not active");
        stateThread = STATE_THREAD_STOPPING;
        return;
    }
    stateThread=STATE_THREAD_STOPPING;
    LOGD("DetectionBasedTracker::SeparateDetectionWork::stop: before going to sleep to wait for the signal from the workthread");
    objectDetectorRun.notify_one();
    objectDetectorThreadStartStop.wait(mtx_lock);
    LOGD("DetectionBasedTracker::SeparateDetectionWork::stop: after receiving the signal from the workthread, stateThread=%d", (int)stateThread);
    mtx_lock.unlock();
}

void cv::DetectionBasedTracker::SeparateDetectionWork::resetTracking()
{
    LOGD("DetectionBasedTracker::SeparateDetectionWork::resetTracking");
    std::unique_lock<std::mutex> mtx_lock(mtx);

    if (stateThread == STATE_THREAD_WORKING_WITH_IMAGE) {
        LOGD("DetectionBasedTracker::SeparateDetectionWork::resetTracking: since workthread is detecting objects at the moment, we should make cascadeInThread stop detecting and forget the detecting results");
        shouldObjectDetectingResultsBeForgot=true;
        //cascadeInThread.setStopFlag();//FIXME: TODO: this feature also should be contributed to OpenCV
    } else {
        LOGD("DetectionBasedTracker::SeparateDetectionWork::resetTracking: since workthread is NOT detecting objects at the moment, we should NOT make any additional actions");
    }

    resultDetect.clear();
    isObjectDetectingReady=false;

    mtx_lock.unlock();
}

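// Main-thread side of the handshake: if the worker is sleeping, pick up any ready
// detection results, then (unless minDetectionPeriod has not yet elapsed since the
// last launch) copy the current frame into imageSeparateDetecting and wake the
// worker with objectDetectorRun.notify_one(). Returns true when fresh detector
// results were handed back to the caller in rectsWhereRegions.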
bool cv::DetectionBasedTracker::SeparateDetectionWork::communicateWithDetectingThread(const Mat& imageGray, std::vector<Rect>& rectsWhereRegions)
{
    static double freq = getTickFrequency();

    bool shouldCommunicateWithDetectingThread = (stateThread==STATE_THREAD_WORKING_SLEEPING);
    LOGD("DetectionBasedTracker::SeparateDetectionWork::communicateWithDetectingThread: shouldCommunicateWithDetectingThread=%d", (shouldCommunicateWithDetectingThread?1:0));

    if (!shouldCommunicateWithDetectingThread) {
        return false;
    }

    bool shouldHandleResult = false;

    std::unique_lock<std::mutex> mtx_lock(mtx);

    if (isObjectDetectingReady) {
        shouldHandleResult=true;
        rectsWhereRegions = resultDetect;
        isObjectDetectingReady=false;

        double lastBigDetectionDuration = 1000.0 * (((double)(getTickCount()  - timeWhenDetectingThreadStartedWork )) / freq);
        (void)(lastBigDetectionDuration);
        LOGD("DetectionBasedTracker::SeparateDetectionWork::communicateWithDetectingThread: lastBigDetectionDuration=%f ms", (double)lastBigDetectionDuration);
    }

    bool shouldSendNewDataToWorkThread = true;
    if (timeWhenDetectingThreadStartedWork > 0) {
        double time_from_previous_launch_in_ms=1000.0 * (((double)(getTickCount()  - timeWhenDetectingThreadStartedWork )) / freq); //the same formula as for lastBigDetectionDuration
        shouldSendNewDataToWorkThread = (time_from_previous_launch_in_ms >= detectionBasedTracker.parameters.minDetectionPeriod);
        LOGD("DetectionBasedTracker::SeparateDetectionWork::communicateWithDetectingThread: shouldSendNewDataToWorkThread was 1, now it is %d, since time_from_previous_launch_in_ms=%.2f, minDetectionPeriod=%d",
                (shouldSendNewDataToWorkThread?1:0), time_from_previous_launch_in_ms, detectionBasedTracker.parameters.minDetectionPeriod);
    }

    if (shouldSendNewDataToWorkThread) {

        imageSeparateDetecting.create(imageGray.size(), CV_8UC1);

        imageGray.copyTo(imageSeparateDetecting);//may change imageSeparateDetecting ptr. But should not.


        timeWhenDetectingThreadStartedWork = getTickCount() ;

        objectDetectorRun.notify_one();
    }

    mtx_lock.unlock();
    LOGD("DetectionBasedTracker::SeparateDetectionWork::communicateWithDetectingThread: result: shouldHandleResult=%d", (shouldHandleResult?1:0));

    return shouldHandleResult;
}

cv::DetectionBasedTracker::Parameters::Parameters()
{
    maxTrackLifetime = 5;
    minDetectionPeriod = 0;
}

cv::DetectionBasedTracker::InnerParameters::InnerParameters()
{
    numLastPositionsToTrack=4;
    numStepsToWaitBeforeFirstShow=6;
    numStepsToTrackWithoutDetectingIfObjectHasNotBeenShown=3;
    numStepsToShowWithoutDetecting=3;

    coeffTrackingWindowSize=2.0;
    coeffObjectSizeToTrack=0.85f;
    coeffObjectSpeedUsingInPrediction=0.8f;

}

cv::DetectionBasedTracker::DetectionBasedTracker(cv::Ptr<IDetector> mainDetector, cv::Ptr<IDetector> trackingDetector, const Parameters& params)
    :separateDetectionWork(),
    parameters(params),
    innerParameters(),
    numTrackedSteps(0),
    cascadeForTracking(trackingDetector)
{
    CV_Assert( (params.maxTrackLifetime >= 0)
//            && mainDetector
            && trackingDetector );

    if (mainDetector) {
        Ptr<SeparateDetectionWork> tmp(new SeparateDetectionWork(*this, mainDetector, params));
        separateDetectionWork.swap(tmp);
    }

    weightsPositionsSmoothing.push_back(1);
    weightsSizesSmoothing.push_back(0.5);
    weightsSizesSmoothing.push_back(0.3f);
    weightsSizesSmoothing.push_back(0.2f);
}

cv::DetectionBasedTracker::~DetectionBasedTracker()
{
}

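// Per-frame entry point. The pipeline, as implemented below: lazily start the
// detection thread, exchange data with it (hand over the new frame, pick up any
// finished full-frame detection), otherwise predict regions of interest from the
// last tracked positions, run the lightweight trackingDetector inside each region,
// and finally reconcile the fresh rectangles with the tracked objects.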
void DetectionBasedTracker::process(const Mat& imageGray)
{
    CV_INSTRUMENT_REGION();

    CV_Assert(imageGray.type()==CV_8UC1);

    if ( separateDetectionWork && !separateDetectionWork->isWorking() ) {
        separateDetectionWork->run();
    }

    static double freq = getTickFrequency();
    static long long time_when_last_call_started=getTickCount();

    {
        double delta_time_from_prev_call=1000.0 * (((double)(getTickCount()  - time_when_last_call_started)) / freq);
        (void)(delta_time_from_prev_call);
        LOGD("DetectionBasedTracker::process: time from the previous call is %f ms", (double)delta_time_from_prev_call);
        time_when_last_call_started=getTickCount();
    }

    Mat imageDetect=imageGray;

    std::vector<Rect> rectsWhereRegions;
    bool shouldHandleResult=false;
    if (separateDetectionWork) {
        shouldHandleResult = separateDetectionWork->communicateWithDetectingThread(imageGray, rectsWhereRegions);
    }

    if (shouldHandleResult) {
        LOGD("DetectionBasedTracker::process: rectsWhereRegions were taken from resultDetect");
    } else {
        LOGD("DetectionBasedTracker::process: rectsWhereRegions are built from the previous positions");
        for(size_t i = 0; i < trackedObjects.size(); i++) {
            size_t n = trackedObjects[i].lastPositions.size();
            CV_Assert(n > 0);

            Rect r = trackedObjects[i].lastPositions[n-1];
            if(r.empty()) {
                LOGE("DetectionBasedTracker::process: ERROR: ATTENTION: strange algorithm's behavior: trackedObjects[i].rect() is empty");
                continue;
            }

            //correction by speed of rectangle
            if (n > 1) {
                Point2f center = centerRect(r);
                Point2f center_prev = centerRect(trackedObjects[i].lastPositions[n-2]);
                Point2f shift = (center - center_prev) * innerParameters.coeffObjectSpeedUsingInPrediction;

                r.x += cvRound(shift.x);
                r.y += cvRound(shift.y);
            }


            rectsWhereRegions.push_back(r);
        }
    }
    LOGI("DetectionBasedTracker::process: tracked objects num==%d", (int)trackedObjects.size());

    std::vector<Rect> detectedObjectsInRegions;

    LOGD("DetectionBasedTracker::process: rectsWhereRegions.size()=%d", (int)rectsWhereRegions.size());
    for(size_t i=0; i < rectsWhereRegions.size(); i++) {
        Rect r = rectsWhereRegions[i];

        detectInRegion(imageDetect, r, detectedObjectsInRegions);
    }
    LOGD("DetectionBasedTracker::process: detectedObjectsInRegions.size()=%d", (int)detectedObjectsInRegions.size());

    updateTrackedObjects(detectedObjectsInRegions);
}

void cv::DetectionBasedTracker::getObjects(std::vector<cv::Rect>& result) const
{
    result.clear();

    for(size_t i=0; i < trackedObjects.size(); i++) {
        Rect r=calcTrackedObjectPositionToShow((int)i);
        if (r.empty()) {
            continue;
        }
        result.push_back(r);
        LOGD("DetectionBasedTracker::getObjects: found an object with SIZE %d x %d, rect={%d, %d, %d x %d}", r.width, r.height, r.x, r.y, r.width, r.height);
    }
}

void cv::DetectionBasedTracker::getObjects(std::vector<Object>& result) const
{
    result.clear();

    for(size_t i=0; i < trackedObjects.size(); i++) {
        Rect r=calcTrackedObjectPositionToShow((int)i);
        if (r.empty()) {
            continue;
        }
        result.push_back(Object(r, trackedObjects[i].id));
        LOGD("DetectionBasedTracker::getObjects: found an object with SIZE %d x %d, rect={%d, %d, %d x %d}", r.width, r.height, r.x, r.y, r.width, r.height);
    }
}

void cv::DetectionBasedTracker::getObjects(std::vector<ExtObject>& result) const
{
    result.clear();

    for(size_t i=0; i < trackedObjects.size(); i++) {
        ObjectStatus status;
        Rect r=calcTrackedObjectPositionToShow((int)i, status);
        result.push_back(ExtObject(trackedObjects[i].id, r, status));
        LOGD("DetectionBasedTracker::getObjects: found an object with SIZE %d x %d, rect={%d, %d, %d x %d}, status = %d", r.width, r.height, r.x, r.y, r.width, r.height, (int)status);
    }
}

bool cv::DetectionBasedTracker::run()
{
    if (separateDetectionWork) {
        return separateDetectionWork->run();
    }
    return false;
}

void cv::DetectionBasedTracker::stop()
{
    if (separateDetectionWork) {
        separateDetectionWork->stop();
    }
}

void cv::DetectionBasedTracker::resetTracking()
{
    if (separateDetectionWork) {
        separateDetectionWork->resetTracking();
    }
    trackedObjects.clear();
}

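// Greedy data association between existing tracks and freshly detected rectangles.
// Each detection starts as NEW_RECTANGLE; for every track, the detection with the
// largest intersection with the track's previous position becomes its match
// (correspondence[j] = track index), while other detections overlapping that track
// or its matched detection are flagged INTERSECTED_RECTANGLE and ignored. Matched
// detections extend their tracks, NEW_RECTANGLE detections start new tracks, and
// tracks that have gone undetected for too long are erased at the end.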
void cv::DetectionBasedTracker::updateTrackedObjects(const std::vector<Rect>& detectedObjects)
{
    enum {
        NEW_RECTANGLE=-1,
        INTERSECTED_RECTANGLE=-2
    };

    int N1=(int)trackedObjects.size();
    int N2=(int)detectedObjects.size();
    LOGD("DetectionBasedTracker::updateTrackedObjects: N1=%d, N2=%d", N1, N2);

    for(int i=0; i < N1; i++) {
        trackedObjects[i].numDetectedFrames++;
    }

    std::vector<int> correspondence(detectedObjects.size(), NEW_RECTANGLE);

    for(int i=0; i < N1; i++) {
        LOGD("DetectionBasedTracker::updateTrackedObjects: i=%d", i);
        TrackedObject& curObject=trackedObjects[i];

        int bestIndex=-1;
        int bestArea=-1;

        int numpositions=(int)curObject.lastPositions.size();
        CV_Assert(numpositions > 0);
        Rect prevRect=curObject.lastPositions[numpositions-1];
        LOGD("DetectionBasedTracker::updateTrackedObjects: prevRect[%d]={%d, %d, %d x %d}", i, prevRect.x, prevRect.y, prevRect.width, prevRect.height);

        for(int j=0; j < N2; j++) {
            LOGD("DetectionBasedTracker::updateTrackedObjects: j=%d", j);
            if (correspondence[j] >= 0) {
                LOGD("DetectionBasedTracker::updateTrackedObjects: j=%d is rejected, because it has correspondence=%d", j, correspondence[j]);
                continue;
            }
            if (correspondence[j] !=NEW_RECTANGLE) {
                LOGD("DetectionBasedTracker::updateTrackedObjects: j=%d is rejected, because it is intersected with another rectangle", j);
                continue;
            }
            LOGD("DetectionBasedTracker::updateTrackedObjects: detectedObjects[%d]={%d, %d, %d x %d}",
                    j, detectedObjects[j].x, detectedObjects[j].y, detectedObjects[j].width, detectedObjects[j].height);

            Rect r=prevRect & detectedObjects[j];
            if ( (r.width > 0) && (r.height > 0) ) {
                LOGD("DetectionBasedTracker::updateTrackedObjects: There is intersection between prevRect and detectedRect, r={%d, %d, %d x %d}",
                        r.x, r.y, r.width, r.height);
                correspondence[j]=INTERSECTED_RECTANGLE;

                if ( r.area() > bestArea) {
                    LOGD("DetectionBasedTracker::updateTrackedObjects: The area of intersection is %d, it is better than bestArea=%d", r.area(), bestArea);
                    bestIndex=j;
                    bestArea=r.area();
                }
            }
        }
        if (bestIndex >= 0) {
            LOGD("DetectionBasedTracker::updateTrackedObjects: The best correspondence for i=%d is j=%d", i, bestIndex);
            correspondence[bestIndex]=i;

            for(int j=0; j < N2; j++) {
                if (correspondence[j] >= 0)
                    continue;

                Rect r=detectedObjects[j] & detectedObjects[bestIndex];
                if ( (r.width > 0) && (r.height > 0) ) {
                    LOGD("DetectionBasedTracker::updateTrackedObjects: Found intersection between "
                            "rectangles j=%d and bestIndex=%d, rectangle j=%d is marked as intersected", j, bestIndex, j);
                    correspondence[j]=INTERSECTED_RECTANGLE;
                }
            }
        } else {
            LOGD("DetectionBasedTracker::updateTrackedObjects: There is no correspondence for i=%d ", i);
            curObject.numFramesNotDetected++;
        }
    }

    LOGD("DetectionBasedTracker::updateTrackedObjects: start second cycle");
    for(int j=0; j < N2; j++) {
        LOGD("DetectionBasedTracker::updateTrackedObjects: j=%d", j);
        int i=correspondence[j];
        if (i >= 0) {//add position
            LOGD("DetectionBasedTracker::updateTrackedObjects: add position");
            trackedObjects[i].lastPositions.push_back(detectedObjects[j]);
            while ((int)trackedObjects[i].lastPositions.size() > (int) innerParameters.numLastPositionsToTrack) {
                trackedObjects[i].lastPositions.erase(trackedObjects[i].lastPositions.begin());
            }
            trackedObjects[i].numFramesNotDetected=0;
        } else if (i==NEW_RECTANGLE){ //new object
            LOGD("DetectionBasedTracker::updateTrackedObjects: new object");
            trackedObjects.push_back(detectedObjects[j]);
        } else {
            LOGD("DetectionBasedTracker::updateTrackedObjects: was auxiliary intersection");
        }
    }

    std::vector<TrackedObject>::iterator it=trackedObjects.begin();
    while( it != trackedObjects.end() ) {
        if ( (it->numFramesNotDetected > parameters.maxTrackLifetime)
                ||
                (
                 (it->numDetectedFrames <= innerParameters.numStepsToWaitBeforeFirstShow)
                 &&
                 (it->numFramesNotDetected > innerParameters.numStepsToTrackWithoutDetectingIfObjectHasNotBeenShown)
                )
           )
        {
            int numpos=(int)it->lastPositions.size();
            CV_Assert(numpos > 0);
            Rect r = it->lastPositions[numpos-1];
            (void)(r);
            LOGD("DetectionBasedTracker::updateTrackedObjects: deleted object {%d, %d, %d x %d}",
                    r.x, r.y, r.width, r.height);
            it=trackedObjects.erase(it);
        } else {
            it++;
        }
    }
}

int cv::DetectionBasedTracker::addObject(const Rect& location)
{
    LOGD("DetectionBasedTracker::addObject: new object {%d, %d %dx%d}",location.x, location.y, location.width, location.height);
    trackedObjects.push_back(TrackedObject(location));
    int newId = trackedObjects.back().id;
    LOGD("DetectionBasedTracker::addObject: newId = %d", newId);
    return newId;
}

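// The rectangle reported for track i is a smoothed estimate: width/height are a
// weighted average of the last few positions (weightsSizesSmoothing), the center is
// a weighted average of the last few rectangle centers (weightsPositionsSmoothing),
// and the track may be suppressed entirely (empty Rect) while it is still too young
// to show or temporarily lost.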
Rect cv::DetectionBasedTracker::calcTrackedObjectPositionToShow(int i) const
{
    ObjectStatus status;
    return calcTrackedObjectPositionToShow(i, status);
}

Rect cv::DetectionBasedTracker::calcTrackedObjectPositionToShow(int i, ObjectStatus& status) const
{
    if ( (i < 0) || (i >= (int)trackedObjects.size()) ) {
        LOGE("DetectionBasedTracker::calcTrackedObjectPositionToShow: ERROR: wrong i=%d", i);
        status = WRONG_OBJECT;
        return Rect();
    }
    if (trackedObjects[i].numDetectedFrames <= innerParameters.numStepsToWaitBeforeFirstShow){
        LOGI("DetectionBasedTracker::calcTrackedObjectPositionToShow: trackedObjects[%d].numDetectedFrames=%d <= numStepsToWaitBeforeFirstShow=%d --- return empty Rect()",
                i, trackedObjects[i].numDetectedFrames, innerParameters.numStepsToWaitBeforeFirstShow);
        status = DETECTED_NOT_SHOWN_YET;
        return Rect();
    }
    if (trackedObjects[i].numFramesNotDetected > innerParameters.numStepsToShowWithoutDetecting) {
        status = DETECTED_TEMPORARY_LOST;
        return Rect();
    }

    const TrackedObject::PositionsVector& lastPositions=trackedObjects[i].lastPositions;

    int N=(int)lastPositions.size();
    if (N<=0) {
        LOGE("DetectionBasedTracker::calcTrackedObjectPositionToShow: ERROR: no positions for i=%d", i);
        status = WRONG_OBJECT;
        return Rect();
    }

    int Nsize=std::min(N, (int)weightsSizesSmoothing.size());
    int Ncenter= std::min(N, (int)weightsPositionsSmoothing.size());

    Point2f center;
    double w=0, h=0;
    if (Nsize > 0) {
        double sum=0;
        for(int j=0; j < Nsize; j++) {
            int k=N-j-1;
            w += lastPositions[k].width  * weightsSizesSmoothing[j];
            h += lastPositions[k].height * weightsSizesSmoothing[j];
            sum+=weightsSizesSmoothing[j];
        }
        w /= sum;
        h /= sum;
    } else {
        w=lastPositions[N-1].width;
        h=lastPositions[N-1].height;
    }

    if (Ncenter > 0) {
        double sum=0;
        for(int j=0; j < Ncenter; j++) {
            int k=N-j-1;
            // center of the k-th rectangle, accumulated with its smoothing weight
            Point2f c = (Point2f(lastPositions[k].tl()) + Point2f(lastPositions[k].br())) * 0.5f;
            center = center + (c * weightsPositionsSmoothing[j]);
            sum += weightsPositionsSmoothing[j];
        }
        center *= (float)(1.0 / sum);
    } else {
        center = (Point2f(lastPositions[N-1].tl()) + Point2f(lastPositions[N-1].br())) * 0.5f;
    }
    Point2f tl=center-Point2f((float)w*0.5f,(float)h*0.5f);
    Rect res(cvRound(tl.x), cvRound(tl.y), cvRound(w), cvRound(h));
    LOGD("DetectionBasedTracker::calcTrackedObjectPositionToShow: Result for i=%d: {%d, %d, %d x %d}", i, res.x, res.y, res.width, res.height);

    status = DETECTED;
    return res;
}

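// Run the lightweight trackingDetector only inside a window around the predicted
// position: the window is the previous rectangle scaled by coeffTrackingWindowSize
// and clipped to the image, and the detector's minimum object size is set to
// coeffObjectSizeToTrack times the previous object size. Hits are translated back
// into full-image coordinates before being appended to detectedObjectsInRegions.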
void cv::DetectionBasedTracker::detectInRegion(const Mat& img, const Rect& r, std::vector<Rect>& detectedObjectsInRegions)
{
    Rect r0(Point(), img.size());
    Rect r1 = scale_rect(r, innerParameters.coeffTrackingWindowSize);
    r1 = r1 & r0;

    if ( (r1.width <=0) || (r1.height <= 0) ) {
        LOGD("DetectionBasedTracker::detectInRegion: Empty intersection");
        return;
    }

    int d = cvRound(std::min(r.width, r.height) * innerParameters.coeffObjectSizeToTrack);

    std::vector<Rect> tmpobjects;

    Mat img1(img, r1);//subimage for rectangle -- without data copying
    LOGD("DetectionBasedTracker::detectInRegion: img1.size()=%d x %d, d=%d",
            img1.size().width, img1.size().height, d);

    cascadeForTracking->setMinObjectSize(Size(d, d));
    cascadeForTracking->detect(img1, tmpobjects);
            /*
            detectMultiScale( img1, tmpobjects,
            parameters.scaleFactor, parameters.minNeighbors, 0
            |CV_HAAR_FIND_BIGGEST_OBJECT
            |CV_HAAR_SCALE_IMAGE
            ,
            Size(d,d),
            max_objectSize
            );*/

    for(size_t i=0; i < tmpobjects.size(); i++) {
        Rect curres(tmpobjects[i].tl() + r1.tl(), tmpobjects[i].size());
        detectedObjectsInRegions.push_back(curres);
    }
}

bool cv::DetectionBasedTracker::setParameters(const Parameters& params)
{
    if ( params.maxTrackLifetime < 0 )
    {
        LOGE("DetectionBasedTracker::setParameters: ERROR: wrong parameters value");
        return false;
    }

    if (separateDetectionWork) {
        separateDetectionWork->setParameters(params);
    }
    parameters=params;
    return true;
}

const cv::DetectionBasedTracker::Parameters& DetectionBasedTracker::getParameters() const
{
    return parameters;
}

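/* A minimal usage sketch (illustration only, not part of this translation unit).
   CascadeDetectorAdapter below is a hypothetical IDetector implementation that
   wraps cv::CascadeClassifier; any class implementing IDetector::detect() works,
   and "path/to/cascade.xml" / the `capture` object are placeholders.

       cv::Ptr<cv::CascadeClassifier> mainCascade = cv::makePtr<cv::CascadeClassifier>("path/to/cascade.xml");
       cv::Ptr<cv::CascadeClassifier> trackCascade = cv::makePtr<cv::CascadeClassifier>("path/to/cascade.xml");
       cv::Ptr<DetectionBasedTracker::IDetector> mainDetector = cv::makePtr<CascadeDetectorAdapter>(mainCascade);
       cv::Ptr<DetectionBasedTracker::IDetector> trackingDetector = cv::makePtr<CascadeDetectorAdapter>(trackCascade);

       DetectionBasedTracker::Parameters params;
       DetectionBasedTracker tracker(mainDetector, trackingDetector, params);
       tracker.run();                      // start the background detection thread

       cv::Mat frame, gray;
       std::vector<cv::Rect> objects;
       while (capture.read(frame)) {       // capture is an opened cv::VideoCapture
           cv::cvtColor(frame, gray, cv::COLOR_BGR2GRAY);
           tracker.process(gray);          // expects a CV_8UC1 image
           tracker.getObjects(objects);
       }
       tracker.stop();
*/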