/*M///////////////////////////////////////////////////////////////////////////////////////
 //
 //  IMPORTANT: READ BEFORE DOWNLOADING, COPYING, INSTALLING OR USING.
 //
 //  By downloading, copying, installing or using the software you agree to this license.
 //  If you do not agree to this license, do not download, install,
 //  copy or use the software.
 //
 //
 //                           License Agreement
 //                For Open Source Computer Vision Library
 //
 // Copyright (C) 2014, OpenCV Foundation, all rights reserved.
 // Third party copyrights are property of their respective owners.
 //
 // Redistribution and use in source and binary forms, with or without modification,
 // are permitted provided that the following conditions are met:
 //
 //   * Redistribution's of source code must retain the above copyright notice,
 //     this list of conditions and the following disclaimer.
 //
 //   * Redistribution's in binary form must reproduce the above copyright notice,
 //     this list of conditions and the following disclaimer in the documentation
 //     and/or other materials provided with the distribution.
 //
 //   * The name of the copyright holders may not be used to endorse or promote products
 //     derived from this software without specific prior written permission.
 //
 // This software is provided by the copyright holders and contributors "as is" and
 // any express or implied warranties, including, but not limited to, the implied
 // warranties of merchantability and fitness for a particular purpose are disclaimed.
 // In no event shall the Intel Corporation or contributors be liable for any direct,
 // indirect, incidental, special, exemplary, or consequential damages
 // (including, but not limited to, procurement of substitute goods or services;
 // loss of use, data, or profits; or business interruption) however caused
 // and on any theory of liability, whether in contract, strict liability,
 // or tort (including negligence or otherwise) arising in any way out of
 // the use of this software, even if advised of the possibility of such damage.
 //
 //M*/

#include <limits>
#include "precomp.hpp"

#define thetaA_VAL 200
#define thetaL_VAL 250
#define epslonGeneric 20
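
// thetaA_VAL: default threshold on the candidate-background efficacy counter CA above which a candidate
//             value may be promoted into the background model (see templateReplacement).
// thetaL_VAL: default threshold on the efficacy counters used to decide the T0/T1 swap in templateOrdering.
// epslonGeneric: initial value of the per-pixel decision threshold (epslon) used to match pixels against templates.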

namespace cv
{
namespace saliency
{

void MotionSaliencyBinWangApr2014::setImagesize( int W, int H )
{
  imageWidth = W;
  imageHeight = H;
}

MotionSaliencyBinWangApr2014::MotionSaliencyBinWangApr2014()
{
  N_DS = 2;  // Number of templates to be downsampled and used in the lowResolutionDetection function
  K = 3;  // Number of background model templates
  N = 4;   // NxN is the size of the block used for downsampling in lowResolutionDetection
  alpha = (float) 0.01;  // Learning rate
  L0 = 1000;  // Upper-bound value for C0 (efficacy of the first template of backgroundModel)
  L1 = 800;  // Upper-bound value for C1 (efficacy of the second template of backgroundModel)
  thetaL = thetaL_VAL;  // T0, T1 swap threshold
  thetaA = thetaA_VAL;
  gamma = 3;
  neighborhoodCheck = true;

  Ainc = 6;  // Activity increment
  Bmax = 80;  // Upper-bound value for pixel activity
  Bth = 20;  //70;  // Max activity threshold
  Binc = 15;  //50;
  Bdec = 5;  //20;  // Thresholds for pixel-level decision threshold (epslon) adaptation
  deltaINC = 20;
  deltaDEC = 0.125;  // Increment / decrement values for epslon adaptation
  epslonMIN = 18;
  epslonMAX = 80;

  className = "BinWangApr2014";
}

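// Allocate and reset the per-pixel data structures used by the algorithm: the decision-threshold map
// (epslonPixelsValue), the candidate background model (potentialBackground, channels = [BA, CA]),
// the K+1 background templates (channels = [B, C], with B initialized to NaN meaning "not yet filled"),
// the noise-pixel mask and the activity map.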
bool MotionSaliencyBinWangApr2014::init()
{
  activityControlFlag = false;
  Size imgSize( imageWidth, imageHeight );
  epslonPixelsValue = Mat( imgSize.height, imgSize.width, CV_32F, Scalar( epslonGeneric ) );
  potentialBackground = Mat( imgSize.height, imgSize.width, CV_8UC2, Scalar( 0, 0 ) );
  backgroundModel.resize( K + 1 );

  for ( int i = 0; i < K + 1; i++ )
  {
    Mat* tmpm = new Mat;
    tmpm->create( imgSize.height, imgSize.width, CV_32FC2 );
    tmpm->setTo( Scalar( std::numeric_limits<float>::quiet_NaN(), 0 ) );
    Ptr<Mat> tmp = Ptr<Mat>( tmpm );
    backgroundModel[i] = tmp;
  }

  noisePixelMask.create( imgSize.height, imgSize.width, CV_8U );
  noisePixelMask.setTo( Scalar( 0 ) );
  activityPixelsValue.create( imgSize.height, imgSize.width, CV_8U );
  activityPixelsValue.setTo( Scalar( 0 ) );

  return true;

}

MotionSaliencyBinWangApr2014::~MotionSaliencyBinWangApr2014()
{

}

// classification (and adaptation) functions
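
// Pixel-level (full resolution) background/foreground classification. Each pixel with activity below Bth
// is compared against the active background templates: on the first match (|I - B| < epslon) the pixel is
// marked as background, the matching template value B is updated with learning rate alpha and its efficacy C
// is incremented (bounded by L0 / L1 for the first two templates); the other active templates have C
// decremented. Pixels with activity >= Bth are treated as blinking noise and forced to background.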
bool MotionSaliencyBinWangApr2014::fullResolutionDetection( const Mat& image2, Mat& highResBFMask )
{
  Mat image = image2.clone();

  uchar currentPixelValue;
  float currentEpslonValue;
  bool backgFlag = false;

  // Initially, all pixels are considered foreground; they are then evaluated against the background model
  highResBFMask.create( image.rows, image.cols, CV_8U );
  highResBFMask.setTo( 1 );

  uchar* pImage;
  float* pEpslon;
  uchar* pMask;

  // Scan all pixels of the image
  for ( int i = 0; i < image.rows; i++ )
  {

    pImage = image.ptr<uchar>( i );
    pEpslon = epslonPixelsValue.ptr<float>( i );
    pMask = highResBFMask.ptr<uchar>( i );
    for ( int j = 0; j < image.cols; j++ )
    {
      /* Pixels with activity greater than Bth are eliminated from the detection result. In this way,
       continuously blinking noise-pixels are removed from the detection results,
       preventing the generation of false positives. */
      if( activityPixelsValue.at<uchar>( i, j ) < Bth )
      {
        backgFlag = false;
        currentPixelValue = pImage[j];
        currentEpslonValue = pEpslon[j];

        int counter = 0;
        for ( size_t z = 0; z < backgroundModel.size(); z++ )
        {

          counter += (int) backgroundModel[z]->ptr<Vec2f>( i )[j][1];
          if( counter != 0 )
            break;
        }

        if( counter != 0 )  // if at least the first template is activated / initialized
        {

          // scan the background model vector
          for ( size_t z = 0; z < backgroundModel.size(); z++ )
          {
            float* currentB;
            float* currentC;
            currentB = & ( backgroundModel[z]->ptr<Vec2f>( i )[j][0] );
            currentC = & ( backgroundModel[z]->ptr<Vec2f>( i )[j][1] );

            if( ( *currentC ) > 0 )  // the current template is active
            {
              // If there is a match with the current background template
              if( abs( currentPixelValue - ( *currentB ) ) < currentEpslonValue && !backgFlag )
              {
                // The corresponding pixel in the BF mask is set as background (0 value)
                pMask[j] = 0;
                if( ( *currentC < L0 && z == 0 ) || ( *currentC < L1 && z == 1 ) || ( z > 1 ) )
                {
                  *currentC += 1;  // increment the efficacy of this template
                }

                *currentB = ( ( 1 - alpha ) * ( *currentB ) ) + ( alpha * currentPixelValue );  // Update the template value
                backgFlag = true;
              }
              else
              {
                *currentC -= 1;  // decrement the efficacy of this template
              }

            }

          }  // end "for" cycle over the template vector

        }
        else
        {
          pMask[j] = 1;  // if the model of the current pixel is not yet initialized, mark the pixel as foreground
        }
      }
      else
      {
        pMask[j] = 0;
      }

    }
  }  // end "for" cycle over all image pixels

  return true;
}

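// Block-level (low resolution) classification. The image is scanned with NxN blocks and the block mean is
// compared against the per-channel mean of the first N_DS background templates over the same ROI; a match
// marks the whole block as background in lowResBFMask. If the first template is initialized for less than
// half of the pixels, the mask is left entirely as foreground and false is returned.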
bool MotionSaliencyBinWangApr2014::lowResolutionDetection( const Mat& image, Mat& lowResBFMask )
{
  std::vector<Mat> mv;
  split( *backgroundModel[0], mv );

  // if at least the first template is activated / initialized for the majority of pixels
  if( countNonZero( mv[1] ) > ( mv[1].cols * mv[1].rows ) / 2 )
  {
    float currentPixelValue;
    float currentEpslonValue;
    float currentB;
    float currentC;

    // Create a mask to select the ROI in the original image and in the background model, and compute the mean

    Rect roi( Point( 0, 0 ), Size( N, N ) );
    Scalar imageROImean;
    Scalar backGModelROImean;
    Mat currentModel;

    // Initially, all pixels are considered foreground; they are then evaluated against the background model
    lowResBFMask.create( image.rows, image.cols, CV_8U );
    lowResBFMask.setTo( 1 );

    // Scan all the ROIs of the original matrices
    for ( int i = 0; i < (int)ceil( (float) image.rows / N ); i++ )
    {
      if( ( roi.y + ( N - 1 ) ) <= ( image.rows - 1 ) )
      {
        // Reset the original ROI dimension
        roi = Rect( Point( roi.x, roi.y ), Size( N, N ) );
      }

      for ( int j = 0; j < (int)ceil( (float) image.cols / N ); j++ )
      {
        /* Pixels with activity greater than Bth are eliminated from the detection result. In this way,
         continuously blinking noise-pixels are removed from the detection results,
         preventing the generation of false positives. */
        if( activityPixelsValue.at<uchar>( i, j ) < Bth )
        {

          // Compute the mean of the image block and of the epslon matrix block based on the ROI
          Mat roiImage = image( roi );
          Mat roiEpslon = epslonPixelsValue( roi );
          currentPixelValue = (float) mean( roiImage ).val[0];
          currentEpslonValue = (float) mean( roiEpslon ).val[0];

          // scan the background model vector
          for ( int z = 0; z < N_DS; z++ )
          {
            // Select the current two-channel template matrix, select the ROI and compute the mean of each channel separately
            Mat roiTemplate = ( * ( backgroundModel[z] ) )( roi );
            Scalar templateMean = mean( roiTemplate );
            currentB = (float) templateMean[0];
            currentC = (float) templateMean[1];

            if( ( currentC ) > 0 )  // the current template is active
            {
              // If there is a match with the current background template
              if( abs( currentPixelValue - ( currentB ) ) < currentEpslonValue )
              {
                // The corresponding block in the BF mask is set as background (0 value)
                rectangle( lowResBFMask, roi, Scalar( 0 ), FILLED );
                break;
              }
            }
          }
          // Shift the ROI from left to right following the block dimension
          roi = roi + Point( N, 0 );
          if( ( roi.x + ( roi.width - 1 ) ) > ( image.cols - 1 ) && ( roi.y + ( N - 1 ) ) <= ( image.rows - 1 ) )
          {
            roi = Rect( Point( roi.x, roi.y ), Size( abs( ( image.cols - 1 ) - roi.x ) + 1, N ) );
          }
          else if( ( roi.x + ( roi.width - 1 ) ) > ( image.cols - 1 ) && ( roi.y + ( N - 1 ) ) > ( image.rows - 1 ) )
          {
            roi = Rect( Point( roi.x, roi.y ), Size( abs( ( image.cols - 1 ) - roi.x ) + 1, abs( ( image.rows - 1 ) - roi.y ) + 1 ) );
          }
        }
        else
        {
          // The corresponding block in the BF mask is set as background (0 value)
          rectangle( lowResBFMask, roi, Scalar( 0 ), FILLED );
        }
      }
      // Shift the ROI from top to bottom following the block dimension, also bringing it back to the beginning of the row
      roi.x = 0;
      roi.y += N;
      if( ( roi.y + ( roi.height - 1 ) ) > ( image.rows - 1 ) )
      {
        roi = Rect( Point( roi.x, roi.y ), Size( N, abs( ( image.rows - 1 ) - roi.y ) + 1 ) );
      }

    }
    return true;
  }
  else
  {
    lowResBFMask.create( image.rows, image.cols, CV_8U );
    lowResBFMask.setTo( 1 );
    return false;
  }

}

bool inline pairCompare( std::pair<float, float> t, std::pair<float, float> t_plusOne )
{

  return ( t.second > t_plusOne.second );

}

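// Keep the background templates sorted by decreasing efficacy C. Templates T1..Tk are ordered with a
// pixel-wise bubble sort; T0 and T1 are swapped only at pixels where C1 > thetaL > C0, and at those
// pixels the new C0 is set to gamma * thetaL.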
bool MotionSaliencyBinWangApr2014::templateOrdering()
{

  Mat dstMask, tempMat, dstMask2, dstMask3;
  Mat convertMat1, convertMat2;
  int backGroundModelSize = (int)backgroundModel.size();

  std::vector<std::vector<Mat> > channelSplit( backGroundModelSize );
  for ( int i = 0; i < backGroundModelSize; i++ )
  {
    split( *backgroundModel[i], channelSplit[i] );

  }

  // Bubble sort : templates T1 - Tk
  for ( int i = 1; i < backGroundModelSize - 1; i++ )
  {
    // compare and order the i-th template with the others
    for ( int j = i + 1; j < backGroundModelSize; j++ )
    {

      compare( channelSplit[j][1], channelSplit[i][1], dstMask, CMP_GT );

      channelSplit[i][0].copyTo( tempMat );
      channelSplit[j][0].copyTo( channelSplit[i][0], dstMask );
      tempMat.copyTo( channelSplit[j][0], dstMask );

      channelSplit[i][1].copyTo( tempMat );
      channelSplit[j][1].copyTo( channelSplit[i][1], dstMask );
      tempMat.copyTo( channelSplit[j][1], dstMask );
    }
  }

  // SORT templates T0 and T1
  Mat M_deltaL( backgroundModel[0]->rows, backgroundModel[0]->cols, CV_32F, Scalar( thetaL ) );

  compare( channelSplit[1][1], M_deltaL, dstMask2, CMP_GT );
  compare( M_deltaL, channelSplit[0][1], dstMask3, CMP_GT );

  threshold( dstMask2, dstMask2, 0, 1, THRESH_BINARY );
  threshold( dstMask3, dstMask3, 0, 1, THRESH_BINARY );

  bitwise_and( dstMask2, dstMask3, dstMask );

  // copy the correct B element of T1 into T0 and swap
  channelSplit[0][0].copyTo( tempMat );
  channelSplit[1][0].copyTo( channelSplit[0][0], dstMask );
  tempMat.copyTo( channelSplit[1][0], dstMask );

  // copy the correct C element of T0 into T1
  channelSplit[0][1].copyTo( channelSplit[1][1], dstMask );

  // set the new C0 values to gamma * thetaL
  M_deltaL *= gamma;
  M_deltaL.copyTo( channelSplit[0][1], dstMask );

  for ( int i = 0; i < backGroundModelSize; i++ )
  {
    merge( channelSplit[i], *backgroundModel[i] );
  }

  return true;
}

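// Maintain the candidate background pair (BA, CA) for pixels classified as foreground: BA is loaded and CA
// set to 1 when the candidate is empty, then CA is incremented or decremented according to the distance
// between the pixel value and BA. When CA exceeds thetaA the candidate is promoted into the last background
// template Tk; if neighborhoodCheck is enabled, the promotion only happens when BA already appears (within
// epslon) in the 3x3 neighborhood of one of the background templates. While the model is still mostly
// uninitialized, thetaA and thetaL are temporarily lowered and the neighborhood check is disabled.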
bool MotionSaliencyBinWangApr2014::templateReplacement( const Mat& finalBFMask, const Mat& image )
{
  std::vector<Mat> temp;
  split( *backgroundModel[0], temp );

  // if the first template is not yet activated / initialized for the majority of pixels
  if( countNonZero( temp[1] ) <= ( temp[1].cols * temp[1].rows ) / 2 )
  {
    thetaA = 50;
    thetaL = 150;
    /*    thetaA = 5;
     thetaL = 15;*/
    neighborhoodCheck = false;

  }
  else
  {
    thetaA = thetaA_VAL;
    thetaL = thetaL_VAL;
    neighborhoodCheck = true;
  }

  int roiSize = 3;  // FIXED ROI SIZE: do not change it without first adjusting the corresponding controls in the EVALUATION section below!
  int countNonZeroElements = 0;
  std::vector<Mat> mv;
  Mat replicateCurrentBAMat( roiSize, roiSize, CV_8U );
  Mat backgroundModelROI( roiSize, roiSize, CV_32F );
  Mat diffResult( roiSize, roiSize, CV_8U );

  // Scan all pixels of finalBFMask and all pixels of the other models (the dimensions are the same)
  const uchar* finalBFMaskP;
  Vec2b* pbgP;
  const uchar* imageP;
  float* epslonP;
  for ( int i = 0; i < finalBFMask.rows; i++ )
  {
    finalBFMaskP = finalBFMask.ptr<uchar>( i );
    pbgP = potentialBackground.ptr<Vec2b>( i );
    imageP = image.ptr<uchar>( i );
    epslonP = epslonPixelsValue.ptr<float>( i );
    for ( int j = 0; j < finalBFMask.cols; j++ )
    {
      /////////////////// MAINTENANCE of the potentialBackground model ///////////////////
      if( finalBFMaskP[j] == 1 )  // i.e. the corresponding frame pixel has been marked as foreground
      {
        /* For the pixels with CA = 0, if the current frame pixel has been classified as foreground, its value
         * is loaded into BA and CA is set to 1 */
        if( pbgP[j][1] == 0 )
        {
          pbgP[j][0] = imageP[j];
          pbgP[j][1] = 1;
        }

        /* Otherwise the distance between this pixel value and BA is calculated, and if this distance is smaller than
         the decision threshold epslon, CA is increased by 1, otherwise it is decreased by 1 */
        else if( abs( (float) imageP[j] - pbgP[j][0] ) < epslonP[j] )
        {
          pbgP[j][1] += 1;
        }
        else
        {
          pbgP[j][1] -= 1;
        }
        /////////////////// END of potentialBackground model MAINTENANCE ///////////////////
        /////////////////// EVALUATION of potentialBackground values ///////////////////
        if( pbgP[j][1] > thetaA )
        {
          if( neighborhoodCheck )
          {
            // replicate the current BA value
            replicateCurrentBAMat.setTo( pbgP[j][0] );

            for ( size_t z = 0; z < backgroundModel.size(); z++ )
            {
              // Neighborhood of the current pixel in the current background model template.
              // The ROI is centered on the pixel coordinates

              if( i > 0 && j > 0 && i < ( backgroundModel[z]->rows - 1 ) && j < ( backgroundModel[z]->cols - 1 ) )
              {
                split( *backgroundModel[z], mv );
                backgroundModelROI = mv[0]( Rect( j - (int) floor((float) roiSize / 2 ), i - (int) floor((float) roiSize / 2 ), roiSize, roiSize ) );
              }
              else if( i == 0 && j == 0 )  // upper left
              {
                split( *backgroundModel[z], mv );
                backgroundModelROI = mv[0]( Rect( j, i, (int) ceil((float) roiSize / 2 ), (int) ceil((float) roiSize / 2 ) ) );
              }
              else if( j == 0 && i > 0 && i < ( backgroundModel[z]->rows - 1 ) )  // middle left
              {
                split( *backgroundModel[z], mv );
                backgroundModelROI = mv[0]( Rect( j, i - (int) floor((float) roiSize / 2 ), (int) ceil((float) roiSize / 2 ), roiSize ) );
              }
              else if( i == ( backgroundModel[z]->rows - 1 ) && j == 0 )  // lower left
              {
                split( *backgroundModel[z], mv );
                backgroundModelROI = mv[0]( Rect( j, i - (int) floor((float) roiSize / 2 ), (int) ceil((float) roiSize / 2 ), (int) ceil((float) roiSize / 2 ) ) );
              }
              else if( i == 0 && j > 0 && j < ( backgroundModel[z]->cols - 1 ) )  // upper middle
              {
                split( *backgroundModel[z], mv );
                backgroundModelROI = mv[0]( Rect( ( j - (int) floor((float) roiSize / 2 ) ), i, roiSize, (int) ceil((float) roiSize / 2 ) ) );
              }
              else if( i == ( backgroundModel[z]->rows - 1 ) && j > 0 && j < ( backgroundModel[z]->cols - 1 ) )  // lower middle
              {
                split( *backgroundModel[z], mv );
                backgroundModelROI = mv[0](
                    Rect( j - (int) floor((float) roiSize / 2 ), i - (int) floor((float) roiSize / 2 ), roiSize, (int) ceil((float) roiSize / 2 ) ) );
              }
              else if( i == 0 && j == ( backgroundModel[z]->cols - 1 ) )  // upper right
              {
                split( *backgroundModel[z], mv );
                backgroundModelROI = mv[0]( Rect( j - (int) floor((float) roiSize / 2 ), i, (int) ceil((float) roiSize / 2 ), (int) ceil((float) roiSize / 2 ) ) );
              }
              else if( j == ( backgroundModel[z]->cols - 1 ) && i > 0 && i < ( backgroundModel[z]->rows - 1 ) )  // middle right
              {
                split( *backgroundModel[z], mv );
                backgroundModelROI = mv[0](
                    Rect( j - (int) floor((float) roiSize / 2 ), i - (int) floor((float) roiSize / 2 ), (int) ceil((float) roiSize / 2 ), roiSize ) );
              }
              else if( i == ( backgroundModel[z]->rows - 1 ) && j == ( backgroundModel[z]->cols - 1 ) )  // lower right
              {
                split( *backgroundModel[z], mv );
                backgroundModelROI = mv[0](
                    Rect( j - (int) floor((float) roiSize / 2 ), i - (int) floor((float) roiSize / 2 ), (int) ceil((float) roiSize / 2 ), (int) ceil((float) roiSize / 2 ) ) );
              }

              /* Check whether the value of the current pixel BA in the potentialBackground model is already contained
               * in at least one of its neighbors' background model
               */
              resize( replicateCurrentBAMat, replicateCurrentBAMat, Size( backgroundModelROI.cols, backgroundModelROI.rows ), 0, 0, INTER_LINEAR_EXACT );
              resize( diffResult, diffResult, Size( backgroundModelROI.cols, backgroundModelROI.rows ), 0, 0, INTER_LINEAR_EXACT );

              backgroundModelROI.convertTo( backgroundModelROI, CV_8U );

              absdiff( replicateCurrentBAMat, backgroundModelROI, diffResult );
              threshold( diffResult, diffResult, epslonP[j], 255, THRESH_BINARY_INV );
              countNonZeroElements = countNonZero( diffResult );

              if( countNonZeroElements > 0 )
              {
                /////////////////// REPLACEMENT of a backgroundModel template ///////////////////
                // replace the last template TK with the current candidate TA
                backgroundModel[backgroundModel.size() - 1]->at<Vec2f>( i, j ) = potentialBackground.at<Vec2b>( i, j );
                potentialBackground.at<Vec2b>( i, j )[0] = 0;
                potentialBackground.at<Vec2b>( i, j )[1] = 0;

                break;
              }
            }  // end for over backgroundModel size
          }
          else
          {
            backgroundModel[backgroundModel.size() - 1]->at<Vec2f>( i, j ) = potentialBackground.at<Vec2b>( i, j );
            potentialBackground.at<Vec2b>( i, j )[0] = 0;
            potentialBackground.at<Vec2b>( i, j )[1] = 0;
          }
        }  // close if of EVALUATION
      }  // end of if( finalBFMask.at<uchar>( i, j ) == 1 ), i.e. the corresponding frame pixel has been marked as foreground

    }  // end of second for
  }  // end of first for

  return true;
}

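// Update the per-pixel activity map from the blinking-pixel statistics: pixels that were classified as
// noise at frame n-1 but not at frame n have their activity increased by Ainc (up to Bmax), all other
// pixels are decremented by 1 (down to 0). The current noise mask is then stored for the next frame.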
bool MotionSaliencyBinWangApr2014::activityControl( const Mat& current_noisePixelsMask )
{
  Mat discordanceFramesNoise, not_current_noisePixelsMask;
  Mat nonZeroIndexes, not_discordanceFramesNoise;

  // Derive the discrepancy between the noise at frame n-1 and at frame n
  threshold( current_noisePixelsMask, not_current_noisePixelsMask, 0.5, 1.0, THRESH_BINARY_INV );
  bitwise_and( noisePixelMask, not_current_noisePixelsMask, discordanceFramesNoise );

  // indices at which the pixel was classified as noise at frame n-1 but not at frame n (blinking pixels)
  findNonZero( discordanceFramesNoise, nonZeroIndexes );

  Vec2i temp;

  // increase the activity value of these pixels
  for ( int i = 0; i < nonZeroIndexes.rows; i++ )
  {
    // nonZeroIndexes stores (x, y) points: val[0] is the column, val[1] the row
    temp = nonZeroIndexes.at<Vec2i>( i );
    if( activityPixelsValue.at<uchar>( temp.val[1], temp.val[0] ) < Bmax )
    {
      activityPixelsValue.at<uchar>( temp.val[1], temp.val[0] ) += Ainc;
    }
  }

  // decrement the other pixels, i.e. those that have not changed (not blinking)
  threshold( discordanceFramesNoise, not_discordanceFramesNoise, 0.5, 1.0, THRESH_BINARY_INV );
  findNonZero( not_discordanceFramesNoise, nonZeroIndexes );

  Vec2i temp2;

  for ( int j = 0; j < nonZeroIndexes.rows; j++ )
  {
    temp2 = nonZeroIndexes.at<Vec2i>( j );
    if( activityPixelsValue.at<uchar>( temp2.val[1], temp2.val[0] ) > 0 )
    {
      activityPixelsValue.at<uchar>( temp2.val[1], temp2.val[0] ) -= 1;
    }
  }
  // update the noisePixelMask
  current_noisePixelsMask.copyTo( noisePixelMask );

  return true;
}

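// Adapt the per-pixel decision threshold epslon according to the activity map: high-activity pixels
// (activity > Binc) get their threshold increased by deltaINC (bounded by epslonMAX), while low-activity
// pixels (activity < Bdec) get it decreased by deltaDEC (bounded by epslonMIN).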
bool MotionSaliencyBinWangApr2014::decisionThresholdAdaptation()
{

  for ( int i = 0; i < activityPixelsValue.rows; i++ )
  {
    for ( int j = 0; j < activityPixelsValue.cols; j++ )
    {
      if( activityPixelsValue.at<uchar>( i, j ) > Binc && ( epslonPixelsValue.at<float>( i, j ) + deltaINC ) < epslonMAX )
      {

        epslonPixelsValue.at<float>( i, j ) += deltaINC;
      }
      else if( activityPixelsValue.at<uchar>( i, j ) < Bdec && ( epslonPixelsValue.at<float>( i, j ) - deltaDEC ) > epslonMIN )
      {
        epslonPixelsValue.at<float>( i, j ) -= deltaDEC;
      }
    }
  }

  return true;
}

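// Per-frame processing pipeline: run the full- and low-resolution detections, AND the two masks into the
// final saliency map, update the activity map and the decision thresholds from the blinking-pixel
// statistics (from the second frame on), then reorder the templates, run the candidate-background
// maintenance / replacement, and reorder again.
//
// A minimal usage sketch (assuming the create() factory declared for this class in the saliency module
// and the computeSaliency() entry point inherited from the Saliency base class):
//
//   Ptr<saliency::MotionSaliencyBinWangApr2014> s = saliency::MotionSaliencyBinWangApr2014::create();
//   s->setImagesize( grayFrame.cols, grayFrame.rows );
//   s->init();
//   Mat saliencyMap;
//   s->computeSaliency( grayFrame, saliencyMap );  // grayFrame must be a single-channel (CV_8U) image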
bool MotionSaliencyBinWangApr2014::computeSaliencyImpl( InputArray image, OutputArray saliencyMap )
{
  CV_Assert(image.channels() == 1);

  Mat highResBFMask, u_highResBFMask;
  Mat lowResBFMask, u_lowResBFMask;
  Mat not_lowResBFMask;
  Mat current_noisePixelsMask;

  fullResolutionDetection( image.getMat(), highResBFMask );
  lowResolutionDetection( image.getMat(), lowResBFMask );

  // Compute the final background-foreground mask. A pixel is marked as foreground if and only if it is
  // foreground in both masks (full and low resolution)
  bitwise_and( highResBFMask, lowResBFMask, saliencyMap );

  if( activityControlFlag )
  {

    // Detect the noise pixels (i.e. pixels for which fullRes(pixel) = foreground and lowRes(pixel) = background)
    threshold( lowResBFMask, not_lowResBFMask, 0.5, 1.0, THRESH_BINARY_INV );
    bitwise_and( highResBFMask, not_lowResBFMask, current_noisePixelsMask );

    activityControl( current_noisePixelsMask );
    decisionThresholdAdaptation();
  }

  templateOrdering();
  templateReplacement( saliencyMap.getMat(), image.getMat() );
  templateOrdering();

  activityControlFlag = true;
  return true;
}

}  // namespace saliency
}  // namespace cv