/*=========================================================================
 *
 *  Copyright Insight Software Consortium
 *
 *  Licensed under the Apache License, Version 2.0 (the "License");
 *  you may not use this file except in compliance with the License.
 *  You may obtain a copy of the License at
 *
 *         http://www.apache.org/licenses/LICENSE-2.0.txt
 *
 *  Unless required by applicable law or agreed to in writing, software
 *  distributed under the License is distributed on an "AS IS" BASIS,
 *  WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
 *  See the License for the specific language governing permissions and
 *  limitations under the License.
 *
 *=========================================================================*/

#include "itkRegularStepGradientDescentOptimizerv4.h"
#include "itkGradientDescentOptimizerv4.h"
#include "itkMath.h"
#include "itkTestingMacros.h"

/**
 *  The objective function is the quadratic form:
 *
 *  1/2 x^T A x - b^T x
 *
 *  where A is a matrix and b is a vector.
 *  The system in this example is:
 *
 *     | 3  2 ||x|   | 2|   |0|
 *     | 2  6 ||y| - |-8| = |0|
 *
 *  The solution is the vector | 2 -2 |.
 *
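 *  A quick check of the arithmetic: the gradient of the objective is
 *
 *  A x - b = ( 3x + 2y - 2, 2x + 6y + 8 )^T,
 *
 *  which vanishes at (x, y) = (2, -2), since 3*2 + 2*(-2) - 2 = 0 and
 *  2*2 + 6*(-2) + 8 = 0.
 *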
 * \class RSGv4TestMetric
 */
class RSGv4TestMetric : public itk::ObjectToObjectMetricBase
{
public:

  using Self = RSGv4TestMetric;
  using Superclass = itk::ObjectToObjectMetricBase;
  using Pointer = itk::SmartPointer<Self>;
  using ConstPointer = itk::SmartPointer<const Self>;
  itkNewMacro( Self );

  enum { SpaceDimension = 2 };

  using ParametersType = Superclass::ParametersType;
  using DerivativeType = Superclass::DerivativeType;
  using ParametersValueType = Superclass::ParametersValueType;
  using MeasureType = Superclass::MeasureType;

  RSGv4TestMetric()
  {
    m_Parameters.SetSize( SpaceDimension );
    m_Parameters.Fill( 0 );
  }

  void Initialize() override {}

  void GetDerivative( DerivativeType & derivative ) const override
  {
    MeasureType value;
    GetValueAndDerivative( value, derivative );
  }

  void GetValueAndDerivative( MeasureType & value,
                              DerivativeType & derivative ) const override
  {
    if( derivative.Size() != 2 )
      {
      derivative.SetSize( 2 );
      }

    double x = m_Parameters[0];
    double y = m_Parameters[1];

    std::cout << "GetValueAndDerivative( ";
    std::cout << x << " ";
    std::cout << y << ") = ";

    value = 0.5*(3*x*x+4*x*y+6*y*y) - 2*x + 8*y;

    std::cout << "value: " << value << std::endl;

    //
    // The optimizer simply takes the derivative from the metric
    // and adds it to the transform after scaling. So instead of
    // setting a 'minimize' option in the gradient, we return
    // a minimizing derivative.
    //
    derivative[0] = -( 3 * x + 2 * y - 2 );
    derivative[1] = -( 2 * x + 6 * y + 8 );

    std::cout << "derivative: " << derivative << std::endl;
  }

  MeasureType GetValue() const override
  {
    return 0.0;
  }

  void UpdateTransformParameters( const DerivativeType & update, ParametersValueType factor ) override
  {
    m_Parameters += update * factor;
  }

  unsigned int GetNumberOfParameters() const override
  {
    return SpaceDimension;
  }

  bool HasLocalSupport() const override
  {
    return false;
  }

  unsigned int GetNumberOfLocalParameters() const override
  {
    return SpaceDimension;
  }

  // These Set/Get methods are only needed for this test derivation that
  // isn't using a transform.
  void SetParameters( ParametersType & parameters ) override
  {
    m_Parameters = parameters;
  }

  const ParametersType & GetParameters() const override
  {
    return m_Parameters;
  }

private:
  ParametersType m_Parameters;
};
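
// A minimal sketch (not executed by this test) of how the optimizer drives
// the metric: query the value and derivative at the current parameters,
// then feed the scaled derivative back through UpdateTransformParameters.
//
//   RSGv4TestMetric::Pointer testMetric = RSGv4TestMetric::New();
//   RSGv4TestMetric::MeasureType value;
//   RSGv4TestMetric::DerivativeType derivative( 2 );
//   testMetric->GetValueAndDerivative( value, derivative );
//   testMetric->UpdateTransformParameters( derivative, 0.1 ); // p += 0.1 * d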

template< typename OptimizerType >
int RegularStepGradientDescentOptimizerv4TestHelper(
  itk::SizeValueType numberOfIterations,
  bool doEstimateLearningRateAtEachIteration,
  bool doEstimateLearningRateOnce,
  typename OptimizerType::InternalComputationValueType relaxationFactor,
  typename OptimizerType::InternalComputationValueType minimumStepLength,
  typename OptimizerType::InternalComputationValueType gradientMagnitudeTolerance,
  typename OptimizerType::MeasureType currentLearningRateRelaxation )
{
  using ScalesType = typename OptimizerType::ScalesType;

  typename OptimizerType::Pointer optimizer = OptimizerType::New();

  // Declaration of the metric
  RSGv4TestMetric::Pointer metric = RSGv4TestMetric::New();

  optimizer->SetMetric( metric );

  using ParametersType = RSGv4TestMetric::ParametersType;

  const unsigned int spaceDimension = metric->GetNumberOfParameters();

  // We start far from the solution | 2 -2 |
  ParametersType initialPosition( spaceDimension );
  initialPosition[0] =  100;
  initialPosition[1] = -100;
  metric->SetParameters( initialPosition );

  ScalesType parametersScale( spaceDimension );
  parametersScale[0] = 1.0;
  parametersScale[1] = 1.0;

  optimizer->SetScales( parametersScale );

  typename OptimizerType::InternalComputationValueType learningRate = 100;
  optimizer->SetLearningRate( learningRate );

  optimizer->SetNumberOfIterations( numberOfIterations );

  optimizer->SetDoEstimateLearningRateAtEachIteration( doEstimateLearningRateAtEachIteration );
  optimizer->SetDoEstimateLearningRateOnce( doEstimateLearningRateOnce );

  TEST_SET_GET_VALUE( doEstimateLearningRateAtEachIteration, optimizer->GetDoEstimateLearningRateAtEachIteration() );
  TEST_SET_GET_VALUE( doEstimateLearningRateOnce, optimizer->GetDoEstimateLearningRateOnce() );

  optimizer->SetRelaxationFactor( relaxationFactor );

  TEST_SET_GET_VALUE( relaxationFactor, optimizer->GetRelaxationFactor() );

  optimizer->SetMinimumStepLength( minimumStepLength );

  TEST_SET_GET_VALUE( minimumStepLength, optimizer->GetMinimumStepLength() );

  optimizer->SetGradientMagnitudeTolerance( gradientMagnitudeTolerance );

  TEST_SET_GET_VALUE( gradientMagnitudeTolerance,
    optimizer->GetGradientMagnitudeTolerance() );

  optimizer->SetCurrentLearningRateRelaxation( currentLearningRateRelaxation );

  TEST_SET_GET_VALUE( currentLearningRateRelaxation,
    optimizer->GetCurrentLearningRateRelaxation() );
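
  // Broadly, the regular-step strategy shortens the step by
  // 'relaxationFactor' whenever the direction of the gradient changes
  // abruptly, and stops once the step length falls below
  // 'minimumStepLength' or the gradient magnitude falls below
  // 'gradientMagnitudeTolerance'.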

  try
    {
    std::cout << "currentPosition before optimization: " << optimizer->GetMetric()->GetParameters() << std::endl;
    optimizer->StartOptimization();
    std::cout << "currentPosition after optimization: " << optimizer->GetMetric()->GetParameters() << std::endl;
    std::cout << "Stop Condition = " << optimizer->GetStopConditionDescription() << std::endl;
    }
  catch( itk::ExceptionObject & e )
    {
    std::cout << "Exception thrown!" << std::endl;
    std::cout << "An error occurred during optimization." << std::endl;
    std::cout << "Location    = " << e.GetLocation() << std::endl;
    std::cout << "Description = " << e.GetDescription() << std::endl;
    return EXIT_FAILURE;
    }

  if( numberOfIterations == 0 && optimizer->GetCurrentIteration() > 0 )
    {
    std::cerr << "The optimizer ran iterations despite the maximum ";
    std::cerr << "number of iterations being set to zero." << std::endl;
    return EXIT_FAILURE;
    }

  ParametersType finalPosition = optimizer->GetMetric()->GetParameters();
  std::cout << "Solution        = (";
  std::cout << finalPosition[0] << ",";
  std::cout << finalPosition[1] << ")" << std::endl;
  //
  // Check whether the final parameters are within tolerance of the known
  // solution. The check is skipped when the optimizer was not allowed to
  // iterate, since the parameters then still hold the initial position.
  //
  if( numberOfIterations > 0 )
    {
    bool pass = true;
    double trueParameters[2] = { 2, -2 };
    for( unsigned int j = 0; j < 2; ++j )
      {
      // Compare with an explicit absolute tolerance; exact floating point
      // equality cannot be expected from an iterative optimizer.
      if( !itk::Math::FloatAlmostEqual( finalPosition[j], trueParameters[j], 4, 1e-4 ) )
        {
        pass = false;
        }
      }

    if( !pass )
      {
      std::cout << "Test failed." << std::endl;
      return EXIT_FAILURE;
      }
    }

  return EXIT_SUCCESS;
}


int itkRegularStepGradientDescentOptimizerv4Test( int, char* [] )
{
  std::cout << "RegularStepGradientDescentOptimizerv4 Test ";
  std::cout << std::endl << std::endl;

  using OptimizerType = itk::RegularStepGradientDescentOptimizerv4< double >;

  OptimizerType::Pointer itkOptimizer = OptimizerType::New();

  EXERCISE_BASIC_OBJECT_METHODS( itkOptimizer, RegularStepGradientDescentOptimizerv4,
    GradientDescentOptimizerv4Template );

  int testStatus = EXIT_SUCCESS;

  bool doEstimateLearningRateAtEachIteration = false;
  bool doEstimateLearningRateOnce = false;

  itk::SizeValueType numberOfIterations = 900;

  OptimizerType::InternalComputationValueType relaxationFactor = 0.5;
  OptimizerType::InternalComputationValueType minimumStepLength = 1e-6;
  OptimizerType::InternalComputationValueType gradientMagnitudeTolerance = 1e-6;
  OptimizerType::MeasureType currentLearningRateRelaxation = 0;
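
  // The runs below start from this baseline configuration and vary one
  // setting at a time: the learning rate estimation flags, the relaxation
  // factor, the iteration budget, and finally two invalid settings that
  // are expected to raise exceptions.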

  testStatus = RegularStepGradientDescentOptimizerv4TestHelper< OptimizerType >(
     numberOfIterations,
     doEstimateLearningRateAtEachIteration,
     doEstimateLearningRateOnce,
     relaxationFactor,
     minimumStepLength,
     gradientMagnitudeTolerance,
     currentLearningRateRelaxation );


  // Run now with different learning rate estimation frequencies
  std::cout << "\nRun test with different learning rate estimation frequencies:"
    " estimate learning rate at each iteration: true;"
    " estimate learning rate once: false." << std::endl;
  {
  bool doEstimateLearningRateAtEachIteration2 = true;
  bool doEstimateLearningRateOnce2 = false;
  if( RegularStepGradientDescentOptimizerv4TestHelper< OptimizerType >(
     numberOfIterations,
     doEstimateLearningRateAtEachIteration2,
     doEstimateLearningRateOnce2,
     relaxationFactor,
     minimumStepLength,
     gradientMagnitudeTolerance,
     currentLearningRateRelaxation ) == EXIT_FAILURE )
    {
    testStatus = EXIT_FAILURE;
    }
  }

  // Run now with different learning rate estimation frequencies
  std::cout << "\nRun test with different learning rate estimation frequencies:"
    " estimate learning rate at each iteration: false;"
    " estimate learning rate once: true." << std::endl;
  {
  bool doEstimateLearningRateAtEachIteration3 = false;
  bool doEstimateLearningRateOnce3 = true;

  if( RegularStepGradientDescentOptimizerv4TestHelper< OptimizerType >(
     numberOfIterations,
     doEstimateLearningRateAtEachIteration3,
     doEstimateLearningRateOnce3,
     relaxationFactor,
     minimumStepLength,
     gradientMagnitudeTolerance,
     currentLearningRateRelaxation ) == EXIT_FAILURE )
    {
    testStatus = EXIT_FAILURE;
    }
  }

  // Run now with different learning rate estimation frequencies
  std::cout << "\nRun test with different learning rate estimation frequencies:"
    " estimate learning rate at each iteration: true;"
    " estimate learning rate once: true." << std::endl;
  {
  bool doEstimateLearningRateAtEachIteration4 = true;
  bool doEstimateLearningRateOnce4 = true;

  if( RegularStepGradientDescentOptimizerv4TestHelper< OptimizerType >(
     numberOfIterations,
     doEstimateLearningRateAtEachIteration4,
     doEstimateLearningRateOnce4,
     relaxationFactor,
     minimumStepLength,
     gradientMagnitudeTolerance,
     currentLearningRateRelaxation ) == EXIT_FAILURE )
    {
    testStatus = EXIT_FAILURE;
    }
  }

  // Run now with a different relaxation factor
  std::cout << "\nRun test with a different relaxation factor: 0.8, instead of the default value: 0.5." << std::endl;
  {
  OptimizerType::InternalComputationValueType relaxationFactor2 = 0.8;

  if( RegularStepGradientDescentOptimizerv4TestHelper< OptimizerType >(
     numberOfIterations,
     doEstimateLearningRateAtEachIteration,
     doEstimateLearningRateOnce,
     relaxationFactor2,
     minimumStepLength,
     gradientMagnitudeTolerance,
     currentLearningRateRelaxation ) == EXIT_FAILURE )
    {
    testStatus = EXIT_FAILURE;
    }
  }


  // Verify that the optimizer doesn't run if the number of iterations is set to zero.
  std::cout << "\nCheck the optimizer when the number of iterations is set to zero:" << std::endl;
  {
  itk::SizeValueType numberOfIterations2 = 0;

  if( RegularStepGradientDescentOptimizerv4TestHelper< OptimizerType >(
     numberOfIterations2,
     doEstimateLearningRateAtEachIteration,
     doEstimateLearningRateOnce,
     relaxationFactor,
     minimumStepLength,
     gradientMagnitudeTolerance,
     currentLearningRateRelaxation ) == EXIT_FAILURE )
    {
    testStatus = EXIT_FAILURE;
    }
  }

  //
  // Test that an exception is thrown if the GradientMagnitudeTolerance
  // is set to a negative value.
  //
  std::cout << "\nTest the exception thrown when the GradientMagnitudeTolerance is set to a negative value:" << std::endl;
  {
  OptimizerType::InternalComputationValueType gradientMagnitudeTolerance2 = -1.0;

  // The helper returns EXIT_FAILURE when the optimizer throws, so a
  // failing return value is taken as evidence of the expected exception.
  bool expectedExceptionReceived =
    ( RegularStepGradientDescentOptimizerv4TestHelper< OptimizerType >(
     numberOfIterations,
     doEstimateLearningRateAtEachIteration,
     doEstimateLearningRateOnce,
     relaxationFactor,
     minimumStepLength,
     gradientMagnitudeTolerance2,
     currentLearningRateRelaxation ) == EXIT_FAILURE );

  if( !expectedExceptionReceived )
    {
    std::cerr << "Failure to produce an exception when";
    std::cerr << " the GradientMagnitudeTolerance is negative." << std::endl;
    std::cerr << "TEST FAILED!" << std::endl;
    testStatus = EXIT_FAILURE;
    }
  }

  //
  // Test that an exception is thrown if the RelaxationFactor is set to a
  // value larger than one.
  //
  std::cout << "\nTest the exception thrown when the RelaxationFactor is set to a value larger than one:" << std::endl;
  {
  itk::SizeValueType numberOfIterations3 = 100;
  OptimizerType::InternalComputationValueType relaxationFactor3 = 1.1;
  OptimizerType::InternalComputationValueType gradientMagnitudeTolerance3 = 0.01;
  bool expectedExceptionReceived =
    ( RegularStepGradientDescentOptimizerv4TestHelper< OptimizerType >(
     numberOfIterations3,
     doEstimateLearningRateAtEachIteration,
     doEstimateLearningRateOnce,
     relaxationFactor3,
     minimumStepLength,
     gradientMagnitudeTolerance3,
     currentLearningRateRelaxation ) == EXIT_FAILURE );

  if( !expectedExceptionReceived )
    {
    std::cerr << "Failure to produce an exception when";
    std::cerr << " the RelaxationFactor is larger than one." << std::endl;
    std::cerr << "TEST FAILED!" << std::endl;
    testStatus = EXIT_FAILURE;
    }
  }

  if( testStatus == EXIT_SUCCESS )
    {
    std::cout << "TEST FINISHED SUCCESSFULLY!" << std::endl;
    }
  else
    {
    std::cout << "TEST FAILED!" << std::endl;
    }

  return testStatus;
}