1 /*=========================================================================
2 *
3 * Copyright Insight Software Consortium
4 *
5 * Licensed under the Apache License, Version 2.0 (the "License");
6 * you may not use this file except in compliance with the License.
7 * You may obtain a copy of the License at
8 *
9 * http://www.apache.org/licenses/LICENSE-2.0.txt
10 *
11 * Unless required by applicable law or agreed to in writing, software
12 * distributed under the License is distributed on an "AS IS" BASIS,
13 * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
14 * See the License for the specific language governing permissions and
15 * limitations under the License.
16 *
17 *=========================================================================*/
18
19 #include "itkLBFGSOptimizer.h"
20 #include "itkMath.h"
21 #include <iostream>
22
23 /**
24 * \class A cost function
 * The objective function is the quadratic form:
26 *
27 * 1/2 x^T A x - b^T x
28 *
29 * Where A is represented as an itkMatrix and
 * b is represented as an itkVector
31 *
32 * The system in this example is:
33 *
 *  | 3  2 ||x|   |-2|   |0|
 *  | 2  6 ||y| + | 8| = |0|
36 *
37 *
38 * the solution is the vector | 2 -2 |
39 *
40 */
41 class LBFGSCostFunction : public itk::SingleValuedCostFunction
42 {
43 public:
44
45 using Self = LBFGSCostFunction;
46 using Superclass = itk::SingleValuedCostFunction;
47 using Pointer = itk::SmartPointer<Self>;
48 using ConstPointer = itk::SmartPointer<const Self>;
49 itkNewMacro( Self );
50 itkTypeMacro( LBFCostFunction, SingleValuedCostFunction );
51
52 enum { SpaceDimension=2 };
53
54 using ParametersType = Superclass::ParametersType;
55 using DerivativeType = Superclass::DerivativeType;
56
57 using VectorType = vnl_vector<double>;
58 using MatrixType = vnl_matrix<double>;
59
60 using MeasureType = double;
61
62 LBFGSCostFunction() = default;
63
GetValue(const ParametersType & position) const64 double GetValue( const ParametersType & position ) const override
65 {
66 double x = position[0];
67 double y = position[1];
68
69 std::cout << "GetValue ( ";
70 std::cout << x << " , " << y;
71 std::cout << ") = ";
72
73 double val = 0.5*(3*x*x+4*x*y+6*y*y) - 2*x + 8*y;
74
75 std::cout << val << std::endl;
76
77 return val;
78 }
79
GetDerivative(const ParametersType & position,DerivativeType & derivative) const80 void GetDerivative( const ParametersType & position,
81 DerivativeType & derivative ) const override
82 {
83 double x = position[0];
84 double y = position[1];
85
86 std::cout << "GetDerivative ( ";
87 std::cout << x << " , " << y;
88 std::cout << ") = ";
89
90 derivative = DerivativeType(SpaceDimension);
91 derivative[0] = 3*x + 2*y -2;
92 derivative[1] = 2*x + 6*y +8;
93 std::cout << "(";
94 std::cout << derivative[0] <<" , ";
95 std::cout << derivative[1] << ")" << std::endl;
96 }
97
98
GetNumberOfParameters() const99 unsigned int GetNumberOfParameters() const override
100 {
101 return SpaceDimension;
102 }
103
104 private:
105
106
107 };
108
109
itkLBFGSOptimizerTest(int,char * [])110 int itkLBFGSOptimizerTest(int, char* [] )
111 {
112 std::cout << "LBFGS Optimizer Test \n \n";
113
114 using OptimizerType = itk::LBFGSOptimizer;
115 using vnlOptimizerType = OptimizerType::InternalOptimizerType;
116
117 // Declaration of a itkOptimizer
118 OptimizerType::Pointer itkOptimizer = OptimizerType::New();
119
120 // Declaration of the CostFunction adapter
121 LBFGSCostFunction::Pointer costFunction = LBFGSCostFunction::New();
122
123 // Set some optimizer parameters
124 itkOptimizer->SetTrace( false );
125 itkOptimizer->SetMaximumNumberOfFunctionEvaluations( 1000 );
126 itkOptimizer->SetGradientConvergenceTolerance( 1e-3 );
127 itkOptimizer->SetLineSearchAccuracy( 0.1 );
128 itkOptimizer->SetDefaultStepLength( 5.0 );
129 std::cout << "GetValue() before optimizer starts: " << itkOptimizer->GetValue() << std::endl;
130 itkOptimizer->SetCostFunction( costFunction );
131
132 const double G_Tolerance = 1e-4; // Gradient magnitude tolerance
133 constexpr int Max_Iterations = 100; // Maximum number of iterations
134 const bool Trace = false; // Tracing
135 constexpr double LineSearch_Tol = 0.9; // Line search tolerance
136 constexpr double Step_Length = 1.0; // Default step length
137
138 // const double F_Tolerance = 1e-3; // Function value tolerance: not used
139 // const double X_Tolerance = 1e-8; // Search space tolerance: not used
140 // const double Epsilon_Function = 1e-10; // Step : not used
141
142 vnlOptimizerType * vnlOptimizer = itkOptimizer->GetOptimizer();
143
144 vnlOptimizer->set_check_derivatives( 0 );
145
146 constexpr unsigned int SpaceDimension = 2;
147 OptimizerType::ParametersType initialValue(SpaceDimension);
148
149 // We start not so far from | 2 -2 |
150 initialValue[0] = 100;
151 initialValue[1] = -100;
152
153 OptimizerType::ParametersType currentValue(2);
154
155 currentValue = initialValue;
156
157 itkOptimizer->SetInitialPosition( currentValue );
158
159 // Set some optimizer parameters
160 itkOptimizer->SetTrace( Trace );
161 itkOptimizer->SetMaximumNumberOfFunctionEvaluations( Max_Iterations );
162 itkOptimizer->SetGradientConvergenceTolerance( G_Tolerance );
163 itkOptimizer->SetLineSearchAccuracy( LineSearch_Tol );
164 itkOptimizer->SetDefaultStepLength( Step_Length );
165 itkOptimizer->Print( std::cout );
166 std::cout << "Stop description = " << itkOptimizer->GetStopConditionDescription() << std::endl;
167
168 try
169 {
170
171 itkOptimizer->StartOptimization();
172 }
173 catch( itk::ExceptionObject & e )
174 {
175 std::cout << "Exception thrown ! " << std::endl;
176 std::cout << "An error occurred during Optimization" << std::endl;
177 std::cout << "Location = " << e.GetLocation() << std::endl;
178 std::cout << "Description = " << e.GetDescription() << std::endl;
179 return EXIT_FAILURE;
180 }
181
182 std::cout << "End condition = " << vnlOptimizer->get_failure_code() << std::endl;
183 std::cout << "Number of iters = " << vnlOptimizer->get_num_iterations() << std::endl;
184 std::cout << "Number of evals = " << vnlOptimizer->get_num_evaluations() << std::endl;
185 std::cout << std::endl;
186
187 OptimizerType::ParametersType finalPosition;
188 finalPosition = itkOptimizer->GetCurrentPosition();
189
190 std::cout << "Solution = ("
191 << finalPosition[0] << ","
192 << finalPosition[1] << ")" << std::endl;
193
194 std::cout << "End condition = "
195 << itkOptimizer->GetStopConditionDescription() << std::endl;
196 std::cout << "Trace = " << itkOptimizer->GetTrace() << std::endl;
197 std::cout << "LineSearchAccuracy = "
198 << itkOptimizer->GetLineSearchAccuracy() << std::endl;
199 std::cout << "GradientConvergenceTolerance = "
200 << itkOptimizer->GetGradientConvergenceTolerance() << std::endl;
201 std::cout << "DefaultStepLength = "
202 << itkOptimizer->GetDefaultStepLength() << std::endl;
203 std::cout << "MaximumNumberOfFunctionEvaluations = "
204 << itkOptimizer->GetMaximumNumberOfFunctionEvaluations() << std::endl;
205
206 //
207 // check results to see if it is within range
208 //
209 bool pass = true;
210 double trueParameters[2] = { 2, -2 };
211 for( unsigned int j = 0; j < 2; j++ )
212 {
213 if( itk::Math::abs( finalPosition[j] - trueParameters[j] ) > 0.01 )
214 pass = false;
215 }
216
217 if( !pass )
218 {
219 std::cout << "Test failed." << std::endl;
220 return EXIT_FAILURE;
221 }
222
223 // Get the final value of the optimizer
224 std::cout << "Testing GetValue() : ";
225 OptimizerType::MeasureType finalValue = itkOptimizer->GetValue();
226 if(std::fabs(finalValue+10.0)>0.01)
227 {
228 std::cout << "[FAILURE]" << std::endl;
229 return EXIT_FAILURE;
230 }
231 else
232 {
233 std::cout << "[SUCCESS]" << std::endl;
234 }
235
236 std::cout << "Test passed." << std::endl;
237 return EXIT_SUCCESS;
238
239 }
240