//                                               -*- C++ -*-
/**
 *  @brief OptimizationResult stores the result of an OptimizationAlgorithmImplementation
 *
 *  Copyright 2005-2021 Airbus-EDF-IMACS-ONERA-Phimeca
 *
 *  This library is free software: you can redistribute it and/or modify
 *  it under the terms of the GNU Lesser General Public License as published by
 *  the Free Software Foundation, either version 3 of the License, or
 *  (at your option) any later version.
 *
 *  This library is distributed in the hope that it will be useful,
 *  but WITHOUT ANY WARRANTY; without even the implied warranty of
 *  MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE.  See the
 *  GNU Lesser General Public License for more details.
 *
 *  You should have received a copy of the GNU Lesser General Public License
 *  along with this library.  If not, see <http://www.gnu.org/licenses/>.
 *
 */
#include "openturns/OptimizationResult.hxx"
#include "openturns/PersistentObjectFactory.hxx"
#include "openturns/Curve.hxx"
#include "openturns/SpecFunc.hxx"

BEGIN_NAMESPACE_OPENTURNS

CLASSNAMEINIT(OptimizationResult)

static const Factory<OptimizationResult> Factory_OptimizationResult;

/* Default constructor */
OptimizationResult::OptimizationResult()
  : PersistentObject()
{
  absoluteErrorHistory_.setDimension(1);
  relativeErrorHistory_.setDimension(1);
  residualErrorHistory_.setDimension(1);
  constraintErrorHistory_.setDimension(1);
}

/* Parameter constructor */
OptimizationResult::OptimizationResult(const OptimizationProblem & problem)
  : PersistentObject()
  , problem_(problem)
{
  absoluteErrorHistory_.setDimension(1);
  relativeErrorHistory_.setDimension(1);
  residualErrorHistory_.setDimension(1);
  constraintErrorHistory_.setDimension(1);
  inputHistory_.setDimension(problem.getObjective().getInputDimension());
  outputHistory_.setDimension(problem.getObjective().getOutputDimension());
}

/* Virtual constructor */
OptimizationResult * OptimizationResult::clone() const
{
  return new OptimizationResult(*this);
}

/* Optimal point accessors */
Point OptimizationResult::getOptimalPoint() const
{
  return optimalPoint_;
}

void OptimizationResult::setOptimalPoint(const Point & optimalPoint)
{
  optimalPoint_ = optimalPoint;
}

/* Optimal value accessors */
Point OptimizationResult::getOptimalValue() const
{
  return optimalValue_;
}

void OptimizationResult::setOptimalValue(const Point & optimalValue)
{
  optimalValue_ = optimalValue;
}

/* Evaluation number accessor */
UnsignedInteger OptimizationResult::getEvaluationNumber() const
{
  return evaluationNumber_;
}

void OptimizationResult::setEvaluationNumber(const UnsignedInteger evaluationNumber)
{
  evaluationNumber_ = evaluationNumber;
}

/* Iteration number accessor */
UnsignedInteger OptimizationResult::getIterationNumber() const
{
  return iterationNumber_;
}

void OptimizationResult::setIterationNumber(const UnsignedInteger iterationNumber)
{
  iterationNumber_ = iterationNumber;
}

/* Absolute error accessor */
Scalar OptimizationResult::getAbsoluteError() const
{
  return absoluteError_;
}

Sample OptimizationResult::getAbsoluteErrorHistory() const
{
  return absoluteErrorHistory_.getSample();
}

/* Absolute error accessor */
void OptimizationResult::setAbsoluteError(const Scalar absoluteError)
{
  absoluteError_ = absoluteError;
}

/* Relative error accessor */
Scalar OptimizationResult::getRelativeError() const
{
  return relativeError_;
}

Sample OptimizationResult::getRelativeErrorHistory() const
{
  return relativeErrorHistory_.getSample();
}

/* Relative error accessor */
void OptimizationResult::setRelativeError(const Scalar relativeError)
{
  relativeError_ = relativeError;
}

/* Residual error accessor */
Scalar OptimizationResult::getResidualError() const
{
  return residualError_;
}

Sample OptimizationResult::getResidualErrorHistory() const
{
  return residualErrorHistory_.getSample();
}

/* Residual error accessor */
void OptimizationResult::setResidualError(const Scalar residualError)
{
  residualError_ = residualError;
}

/* Constraint error accessor */
Scalar OptimizationResult::getConstraintError() const
{
  return constraintError_;
}

Sample OptimizationResult::getConstraintErrorHistory() const
{
  return constraintErrorHistory_.getSample();
}

/* Constraint error accessor */
void OptimizationResult::setConstraintError(const Scalar constraintError)
{
  constraintError_ = constraintError;
}

Sample OptimizationResult::getInputSample() const
{
  return inputHistory_.getSample();
}

Sample OptimizationResult::getOutputSample() const
{
  return outputHistory_.getSample();
}

void OptimizationResult::setProblem(const OptimizationProblem & problem)
{
  problem_ = problem;
}

OptimizationProblem OptimizationResult::getProblem() const
{
  return problem_;
}

/* String converter */
String OptimizationResult::__repr__() const
{
  OSS oss;
  oss << "class=" << OptimizationResult::GetClassName()
      << " optimal point=" << optimalPoint_
      << " optimal value=" << optimalValue_
      << " evaluationNumber=" << evaluationNumber_
      << " iterationNumber=" << iterationNumber_
      << " absoluteError=" << getAbsoluteError()
      << " relativeError=" << getRelativeError()
      << " residualError=" << getResidualError()
      << " constraintError=" << getConstraintError()
      << " problem=" << problem_;
  return oss;
}

/* Method save() stores the object through the StorageManager */
void OptimizationResult::save(Advocate & adv) const
{
  PersistentObject::save(adv);
  adv.saveAttribute( "optimalPoint_", optimalPoint_ );
  adv.saveAttribute( "optimalValue_", optimalValue_ );
  adv.saveAttribute( "evaluationNumber_", evaluationNumber_ );
  adv.saveAttribute( "iterationNumber_", iterationNumber_ );
  adv.saveAttribute( "absoluteError_", absoluteError_ );
  adv.saveAttribute( "relativeError_", relativeError_ );
  adv.saveAttribute( "residualError_", residualError_ );
  adv.saveAttribute( "constraintError_", constraintError_ );

  adv.saveAttribute( "absoluteErrorHistory_", absoluteErrorHistory_ );
  adv.saveAttribute( "relativeErrorHistory_", relativeErrorHistory_ );
  adv.saveAttribute( "residualErrorHistory_", residualErrorHistory_ );
  adv.saveAttribute( "constraintErrorHistory_", constraintErrorHistory_ );

  adv.saveAttribute( "inputHistory_", inputHistory_ );
  adv.saveAttribute( "outputHistory_", outputHistory_ );

  adv.saveAttribute( "problem_", problem_ );
}

/* Method load() reloads the object from the StorageManager */
void OptimizationResult::load(Advocate & adv)
{
  PersistentObject::load(adv);
  adv.loadAttribute( "optimalPoint_", optimalPoint_ );
  adv.loadAttribute( "optimalValue_", optimalValue_ );
  adv.loadAttribute( "evaluationNumber_", evaluationNumber_ );
  adv.loadAttribute( "iterationNumber_", iterationNumber_ );
  adv.loadAttribute( "absoluteError_", absoluteError_ );
  adv.loadAttribute( "relativeError_", relativeError_ );
  adv.loadAttribute( "residualError_", residualError_ );
  adv.loadAttribute( "constraintError_", constraintError_ );

  adv.loadAttribute( "absoluteErrorHistory_", absoluteErrorHistory_ );
  adv.loadAttribute( "relativeErrorHistory_", relativeErrorHistory_ );
  adv.loadAttribute( "residualErrorHistory_", residualErrorHistory_ );
  adv.loadAttribute( "constraintErrorHistory_", constraintErrorHistory_ );

  adv.loadAttribute( "inputHistory_", inputHistory_ );
  adv.loadAttribute( "outputHistory_", outputHistory_ );

  adv.loadAttribute( "problem_", problem_ );
}
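
/* Usage sketch (not part of this file): save() and load() are normally driven
   through the Study persistence mechanism rather than called directly.
   A minimal sketch, assuming an XML-enabled build; the file name "result.xml"
   is chosen purely for illustration:

     Study study;
     study.setStorageManager(XMLStorageManager("result.xml"));
     study.add("result", result);   // result is an OptimizationResult
     study.save();

     // Later, reload into a fresh object:
     Study study2;
     study2.setStorageManager(XMLStorageManager("result.xml"));
     study2.load();
     OptimizationResult restored;
     study2.fillObject("result", restored);
*/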

/* Incremental history storage */
void OptimizationResult::store(const Point & x,
                               const Point & y,
                               const Scalar absoluteError,
                               const Scalar relativeError,
                               const Scalar residualError,
                               const Scalar constraintError)
{
  if (!optimalValue_.getDimension()
      || getProblem().hasLevelFunction() // consider the last value as optimal for nearest-point algorithms
      || (getProblem().isMinimization() && y[0] < optimalValue_[0])
      || (!getProblem().isMinimization() && y[0] > optimalValue_[0]))
  {
    optimalPoint_ = x;
    optimalValue_ = y;
  }

  // update the current values
  absoluteError_ = absoluteError;
  relativeError_ = relativeError;
  residualError_ = residualError;
  constraintError_ = constraintError;

  // append the values to the histories
  absoluteErrorHistory_.store(Point(1, absoluteError));
  relativeErrorHistory_.store(Point(1, relativeError));
  residualErrorHistory_.store(Point(1, residualError));
  constraintErrorHistory_.store(Point(1, constraintError));

  inputHistory_.store(x);
  outputHistory_.store(y);
}
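
/* Usage sketch (not part of this file): a solver implementation would typically
   call store() once per iteration to build the histories. A minimal sketch of a
   hypothetical solver loop, with x, y and the error values computed by the
   algorithm itself:

     OptimizationResult result(getProblem());
     for (UnsignedInteger k = 0; k < maximumIterationNumber; ++k)
     {
       // ... compute the new iterate x and its value y = objective(x) ...
       result.store(x, y, absoluteError, relativeError, residualError, constraintError);
     }
     result.setIterationNumber(iterationNumber);
     result.setEvaluationNumber(evaluationNumber);
*/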

Graph OptimizationResult::drawErrorHistory() const
{
  Graph result("Error history", iterationNumber_ > 0 ? "Iteration number" : "Evaluation number", "Error value", true, "topright", 1.0, GraphImplementation::LOGY);
  result.setGrid(true);
  result.setGridColor("black");
  const UnsignedInteger size = getAbsoluteErrorHistory().getSize();
  // Absolute error. Non-positive values are clamped to ScalarEpsilon so they remain drawable on the log scale.
  {
    Sample data(getAbsoluteErrorHistory());
    for (UnsignedInteger i = 0; i < size; ++i) if (data(i, 0) <= 0.0) data(i, 0) = SpecFunc::ScalarEpsilon;
    Curve absoluteErrorCurve(data, "absolute error");
    absoluteErrorCurve.setLegend("absolute error");
    absoluteErrorCurve.setColor("red");
    result.add(absoluteErrorCurve);
  }
  // Relative error
  {
    Sample data(getRelativeErrorHistory());
    for (UnsignedInteger i = 0; i < size; ++i) if (data(i, 0) <= 0.0) data(i, 0) = SpecFunc::ScalarEpsilon;
    Curve relativeErrorCurve(data, "relative error");
    relativeErrorCurve.setLegend("relative error");
    relativeErrorCurve.setColor("blue");
    result.add(relativeErrorCurve);
  }
  // Residual error
  {
    Sample data(getResidualErrorHistory());
    for (UnsignedInteger i = 0; i < size; ++i) if (data(i, 0) <= 0.0) data(i, 0) = SpecFunc::ScalarEpsilon;
    Curve residualErrorCurve(data, "residual error");
    residualErrorCurve.setLegend("residual error");
    residualErrorCurve.setColor("green");
    result.add(residualErrorCurve);
  }
  // Constraint error
  {
    Sample data(getConstraintErrorHistory());
    for (UnsignedInteger i = 0; i < size; ++i) if (data(i, 0) <= 0.0) data(i, 0) = SpecFunc::ScalarEpsilon;
    Curve constraintErrorCurve(data, "constraint error");
    constraintErrorCurve.setLegend("constraint error");
    constraintErrorCurve.setColor("magenta");
    result.add(constraintErrorCurve);
  }
  result.setYMargin(0.0); // tighten the Y axis
  return result;
}
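
/* Usage sketch (not part of this file): the returned Graph can be exported to a
   file, assuming the Graph::draw file export is available in the build:

     Graph graph(result.drawErrorHistory());
     graph.draw("error_history.png");
*/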

/* Draw optimal value graph */
Graph OptimizationResult::drawOptimalValueHistory() const
{
  Graph result("Optimal value history", iterationNumber_ > 0 ? "Iteration number" : "Evaluation number", "Optimal value", true, "topright", 1.0);
  result.setGrid(true);
  result.setGridColor("black");
  Sample data(getOutputSample().getMarginal(0));
  const UnsignedInteger size = data.getSize();
  const Bool minimization = problem_.isMinimization();
  // Replace each value by the best value seen so far, so the plotted curve is monotone
  for (UnsignedInteger i = 1; i < size; ++ i)
  {
    const UnsignedInteger j = 0;
    if (!((minimization && (data(i, j) < data(i - 1, j)))
          || (!minimization && (data(i, j) > data(i - 1, j)))))
    {
      data(i, j) = data(i - 1, j);
    }
  }
  Curve optimalValueCurve(data, "optimal value");
  optimalValueCurve.setLegend("optimal value");
  optimalValueCurve.setColor("red");
  result.add(optimalValueCurve);
  return result;
}

/* Computes the Lagrange multipliers associated with the constraints as a post-processing of the optimal point. */
/* L(x, l_eq, l_lower_bound, l_upper_bound, l_ineq) = J(x) + l_eq * C_eq(x) + l_lower_bound * (x-lb)^+ + l_upper_bound * (ub-x)^+ + l_ineq * C_ineq^+(x)
   d/dx(L) = d/dx(J) + l_eq * d/dx(C_eq) + l_lower_bound * d/dx(x-lb)^+ + l_upper_bound * d/dx(ub - x)^+ + l_ineq * d/dx(C_ineq^+)

   The Lagrange multipliers are stored as [l_eq, l_lower_bounds, l_upper_bounds, l_ineq], where:
   * l_eq is of dimension 0 if there is no equality constraint, else of dimension the number of scalar equality constraints
   * l_lower_bounds and l_upper_bounds are of dimension 0 if there is no bound constraint, else of dimension dim(x) for both of them
   * l_ineq is of dimension 0 if there is no inequality constraint, else of dimension the number of scalar inequality constraints

   so if there is no constraint of any kind, the Lagrange multipliers are of dimension 0.
 */
Point OptimizationResult::computeLagrangeMultipliers(const Point & x) const
{
  const Scalar maximumConstraintError = ResourceMap::GetAsScalar("OptimizationAlgorithm-DefaultMaximumConstraintError");
  const UnsignedInteger equalityDimension = problem_.getEqualityConstraint().getOutputDimension();
  const UnsignedInteger inequalityDimension = problem_.getInequalityConstraint().getOutputDimension();
  const UnsignedInteger boundDimension = problem_.getBounds().getDimension();
  // If there is no constraint, the multipliers are of dimension 0
  if (equalityDimension + inequalityDimension + boundDimension == 0) return Point(0);
  // Here we have to compute the Lagrange multipliers as the solution of a linear problem with rhs=[d/dx(C_eq) | d/dx(x-lb)^+ | d/dx(ub - x)^+ | d/dx(C_ineq^+)] and lhs=-d/dx(J)
  const UnsignedInteger inputDimension = x.getDimension();
  // Get the lhs as a Point
  const Point lhs(Point(*problem_.getObjective().gradient(x).getImplementation()) * (-1.0));
  // In order to ease the construction of the rhs matrix, we use its internal representation as a Point in column-major storage
  Point rhs(0);
  // First, the equality constraints. Each scalar equality constraint gives a column in the rhs
  if (equalityDimension > 0)
    rhs.add(*problem_.getEqualityConstraint().gradient(x).getImplementation());
  // Second, the bounds
  if (boundDimension > 0)
  {
    // First the lower bounds
    const Point lowerBounds(problem_.getBounds().getLowerBound());
    for (UnsignedInteger i = 0; i < boundDimension; ++i)
    {
      Point boundGradient(inputDimension);
      // Check if the current lower bound is active up to the tolerance
      if (std::abs(x[i] - lowerBounds[i]) <= maximumConstraintError)
        boundGradient[i] = 1.0;
      rhs.add(boundGradient);
    } // Lower bounds
    // Second the upper bounds
    const Point upperBounds(problem_.getBounds().getUpperBound());
    for (UnsignedInteger i = 0; i < boundDimension; ++i)
    {
      Point boundGradient(inputDimension);
      // Check if the current upper bound is active up to the tolerance
      if (std::abs(upperBounds[i] - x[i]) <= maximumConstraintError)
        boundGradient[i] = -1.0;
      rhs.add(boundGradient);
    } // Upper bounds
  } // boundDimension > 0
  // Third, the inequality constraints
  if (inequalityDimension > 0)
  {
    const Point inequality(problem_.getInequalityConstraint()(x));
    const Matrix gradientInequality(problem_.getInequalityConstraint().gradient(x));
    for (UnsignedInteger i = 0; i < inequalityDimension; ++i)
    {
      // Check if the current inequality constraint is active up to the tolerance
      if (std::abs(inequality[i]) <= maximumConstraintError)
        rhs.add(*gradientInequality.getColumn(i).getImplementation());
      else
        rhs.add(Point(inputDimension));
    }
  } // Inequality constraints
  return Matrix(inputDimension, rhs.getDimension() / inputDimension, rhs).solveLinearSystem(lhs, false);
}


Point OptimizationResult::computeLagrangeMultipliers() const
{
  return computeLagrangeMultipliers(getOptimalPoint());
}
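
/* Usage sketch (not part of this file): the multipliers allow a rough KKT check
   at the reported optimum. A minimal sketch:

     const Point multipliers(result.computeLagrangeMultipliers());
     // multipliers.getDimension() == 0 means the problem is unconstrained;
     // components associated with constraints that are inactive at the optimum
     // are typically zero, since their columns in the rhs matrix are zero.
*/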

END_NAMESPACE_OPENTURNS