//                                               -*- C++ -*-
/**
 *  @brief AbdoRackwitz is an actual implementation for
 *         OptimizationAlgorithmImplementation using the AbdoRackwitz algorithm.
 *
 *  Copyright 2005-2021 Airbus-EDF-IMACS-ONERA-Phimeca
 *
 *  This library is free software: you can redistribute it and/or modify
 *  it under the terms of the GNU Lesser General Public License as published by
 *  the Free Software Foundation, either version 3 of the License, or
 *  (at your option) any later version.
 *
 *  This library is distributed in the hope that it will be useful,
 *  but WITHOUT ANY WARRANTY; without even the implied warranty of
 *  MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE.  See the
 *  GNU Lesser General Public License for more details.
 *
 *  You should have received a copy of the GNU Lesser General Public License
 *  along with this library.  If not, see <http://www.gnu.org/licenses/>.
 *
 */
#include <cmath>
#include "openturns/AbdoRackwitz.hxx"
#include "openturns/Log.hxx"
#include "openturns/PersistentObjectFactory.hxx"

BEGIN_NAMESPACE_OPENTURNS

CLASSNAMEINIT(AbdoRackwitz)

static const Factory<AbdoRackwitz> Factory_AbdoRackwitz;

/* Default constructor */
AbdoRackwitz::AbdoRackwitz()
  : OptimizationAlgorithmImplementation()
  , tau_(ResourceMap::GetAsScalar("AbdoRackwitz-DefaultTau"))
  , omega_(ResourceMap::GetAsScalar("AbdoRackwitz-DefaultOmega"))
  , smooth_(ResourceMap::GetAsScalar("AbdoRackwitz-DefaultSmooth"))
{
  initialize();
}

AbdoRackwitz::AbdoRackwitz(const OptimizationProblem & problem,
                           const Scalar tau,
                           const Scalar omega,
                           const Scalar smooth)
  : OptimizationAlgorithmImplementation(problem)
  , tau_(tau)
  , omega_(omega)
  , smooth_(smooth)
{
  initialize();
  checkProblem(problem);
}


AbdoRackwitz::AbdoRackwitz(const OptimizationProblem & problem)
  : OptimizationAlgorithmImplementation(problem)
  , tau_(ResourceMap::GetAsScalar("AbdoRackwitz-DefaultTau"))
  , omega_(ResourceMap::GetAsScalar("AbdoRackwitz-DefaultOmega"))
  , smooth_(ResourceMap::GetAsScalar("AbdoRackwitz-DefaultSmooth"))
{
  initialize();
  checkProblem(problem);
}

/* Virtual constructor */
AbdoRackwitz * AbdoRackwitz::clone() const
{
  return new AbdoRackwitz(*this);
}

void AbdoRackwitz::initialize()
{
  currentSigma_ = 0.0;
  currentLevelValue_ = 0.0;
  currentLambda_ = 0.0;
}

/* Check whether this problem can be solved by this solver. Must be overloaded by the actual optimization algorithm */
void AbdoRackwitz::checkProblem(const OptimizationProblem & problem) const
{
  if (!problem.hasLevelFunction())
    throw InvalidArgumentException(HERE) << "Error: " << this->getClassName() << " can only solve nearest-point optimization problems";
  if (problem.hasMultipleObjective())
    throw InvalidArgumentException(HERE) << "Error: " << this->getClassName() << " does not support multi-objective optimization";
  if (problem.hasBounds())
    throw InvalidArgumentException(HERE) << "Error: " << this->getClassName() << " cannot solve bound-constrained optimization problems";
  if (!problem.isContinuous())
    throw InvalidArgumentException(HERE) << "Error: " << this->getClassName() << " does not support non-continuous problems";
}
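
/* A minimal construction sketch (not compiled here) of a problem accepted by
 * checkProblem(): continuous, single-objective, unbounded, with a level
 * function. NearestPointProblem and SymbolicFunction are assumed from the
 * public OpenTURNS API:
 *
 *   const SymbolicFunction levelFunction({"u1", "u2"}, {"u1^2 + u2^2"});
 *   const NearestPointProblem problem(levelFunction, 4.0); // nearest point on {G(u) = 4}
 *   AbdoRackwitz algorithm(problem); // passes the checks above
 */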

/* Line search for globalization of the algorithm */
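/* It backtracks on the merit function
 *   theta(u) = 0.5 * ||u||^2 + sigma * |G(u) - level|
 * starting from step = 1 and multiplying the step by tau (0 < tau < 1) until
 * an Armijo-type sufficient decrease, controlled by omega, is obtained or the
 * step falls below the bound derived from the maximum absolute error. */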
Scalar AbdoRackwitz::computeLineSearch()
{
  /* Local copy of the level function and the level value */
  const Function levelFunction(getProblem().getLevelFunction());
  const Scalar levelValue = getProblem().getLevelValue();
  /* Update sigma */
  currentSigma_ = std::max(currentSigma_ + 1.0, smooth_ * currentPoint_.norm() / currentGradient_.norm());
  /* Compute the penalized scalar objective function at the current point */
  const Scalar currentTheta = 0.5 * currentPoint_.normSquare() + currentSigma_ * std::abs(currentLevelValue_ - levelValue);
  /* Min bound for the step */
  const Scalar minStep = getMaximumAbsoluteError() / currentDirection_.norm();
  /* Minimum decrease for the penalized objective function */
  const Scalar levelIncrement = omega_ * currentDirection_.dot(currentPoint_ + (currentSigma_ * ((currentLevelValue_ > levelValue) ? 1.0 : -1.0)) * currentGradient_);
  /* Initialization of the line search */
  /* We start with step=1 */
  Scalar step = 1.0;
  Point currentStepPoint;
  Scalar currentStepLevelValue = -1.0;
  Scalar currentStepTheta = -1.0;
  do
  {
    currentStepPoint = currentPoint_ + step * currentDirection_;
    currentStepLevelValue = levelFunction(currentStepPoint)[0];

    currentStepTheta = 0.5 * currentStepPoint.normSquare() + currentSigma_ * std::abs(currentStepLevelValue - levelValue);
    if (getVerbose()) LOGINFO(OSS() << "line search step=" << step << " currentStepPoint=" << currentStepPoint << " currentStepLevelValue=" << currentStepLevelValue << " currentStepTheta=" << currentStepTheta);
    step *= tau_;
  }
  while ((step >= minStep) && (currentStepTheta > currentTheta + step * levelIncrement));
  currentPoint_ = currentStepPoint;
  currentLevelValue_ = currentStepLevelValue;
  /* The loop multiplied the step once more after accepting the point, so undo the last multiplication */
  return step / tau_;
}


/* Performs the actual computation by using the AbdoRackwitz algorithm */
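/* Each iteration is an HLRF-type step: linearize the level constraint at the
 * current point, solve the linearized nearest-point problem through its
 * Lagrange multiplier, then globalize the resulting direction with the line
 * search above. */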
void AbdoRackwitz::run()
{
  initialize();

  /* Get a local copy of the level function */
  const Function levelFunction(getProblem().getLevelFunction());
  /* Get a local copy of the level value */
  const Scalar levelValue = getProblem().getLevelValue();
  /* Current point -> u */
  currentPoint_ = getStartingPoint();
  Bool exitLoop = false;
  UnsignedInteger iterationNumber = 0;
  const UnsignedInteger initialEvaluationNumber = levelFunction.getEvaluationCallsNumber();

  Scalar absoluteError = -1.0;
  Scalar constraintError = -1.0;
  Scalar relativeError = -1.0;
  Scalar residualError = -1.0;

  /* Compute the level function at the current point -> G */
  currentLevelValue_ = levelFunction(currentPoint_)[0];

  UnsignedInteger evaluationNumber = levelFunction.getEvaluationCallsNumber() - initialEvaluationNumber;

  // reset result
  result_ = OptimizationResult(getProblem());
  result_.store(currentPoint_, Point(1, currentLevelValue_), absoluteError, relativeError, residualError, constraintError);

  while ((!exitLoop) && (iterationNumber <= getMaximumIterationNumber()) && (evaluationNumber <= getMaximumEvaluationNumber()))
  {
    /* Go to next iteration */
    ++ iterationNumber;

    /* Compute the level function gradient at the current point -> Grad(G).
       Be careful! gradient() returns an n-by-1 matrix, so we multiply it by a
       1-by-1 vector to get an equivalent n-dimensional Point */
    currentGradient_ = levelFunction.gradient(currentPoint_) * Point(1, 1.0);
    if (getVerbose()) LOGINFO(OSS() << "current point=" << currentPoint_ << " current level value=" << currentLevelValue_ << " current gradient=" << currentGradient_);
    /* Compute the current Lagrange multiplier */
    const Scalar normGradientSquared = currentGradient_.normSquare();
    /* In case of a null gradient, throw an internal exception */
    if (!(normGradientSquared > 0))
    {
      throw InternalException(HERE) << "Error in Abdo Rackwitz algorithm: the gradient of the level function is zero at point u=" << currentPoint_;
    }
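    /* The direction comes from minimizing 0.5 * ||u||^2 under the linearized
       constraint G(u_k) + <Grad(G), u - u_k> = level: stationarity gives
       u = -Lambda * Grad(G), and substituting into the constraint yields the
       multiplier below. */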
    /* Lambda = (G - level - <Grad(G), u>) / ||Grad(G)||^2 */
    currentLambda_ = (currentLevelValue_ - levelValue - currentGradient_.dot(currentPoint_)) / normGradientSquared;
    /* Compute the current direction Du = -Lambda Grad(G) - u */
    currentDirection_ = -currentLambda_ * currentGradient_ - currentPoint_;
    /* Perform a line search in the given direction */
    const Scalar alpha = computeLineSearch();

    // update number of evaluations
    evaluationNumber = levelFunction.getEvaluationCallsNumber() - initialEvaluationNumber;

    /* Check if convergence has been achieved */
    absoluteError = std::abs(alpha) * currentDirection_.norm();
    constraintError = std::abs(currentLevelValue_ - levelValue);
    const Scalar pointNorm = currentPoint_.norm();
    if (pointNorm > 0.0)
    {
      relativeError = absoluteError / pointNorm;
    }
    else
    {
      relativeError = -1.0;
    }
    residualError = (currentPoint_ + currentLambda_ * currentGradient_).norm();
    exitLoop = ((absoluteError < getMaximumAbsoluteError()) && (relativeError < getMaximumRelativeError())) || ((residualError < getMaximumResidualError()) && (constraintError < getMaximumConstraintError()));

    // update result
    result_.setEvaluationNumber(evaluationNumber);
    result_.setIterationNumber(iterationNumber);
    result_.store(currentPoint_, Point(1, currentLevelValue_), absoluteError, relativeError, residualError, constraintError);

    LOGINFO(getResult().__repr__());

    // callbacks
    if (progressCallback_.first)
    {
      progressCallback_.first((100.0 * evaluationNumber) / getMaximumEvaluationNumber(), progressCallback_.second);
    }
    if (stopCallback_.first)
    {
      Bool stop = stopCallback_.first(stopCallback_.second);
      if (stop)
      {
        exitLoop = true;
        LOGWARN(OSS() << "AbdoRackwitz was stopped by the user");
      }
    }
  }

  /* Check if we converged */
  if (!exitLoop)
  {
    LOGWARN(OSS() << "The AbdoRackwitz algorithm failed to converge after " << iterationNumber << " iterations, " << evaluationNumber << " evaluations.");
  }
} // run()
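
/* End-to-end usage sketch (assuming the public OpenTURNS API; not compiled
 * here). setStartingPoint() and getOptimalPoint() are assumed accessors:
 *
 *   NearestPointProblem problem(levelFunction, levelValue);
 *   AbdoRackwitz algorithm(problem);
 *   algorithm.setStartingPoint(Point(levelFunction.getInputDimension(), 1.0));
 *   algorithm.setMaximumIterationNumber(100);
 *   algorithm.run();
 *   const Point designPoint(algorithm.getResult().getOptimalPoint());
 */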


/* Tau accessor */
Scalar AbdoRackwitz::getTau() const
{
  return tau_;
}

void AbdoRackwitz::setTau(const Scalar tau)
{
  tau_ = tau;
}

/* Omega accessor */
Scalar AbdoRackwitz::getOmega() const
{
  return omega_;
}

void AbdoRackwitz::setOmega(const Scalar omega)
{
  omega_ = omega;
}

/* Smooth accessor */
Scalar AbdoRackwitz::getSmooth() const
{
  return smooth_;
}

void AbdoRackwitz::setSmooth(const Scalar smooth)
{
  smooth_ = smooth;
}
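
/* The defaults for these parameters come from ResourceMap (see the default
 * constructor). They can be overridden globally before constructing the
 * algorithm; the values below are purely illustrative:
 *
 *   ResourceMap::SetAsScalar("AbdoRackwitz-DefaultTau", 0.5);
 *   ResourceMap::SetAsScalar("AbdoRackwitz-DefaultOmega", 1.0e-4);
 *   ResourceMap::SetAsScalar("AbdoRackwitz-DefaultSmooth", 1.2);
 */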

/* String converter */
String AbdoRackwitz::__repr__() const
{
  OSS oss;
  oss << "class=" << AbdoRackwitz::GetClassName()
      << " " << OptimizationAlgorithmImplementation::__repr__()
      << " tau=" << tau_
      << " omega=" << omega_
      << " smooth=" << smooth_;
  return oss;
}

/* Method save() stores the object through the StorageManager */
void AbdoRackwitz::save(Advocate & adv) const
{
  OptimizationAlgorithmImplementation::save(adv);
  adv.saveAttribute("tau_", tau_);
  adv.saveAttribute("omega_", omega_);
  adv.saveAttribute("smooth_", smooth_);
  adv.saveAttribute("currentSigma_", currentSigma_);
  adv.saveAttribute("currentPoint_", currentPoint_);
  adv.saveAttribute("currentDirection_", currentDirection_);
  adv.saveAttribute("currentLevelValue_", currentLevelValue_);
  adv.saveAttribute("currentGradient_", currentGradient_);
  adv.saveAttribute("currentLambda_", currentLambda_);
}

/* Method load() reloads the object from the StorageManager */
void AbdoRackwitz::load(Advocate & adv)
{
  OptimizationAlgorithmImplementation::load(adv);
  adv.loadAttribute("tau_", tau_);
  adv.loadAttribute("omega_", omega_);
  adv.loadAttribute("smooth_", smooth_);
  adv.loadAttribute("currentSigma_", currentSigma_);
  adv.loadAttribute("currentPoint_", currentPoint_);
  adv.loadAttribute("currentDirection_", currentDirection_);
  adv.loadAttribute("currentLevelValue_", currentLevelValue_);
  // load currentGradient_ as well, for symmetry with save()
  adv.loadAttribute("currentGradient_", currentGradient_);
  adv.loadAttribute("currentLambda_", currentLambda_);
}

END_NAMESPACE_OPENTURNS