#ifndef OptGSS_h
#define OptGSS_h

/*----------------------------------------------------------------------
  Copyright (c) 2003
 ----------------------------------------------------------------------*/

 /**
  *
  * @author R.A.Oliva (raoliva@lbl.gov) Lawrence Berkeley National Laboratory.
  *
  */

#ifdef HAVE_CONFIG_H
#include "OPT++_config.h"
#endif

#ifdef WITH_MPI
#include "mpi.h"
#endif

#include "OptDirect.h"
#include "GenSet.h"
#include "Opt_PARAMS.h"

using std::cerr;

namespace OPTPP {

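/**
 * Generating Set Search (GSS) direct-search optimizer.  OptGSS polls trial
 * points along the directions supplied by a GenSetBase object (plus any
 * extra directions in `extras`), expanding the step length Delta (by Phi)
 * or contracting it (by Theta) as the search proceeds, until Delta falls
 * below Delta_tol or Iter_max iterations are reached.
 *
 * Illustrative usage sketch.  It assumes a problem object `myNLP1` (an NLP1)
 * and a generating-set object `gs` (e.g. a standard-basis set such as
 * GenSetStd) have been constructed elsewhere; those names and the parameter
 * values are examples only, not prescribed by this class.
 * @code
 *   OptGSS optobj(&myNLP1, &gs);   // attach problem and generating set
 *   optobj.setMaxIter(500);
 *   optobj.setStepTol(1.e-6);
 *   optobj.optimize();
 *   optobj.printStatus("Solution from Generating Set Search");
 * @endcode
 */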
class OptGSS : public OptDirect {

protected:

  NLP0* nlp;
  ///< Pointer to an NLP0 object

  NLP1* nlp1;
  ///< Pointer to an NLP1 object

  NEWMAT::ColumnVector X;
  ///< Best (current) point in search

  double fX;
  ///< Value of objective function at X

  NEWMAT::ColumnVector gX;
  ///< Value of gradient at X (when available)

  double fprev;
  ///< Stores previous best value

  double Delta;
  ///< Step-length parameter

  double Phi;
  ///< Step-length expanding parameter (>= 1)

  double Theta;
  ///< Step-length contracting parameter (0 < Theta < ThetaMax < 1)

  double Delta_tol;
  ///< Convergence tolerance. Algorithm stops when Delta < Delta_tol

  int Iter_max;
  ///< Upper limit on the number of iterations

  bool SearchAll;
  ///< Search type flag (true ==> search in all directions) -- now defunct

  bool computeGrad;
  ///< Flag to compute gradient after 1st iteration. Used by trustGSS.

  GenSetBase* gset;
  ///< Pointer to a Generating Set object

  NEWMAT::Matrix extras;
  ///< Extra search directions

  bool extras_srched;
  ///< True when extra directions have been searched

  bool printCOPYRIGHT;
  ///< If true, the copyright header is printed before optimization

  void printHeader();
  ///< Prints header of output file before optimization begins.

  bool printXiter;
  ///< Flag for printing (up to 3) components of X during iterations

  bool printGiter;
  ///< Flag for printing (up to 3) components of gX during iterations

  int mpi_rank; // also used in serial code

#ifdef WITH_MPI
  int mpi_size;

  void setpid() {    // Determine MPI size and rank
    //
    MPI_Comm_size(MPI_COMM_WORLD, &mpi_size);   // C style
    MPI_Comm_rank(MPI_COMM_WORLD, &mpi_rank);
    //
    //    mpi_size = Comm::Get_size();          // C++ style
    //    mpi_rank = Comm::Get_rank();
  }
#endif

public:

  void setParams();
  ///< Assign default values to algorithm parameters

  void setParams(OptGSS_params op);
  ///< Assign values to parameters as specified in argument

  //--
  // Constructors
  //--

  OptGSS() : nlp(0), nlp1(0), gset(0)
    { strcpy(method, "Generating Set Search"); setParams(); }

  /**
   * @param p nonlinear problem object
   * @param g generating search object
   */
  OptGSS(NLP0* p, GenSetBase* g) : nlp(p), nlp1(0), gset(g)
    { strcpy(method, "Generating Set Search with an NLP0"); setParams(); }

  /**
   * @param p nonlinear problem object
   * @param g generating search object
   */
  OptGSS(NLP1* p, GenSetBase* g) : nlp(p), nlp1(p), gset(g)
    { strcpy(method, "Generating Set Search with an NLP1"); setParams(); }

  /**
   * @param p nonlinear problem object
   * @param g generating search object
   * @param M matrix with extra search directions
   */
  OptGSS(NLP0* p, GenSetBase* g, NEWMAT::Matrix& M) :
    nlp(p), nlp1(0), gset(g), extras(M)
    {
      strcpy(method, "Generating Set Search with an NLP0 & extra directions");
      setParams();
    }

  /**
   * @param p nonlinear problem object
   * @param g generating search object
   * @param M matrix with extra search directions
   */
  OptGSS(NLP1* p, GenSetBase* g, NEWMAT::Matrix& M) :
    nlp(p), nlp1(p), Delta(0.0), computeGrad(true), gset(g), extras(M) {
    strcpy(method, "Generating Set Search with an NLP1 & extra directions");
    setParams();
  }

  // Constructors with options passed in

  /**
   * @param p nonlinear problem object
   * @param g generating search object
   * @param op GSS algorithmic options
   */
  OptGSS(NLP0* p, GenSetBase* g, OptGSS_params op) : nlp(p), nlp1(0), gset(g)
    { strcpy(method, "Generating Set Search with an NLP0"); setParams(op); }

  /**
   * @param p nonlinear problem object
   * @param g generating search object
   * @param op GSS algorithmic options
   */
  OptGSS(NLP1* p, GenSetBase* g, OptGSS_params op) : nlp(p), nlp1(p), gset(g)
    { strcpy(method, "Generating Set Search with an NLP1"); setParams(op); }

  /**
   * @param p nonlinear problem object
   * @param g generating search object
   * @param M matrix with extra search directions
   * @param op GSS algorithmic options
   */
  OptGSS(NLP0* p, GenSetBase* g, NEWMAT::Matrix& M, OptGSS_params op) :
    nlp(p), nlp1(0), gset(g), extras(M)
    {
      strcpy(method, "Generating Set Search with an NLP0 & extra directions");
      setParams(op);
    }

  /**
   * @param p nonlinear problem object
   * @param g generating search object
   * @param M matrix with extra search directions
   * @param op GSS algorithmic options
   */
  OptGSS(NLP1* p, GenSetBase* g, NEWMAT::Matrix& M, OptGSS_params op) :
    nlp(p), nlp1(p), Delta(0.0), computeGrad(true), gset(g), extras(M) {
    strcpy(method, "Generating Set Search with an NLP1 & extra directions");
    setParams(op);
  }
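
  // Illustrative only: the constructors above also accept a NEWMAT::Matrix of
  // extra search directions together with an OptGSS_params option bundle.
  // The sketch assumes one extra direction per column of M with rows matching
  // the problem dimension (an assumption, not documented here), and elides
  // the fields of OptGSS_params (presumably defined in Opt_PARAMS.h):
  //
  //   NEWMAT::Matrix M(n, 2);            // two additional directions in R^n
  //   ... fill the columns of M ...
  //   OptGSS_params op;
  //   ... set fields of op ...
  //   OptGSS optobj(&myNLP1, &gs, M, op);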

  //--
  // Destructor(s)
  //--
  ~OptGSS(){;}

  //--
  // Attribute access methods
  //--

  /**
   * Let the user set the step size
   */
  void setStepSize(double s) {Delta = s;}

  /**
   * Let the user set the step tolerance
   */
  void setStepTol(double s) {Delta_tol = s;}

  /**
   * Let the user set the step increment parameter
   */
  void setStepInc(double s) {
    if (s>1) {Phi = s; return;}
    cerr << "Step increment factor must exceed 1.0\n";
  }

  /**
   * Let the user set the step decrement parameter
   */
  void setStepDec(double s) {
    if (0<s && s<1) {Theta = s; return;}
    cerr << "Step decrement factor must be in interval (0,1)\n";
  }

  /**
   * Let the user set the max number of iterations
   */
  void setMaxIter(int s) { Iter_max = s;}

  /**
   * Let the user set the search strategy
   */
  void setFullSearch(bool s) { SearchAll = s;}

  bool extras_searched() { return extras_srched; }

  void setPrintX(bool s) {printXiter = s;}
  void setPrintG(bool s) {printGiter = s;}
  void printCopyright(bool doit) { printCOPYRIGHT = doit; }

  void setComputeGrad(bool s) {computeGrad = s;}
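
  // Illustrative tuning sketch for the step-control setters above (the
  // values are arbitrary examples, not recommended defaults):
  //
  //   optobj.setStepSize(0.5);    // initial Delta
  //   optobj.setStepInc(2.0);     // Phi   > 1    : expand Delta on success
  //   optobj.setStepDec(0.5);     // Theta in (0,1): contract Delta on failure
  //   optobj.setStepTol(1.e-8);   // stop once Delta falls below this tolerance
  //   optobj.setMaxIter(1000);    // iteration cap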


  //--
  // Our internal Optimization methods
  //--

  /**
   * Default Step Increment Method
   */
  void expandStep() { Delta *= Phi;}

  /**
   * Default Step Decrement Method
   */
  void contractStep() { Delta *= Theta; }

  /**
   * Default update of X and fX with new values
   */
  void updateX(double& newfX, NEWMAT::ColumnVector& newX) {
    fX = newfX;
    X  << newX;
  }

  /**
   * Default Stopping criterion
   */
  bool StopCondition() { return Delta_tol > Delta;}
  int  StepCondition();

  /**
   * Search for an improved point
   */
  int search();
  //  int search(bool flag=false);
  //  int searchExtras() { return search(true);  }
  //  int searchGenSet() { return search(false); }

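  // Non-normative sketch of how the members above typically interact; the
  // actual control flow lives in optimize() and search(), so treat this as
  // illustrative pseudocode only:
  //
  //   initOpt();
  //   while (!StopCondition() && iter < Iter_max) {
  //     if (search() reports an improved point)   // poll gset / extras around X
  //       expandStep();     // updateX() recorded the winner; Delta *= Phi
  //     else
  //       contractStep();   // no improvement; Delta *= Theta
  //     ++iter;
  //   }
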

  /**
   * Reset parameter values
   */
  virtual void reset();

  //--
  // Optimization class methods
  //--

  // -- these are virtual in OptimizeClass
  virtual void initOpt();
  virtual void optimize();
  virtual int checkConvg();
  int checkConvg_fcn();
  int checkConvg_grad();

#ifdef DAKOTA_OPTPP
  virtual void printStatus(char * msg) { printStatus(msg, true);}
#else
  virtual void printStatus(char * msg) { printStatus(msg, false);}
  /// Print the status of the optimizer at Xc, but not Xc itself
#endif

  void printStatus(char * msg, bool printXc);
  /// Print the status of the optimizer at Xc, and Xc itself if 2nd arg == true

  //--
  // Define unused virtual methods from base class (OptimizeClass)
  //--
  NEWMAT::ColumnVector computeSearch(NEWMAT::SymmetricMatrix& )
    {return NEWMAT::ColumnVector();}

  virtual void acceptStep(int k, int step_type)
    {OptimizeClass::defaultAcceptStep(k, step_type);}

  virtual void updateModel(int k, int ndim, NEWMAT::ColumnVector x)
    {OptimizeClass::defaultUpdateModel(k, ndim, x);}

  virtual void readOptInput() {;}

  void printIter(int iter, int bp);

#ifdef WITH_MPI
  int getMPIRank() { return mpi_rank; }
  int getMPISize() { return mpi_size; }
  //  int pll_search();
#endif

};

} // namespace OPTPP
#endif