//------------------------------------------------------------------------
// Copyright (C) 2003:
// R.A.Oliva, Lawrence Berkeley National Laboratory.
// raoliva@lbl.gov
//------------------------------------------------------------------------

#ifdef HAVE_CONFIG_H
#include "OPT++_config.h"
#endif

#ifdef HAVE_STD
#include <cstring>
#include <cmath>   // fabs, std::sqrt (used below)
#include <ctime>
#else
#include <string.h>
#include <math.h>
#include <time.h>
#endif

#include "OptGSS.h"
#include "NLF.h"
#include "ioformat.h"

using namespace std;
using Teuchos::SerialDenseVector;

namespace OPTPP {

void OptGSS::setParams() {
  // use default parameters
  OptGSS_params op;
  setParams(op);
}

void OptGSS::setParams(OptGSS_params op) {
  // use the parameters passed as arguments
  Delta     = op.Delta;
  Delta_tol = op.Delta_tol;
  Phi       = op.Phi;
  Theta     = op.Theta;
  Iter_max  = op.Iter_max;
  printCOPYRIGHT = op.printCOPYRIGHT;
  printXiter  = op.printXiter;
  printGiter  = op.printGiter;

  // default non-user parameters:
  computeGrad = (nlp1 != 0);  // gradients only if an NLP1 is present
  mpi_rank = 0;
#ifdef OPTPP_HAVE_MPI
  setpid();
#endif
}
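
//--
// Usage sketch (illustrative only; the numeric values are arbitrary).
// A caller can override the defaults by filling an OptGSS_params
// struct -- the field names are the ones copied above -- and passing
// it to setParams():
//
//   OptGSS_params p;
//   p.Delta     = 0.5;    // initial step length (0 => auto-init in initOpt)
//   p.Delta_tol = 1.e-8;  // stop once the step length falls below this
//   p.Iter_max  = 500;    // iteration cap
//   gss.setParams(p);     // 'gss' is an OptGSS instance
//--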

void OptGSS::reset()
{
   int n = nlp->getDim();
   nlp->reset();
   OptimizeClass::defaultReset(n);
   setParams();
   // Still to do - double check parameters in the genSet class.
}

void OptGSS::initOpt()
{
  if (debug_)
    nlp->setDebug();

  if(nlp->hasConstraints()){
    cerr << "Error: OptGSS does not support bound, linear, or nonlinear "
         << "constraints.\n       Please select a different method for "
         << "constrained problems." << endl;
    abort_handler(-1);
  }

  bool debug = nlp->getDebug();

  extras_srched = false; // before optimize()

  nlp->initFcn();  // sets initial point

  if (nlp1)
    nlp1->eval();
  else
    nlp->eval();

  if (debug) {
    *optout << "NLP Initialized in OptGSS::initOpt()\n";
    if (nlp1)
      *optout << "GSS::initOpt() - NLP1 eval()\n";
    else
      *optout << "GSS::initOpt() - NLP0 eval()\n";
    optout->flush();
  } // END if (debug)

  //--
  // Initialize X, fX, gX (if available), and genset.
  //--
  X = nlp->getXc();

  if (nlp1) {
    gX = nlp1->getGrad();
    gset->init(gX);
  }
  else {
    gset->init();
  }

  fX = nlp->getF();
  fprev = fX;

  //--
  // init step-size if not set by user
  //--
  if (Delta==0.0) {

    // set to max(|x_i|) ....
    for (int i=0; i<X.length(); i++) {
      double xi = fabs(X(i));
      if (xi > Delta) Delta = xi;
    }

    // ... or to 1 if all |x_i| == 0
    if (Delta==0.0) {
      Delta = 1.0;
    }

  } // END if (Delta==0)
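
  // Worked example of the rule above: for x0 = (2, -5, 0.1) the initial
  // Delta becomes max|x_i| = 5; for x0 = (0, 0, 0) it falls back to 1.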

  //--
  // Print out header with start time and "0th-iteration" info
  //--
  printHeader();
  printIter(0,0);

}

void OptGSS::optimize()
//------------------------------------------------------------------------
// Generating Set Search.
//
// Solves the unconstrained minimization problem
//
//          min F(x),    x = (x_1, x_2, ..., x_N)
//
// using the Generating Set Search Algorithm.
//
// References:
//
// T. G. Kolda, R. M. Lewis, and V. Torczon, Optimization by Direct
// Search: New Perspectives on Some Classical and Modern Methods,
// SIAM Review, 45(3):385-482, 2003.
//
// Description of the algorithm:
//
// 1. On each iteration, evaluate f on the search-space.
//
// 2. If a function decrease is found, advance to that point
//    and (optionally) increase the step-size;
//    otherwise, decrease the step-size.
//
// 3. Stop iterating if the step-size becomes too small or if
//    either the function or gradient convergence tolerances are met.
//    Else, do updates and iterate.
//
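// In the notation of that reference, a successful iteration k takes
//
//     x_{k+1} = x_k + Delta_k d,      Delta_{k+1} = Phi * Delta_k,
//
// while a failed one keeps x_k and contracts the step,
//
//     Delta_{k+1} = Theta * Delta_k,
//
// where -- as the parameter names here suggest -- Phi >= 1 and
// 0 < Theta < 1 play the expansion and contraction roles.  This is a
// sketch of the update rule only, not a description of the internals
// of expandStep()/contractStep().
//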
// Notes:
//
// (1)
// The search-space consists of points generated by the
// generating set, pruned by the gradient when available,
// plus an optional matrix of extra search points (in columns).
//
// (2)
// Each iteration changes either the step size
// or the current point, but not both, so we split checkConvg()
// into its 3 components: step-tol, fcn-tol, gradnorm-tol.
//
// (3)
// The update of the gradient will be skipped when
//   * there is only one iteration,
//               --AND--
//   * the computeGrad flag is false.
// This is to save evaluating the gradient when it might
// not be used by the calling routine (i.e. trustGSS).
// WARNING:
//   In this case, the grad in the NLP1 may not correspond to the
//   current point xc when optimize() terminates; the calling
//   program must do the gradient update.
//
{ // optimize BEGIN

  SpecOption SpecTmp = nlp->getSpecOption();
  nlp->setSpecOption(NoSpec);

  initOpt();

  bool done = StepCondition(); // should be false initially
  if (done) {
    *optout << "!!! Step tolerance met before iterations begin !!!\n";
    cerr    << "Warning: step tolerance met "
            << "before iterations begin!\n*******\n";
    ret_code = 1;
    setReturnCode(ret_code);
  }
  else
    ret_code = 0;

  int iter = 0;
  int bestid = -1;

  while (!done) {

    ++iter;

    //--
    // Search for a better point on {gen set mesh} union {extra search pts};
    // update the current point if we find one.
    //--
    bestid = search();

    //--
    // update step size
    //--
    if (bestid == -1) {                 // search failed

      contractStep();

    }
    else if (bestid <= gset->size()) {  // advanced to a mesh point

      expandStep();

    }

    // print iteration data, pass best point
    printIter(iter, bestid);

    // if search failed, check reduced step > min step
    if (bestid == -1)

      ret_code = StepCondition();

    else { // search succeeded ==> have a new point

      // check convergence on fcn value if we are iterating
      if (Iter_max > 1)
        ret_code = checkConvg_fcn();

      // gradient update -- see note (3) above.
      if (nlp1)
        if (Iter_max > 1 || computeGrad) {
          nlp1->evalG();
          gX = nlp1->getGrad();

          if (ret_code == 0)
            ret_code = checkConvg_grad();
        }

    }

    // check iteration condition
    done = (ret_code || iter == Iter_max);

    if (!done) { // update: compute D_k = G_k union H_k
      if (nlp1 && gset->prunes())
        gset->update(gX);
      else
        gset->update();
    }

  } // END main loop

  iter_taken = iter;
  if (ret_code == 0) { // no convergence criterion was met
    ret_code = -4;
    setReturnCode(ret_code);
    std::strcpy(mesg,"Algorithm terminated - Number of iterations exceeds the specified limit");
  }

  nlp->setSpecOption(SpecTmp);

} // END optimize() ____________________________________
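
//--
// Typical driver (a sketch following the pattern of the OPT++ test
// programs; FDNLF1 and GenSetStd are the library's finite-difference
// NLP1 and standard generating-set classes, while 'rosen'/'init_rosen'
// stand for hypothetical user-supplied callbacks):
//
//   FDNLF1    nlp(n, rosen, init_rosen);
//   GenSetStd gs(n);                      // 2n coordinate directions
//   OptGSS    gss(&nlp, &gs);
//   gss.optimize();
//   gss.printStatus("Solution from OptGSS", true);
//   gss.cleanup();
//--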


int OptGSS::checkConvg() {
  // run all convergence tests in turn -- currently not used.
  int rc;
  if ( (rc = StepCondition())   ) return rc;
  if ( (rc = checkConvg_fcn())  ) return rc;
  if ( (rc = checkConvg_grad()) ) return rc;
  return 0;
}

//--
// Check convergence wrt step size
int OptGSS::StepCondition() {

  if ( Delta > Delta_tol ) return 0;

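  // Reaching this point means Delta <= Delta_tol.  Illustrative numbers:
  // with Delta_tol = 1.e-6, a contracted step of Delta = 5.e-7 lands here,
  // and the method reports step-size convergence (return code 1).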
  std::strcpy(mesg,"Algorithm converged - Norm of last step is less than step tolerance");
  if (mpi_rank==0) {
    *optout << "             \tSteplength = " << e(Delta,12,4)
            << " Steplength Tolerance: " << e(Delta_tol,12,4) << endl;
  }
  setReturnCode(1);
  return 1;
}

//--
// Checks convergence of function value
int OptGSS::checkConvg_fcn() {

  double ftol = tol.getFTol();
  double fvalue = fX;
  double rftol = ftol*max(1.0,fabs(fprev));
  double deltaf = fprev - fvalue;

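  // Illustrative numbers: fprev = 10.0, fX = 9.9999, ftol = 1.e-4 give
  // rftol = 1.e-4 * max(1,10) = 1.e-3 and deltaf = 1.e-4 <= rftol, so
  // the test below reports function-value convergence (return code 2).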
  if (deltaf <= rftol) {
    std::strcpy(mesg,"Algorithm converged - Difference of successive fcn values is less than fcn tolerance");
    if (mpi_rank==0) {
      *optout << "checkConvg():\tdeltaf = " << e(deltaf,12,4)
              << "  ftol = " << e(ftol,12,4) << "\n";
    }
    setReturnCode(2);
    return 2;
  }
  return 0;
}

//--
// Checks convergence wrt norm of the gradient
int OptGSS::checkConvg_grad()
{
  if (!nlp1) return 0; // no gradient available

  // Test 3. relative gradient tolerance

  double gtol  = tol.getGTol();
  double rgtol = gtol*max(1.0,fabs(fX));
  double gnorm = std::sqrt(gX.dot(gX)); // 2-norm of the gradient
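
  // Illustrative numbers: gtol = 1.e-6 with |fX| = 100 gives
  // rgtol = 1.e-4, so a gradient norm of 5.e-5 satisfies Test 3 below.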
  if (gnorm <= rgtol) {
    std::strcpy(mesg,"Algorithm converged - Norm of gradient is less than gradient tolerance");
    if (mpi_rank==0) {
      *optout << "checkConvg():\tgnorm = " << e(gnorm,12,4)
              << "  rgtol = " << e(rgtol,12,4) << "\n";
    }
    setReturnCode(3);
    return 3;
  }

  // Test 4. absolute gradient tolerance
  // (Note: rgtol >= gtol by construction, so when Test 3 precedes this
  // test it can never fire; it is kept for completeness.)

  if (gnorm <= gtol) {
    std::strcpy(mesg,"Algorithm converged - Norm of gradient is less than absolute gradient tolerance");
    if (mpi_rank==0) {
      *optout << "checkConvg: gnorm = " << e(gnorm,12,4)
              << " gtol = " << e(gtol,12,4) << "\n";
    }
    setReturnCode(4);
    return 4;
  }

  return 0; // Nothing to report
}

//--
// Search Method: Parallel and Serial, for GenSet AND Extras
//--
int OptGSS::search() {

  bool debug = nlp->getDebug();

  // size of the search space:
  int searchsize = gset->nActive() + extras.numCols();

  if (searchsize == 0) {
    if (debug) *optout << "*-*-* empty search set! -- nothing done\n";
    return -1;
  }

  // the size of the active gen-set mesh
  int gssz = gset->nActive();

  // vars to keep the best point, its index and its fval
  SerialDenseVector<int,double> bestx(X);
  int    besti = -1;
  double bestf = fX;

  // vars to keep the trial point and its fval
  SerialDenseVector<int,double> xi(X);
  double fi;

#ifdef OPTPP_HAVE_MPI

  double pll_bestf;
  int    pll_besti;

  // [imin,imax] <==> this processor's set of directions

  int imin = searchsize * mpi_rank / mpi_size + 1;
  int imax = searchsize * (mpi_rank+1) / mpi_size;
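
  // Example of the split: with searchsize = 10 and mpi_size = 4 the
  // ranks get the 1-based ranges [1,2], [3,5], [6,7] and [8,10], so
  // the directions are divided nearly evenly with no overlap.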

#else

  // [imin,imax] <==> all search directions
  int imin = 1;
  int imax = searchsize;

#endif

  if (debug) {

    if (imax <= gssz)
      *optout << "Searching genset " << imin << " to " << imax << endl;
    else if (imin > gssz)
      *optout << "Searching extras " << imin-gssz << " to " << imax-gssz << endl;
    else
      *optout << "Searching genset " << imin << " to " << gssz
              << " and extras 1 to " << imax - gssz << endl;

    optout->flush();
  }

  //--
  // Search loop
  //--

  for (int i = imin; i <= imax; i++) {

    if (i <= gssz)
      gset->generateActive(i-1, Delta, X, xi); // sets xi = X + Delta * Act_i
    else {
      // xi = column (i-gssz) of extras; columns are 0-based, hence i-gssz-1
      for (int j = 0; j < extras.numRows(); j++)
        xi(j) = extras(j, i-gssz-1);
    }

    fi = nlp->evalF(xi);

    if (fi < bestf) {  // alternative sufficient-decrease test: fi < fX - forcingfun()
      besti = i-1;     // NOTE: besti is 0-based over [genset | extras]
      bestf = fi;
      bestx = xi;
    }

  } // END for


#ifdef OPTPP_HAVE_MPI
  // MIN-reduce the local best to the global best among all processors
  MPI_Allreduce(&bestf, &pll_bestf, 1, MPI_DOUBLE, MPI_MIN, MPI_COMM_WORLD);
  if (debug) {
    *optout << "First MPI_Allreduce() completed\n";
    *optout << "-- Current fX   = " << fX << endl;
    *optout << "-- local  bestf = " << bestf << endl;
    *optout << "-- global bestf = " << pll_bestf << endl;
    optout->flush();
  }

  // if there is no better point than the current pt, all processes are done
  if (pll_bestf == fX) { return -1; }

  // if we don't hold the best point, unset besti
  if (pll_bestf != bestf)  besti = -1;

  // MAX-reduce the best i to catch the global best
  MPI_Allreduce(&besti, &pll_besti, 1, MPI_INT, MPI_MAX, MPI_COMM_WORLD);
  if (debug) {
    *optout << "Second MPI_Allreduce() completed\n";
    *optout << "-- local besti  = " << besti << endl;
    *optout << "-- global besti = " << pll_besti << endl;
    optout->flush();
  }

  // Update X, fX, ...

  besti = pll_besti;
  bestf = pll_bestf;
  if (pll_besti >= gssz) {
    // best point is an extra: 0-based column (pll_besti - gssz),
    // matching the indexing used in the search loop above
    for (int j = 0; j < bestx.length(); j++)
      bestx(j) = extras(j, pll_besti-gssz);
  }
  else {
    gset->generateActive(pll_besti, Delta, X, bestx);
  }

#else

  if (besti == -1) {  // search failed to find a better pt

    if (debug)
      *optout << "search() done. No improved pt found.\n";

    return (-1);
  }

#endif

  if (debug) {
    if (besti == -1)
      *optout << "!!! GSS::search() error: besti is -1 past return condition.\n";
  }

  if (debug && besti < gssz) {
    int acid = gset->activeID(besti);
    if ( acid==0 || acid > gset->size() )
      *optout << "!!! GSS:search() error: Invalid gset->activeID(" << besti << "):  " << acid << "\n";
  }

  if (debug) { // verify that bestx matches the direction besti claims
    if (besti < gssz) {
      gset->generateActive(besti, Delta, X, xi);
      if (bestx != xi) {
        *optout << "!!! GSS search() : gset->Active(" << besti << ") != bestx ***\n";
      }
    }
    else {
      // xi = 0-based column (besti - gssz) of extras, as in the search loop
      for (int j = 0; j < xi.length(); j++)
        xi(j) = extras(j, besti-gssz);
      if (bestx != xi) {
        *optout << "!!! GSS search() : extras(:," << besti-gssz << ") != bestx ***\n";
      }
    }
  }

  // Search succeeded, so do the update:
  fprev = fX;
  X     = bestx;
  fX    = bestf;
  nlp->setX(X);
  nlp->setF(fX);

  // return the actual idx of the best dir in the ordering [genset | extras];
  // activeID() is taken to return 1-based ids, so extras continue the
  // numbering at gset->size()+1
  int idx;
  if (besti < gssz)
    idx = gset->activeID(besti);
  else
    idx = (besti - gssz) + gset->size() + 1;

  if (debug)
    *optout << "search() done. Best pt idx = " << idx << endl;

  return idx;

} // END search()
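
//--
// Return-value convention of search(), as implemented above: -1 means
// no improving point was found; otherwise the index is 1-based over
// the ordering [genset | extras], i.e. a gset->activeID() value for a
// mesh point, or gset->size()+1, gset->size()+2, ... for extra columns.
//--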

void OptGSS::printStatus(char *s, bool printSoln)
{
  *optout << "\n\n=========  " << s << "  ===========\n\n";
  *optout << "Optimization method       = " << method << "\n";
  *optout << "Dimension of the problem  = " << nlp->getDim() << "\n";
  *optout << "Return code               = " << ret_code << " ("
          << mesg << ")\n";
  *optout << "No. iterations taken      = " << iter_taken  << "\n";
  *optout << "No. iterations allowed    = " << Iter_max    << "\n";
  *optout << "No. function evaluations  = " << nlp->getFevals() << "\n";
  *optout << "Last step length          = " << Delta << "\n";
  *optout << "Last function value       = " << nlp->getF() << "\n";
  *optout << "Norm of last point        = "
          << std::sqrt((nlp->getXc()).dot(nlp->getXc())) << "\n";
  if (nlp1)
    *optout << "Norm of last gradient     = "
            << std::sqrt((nlp1->getGrad()).dot(nlp1->getGrad())) << "\n";

  if (printSoln) {
    *optout << "\n\n=========  " << "Solution"  << "  ===========\n\n";
    *optout << "   i   \t" << "x" << endl;
    for (int i=0; i<gset->vdim(); i++)
      *optout << d(i,5) << "\t" << e(X(i),12,4) << endl;
    *optout << "\n\n";
  }

  tol.printTol(optout);
}

//--
// Prints iteration information to the output file
//
void OptGSS::printIter(int iter, int imp) {

  *optout << d(iter,5) << " " << e(fX,12,4) << "\t"
          << e(Delta,12,4);
  if (nlp1) {
    *optout << "\t" << e(std::sqrt(gX.dot(gX)),12,4); // ||gX||, same width as the other columns
  }

  int nsearched = 0;
  if (iter==1) nsearched = gset->nActive() + extras.numCols();
  *optout << "\t" << d(nsearched,5);

  *optout << "\t" << d(imp,5)
          << "\t" << d(nlp->getFevals(),8);

  if (printXiter) {
    // print the first 3 components of X
    *optout << "\t";
    int k = nlp->getDim();
    if (k>3) k=3;
    for (int i=0; i<k; i++)
      *optout << f(X(i),8,4) << " ";
  }

  if (printGiter && nlp1) {
    // print the first 3 components of gX
    *optout << "\t";
    int k = nlp->getDim();
    if (k>3) k=3;
    for (int i=0; i<k; i++)
      *optout << f(gX(i),8,4) << " ";
  }

  // finish the output line
  *optout << endl;
}

//--
// Prints the output header before iterations begin
void OptGSS::printHeader() {

  if (printCOPYRIGHT) {
    *optout << "************************************************************\n";
    *optout << "OPT++ version " << OPT_GLOBALS::OPT_VERSION << "\n";
    copyright();
    *optout << "************************************************************\n";
  }

  *optout << method << endl
          << "Iter \t\t F(x)\t    ||step||";
  if (nlp1) {
    *optout << "\t||gX||"
            << "\t ndir";
  }
  *optout << "\tbesti\t   fevals \t";
  if (printXiter)         *optout << "\t X(1:3)";
  if (nlp1 && printGiter) *optout << "\t gX(1:3)";
  *optout << "\n\n";

}

} // namespace OPTPP