//------------------------------------------------------------------------
// Copyright (C) 1993:
// J.C. Meza
// Sandia National Laboratories
// meza@california.sandia.gov
//------------------------------------------------------------------------

#ifdef HAVE_CONFIG_H
#include "OPT++_config.h"
#endif

#ifdef HAVE_STD
#include <cstring>
#else
#include <string.h>
#endif

#include "OptConstrNewton.h"
#include <float.h>   // DBL_EPSILON
#include <cmath>     // pow, sqrt
#include "cblas.h"
#include "ioformat.h"

using Teuchos::SerialDenseVector;
using Teuchos::SerialSymDenseMatrix;

namespace OPTPP {

//------------------------------------------------------------------------
//
// Newton Method member functions
//
// checkDeriv()
// initHessian()
// printStatus()
// stepTolNorm()
// updateH()
//------------------------------------------------------------------------
// Check the analytic gradient and Hessian against finite-difference
// approximations
int OptConstrNewton::checkDeriv()
{
  NLP2* nlp = nlprob2();
  int retcode = checkAnalyticFDGrad();

  // Tolerance for the Hessian check: mcheps^(1/3) * max(1, ||grad||_inf)
  double mcheps = DBL_EPSILON;
  double third  = 0.3333333;
  double gnorm  = nlp->getGrad().normInf();
  double eta    = pow(mcheps, third)*max(1.0, gnorm);

  *optout << "\nCheck_Deriv: Checking Hessian versus finite-differences\n";

  SerialSymDenseMatrix<int,double> Hess(dim), FDHess(dim), ErrH(dim);
  FDHess = nlp->FDHessian(sx);
  Hess   = nlp->getHess();
  ErrH   = Hess;
  ErrH  -= FDHess;
  Print(ErrH);

  double maxerr = ErrH.normInf();
  *optout << "maxerror = "  << e(maxerr, 12, 4)
          << " tolerance = " << e(eta, 12, 4) << "\n";
  if (maxerr > eta) retcode = false;

  return retcode;
}
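// Illustrative sketch (not original OPT++ code): the tolerance used in
// checkDeriv() follows the standard finite-difference heuristic that the
// combined truncation and roundoff error of an FD derivative comparison
// scales roughly with the cube root of machine epsilon, inflated by the
// gradient magnitude.  The helper name fdHessianCheckTol is hypothetical
// and only restates the formula for eta above.
static double fdHessianCheckTol(double gradInfNorm)
{
  double mcheps = DBL_EPSILON;                        // machine precision
  double scale  = (gradInfNorm > 1.0) ? gradInfNorm : 1.0;
  return pow(mcheps, 1.0/3.0) * scale;                // eta in checkDeriv()
}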

// Load the initial Hessian from the analytic Hessian held by the
// NLP2 problem
void OptConstrNewton::initHessian()
{
  if (debug_) *optout << "OptConstrNewton::initHessian: \n";
  NLP2* nlp = nlprob2();
  Hessian = nlp->getHess();
  return;
}

void OptConstrNewton::printStatus(char *title) // set Message
{
  NLP1* nlp = nlprob();

  *optout << "\n\n========= " << title << " ===========\n\n";
  *optout << "Optimization method      = " << method << "\n";
  *optout << "Dimension of the problem = " << nlp->getDim() << "\n";
  *optout << "Return code              = " << ret_code << " ("
          << mesg << ")\n";
  *optout << "No. iterations taken     = " << iter_taken << "\n";
  *optout << "No. function evaluations = " << nlp->getFevals() << "\n";
  *optout << "No. gradient evaluations = " << nlp->getGevals() << "\n";

  if (debug_) {
    *optout << "Hessian \n";
    Print(Hessian);
  }

  tol.printTol(optout);

  nlp->fPrintState(optout, title);
}

// Norm of the most recent step, scaled componentwise by sx:
// || diag(sx) * (xc - xprev) ||_2
real OptConstrNewton::stepTolNorm() const
{
  NLP1* nlp = nlprob();
  // SerialDenseVector<int,double> step(sx.AsDiagonal()*(nlp->getXc() - xprev));
  // return Norm2(step);
  SerialDenseVector<int,double> tmp(nlp->getXc().length());
  tmp  = nlp->getXc();
  tmp -= xprev;
  SerialDenseVector<int,double> step(tmp.length());
  for (int i = 0; i < tmp.length(); i++)
    step(i) = tmp(i)*sx(i);
  return sqrt(step.dot(step));
}
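
// Illustrative sketch (not original OPT++ code): stepTolNorm() above reduces
// to the following computation on plain arrays.  The helper name
// scaledStepNorm is hypothetical and exists only to restate the formula
// || diag(sx) * (xc - xprev) ||_2 without the Teuchos vector machinery.
static double scaledStepNorm(const double* xc, const double* xprev,
                             const double* sx, int n)
{
  double sum = 0.0;
  for (int i = 0; i < n; i++) {
    double si = sx[i]*(xc[i] - xprev[i]);   // scaled step component
    sum += si*si;
  }
  return sqrt(sum);
}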
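// Note (added commentary): unlike the quasi-Newton solvers, which build an
// approximate Hessian from secant updates, the Newton method re-evaluates
// the analytic Hessian at every iteration, so updateH() ignores both of
// its arguments.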
SerialSymDenseMatrix<int,double> OptConstrNewton::updateH(SerialSymDenseMatrix<int,double>&, int)
{
  return nlprob2()->evalH();
}

} // namespace OPTPP