//------------------------------------------------------------------------
// Copyright (C) 1996:
// J.C. Meza
// Sandia National Laboratories
// meza@california.sandia.gov
//------------------------------------------------------------------------

#ifdef HAVE_CONFIG_H
#include "OPT++_config.h"
#endif

#ifdef HAVE_STD
#include <cstring>
#include <cstdlib>   // exit()
#include <cfloat>    // FLT_MAX
#else
#include <string.h>
#include <stdlib.h>
#include <float.h>
#endif

#include "OptBCNewton.h"
#include "precisio.h"
#include "cblas.h"
#include "ioformat.h"

using NEWMAT::Real;
using NEWMAT::FloatingPointPrecision;
using NEWMAT::ColumnVector;
using NEWMAT::SymmetricMatrix;
using NEWMAT::LowerTriangularMatrix;

namespace OPTPP {

static const char* class_name = "OptBCNewton";

//------------------------------------------------------------------------
// BCNewton functions
// initHessian
// checkDeriv
// checkConvg
// initOpt
// printStatus
// stepTolNorm
// updateH
// computeMaxStep
// computeSearch
// updateConstraints
// reset
//------------------------------------------------------------------------

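// Seed the Hessian with the Hessian stored in the NLP2 problem object.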
void OptBCNewton::initHessian()
{
  if (debug_) *optout << class_name << "::initHessian: \n";
  NLP2* nlp = nlprob2();
  Hessian = nlp->getHess();
  return;
}

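//------------------------------------------------------------------------
// Test for convergence.  Returns 0 if no test is satisfied, otherwise
//   1 = step tolerance, 2 = function tolerance,
//   3 = relative gradient tolerance, 4 = absolute gradient tolerance.
// The gradient tests ignore components of variables in the working set.
//------------------------------------------------------------------------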
int OptBCNewton::checkConvg() // Check convergence
{
  NLP1* nlp = nlprob();
  ColumnVector xc(nlp->getXc());
  int i, n = nlp->getDim();

  // Test 1. step tolerance

  double step_tol = tol.getStepTol();
  double snorm = stepTolNorm();
  double xnorm = Norm2(xc);
  double stol  = step_tol*max(1.0,xnorm);
  if (snorm <= stol) {
    strcpy(mesg,"Step tolerance test passed");
    *optout << "checkConvg: snorm = " << snorm
            << " stol = " << stol << "\n";
    return 1;
  }

  // Test 2. function tolerance

  double ftol = tol.getFTol();
  double fvalue = nlp->getF();
  double rftol = ftol*max(1.0,fabs(fvalue));
  Real deltaf = fprev - fvalue;
  if (deltaf <= rftol) {
    strcpy(mesg,"Function tolerance test passed");
    *optout << "BCNewton::checkConvg: Delta f = " << e(deltaf,12,4)
            << " ftol = " << e(ftol,12,4) << "\n";
    return 2;
  }

  // Test 3. relative gradient tolerance
  // (components belonging to variables in the working set are ignored)

  ColumnVector grad(nlp->getGrad());
  double gtol = tol.getGTol();
  double rgtol = gtol*max(1.0,fabs(fvalue));
  for (i=1; i<=n; i++) if (work_set(i) == true) grad(i) = 0.0;
  double gnorm = Norm2(grad);
  if (gnorm <= rgtol) {
    strcpy(mesg,"Gradient tolerance test passed");
    *optout << "checkConvg: gnorm = " << e(gnorm,12,4)
            << " gtol = " << e(rgtol, 12,4) << "\n";
    return 3;
  }

  // Test 4. absolute gradient tolerance

  if (gnorm <= gtol) {
    strcpy(mesg,"Gradient tolerance test passed");
    *optout << "checkConvg: gnorm = " << e(gnorm,12,4)
            << " gtol = " << e(gtol, 12,4) << "\n";
    return 4;
  }

  // Nothing to report

  return 0;
}

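//------------------------------------------------------------------------
// Compare the analytic derivatives with finite-difference approximations.
// Returns BAD if the analytic and finite-difference Hessians differ by
// more than a tolerance based on machine precision and the gradient norm.
//------------------------------------------------------------------------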
int OptBCNewton::checkDeriv() // Check analytic derivatives against finite differences
{
  NLP2* nlp = nlprob2();
  SymmetricMatrix Hess(dim), FDHess(dim), ErrH(dim);

  int retcode = checkAnalyticFDGrad();
  Real mcheps = FloatingPointPrecision::Epsilon();
  Real third  = 0.3333333;
  double gnorm = nlp->getGrad().NormInfinity();
  double eta   = pow(mcheps,third)*max(1.0,gnorm);
  FDHess = nlp->FDHessian(sx);
  Hess   = nlp->getHess();
  ErrH   = Hess - FDHess;
  Real maxerr = ErrH.NormInfinity();
  if (debug_) {
    *optout << "\nCheck_Deriv: Checking Hessian versus finite-differences\n";
    *optout << "maxerror = " << e(maxerr, 12,4)
            << "  tolerance = " << e(eta, 12,4) << "\n";
  }
  if (maxerr > eta) retcode = BAD;
  return retcode;
}

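// Standard Newton-like initialization, then build the initial working set
// of bound constraints.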
void OptBCNewton::initOpt()
{
  OptBCNewtonLike::initOpt();
  updateConstraints(0);
  return;
}

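// Print a summary of the optimization run.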
void OptBCNewton::printStatus(char *s) // Print status summary
{
  NLP2* nlp = nlprob2();
  *optout << "\n\n========= " << s << " ===========\n\n";
  *optout << "Optimization method = " << method << "\n";
  *optout << "Dimension of the problem = " << nlp->getDim() << "\n";
  *optout << "No. of bound constraints = " << nlp->getDim() << "\n";
  *optout << "Return code = " << ret_code << " ("
          << mesg << ")\n";
  *optout << "No. iterations taken = " << iter_taken << "\n";
  *optout << "No. function evaluations = " << nlp->getFevals() << "\n";
  *optout << "No. gradient evaluations = " << nlp->getGevals() << "\n";

  if (debug_) {
    *optout << "Hessian \n";
    Print(Hessian);
  }

  tol.printTol(optout);

  nlp->fPrintState(optout, s);
}

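// Norm of the most recent scaled step; used by the step tolerance test.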
real OptBCNewton::stepTolNorm() const
{
  NLP2* nlp = nlprob2();
  ColumnVector step(sx.AsDiagonal()*(nlp->getXc() - xprev));
  return Norm2(step);
}

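// For Newton's method the Hessian "update" is simply a fresh evaluation
// of the Hessian.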
SymmetricMatrix OptBCNewton::updateH(SymmetricMatrix&, int)
{
  return nlprob()->evalH();
}

//----------------------------------------------------------------------------
//
// Compute the maximum step allowed along the search direction sk
// before we hit a constraint
//
//---------------------------------------------------------------------------
double OptBCNewton::computeMaxStep(ColumnVector &sk)
{
  NLP2* nlp = nlprob2();
  int i, n = nlp->getDim();
  double gamma=FLT_MAX, delta;
  ColumnVector lower = nlp->getConstraints()->getLower();
  ColumnVector upper = nlp->getConstraints()->getUpper();
  ColumnVector xc    = nlp->getXc();

  double snorm = Norm2(sk);
  double feas_tol = 1.e-3;

  for (i=1; i<=n; i++) {
    if (work_set(i) == false) {
      delta = FLT_MAX;   // no bound limits this coordinate unless set below
      if (sk(i) > 0.0e0) {
        delta = (upper(i)-xc(i)) / sk(i);
        if (delta <= feas_tol) {
          if (debug_)
            *optout << "Hit an upper constraint for variable " << i << "\n";
        }
      }
      else if (sk(i) < 0.0e0) {
        delta = (lower(i)-xc(i)) / sk(i);
        if (delta <= feas_tol) {
          if (debug_)
            *optout << "Hit a lower constraint for variable " << i << "\n";
        }
      }
      gamma = min(gamma,delta);
    }
  }
  if (debug_)
    *optout << "computeMaxStep: maximum step allowed = " << gamma*snorm << "\n";
  return gamma*snorm;
}

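//------------------------------------------------------------------------
// Compute a Newton step in the free variables: form the Hessian and
// gradient projected onto the variables not in the working set, solve
// H1 * sk2 = -gg with a modified Cholesky factorization, and scatter the
// result back into a full-length search direction.
//------------------------------------------------------------------------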
ColumnVector OptBCNewton::computeSearch(SymmetricMatrix &H)
{
  NLP2* nlp = nlprob2();
  int i, j, ncnt=0, *index_array, n = nlp->getDim();

  ColumnVector gg(n), sk2(n), sk(n);
  SymmetricMatrix H1;
  LowerTriangularMatrix L;

  // set up index_array to count the number of free variables

  index_array = new int[n+1];
  for (i=1; i<=n; i++) index_array[i] = 0;
  for (i=1; i<=n; i++)
    if (work_set(i) == false) index_array[i] = ++ncnt;
  if (ncnt != (n-nactive)) {
    *optout << "computeSearch: number of free variables does not match n - nactive \n";
    exit(-1);
  }

  // Form the projected Hessian

  H1.ReSize(ncnt);
  for (i=1; i<=n; i++) {
    for (j=1; j<=n; j++)
      if (index_array[i] != 0 && index_array[j] != 0)
        H1(index_array[i],index_array[j]) = H(i,j);
  }

  // Form the projected gradient

  gg.ReSize(ncnt,1);
  for (i=1; i<=n; i++)
    if (index_array[i] != 0) gg(index_array[i]) = gprev(i);

  // Solve (H1 * sk2 = - gg) for the projected search direction sk2

  L.ReSize(ncnt);
  sk2.ReSize(ncnt,1);
  if (ncnt == 1) sk2(1) = - gg(1) / H1(1,1);
  else {
    L = MCholesky(H1);
    sk2 = -(L.t().i()*(L.i()*gg));
  }

  // Form the full search direction sk from the projected direction sk2

  for (i=1; i<=n; i++) sk(i) = 0.0;
  for (i=1; i<=n; i++)
    if (index_array[i] != 0) sk(i) = sk2(index_array[i]);

  // Clean up and return

  delete [] index_array;
  return sk;
}

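//------------------------------------------------------------------------
// Update the working set of bound constraints.  Variables that have
// reached a bound are added to the working set.  When the inner
// iteration has converged (or step_type < 0), the bound variable whose
// gradient component points most strongly back into the feasible region
// is released.  Returns 1 if a variable was released, -1 if none could
// be released, and 0 otherwise.
//------------------------------------------------------------------------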
int OptBCNewton::updateConstraints(int step_type)
{
  NLP2* nlp = nlprob2();
  int n = nlp->getDim(), ret_flag=0;
  int i, j, j2, k, *new_active, actcnt=0, notnew;
  double reduced_grad_norm, feas_tol=1.0e-12;
  ColumnVector lower(n), upper(n), xc(n), gg(n);

  // initialization

  lower = nlp->getConstraints()->getLower();
  upper = nlp->getConstraints()->getUpper();
  xc    = nlp->getXc();
  new_active = new int[n];

  // cpjw - Use current gradient info or rather gradient at xc
  gg = nlp->evalG(xc);

  // Add variables that have reached a bound to the working set

  for (i=1; i<=n; i++) {
    if ((fabs(upper(i)-xc(i))<feas_tol) || (fabs(lower(i)-xc(i))<feas_tol)) {
      if (work_set(i) == false) {
        new_active[actcnt++] = i; work_set(i) = true; nactive++;
        *optout << "OptBCNewton : variable added to working set : " << i << "\n";
      }
    }
  }

  // Delete variables from the working set
  // First compute the norm of the reduced gradient

  int jdel=0;
  double ggdel=0;

  //gg = nlp->getGrad();
  for (i=1; i<=n; i++) if (work_set(i) == true) gg(i) = 0.0;
  reduced_grad_norm = Norm2(gg);
  if (m_nconvgd > 0 || step_type < 0) {
    gg = nlp->getGrad();
    ret_flag = -1;
    *optout << "OptBCNewton : reduced_grad_norm = " << reduced_grad_norm << "\n";

    // Among the variables at a bound (excluding those just added), find the
    // one whose gradient component points most strongly back into the interior

    for (i=1; i<=n; i++) {
      notnew = true;
      for (j=0; j<actcnt; j++) if (new_active[j] == i) notnew = false;
      if (work_set(i) == true && notnew)
        if (((fabs(upper(i)-xc(i))<feas_tol) && gg(i)>feas_tol) ||
            ((fabs(lower(i)-xc(i))<feas_tol) && gg(i)<(-feas_tol))) {
          if (fabs(gg(i)) > ggdel) {jdel = i; ggdel = fabs(gg(i)); }
        }
    }
    if (jdel != 0) {
      work_set(jdel) = false; nactive--;
      *optout << "OptBCNewton : variable deleted from working set : " << jdel << "\n";
      ret_flag = 1;
    }
  }

  // Print the current working set, ten variables per line

  if (nactive > 0) *optout << "OptBCNewton: Current working set \n";
  k = 1;
  for (i=1; i<=nactive; i+=10) {
    *optout << " ----- variable index: ";
    j2 = min(i+9,nactive);
    for (j=i; j<=j2; j++) {
      while (work_set(k) == false) k++;
      *optout << d(k,6) << "\t" << xc(k); k++;
    }
    *optout << "\n ";
  }

  delete [] new_active;
  return ret_flag;
}

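// Reset the problem and optimizer state and clear the working set.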
void OptBCNewton::reset()
{
  NLP1* nlp = nlprob();
  int n = nlp->getDim();
  nlp->reset();
  OptimizeClass::defaultReset(n);
  nactive  = 0;
  work_set = false;
}

} // namespace OPTPP