/*  _________________________________________________________________________
 *
 *  Acro: A Common Repository for Optimizers
 *  Copyright (c) 2008 Sandia Corporation.
 *  This software is distributed under the BSD License.
 *  Under the terms of Contract DE-AC04-94AL85000 with Sandia Corporation,
 *  the U.S. Government retains certain rights in this software.
 *  For more information, see the README.txt file in the top Acro directory.
 *  _________________________________________________________________________
 */

//
// GradientDescent.cpp
//

#include <acro_config.h>

#include <colin/solver/GradientDescent.h>
#include <colin/SolverMngr.h>

#include <utilib/_math.h>

namespace colin
{

namespace StaticInitializers {

namespace {

bool RegisterGradientDescent()
{
   SolverMngr().declare_solver_type<GradientDescent>
      ("colin:GradientDescent", "A simple gradient descent local search");

   SolverMngr().declare_solver_type<GradientDescent>
      ("colin:gd", "An alias to colin:GradientDescent");

   return true;
}

} // namespace colin::StaticInitializers::(local)
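// Referencing RegisterGradientDescent() through the extern volatile flag
// below forces the registration to run during static initialization, so the
// "colin:GradientDescent" and "colin:gd" names are known to the SolverMngr
// as soon as this library is loaded.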
extern const volatile bool gradient_descent = RegisterGradientDescent();

} // namespace colin::StaticInitializers

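// The constructor hooks reset_GradientDescent() into the base solver's reset
// signal; bound information is assumed absent (bc_flag = false) until the
// first reset occurs.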
GradientDescent::GradientDescent()
{
   this->reset_signal.connect
      ( boost::bind( &GradientDescent::reset_GradientDescent, this ) );

   bc_flag = false;
}

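// Reset hook: when the problem enforces domain bounds, cache its real-valued
// lower and upper bounds in rlower and rupper for use during the search.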
void GradientDescent::reset_GradientDescent()
{
   if ( problem.empty() )
      return;

   bc_flag = problem->enforcing_domain_bounds;
   if (bc_flag)
   {
      utilib::TypeManager()->
         lexical_cast(problem->real_lower_bounds.get(), rlower);
      utilib::TypeManager()->
         lexical_cast(problem->real_upper_bounds.get(), rupper);
   }
}


void GradientDescent::optimize()
{
   //
   // Misc initialization of the optimizer
   //
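   // A non-positive max_iters is treated as "no iteration limit"; otherwise
   // the solver runs at most max_iters additional iterations.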
   unsigned int num_iters;
   if (max_iters <= 0)
      num_iters = MAXINT;
   else
      num_iters = curr_iter + max_iters;
   //
   // Setup initial point
   //
   utilib::BasicArray<double> tmp_pt(problem->num_real_vars.as<size_t>());
   utilib::BasicArray<double> best_pt(problem->num_real_vars.as<size_t>());
   best_pt << initial_point;
   //
   // Evaluate the initial point
   //
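   // One request bundles the objective (F) and gradient (G), so a single
   // call to the evaluation manager fills both best().value() and grad.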
   colin::AppRequest request = problem->set_domain(best_pt);
   problem->Request_F(request, best().value());
   problem->Request_G(request, grad);
   best().response = eval_mngr().perform_evaluation(request);
   debug_io(ucout);
   //
   // Iterate
   //
   real tmp_value;
   colin::AppResponse tmp_response;
   double step = 1.0;
   for (curr_iter++; curr_iter <= num_iters; curr_iter++)
   {
      //
      // Determine if the algorithm is finished
      //
      if (check_convergence())
         break;
      //
      // Step in the direction of the negative gradient until we find an
      // improving step.
      //
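      // Adaptive step rule: if the first trial point (at the current step
      // length) does not improve on the incumbent, repeatedly halve the step
      // (contraction); otherwise repeatedly double it (expansion) until the
      // objective worsens, then back off by half.  A step above 1e7 accepts
      // the current trial point; a step below 1e-16 succeeds only if the
      // trial point improves.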
      bool success = false;
      real tmp_fval;
      tmp_pt << best_pt;
      for (size_t i = 0; i < tmp_pt.size(); i++)
         tmp_pt[i] = best_pt[i] - grad[i]*step;
      problem->EvalF(eval_mngr(), tmp_pt, tmp_fval);
      // Contract if the initial trial step did not improve on the incumbent;
      // otherwise expand.
      bool contracting = (tmp_fval > best().value());
      while (true) {
         if (contracting)
            step /= 2.0;
         else
            step *= 2.0;
         if (step > 10e6)  { success = true; break; }
         if (step < 1e-16) { success = (tmp_fval < best().value()); break; }
         tmp_pt << best_pt;
         for (size_t i = 0; i < tmp_pt.size(); i++)
            tmp_pt[i] = best_pt[i] - grad[i]*step;
         problem->EvalF(eval_mngr(), tmp_pt, tmp_fval);
         DEBUGPR(100, ucout << "Evaluating point " << tmp_pt << " value=" << tmp_fval);
         if (contracting && (tmp_fval < best().value())) {
            success = true;
            break;
         }
         if (!contracting && (tmp_fval > best().value())) {
            // Overshot while expanding: fall back to the last improving step.
            step /= 2.0;
            tmp_pt << best_pt;
            for (size_t i = 0; i < tmp_pt.size(); i++)
               tmp_pt[i] = best_pt[i] - grad[i]*step;
            success = true;
            break;
         }
      }
      if (!success) {
         std::stringstream tmp;
         tmp << "Unsuccessful line search: FinalStep=" << step;
         solver_status.termination_info = tmp.str();
         break;
      }
      best_pt << tmp_pt;
      colin::AppRequest request = problem->set_domain(best_pt);
      problem->Request_F(request, best().value());
      problem->Request_G(request, grad);
      best().response = eval_mngr().perform_evaluation(request);
      DEBUGPR(100, ucout << "New point " << best_pt << " value="
              << best().value() << " grad=" << grad);
      //
      // Debugging IO
      //
      debug_io(ucout);
   }

   best().point = best_pt;
   debug_io(ucout, true);
   ucout << utilib::Flush;
}

///
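/// Check whether the solver should stop: defer to the base-class convergence
/// test first, then terminate when the Euclidean norm of the current gradient
/// drops to 1e-8 or below.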
bool GradientDescent::check_convergence()
{
   if ( ColinSolver<utilib::BasicArray<double>, UNLP1_problem >
        ::check_convergence() )
      return true;

   real norm = length(grad);
   if (norm <= 1e-8)
   {
      std::stringstream tmp;
      tmp << "Grad-Norm Norm=" << norm << "<="
          << 1e-8 << "=Norm_thresh";
      solver_status.termination_info = tmp.str();
      return true;
   }
   return false;
}

std::string GradientDescent::define_solver_type() const
{
   return "GradientDescent";
}

} // namespace colin
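/*
 * Illustrative sketch (not part of the library build): the contract/expand
 * step rule used in optimize() above, applied to a one-dimensional quadratic
 * in plain C++ with no COLIN dependencies.  The names toy_f and toy_grad are
 * hypothetical and exist only for this example, and the acceptance tests are
 * simplified relative to the solver above.
 *
 *   #include <cmath>
 *   #include <iostream>
 *
 *   static double toy_f(double x)    { return 0.3 * (x - 3.0) * (x - 3.0); }
 *   static double toy_grad(double x) { return 0.6 * (x - 3.0); }
 *
 *   int main()
 *   {
 *      double x = 0.0;        // current iterate
 *      double step = 1.0;     // step length persists across iterations
 *      for (int iter = 0; iter < 50; ++iter) {
 *         double g = toy_grad(x);
 *         if (std::fabs(g) <= 1e-8) break;       // gradient-norm test
 *         double trial = x - g * step;
 *         double fval  = toy_f(trial);
 *         // Contract if the first trial did not improve, else expand.
 *         bool contracting = (fval > toy_f(x));
 *         while (true) {
 *            step = contracting ? step / 2.0 : step * 2.0;
 *            if (step > 10e6 || step < 1e-16) break;
 *            trial = x - g * step;
 *            fval  = toy_f(trial);
 *            if (contracting && fval < toy_f(x)) break;
 *            if (!contracting && fval > toy_f(x)) {
 *               step /= 2.0;                     // back off to the last improving step
 *               trial = x - g * step;
 *               break;
 *            }
 *         }
 *         if (toy_f(trial) < toy_f(x)) x = trial;  // accept the improving point
 *         else break;                              // line search failed
 *      }
 *      std::cout << "approximate minimizer: " << x << std::endl;
 *      return 0;
 *   }
 */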