/*
 *    This file is part of CasADi.
 *
 *    CasADi -- A symbolic framework for dynamic optimization.
 *    Copyright (C) 2010-2014 Joel Andersson, Joris Gillis, Moritz Diehl,
 *                            K.U. Leuven. All rights reserved.
 *    Copyright (C) 2011-2014 Greg Horn
 *
 *    CasADi is free software; you can redistribute it and/or
 *    modify it under the terms of the GNU Lesser General Public
 *    License as published by the Free Software Foundation; either
 *    version 3 of the License, or (at your option) any later version.
 *
 *    CasADi is distributed in the hope that it will be useful,
 *    but WITHOUT ANY WARRANTY; without even the implied warranty of
 *    MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE.  See the GNU
 *    Lesser General Public License for more details.
 *
 *    You should have received a copy of the GNU Lesser General Public
 *    License along with CasADi; if not, write to the Free Software
 *    Foundation, Inc., 51 Franklin Street, Fifth Floor, Boston, MA  02110-1301  USA
 *
 */


#include "nlpsol_impl.hpp"
#include "external.hpp"
#include "casadi/core/timing.hpp"
#include "nlp_builder.hpp"

using namespace std;
namespace casadi {

  bool has_nlpsol(const string& name) {
    return Nlpsol::has_plugin(name);
  }

  void load_nlpsol(const string& name) {
    Nlpsol::load_plugin(name);
  }

  string doc_nlpsol(const string& name) {
    return Nlpsol::getPlugin(name).doc;
  }

  Function nlpsol(const string& name, const string& solver,
                  const SXDict& nlp, const Dict& opts) {
    return nlpsol(name, solver, Nlpsol::create_oracle(nlp, opts), opts);
  }

  Function nlpsol(const string& name, const string& solver,
                  const MXDict& nlp, const Dict& opts) {
    return nlpsol(name, solver, Nlpsol::create_oracle(nlp, opts), opts);
  }
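
  // Usage sketch (not part of the implementation; assumes an NLP solver
  // plugin such as 'ipopt' has been loaded):
  //   SX x = SX::sym("x", 2);
  //   SXDict nlp = {{"x", x}, {"f", dot(x, x)}, {"g", x(0) + x(1)}};
  //   Function solver = nlpsol("solver", "ipopt", nlp);
  //   DMDict res = solver(DMDict{{"lbg", 1}, {"ubg", 1}});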

  template<typename XType>
  Function Nlpsol::create_oracle(const std::map<std::string, XType>& d,
                                 const Dict& opts) {
    std::vector<XType> nl_in(NL_NUM_IN), nl_out(NL_NUM_OUT);
    for (auto&& i : d) {
      if (i.first=="x") {
        nl_in[NL_X]=i.second;
      } else if (i.first=="p") {
        nl_in[NL_P]=i.second;
      } else if (i.first=="f") {
        nl_out[NL_F]=i.second;
      } else if (i.first=="g") {
        nl_out[NL_G]=i.second;
      } else {
        casadi_error("No such field: " + i.first);
      }
    }
    if (nl_out[NL_F].is_empty()) nl_out[NL_F] = 0;
    if (nl_out[NL_G].is_empty()) nl_out[NL_G] = XType(0, 1);

    // Options for the oracle
    Dict oracle_options;
    Dict::const_iterator it = opts.find("oracle_options");
    if (it!=opts.end()) {
      // "oracle_options" has been set
      oracle_options = it->second;
    } else if ((it=opts.find("verbose")) != opts.end()) {
      // "oracle_options" has not been set, but "verbose" has
      oracle_options["verbose"] = it->second;
    }

    // Create oracle
    return Function("nlp", nl_in, nl_out, NL_INPUTS, NL_OUTPUTS, oracle_options);
  }

  Function nlpsol(const std::string& name, const std::string& solver,
                  const NlpBuilder& nl, const Dict& opts) {
    MXDict nlp;
    nlp["x"] = vertcat(nl.x);
    nlp["f"] = nl.f;
    nlp["g"] = vertcat(nl.g);
    return nlpsol(name, solver, nlp, opts);
  }

  Function nlpsol(const std::string& name, const std::string& solver,
                  const std::string& fname, const Dict& opts) {
    // If fname ends with .c, JIT
    if (fname.size()>2 && fname.compare(fname.size()-2, 2, ".c")==0) {
      Importer compiler(fname, "clang");
      return nlpsol(name, solver, compiler, opts);
    } else {
      return nlpsol(name, solver, external("nlp", fname), opts);
    }
  }
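
  // Usage sketch (hypothetical file names): an oracle exported as C source is
  // JIT-compiled via Importer, anything else is treated as a shared library:
  //   Function s1 = nlpsol("solver", "ipopt", "nlp.c");    // JIT with clang
  //   Function s2 = nlpsol("solver", "ipopt", "./nlp.so"); // via external()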

  Function nlpsol(const string& name, const string& solver,
                  const Importer& compiler, const Dict& opts) {
    return nlpsol(name, solver, external("nlp", compiler), opts);
  }

  Function nlpsol(const string& name, const string& solver,
                  const Function& nlp, const Dict& opts) {
    // Make sure that nlp is sound
    if (nlp.has_free()) {
      casadi_error("Cannot create '" + name + "' since " + str(nlp.get_free()) + " are free.");
    }
    return Function::create(Nlpsol::instantiate(name, solver, nlp), opts);
  }

  vector<string> nlpsol_in() {
    vector<string> ret(nlpsol_n_in());
    for (size_t i=0; i<ret.size(); ++i) ret[i]=nlpsol_in(i);
    return ret;
  }

  vector<string> nlpsol_out() {
    vector<string> ret(nlpsol_n_out());
    for (size_t i=0; i<ret.size(); ++i) ret[i]=nlpsol_out(i);
    return ret;
  }

  double nlpsol_default_in(casadi_int ind) {
    switch (ind) {
    case NLPSOL_LBX:
    case NLPSOL_LBG:
      return -std::numeric_limits<double>::infinity();
    case NLPSOL_UBX:
    case NLPSOL_UBG:
      return std::numeric_limits<double>::infinity();
    default:
      return 0;
    }
  }

  std::vector<double> nlpsol_default_in() {
    vector<double> ret(nlpsol_n_in());
    for (size_t i=0; i<ret.size(); ++i) ret[i]=nlpsol_default_in(i);
    return ret;
  }

  string nlpsol_in(casadi_int ind) {
    switch (static_cast<NlpsolInput>(ind)) {
    case NLPSOL_X0:     return "x0";
    case NLPSOL_P:      return "p";
    case NLPSOL_LBX:    return "lbx";
    case NLPSOL_UBX:    return "ubx";
    case NLPSOL_LBG:    return "lbg";
    case NLPSOL_UBG:    return "ubg";
    case NLPSOL_LAM_X0: return "lam_x0";
    case NLPSOL_LAM_G0: return "lam_g0";
    case NLPSOL_NUM_IN: break;
    }
    return string();
  }

  string nlpsol_out(casadi_int ind) {
    switch (static_cast<NlpsolOutput>(ind)) {
    case NLPSOL_X:     return "x";
    case NLPSOL_F:     return "f";
    case NLPSOL_G:     return "g";
    case NLPSOL_LAM_X: return "lam_x";
    case NLPSOL_LAM_G: return "lam_g";
    case NLPSOL_LAM_P: return "lam_p";
    case NLPSOL_NUM_OUT: break;
    }
    return string();
  }

  casadi_int nlpsol_n_in() {
    return NLPSOL_NUM_IN;
  }

  casadi_int nlpsol_n_out() {
    return NLPSOL_NUM_OUT;
  }

  Nlpsol::Nlpsol(const std::string& name, const Function& oracle)
    : OracleFunction(name, oracle) {

    // Set default options
    callback_step_ = 1;
    eval_errors_fatal_ = false;
    warn_initial_bounds_ = false;
    iteration_callback_ignore_errors_ = false;
    print_time_ = true;
    calc_multipliers_ = false;
    bound_consistency_ = false;
    min_lam_ = 0;
    calc_lam_x_ = calc_f_ = calc_g_ = false;
    calc_lam_p_ = true;
    no_nlp_grad_ = false;
    error_on_fail_ = false;
    sens_linsol_ = "qr";
  }

  Nlpsol::~Nlpsol() {
    clear_mem();
  }

  bool Nlpsol::is_a(const std::string& type, bool recursive) const {
    return type=="Nlpsol" || (recursive && OracleFunction::is_a(type, recursive));
  }

  Sparsity Nlpsol::get_sparsity_in(casadi_int i) {
    switch (static_cast<NlpsolInput>(i)) {
    case NLPSOL_X0:
    case NLPSOL_LBX:
    case NLPSOL_UBX:
    case NLPSOL_LAM_X0:
      return get_sparsity_out(NLPSOL_X);
    case NLPSOL_LBG:
    case NLPSOL_UBG:
    case NLPSOL_LAM_G0:
      return get_sparsity_out(NLPSOL_G);
    case NLPSOL_P:
      return oracle_.sparsity_in(NL_P);
    case NLPSOL_NUM_IN: break;
    }
    return Sparsity();
  }

  Sparsity Nlpsol::get_sparsity_out(casadi_int i) {
    switch (static_cast<NlpsolOutput>(i)) {
    case NLPSOL_F:
      return oracle_.sparsity_out(NL_F);
    case NLPSOL_X:
    case NLPSOL_LAM_X:
      return oracle_.sparsity_in(NL_X);
    case NLPSOL_LAM_G:
    case NLPSOL_G:
      return oracle_.sparsity_out(NL_G);
    case NLPSOL_LAM_P:
      return get_sparsity_in(NLPSOL_P);
    case NLPSOL_NUM_OUT: break;
    }
    return Sparsity();
  }

  const Options Nlpsol::options_
  = {{&OracleFunction::options_},
     {{"iteration_callback",
       {OT_FUNCTION,
        "A function that will be called at each iteration with the solver as input. "
        "Check documentation of Callback."}},
      {"iteration_callback_step",
       {OT_INT,
        "Only call the callback function every few iterations."}},
      {"iteration_callback_ignore_errors",
       {OT_BOOL,
        "If set to true, errors thrown by iteration_callback will be ignored."}},
      {"ignore_check_vec",
       {OT_BOOL,
        "If set to true, the input shape of F will not be checked."}},
      {"warn_initial_bounds",
       {OT_BOOL,
        "Warn if the initial guess does not satisfy LBX and UBX"}},
      {"eval_errors_fatal",
       {OT_BOOL,
        "When errors occur during evaluation of f, g, ..., "
        "stop the iterations"}},
      {"verbose_init",
       {OT_BOOL,
        "Print out timing information about "
        "the different stages of initialization"}},
      {"discrete",
       {OT_BOOLVECTOR,
        "Indicates which of the variables are discrete, i.e. integer-valued"}},
      {"calc_multipliers",
       {OT_BOOL,
        "Calculate Lagrange multipliers in the Nlpsol base class"}},
      {"calc_lam_x",
       {OT_BOOL,
        "Calculate 'lam_x' in the Nlpsol base class"}},
      {"calc_lam_p",
       {OT_BOOL,
        "Calculate 'lam_p' in the Nlpsol base class"}},
      {"calc_f",
       {OT_BOOL,
        "Calculate 'f' in the Nlpsol base class"}},
      {"calc_g",
       {OT_BOOL,
        "Calculate 'g' in the Nlpsol base class"}},
      {"no_nlp_grad",
       {OT_BOOL,
        "Prevent the creation of the 'nlp_grad' function"}},
      {"bound_consistency",
       {OT_BOOL,
        "Ensure that primal-dual solution is consistent with the bounds"}},
      {"min_lam",
       {OT_DOUBLE,
        "Minimum allowed multiplier value"}},
      {"oracle_options",
       {OT_DICT,
        "Options to be passed to the oracle function"}},
      {"error_on_fail",
       {OT_BOOL,
        "When the numerical process returns unsuccessfully, raise an error (default false)."}},
      {"sens_linsol",
       {OT_STRING,
        "Linear solver used for parametric sensitivities (default 'qr')."}},
      {"sens_linsol_options",
       {OT_DICT,
        "Linear solver options used for parametric sensitivities."}}
     }
  };
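
  // Option usage sketch (illustrative values; 'nlp' as in the examples above):
  //   Dict opts;
  //   opts["calc_lam_p"] = false;        // skip parametric multipliers
  //   opts["bound_consistency"] = true;  // project solution onto the bounds
  //   opts["oracle_options"] = Dict{{"verbose", true}};
  //   Function solver = nlpsol("solver", "ipopt", nlp, opts);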

  void Nlpsol::init(const Dict& opts) {
    // Call the initialization method of the base class
    OracleFunction::init(opts);

    // Read options
    for (auto&& op : opts) {
      if (op.first=="iteration_callback") {
        fcallback_ = op.second;
      } else if (op.first=="iteration_callback_step") {
        callback_step_ = op.second;
      } else if (op.first=="eval_errors_fatal") {
        eval_errors_fatal_ = op.second;
      } else if (op.first=="warn_initial_bounds") {
        warn_initial_bounds_ = op.second;
      } else if (op.first=="iteration_callback_ignore_errors") {
        iteration_callback_ignore_errors_ = op.second;
      } else if (op.first=="discrete") {
        discrete_ = op.second;
      } else if (op.first=="calc_multipliers") {
        calc_multipliers_ = op.second;
      } else if (op.first=="calc_lam_x") {
        calc_lam_x_ = op.second;
      } else if (op.first=="calc_lam_p") {
        calc_lam_p_ = op.second;
      } else if (op.first=="calc_f") {
        calc_f_ = op.second;
      } else if (op.first=="calc_g") {
        calc_g_ = op.second;
      } else if (op.first=="no_nlp_grad") {
        no_nlp_grad_ = op.second;
      } else if (op.first=="bound_consistency") {
        bound_consistency_ = op.second;
      } else if (op.first=="min_lam") {
        min_lam_ = op.second;
      } else if (op.first=="error_on_fail") {
        error_on_fail_ = op.second;
      } else if (op.first=="sens_linsol") {
        sens_linsol_ = op.second.to_string();
      } else if (op.first=="sens_linsol_options") {
        sens_linsol_options_ = op.second;
      }
    }

    // Deprecated option
    if (calc_multipliers_) {
      calc_lam_x_ = true;
      calc_lam_p_ = true;
    }

    // Get dimensions
    nx_ = nnz_out(NLPSOL_X);
    np_ = nnz_in(NLPSOL_P);
    ng_ = nnz_out(NLPSOL_G);

    // No need to calculate non-existent quantities
    if (np_==0) calc_lam_p_ = false;
    if (ng_==0) calc_g_ = false;

    // Consistency check
    if (no_nlp_grad_) {
      casadi_assert(!calc_lam_p_, "Options 'no_nlp_grad' and 'calc_lam_p' inconsistent");
      casadi_assert(!calc_lam_x_, "Options 'no_nlp_grad' and 'calc_lam_x' inconsistent");
      casadi_assert(!calc_f_, "Options 'no_nlp_grad' and 'calc_f' inconsistent");
      casadi_assert(!calc_g_, "Options 'no_nlp_grad' and 'calc_g' inconsistent");
    }

    // Dimension checks
    casadi_assert(sparsity_out_.at(NLPSOL_G).is_dense()
                  && sparsity_out_.at(NLPSOL_G).is_vector(),
        "Expected a dense vector 'g', but got " + sparsity_out_.at(NLPSOL_G).dim(true) + ".");

    casadi_assert(sparsity_out_.at(NLPSOL_F).is_dense(),
        "Expected a dense 'f', but got " + sparsity_out_.at(NLPSOL_F).dim(true) + ".");

    casadi_assert(sparsity_out_.at(NLPSOL_X).is_dense()
                  && sparsity_out_.at(NLPSOL_X).is_vector(),
        "Expected a dense vector 'x', but got " + sparsity_out_.at(NLPSOL_X).dim(true) + ".");

    // Discrete marker
    mi_ = false;
    if (!discrete_.empty()) {
      casadi_assert(discrete_.size()==nx_, "\"discrete\" option has wrong length");
      if (std::find(discrete_.begin(), discrete_.end(), true)!=discrete_.end()) {
        casadi_assert(integer_support(),
                      "Discrete variables require a solver with integer support");
        mi_ = true;
      }
    }

    set_nlpsol_prob();

    // Allocate memory
    casadi_int sz_w, sz_iw;
    casadi_nlpsol_work(&p_nlp_, &sz_iw, &sz_w);
    alloc_iw(sz_iw, true);
    alloc_w(sz_w, true);

    if (!fcallback_.is_null()) {
      // Consistency checks
      casadi_assert_dev(!fcallback_.is_null());
      casadi_assert(fcallback_.n_out()==1 && fcallback_.numel_out()==1,
        "Callback function must return a scalar.");
      casadi_assert(fcallback_.n_in()==n_out_,
        "Callback input signature must match the NLP solver output signature");
      for (casadi_int i=0; i<n_out_; ++i) {
        // Ignore empty arguments
        if (fcallback_.sparsity_in(i).is_empty()) continue;
        casadi_assert(fcallback_.size_in(i)==size_out(i),
          "Callback function input size mismatch. For argument '" + nlpsol_out(i) + "', "
          "callback has shape " + fcallback_.sparsity_in(i).dim() + " while NLP has " +
          sparsity_out_.at(i).dim() + ".");
        // TODO(@jaeandersson): Wrap fcallback_ in a function with correct sparsity
        casadi_assert(fcallback_.sparsity_in(i)==sparsity_out_.at(i),
          "Callback function input sparsity mismatch. "
          "For argument '" + nlpsol_out(i) + "', callback has shape " +
          fcallback_.sparsity_in(i).dim() + " while NLP has " +
          sparsity_out_.at(i).dim() + ".");
      }

      // Allocate temporary memory
      alloc(fcallback_);
    }

    // Function calculating f, g and the gradient of the Lagrangian w.r.t. x and p
    if (!no_nlp_grad_) {
      create_function("nlp_grad", {"x", "p", "lam:f", "lam:g"},
                      {"f", "g", "grad:gamma:x", "grad:gamma:p"},
                      {{"gamma", {"f", "g"}}});
    }
  }

  void Nlpsol::set_nlpsol_prob() {
    p_nlp_.nx = nx_;
    p_nlp_.ng = ng_;
    p_nlp_.np = np_;
  }

  int Nlpsol::init_mem(void* mem) const {
    if (OracleFunction::init_mem(mem)) return 1;
    auto m = static_cast<NlpsolMemory*>(mem);
    m->add_stat("callback_fun");
    m->success = false;
    m->unified_return_status = SOLVER_RET_UNKNOWN;
    return 0;
  }

  void Nlpsol::check_inputs(void* mem) const {
    auto m = static_cast<NlpsolMemory*>(mem);
    auto d_nlp = &m->d_nlp;

    // Skip check?
    if (!inputs_check_) return;

    const double inf = std::numeric_limits<double>::infinity();

    // Number of equality constraints
    casadi_int n_eq = 0;

    // Detect ill-posed problems (simple bounds)
    for (casadi_int i=0; i<nx_; ++i) {
      double lb = d_nlp->lbz[i];
      double ub = d_nlp->ubz[i];
      double x0 = d_nlp->z[i];
      casadi_assert(lb <= ub && lb!=inf && ub!=-inf,
          "Ill-posed problem detected: "
          "LBX[" + str(i) + "] <= UBX[" + str(i) + "] was violated. "
          "Got LBX[" + str(i) + "] = " + str(lb) + " and UBX[" + str(i) + "] = " + str(ub) + ".");
      if (warn_initial_bounds_ && (x0>ub || x0<lb)) {
        casadi_warning("Nlpsol: The initial guess does not satisfy LBX and UBX. "
          "Option 'warn_initial_bounds' controls this warning.");
        break;
      }
      if (lb==ub) n_eq++;
    }

    // Detect ill-posed problems (nonlinear bounds)
    for (casadi_int i=0; i<ng_; ++i) {
      double lb = d_nlp->lbz[nx_ + i];
      double ub = d_nlp->ubz[nx_ + i];
      casadi_assert(lb <= ub && lb!=inf && ub!=-inf,
        "Ill-posed problem detected: "
        "LBG[" + str(i) + "] <= UBG[" + str(i) + "] was violated. "
        "Got LBG[" + str(i) + "] = " + str(lb) + " and UBG[" + str(i) + "] = " + str(ub) + ".");
      if (lb==ub) n_eq++;
    }

    // Make sure there are enough degrees of freedom
    using casadi::str; // Workaround, MinGW bug, cf. CasADi issue #890
    if (n_eq > nx_) {
      casadi_warning("NLP is overconstrained: There are " + str(n_eq) +
        " equality constraints but only " + str(nx_) + " variables.");
    }
  }

  std::map<std::string, Nlpsol::Plugin> Nlpsol::solvers_;

  const std::string Nlpsol::infix_ = "nlpsol";

  DM Nlpsol::getReducedHessian() {
    casadi_error("getReducedHessian not defined for class " + class_name());
    return DM();
  }

  void Nlpsol::setOptionsFromFile(const std::string& file) {
    casadi_error("setOptionsFromFile not defined for class " + class_name());
  }

  void Nlpsol::bound_consistency(casadi_int n, double* z, double* lam,
                                 const double* lbz, const double* ubz) {
    casadi_assert_dev(z!=nullptr);
    casadi_assert_dev(lam!=nullptr);
    casadi_assert_dev(lbz!=nullptr);
    casadi_assert_dev(ubz!=nullptr);
    // Local variables
    casadi_int i;
    // Loop over variables
    for (i=0; i<n; ++i) {
      // Make sure bounds are respected
      z[i] = std::fmin(std::fmax(z[i], lbz[i]), ubz[i]);
      // Adjust multipliers
      if (std::isinf(lbz[i]) && std::isinf(ubz[i])) {
        // Both bounds are infinite: no active bound, zero multiplier
        lam[i] = 0.;
      } else if (std::isinf(lbz[i]) || z[i] - lbz[i] > ubz[i] - z[i]) {
        // Infinite lower bound or closer to upper bound than lower bound
        lam[i] = std::fmax(0., lam[i]);
      } else if (std::isinf(ubz[i]) || z[i] - lbz[i] < ubz[i] - z[i]) {
        // Infinite upper bound or closer to lower bound than upper bound
        lam[i] = std::fmin(0., lam[i]);
      }
    }
  }
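
  // Worked example (illustration only): with lbz = 0, ubz = 1, z = 1.2 and
  // lam = -0.3, z is first clamped to 1. The clamped point is then closer to
  // the upper bound (z - lbz = 1 > ubz - z = 0), so lam is projected onto
  // [0, inf), giving lam = 0.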

  int Nlpsol::eval(const double** arg, double** res, casadi_int* iw, double* w, void* mem) const {
    auto m = static_cast<NlpsolMemory*>(mem);

    auto d_nlp = &m->d_nlp;

    // Bounds, given parameter values
    d_nlp->p = arg[NLPSOL_P];
    const double *lbx = arg[NLPSOL_LBX];
    const double *ubx = arg[NLPSOL_UBX];
    const double *lbg = arg[NLPSOL_LBG];
    const double *ubg = arg[NLPSOL_UBG];

    // Get input pointers
    const double *x0 = arg[NLPSOL_X0];
    const double *lam_x0 = arg[NLPSOL_LAM_X0];
    const double *lam_g0 = arg[NLPSOL_LAM_G0];
    arg += NLPSOL_NUM_IN;

    // Get output pointers
    double *x = res[NLPSOL_X];
    double *f = res[NLPSOL_F];
    double *g = res[NLPSOL_G];
    double *lam_x = res[NLPSOL_LAM_X];
    double *lam_g = res[NLPSOL_LAM_G];
    double *lam_p = res[NLPSOL_LAM_P];
    res += NLPSOL_NUM_OUT;

    // Reset the solver, prepare for solution
    setup(m, arg, res, iw, w);

    // Set initial guess
    casadi_copy(x0, nx_, d_nlp->z);
    casadi_copy(lam_x0, nx_, d_nlp->lam);
    casadi_copy(lam_g0, ng_, d_nlp->lam + nx_);

    // Set parametric multipliers to nan
    casadi_fill(d_nlp->lam_p, np_, nan);

    // Reset f, g
    d_nlp->f = nan;
    casadi_fill(d_nlp->z + nx_, ng_, nan);

    // Get bounds
    casadi_copy(lbx, nx_, d_nlp->lbz);
    casadi_copy(lbg, ng_, d_nlp->lbz + nx_);
    casadi_copy(ubx, nx_, d_nlp->ubz);
    casadi_copy(ubg, ng_, d_nlp->ubz + nx_);

    // Check the provided inputs
    check_inputs(m);

    // Solve the NLP
    int flag = solve(m);

    // Calculate multipliers
    if ((calc_f_ || calc_g_ || calc_lam_x_ || calc_lam_p_) && !flag) {
      const double lam_f = 1.;
      m->arg[0] = d_nlp->z;
      m->arg[1] = d_nlp->p;
      m->arg[2] = &lam_f;
      m->arg[3] = d_nlp->lam + nx_;
      m->res[0] = calc_f_ ? &d_nlp->f : nullptr;
      m->res[1] = calc_g_ ? d_nlp->z + nx_ : nullptr;
      m->res[2] = calc_lam_x_ ? d_nlp->lam : nullptr;
      m->res[3] = calc_lam_p_ ? d_nlp->lam_p : nullptr;
      if (calc_function(m, "nlp_grad")) {
        casadi_warning("Failed to calculate multipliers");
      }
      if (calc_lam_x_) casadi_scal(nx_, -1., d_nlp->lam);
      if (calc_lam_p_) casadi_scal(np_, -1., d_nlp->lam_p);
    }

    // Make sure that an optimal solution is consistent with the bounds
    if (bound_consistency_ && !flag) {
      bound_consistency(nx_+ng_, d_nlp->z, d_nlp->lam, d_nlp->lbz, d_nlp->ubz);
    }

    // Get optimal solution
    casadi_copy(d_nlp->z, nx_, x);
    casadi_copy(d_nlp->z + nx_, ng_, g);
    casadi_copy(d_nlp->lam, nx_, lam_x);
    casadi_copy(d_nlp->lam + nx_, ng_, lam_g);
    casadi_copy(d_nlp->lam_p, np_, lam_p);
    casadi_copy(&d_nlp->f, 1, f);

    if (error_on_fail_ && !m->success)
      casadi_error("nlpsol process failed. "
                   "Set 'error_on_fail' option to false to ignore this error.");
    return flag;
  }

  void Nlpsol::set_work(void* mem, const double**& arg, double**& res,
                        casadi_int*& iw, double*& w) const {
    auto m = static_cast<NlpsolMemory*>(mem);

    // Problem has not been solved at this point
    m->success = false;
    m->unified_return_status = SOLVER_RET_UNKNOWN;

    m->d_nlp.prob = &p_nlp_;

    casadi_nlpsol_init(&m->d_nlp, &iw, &w);
  }

  std::vector<std::string> nlpsol_options(const std::string& name) {
    return Nlpsol::plugin_options(name).all();
  }

  std::string nlpsol_option_type(const std::string& name, const std::string& op) {
    return Nlpsol::plugin_options(name).type(op);
  }

  std::string nlpsol_option_info(const std::string& name, const std::string& op) {
    return Nlpsol::plugin_options(name).info(op);
  }

  void Nlpsol::disp_more(std::ostream& stream) const {
    stream << "minimize f(x;p) subject to lbx<=x<=ubx, lbg<=g(x;p)<=ubg defined by:\n";
    oracle_.disp(stream, true);
  }

  Function Nlpsol::kkt() const {
    // Quick return if cached
    if (kkt_.alive()) {
      return shared_cast<Function>(kkt_.shared());
    }

    // Generate KKT function
    Function ret = oracle_.factory("kkt", {"x", "p", "lam:f", "lam:g"},
      {"jac:g:x", "sym:hess:gamma:x:x"}, {{"gamma", {"f", "g"}}});

    // Cache and return
    kkt_ = ret;
    return ret;
  }
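
  // Note on kkt(): the returned Function maps (x, p, lam_f, lam_g) to the
  // constraint Jacobian jac(g, x) and the Hessian of the Lagrangian
  // gamma = lam_f*f + dot(lam_g, g) with respect to x. Both get_forward and
  // get_reverse below evaluate it at the optimal primal-dual point.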

  Function Nlpsol::
  get_forward(casadi_int nfwd, const std::string& name,
              const std::vector<std::string>& inames,
              const std::vector<std::string>& onames,
              const Dict& opts) const {

    // Symbolic expression for the input
    vector<MX> arg = mx_in(), res = mx_out();

    // Initial guesses not used for derivative calculations
    for (NlpsolInput i : {NLPSOL_X0, NLPSOL_LAM_X0, NLPSOL_LAM_G0}) {
      std::string name = arg[i].is_symbolic() ? arg[i].name() : "temp";
      arg[i] = MX::sym(name, Sparsity(arg[i].size()));
    }

    // Optimal solution
    MX x = res[NLPSOL_X];
    MX lam_g = res[NLPSOL_LAM_G];
    MX lam_x = res[NLPSOL_LAM_X];
    MX lam_p = res[NLPSOL_LAM_P];
    MX f = res[NLPSOL_F];
    MX g = res[NLPSOL_G];

    // Inputs used
    MX lbx = arg[NLPSOL_LBX];
    MX ubx = arg[NLPSOL_UBX];
    MX lbg = arg[NLPSOL_LBG];
    MX ubg = arg[NLPSOL_UBG];
    MX p = arg[NLPSOL_P];

    // Get KKT function
    Function kkt = this->kkt();

    // Hessian of the Lagrangian, Jacobian of the constraints
    vector<MX> HJ_res = kkt({x, p, 1, lam_g});
    MX JG = HJ_res.at(0);
    MX HL = HJ_res.at(1);

    // Active set (assumed known and given by the multiplier signs)
    MX ubIx = lam_x > min_lam_;
    MX lbIx = lam_x < -min_lam_;
    MX bIx = ubIx + lbIx;
    MX iIx = 1-bIx;
    MX ubIg = lam_g > min_lam_;
    MX lbIg = lam_g < -min_lam_;
    MX bIg = ubIg + lbIg;
    MX iIg = 1-bIg;

    // KKT matrix
    MX H_11 = mtimes(diag(iIx), HL) + diag(bIx);
    MX H_12 = mtimes(diag(iIx), JG.T());
    MX H_21 = mtimes(diag(bIg), JG);
    MX H_22 = diag(-iIg);
    MX H = MX::blockcat({{H_11, H_12}, {H_21, H_22}});
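    // In block form, with Z = diag(iIx) selecting variables with inactive
    // bounds and B = diag(bIg) selecting active constraints, this reads
    //   H = [ Z*HL + diag(bIx)   Z*JG'      ]
    //       [ B*JG               -diag(iIg) ]
    // Rows for actively bounded variables reduce to the identity and rows
    // for inactive constraints just return the corresponding multiplier, so
    // the solve below only acts on the genuinely free part of the system.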

    // Sensitivity inputs
    vector<MX> fseed(NLPSOL_NUM_IN);
    MX fwd_lbx = fseed[NLPSOL_LBX] = MX::sym("fwd_lbx", repmat(x.sparsity(), 1, nfwd));
    MX fwd_ubx = fseed[NLPSOL_UBX] = MX::sym("fwd_ubx", repmat(x.sparsity(), 1, nfwd));
    MX fwd_lbg = fseed[NLPSOL_LBG] = MX::sym("fwd_lbg", repmat(g.sparsity(), 1, nfwd));
    MX fwd_ubg = fseed[NLPSOL_UBG] = MX::sym("fwd_ubg", repmat(g.sparsity(), 1, nfwd));
    MX fwd_p = fseed[NLPSOL_P] = MX::sym("fwd_p", repmat(p.sparsity(), 1, nfwd));

    // Guesses are unused
    for (NlpsolInput i : {NLPSOL_X0, NLPSOL_LAM_X0, NLPSOL_LAM_G0}) {
      fseed[i] = MX(repmat(Sparsity(arg[i].size()), 1, nfwd));
    }

    // nlp_grad has the signature
    // (x, p, lam_f, lam_g) -> (f, g, grad_x, grad_p)
    // with lam_f=1 and lam_g=lam_g, grad_x = -lam_x, grad_p=-lam_p
    Function nlp_grad = get_function("nlp_grad");

    // fwd_nlp_grad has the signature
    // (x, p, lam_f, lam_g, f, g, grad_x, grad_p,
    //  fwd_x, fwd_p, fwd_lam_f, fwd_lam_g)
    // -> (fwd_f, fwd_g, fwd_grad_x, fwd_grad_p)
    Function fwd_nlp_grad = nlp_grad.forward(nfwd);

    // Calculate sensitivities from fwd_p
    vector<MX> vv = {x, p, 1, lam_g, f, g, -lam_x, -lam_p, 0., fwd_p, 0., 0.};
    vv = fwd_nlp_grad(vv);
    MX fwd_g_p = vv.at(1);
    MX fwd_gL_p = vv.at(2);

    // Propagate forward seeds
    MX fwd_alpha_x = (if_else(lbIx, fwd_lbx, 0) + if_else(ubIx, fwd_ubx, 0))
                   - if_else(iIx, fwd_gL_p, 0);
    MX fwd_alpha_g = (if_else(ubIg, fwd_ubg, 0) + if_else(lbIg, fwd_lbg, 0))
                   - if_else(bIg, fwd_g_p, 0);
    MX v = MX::vertcat({fwd_alpha_x, fwd_alpha_g});

    // Solve
    v = MX::solve(H, v, sens_linsol_, sens_linsol_options_);

    // Extract sensitivities in x, lam_x and lam_g
    vector<MX> v_split = vertsplit(v, {0, nx_, nx_+ng_});
    MX fwd_x = v_split.at(0);
    MX fwd_lam_g = v_split.at(1);

    // Calculate sensitivities in lam_x, lam_g
    vv = {x, p, 1, lam_g, f, g, -lam_x, -lam_p,
          fwd_x, fwd_p, 0, fwd_lam_g};
    vv = fwd_nlp_grad(vv);
    MX fwd_f = vv.at(0);
    MX fwd_g = vv.at(1);
    MX fwd_lam_x = -vv.at(2);
    MX fwd_lam_p = -vv.at(3);

    // Forward sensitivities
    vector<MX> fsens(NLPSOL_NUM_OUT);
    fsens[NLPSOL_X] = fwd_x;
    fsens[NLPSOL_F] = fwd_f;
    fsens[NLPSOL_G] = fwd_g;
    fsens[NLPSOL_LAM_X] = fwd_lam_x;
    fsens[NLPSOL_LAM_G] = fwd_lam_g;
    fsens[NLPSOL_LAM_P] = fwd_lam_p;

    // Gather return values
    arg.insert(arg.end(), res.begin(), res.end());
    arg.insert(arg.end(), fseed.begin(), fseed.end());
    res = fsens;

    return Function(name, arg, res, inames, onames, opts);
  }

  Function Nlpsol::
  get_reverse(casadi_int nadj, const std::string& name,
              const std::vector<std::string>& inames,
              const std::vector<std::string>& onames,
              const Dict& opts) const {
    // Symbolic expression for the input
    vector<MX> arg = mx_in(), res = mx_out();

    // Initial guesses not used for derivative calculations
    for (NlpsolInput i : {NLPSOL_X0, NLPSOL_LAM_X0, NLPSOL_LAM_G0}) {
      std::string name = arg[i].is_symbolic() ? arg[i].name() : "temp";
      arg[i] = MX::sym(name, Sparsity(arg[i].size()));
    }

    // Optimal solution
    MX x = res[NLPSOL_X];
    MX lam_g = res[NLPSOL_LAM_G];
    MX lam_x = res[NLPSOL_LAM_X];
    MX lam_p = res[NLPSOL_LAM_P];
    MX f = res[NLPSOL_F];
    MX g = res[NLPSOL_G];

    // Inputs used
    MX lbx = arg[NLPSOL_LBX];
    MX ubx = arg[NLPSOL_UBX];
    MX lbg = arg[NLPSOL_LBG];
    MX ubg = arg[NLPSOL_UBG];
    MX p = arg[NLPSOL_P];

    // Get KKT function
    Function kkt = this->kkt();

    // Hessian of the Lagrangian, Jacobian of the constraints
    vector<MX> HJ_res = kkt({x, p, 1, lam_g});
    MX JG = HJ_res.at(0);
    MX HL = HJ_res.at(1);

    // Active set (assumed known and given by the multiplier signs)
    MX ubIx = lam_x > min_lam_;
    MX lbIx = lam_x < -min_lam_;
    MX bIx = ubIx + lbIx;
    MX iIx = 1-bIx;
    MX ubIg = lam_g > min_lam_;
    MX lbIg = lam_g < -min_lam_;
    MX bIg = ubIg + lbIg;
    MX iIg = 1-bIg;

    // KKT matrix
    MX H_11 = mtimes(diag(iIx), HL) + diag(bIx);
    MX H_12 = mtimes(diag(iIx), JG.T());
    MX H_21 = mtimes(diag(bIg), JG);
    MX H_22 = diag(-iIg);
    MX H = MX::blockcat({{H_11, H_12}, {H_21, H_22}});
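    // Same reduced KKT matrix as in get_forward (see the block-structure note
    // there); the adjoint sensitivities below solve against its transpose.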

    // Sensitivity inputs
    vector<MX> aseed(NLPSOL_NUM_OUT);
    MX adj_x = aseed[NLPSOL_X] = MX::sym("adj_x", repmat(x.sparsity(), 1, nadj));
    MX adj_lam_g = aseed[NLPSOL_LAM_G] = MX::sym("adj_lam_g", repmat(g.sparsity(), 1, nadj));
    MX adj_lam_x = aseed[NLPSOL_LAM_X] = MX::sym("adj_lam_x", repmat(x.sparsity(), 1, nadj));
    MX adj_lam_p = aseed[NLPSOL_LAM_P] = MX::sym("adj_lam_p", repmat(p.sparsity(), 1, nadj));
    MX adj_f = aseed[NLPSOL_F] = MX::sym("adj_f", Sparsity::dense(1, nadj));
    MX adj_g = aseed[NLPSOL_G] = MX::sym("adj_g", repmat(g.sparsity(), 1, nadj));

    // nlp_grad has the signature
    // (x, p, lam_f, lam_g) -> (f, g, grad_x, grad_p)
    // with lam_f=1 and lam_g=lam_g, grad_x = -lam_x, grad_p=-lam_p
    Function nlp_grad = get_function("nlp_grad");

    // rev_nlp_grad has the signature
    // (x, p, lam_f, lam_g, f, g, grad_x, grad_p,
    //  adj_f, adj_g, adj_grad_x, adj_grad_p)
    // -> (adj_x, adj_p, adj_lam_f, adj_lam_g)
    Function rev_nlp_grad = nlp_grad.reverse(nadj);

    // Calculate sensitivities from f, g and lam_x
    vector<MX> vv = {x, p, 1, lam_g, f, g, -lam_x, -lam_p,
                     adj_f, adj_g, -adj_lam_x, -adj_lam_p};
    vv = rev_nlp_grad(vv);
    MX adj_x0 = vv.at(0);
    MX adj_p0 = vv.at(1);
    MX adj_lam_g0 = vv.at(3);

    // Solve to get beta_x_bar, beta_g_bar
    MX v = MX::vertcat({adj_x + adj_x0, adj_lam_g + adj_lam_g0});
    v = MX::solve(H.T(), v, sens_linsol_, sens_linsol_options_);
    vector<MX> v_split = vertsplit(v, {0, nx_, nx_+ng_});
    MX beta_x_bar = v_split.at(0);
    MX beta_g_bar = v_split.at(1);

    // Calculate sensitivities in p
    vv = {x, p, 1, lam_g, f, g, -lam_x, -lam_p,
          0, bIg*beta_g_bar, iIx*beta_x_bar, 0};
    vv = rev_nlp_grad(vv);
    MX adj_p = vv.at(1);

    // Reverse sensitivities
    vector<MX> asens(NLPSOL_NUM_IN);
    asens[NLPSOL_UBX] = if_else(ubIx, beta_x_bar, 0);
    asens[NLPSOL_LBX] = if_else(lbIx, beta_x_bar, 0);
    asens[NLPSOL_UBG] = if_else(ubIg, beta_g_bar, 0);
    asens[NLPSOL_LBG] = if_else(lbIg, beta_g_bar, 0);
    asens[NLPSOL_P] = adj_p0 - adj_p;

    // Guesses are unused
    for (NlpsolInput i : {NLPSOL_X0, NLPSOL_LAM_X0, NLPSOL_LAM_G0}) {
      asens[i] = MX(repmat(Sparsity(arg[i].size()), 1, nadj));
    }

    // Gather return values
    arg.insert(arg.end(), res.begin(), res.end());
    arg.insert(arg.end(), aseed.begin(), aseed.end());
    res = asens;

    return Function(name, arg, res, inames, onames, opts);
  }

  int Nlpsol::callback(NlpsolMemory* m) const {
    // Quick return if no callback function
    if (fcallback_.is_null()) return 0;
    // Callback inputs
    fill_n(m->arg, fcallback_.n_in(), nullptr);

    auto d_nlp = &m->d_nlp;

    m->arg[NLPSOL_X] = d_nlp->z;
    m->arg[NLPSOL_F] = &d_nlp->f;
    m->arg[NLPSOL_G] = d_nlp->z + nx_;
    m->arg[NLPSOL_LAM_G] = d_nlp->lam + nx_;
    m->arg[NLPSOL_LAM_X] = d_nlp->lam;

    // Callback outputs
    fill_n(m->res, fcallback_.n_out(), nullptr);
    double ret = 0;
    m->res[0] = &ret;

    // Start timer
    m->fstats.at("callback_fun").tic();
    try {
      // Evaluate
      fcallback_(m->arg, m->res, m->iw, m->w, 0);
    } catch(KeyboardInterruptException& ex) {
      throw;
    } catch(exception& ex) {
      print("WARNING: intermediate_callback error: %s\n", ex.what());
      if (!iteration_callback_ignore_errors_) ret=1;
    }

    // Stop timer (before the early return, so the stat is always recorded)
    m->fstats.at("callback_fun").toc();

    // User interruption?
    if (static_cast<casadi_int>(ret)) return 1;

    return 0;
  }

  Dict Nlpsol::get_stats(void* mem) const {
    Dict stats = OracleFunction::get_stats(mem);
    auto m = static_cast<NlpsolMemory*>(mem);
    stats["success"] = m->success;
    stats["unified_return_status"] = string_from_UnifiedReturnStatus(m->unified_return_status);
    return stats;
  }

  void Nlpsol::nlpsol_codegen_body(CodeGenerator& g) const {
    g.local("d_nlp", "struct casadi_nlpsol_data");
    g.local("p_nlp", "struct casadi_nlpsol_prob");

    g << "d_nlp.prob = &p_nlp;\n";
    g << "p_nlp.nx = " << nx_ << ";\n";
    g << "p_nlp.ng = " << ng_ << ";\n";
    g << "p_nlp.np = " << np_ << ";\n";
    g << "casadi_nlpsol_init(&d_nlp, &iw, &w);\n";
  }

  void Nlpsol::serialize_body(SerializingStream &s) const {
    OracleFunction::serialize_body(s);

    s.version("Nlpsol", 2);
    s.pack("Nlpsol::nx", nx_);
    s.pack("Nlpsol::ng", ng_);
    s.pack("Nlpsol::np", np_);
    s.pack("Nlpsol::fcallback", fcallback_);
    s.pack("Nlpsol::callback_step", callback_step_);
    s.pack("Nlpsol::error_on_fail", error_on_fail_);
    s.pack("Nlpsol::eval_errors_fatal", eval_errors_fatal_);
    s.pack("Nlpsol::warn_initial_bounds", warn_initial_bounds_);
    s.pack("Nlpsol::iteration_callback_ignore_errors", iteration_callback_ignore_errors_);
    s.pack("Nlpsol::calc_multipliers", calc_multipliers_);
    s.pack("Nlpsol::calc_lam_x", calc_lam_x_);
    s.pack("Nlpsol::calc_lam_p", calc_lam_p_);
    s.pack("Nlpsol::calc_f", calc_f_);
    s.pack("Nlpsol::calc_g", calc_g_);
    s.pack("Nlpsol::min_lam", min_lam_);
    s.pack("Nlpsol::bound_consistency", bound_consistency_);
    s.pack("Nlpsol::no_nlp_grad", no_nlp_grad_);
    s.pack("Nlpsol::discrete", discrete_);
    s.pack("Nlpsol::mi", mi_);
    s.pack("Nlpsol::sens_linsol", sens_linsol_);
    s.pack("Nlpsol::sens_linsol_options", sens_linsol_options_);
  }

  void Nlpsol::serialize_type(SerializingStream &s) const {
    OracleFunction::serialize_type(s);
    PluginInterface<Nlpsol>::serialize_type(s);
  }

  ProtoFunction* Nlpsol::deserialize(DeserializingStream& s) {
    return PluginInterface<Nlpsol>::deserialize(s);
  }

  Nlpsol::Nlpsol(DeserializingStream& s) : OracleFunction(s) {
    int version = s.version("Nlpsol", 1, 2);
    s.unpack("Nlpsol::nx", nx_);
    s.unpack("Nlpsol::ng", ng_);
    s.unpack("Nlpsol::np", np_);
    s.unpack("Nlpsol::fcallback", fcallback_);
    s.unpack("Nlpsol::callback_step", callback_step_);
    s.unpack("Nlpsol::error_on_fail", error_on_fail_);
    s.unpack("Nlpsol::eval_errors_fatal", eval_errors_fatal_);
    s.unpack("Nlpsol::warn_initial_bounds", warn_initial_bounds_);
    s.unpack("Nlpsol::iteration_callback_ignore_errors", iteration_callback_ignore_errors_);
    s.unpack("Nlpsol::calc_multipliers", calc_multipliers_);
    s.unpack("Nlpsol::calc_lam_x", calc_lam_x_);
    s.unpack("Nlpsol::calc_lam_p", calc_lam_p_);
    s.unpack("Nlpsol::calc_f", calc_f_);
    s.unpack("Nlpsol::calc_g", calc_g_);
    s.unpack("Nlpsol::min_lam", min_lam_);
    s.unpack("Nlpsol::bound_consistency", bound_consistency_);
    s.unpack("Nlpsol::no_nlp_grad", no_nlp_grad_);
    s.unpack("Nlpsol::discrete", discrete_);
    s.unpack("Nlpsol::mi", mi_);
    if (version>=2) {
      s.unpack("Nlpsol::sens_linsol", sens_linsol_);
      s.unpack("Nlpsol::sens_linsol_options", sens_linsol_options_);
    } else {
      sens_linsol_ = "qr";
    }
    set_nlpsol_prob();
  }

} // namespace casadi