1 /*
2 * This file is part of CasADi.
3 *
4 * CasADi -- A symbolic framework for dynamic optimization.
5 * Copyright (C) 2010-2014 Joel Andersson, Joris Gillis, Moritz Diehl,
6 * K.U. Leuven. All rights reserved.
7 * Copyright (C) 2011-2014 Greg Horn
8 *
9 * CasADi is free software; you can redistribute it and/or
10 * modify it under the terms of the GNU Lesser General Public
11 * License as published by the Free Software Foundation; either
12 * version 3 of the License, or (at your option) any later version.
13 *
14 * CasADi is distributed in the hope that it will be useful,
15 * but WITHOUT ANY WARRANTY; without even the implied warranty of
16 * MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the GNU
17 * Lesser General Public License for more details.
18 *
19 * You should have received a copy of the GNU Lesser General Public
20 * License along with CasADi; if not, write to the Free Software
21 * Foundation, Inc., 51 Franklin Street, Fifth Floor, Boston, MA 02110-1301 USA
22 *
23 */
24
25 #include "optistack_internal.hpp"
26 #include "nlpsol.hpp"
27 #include "conic.hpp"
28 #include "function_internal.hpp"
29 #include "global_options.hpp"
30
31 using namespace std;
32 namespace casadi {
33
34 class InternalOptiCallback : public FunctionInternal {
35 public:
36
InternalOptiCallback(OptiNode & sol)37 InternalOptiCallback(OptiNode& sol) : FunctionInternal(class_name()), sol_(sol) {}
38
~InternalOptiCallback()39 ~InternalOptiCallback() override {
40 clear_mem();
41 }
42
43 /** \brief Get type name */
class_name() const44 std::string class_name() const override {return "InternalOptiCallback";}
45
46 // Number of inputs and outputs
get_n_in()47 size_t get_n_in() override { return nlpsol_n_out();}
48
get_sparsity_in(casadi_int i)49 Sparsity get_sparsity_in(casadi_int i) override {
50 std::string n = nlpsol_out(i);
51 casadi_int size = 0;
52 if (n=="f") {
53 size = 1;
54 } else if (n=="lam_x" || n=="x") {
55 size = sol_.nx();
56 } else if (n=="lam_g" || n=="g") {
57 size = sol_.ng();
58 } else if (n=="p" || n=="lam_p") {
59 size = sol_.np();
60 if (size==0) return Sparsity::dense(0, 0);
61 } else {
62 return Sparsity::dense(0, 0);
63 }
64 return Sparsity::dense(size, 1);
65 }
66
reset()67 void reset() { i=0; }
68
69 // eval_dm has been defined instead of eval
has_eval_dm() const70 bool has_eval_dm() const override { return true;}
71
72 /// Evaluate the function numerically
eval_dm(const std::vector<DM> & arg) const73 std::vector<DM> eval_dm(const std::vector<DM>& arg) const override {
74 DMDict r;
75
76 for (casadi_int i=0;i<nlpsol_n_out();++i) {
77 r[nlpsol_out(i)] = arg[i];
78 }
79
80 sol_.res(r);
81
82 if (sol_.user_callback_) sol_.user_callback_->call(i);
83
84 i+=1;
85 return {0};
86 }
87
associated_with(const OptiNode * o)88 bool associated_with(const OptiNode* o) { return &sol_==o; }
89
90 private:
91 OptiNode& sol_;
92 mutable casadi_int i;
93 };
94
create(const std::string & problem_type)95 OptiNode* OptiNode::create(const std::string& problem_type) {
96 return new OptiNode(problem_type);
97 }
98
99
// Register a user callback object; invoked once per solver iteration.
// The pointer is stored, not owned — the caller keeps it alive.
void OptiNode::callback_class(OptiCallback* callback) {
  user_callback_ = callback;
}
103
// Remove any registered user callback.
void OptiNode::callback_class() {
  user_callback_ = nullptr;
}
107
has_callback_class() const108 bool OptiNode::has_callback_class() const {
109 return user_callback_ != 0;
110 }
111
format_stacktrace(const Dict & stacktrace,casadi_int indent)112 std::string OptiNode::format_stacktrace(const Dict& stacktrace, casadi_int indent) {
113 std::string s_indent;
114 for (casadi_int i=0;i<indent;++i) {
115 s_indent+= " ";
116 }
117 std::string description;
118 std::string filename = stacktrace.at("file").as_string();
119 casadi_int line = stacktrace.at("line").as_int();
120 description += "defined at " + filename +":"+str(line);
121 std::string name = stacktrace.at("name").as_string();
122 if (name!="Unknown" && name!= "<module>")
123 description += " in " + stacktrace.at("name").as_string();
124 try {
125 ifstream file(filename);
126 for (casadi_int i=0; i<line-1; ++i) {
127 file.ignore(std::numeric_limits<std::streamsize>::max(), '\n');
128 }
129 std::string contents; std::getline(file, contents);
130 auto it = contents.find_first_not_of(" \n");
131 if (it!=std::string::npos) {
132 description += "\n" + s_indent + contents.substr(it);
133 }
134 } catch(...) {
135 // pass
136 }
137 return description;
138 }
139
describe(const MX & expr,casadi_int indent) const140 std::string OptiNode::describe(const MX& expr, casadi_int indent) const {
141 if (problem_dirty()) return baked_copy().describe(expr, indent);
142 std::string s_indent;
143 for (casadi_int i=0;i<indent;++i) {
144 s_indent+= " ";
145 }
146 std::string description = s_indent;
147 if (expr.is_symbolic()) {
148 if (has(expr)) {
149 description += "Opti " + variable_type_to_string(meta(expr).type) + " '" + expr.name() +
150 "' of shape " + expr.dim();
151 const Dict& extra = meta(expr).extra;
152 auto it = extra.find("stacktrace");
153 if (it!=extra.end()) {
154 const Dict& stacktrace = it->second.as_dict();
155 description += ", " + format_stacktrace(stacktrace, indent+1);
156 }
157 } else {
158 VariableType vt;
159 if (parse_opti_name(expr.name(), vt)) {
160 description += "Opti " + variable_type_to_string(vt) + " '" + expr.name() +
161 "' of shape " + expr.dim()+
162 ", belonging to a different instance of Opti.";
163 } else {
164 description += "MX symbol '" + expr.name() + "' of shape " + expr.dim();
165 description += ", declared outside of Opti.";
166 }
167 }
168 } else {
169 if (has_con(expr)) {
170 description = "Opti constraint of shape " + expr.dim();
171 const Dict& extra = meta_con(expr).extra;
172 auto it = extra.find("stacktrace");
173 if (it!=extra.end()) {
174 const Dict& stacktrace = it->second.as_dict();
175 description += ", " + format_stacktrace(stacktrace, indent+1);
176 }
177 } else {
178 std::vector<MX> s = symvar(expr);
179 if (s.empty()) {
180 description+= "Constant epxression.";
181 } else {
182 description+= "General expression, dependent on " + str(s.size()) + " symbols:";
183 for (casadi_int i=0;i<s.size();++i) {
184 description+= "\n"+describe(s[i], indent+1);
185 if (i>5) {
186 description+= "\n...";
187 break;
188 }
189 }
190 }
191 }
192 }
193
194 return description;
195 }
196
g_describe(casadi_int i) const197 std::string OptiNode::g_describe(casadi_int i) const {
198 if (problem_dirty()) return baked_copy().g_describe(i);
199 MX expr = g_lookup(i);
200 casadi_int local_i = i-meta_con(expr).start + GlobalOptions::start_index;
201 std::string description = describe(expr);
202 if (expr.numel()>1)
203 description += "\nAt nonzero " + str(local_i) + ".";
204 return description;
205 }
206
x_describe(casadi_int i) const207 std::string OptiNode::x_describe(casadi_int i) const {
208 if (problem_dirty()) return baked_copy().x_describe(i);
209 MX symbol = x_lookup(i);
210 casadi_int local_i = i-meta(symbol).start + GlobalOptions::start_index;
211 std::string description = describe(symbol);
212 if (symbol.numel()>1)
213 description += "\nAt nonzero " + str(local_i) + ".";
214 return description;
215 }
216
x_lookup(casadi_int i) const217 MX OptiNode::x_lookup(casadi_int i) const {
218 if (problem_dirty()) return baked_copy().x_lookup(i);
219 casadi_assert_dev(i>=0);
220 casadi_assert_dev(i<nx());
221 std::vector<MX> x = active_symvar(OPTI_VAR);
222 for (const auto& e : x) {
223 const MetaVar& m = meta(e);
224 if (i>=m.start && i<m.stop) return e;
225 }
226 casadi_error("Internal error");
227 return MX();
228 }
229
g_lookup(casadi_int i) const230 MX OptiNode::g_lookup(casadi_int i) const {
231 if (problem_dirty()) return baked_copy().g_lookup(i);
232 casadi_assert_dev(i>=0);
233 casadi_assert_dev(i<ng());
234 for (const auto& e : g_) {
235 const MetaCon& m = meta_con(e);
236 if (i>=m.start && i<m.stop) return e;
237 }
238 casadi_error("Internal error");
239 return MX();
240 }
241
OptiNode(const std::string & problem_type)242 OptiNode::OptiNode(const std::string& problem_type) :
243 count_(0), count_var_(0), count_par_(0), count_dual_(0) {
244 f_ = 0;
245 instance_number_ = instance_count_++;
246 user_callback_ = nullptr;
247 store_initial_[OPTI_VAR] = {};
248 store_initial_[OPTI_PAR] = {};
249 store_initial_[OPTI_DUAL_G] = {};
250 store_latest_[OPTI_VAR] = {};
251 store_latest_[OPTI_DUAL_G] = {};
252 casadi_assert(problem_type=="nlp" || problem_type=="conic",
253 "Specified problem type '" + problem_type + "'unknown. "
254 "Choose 'nlp' (default) or 'conic'.");
255 problem_type_ = problem_type;
256 mark_problem_dirty();
257 }
258
// Destructor: all members clean up via their own destructors.
OptiNode::~OptiNode() {
}
261
// Create an n-by-m decision variable.
// attribute: "full" (dense n-by-m) or "symmetric" (n must equal m; the
// underlying symbol holds the lower triangle, and the returned expression
// is the symmetrized matrix).
// Returns the expression to use in the problem; the raw symbol is kept
// internally together with zero-initialized guess and NaN latest value.
MX OptiNode::variable(casadi_int n, casadi_int m, const std::string& attribute) {

  // Prepare metadata
  MetaVar meta_data;
  meta_data.attribute = attribute;
  meta_data.n = n;
  meta_data.m = m;
  meta_data.type = OPTI_VAR;
  meta_data.count = count_++;
  meta_data.i = count_var_++;

  MX symbol, ret;

  if (attribute=="symmetric") {
    casadi_assert(n==m, "You specified attribute 'symmetric', "
      "while matrix is not even square, but " + str(n) + "-by-" + str(m) + ".");
    // Only n*(n+1)/2 free entries; expand lower triangle to a full matrix
    symbol = MX::sym(name_prefix() + "x_" + str(count_var_), n*(n+1)/2);
    ret = tril2symm(MX(Sparsity::lower(n), symbol));
  } else if (attribute=="full") {
    symbol = MX::sym(name_prefix() + "x_" + str(count_var_), n, m);
    ret = symbol;
  } else {
    casadi_error("Unknown attribute '" + attribute + "'. Choose from 'full' or 'symmetric'.");
  }

  // Store the symbol; preventing it from going out of scope
  symbols_.push_back(symbol);
  store_initial_[OPTI_VAR].push_back(DM::zeros(symbol.sparsity()));
  store_latest_[OPTI_VAR].push_back(DM::nan(symbol.sparsity()));

  set_meta(symbol, meta_data);
  return ret;
}
295
name_prefix() const296 std::string OptiNode::name_prefix() const {
297 return "opti" + str(instance_number_) + "_";
298 }
299
// Deep copy of this Opti stack, wrapped in a new Opti handle.
Opti OptiNode::copy() const {
  return Opti::create(new OptiNode(*this));
}
303
register_dual(MetaCon & c)304 void OptiNode::register_dual(MetaCon& c) {
305
306 // Prepare metadata
307 MetaVar meta_data;
308 meta_data.attribute = "full";
309 meta_data.n = c.canon.size1();
310 meta_data.m = c.canon.size2();;
311 meta_data.type = OPTI_DUAL_G;
312 meta_data.count = count_++;
313 meta_data.i = count_dual_++;
314
315 MX symbol, ret;
316 if (c.type==OPTI_PSD) {
317 symbol = MX();
318 ret = MX();
319 } else {
320 symbol = MX::sym(name_prefix()+"lam_g_"+str(count_dual_), c.canon.sparsity());
321
322 casadi_assert_dev(c.canon.is_dense());
323
324 Sparsity ret_sp = repmat(c.original.sparsity(), 1, c.n);
325
326 casadi_int N = c.canon.sparsity().nnz();
327
328 MX flat = vec(symbol);
329 if (c.type==OPTI_DOUBLE_INEQUALITY) {
330 MX v = MX::sym("v");
331 MX decide_left_right = vertcat(if_else_zero(v<0, -v), if_else_zero(v>=0, v));
332 Function sign = Function("sign", {v}, {decide_left_right});
333 Function sign_map = sign.map(c.canon.sparsity().nnz());
334 ret = MX(ret_sp, sign_map((c.flipped ? -1 : 1)*flat)[0].T());
335 } else {
336 casadi_int block_size = N / c.n;
337 std::vector<MX> original_blocks = vertsplit(fabs(flat), block_size);
338 std::vector<MX> blocks(N);
339 for (casadi_int i=0;i<c.n;++i) {
340 casadi_int p = c.flipped? c.n-i-1: i;
341 blocks[p] = original_blocks[i];
342 }
343 ret = MX(ret_sp, vertcat(blocks));
344 }
345 }
346
347 symbols_.push_back(symbol);
348 store_initial_[OPTI_DUAL_G].push_back(DM::zeros(symbol.sparsity()));
349 store_latest_[OPTI_DUAL_G].push_back(DM::nan(symbol.sparsity()));
350
351 c.dual = ret;
352 c.dual_canon = symbol;
353
354 set_meta(symbol, meta_data);
355 }
356
// Create an n-by-m parameter symbol (attribute must be "full").
// Parameters start out NaN; a value must be supplied via set_value
// before solving (checked in solve_prepare).
MX OptiNode::parameter(casadi_int n, casadi_int m, const std::string& attribute) {
  casadi_assert_dev(attribute=="full");

  // Prepare metadata
  MetaVar meta_data;
  meta_data.attribute = attribute;
  meta_data.n = n;
  meta_data.m = m;
  meta_data.type = OPTI_PAR;
  meta_data.count = count_++;
  meta_data.i = count_par_++;

  MX symbol = MX::sym(name_prefix() + "p_" + str(count_par_), n, m);
  symbols_.push_back(symbol);
  store_initial_[OPTI_PAR].push_back(DM::nan(symbol.sparsity()));

  set_meta(symbol, meta_data);
  return symbol;
}
376
// Solver statistics of the last solve; errors if not solved yet.
Dict OptiNode::stats() const {
  assert_solved();
  return solver_.stats();
}
381
return_status() const382 std::string OptiNode::return_status() const {
383 Dict mystats;
384 try {
385 mystats = stats();
386 } catch (...) {
387 //
388 }
389 if (mystats.find("return_status")!=mystats.end())
390 return mystats.at("return_status");
391 return "unknown";
392 }
393
return_success(bool accept_limit) const394 bool OptiNode::return_success(bool accept_limit) const {
395 Dict mystats;
396 try {
397 mystats = stats();
398 } catch (...) {
399 //
400 }
401 bool success = false;
402 if (mystats.find("success")!=mystats.end()) success = mystats.at("success");
403 if (!accept_limit) return success;
404
405 bool limited = false;
406 if (mystats.find("unified_return_status")!=mystats.end())
407 limited = mystats.at("unified_return_status")=="SOLVER_RET_LIMITED";
408 return success || limited;
409 }
410
// Access the underlying nlpsol/qpsol Function object.
Function OptiNode::casadi_solver() const {
  return solver_;
}
414
// Attach/overwrite variable metadata, keyed by the symbol's node pointer.
void OptiNode::set_meta(const MX& m, const MetaVar& meta) {
  meta_[m.get()] = meta;
}
418
// Attach/overwrite constraint metadata, keyed by the expression's node pointer.
void OptiNode::set_meta_con(const MX& m, const MetaCon& meta) {
  meta_con_[m.get()] = meta;
}
422
update_user_dict(const MX & m,const Dict & meta)423 void OptiNode::update_user_dict(const MX& m, const Dict& meta) {
424 try {
425 MetaCon m_update = get_meta_con(m);
426 MetaVar m_update2 = get_meta(m_update.dual_canon);
427 for (const auto & it : meta) {
428 m_update.extra[it.first] = it.second;
429 m_update2.extra[it.first] = it.second;
430 }
431 set_meta_con(m, m_update);
432 set_meta(m_update.dual_canon, m_update2);
433 } catch (exception& e) {
434 for (const auto & s : MX::symvar(m)) {
435 MetaVar m_update = get_meta(s);
436 for (const auto & it : meta)
437 m_update.extra[it.first] = it.second;
438 set_meta(s, m_update);
439 }
440 }
441 }
442
user_dict(const MX & m) const443 Dict OptiNode::user_dict(const MX& m) const {
444 try {
445 MetaCon meta = get_meta_con(m);
446 return meta.extra;
447 } catch (exception& e) {
448 MetaVar meta = get_meta(m);
449 return meta.extra;
450 }
451 }
452
// User-facing dual (multiplier) expression of a constraint.
MX OptiNode::dual(const MX& m) const {
  return meta_con(m).dual;
}
456
// Metadata of a symbol (read-only); assert_has guarantees the entry exists.
const MetaVar& OptiNode::meta(const MX& m) const {
  assert_has(m);
  auto find = meta_.find(m.get());
  return find->second;
}
462
// Metadata of a constraint (read-only); assert_has_con guarantees the entry exists.
const MetaCon& OptiNode::meta_con(const MX& m) const {
  assert_has_con(m);
  auto find = meta_con_.find(m.get());
  return find->second;
}
468
// Metadata of a symbol (mutable); assert_has guarantees the entry exists.
MetaVar& OptiNode::meta(const MX& m) {
  assert_has(m);
  auto find = meta_.find(m.get());
  return find->second;
}
474
// Metadata of a constraint (mutable); assert_has_con guarantees the entry exists.
MetaCon& OptiNode::meta_con(const MX& m) {
  assert_has_con(m);
  auto find = meta_con_.find(m.get());
  return find->second;
}
480
// Copy of a symbol's metadata (throws via assert_has when unknown).
MetaVar OptiNode::get_meta(const MX& m) const {
  return meta(m);
}
484
// Copy of a constraint's metadata (throws via assert_has_con when unknown).
MetaCon OptiNode::get_meta_con(const MX& m) const {
  return meta_con(m);
}
488
// True if m is a symbol created by this Opti instance.
bool OptiNode::has(const MX& m) const {
  return meta_.find(m.get())!=meta_.end();
}
492
// True if m is a constraint registered with this Opti instance.
bool OptiNode::has_con(const MX& m) const {
  return meta_con_.find(m.get())!=meta_con_.end();
}
496
// Raise a descriptive error unless m is a symbol of this instance.
// Distinguishes symbols from another Opti instance (by name pattern)
// from plain MX symbols declared outside Opti.
void OptiNode::assert_has(const MX& m) const {
  if (!has(m)) {
    VariableType vt;
    casadi_assert(m.is_symbolic(), "Symbol expected, got expression.");
    if (parse_opti_name(m.name(), vt)) {
      casadi_error("Unknown: " + describe(m));
    } else {
      casadi_error("Unknown: " + describe(m) + "\n"
        "Note: you cannot use a raw MX.sym in your Opti problem,"
        " only if you package it in a CasADi Function.");
    }
  }
}
510
// Raise unless m is a registered constraint of this instance.
void OptiNode::assert_has_con(const MX& m) const {
  casadi_assert(has_con(m), "Constraint not present in Opti stack.");
}
514
// Global counter assigning a unique instance number to each OptiNode.
casadi_int OptiNode::instance_count_ = 0;
516
parse_opti_name(const std::string & name,VariableType & vt) const517 bool OptiNode::parse_opti_name(const std::string& name, VariableType& vt) const {
518 casadi_int i = name.find("opti");
519 if (i!=0) return false;
520
521 i = name.find("_");
522 i++;
523 if (i==std::string::npos) return false;
524 if (name.substr(i, 1)=="x") {
525 vt = OPTI_VAR;
526 return true;
527 } else if (name.substr(i, 1)=="p") {
528 vt = OPTI_PAR;
529 return true;
530 } else if (name.substr(i, 5)=="lam_g") {
531 vt = OPTI_DUAL_G;
532 return true;
533 }
534
535 return false;
536 }
537
variable_type_to_string(VariableType vt) const538 std::string OptiNode::variable_type_to_string(VariableType vt) const {
539 auto it = VariableType2String_.find(vt);
540 if (it==VariableType2String_.end()) return "unknown variable type";
541 return it->second;
542
543 }
// Display names used in diagnostics (see variable_type_to_string).
std::map<VariableType, std::string> OptiNode::VariableType2String_ =
  {{OPTI_VAR, "decision variable"},
   {OPTI_PAR, "parameter"},
   {OPTI_DUAL_G, "dual variable"}};
548
// Assignment expressions (symbol == stored initial guess) for all
// variables and duals created so far.
std::vector<MX> OptiNode::initial() const {
  std::vector<MX> ret;
  for (const auto& e : symvar()) {
    if (meta(e).type==OPTI_VAR || meta(e).type==OPTI_DUAL_G)
      ret.push_back(e==store_initial_.at(meta(e).type)[meta(e).i]);
  }
  return ret;
}
557
value_variables() const558 std::vector<MX> OptiNode::value_variables() const {
559 std::vector<MX> ret;
560 for (const auto& e : symvar()) {
561 if (meta(e).type==OPTI_VAR)
562 ret.push_back(e==store_latest_.at(meta(e).type)[meta(e).i]);
563 }
564 return ret;
565 }
566
// Assignment expressions (parameter == stored value) for all parameters.
// Parameters have no "latest" store; their set value lives in store_initial_.
std::vector<MX> OptiNode::value_parameters() const {
  std::vector<MX> ret;
  for (const auto& e : symvar()) {
    if (meta(e).type==OPTI_PAR)
      ret.push_back(e==store_initial_.at(meta(e).type)[meta(e).i]);
  }
  return ret;
}
575
bake()576 void OptiNode::bake() {
577 casadi_assert(!f_.is_empty() || !g_.empty(),
578 "You need to specify at least an objective (y calling 'minimize'), "
579 "or a constraint (by calling 'subject_to').");
580
581 symbol_active_.clear();
582 symbol_active_.resize(symbols_.size());
583
584 // Gather all expressions
585 MX total_expr = vertcat(f_, veccat(g_));
586
587 // Categorize the symbols appearing in those expressions
588 for (const auto& d : symvar(total_expr))
589 symbol_active_[meta(d).count] = true;
590
591 std::vector<MX> x = active_symvar(OPTI_VAR);
592 for (casadi_int i=0;i<x.size();++i) meta(x[i]).active_i = i;
593
594 casadi_int offset = 0;
595 for (const auto& v : x) {
596 meta(v).start = offset;
597 offset+= v.nnz();
598 meta(v).stop = offset;
599 }
600 std::vector<MX> p = active_symvar(OPTI_PAR);
601 for (casadi_int i=0;i<p.size();++i) meta(p[i]).active_i = i;
602
603 // Fill the nlp definition
604 nlp_["x"] = veccat(x);
605 nlp_["p"] = veccat(p);
606
607 nlp_["f"] = f_;
608
609 offset = 0;
610 for (casadi_int i=0;i<g_.size();++i) {
611 MetaCon& r = meta_con(g_[i]);
612 MetaVar& r2 = meta(r.dual_canon);
613 symbol_active_[r2.count] = true;
614
615 // Compute offsets for this constraint:
616 // location into the global constraint variable
617 r.start = offset;
618 offset+= r.canon.nnz();
619 r.stop = offset;
620
621 r2.start = r.start;
622 r2.stop = r.stop;
623
624 }
625
626 std::vector<MX> lam = active_symvar(OPTI_DUAL_G);
627 for (casadi_int i=0;i<lam.size();++i) meta(lam[i]).active_i = i;
628
629 lam_ = veccat(lam);
630
631 // Collect bounds and canonical form of constraints
632 std::vector<MX> g_all;
633 std::vector<MX> h_all;
634 std::vector<MX> lbg_all;
635 std::vector<MX> ubg_all;
636 for (const auto& g : g_) {
637 if (meta_con(g).type==OPTI_PSD) {
638 h_all.push_back(meta_con(g).canon);
639 } else {
640 g_all.push_back(meta_con(g).canon);
641 lbg_all.push_back(meta_con(g).lb);
642 ubg_all.push_back(meta_con(g).ub);
643 }
644 }
645
646 nlp_["g"] = veccat(g_all);
647 if (problem_type_=="conic") {
648 nlp_["h"] = diagcat(h_all);
649 }
650
651 // Create bounds helper function
652 MXDict bounds;
653 bounds["p"] = nlp_["p"];
654 bounds_lbg_ = veccat(lbg_all);
655 bounds_ubg_ = veccat(ubg_all);
656
657 bounds["lbg"] = bounds_lbg_;
658 bounds["ubg"] = bounds_ubg_;
659
660 bounds_ = Function("bounds", bounds, {"p"}, {"lbg", "ubg"});
661 mark_problem_dirty(false);
662 }
663
// Select solver plugin and options; the actual solver object is (re)built
// lazily in solve(). Solver-specific options are nested under the plugin name.
void OptiNode::solver(const std::string& solver_name, const Dict& plugin_options,
    const Dict& solver_options) {
  solver_name_ = solver_name;
  solver_options_ = plugin_options;
  if (!solver_options.empty())
    solver_options_[solver_name] = solver_options;
  mark_solver_dirty();
}
672
sort(const std::vector<MX> & v) const673 std::vector<MX> OptiNode::sort(const std::vector<MX>& v) const {
674 // We exploit the fact that std::map is ordered
675
676 // Populate map
677 std::map<casadi_int, MX> unordered;
678 for (const auto& d : v)
679 unordered[meta(d).count] = d;
680
681 // Loop over map (ordered)
682 std::vector<MX> ret;
683 for (auto const &e : unordered)
684 ret.push_back(e.second);
685 return ret;
686 }
687
// All symbols ever created by this instance, in creation order.
std::vector<MX> OptiNode::symvar() const {
  return symbols_;
}
691
// Symbols appearing in expr, sorted by creation order.
std::vector<MX> OptiNode::symvar(const MX& expr) const {
  return sort(MX::symvar(expr));
}
695
// Decompose a (possibly chained) inequality a1<=a2<=...<=an into its
// operand list {a1, a2, ..., an}. 'flipped' is set when the chain was
// built right-to-left (i.e. originally written with >=).
std::vector<MX> OptiNode::ineq_unchain(const MX& a, bool& flipped) {
  flipped = false;
  casadi_assert_dev(a.is_op(OP_LE) || a.is_op(OP_LT));

  // Is there inequalities in the left or right leaf?
  bool left = a.dep(0).is_op(OP_LE) || a.dep(0).is_op(OP_LT);
  bool right = a.dep(1).is_op(OP_LE) || a.dep(1).is_op(OP_LT);
  casadi_assert_dev(!left || !right);

  if (!left && !right)
    return {a.dep(0), a.dep(1)}; // Simple inequality

  // We have a chain of inequalities:
  // walk down the nested side, collecting the non-nested operand each step
  bool ineq = !left;
  std::vector<MX> ret = {a.dep(!ineq)};
  MX e = a.dep(ineq);
  while (e.is_op(OP_LE) || e.is_op(OP_LT)) {
    casadi_assert_dev(!e.is_op(OP_EQ));
    casadi_assert_dev(!e.dep(!ineq).is_op(OP_LE) && !e.dep(!ineq).is_op(OP_LT));
    ret.push_back(e.dep(!ineq));
    e = e.dep(ineq);
  }
  ret.push_back(e);
  // Left-nested chains were collected in reverse order
  if (left) std::reverse(ret.begin(), ret.end());
  flipped = !left;

  return ret;
}
724
assert_only_opti_symbols(const MX & e) const725 void OptiNode::assert_only_opti_symbols(const MX& e) const {
726 std::vector<MX> symbols = MX::symvar(e);
727 for (const auto& s : symbols) assert_has(s);
728 }
729
// Verify every symbol in e belongs to this instance and none is a dual.
void OptiNode::assert_only_opti_nondual(const MX& e) const {
  std::vector<MX> symbols = MX::symvar(e);
  for (const auto& s : symbols) {
    assert_has(s);
    casadi_assert(meta(s).type!=OPTI_DUAL_G, "Dual variables forbidden in this context.");
  }
}
737
// True if expr depends on no decision variable (only parameters/constants).
bool OptiNode::is_parametric(const MX& expr) const {
  return symvar(expr, OPTI_VAR).empty();
}
741
/** Classify a constraint expression and bring it into canonical form:
 *  fills MetaCon with type, canonical expression, lower/upper bounds and
 *  chain metadata. Handles simple/chained/double inequalities, matrix
 *  (PSD) inequalities, equalities, and (for conic problems) rewrites
 *  norm bounds into second-order-cone form. */
MetaCon OptiNode::canon_expr(const MX& expr) const {
  MX c = expr;

  MetaCon con;
  con.original = expr;

  if (c.is_op(OP_LE) || c.is_op(OP_LT)) { // Inequalities
    std::vector<MX> ret;
    bool flipped;
    std::vector<MX> args = ineq_unchain(c, flipped);
    std::vector<bool> parametric;
    for (auto &a : args) parametric.push_back(is_parametric(a));

    if (args.size()==2 && (parametric[0] || parametric[1])) {
      // case: g(x,p) <= bound(p)
      MX e = args[0]-args[1];
      if (e.is_vector()) {
        casadi_assert(!parametric[0] || !parametric[1],
          "Constraint must contain decision variables.");
        if (problem_type_=="conic") {
          // ||y|| <= b  ->  second-order cone block
          if (args[0].op()==OP_NORMF || args[0].op()==OP_NORM2) {
            args[0] = -soc(args[0].dep(), args[1]);
            args[1] = 0;
          }
        } else {
          con.type = OPTI_INEQUALITY;
          if (parametric[0]) {
            // bound(p) <= g(x,p): bound goes to lb
            con.lb = args[0]*DM::ones(e.sparsity());
            con.ub = inf*DM::ones(e.sparsity());
            con.canon = args[1]*DM::ones(e.sparsity());
          } else {
            // g(x,p) <= bound(p): bound goes to ub
            con.lb = -inf*DM::ones(e.sparsity());
            con.ub = args[1]*DM::ones(e.sparsity());
            con.canon = args[0]*DM::ones(e.sparsity());
          }
          return con;
        }
      }
      // Fall through to generic inequalities
    } else if (args.size()==3 && parametric[0] && parametric[2]) {
      // lb(p) <= g(x,p) <= ub(p)
      con.type = OPTI_DOUBLE_INEQUALITY;
      con.lb = args[0]*DM::ones(args[1].sparsity());
      con.ub = args[2]*DM::ones(args[1].sparsity());
      con.canon = args[1]*DM::ones(args[1].sparsity());
      con.flipped = flipped;
      con.n = 2;
      return con;
    }

    // Generic case: turn each adjacent pair of the chain into g_j - g_{j+1}
    bool type_known = false;
    for (casadi_int j=0;j<args.size()-1;++j) {
      MX e = args[j]-args[j+1];
      if (problem_type_=="conic") {
        if (args[j].op()==OP_NORMF || args[j].op()==OP_NORM2) {
          args[j] = -soc(args[j].dep(), args[j+1]);
          args[j+1] = 0;
          e = args[j]-args[j+1];
        }
      }
      if (e.is_vector()) {
        // g1(x,p) <= g2(x,p)
        ret.push_back(e);
        // Mixing element-wise and matrix inequalities in one chain is not allowed
        casadi_assert_dev(!type_known || con.type==OPTI_GENERIC_INEQUALITY);
        type_known = true;
        con.type = OPTI_GENERIC_INEQUALITY;
        con.flipped = flipped;
      } else {
        // A(x,p) >= b(p)
        MX a = args[j+1];
        MX b = args[j];
        e = a-b;

        casadi_assert(e.size1()==e.size2(),
          "Matrix inequalities must be square. Did you mean element-wise inequality instead?");
        // Promote scalar sides to scaled identity matrices
        if (a.is_scalar()) a*= MX::eye(e.size1());
        if (b.is_scalar()) b*= MX::eye(e.size1());
        e = a-b;

        ret.push_back(e);
        casadi_assert_dev(!type_known || con.type==OPTI_PSD);
        type_known = true;
        con.type = OPTI_PSD;
      }
    }

    if (con.type==OPTI_GENERIC_INEQUALITY) {
      // Stack all chain links: canon <= 0
      con.canon = veccat(ret);
      con.lb = -inf*DM::ones(con.canon.sparsity());
      con.ub = DM::zeros(con.canon.sparsity());
      con.n = ret.size();
    } else {
      con.canon = diagcat(ret);
      con.n = ret.size();
    }
    return con;
  } else if (c.is_op(OP_EQ)) { // Equalities
    casadi_assert(!is_parametric(c.dep(0)) || !is_parametric(c.dep(1)),
      "Constraint must contain decision variables.");
    MX e = c.dep(0)-c.dep(1);
    if (is_parametric(c.dep(0))) {
      // value(p) == g(x,p)
      con.canon = c.dep(1)*DM::ones(e.sparsity());
      con.lb = c.dep(0)*DM::ones(e.sparsity());
      con.type = OPTI_EQUALITY;
      casadi_assert(c.dep(0).size1()<=c.dep(1).size1() && c.dep(0).size2()<=c.dep(1).size2(),
        "Constraint shape mismatch.");
    } else if (is_parametric(c.dep(1))) {
      // g(x,p) == value(p)
      con.canon = c.dep(0)*DM::ones(e.sparsity());
      con.lb = c.dep(1)*DM::ones(e.sparsity());
      con.type = OPTI_EQUALITY;
      casadi_assert(c.dep(1).size1()<=c.dep(0).size1() && c.dep(1).size2()<=c.dep(0).size2(),
        "Constraint shape mismatch.");
    } else {
      // g1(x,p) == g2(x,p): canonicalize as g1-g2 == 0
      con.lb = DM::zeros(e.sparsity());
      con.canon = e;
      con.type = OPTI_GENERIC_EQUALITY;
    }
    con.ub = con.lb;
    return con;
  } else { // Something else
    con.type = OPTI_UNKNOWN;
    con.canon = c;
    return con;
  }

}
868
// Raise unless solve() has completed since the last structural change.
void OptiNode::assert_solved() const {
  casadi_assert(solved(),
    "This action is forbidden since you have not solved the Opti stack yet "
    "(with calling 'solve').");
}
874
// Raise unless the problem has been baked (bake() runs as part of solve()).
void OptiNode::assert_baked() const {
  casadi_assert(!problem_dirty(),
    "This action is forbidden since you have not baked the Opti stack yet "
    "(with calling 'solve').");
}
880
// Internal check: no objective and no constraints have been set yet.
void OptiNode::assert_empty() const {
  casadi_assert_dev(g_.empty());
  casadi_assert_dev(f_.is_empty());
}
885
// Set the (scalar) objective; replaces any previous objective.
void OptiNode::minimize(const MX& f) {
  assert_only_opti_nondual(f);
  mark_problem_dirty();
  casadi_assert(f.is_scalar(), "Objective must be scalar, got " + f.dim() + ".");
  f_ = f;
}
892
subject_to(const MX & g)893 void OptiNode::subject_to(const MX& g) {
894 assert_only_opti_nondual(g);
895 mark_problem_dirty();
896 g_.push_back(g);
897
898 casadi_assert(!g.is_empty(), "You passed an empty expression to `subject_to`. "
899 "Make sure the number of rows and columns is non-zero. "
900 "Got " + g.dim(true) + ".");
901 casadi_assert(g.nnz()>0, "You passed a fully sparse expression to `subject_to`. "
902 "Make sure the expression has at least one nonzero. "
903 "Got " + g.dim(true) + ".");
904 casadi_assert(!g.is_constant(), "You passed a constant to `subject_to`. "
905 "You need a symbol to form a constraint.");
906
907 // Store the meta-data
908 set_meta_con(g, canon_expr(g));
909 register_dual(meta_con(g));
910 }
911
// Remove all constraints (and their dual-variable bookkeeping).
void OptiNode::subject_to() {
  mark_problem_dirty();
  g_.clear();
  store_initial_[OPTI_DUAL_G].clear();
  store_latest_[OPTI_DUAL_G].clear();
  count_dual_ = 0;
}
919
symvar(const MX & expr,VariableType type) const920 std::vector<MX> OptiNode::symvar(const MX& expr, VariableType type) const {
921 std::vector<MX> ret;
922 for (const auto& d : symvar(expr)) {
923 if (meta(d).type==type) ret.push_back(d);
924 }
925
926 return ret;
927 }
928
// Accept a solver result dict: scatter "x" (and "lam_g", when present and
// not a conic problem) back into the per-symbol latest-value stores,
// using each symbol's [start, stop) offsets. Marks the stack solved.
void OptiNode::res(const DMDict& res) {
  const std::vector<double> & x_v = res.at("x").nonzeros();
  for (const auto &v : active_symvar(OPTI_VAR)) {
    casadi_int i = meta(v).i;
    std::vector<double> & data_v = store_latest_[OPTI_VAR][i].nonzeros();
    std::copy(x_v.begin()+meta(v).start, x_v.begin()+meta(v).stop, data_v.begin());
  }
  if (res.find("lam_g")!=res.end() && problem_type_!="conic") {
    const std::vector<double> & lam_v = res.at("lam_g").nonzeros();
    for (const auto &v : active_symvar(OPTI_DUAL_G)) {
      casadi_int i = meta(v).i;
      std::vector<double> & data_v = store_latest_[OPTI_DUAL_G][i].nonzeros();
      std::copy(lam_v.begin()+meta(v).start, lam_v.begin()+meta(v).stop, data_v.begin());
    }
  }
  res_ = res;
  mark_solved();
}
947
// True if the cached solver callback was built for a different OptiNode
// (e.g. after copy) and must be recreated before solving.
bool OptiNode::old_callback() const {
  if (callback_.is_null()) return false;
  InternalOptiCallback* cb = static_cast<InternalOptiCallback*>(callback_.get());
  return !cb->associated_with(this);
}
953 // Solve the problem
// Solve the problem: (re)bake if structure changed, (re)build the solver
// if needed, run it, and store the result. Throws on solver failure
// (unless accept_limit allows limit-terminated runs).
OptiSol OptiNode::solve(bool accept_limit) {

  if (problem_dirty()) {
    bake();
  }
  // Verify the constraint types
  for (const auto& g : g_) {
    if (problem_type_!="conic") {
      if (meta_con(g).type==OPTI_PSD)
        casadi_error("Psd constraints not implemented yet. "
        "Perhaps you intended an element-wise inequality? "
        "In that case, make sure that the matrix is flattened (e.g. mat(:)).");
    }
  }

  // Rebuild when options changed, the callback belongs to another instance,
  // or a user callback was registered after the solver was constructed
  bool solver_update = solver_dirty() || old_callback() || (user_callback_ && callback_.is_null());

  if (solver_update) {
    Dict opts = solver_options_;

    // Handle callbacks
    if (user_callback_) {
      callback_ = Function::create(new InternalOptiCallback(*this), Dict());
      opts["iteration_callback"] = callback_;
    }

    casadi_assert(!solver_name_.empty(),
      "You must call 'solver' on the Opti stack to select a solver. "
      "Suggestion: opti.solver('ipopt')");

    if (problem_type_=="conic") {
      solver_ = qpsol("solver", solver_name_, nlp_, opts);
    } else {
      solver_ = nlpsol("solver", solver_name_, nlp_, opts);
    }
    mark_solver_dirty(false);
  }

  solve_prepare();
  res(solve_actual(arg_));

  std::string ret = return_status();

  casadi_assert(return_success(accept_limit),
    "Solver failed. You may use opti.debug.value to investigate the latest values of variables."
    " return_status is '" + ret + "'");

  return Opti(this);
}
1003
1004 // Solve the problem
solve_prepare()1005 void OptiNode::solve_prepare() {
1006
1007
1008 // Verify the constraint types
1009 for (const auto& g : g_) {
1010 if (meta_con(g).type==OPTI_UNKNOWN)
1011 casadi_error("Constraint type unknown. Use ==, >= or <= .");
1012 }
1013
1014 if (user_callback_) {
1015 InternalOptiCallback* cb = static_cast<InternalOptiCallback*>(callback_.get());
1016 cb->reset();
1017 }
1018
1019 // Get initial guess and parameter values
1020 arg_["x0"] = veccat(active_values(OPTI_VAR));
1021 arg_["p"] = veccat(active_values(OPTI_PAR));
1022 arg_["lam_g0"] = veccat(active_values(OPTI_DUAL_G));
1023 if (!arg_["p"].is_regular()) {
1024 std::vector<MX> s = active_symvar(OPTI_PAR);
1025 std::vector<DM> v = active_values(OPTI_PAR);
1026 for (casadi_int i=0;i<s.size();++i) {
1027 casadi_assert(v[i].is_regular(),
1028 "You have forgotten to assign a value to a parameter ('set_value'), "
1029 "or have set it to NaN/Inf:\n" + describe(s[i], 1));
1030 }
1031 }
1032
1033 // Evaluate bounds for given parameter values
1034 DMDict arg;
1035 arg["p"] = arg_["p"];
1036 DMDict res = bounds_(arg);
1037 arg_["lbg"] = res["lbg"];
1038 arg_["ubg"] = res["ubg"];
1039
1040 }
1041
solve_actual(const DMDict & arg)1042 DMDict OptiNode::solve_actual(const DMDict& arg) {
1043 return solver_(arg);
1044 }
1045
override_num(const std::map<casadi_int,MX> & temp,std::vector<DM> & num,casadi_int i)1046 bool override_num(const std::map<casadi_int, MX> & temp, std::vector<DM>& num, casadi_int i) {
1047 // Override when values are supplied
1048 auto it = temp.find(i);
1049 if (it==temp.end()) {
1050 return true;
1051 } else {
1052 Slice all;
1053 DM t = static_cast<DM>(it->second);
1054 num.back().set(t, false, all, all);
1055 }
1056 return false;
1057 }
1058
  // Evaluate an arbitrary expression numerically, using the latest solution
  // for variables/duals, stored values for parameters, and any explicit
  // 'symbol == constant' overrides passed in 'values'.
  DM OptiNode::value(const MX& expr, const std::vector<MX>& values) const {
    // Partition the symbols appearing in expr by their role
    std::vector<MX> x = symvar(expr, OPTI_VAR);
    std::vector<MX> p = symvar(expr, OPTI_PAR);
    std::vector<MX> lam = symvar(expr, OPTI_DUAL_G);

    // Map (variables, parameters, duals) -> expr
    Function helper = Function("helper", std::vector<MX>{veccat(x), veccat(p), veccat(lam)}, {expr});
    // Any remaining free symbol was never declared through this Opti stack
    if (helper.has_free())
      casadi_error("This expression has symbols that are not defined "
        "within Opti using variable/parameter.");

    // Index the overrides by symbol type and per-type index
    std::map<VariableType, std::map<casadi_int, MX> > temp;
    temp[OPTI_DUAL_G] = std::map<casadi_int, MX>();
    for (const auto& v : values) {
      // Each override must look like 'symbol == constant'
      casadi_assert_dev(v.is_op(OP_EQ));
      casadi_int i = meta(v.dep(1)).i;
      casadi_assert_dev(v.dep(0).is_constant());
      temp[meta(v.dep(1)).type][i] = v.dep(0);
    }

    // Track whether any symbol falls back to a stored (solver-produced) value
    bool undecided_vars = false;
    // Variables: latest solution, unless overridden
    std::vector<DM> x_num;
    for (const auto& e : x) {
      casadi_int i = meta(e).i;
      x_num.push_back(store_latest_.at(OPTI_VAR).at(i));
      undecided_vars |= override_num(temp[OPTI_VAR], x_num, i);
    }

    // Duals: latest solution, unless overridden
    std::vector<DM> lam_num;
    for (const auto& e : lam) {
      casadi_int i = meta(e).i;
      casadi_assert(i<store_latest_.at(OPTI_DUAL_G).size(),
        "This expression has a dual for a constraint that is not given to Opti:\n" +
        describe(e, 1));
      lam_num.push_back(store_latest_.at(OPTI_DUAL_G).at(i));
      undecided_vars |= override_num(temp[OPTI_DUAL_G], lam_num, i);
    }

    // Parameters: stored values, unless overridden; must always be set
    std::vector<DM> p_num;
    for (const auto& e : p) {
      casadi_int i = meta(e).i;
      p_num.push_back(store_initial_.at(OPTI_PAR).at(i));
      override_num(temp[OPTI_PAR], p_num, i);
      casadi_assert(p_num.back().is_regular(),
        "This expression depends on a parameter with unset value:\n"+
        describe(e, 1));
    }

    // Falling back to solver results is only valid after a successful solve,
    // and only for symbols that actually took part in the problem
    if (undecided_vars) {
      assert_solved();
      for (const auto& e : x)
        casadi_assert(symbol_active_[meta(e).count],
          "This expression has symbols that do not appear in the constraints and objective:\n" +
          describe(e, 1));
      for (const auto& e : lam)
        casadi_assert(symbol_active_[meta(e).count],
          "This expression has a dual for a constraint that is not given to Opti:\n" +
          describe(e, 1));
    }

    // Numerically evaluate expr at the assembled values
    std::vector<DM> arg = helper(std::vector<DM>{veccat(x_num), veccat(p_num), veccat(lam_num)});
    return arg[0];
  }
1121
assert_active_symbol(const MX & m) const1122 void OptiNode::assert_active_symbol(const MX& m) const {
1123 assert_has(m);
1124 assert_baked();
1125 casadi_assert(symbol_active_[meta(m).count], "Opti symbol is not used in Solver."
1126 " It does not make sense to assign a value to it:\n" + describe(m, 1));
1127 }
1128
set_initial(const std::vector<MX> & assignments)1129 void OptiNode::set_initial(const std::vector<MX>& assignments) {
1130 for (const auto& v : assignments) {
1131 casadi_assert_dev(v.is_op(OP_EQ));
1132 casadi_assert_dev(v.dep(0).is_constant());
1133 if (has(v.dep(1)))
1134 set_initial(v.dep(1), static_cast<DM>(v.dep(0)));
1135 }
1136 }
1137
set_value(const std::vector<MX> & assignments)1138 void OptiNode::set_value(const std::vector<MX>& assignments) {
1139 for (const auto& v : assignments) {
1140 casadi_assert_dev(v.is_op(OP_EQ));
1141 casadi_assert_dev(v.dep(0).is_constant());
1142 if (has(v.dep(1)))
1143 set_value(v.dep(1), static_cast<DM>(v.dep(0)));
1144 }
1145 }
1146
set_value_internal(const MX & x,const DM & v)1147 void OptiNode::set_value_internal(const MX& x, const DM& v) {
1148 mark_solved(false);
1149 casadi_assert_dev(v.is_regular());
1150 if (x.is_symbolic()) {
1151 DM& target = store_initial_[meta(x).type][meta(x).i];
1152 Slice all;
1153 target.set(v, false, all, all);
1154 return;
1155 }
1156
1157 // Obtain symbolic primitives
1158 std::vector<MX> symbols = MX::symvar(x);
1159 MX symbols_cat = veccat(symbols);
1160
1161 std::string failmessage = "You cannot set initial/value of an arbitrary expression. "
1162 "Use symbols or simple mappings of symbols.";
1163
1164 // Assert x is linear in its symbolic primitives
1165 for (bool b : which_depends(x, symbols_cat, 2, false)) casadi_assert(!b, failmessage);
1166
1167 // Evaluate jacobian of expr wrt symbols
1168 Dict opts = {{"compact", true}};
1169 Function Jf("Jf", std::vector<MX>{}, std::vector<MX>{jacobian(x, veccat(symbols), opts)});
1170 DM J = Jf(std::vector<DM>{})[0];
1171 Sparsity sp_JT = J.T().sparsity();
1172
1173 Function Ff("Ff", symbols, {x});
1174 DM E = Ff(std::vector<DM>(symbols.size(), 0))[0];
1175 std::vector<double>& e = E.nonzeros();
1176
1177 // Cast the v input into the expected sparsity
1178 Slice all;
1179 DM value(x.sparsity());
1180 value.set(v, false, all, all);
1181
1182 // Purge empty rows
1183 std::vector<casadi_int> filled_rows = sum2(J).get_row();
1184 J = J(filled_rows, all); // NOLINT(cppcoreguidelines-slicing)
1185
1186 // Get rows and columns of the mapping
1187 std::vector<casadi_int> row, col;
1188 J.sparsity().get_triplet(row, col);
1189 const std::vector<double>& scaling = J.nonzeros();
1190 const std::vector<double>& data_original = value.nonzeros();
1191
1192 std::vector<double> data; data.reserve(value.nnz());
1193 for (casadi_int i=0;i<value.nnz();++i) {
1194 double v = data_original[i];
1195 casadi_int nz = sp_JT.colind()[i+1]-sp_JT.colind()[i];
1196 casadi_assert(nz<=1, failmessage);
1197 if (nz) {
1198 data.push_back(v);
1199 } else {
1200 casadi_assert(v==e[i], "In initial/value assignment: "
1201 "inconsistent numerical values. At nonzero " + str(i) + ", lhs has "
1202 + str(e[i]) + ", while rhs has " + str(v) + ".");
1203 }
1204 }
1205
1206 // Contiguous workspace for nonzeros of all involved symbols
1207 std::vector<double> temp(symbols_cat.nnz(), casadi::nan);
1208 for (casadi_int k=0;k<data.size();++k) {
1209 double& lhs = temp[col[k]];
1210 double rhs = data[row[k]]/scaling[row[k]];
1211 if (std::isnan(lhs)) {
1212 // Assign in the workspace
1213 lhs = rhs;
1214 } else {
1215 casadi_assert(lhs==rhs, "Initial/value assignment with mapping is ambiguous.");
1216 }
1217 }
1218
1219 casadi_int offset = 0;
1220 for (const auto & s : symbols) {
1221 DM& target = store_initial_[meta(s).type][meta(s).i];
1222 std::vector<double>& data = target.nonzeros();
1223 // Loop over nonzeros in each symbol
1224 for (casadi_int i=0;i<s.nnz();++i) {
1225 // Copy from the workspace (barring fields that were not set)
1226 double v = temp[offset+i];
1227 if (!std::isnan(v)) data[i] = v;
1228 }
1229 offset+=s.nnz();
1230 }
1231
1232 }
1233
set_initial(const MX & x,const DM & v)1234 void OptiNode::set_initial(const MX& x, const DM& v) {
1235 for (const auto & s : MX::symvar(x))
1236 casadi_assert(meta(s).type!=OPTI_PAR,
1237 "You cannot set an initial value for a parameter. Did you mean 'set_value'?");
1238 set_value_internal(x, v);
1239 }
1240
set_value(const MX & x,const DM & v)1241 void OptiNode::set_value(const MX& x, const DM& v) {
1242 for (const auto & s : MX::symvar(x))
1243 casadi_assert(meta(s).type!=OPTI_VAR,
1244 "You cannot set a value for a variable. Did you mean 'set_initial'?");
1245 set_value_internal(x, v);
1246 }
1247
active_symvar(VariableType type) const1248 std::vector<MX> OptiNode::active_symvar(VariableType type) const {
1249 if (symbol_active_.empty()) return std::vector<MX>{};
1250 std::vector<MX> ret;
1251 for (const auto& s : symbols_) {
1252 if (symbol_active_[meta(s).count] && meta(s).type==type)
1253 ret.push_back(s);
1254 }
1255 return ret;
1256 }
1257
active_values(VariableType type) const1258 std::vector<DM> OptiNode::active_values(VariableType type) const {
1259 if (symbol_active_.empty()) return std::vector<DM>{};
1260 std::vector<DM> ret;
1261 for (const auto& s : symbols_) {
1262 if (symbol_active_[meta(s).count] && meta(s).type==type) {
1263 ret.push_back(store_initial_.at(meta(s).type)[meta(s).i]);
1264 }
1265 }
1266 return ret;
1267 }
1268
  // Turn the (baked) optimization problem into a plain casadi::Function:
  // the supplied 'args' symbols become inputs, the solver call is embedded
  // symbolically, and 'res' expressions (evaluated at the solution) become
  // outputs.
  Function OptiNode::to_function(const std::string& name,
      const std::vector<MX>& args, const std::vector<MX>& res,
      const std::vector<std::string>& name_in,
      const std::vector<std::string>& name_out,
      const Dict& opts) {
    // Work on a baked copy if the specification changed since the last bake
    if (problem_dirty()) return baked_copy().to_function(name, args, res, name_in, name_out, opts);

    // Fresh solver instance; the cached solver_ member is left untouched
    Function solver;
    if (problem_type_=="conic") {
      solver = qpsol("solver", solver_name_, nlp_, solver_options_);
    } else {
      solver = nlpsol("solver", solver_name_, nlp_, solver_options_);
    }

    // Get initial guess and parameter values
    std::vector<MX> x0, p, lam_g;
    assign_vector(active_values(OPTI_VAR), x0);
    assign_vector(active_values(OPTI_PAR), p);
    assign_vector(active_values(OPTI_DUAL_G), lam_g);

    // Splice the user-supplied argument symbols over the stored numeric
    // defaults, matched by each primitive's active index
    casadi_int k = 0;
    for (const auto& a : args) {
      casadi_assert(a.is_valid_input(), "Argument " + str(k) + " is not purely symbolic.");
      k++;
      for (const auto& prim : a.primitives()) {
        // Primitives not used in the problem keep their stored value
        if (!symbol_active_[meta(prim).count]) continue;
        casadi_int i = meta(prim).active_i;
        if (meta(prim).type==OPTI_VAR) {
          x0.at(i) = prim;
        } else if (meta(prim).type==OPTI_PAR) {
          p.at(i) = prim;
        } else if (meta(prim).type==OPTI_DUAL_G) {
          lam_g.at(i) = prim;
        } else {
          casadi_error("Unknown");
        }
      }
    }
    // Assemble the symbolic solver input
    MXDict arg;
    arg["p"] = veccat(p);

    // Evaluate bounds for given parameter values
    MXDict r = bounds_(arg);
    arg["x0"] = veccat(x0);
    arg["lam_g0"] = veccat(lam_g);
    arg["lbg"] = r["lbg"];
    arg["ubg"] = r["ubg"];

    // Embed the solver call symbolically
    r = solver(arg);

    // Re-express 'res' in terms of the solver outputs
    std::vector<MX> helper_in = {veccat(active_symvar(OPTI_VAR)),
                                  veccat(active_symvar(OPTI_PAR)),
                                  veccat(active_symvar(OPTI_DUAL_G))};
    Function helper("helper", helper_in, {res});

    std::vector<MX> arg_in = helper(std::vector<MX>{r.at("x"), arg["p"], r.at("lam_g")});

    return Function(name, args, arg_in, name_in, name_out, opts);

  }
1329
  // Print a representation of this node to 'stream'.
  // NOTE(review): intentionally(?) a no-op — both 'stream' and 'more' are
  // ignored; presumably printing is handled by the owning Opti class or is
  // still to be implemented. Confirm before relying on it.
  void OptiNode::disp(ostream &stream, bool more) const {

  }
1333
  // Serial number identifying this Opti instance (stored at construction).
  casadi_int OptiNode::instance_number() const {
    return instance_number_;
  }
1337
1338 } // namespace casadi
1339