// Copyright (C) 2004, 2006 International Business Machines and others.
// All Rights Reserved.
// This code is published under the Common Public License.
//
// $Id: IpBacktrackingLineSearch.cpp 759 2006-07-07 03:07:08Z andreasw $
//
// Authors:  Carl Laird, Andreas Waechter     IBM    2004-08-13
//           Andreas Waechter                 IBM    2005-10-13
//             derived file from IpFilterLineSearch.cpp

#include "IpBacktrackingLineSearch.hpp"
#include "IpJournalist.hpp"
#include "IpRestoPhase.hpp"
#include "IpAlgTypes.hpp"

#ifdef HAVE_CMATH
# include <cmath>
#else
# ifdef HAVE_MATH_H
#  include <math.h>
# else
#  error "don't have header file for math"
# endif
#endif

namespace SimTKIpopt
{

#ifdef IP_DEBUG
  static const Index dbg_verbosity = 0;
#endif

  // Constructor.  Stores the strategy objects: the acceptor decides
  // whether a trial point is acceptable (e.g. filter or penalty based),
  // the (optional) restoration phase is the fallback when backtracking
  // fails, and the convergence check is used for acceptable-point tests.
  BacktrackingLineSearch::BacktrackingLineSearch(
    const SmartPtr<BacktrackingLSAcceptor>& acceptor,
    const SmartPtr<RestorationPhase>& resto_phase,
    const SmartPtr<ConvergenceCheck>& conv_check)
      :
      LineSearch(),
      acceptor_(acceptor),
      resto_phase_(resto_phase),
      conv_check_(conv_check)
  {
    DBG_START_FUN("BacktrackingLineSearch::BacktrackingLineSearch",
                  dbg_verbosity);
    // The acceptor is mandatory; resto_phase_ may be NULL (checked later).
    DBG_ASSERT(IsValid(acceptor_));
  }

  BacktrackingLineSearch::~BacktrackingLineSearch()
  {
    DBG_START_FUN("BacktrackingLineSearch::~BacktrackingLineSearch()",
                  dbg_verbosity);
  }

  // Registers all user options read by this class (see InitializeImpl for
  // where each one is retrieved into a member field).
  void BacktrackingLineSearch::RegisterOptions(SmartPtr<RegisteredOptions> roptions)
  {
    roptions->AddBoundedNumberOption(
      "alpha_red_factor",
      "Fractional reduction of the trial step size in the backtracking line search.",
      0.0, true, 1.0, true, 0.5,
      "At every step of the backtracking line search, the trial step size is "
      "reduced by this factor.");

    // "magic_steps" is deliberately hidden under the "Undocumented"
    // category; the previous category is restored afterwards.
    std::string prev_category = roptions->RegisteringCategory();
    roptions->SetRegisteringCategory("Undocumented");
    roptions->AddStringOption2(
      "magic_steps",
      "Enables magic steps.",
      "no",
      "no", "don't take magic steps",
      "yes", "take magic steps",
      "DOESN'T REALLY WORK YET!");
    roptions->SetRegisteringCategory(prev_category);

    roptions->AddStringOption2(
      "accept_every_trial_step",
      "Always accept the frist trial step.",
      "no",
      "no", "don't arbitrarily accept the full step",
      "yes", "always accept the full step",
      "Setting this option to \"yes\" essentially disables the line search "
      "and makes the algorithm take aggressive steps, without global "
      "convergence guarantees.");

    roptions->AddStringOption7(
      "alpha_for_y",
      "Method to determine the step size for constraint multipliers.",
      "primal",
      "primal", "use primal step size",
      "bound_mult", "use step size for the bound multipliers (good for LPs)",
      "min", "use the min of primal and bound multipliers",
      "max", "use the max of primal and bound multipliers",
      "full", "take a full step of size one",
      "min_dual_infeas", "choose step size minimizing new dual infeasibility",
      "safe_min_dual_infeas", "like \"min_dual_infeas\", but safeguarded by \"min\" and \"max\"",
      "This option determines how the step size (alpha_y) will be calculated when updating the "
      "constraint multipliers.");

    roptions->AddLowerBoundedNumberOption(
      "tiny_step_tol",
      "Tolerance for detecting numerically insignificant steps.",
      Number(0), false, 10*std::numeric_limits<Number>::epsilon(),
      "If the search direction in the primal variables (x and s) is, in "
      "relative terms for each component, less than this value, the "
      "algorithm accepts the full step without line search. If this happens "
      "repeatedly, the algorithm will terminate with a corresponding exit "
      "message. The default value is 10 times machine precision.");
    roptions->AddLowerBoundedNumberOption(
      "tiny_step_y_tol",
      "Tolerance for quitting because of numerically insignificant steps.",
      Number(0), false, Number(1e-2),
      "If the search direction in the primal variables (x and s) is, in "
      "relative terms for each component, repeatedly less than tiny_step_tol, "
      "and the step in the y variables is smaller than this threshold, the "
      "algorithm will terminate.");
    roptions->AddLowerBoundedIntegerOption(
      "watchdog_shortened_iter_trigger",
      "Number of shortened iterations that trigger the watchdog.",
      0, 10,
      "If the number of successive iterations in which the backtracking line search "
      "did not accept the first trial point exceeds this number, the "
      "watchdog procedure is activated. Choosing \"0\" here disables the "
      "watchdog procedure.");
    roptions->AddLowerBoundedIntegerOption(
      "watchdog_trial_iter_max",
      "Maximum number of watchdog iterations.",
      1, 3,
      "This option determines the number of trial iterations "
      "allowed before the watchdog "
      "procedure is aborted and the algorithm returns to the stored point.");

    roptions->SetRegisteringCategory("Restoration Phase");
    roptions->AddStringOption2(
      "expect_infeasible_problem",
      "Enable heuristics to quickly detect an infeasible problem.",
      "no",
      "no", "the problem probably be feasible",
      "yes", "the problem has a good chance to be infeasible",
      "This options is meant to activate heuristics that may speed up the "
      "infeasibility determination if you expect that there is a good chance for the problem to be "
      "infeasible. In the filter line search procedure, the restoration "
      "phase is called more quickly than usually, and more reduction in "
      "the constraint violation is enforced before the restoration phase is "
      "left. If the problem is square, this option is enabled automatically.");
    roptions->AddLowerBoundedNumberOption(
      "expect_infeasible_problem_ctol",
      "Threshold for disabling \"expect_infeasible_problem\" option.",
      Number(0), false, Number(1e-3),
      "If the constraint violation becomes smaller than this threshold, "
      "the \"expect_infeasible_problem\" heuristics in the filter line "
      "search are disabled. If the problem is square, this options is set to "
      "0.");
    roptions->AddStringOption2(
      "start_with_resto",
      "Tells algorithm to switch to restoration phase in first iteration.",
      "no",
      "no", "don't force start in restoration phase",
      "yes", "force start in restoration phase",
      "Setting this option to \"yes\" forces the algorithm to switch to the "
      "feasibility restoration phase in the first iteration. If the initial "
      "point is feasible, the algorithm will abort with a failure.");
    roptions->AddLowerBoundedNumberOption(
      "soft_resto_pderror_reduction_factor",
      "Required reduction in primal-dual error in the soft restoration phase.",
      Number(0), false, Number(1.0 - 1e-4),
      "The soft restoration phase attempts to reduce the "
      "primal-dual error with regular steps. If the damped "
      "primal-dual step (damped only to satisfy the "
      "fraction-to-the-boundary rule) is not decreasing the primal-dual error "
      "by at least this factor, then the regular restoration phase is called. "
      "Choosing \"0\" here disables the soft "
      "restoration phase.");
    roptions->AddLowerBoundedIntegerOption(
      "max_soft_resto_iters",
      "Maximum number of iterations performed successively in soft restoration phase.",
      0, 10,
      "If the soft restoration phase is performed for more than so many "
      "iteratins in a row, the regular restoration phase is called.");
  }

  // Reads all options into member fields, initializes the strategy
  // objects, and resets the line-search state for a fresh optimization run.
  // Returns false if a sub-object fails to initialize.
  bool BacktrackingLineSearch::InitializeImpl(const OptionsList& options,
      const std::string& prefix)
  {
    options.GetNumericValue("alpha_red_factor", alpha_red_factor_, prefix);
    options.GetBoolValue("magic_steps", magic_steps_, prefix);
    options.GetBoolValue("accept_every_trial_step", accept_every_trial_step_, prefix);
    Index enum_int;
    options.GetEnumValue("alpha_for_y", enum_int, prefix);
    alpha_for_y_ = AlphaForYEnum(enum_int);
    options.GetNumericValue("expect_infeasible_problem_ctol", expect_infeasible_problem_ctol_, prefix);
    options.GetBoolValue("expect_infeasible_problem", expect_infeasible_problem_, prefix);

    options.GetBoolValue("start_with_resto", start_with_resto_, prefix);

    options.GetNumericValue("tiny_step_tol", tiny_step_tol_, prefix);
    options.GetNumericValue("tiny_step_y_tol", tiny_step_y_tol_, prefix);
    options.GetIntegerValue("watchdog_trial_iter_max", watchdog_trial_iter_max_, prefix);
    options.GetIntegerValue("watchdog_shortened_iter_trigger", watchdog_shortened_iter_trigger_, prefix);
    options.GetNumericValue("soft_resto_pderror_reduction_factor",
                            soft_resto_pderror_reduction_factor_, prefix);
    options.GetIntegerValue("max_soft_resto_iters", max_soft_resto_iters_,
                            prefix);

    bool retvalue = true;
    if (IsValid(resto_phase_)) {
      if (!resto_phase_->Initialize(Jnlst(), IpNLP(), IpData(), IpCq(),
                                    options, prefix)) {
        return false;
      }
      // NOTE(review): acceptor_->Initialize is only called when a
      // restoration phase object is present; if resto_phase_ is NULL the
      // acceptor is never initialized here — confirm this is intended.
      if (!acceptor_->Initialize(Jnlst(), IpNLP(), IpData(), IpCq(),
                                 options, prefix)) {
        return false;
      }
    }

    // Reset per-run line-search state.
    rigorous_ = true;
    skipped_line_search_ = false;
    tiny_step_last_iteration_ = false;
    fallback_activated_ = false;

    Reset();

    count_successive_shortened_steps_ = 0;

    // Backup "acceptable" iterate (restored if the algorithm later fails).
    acceptable_iterate_ = NULL;
    acceptable_iteration_number_ = -1;

    // Sentinel: forces the watchdog counters to reset on the first call to
    // FindAcceptableTrialPoint (mu will differ from -1).
    last_mu_ = -1.;

    return retvalue;
  }

  // Main driver of one line-search round.  Tries, in order: a "tiny step"
  // shortcut, the soft restoration phase (if active), the regular
  // backtracking line search (possibly under watchdog supervision), and
  // finally the full restoration phase as last resort.  On success the
  // trial point and dual variables in IpData are updated.
  void BacktrackingLineSearch::FindAcceptableTrialPoint()
  {
    DBG_START_METH("BacktrackingLineSearch::FindAcceptableTrialPoint",
                   dbg_verbosity);
    Jnlst().Printf(J_DETAILED, J_LINE_SEARCH,
                   "--> Starting filter line search in iteration %d <--\n",
                   IpData().iter_count());

    // The watchdog reference point is only meaningful for a fixed barrier
    // parameter; discard it whenever mu changed.
    Number curr_mu = IpData().curr_mu();
    if (last_mu_!=curr_mu) {
      Jnlst().Printf(J_DETAILED, J_LINE_SEARCH,
                     "Mu has changed in line search - resetting watchdog counters.\n");
      // Inactivate the watchdog and release all stored data
      in_watchdog_ = false;
      watchdog_iterate_ = NULL;
      watchdog_delta_ = NULL;
      watchdog_shortened_iter_ = 0;
      last_mu_ = curr_mu;
    }

    // If the problem is square, we want to enable the
    // expect_infeasible_problem option automatically so that the
    // restoration phase is entered soon
    if (IpCq().IsSquareProblem()) {
      expect_infeasible_problem_ = true;
      expect_infeasible_problem_ctol_ = 0.;
    }

    // Store current iterate if the optimality error is on acceptable
    // level to restored if things fail later
    if (CurrentIsAcceptable()) {
      Jnlst().Printf(J_DETAILED, J_LINE_SEARCH,
                     "Storing current iterate as backup acceptable point.\n");
      StoreAcceptablePoint();
    }

    // First assume that line search will find an acceptable trial point
    skipped_line_search_ = false;

    // Get the search directions (this will store the actual search
    // direction, possibly including higher order corrections)
    SmartPtr<IteratesVector> actual_delta;

    bool goto_resto = false;
    if (fallback_activated_) {
      // In this case, the algorithm had trouble to continue and wants
      // to call the restoration phase immediately
      goto_resto = true;
      fallback_activated_ = false; // reset the flag
    }
    else {
      // Initialize the acceptor for this backtracking line search
      acceptor_->InitThisLineSearch(in_watchdog_);
      actual_delta = IpData().delta()->MakeNewContainer();
    }

    if (start_with_resto_) {
      // If the user requested to start with the restoration phase,
      // skip the line search and do exactly that. Reset the flag so
      // that this happens only once.
      goto_resto = true;
      start_with_resto_= false;
    }

    // Huge multipliers while expecting infeasibility are taken as a sign
    // to go straight to the restoration phase.
    if (expect_infeasible_problem_ &&
        Max(IpData().curr()->y_c()->Amax(),
            IpData().curr()->y_d()->Amax()) > 1e8) {
      goto_resto = true;
    }

    bool accept = false;
    bool corr_taken = false;
    bool soc_taken = false;
    Index n_steps = 0;
    Number alpha_primal = 0.;

    // Check if search direction becomes too small
    bool tiny_step = (!goto_resto && DetectTinyStep());

    if (in_watchdog_ && (goto_resto || tiny_step)) {
      // If the step could not be computed or is too small and the
      // watchdog is active, stop the watch dog and resume everything
      // from reference point
      StopWatchDog(actual_delta);
      goto_resto = false;
      tiny_step = false;
    }

    // Check if we want to wake up the watchdog
    if (watchdog_shortened_iter_trigger_ > 0 &&
        !in_watchdog_ && !goto_resto && !tiny_step &&
        !in_soft_resto_phase_ && !expect_infeasible_problem_ &&
        watchdog_shortened_iter_ >= watchdog_shortened_iter_trigger_) {
      StartWatchDog();
    }

    // Handle the situation of a tiny step: accept the
    // fraction-to-the-boundary step without any acceptability check.
    if (tiny_step) {
      alpha_primal =
        IpCq().curr_primal_frac_to_the_bound(IpData().curr_tau());
      Jnlst().Printf(J_DETAILED, J_LINE_SEARCH,
                     "Tiny step detected. Use step size alpha = %e unchecked\n",
                     alpha_primal);
      IpData().SetTrialPrimalVariablesFromStep(alpha_primal, *IpData().delta()->x(), *IpData().delta()->s());

      // Evaluate functions at trial point - if that fails, don't use
      // the tiny step and continue with regular line search
      try {
        IpCq().trial_barrier_obj();
        IpCq().trial_constraint_violation();
      }
      catch(IpoptNLP::Eval_Error& e) {
        e.ReportException(Jnlst(), J_DETAILED);
        tiny_step = false;
      }

      if (tiny_step) {
        IpData().Set_info_ls_count(0);

        // Two tiny steps in a row: flag so the algorithm can terminate.
        if (tiny_step_last_iteration_) {
          IpData().Set_info_alpha_primal_char('T');
          IpData().Set_tiny_step_flag(true);
        }
      }
      else {
        IpData().Set_info_alpha_primal_char('t');
      }

      // If the step in the dual variables is also small, we remember
      // that we just did a tiny step so that next time we might
      // decide to quit
      Number delta_y_norm = Max(IpData().curr()->y_c()->Amax(),
                                IpData().curr()->y_d()->Amax());
      if (delta_y_norm < tiny_step_y_tol_) {
        tiny_step_last_iteration_ = true;
      }
      else {
        tiny_step_last_iteration_ = false;
      }
      accept = true;
    }
    else {
      tiny_step_last_iteration_ = false;
    }

    if (!goto_resto && !tiny_step) {

      if (in_soft_resto_phase_) {
        soft_resto_counter_++;
        if (soft_resto_counter_ > max_soft_resto_iters_) {
          // Too many soft-restoration iterations in a row; fall through
          // to the regular restoration phase below.
          accept = false;
        }
        else {
          // If we are currently in the soft restoration phase, continue
          // that way, and switch back if enough progress is made to the
          // original criterion (e.g., the filter)
          bool satisfies_original_criterion = false;
          // ToDo use tiny_step in TrySoftRestoStep?
          accept = TrySoftRestoStep(actual_delta,
                                    satisfies_original_criterion);
          if (accept) {
            IpData().Set_info_alpha_primal_char('s');
            if (satisfies_original_criterion) {
              in_soft_resto_phase_ = false;
              soft_resto_counter_ = 0;
              IpData().Set_info_alpha_primal_char('S');
            }
          }
        }
      }
      else {
        // Start the backtracking line search
        bool done = false;
        bool skip_first_trial_point = false;
        bool evaluation_error;
        while (!done) {
          accept = DoBacktrackingLineSearch(skip_first_trial_point,
                                            alpha_primal,
                                            corr_taken,
                                            soc_taken,
                                            n_steps,
                                            evaluation_error,
                                            actual_delta);
          DBG_PRINT((1, "evaluation_error = %d\n", evaluation_error));
          if (in_watchdog_) {
            if (accept) {
              // Watchdog succeeded; resume the normal procedure.
              in_watchdog_ = false;
              IpData().Append_info_string("W");
              Jnlst().Printf(J_DETAILED, J_LINE_SEARCH,
                             "Watch dog procedure successful!\n");
              done = true;
            }
            else {
              watchdog_trial_iter_++;
              if (evaluation_error ||
                  watchdog_trial_iter_ > watchdog_trial_iter_max_) {
                // Watchdog budget exhausted: return to the stored
                // reference point and redo the search from there,
                // skipping the full step that already failed.
                StopWatchDog(actual_delta);
                skip_first_trial_point = true;
              }
              else {
                // Accept the (unchecked) watchdog trial step and give the
                // watchdog another iteration to prove itself.
                done = true;
                accept = true;
              }
            }
          }
          else {
            done = true;
          }
        }
      } /* else: if (in_soft_resto_phase_) { */
    } /* if (!goto_resto && !tiny_step) { */

    // If line search has been aborted because the step size becomes
    // too small, go to the restoration phase or continue with soft
    // restoration phase
    if (!accept) {
      // If we are not asked to do a rigorous line search, do not call
      // the restoration phase.
      if (!rigorous_) {
        Jnlst().Printf(J_DETAILED, J_LINE_SEARCH, "Skipping call of restoration phase...\n");
        skipped_line_search_=true;
      }
      else {
        // Check if we should start the soft restoration phase
        if (!in_soft_resto_phase_
            && !goto_resto && !expect_infeasible_problem_) {
          Jnlst().Printf(J_DETAILED, J_LINE_SEARCH,
                         "--> Starting soft restoration phase <--\n");
          // Prepare the restoration phase, e.g., augment the filter
          // with the current point.
          acceptor_->PrepareRestoPhaseStart();

          // Try the current search direction for the soft restoration phase
          bool satisfies_original_criterion;
          accept = TrySoftRestoStep(actual_delta,
                                    satisfies_original_criterion);
          // If it has been accepted: If the original criterion is also
          // satisfied, we can just take that step and continue with
          // the regular algorithm, otherwise we stay in the soft
          // restoration phase
          if (accept) {
            if (satisfies_original_criterion) {
              IpData().Set_info_alpha_primal_char('S');
            }
            else {
              in_soft_resto_phase_ = true;
              IpData().Set_info_alpha_primal_char('s');
            }
          }
        }

        if (!accept) {
          // Go to the restoration phase
          if (!in_soft_resto_phase_) {
            // Prepare the restoration phase, e.g., augment the filter
            // with the current point. If we are already in the soft
            // restoration phase, this has been done earlier
            acceptor_->PrepareRestoPhaseStart();
          }
          if (CurrentIsAcceptable()) {
            THROW_EXCEPTION(ACCEPTABLE_POINT_REACHED,
                            "Restoration phase called at acceptable point.");
          }

          if (!IsValid(resto_phase_)) {
            THROW_EXCEPTION(IpoptException, "No Restoration Phase given to this Filter Line Search Object!");
          }
          // Calling the restoration phase at an (almost) feasible point
          // cannot help; try to fall back to a stored acceptable iterate.
          // ToDo make the 1e-2 below a parameter?
          if (IpCq().curr_constraint_violation()<=
              1e-2*IpData().tol()) {
            bool found_acceptable = RestoreAcceptablePoint();
            if (found_acceptable) {
              Jnlst().Printf(J_WARNING, J_LINE_SEARCH,
                             "Restoration phase is called at almost feasible point,\n but acceptable point from iteration %d could be restored.\n", acceptable_iteration_number_);
              THROW_EXCEPTION(ACCEPTABLE_POINT_REACHED,
                              "Restoration phase called at almost feasible point, but acceptable point could be restored.\n");
            }
            else {
              // ToDo does that happen too often?
              Jnlst().Printf(J_ERROR, J_LINE_SEARCH,
                             "Restoration phase is called at point that is almost feasible,\n with constraint violation %e. Abort.\n", IpCq().curr_constraint_violation());
              THROW_EXCEPTION(RESTORATION_FAILED,
                              "Restoration phase called, but point is almost feasible.");
            }
          }

          // Set the info fields for the first output line in the
          // restoration phase which reflects why the restoration phase
          // was called
          IpData().Set_info_alpha_primal(alpha_primal);
          IpData().Set_info_alpha_dual(0.);
          IpData().Set_info_alpha_primal_char('R');
          IpData().Set_info_ls_count(n_steps+1);

          accept = resto_phase_->PerformRestoration();
          if (!accept) {
            bool found_acceptable = RestoreAcceptablePoint();
            if (found_acceptable) {
              THROW_EXCEPTION(ACCEPTABLE_POINT_REACHED,
                              "Restoration phase failed, but acceptable point could be restore.\n");
            }
            else {
              THROW_EXCEPTION(RESTORATION_FAILED,
                              "Failed restoration phase!!!");
            }
          }
          // Restoration succeeded: reset all heuristics/counters.
          count_successive_shortened_steps_ = 0;
          if (expect_infeasible_problem_) {
            expect_infeasible_problem_ = false;
          }
          in_soft_resto_phase_ = false;
          soft_resto_counter_ = 0;
          watchdog_shortened_iter_ = 0;
        }
      }
    }
    else if (!in_soft_resto_phase_ || tiny_step) {
      // we didn't do the restoration phase and are now updating the
      // dual variables of the trial point
      Number alpha_dual_max =
        IpCq().dual_frac_to_the_bound(IpData().curr_tau(),
                                      *actual_delta->z_L(), *actual_delta->z_U(),
                                      *actual_delta->v_L(), *actual_delta->v_U());

      PerformDualStep(alpha_primal, alpha_dual_max, actual_delta);

      if (n_steps==0) {
        // the full first trial step was accepted - reset the
        // shortened-step counters
        count_successive_shortened_steps_ = 0;
        watchdog_shortened_iter_ = 0;
      }
      else {
        count_successive_shortened_steps_++;
        watchdog_shortened_iter_++;
      }

      // Once the constraint violation is small enough, switch the
      // expect-infeasible-problem heuristic off again.
      if (expect_infeasible_problem_ &&
          IpCq().curr_constraint_violation() <= expect_infeasible_problem_ctol_) {
        Jnlst().Printf(J_DETAILED, J_LINE_SEARCH,
                       "Constraint violation is with %e less than expect_infeasible_problem_ctol.\nDisable expect_infeasible_problem_heuristic.\n", IpCq().curr_constraint_violation());
        expect_infeasible_problem_ = false;
      }
    }
  }

  // One backtracking pass: starting from the fraction-to-the-boundary
  // step size, repeatedly shrink alpha_primal by alpha_red_factor_ until
  // the acceptor approves the trial point (possibly with corrector or
  // second-order-correction steps), or alpha falls below the minimum.
  // Returns true on acceptance; the output arguments report what happened.
  bool BacktrackingLineSearch::DoBacktrackingLineSearch(bool skip_first_trial_point,
      Number& alpha_primal,
      bool& corr_taken,
      bool& soc_taken,
      Index& n_steps,
      bool& evaluation_error,
      SmartPtr<IteratesVector>& actual_delta)
  {
    evaluation_error = false;
    bool accept = false;

    DBG_START_METH("BacktrackingLineSearch::DoBacktrackingLineSearch",
                   dbg_verbosity);

    // Compute primal fraction-to-the-boundary value
    Number alpha_primal_max =
      IpCq().primal_frac_to_the_bound(IpData().curr_tau(),
                                      *actual_delta->x(),
                                      *actual_delta->s());

    // Compute smallest step size allowed
    Number alpha_min = alpha_primal_max;
    if (!in_watchdog_) {
      alpha_min = acceptor_->CalculateAlphaMin();
    }
    Jnlst().Printf(J_DETAILED, J_LINE_SEARCH,
                   "minimal step size ALPHA_MIN = %E\n", alpha_min);

    // Start line search from maximal step size
    alpha_primal = alpha_primal_max;

    // Step size used in ftype and armijo tests
    Number alpha_primal_test = alpha_primal;
    if (in_watchdog_) {
      alpha_primal_test = watchdog_alpha_primal_test_;
    }

    if (skip_first_trial_point) {
      alpha_primal *= alpha_red_factor_;
    }

    if (!skip_first_trial_point) {
      // Before we do the actual backtracking line search for the
      // regular primal-dual search direction, let's see if a step
      // including a higher-order correctior is already acceptable
      accept = acceptor_->TryCorrector(alpha_primal_test,
                                       alpha_primal,
                                       actual_delta);
    }
    if (accept) {
      corr_taken = true;
    }

    if (!accept) {
      // Loop over decreasing step sizes until acceptable point is
      // found or until step size becomes too small

      while (alpha_primal>alpha_min ||
             n_steps == 0) { // always allow the "full" step if it is
        // acceptable (even if alpha_primal<=alpha_min)
        Jnlst().Printf(J_DETAILED, J_LINE_SEARCH,
                       "Starting checks for alpha (primal) = %8.2e\n",
                       alpha_primal);

        try {
          // Compute the primal trial point
          IpData().SetTrialPrimalVariablesFromStep(alpha_primal, *actual_delta->x(), *actual_delta->s());

          if (magic_steps_) {
            PerformMagicStep();
          }

          // If it is acceptable, stop the search
          alpha_primal_test = alpha_primal;
          if (accept_every_trial_step_) {
            // We call the evaluation at the trial point here, so that an
            // exception will be thrown if there are problems during the
            // evaluation of the functions (in that case, we want to
            // further reduce the step size)
            IpCq().trial_barrier_obj();
            IpCq().trial_constraint_violation();
            accept = true;
          }
          else {
            accept = acceptor_->CheckAcceptabilityOfTrialPoint(alpha_primal_test);
          }
        }
        catch(IpoptNLP::Eval_Error& e) {
          // Function evaluation failed at this trial point; treat it as
          // rejected and keep backtracking.
          e.ReportException(Jnlst(), J_DETAILED);
          Jnlst().Printf(J_WARNING, J_LINE_SEARCH,
                         "Warning: Cutting back alpha due to evaluation error\n");
          IpData().Append_info_string("e");
          accept = false;
          evaluation_error = true;
        }

        if (accept) {
          break;
        }

        if (in_watchdog_) {
          // In watchdog mode only the single (full) trial step is tested.
          break;
        }

        // Decide if we want to go to the restoration phase in a
        // short cut to check if the problem is infeasible
        if (expect_infeasible_problem_) {
          if (count_successive_shortened_steps_>=5) {
            break;
          }
        }

        // try second order correction step if the function could
        // be evaluated
        // DoTo: check if we want to do SOC when watchdog is active
        if (!evaluation_error) {
          Number theta_curr = IpCq().curr_constraint_violation();
          Number theta_trial = IpCq().trial_constraint_violation();
          if (alpha_primal==alpha_primal_max &&   // i.e. first trial point
              theta_curr<=theta_trial) {
            // Try second order correction
            accept = acceptor_->TrySecondOrderCorrection(alpha_primal_test,
                     alpha_primal,
                     actual_delta);
          }
          if (accept) {
            soc_taken = true;
            break;
          }
        }

        // Point is not yet acceptable, try a shorter one
        alpha_primal *= alpha_red_factor_;
        n_steps++;
      }
    } /* if (!accept) */

    // Record a one-character summary of what happened for the iteration
    // output line ('w' = watchdog rejected; acceptor supplies the rest;
    // upper case marks a second-order-correction step).
    char info_alpha_primal_char='?';
    if (!accept && in_watchdog_) {
      info_alpha_primal_char = 'w';
    }
    else if (accept) {
      info_alpha_primal_char =
        acceptor_->UpdateForNextIteration(alpha_primal_test);
    }
    if (soc_taken) {
      info_alpha_primal_char = toupper(info_alpha_primal_char);
    }
    IpData().Set_info_alpha_primal_char(info_alpha_primal_char);
    IpData().Set_info_ls_count(n_steps+1);
    if (corr_taken) {
      IpData().Append_info_string("C");
    }

    return accept;
  }

  // Activates the watchdog: stores the current iterate and search
  // direction as the reference point to return to if the watchdog trials
  // fail, and remembers the step size used for the acceptance tests.
  void BacktrackingLineSearch::StartWatchDog()
  {
    DBG_START_FUN("BacktrackingLineSearch::StartWatchDog", dbg_verbosity);

    Jnlst().Printf(J_DETAILED, J_LINE_SEARCH,
                   "Starting Watch Dog\n");

    in_watchdog_ = true;
    watchdog_iterate_ = IpData().curr();
    watchdog_delta_ = IpData().delta();
    watchdog_trial_iter_ = 0;
    watchdog_alpha_primal_test_ =
      IpCq().curr_primal_frac_to_the_bound(IpData().curr_tau());

    acceptor_->StartWatchDog();
  }

  // Deactivates the watchdog and restores the stored reference iterate
  // and search direction so the regular line search can resume from them.
  void BacktrackingLineSearch::StopWatchDog(SmartPtr<IteratesVector>& actual_delta)
  {
    DBG_START_FUN("BacktrackingLineSearch::StopWatchDog", dbg_verbosity);

    Jnlst().Printf(J_DETAILED, J_LINE_SEARCH,
                   "Stopping Watch Dog\n");

    IpData().Append_info_string("w");

    in_watchdog_ = false;

    // Reset all fields in IpData to reference point
    SmartPtr<IteratesVector> old_trial = watchdog_iterate_->MakeNewContainer();
    IpData().set_trial(old_trial);
    IpData().AcceptTrialPoint();
    actual_delta = watchdog_delta_->MakeNewContainer();
    IpData().SetHaveAffineDeltas(false);

    // reset the stored watchdog iterates
    watchdog_iterate_ = NULL;
    watchdog_delta_ = NULL;

    watchdog_shortened_iter_ = 0;

    acceptor_->StopWatchDog();
  }

  // Resets the soft-restoration-phase state and the acceptor's history
  // (e.g. its filter), typically at the start of a new (sub)problem.
  void BacktrackingLineSearch::Reset()
  {
    DBG_START_FUN("BacktrackingLineSearch::Reset", dbg_verbosity);
    in_soft_resto_phase_ = false;
    soft_resto_counter_ = 0;

    acceptor_->Reset();
  }

  // Sets the trial bound multipliers from the dual step and computes the
  // step size alpha_y for the equality constraint multipliers according
  // to the alpha_for_y option, then sets those too.  Also records the
  // primal/dual step sizes for the iteration summary output.
  void BacktrackingLineSearch::PerformDualStep(Number alpha_primal,
      Number alpha_dual,
      SmartPtr<IteratesVector>& delta)
  {
    DBG_START_FUN("BacktrackingLineSearch::PerformDualStep", dbg_verbosity);

    // set the bound multipliers from the step
    IpData().SetTrialBoundMultipliersFromStep(alpha_dual, *delta->z_L(), *delta->z_U(), *delta->v_L(), *delta->v_U());

    Number alpha_y=-1.;
    switch (alpha_for_y_) {
    case PRIMAL_ALPHA_FOR_Y:
      alpha_y = alpha_primal;
      break;
    case DUAL_ALPHA_FOR_Y:
      alpha_y = alpha_dual;
      break;
    case MIN_ALPHA_FOR_Y:
      alpha_y = Min(alpha_dual, alpha_primal);
      break;
    case MAX_ALPHA_FOR_Y:
      alpha_y = Max(alpha_dual, alpha_primal);
      break;
    case FULL_STEP_FOR_Y:
      alpha_y = 1;
      break;
    case MIN_DUAL_INFEAS_ALPHA_FOR_Y:
    case SAFE_MIN_DUAL_INFEAS_ALPHA_FOR_Y:
      // Here we compute the step size for y so that the dual
      // infeasibility is minimized along delta_y

      // compute the dual infeasibility at new point with old y
      SmartPtr<IteratesVector> temp_trial
      = IpData().trial()->MakeNewContainer();
      temp_trial->Set_y_c(*IpData().curr()->y_c());
      temp_trial->Set_y_d(*IpData().curr()->y_d());
      IpData().set_trial(temp_trial);
      SmartPtr<const Vector> dual_inf_x = IpCq().trial_grad_lag_x();
      SmartPtr<const Vector> dual_inf_s = IpCq().trial_grad_lag_s();

      SmartPtr<Vector> new_jac_times_delta_y =
        IpData().curr()->x()->MakeNew();
      new_jac_times_delta_y->AddTwoVectors(1., *IpCq().trial_jac_cT_times_vec(*delta->y_c()),
                                           1., *IpCq().trial_jac_dT_times_vec(*delta->y_d()),
                                           0.);

      // Minimizing ||dual_inf + alpha * J^T delta_y||^2 in alpha gives
      // the 1-d least-squares solution alpha = -b/a below.
      Number a = std::pow(new_jac_times_delta_y->Nrm2(), Number(2)) +
                 std::pow(delta->y_d()->Nrm2(), Number(2));
      Number b = dual_inf_x->Dot(*new_jac_times_delta_y) -
                 dual_inf_s->Dot(*delta->y_d());

      Number alpha = - b/a;

      if (alpha_for_y_==SAFE_MIN_DUAL_INFEAS_ALPHA_FOR_Y) {
        // Safeguard: project alpha into [min, max] of the primal and
        // dual step sizes.
        alpha_y = Min(Max(alpha_primal, alpha_dual), Max(alpha, Min(alpha_primal, alpha_dual)));
      }
      else {
        alpha_y = Min(1., Max(0., alpha));
      }
      break;
    }

    // Set the eq multipliers from the step now that alpha_y
    // has been calculated.
    DBG_PRINT((1, "alpha_y = %e\n", alpha_y));
    DBG_PRINT_VECTOR(2, "delta_y_c", *delta->y_c());
    DBG_PRINT_VECTOR(2, "delta_y_d", *delta->y_d());
    IpData().SetTrialEqMultipliersFromStep(alpha_y, *delta->y_c(), *delta->y_d());

    // Set some information for iteration summary output
    IpData().Set_info_alpha_primal(alpha_primal);
    IpData().Set_info_alpha_dual(alpha_dual);
  }

  // "Magic step" heuristic: adjusts the trial slack variables s (without
  // changing x) so that the violation of the inequality bounds d_L <= d(x)
  // <= d_U is reduced, skipping entries with both bounds where the
  // correction would overshoot.  Only modifies the trial point in IpData.
  void
  BacktrackingLineSearch::PerformMagicStep()
  {
    DBG_START_METH("BacktrackingLineSearch::PerformMagicStep",
                   2);//dbg_verbosity);

    DBG_PRINT((1,"Incoming barr = %e and constrviol %e\n",
               IpCq().trial_barrier_obj(),
               IpCq().trial_constraint_violation()));
    DBG_PRINT_VECTOR(2, "s in", *IpData().trial()->s());
    DBG_PRINT_VECTOR(2, "d minus s in", *IpCq().trial_d_minus_s());
    DBG_PRINT_VECTOR(2, "slack_s_L in", *IpCq().trial_slack_s_L());
    DBG_PRINT_VECTOR(2, "slack_s_U in", *IpCq().trial_slack_s_U());

    // Correction for the lower-bounded part: delta_s_L = max(0, d(x)-s).
    SmartPtr<const Vector> d_L = IpNLP().d_L();
    SmartPtr<const Matrix> Pd_L = IpNLP().Pd_L();
    SmartPtr<Vector> delta_s_magic_L = d_L->MakeNew();
    delta_s_magic_L->Set(0.);
    SmartPtr<Vector> tmp = d_L->MakeNew();
    Pd_L->TransMultVector(1., *IpCq().trial_d_minus_s(), 0., *tmp);
    delta_s_magic_L->ElementWiseMax(*tmp);

    // Correction for the upper-bounded part: delta_s_U = min(0, d(x)-s).
    SmartPtr<const Vector> d_U = IpNLP().d_U();
    SmartPtr<const Matrix> Pd_U = IpNLP().Pd_U();
    SmartPtr<Vector> delta_s_magic_U = d_U->MakeNew();
    delta_s_magic_U->Set(0.);
    tmp = d_U->MakeNew();
    Pd_U->TransMultVector(1., *IpCq().trial_d_minus_s(), 0., *tmp);
    delta_s_magic_U->ElementWiseMin(*tmp);

    // Scatter both corrections back into full s-space.
    SmartPtr<Vector> delta_s_magic = IpData().trial()->s()->MakeNew();
    Pd_L->MultVector(1., *delta_s_magic_L, 0., *delta_s_magic);
    Pd_U->MultVector(1., *delta_s_magic_U, 1., *delta_s_magic);
    delta_s_magic_L = NULL; // free memory
    delta_s_magic_U = NULL; // free memory

    // Now find those entries with both lower and upper bounds, there
    // the step is too large
    // ToDo this should only be done if there are inequality
    // constraints with two bounds
    // also this can be done in a smaller space (d_L or d_U whichever
    // is smaller)
    tmp = delta_s_magic->MakeNew();
    tmp->Copy(*IpData().trial()->s());
    Pd_L->MultVector(1., *d_L, -2., *tmp);
    Pd_U->MultVector(1., *d_U, 1., *tmp);
    SmartPtr<Vector> tmp2 = tmp->MakeNew();
    tmp2->Copy(*tmp);
    tmp2->ElementWiseAbs();
    tmp->Axpy(-2., *delta_s_magic);
    tmp->ElementWiseAbs();
    // now, tmp2 = |d_L + d_u - 2*s| and tmp = |d_L + d_u - 2*(s+Delta s)|
    // we want to throw out those for which tmp2 > tmp
    tmp->Axpy(-1., *tmp2);
    tmp->ElementWiseSgn();
    tmp2->Set(0.);
    tmp2->ElementWiseMax(*tmp);
    tmp = d_L->MakeNew();
    Pd_L->TransMultVector(1., *tmp2, 0., *tmp);
    Pd_L->MultVector(1., *tmp, 0., *tmp2);
    tmp = d_U->MakeNew();
    Pd_U->TransMultVector(1., *tmp2, 0., *tmp);
    Pd_U->MultVector(1., *tmp, 0., *tmp2);
    DBG_PRINT_VECTOR(2, "tmp indicator", *tmp2)
    // tmp2 now is one for those entries with both bounds, for which
    // no step should be taken

    // Zero out the magic step for the flagged two-sided entries.
    tmp = delta_s_magic->MakeNew();
    tmp->Copy(*delta_s_magic);
    tmp->ElementWiseMultiply(*tmp2);
    delta_s_magic->Axpy(-1., *tmp);

    Number delta_s_magic_max = delta_s_magic->Amax();
    Number mach_eps = std::numeric_limits<Number>::epsilon();
    if (delta_s_magic_max>0.) {
      if (delta_s_magic_max > 10*mach_eps*IpData().trial()->s()->Amax()) {
        IpData().Append_info_string("M");
        Jnlst().Printf(J_DETAILED, J_LINE_SEARCH, "Magic step with max-norm %.6e taken.\n", delta_s_magic->Amax());
        delta_s_magic->Print(Jnlst(), J_MOREVECTOR, J_LINE_SEARCH,
                             "delta_s_magic");
      }

      // now finally compute the new overall slacks
      delta_s_magic->Axpy(1., *IpData().trial()->s());
      SmartPtr<IteratesVector> trial = IpData().trial()->MakeNewContainer();
      trial->Set_s(*delta_s_magic);

      // also update the set in the dual variables


      IpData().set_trial(trial);
    }

    DBG_PRINT((1,"Outgoing barr = %e and constrviol %e\n", IpCq().trial_barrier_obj(), IpCq().trial_constraint_violation()));
    DBG_PRINT_VECTOR(2, "s out", *IpData().trial()->s());
    DBG_PRINT_VECTOR(2, "d minus s out", *IpCq().trial_d_minus_s());
    DBG_PRINT_VECTOR(2, "slack_s_L out", *IpCq().trial_slack_s_L());
    DBG_PRINT_VECTOR(2, "slack_s_U out", *IpCq().trial_slack_s_U());
  }

  // Soft restoration phase: tries a damped regular primal-dual step and
  // accepts it if it sufficiently reduces the primal-dual error (or
  // already satisfies the original acceptance criterion).
  bool BacktrackingLineSearch::TrySoftRestoStep(SmartPtr<IteratesVector>& actual_delta,
      bool &satisfies_original_criterion)
  {
    // NOTE(review): debug string names FilterLSAcceptor, but this method
    // belongs to BacktrackingLineSearch - likely a leftover from the
    // derivation from IpFilterLineSearch.cpp.
    DBG_START_FUN("FilterLSAcceptor::TrySoftRestoStep", dbg_verbosity);

    // A factor of 0 means the soft restoration phase is disabled.
    if (soft_resto_pderror_reduction_factor_==0.)
{ 968 return false; 969 } 970 971 satisfies_original_criterion = false; 972 973 // ToDo: Need to decide if we want to try a corrector step first 974 975 // Compute the maximal step sizes (we use identical step sizes for 976 // primal and dual variables 977 Number alpha_primal_max = 978 IpCq().primal_frac_to_the_bound(IpData().curr_tau(), 979 *actual_delta->x(), 980 *actual_delta->s()); 981 Number alpha_dual_max = 982 IpCq().dual_frac_to_the_bound(IpData().curr_tau(), 983 *actual_delta->z_L(), 984 *actual_delta->z_U(), 985 *actual_delta->v_L(), 986 *actual_delta->v_U()); 987 Number alpha = Min(alpha_primal_max, alpha_dual_max); 988 989 Jnlst().Printf(J_DETAILED, J_LINE_SEARCH, 990 "Trying soft restoration phase step with step length %13.6e\n", 991 alpha); 992 993 // We allow up to three trials in case there is an evaluation 994 // error for the functions 995 bool done=false; 996 Index count=3; 997 while (!done && count>0) { 998 // Set the trial point 999 IpData().SetTrialPrimalVariablesFromStep(alpha, *actual_delta->x(), *actual_delta->s()); 1000 PerformDualStep(alpha, alpha, actual_delta); 1001 1002 // Check if that point is acceptable with respect to the current 1003 // original filter 1004 try { 1005 IpCq().trial_barrier_obj(); 1006 IpCq().trial_constraint_violation(); 1007 done=true; 1008 } 1009 catch(IpoptNLP::Eval_Error& e) { 1010 e.ReportException(Jnlst(), J_DETAILED); 1011 Jnlst().Printf(J_WARNING, J_LINE_SEARCH, 1012 "Warning: Evaluation error during soft restoration phase step.\n"); 1013 IpData().Append_info_string("e"); 1014 count--; 1015 } 1016 } 1017 if (!done) { 1018 return false; 1019 } 1020 1021 if (acceptor_->CheckAcceptabilityOfTrialPoint(0.)) { 1022 Jnlst().Printf(J_DETAILED, J_LINE_SEARCH, 1023 " Trial step acceptable with respect to original backtracking globalization.\n"); 1024 satisfies_original_criterion = true; 1025 return true; 1026 } 1027 1028 // Evaluate the optimality error at the new point 1029 Number mu = .0; 1030 if 
(!IpData().FreeMuMode()) { 1031 mu = IpData().curr_mu(); 1032 } 1033 Number trial_pderror; 1034 Number curr_pderror; 1035 try { 1036 trial_pderror = IpCq().trial_primal_dual_system_error(mu); 1037 curr_pderror = IpCq().curr_primal_dual_system_error(mu); 1038 } 1039 catch(IpoptNLP::Eval_Error& e) { 1040 e.ReportException(Jnlst(), J_DETAILED); 1041 Jnlst().Printf(J_WARNING, J_LINE_SEARCH, 1042 "Warning: Evaluation error during soft restoration phase step.\n"); 1043 IpData().Append_info_string("e"); 1044 return false; 1045 } 1046 1047 Jnlst().Printf(J_DETAILED, J_LINE_SEARCH, 1048 " Primal-dual error at current point: %23.16e\n", curr_pderror); 1049 Jnlst().Printf(J_DETAILED, J_LINE_SEARCH, 1050 " Primal-dual error at trial point : %23.16e\n", trial_pderror); 1051 // Check if there is sufficient reduction in the optimality error 1052 if (trial_pderror <= soft_resto_pderror_reduction_factor_*curr_pderror) { 1053 Jnlst().Printf(J_DETAILED, J_LINE_SEARCH, 1054 " Trial step accepted.\n"); 1055 return true; 1056 } 1057 1058 Jnlst().Printf(J_DETAILED, J_LINE_SEARCH, 1059 " Trial step rejected.\n"); 1060 return false; 1061 } 1062 1063 bool DetectTinyStep()1064 BacktrackingLineSearch::DetectTinyStep() 1065 { 1066 DBG_START_METH("BacktrackingLineSearch::DetectTinyStep", 1067 dbg_verbosity); 1068 1069 Number max_step_x; 1070 Number max_step_s; 1071 1072 if (tiny_step_tol_==0.) 
1073 return false; 1074 1075 // ToDo try to find more efficient implementation 1076 DBG_PRINT_VECTOR(2, "curr_x", *IpData().curr()->x()); 1077 DBG_PRINT_VECTOR(2, "delta_x", *IpData().delta()->x()); 1078 1079 SmartPtr<Vector> tmp = IpData().curr()->x()->MakeNewCopy(); 1080 tmp->ElementWiseAbs(); 1081 tmp->AddScalar(1.); 1082 1083 SmartPtr<Vector> tmp2 = IpData().delta()->x()->MakeNewCopy(); 1084 tmp2->ElementWiseDivide(*tmp); 1085 max_step_x = tmp2->Amax(); 1086 Jnlst().Printf(J_MOREDETAILED, J_LINE_SEARCH, 1087 "Relative step size for delta_x = %e\n", 1088 max_step_x); 1089 if (max_step_x > tiny_step_tol_) 1090 return false; 1091 1092 tmp = IpData().curr()->s()->MakeNew(); 1093 tmp->Copy(*IpData().curr()->s()); 1094 tmp->ElementWiseAbs(); 1095 tmp->AddScalar(1.); 1096 1097 tmp2 = IpData().curr()->s()->MakeNew(); 1098 tmp2->Copy(*IpData().delta()->s()); 1099 tmp2->ElementWiseDivide(*tmp); 1100 max_step_s = tmp2->Amax(); 1101 Jnlst().Printf(J_MOREDETAILED, J_LINE_SEARCH, 1102 "Relative step size for delta_s = %e\n", 1103 max_step_s); 1104 if (max_step_s > tiny_step_tol_) 1105 return false; 1106 1107 Jnlst().Printf(J_DETAILED, J_LINE_SEARCH, 1108 "Tiny step of relative size %e detected.\n", 1109 Max(max_step_x, max_step_s)); 1110 1111 return true; 1112 } 1113 CurrentIsAcceptable()1114 bool BacktrackingLineSearch::CurrentIsAcceptable() 1115 { 1116 return (IsValid(conv_check_) && 1117 conv_check_->CurrentIsAcceptable()); 1118 } 1119 StoreAcceptablePoint()1120 void BacktrackingLineSearch::StoreAcceptablePoint() 1121 { 1122 DBG_START_METH("BacktrackingLineSearch::StoreAcceptablePoint", 1123 dbg_verbosity); 1124 1125 acceptable_iterate_ = IpData().curr(); 1126 acceptable_iteration_number_ = IpData().iter_count(); 1127 } 1128 RestoreAcceptablePoint()1129 bool BacktrackingLineSearch::RestoreAcceptablePoint() 1130 { 1131 DBG_START_METH("BacktrackingLineSearch::RestoreAcceptablePoint", 1132 dbg_verbosity); 1133 1134 if (!IsValid(acceptable_iterate_)) { 1135 return false; 1136 
} 1137 1138 SmartPtr<IteratesVector> prev_iterate = acceptable_iterate_->MakeNewContainer(); 1139 IpData().set_trial(prev_iterate); 1140 IpData().AcceptTrialPoint(); 1141 1142 return true; 1143 } 1144 ActivateFallbackMechanism()1145 bool BacktrackingLineSearch::ActivateFallbackMechanism() 1146 { 1147 // If we don't have a restoration phase, we don't know what to do 1148 if (IsNull(resto_phase_)) { 1149 return false; 1150 } 1151 1152 // Reverting to the restoration phase only makes sense if there 1153 // are constraints 1154 if (IpData().curr()->y_c()->Dim()+IpData().curr()->y_d()->Dim()==0) { 1155 return false; 1156 } 1157 1158 fallback_activated_ = true; 1159 rigorous_ = true; 1160 1161 Jnlst().Printf(J_DETAILED, J_LINE_SEARCH, 1162 "Fallback option activated in BacktrackingLineSearch!\n"); 1163 1164 return true; 1165 } 1166 1167 } // namespace Ipopt 1168