#  ___________________________________________________________________________
#
#  Pyomo: Python Optimization Modeling Objects
#  Copyright 2017 National Technology and Engineering Solutions of Sandia, LLC
#  Under the terms of Contract DE-NA0003525 with National Technology and
#  Engineering Solutions of Sandia, LLC, the U.S. Government retains certain
#  rights in this software.
#  This software is distributed under the 3-clause BSD License.
#  ___________________________________________________________________________

"""Functions for solving the nonlinear subproblem."""
from __future__ import division

from math import fabs

from pyomo.common.collections import ComponentSet
from pyomo.common.errors import InfeasibleConstraintException
from pyomo.contrib.gdpopt.data_class import SubproblemResult
from pyomo.contrib.gdpopt.util import (SuppressInfeasibleWarning,
                                       is_feasible, get_main_elapsed_time)
from pyomo.core import (Constraint, TransformationFactory, minimize, value,
                        Objective)
from pyomo.core.expr import current as EXPR
from pyomo.opt import SolverFactory, SolverResults
from pyomo.opt import TerminationCondition as tc


def solve_disjunctive_subproblem(mip_result, solve_data, config):
    """Set up and solve the disjunctive subproblem."""
    if config.force_subproblem_nlp:
        if config.strategy in {"LOA", "RIC"}:
            return solve_local_NLP(mip_result.var_values, solve_data, config)
        elif config.strategy == 'GLOA':
            return solve_global_subproblem(mip_result, solve_data, config)
        else:
            raise ValueError('Unrecognized strategy: ' + config.strategy)
    else:
        if config.strategy in {"LOA", "RIC"}:
            return solve_local_subproblem(mip_result, solve_data, config)
        elif config.strategy == 'GLOA':
            return solve_global_subproblem(mip_result, solve_data, config)
        else:
            raise ValueError('Unrecognized strategy: ' + config.strategy)


def solve_linear_subproblem(mip_model, solve_data, config):
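    """Solve the linear (MIP) subproblem with the configured MIP solver."""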
    GDPopt = mip_model.GDPopt_utils

    initialize_subproblem(mip_model, solve_data)

    # Callback immediately before solving the MIP subproblem
    config.call_before_subproblem_solve(mip_model, solve_data)

    mip_solver = SolverFactory(config.mip_solver)
    if not mip_solver.available():
        raise RuntimeError(
            "MIP solver %s is not available." % config.mip_solver)
    with SuppressInfeasibleWarning():
        mip_args = dict(config.mip_solver_args)
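        # Pass the remaining wall-clock time to the solver as its time limit
        # ('reslim' for GAMS, 'time_limit' for multisolve).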
        elapsed = get_main_elapsed_time(solve_data.timing)
        remaining = max(config.time_limit - elapsed, 1)
        if config.mip_solver == 'gams':
            mip_args['add_options'] = mip_args.get('add_options', [])
            mip_args['add_options'].append('option reslim=%s;' % remaining)
        elif config.mip_solver == 'multisolve':
            mip_args['time_limit'] = min(
                mip_args.get('time_limit', float('inf')), remaining)
        results = mip_solver.solve(mip_model, **mip_args)

    subprob_result = SubproblemResult()
    subprob_result.feasible = True
    subprob_result.var_values = list(v.value for v in GDPopt.variable_list)
    subprob_result.pyomo_results = results
    subprob_result.dual_values = list(
        mip_model.dual.get(c, None) for c in GDPopt.constraint_list)

    subprob_terminate_cond = results.solver.termination_condition
    if subprob_terminate_cond is tc.optimal:
        pass
    elif subprob_terminate_cond is tc.infeasible:
        config.logger.info('MIP subproblem was infeasible.')
        subprob_result.feasible = False
    else:
        raise ValueError(
            'GDPopt unable to handle MIP subproblem termination '
            'condition of %s. Results: %s'
            % (subprob_terminate_cond, results))

    # Call the subproblem post-solve callback
    config.call_after_subproblem_solve(mip_model, solve_data)

    # if feasible, call the subproblem post-feasible callback
    if subprob_result.feasible:
        config.call_after_subproblem_feasible(mip_model, solve_data)

    return subprob_result


def solve_NLP(nlp_model, solve_data, config):
    """Solve the NLP subproblem."""
    config.logger.info(
        'Solving nonlinear subproblem for '
        'fixed binaries and logical realizations.')

    # Error checking for unfixed discrete variables
    unfixed_discrete_vars = detect_unfixed_discrete_vars(nlp_model)
    assert len(unfixed_discrete_vars) == 0, \
        "Unfixed discrete variables exist on the NLP subproblem: {0}".format(
            list(v.name for v in unfixed_discrete_vars))

    GDPopt = nlp_model.GDPopt_utils

    initialize_subproblem(nlp_model, solve_data)

    # Callback immediately before solving NLP subproblem
    config.call_before_subproblem_solve(nlp_model, solve_data)

    nlp_solver = SolverFactory(config.nlp_solver)
    if not nlp_solver.available():
        raise RuntimeError("NLP solver %s is not available." %
                           config.nlp_solver)
    with SuppressInfeasibleWarning():
        try:
            nlp_args = dict(config.nlp_solver_args)
            elapsed = get_main_elapsed_time(solve_data.timing)
            remaining = max(config.time_limit - elapsed, 1)
            if config.nlp_solver == 'gams':
                nlp_args['add_options'] = nlp_args.get('add_options', [])
                nlp_args['add_options'].append('option reslim=%s;' % remaining)
            elif config.nlp_solver == 'multisolve':
                nlp_args['time_limit'] = min(
                    nlp_args.get('time_limit', float('inf')), remaining)
            results = nlp_solver.solve(nlp_model, **nlp_args)
        except ValueError as err:
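            # Some solver interfaces raise a ValueError instead of returning
            # results when the solver status is 'error'; convert it into a
            # SolverResults with tc.error so it is handled like any other
            # failed subproblem below.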
            if 'Cannot load a SolverResults object with bad status: error' in str(err):
                results = SolverResults()
                results.solver.termination_condition = tc.error
                results.solver.message = str(err)
            else:
                raise

    nlp_result = SubproblemResult()
    nlp_result.feasible = True
    nlp_result.var_values = list(v.value for v in GDPopt.variable_list)
    nlp_result.pyomo_results = results
    nlp_result.dual_values = list(
        nlp_model.dual.get(c, None)
        for c in GDPopt.constraint_list)

    term_cond = results.solver.termination_condition
    if term_cond in (tc.optimal, tc.locallyOptimal, tc.feasible):
        pass
    elif term_cond == tc.infeasible:
        config.logger.info('NLP subproblem was infeasible.')
        nlp_result.feasible = False
    elif term_cond == tc.maxIterations:
        # TODO try something else? Reinitialize with different initial
        # value?
        config.logger.info(
            'NLP subproblem failed to converge within iteration limit.')
        if is_feasible(nlp_model, config):
            config.logger.info(
                'NLP solution is still feasible. '
                'Using potentially suboptimal feasible solution.')
        else:
            nlp_result.feasible = False
    elif term_cond == tc.internalSolverError:
        # Possible that IPOPT had a restoration failure
        config.logger.info(
            "NLP solver had an internal failure: %s" % results.solver.message)
        nlp_result.feasible = False
    elif (term_cond == tc.other and
          "Too few degrees of freedom" in str(results.solver.message)):
        # Possible IPOPT degrees of freedom error
        config.logger.info(
            "IPOPT has too few degrees of freedom: %s" %
            results.solver.message)
        nlp_result.feasible = False
    elif term_cond == tc.other:
        config.logger.info(
            "NLP solver had a termination condition of 'other': %s" %
            results.solver.message)
        nlp_result.feasible = False
    elif term_cond == tc.error:
        config.logger.info(
            "NLP solver had a termination condition of 'error': %s" %
            results.solver.message)
        nlp_result.feasible = False
    elif term_cond == tc.maxTimeLimit:
        config.logger.info(
            "NLP solver ran out of time. Assuming infeasible for now.")
        nlp_result.feasible = False
    else:
        raise ValueError(
            'GDPopt unable to handle NLP subproblem termination '
            'condition of %s. Results: %s'
            % (term_cond, results))

    # Call the NLP post-solve callback
    config.call_after_subproblem_solve(nlp_model, solve_data)

    # if feasible, call the NLP post-feasible callback
    if nlp_result.feasible:
        config.call_after_subproblem_feasible(nlp_model, solve_data)

    return nlp_result


def solve_MINLP(model, solve_data, config):
    """Solve the MINLP subproblem."""
    config.logger.info(
        "Solving MINLP subproblem for fixed logical realizations."
    )

    GDPopt = model.GDPopt_utils

    initialize_subproblem(model, solve_data)

    # Callback immediately before solving MINLP subproblem
    config.call_before_subproblem_solve(model, solve_data)

    minlp_solver = SolverFactory(config.minlp_solver)
    if not minlp_solver.available():
        raise RuntimeError("MINLP solver %s is not available." %
                           config.minlp_solver)
    with SuppressInfeasibleWarning():
        minlp_args = dict(config.minlp_solver_args)
        elapsed = get_main_elapsed_time(solve_data.timing)
        remaining = max(config.time_limit - elapsed, 1)
        if config.minlp_solver == 'gams':
            minlp_args['add_options'] = minlp_args.get('add_options', [])
            minlp_args['add_options'].append('option reslim=%s;' % remaining)
        elif config.minlp_solver == 'multisolve':
            minlp_args['time_limit'] = min(
                minlp_args.get('time_limit', float('inf')), remaining)
        results = minlp_solver.solve(model, **minlp_args)

    subprob_result = SubproblemResult()
    subprob_result.feasible = True
    subprob_result.var_values = list(v.value for v in GDPopt.variable_list)
    subprob_result.pyomo_results = results
    subprob_result.dual_values = list(
        model.dual.get(c, None)
        for c in GDPopt.constraint_list)

    term_cond = results.solver.termination_condition
    if term_cond in (tc.optimal, tc.locallyOptimal, tc.feasible):
        pass
    elif term_cond == tc.infeasible:
        config.logger.info('MINLP subproblem was infeasible.')
        subprob_result.feasible = False
    elif term_cond == tc.maxIterations:
        # TODO try something else? Reinitialize with different initial
        # value?
        config.logger.info(
            'MINLP subproblem failed to converge within iteration limit.')
        if is_feasible(model, config):
            config.logger.info(
                'MINLP solution is still feasible. '
                'Using potentially suboptimal feasible solution.')
        else:
            subprob_result.feasible = False
    elif term_cond == tc.maxTimeLimit:
        config.logger.info(
            'MINLP subproblem failed to converge within time limit.')
        if is_feasible(model, config):
            config.logger.info(
                'MINLP solution is still feasible. '
                'Using potentially suboptimal feasible solution.')
        else:
            subprob_result.feasible = False
    elif term_cond == tc.intermediateNonInteger:
        config.logger.info(
            "MINLP solver could not find feasible integer solution: %s" %
            results.solver.message)
        subprob_result.feasible = False
    else:
        raise ValueError(
            'GDPopt unable to handle MINLP subproblem termination '
            'condition of %s. Results: %s'
            % (term_cond, results))

    # Call the subproblem post-solve callback
    config.call_after_subproblem_solve(model, solve_data)

    # if feasible, call the subproblem post-feasible callback
    if subprob_result.feasible:
        config.call_after_subproblem_feasible(model, solve_data)

    return subprob_result


def detect_unfixed_discrete_vars(model):
    """Detect unfixed discrete variables in use on the model."""
    var_set = ComponentSet()
    for constr in model.component_data_objects(
            Constraint, active=True, descend_into=True):
        var_set.update(
            v for v in EXPR.identify_variables(
                constr.body, include_fixed=False)
            if not v.is_continuous())
    for obj in model.component_data_objects(Objective, active=True):
        var_set.update(
            v for v in EXPR.identify_variables(
                obj.expr, include_fixed=False)
            if not v.is_continuous())
    return var_set


def preprocess_subproblem(m, config):
    """Applies preprocessing transformations to the model."""
    # fbbt(m, integer_tol=config.integer_tolerance)
    xfrm = TransformationFactory
    xfrm('contrib.propagate_eq_var_bounds').apply_to(m)
    xfrm('contrib.detect_fixed_vars').apply_to(
        m, tolerance=config.variable_tolerance)
    xfrm('contrib.propagate_fixed_vars').apply_to(m)
    xfrm('contrib.remove_zero_terms').apply_to(m)
    xfrm('contrib.propagate_zero_sum').apply_to(m)
    xfrm('contrib.constraints_to_var_bounds').apply_to(
        m, tolerance=config.variable_tolerance)
    xfrm('contrib.detect_fixed_vars').apply_to(
        m, tolerance=config.variable_tolerance)
    xfrm('contrib.propagate_zero_sum').apply_to(m)
    xfrm('contrib.deactivate_trivial_constraints').apply_to(
        m, tolerance=config.constraint_tolerance)


def initialize_subproblem(model, solve_data):
    """Perform initialization of the subproblem.

    Presently, this just restores the continuous variables to the original
    model values.

    """
    # restore original continuous variable values
    for var, old_value in zip(model.GDPopt_utils.variable_list,
                              solve_data.initial_var_values):
        if not var.fixed and var.is_continuous():
            if old_value is not None:
                # Adjust value if it falls outside the bounds
                if var.has_lb() and old_value < var.lb:
                    old_value = var.lb
                if var.has_ub() and old_value > var.ub:
                    old_value = var.ub
                # Set the value
                var.set_value(old_value)


def update_subproblem_progress_indicators(solved_model, solve_data, config):
    """Update the progress indicators for the subproblem."""
    GDPopt = solved_model.GDPopt_utils
    objective = next(
        solved_model.component_data_objects(Objective, active=True))
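    # Update the incumbent bound (UB when minimizing, LB when maximizing) and
    # record whether this subproblem improved it.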
    if objective.sense == minimize:
        old_UB = solve_data.UB
        solve_data.UB = min(value(objective.expr), solve_data.UB)
        solve_data.feasible_solution_improved = (solve_data.UB < old_UB)
    else:
        old_LB = solve_data.LB
        solve_data.LB = max(value(objective.expr), solve_data.LB)
        solve_data.feasible_solution_improved = (solve_data.LB > old_LB)
    solve_data.iteration_log[
        (solve_data.master_iteration,
         solve_data.mip_iteration,
         solve_data.nlp_iteration)
    ] = (
        value(objective.expr),
        value(objective.expr),
        [v.value for v in GDPopt.variable_list]
    )

    if solve_data.feasible_solution_improved:
        solve_data.best_solution_found = solved_model.clone()

    improvement_tag = (
        "(IMPROVED) " if solve_data.feasible_solution_improved else "")
    lb_improved, ub_improved = (
        ("", improvement_tag)
        if objective.sense == minimize
        else (improvement_tag, ""))
    config.logger.info(
        'ITER {:d}.{:d}.{:d}-NLP: OBJ: {:.10g}  LB: {:.10g} {:s} UB: {:.10g} {:s}'.format(
            solve_data.master_iteration,
            solve_data.mip_iteration,
            solve_data.nlp_iteration,
            value(objective.expr),
            solve_data.LB, lb_improved,
            solve_data.UB, ub_improved))


def solve_local_NLP(mip_var_values, solve_data, config):
    """Set up and solve the local LOA subproblem."""
    nlp_model = solve_data.working_model.clone()
    solve_data.nlp_iteration += 1
    # copy in the discrete variable values
    for var, val in zip(nlp_model.GDPopt_utils.variable_list, mip_var_values):
        if val is None:
            continue
        if var.is_continuous():
            var.value = val
        elif (fabs(val) > config.integer_tolerance and
              fabs(val - 1) > config.integer_tolerance):
            raise ValueError(
                "Binary variable %s value %s is not "
                "within tolerance %s of 0 or 1." %
                (var.name, val, config.integer_tolerance))
        else:
            # variable is binary and within tolerances
            if config.round_discrete_vars:
                var.fix(int(round(val)))
            else:
                var.fix(val)
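    # With the discrete values fixed above, convert the GDP into an ordinary
    # NLP by fixing the disjuncts at their indicator values.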
    TransformationFactory('gdp.fix_disjuncts').apply_to(nlp_model)

    nlp_result = solve_NLP(nlp_model, solve_data, config)
    if nlp_result.feasible:  # NLP is feasible
        update_subproblem_progress_indicators(nlp_model, solve_data, config)
    return nlp_result


def solve_local_subproblem(mip_result, solve_data, config):
    """Set up and solve the local MINLP or NLP subproblem."""
    subprob = solve_data.working_model.clone()
    solve_data.nlp_iteration += 1

    # TODO also copy over the variable values?

    for disj, val in zip(subprob.GDPopt_utils.disjunct_list,
                         mip_result.disjunct_values):
        rounded_val = int(round(val))
        if (fabs(val - rounded_val) > config.integer_tolerance or
                rounded_val not in (0, 1)):
            raise ValueError(
                "Disjunct %s indicator value %s is not "
                "within tolerance %s of 0 or 1." %
                (disj.name, val, config.integer_tolerance)
            )
        else:
            if config.round_discrete_vars:
                disj.indicator_var.fix(bool(rounded_val))
            else:
                disj.indicator_var.fix(bool(val))

    if config.force_subproblem_nlp:
        # We also need to copy over the discrete variable values
        for var, val in zip(subprob.GDPopt_utils.variable_list,
                            mip_result.var_values):
            if var.is_continuous():
                continue
            rounded_val = int(round(val))
            if fabs(val - rounded_val) > config.integer_tolerance:
                raise ValueError(
                    "Discrete variable %s value %s is not "
                    "within tolerance %s of %s." %
                    (var.name, val, config.integer_tolerance, rounded_val))
            else:
                # discrete variable value is within integer tolerance
                if config.round_discrete_vars:
                    var.fix(rounded_val)
                else:
                    var.fix(val)

    TransformationFactory('gdp.fix_disjuncts').apply_to(subprob)

    # for disj in subprob.component_data_objects(Disjunct, active=True):
    #     disj.deactivate()  # TODO this is a HACK for something that isn't happening correctly in fix_disjuncts

    if config.subproblem_presolve:
        try:
            preprocess_subproblem(subprob, config)
        except InfeasibleConstraintException:
            return get_infeasible_result_object(
                subprob, "Preprocessing determined problem to be infeasible.")

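    # If every active constraint is linear (or constant), the fixed
    # subproblem is an MILP/LP and can be handed to the MIP solver directly.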
    if all(constr.body.polynomial_degree() in (0, 1)
           for constr in subprob.component_data_objects(
               Constraint, active=True)):
        subprob_result = solve_linear_subproblem(subprob, solve_data, config)
    else:
        unfixed_discrete_vars = detect_unfixed_discrete_vars(subprob)
        if config.force_subproblem_nlp and len(unfixed_discrete_vars) > 0:
            raise RuntimeError(
                "Unfixed discrete variables found on the NLP subproblem.")
        elif len(unfixed_discrete_vars) == 0:
            subprob_result = solve_NLP(subprob, solve_data, config)
        else:
            subprob_result = solve_MINLP(subprob, solve_data, config)

    if subprob_result.feasible:  # subproblem is feasible
        update_subproblem_progress_indicators(subprob, solve_data, config)
    return subprob_result


def solve_global_subproblem(mip_result, solve_data, config):
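    """Set up and solve the global MINLP or NLP subproblem."""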
    subprob = solve_data.working_model.clone()
    solve_data.nlp_iteration += 1

    # copy in the discrete variable values
    for disj, val in zip(subprob.GDPopt_utils.disjunct_list,
                         mip_result.disjunct_values):
        rounded_val = int(round(val))
        if (fabs(val - rounded_val) > config.integer_tolerance or
                rounded_val not in (0, 1)):
            raise ValueError(
                "Disjunct %s indicator value %s is not "
                "within tolerance %s of 0 or 1." %
                (disj.name, val, config.integer_tolerance)
            )
        else:
            if config.round_discrete_vars:
                disj.indicator_var.fix(bool(rounded_val))
            else:
                disj.indicator_var.fix(bool(val))

    if config.force_subproblem_nlp:
        # We also need to copy over the discrete variable values
        for var, val in zip(subprob.GDPopt_utils.variable_list,
                            mip_result.var_values):
            if var.is_continuous():
                continue
            rounded_val = int(round(val))
            if fabs(val - rounded_val) > config.integer_tolerance:
                raise ValueError(
                    "Discrete variable %s value %s is not "
                    "within tolerance %s of %s." %
                    (var.name, val, config.integer_tolerance, rounded_val))
            else:
                # discrete variable value is within integer tolerance
                if config.round_discrete_vars:
                    var.fix(rounded_val)
                else:
                    var.fix(val)

    TransformationFactory('gdp.fix_disjuncts').apply_to(subprob)
    subprob.dual.deactivate()  # global solvers may not give dual info

    if config.subproblem_presolve:
        try:
            preprocess_subproblem(subprob, config)
        except InfeasibleConstraintException:
            # Preprocessing determined the problem to be infeasible
            return get_infeasible_result_object(
                subprob, "Preprocessing determined problem to be infeasible.")

    unfixed_discrete_vars = detect_unfixed_discrete_vars(subprob)
    if config.force_subproblem_nlp and len(unfixed_discrete_vars) > 0:
        raise RuntimeError(
            "Unfixed discrete variables found on the NLP subproblem.")
    elif len(unfixed_discrete_vars) == 0:
        subprob_result = solve_NLP(subprob, solve_data, config)
    else:
        subprob_result = solve_MINLP(subprob, solve_data, config)
    if subprob_result.feasible:  # subproblem is feasible
        update_subproblem_progress_indicators(subprob, solve_data, config)
    return subprob_result


def get_infeasible_result_object(model, message=""):
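    """Return a SubproblemResult marked infeasible, with the given message."""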
    infeas_result = SubproblemResult()
    infeas_result.feasible = False
    infeas_result.var_values = list(
        v.value for v in model.GDPopt_utils.variable_list)
    infeas_result.pyomo_results = SolverResults()
    infeas_result.pyomo_results.solver.termination_condition = tc.infeasible
    infeas_result.pyomo_results.message = message
    infeas_result.dual_values = list(
        None for _ in model.GDPopt_utils.constraint_list)
    return infeas_result