1# Copyright (c) Facebook, Inc. and its affiliates. All Rights Reserved.
2#
3# This source code is licensed under the MIT license found in the
4# LICENSE file in the root directory of this source tree.
5
6import pickle
7import warnings
8from pathlib import Path
9from numbers import Real
10from collections import deque
11import numpy as np
12import nevergrad.common.typing as tp
13from nevergrad.parametrization import parameter as p
14from nevergrad.common import tools as ngtools
15from nevergrad.common import errors as errors
16from nevergrad.common.decorators import Registry
17from . import utils
18from . import multiobjective as mobj
19
20
21OptCls = tp.Union["ConfiguredOptimizer", tp.Type["Optimizer"]]
22registry: Registry[OptCls] = Registry()
23_OptimCallBack = tp.Union[
24    tp.Callable[["Optimizer", "p.Parameter", float], None], tp.Callable[["Optimizer"], None]
25]
26X = tp.TypeVar("X", bound="Optimizer")
27Y = tp.TypeVar("Y")
28IntOrParameter = tp.Union[int, p.Parameter]
29_PruningCallable = tp.Callable[[utils.Archive[utils.MultiValue]], utils.Archive[utils.MultiValue]]
30
31
32def _loss(param: p.Parameter) -> float:
33    """Returns the loss if available, or inf otherwise.
34    Used to simplify handling of losses
35    """
36    return param.loss if param.loss is not None else float("inf")
37
38
39def load(cls: tp.Type[X], filepath: tp.PathLike) -> X:
40    """Loads a pickle file and checks that it contains an optimizer.
41    The optimizer class is not always fully reliable though (e.g.: optimizer families) so the user is responsible for it.
42    """
43    filepath = Path(filepath)
44    with filepath.open("rb") as f:
45        opt = pickle.load(f)
46    assert isinstance(opt, cls), f"You should only load {cls} with this method (found {type(opt)})"
47    return opt
48
49
50class Optimizer:  # pylint: disable=too-many-instance-attributes
51    """Algorithm framework with 3 main functions:
52
53    - :code:`ask()` which provides a candidate on which to evaluate the function to optimize.
54    - :code:`tell(candidate, loss)` which lets you provide the loss associated to points.
55    - :code:`provide_recommendation()` which provides the best final candidate.
56
57    Typically, one would call :code:`ask()` num_workers times, evaluate the
58    function on these num_workers points in parallel, update with the fitness value when the
59    evaluations is finished, and iterate until the budget is over. At the very end,
60    one would call provide_recommendation for the estimated optimum.
61
62    This class is abstract, it provides internal equivalents for the 3 main functions,
63    among which at least :code:`_internal_ask_candidate` has to be overridden.
64
65    Each optimizer instance should be used only once, with the initial provided budget
66
67    Parameters
68    ----------
69    parametrization: int or Parameter
70        either the dimension of the optimization space, or its parametrization
71    budget: int/None
72        number of allowed evaluations
73    num_workers: int
74        number of evaluations which will be run in parallel at once
75    """
76
77    # pylint: disable=too-many-locals
78
79    # optimizer qualifiers
80    recast = False  # algorithm which were not designed to work with the suggest/update pattern
81    one_shot = False  # algorithm designed to suggest all budget points at once
82    no_parallelization = False  # algorithm which is designed to run sequentially only
83
84    def __init__(
85        self, parametrization: IntOrParameter, budget: tp.Optional[int] = None, num_workers: int = 1
86    ) -> None:
87        if self.no_parallelization and num_workers > 1:
88            raise ValueError(f"{self.__class__.__name__} does not support parallelization")
89        # "seedable" random state: externally setting the seed will provide deterministic behavior
90        # you can also replace or reinitialize this random state
91        self.num_workers = int(num_workers)
92        self.budget = budget
93
94        # How do we deal with cheap constraints i.e. constraints which are fast and use low resources and easy ?
95        # True ==> we penalize them (infinite values for candidates which violate the constraint).
96        # False ==> we repeat the ask until we solve the problem.
97        self._constraints_manager = utils.ConstraintManager()
98        self._penalize_cheap_violations = False
99
100        self.parametrization = (
101            parametrization
102            if not isinstance(parametrization, (int, np.int))
103            else p.Array(shape=(parametrization,))
104        )
105        self.parametrization.freeze()  # avoids issues!
106        if not self.dimension:
107            raise ValueError("No variable to optimize in this parametrization.")
108        self.name = self.__class__.__name__  # printed name in repr
109        # keep a record of evaluations, and current bests which are updated at each new evaluation
110        self.archive: utils.Archive[
111            utils.MultiValue
112        ] = utils.Archive()  # dict like structure taking np.ndarray as keys and Value as values
113        self.current_bests = {
114            x: utils.MultiValue(self.parametrization, np.inf, reference=self.parametrization)
115            for x in ["optimistic", "pessimistic", "average", "minimum"]
116        }
117        # pruning function, called at each "tell"
118        # this can be desactivated or modified by each implementation
119        self.pruning: tp.Optional[_PruningCallable] = utils.Pruning.sensible_default(
120            num_workers=num_workers, dimension=self.parametrization.dimension
121        )
122        # multiobjective
123        self._MULTIOBJECTIVE_AUTO_BOUND = mobj.AUTO_BOUND
124        self._hypervolume_pareto: tp.Optional[mobj.HypervolumePareto] = None
125        # instance state
126        self._asked: tp.Set[str] = set()
127        self._num_objectives = 0
128        self._suggestions: tp.Deque[p.Parameter] = deque()
129        self._num_ask = 0
130        self._num_tell = 0  # increases after each successful tell
131        self._num_tell_not_asked = 0
132        self._callbacks: tp.Dict[str, tp.List[tp.Any]] = {}
133        # to make optimize function stoppable halway through
134        self._running_jobs: tp.List[tp.Tuple[p.Parameter, tp.JobLike[tp.Loss]]] = []
135        self._finished_jobs: tp.Deque[tp.Tuple[p.Parameter, tp.JobLike[tp.Loss]]] = deque()
136
137    @property
138    def _rng(self) -> np.random.RandomState:
139        """np.random.RandomState: parametrization random state the optimizer must pull from.
140        It can be seeded or updated directly on the parametrization instance (`optimizer.parametrization.random_state`)
141        """
142        return self.parametrization.random_state
143
144    @property
145    def dimension(self) -> int:
146        """int: Dimension of the optimization space."""
147        return self.parametrization.dimension
148
149    @property
150    def num_objectives(self) -> int:
151        """Provides 0 if the number is not known yet, else the number of objectives
152        to optimize upon.
153        """
154        if (
155            self._hypervolume_pareto is not None
156            and self._num_objectives != self._hypervolume_pareto.num_objectives
157        ):
158            raise RuntimeError("Number of objectives is incorrectly set. Please create a nevergrad issue")
159        return self._num_objectives
160
161    @num_objectives.setter
162    def num_objectives(self, num: int) -> None:
163        num = int(num)
164        if num <= 0:
165            raise ValueError("Number of objectives must be strictly positive")
166        if not self._num_objectives:
167            self._num_objectives = num
168            self._num_objectives_set_callback()
169        elif num != self._num_objectives:
170            raise ValueError(f"Expected {self._num_objectives} loss(es), but received {num}.")
171
172    def _num_objectives_set_callback(self) -> None:
173        """Callback for when num objectives is first known"""
174
175    @property
176    def num_ask(self) -> int:
177        """int: Number of time the `ask` method was called."""
178        return self._num_ask
179
180    @property
181    def num_tell(self) -> int:
182        """int: Number of time the `tell` method was called."""
183        return self._num_tell
184
185    @property
186    def num_tell_not_asked(self) -> int:
187        """int: Number of time the :code:`tell` method was called on candidates that were not asked for by the optimizer
188        (or were suggested).
189        """
190        return self._num_tell_not_asked
191
192    def pareto_front(
193        self, size: tp.Optional[int] = None, subset: str = "random", subset_tentatives: int = 12
194    ) -> tp.List[p.Parameter]:
195        """Pareto front, as a list of Parameter. The losses can be accessed through
196        parameter.losses
197
198        Parameters
199        ------------
200        size:  int (optional)
201            if provided, selects a subset of the full pareto front with the given maximum size
202        subset: str
203            method for selecting the subset ("random, "loss-covering", "domain-covering", "hypervolume")
204        subset_tentatives: int
205            number of random tentatives for finding a better subset
206
207        Returns
208        --------
209        list
210            the list of Parameter of the pareto front
211
212        Note
213        ----
214        During non-multiobjective optimization, this returns the current pessimistic best
215        """
216        pareto = (
217            []
218            if self._hypervolume_pareto is None
219            else self._hypervolume_pareto.pareto_front(
220                size=size, subset=subset, subset_tentatives=subset_tentatives
221            )
222        )
223        return pareto if pareto else [self.provide_recommendation()]
224
225    def dump(self, filepath: tp.Union[str, Path]) -> None:
226        """Pickles the optimizer into a file."""
227        filepath = Path(filepath)
228        with filepath.open("wb") as f:
229            pickle.dump(self, f)
230
231    @classmethod
232    def load(cls: tp.Type[X], filepath: tp.Union[str, Path]) -> X:
233        """Loads a pickle and checks that the class is correct."""
234        return load(cls, filepath)
235
236    def __repr__(self) -> str:
237        inststr = self.parametrization.name
238        return f"Instance of {self.name}(parametrization={inststr}, budget={self.budget}, num_workers={self.num_workers})"
239
240    def register_callback(self, name: str, callback: _OptimCallBack) -> None:
241        """Add a callback method called either when `tell` or `ask` are called, with the same
242        arguments (including the optimizer / self). This can be useful for custom logging.
243
244        Parameters
245        ----------
246        name: str
247            name of the method to register the callback for (either :code:`ask` or :code:`tell`)
248        callback: callable
249            a callable taking the same parameters as the method it is registered upon (including self)
250        """
251        assert name in ["ask", "tell"], f'Only "ask" and "tell" methods can have callbacks (not {name})'
252        self._callbacks.setdefault(name, []).append(callback)
253
254    def remove_all_callbacks(self) -> None:
255        """Removes all registered callables"""
256        self._callbacks = {}
257
258    def suggest(self, *args: tp.Any, **kwargs: tp.Any) -> None:
259        """Suggests a new point to ask.
260        It will be asked at the next call (last in first out).
261
262        Parameters
263        ----------
264        *args: Any
265            positional arguments matching the parametrization pattern.
266        *kwargs: Any
267            keyword arguments matching the parametrization pattern.
268
269        Note
270        ----
271        - This relies on optmizers implementing a way to deal with unasked candidate.
272          Some optimizers may not support it and will raise a :code:`TellNotAskedNotSupportedError`
273          at :code:`tell` time.
274        - LIFO is used so as to be able to suggest and ask straightaway, as an alternative to
275          creating a new candidate with :code:`optimizer.parametrization.spawn_child(new_value)`
276        """
277        if isinstance(self.parametrization, p.Instrumentation):
278            new_value: tp.Any = (args, kwargs)
279        else:
280            assert len(args) == 1 and not kwargs
281            new_value = args[0]
282        self._suggestions.append(self.parametrization.spawn_child(new_value=new_value))
283
284    # pylint: disable=too-many-branches
285    def tell(self, candidate: p.Parameter, loss: tp.Loss) -> None:
286        """Provides the optimizer with the evaluation of a fitness value for a candidate.
287
288        Parameters
289        ----------
290        x: np.ndarray
291            point where the function was evaluated
292        loss: float/list/np.ndarray
293            loss of the function (or multi-objective function
294
295        Note
296        ----
297        The candidate should generally be one provided by :code:`ask()`, but can be also
298        a non-asked candidate. To create a p.Parameter instance from args and kwargs,
299        you can use :code:`candidate = optimizer.parametrization.spawn_child(new_value=your_value)`:
300
301        - for an :code:`Array(shape(2,))`: :code:`optimizer.parametrization.spawn_child(new_value=[12, 12])`
302
303        - for an :code:`Instrumentation`: :code:`optimizer.parametrization.spawn_child(new_value=(args, kwargs))`
304
305        Alternatively, you can provide a suggestion with :code:`optimizer.suggest(*args, **kwargs)`, the next :code:`ask`
306        will use this suggestion.
307        """
308        # Check loss type
309        if isinstance(loss, (Real, float)):
310            # using "float" along "Real" because mypy does not understand "Real" for now Issue #3186
311            loss = float(loss)
312            # Non-sense values including NaNs should not be accepted.
313            # We do not use max-float as various later transformations could lead to greater values.
314            if not loss < 5.0e20:  # pylint: disable=unneeded-not
315                warnings.warn(
316                    f"Clipping very high value {loss} in tell (rescale the cost function?).",
317                    errors.LossTooLargeWarning,
318                )
319                loss = 5.0e20  # sys.float_info.max leads to numerical problems so let us do this.
320        elif isinstance(loss, (tuple, list, np.ndarray)):
321            loss = np.array(loss, copy=False, dtype=float).ravel() if len(loss) != 1 else loss[0]
322        elif not isinstance(loss, np.ndarray):
323            raise TypeError(
324                f'"tell" method only supports float values but the passed loss was: {loss} (type: {type(loss)}.'
325            )
326        # check Parameter
327        if not isinstance(candidate, p.Parameter):
328            raise TypeError(
329                "'tell' must be provided with the candidate.\n"
330                "Use optimizer.parametrization.spawn_child(new_value)) if you want to "
331                "create a candidate that as not been asked for, "
332                "or optimizer.suggest(*args, **kwargs) to suggest a point that should be used for "
333                "the next ask"
334            )
335        # check loss length
336        self.num_objectives = 1 if isinstance(loss, float) else loss.size
337        # checks are done, start processing
338        candidate.freeze()  # make sure it is not modified somewhere
339        # add reference if provided
340        if isinstance(candidate, p.MultiobjectiveReference):
341            if self._hypervolume_pareto is not None:
342                raise RuntimeError("MultiobjectiveReference can only be provided before the first tell.")
343            if not isinstance(loss, np.ndarray):
344                raise RuntimeError("MultiobjectiveReference must only be used for multiobjective losses")
345            self._hypervolume_pareto = mobj.HypervolumePareto(upper_bounds=loss, seed=self._rng)
346            if candidate.value is None:
347                return  # no value, so stopping processing there
348            candidate = candidate.value
349        # preprocess multiobjective loss
350        if isinstance(loss, np.ndarray):
351            candidate._losses = loss
352        if not isinstance(loss, float):
353            loss = self._preprocess_multiobjective(candidate)
354        # call callbacks for logging etc...
355        candidate.loss = loss
356        assert isinstance(loss, float)
357        for callback in self._callbacks.get("tell", []):
358            # multiobjective reference is not handled :s
359            # but this allows obtaining both scalar and multiobjective loss (through losses)
360            callback(self, candidate, loss)
361        if not candidate.satisfies_constraints() and self.budget is not None:
362            penalty = self._constraints_manager.penalty(candidate, self.num_ask, self.budget)
363            loss = loss + penalty
364        if isinstance(loss, float):
365            self._update_archive_and_bests(candidate, loss)
366        if candidate.uid in self._asked:
367            self._internal_tell_candidate(candidate, loss)
368            self._asked.remove(candidate.uid)
369        else:
370            self._internal_tell_not_asked(candidate, loss)
371            self._num_tell_not_asked += 1
372        self._num_tell += 1
373
374    def _preprocess_multiobjective(self, candidate: p.Parameter) -> tp.FloatLoss:
375        if self._hypervolume_pareto is None:
376            self._hypervolume_pareto = mobj.HypervolumePareto(auto_bound=self._MULTIOBJECTIVE_AUTO_BOUND)
377        return self._hypervolume_pareto.add(candidate)
378
379    def _update_archive_and_bests(self, candidate: p.Parameter, loss: tp.FloatLoss) -> None:
380        x = candidate.get_standardized_data(reference=self.parametrization)
381        if not isinstance(
382            loss, (Real, float)
383        ):  # using "float" along "Real" because mypy does not understand "Real" for now Issue #3186
384            raise TypeError(
385                f'"tell" method only supports float values but the passed loss was: {loss} (type: {type(loss)}.'
386            )
387        if np.isnan(loss) or loss == np.inf:
388            warnings.warn(f"Updating fitness with {loss} value", errors.BadLossWarning)
389        mvalue: tp.Optional[utils.MultiValue] = None
390        if x not in self.archive:
391            self.archive[x] = utils.MultiValue(candidate, loss, reference=self.parametrization)
392        else:
393            mvalue = self.archive[x]
394            mvalue.add_evaluation(loss)
395            # both parameters should be non-None
396            if mvalue.parameter.loss > candidate.loss:  # type: ignore
397                mvalue.parameter = candidate  # keep best candidate
398        # update current best records
399        # this may have to be improved if we want to keep more kinds of best losss
400
401        for name in self.current_bests:
402            if mvalue is self.current_bests[name]:  # reboot
403                best = min(self.archive.values(), key=lambda mv, n=name: mv.get_estimation(n))  # type: ignore
404                # rebuild best point may change, and which value did not track the updated value anyway
405                self.current_bests[name] = best
406            else:
407                if self.archive[x].get_estimation(name) <= self.current_bests[name].get_estimation(name):
408                    self.current_bests[name] = self.archive[x]
409                # deactivated checks
410                # if not (np.isnan(loss) or loss == np.inf):
411                #     if not self.current_bests[name].x in self.archive:
412                #         bval = self.current_bests[name].get_estimation(name)
413                #         avals = (min(v.get_estimation(name) for v in self.archive.values()),
414                #                  max(v.get_estimation(name) for v in self.archive.values()))
415                #         raise RuntimeError(f"Best value should exist in the archive at num_tell={self.num_tell})\n"
416                #                            f"Best value is {bval} and archive is within range {avals} for {name}")
417        if self.pruning is not None:
418            self.archive = self.pruning(self.archive)
419
420    def ask(self) -> p.Parameter:
421        """Provides a point to explore.
422        This function can be called multiple times to explore several points in parallel
423
424        Returns
425        -------
426        p.Parameter:
427            The candidate to try on the objective function. :code:`p.Parameter` have field :code:`args` and :code:`kwargs`
428            which can be directly used on the function (:code:`objective_function(*candidate.args, **candidate.kwargs)`).
429        """
430        # call callbacks for logging etc...
431        for callback in self._callbacks.get("ask", []):
432            callback(self)
433        current_num_ask = self.num_ask
434        # tentatives if a cheap constraint is available
435        max_trials = max(1, self._constraints_manager.max_trials // 2)
436        # half will be used for sub-optimization --- if the optimization method does not need/use a budget.
437        # TODO(oteytaud): actually we could do this even when the budget is known, if we are sure that
438        # exceeding the budget is not a problem.
439        # Very simple constraint solver:
440        # - we use a simple algorithm.
441        # - no memory of previous iterations.
442        # - just projection to constraint satisfaction.
443        # We try using the normal tool during half constraint budget, in order to reduce the impact on the normal run.
444        for _ in range(max_trials):
445            is_suggestion = False
446            if self._suggestions:  # use suggestions if available
447                is_suggestion = True
448                candidate = self._suggestions.pop()
449            else:
450                candidate = self._internal_ask_candidate()
451            if candidate.satisfies_constraints():
452                break  # good to go!
453            if self._penalize_cheap_violations:
454                # Warning! This might be a tell not asked.
455                self._internal_tell_candidate(candidate, float("Inf"))  # DE requires a tell
456            # updating num_ask  is necessary for some algorithms which need new num to ask another point
457            self._num_ask += 1
458        satisfies = candidate.satisfies_constraints()
459        if not satisfies:
460            # still not solving, let's run sub-optimization
461            candidate = _constraint_solver(candidate, budget=max_trials)
462        if not (satisfies or candidate.satisfies_constraints()):
463            warnings.warn(
464                f"Could not bypass the constraint after {max_trials} tentatives, "
465                "sending candidate anyway.",
466                errors.FailedConstraintWarning,
467            )
468        if not is_suggestion:
469            if candidate.uid in self._asked:
470                raise RuntimeError(
471                    "Cannot submit the same candidate twice: please recreate a new candidate from data.\n"
472                    "This is to make sure that stochastic parametrizations are resampled."
473                )
474            self._asked.add(candidate.uid)
475        self._num_ask = current_num_ask + 1
476        assert (
477            candidate is not None
478        ), f"{self.__class__.__name__}._internal_ask method returned None instead of a point."
479        # make sure to call value getter which may update the value, before we freeze the paremeter
480        candidate.value  # pylint: disable=pointless-statement
481        candidate.freeze()  # make sure it is not modified somewhere
482        return candidate
483
484    def provide_recommendation(self) -> p.Parameter:
485        """Provides the best point to use as a minimum, given the budget that was used
486
487        Returns
488        -------
489        p.Parameter
490            The candidate with minimal value. p.Parameters have field :code:`args` and :code:`kwargs` which can be directly used
491            on the function (:code:`objective_function(*candidate.args, **candidate.kwargs)`).
492        """
493        return self.recommend()  # duplicate method
494
495    def recommend(self) -> p.Parameter:
496        """Provides the best candidate to use as a minimum, given the budget that was used.
497
498        Returns
499        -------
500        p.Parameter
501            The candidate with minimal loss. :code:`p.Parameters` have field :code:`args` and :code:`kwargs` which can be directly used
502            on the function (:code:`objective_function(*candidate.args, **candidate.kwargs)`).
503        """
504        recom_data = self._internal_provide_recommendation()  # pylint: disable=assignment-from-none
505        if recom_data is None or any(np.isnan(recom_data)):
506            name = "minimum" if self.parametrization.function.deterministic else "pessimistic"
507            return self.current_bests[name].parameter
508        out = self.parametrization.spawn_child()
509        with p.helpers.deterministic_sampling(out):
510            out.set_standardized_data(recom_data)
511        return out
512
513    def _internal_tell_not_asked(self, candidate: p.Parameter, loss: tp.FloatLoss) -> None:
514        """Called whenever calling :code:`tell` on a candidate that was not "asked".
515        Defaults to the standard tell pipeline.
516        """
517        self._internal_tell_candidate(candidate, loss)
518
519    def _internal_tell_candidate(self, candidate: p.Parameter, loss: tp.FloatLoss) -> None:
520        """Called whenever calling :code:`tell` on a candidate that was "asked"."""
521        data = candidate.get_standardized_data(reference=self.parametrization)
522        self._internal_tell(data, loss)
523
524    def _internal_ask_candidate(self) -> p.Parameter:
525        return self.parametrization.spawn_child().set_standardized_data(self._internal_ask())
526
527    # Internal methods which can be overloaded (or must be, in the case of _internal_ask)
528    def _internal_tell(self, x: tp.ArrayLike, loss: tp.FloatLoss) -> None:
529        pass
530
531    def _internal_ask(self) -> tp.ArrayLike:
532        raise RuntimeError("Not implemented, should not be called.")
533
534    def _internal_provide_recommendation(self) -> tp.Optional[tp.ArrayLike]:
535        """Override to provide a recommendation in standardized space"""
536        return None
537
538    def minimize(
539        self,
540        objective_function: tp.Callable[..., tp.Loss],
541        executor: tp.Optional[tp.ExecutorLike] = None,
542        batch_mode: bool = False,
543        verbosity: int = 0,
544    ) -> p.Parameter:
545        """Optimization (minimization) procedure
546
547        Parameters
548        ----------
549        objective_function: callable
550            A callable to optimize (minimize)
551        executor: Executor
552            An executor object, with method :code:`submit(callable, *args, **kwargs)` and returning a Future-like object
553            with methods :code:`done() -> bool` and :code:`result() -> float`. The executor role is to dispatch the execution of
554            the jobs locally/on a cluster/with multithreading depending on the implementation.
555            Eg: :code:`concurrent.futures.ThreadPoolExecutor`
556        batch_mode: bool
557            when :code:`num_workers = n > 1`, whether jobs are executed by batch (:code:`n` function evaluations are launched,
558            we wait for all results and relaunch n evals) or not (whenever an evaluation is finished, we launch
559            another one)
560        verbosity: int
561            print information about the optimization (0: None, 1: fitness values, 2: fitness values and recommendation)
562
563        Returns
564        -------
565        ng.p.Parameter
566            The candidate with minimal value. :code:`ng.p.Parameters` have field :code:`args` and :code:`kwargs` which can
567            be directly used on the function (:code:`objective_function(*candidate.args, **candidate.kwargs)`).
568
569        Note
570        ----
571        for evaluation purpose and with the current implementation, it is better to use batch_mode=True
572        """
573        # pylint: disable=too-many-branches
574        if self.budget is None:
575            raise ValueError("Budget must be specified")
576        if executor is None:
577            executor = utils.SequentialExecutor()  # defaults to run everything locally and sequentially
578            if self.num_workers > 1:
579                warnings.warn(
580                    f"num_workers = {self.num_workers} > 1 is suboptimal when run sequentially",
581                    errors.InefficientSettingsWarning,
582                )
583        assert executor is not None
584        tmp_runnings: tp.List[tp.Tuple[p.Parameter, tp.JobLike[tp.Loss]]] = []
585        tmp_finished: tp.Deque[tp.Tuple[p.Parameter, tp.JobLike[tp.Loss]]] = deque()
586        # go
587        sleeper = ngtools.Sleeper()  # manages waiting time depending on execution time of the jobs
588        remaining_budget = self.budget - self.num_ask
589        first_iteration = True
590        #
591        while remaining_budget or self._running_jobs or self._finished_jobs:
592            # # # # # Update optimizer with finished jobs # # # # #
593            # this is the first thing to do when resuming an existing optimization run
594            # process finished
595            if self._finished_jobs:
596                if (remaining_budget or sleeper._start is not None) and not first_iteration:
597                    # ignore stop if no more suggestion is sent
598                    # this is an ugly hack to avoid warnings at the end of steady mode
599                    sleeper.stop_timer()
600                while self._finished_jobs:
601                    x, job = self._finished_jobs[0]
602                    result = job.result()
603                    self.tell(x, result)
604                    self._finished_jobs.popleft()  # remove it after the tell to make sure it was indeed "told" (in case of interruption)
605                    if verbosity:
606                        print(f"Updating fitness with value {job.result()}")
607                if verbosity:
608                    print(f"{remaining_budget} remaining budget and {len(self._running_jobs)} running jobs")
609                    if verbosity > 1:
610                        print("Current pessimistic best is: {}".format(self.current_bests["pessimistic"]))
611            elif not first_iteration:
612                sleeper.sleep()
613            # # # # # Start new jobs # # # # #
614            if not batch_mode or not self._running_jobs:
615                new_sugg = max(0, min(remaining_budget, self.num_workers - len(self._running_jobs)))
616                if verbosity and new_sugg:
617                    print(f"Launching {new_sugg} jobs with new suggestions")
618                for _ in range(new_sugg):
619                    try:
620                        args = self.ask()
621                    except errors.NevergradEarlyStopping:
622                        remaining_budget = 0
623                        break
624                    self._running_jobs.append(
625                        (args, executor.submit(objective_function, *args.args, **args.kwargs))
626                    )
627                if new_sugg:
628                    sleeper.start_timer()
629            if remaining_budget > 0:  # early stopping sets it to 0
630                remaining_budget = self.budget - self.num_ask
631            # split (repopulate finished and runnings in only one loop to avoid
632            # weird effects if job finishes in between two list comprehensions)
633            tmp_runnings, tmp_finished = [], deque()
634            for x_job in self._running_jobs:
635                (tmp_finished if x_job[1].done() else tmp_runnings).append(x_job)
636            self._running_jobs, self._finished_jobs = tmp_runnings, tmp_finished
637            first_iteration = False
638        return self.provide_recommendation()
639
640
641# Adding a comparison-only functionality to an optimizer.
642def addCompare(optimizer: Optimizer) -> None:
643    def compare(self: Optimizer, winners: tp.List[p.Parameter], losers: tp.List[p.Parameter]) -> None:
644        # This means that for any i and j, winners[i] is better than winners[i+1], and better than losers[j].
645        # This is for cases in which we do not know fitness values, we just know comparisons.
646
647        ref = self.parametrization
648        # Evaluate the best fitness value among losers.
649        best_fitness_value = 0.0
650        for candidate in losers:
651            data = candidate.get_standardized_data(reference=self.parametrization)
652            if data in self.archive:
653                best_fitness_value = min(best_fitness_value, self.archive[data].get_estimation("average"))
654
655        # Now let us decide the fitness value of winners.
656        for i, candidate in enumerate(winners):
657            self.tell(candidate, best_fitness_value - len(winners) + i)
658            data = candidate.get_standardized_data(reference=self.parametrization)
659            self.archive[data] = utils.MultiValue(
660                candidate, best_fitness_value - len(winners) + i, reference=ref
661            )
662
663    setattr(optimizer.__class__, "compare", compare)
664
665
666class ConfiguredOptimizer:
667    """Creates optimizer-like instances with configuration.
668
669    Parameters
670    ----------
671    OptimizerClass: type
672        class of the optimizer to configure
673    config: dict
674        dictionnary of all the configurations
675    as_config: bool
676        whether to provide all config as kwargs to the optimizer instantiation (default, see ConfiguredCMA for an example),
677        or through a config kwarg referencing self. (if True, see EvolutionStrategy for an example)
678
679    Note
680    ----
681    This provides a default repr which can be bypassed through set_name
682    """
683
684    # optimizer qualifiers
685    recast = False  # algorithm which were not designed to work with the suggest/update pattern
686    one_shot = False  # algorithm designed to suggest all budget points at once
687    no_parallelization = False  # algorithm which is designed to run sequentially only
688
689    def __init__(
690        self, OptimizerClass: tp.Type[Optimizer], config: tp.Dict[str, tp.Any], as_config: bool = False
691    ) -> None:
692        self._OptimizerClass = OptimizerClass
693        config.pop("self", None)  # self comes from "locals()"
694        config.pop("__class__", None)  # self comes from "locals()"
695        self._as_config = as_config
696        self._config = config  # keep all, to avoid weird behavior at mismatch between optim and configoptim
697        diff = ngtools.different_from_defaults(instance=self, instance_dict=config, check_mismatches=True)
698        params = ", ".join(f"{x}={y!r}" for x, y in sorted(diff.items()))
699        self.name = f"{self.__class__.__name__}({params})"
700        if not as_config:
701            # try instantiating for init checks
702            # if as_config: check can be done before setting attributes
703            self(parametrization=4, budget=100)
704
705    def config(self) -> tp.Dict[str, tp.Any]:
706        return dict(self._config)
707
708    def __call__(
709        self, parametrization: IntOrParameter, budget: tp.Optional[int] = None, num_workers: int = 1
710    ) -> Optimizer:
711        """Creates an optimizer from the parametrization
712
713        Parameters
714        ----------
715        instrumentation: int or Instrumentation
716            either the dimension of the optimization space, or its instrumentation
717        budget: int/None
718            number of allowed evaluations
719        num_workers: int
720            number of evaluations which will be run in parallel at once
721        """
722        config = dict(config=self) if self._as_config else self._config
723        run = self._OptimizerClass(parametrization=parametrization, budget=budget, num_workers=num_workers, **config)  # type: ignore
724        run.name = self.name
725        # hacky but convenient to have around:
726        run._configured_optimizer = self  # type: ignore
727        return run
728
729    def __repr__(self) -> str:
730        return self.name
731
732    def set_name(self, name: str, register: bool = False) -> "ConfiguredOptimizer":
733        """Set a new representation for the instance"""
734        self.name = name
735        if register:
736            registry.register_name(name, self)
737        return self
738
739    def load(self, filepath: tp.Union[str, Path]) -> "Optimizer":
740        """Loads a pickle and checks that it is an Optimizer."""
741        return self._OptimizerClass.load(filepath)
742
743    def __eq__(self, other: tp.Any) -> tp.Any:
744        if self.__class__ == other.__class__:
745            if self._config == other._config:
746                return True
747        return False
748
749
750def _constraint_solver(parameter: p.Parameter, budget: int) -> p.Parameter:
751    """Runs a suboptimization to solve the parameter constraints"""
752    parameter_without_constraint = parameter.copy()
753    parameter_without_constraint._constraint_checkers.clear()
754    opt = registry["OnePlusOne"](parameter_without_constraint, num_workers=1, budget=budget)
755    for _ in range(budget):
756        cand = opt.ask()
757        # Our objective function is minimum for the point the closest to
758        # the original candidate under the constraints.
759        penalty = sum(utils._float_penalty(func(cand.value)) for func in parameter._constraint_checkers)
760
761        # TODO: this may not scale well with dimension
762        distance = np.tanh(np.sum(cand.get_standardized_data(reference=parameter) ** 2))
763        # TODO: because of the return whenever constraints are satisfied, the first case never arises
764        loss = distance if penalty <= 0 else penalty + distance + 1.0
765        opt.tell(cand, loss)
766        if penalty <= 0:  # constraints are satisfied
767            break
768    data = opt.recommend().get_standardized_data(reference=parameter_without_constraint)
769    return parameter.spawn_child().set_standardized_data(data)
770