1"""
2Notes
3-----
Code is written using the textbook below as a reference.
Results are checked against the expected outcomes in the textbook.

Reference:
Hyndman, Rob J., and George Athanasopoulos. Forecasting: principles and
practice. OTexts, 2014.
10
11Author: Terence L van Zyl
12Modified: Kevin Sheppard
13"""
14from statsmodels.compat.pandas import deprecate_kwarg
15
16import contextlib
17from typing import Any, Hashable, Sequence
18import warnings
19
20import numpy as np
21import pandas as pd
22from scipy.optimize import basinhopping, least_squares, minimize
23from scipy.special import inv_boxcox
24from scipy.stats import boxcox
25
26from statsmodels.tools.validation import (
27    array_like,
28    bool_like,
29    dict_like,
30    float_like,
31    int_like,
32    string_like,
33)
34from statsmodels.tsa.base.tsa_model import TimeSeriesModel
35from statsmodels.tsa.exponential_smoothing.ets import (
36    _initialization_heuristic,
37    _initialization_simple,
38)
39from statsmodels.tsa.holtwinters import (
40    _exponential_smoothers as smoothers,
41    _smoothers as py_smoothers,
42)
43from statsmodels.tsa.holtwinters._exponential_smoothers import HoltWintersArgs
44from statsmodels.tsa.holtwinters._smoothers import (
45    to_restricted,
46    to_unrestricted,
47)
48from statsmodels.tsa.holtwinters.results import (
49    HoltWintersResults,
50    HoltWintersResultsWrapper,
51)
52from statsmodels.tsa.tsatools import freq_to_period
53
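# Map (seasonal, trend) component types to the smoother implementations:
# SMOOTHERS uses the routines from _exponential_smoothers and PY_SMOOTHERS
# the pure-Python equivalents from _smoothers.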
54SMOOTHERS = {
55    ("mul", "add"): smoothers.holt_win_add_mul_dam,
56    ("mul", "mul"): smoothers.holt_win_mul_mul_dam,
57    ("mul", None): smoothers.holt_win__mul,
58    ("add", "add"): smoothers.holt_win_add_add_dam,
59    ("add", "mul"): smoothers.holt_win_mul_add_dam,
60    ("add", None): smoothers.holt_win__add,
61    (None, "add"): smoothers.holt_add_dam,
62    (None, "mul"): smoothers.holt_mul_dam,
63    (None, None): smoothers.holt__,
64}
65
66PY_SMOOTHERS = {
67    ("mul", "add"): py_smoothers.holt_win_add_mul_dam,
68    ("mul", "mul"): py_smoothers.holt_win_mul_mul_dam,
69    ("mul", None): py_smoothers.holt_win__mul,
70    ("add", "add"): py_smoothers.holt_win_add_add_dam,
71    ("add", "mul"): py_smoothers.holt_win_mul_add_dam,
72    ("add", None): py_smoothers.holt_win__add,
73    (None, "add"): py_smoothers.holt_add_dam,
74    (None, "mul"): py_smoothers.holt_mul_dam,
75    (None, None): py_smoothers.holt__,
76}
77
78
79def opt_wrapper(func):
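    """Wrap an objective so residual arrays are converted to a scalar SSE."""
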
80    def f(*args, **kwargs):
81        err = func(*args, **kwargs)
82        if isinstance(err, np.ndarray):
83            return err.T @ err
84        return err
85
86    return f
87
88
89class _OptConfig(object):
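    """Container for parameters and data shared by the fitting helpers."""
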
90    alpha: float
91    beta: float
92    phi: float
93    gamma: float
94    level: float
95    trend: float
96    seasonal: np.ndarray
97    y: np.ndarray
98    params: np.ndarray
99    mask: np.ndarray
100    mle_retvals: Any
101
102    def unpack_parameters(self, params) -> "_OptConfig":
103        self.alpha = params[0]
104        self.beta = params[1]
105        self.gamma = params[2]
106        self.level = params[3]
107        self.trend = params[4]
108        self.phi = params[5]
109        self.seasonal = params[6:]
110
111        return self
112
113
114class ExponentialSmoothing(TimeSeriesModel):
115    """
116    Holt Winter's Exponential Smoothing
117
118    Parameters
119    ----------
120    endog : array_like
121        The time series to model.
122    trend : {"add", "mul", "additive", "multiplicative", None}, optional
123        Type of trend component.
124    damped_trend : bool, optional
125        Should the trend component be damped.
126    seasonal : {"add", "mul", "additive", "multiplicative", None}, optional
127        Type of seasonal component.
128    seasonal_periods : int, optional
129        The number of periods in a complete seasonal cycle, e.g., 4 for
130        quarterly data or 7 for daily data with a weekly cycle.
    initialization_method : str, optional
        Method for initializing the recursions. One of:
133
134        * None
135        * 'estimated'
136        * 'heuristic'
137        * 'legacy-heuristic'
138        * 'known'
139
140        None defaults to the pre-0.12 behavior where initial values
141        are passed as part of ``fit``. If any of the other values are
142        passed, then the initial values must also be set when constructing
143        the model. If 'known' initialization is used, then `initial_level`
144        must be passed, as well as `initial_trend` and `initial_seasonal` if
145        applicable. Default is 'estimated'. "legacy-heuristic" uses the same
146        values that were used in statsmodels 0.11 and earlier.
    initial_level : float, optional
        The initial level component. Required if estimation method is
        "known". If set when using either "estimated" or "heuristic"
        initialization, this value is used. This allows one or more of the
        initial values to be set while deferring to the heuristic for others
        or estimating the unset parameters.
    initial_trend : float, optional
        The initial trend component. Required if estimation method is
        "known". If set when using either "estimated" or "heuristic"
        initialization, this value is used. This allows one or more of the
        initial values to be set while deferring to the heuristic for others
        or estimating the unset parameters.
    initial_seasonal : array_like, optional
        The initial seasonal component. An array of length `seasonal_periods`
        or length `seasonal_periods - 1` (in which case the last initial value
        is computed to make the average effect zero). Required if estimation
        method is "known". If set when using either "estimated" or "heuristic"
        initialization, this value is used. This allows one or more of the
        initial values to be set while deferring to the heuristic for others
        or estimating the unset parameters.
168    use_boxcox : {True, False, 'log', float}, optional
169        Should the Box-Cox transform be applied to the data first? If 'log'
170        then apply the log. If float then use the value as lambda.
    bounds : dict[str, tuple[float, float]], optional
        A dictionary containing bounds for the parameters in the model,
        excluding the initial values if estimated. The keys of the dictionary
        are the variable names, e.g., smoothing_level or initial_trend.
        The initial seasonal variables are labeled initial_seasonal.<j>
        for j=0,...,m-1 where m is the number of periods in a full season.
177        Use None to indicate a non-binding constraint, e.g., (0, None)
178        constrains a parameter to be non-negative.
179    dates : array_like of datetime, optional
180        An array-like object of datetime objects. If a Pandas object is given
181        for endog, it is assumed to have a DateIndex.
182    freq : str, optional
183        The frequency of the time-series. A Pandas offset or 'B', 'D', 'W',
184        'M', 'A', or 'Q'. This is optional if dates are given.
185    missing : str
186        Available options are 'none', 'drop', and 'raise'. If 'none', no nan
187        checking is done. If 'drop', any observations with nans are dropped.
188        If 'raise', an error is raised. Default is 'none'.
189
190    Notes
191    -----
    This is a full implementation of the Holt-Winters exponential smoothing as
193    per [1]_. This includes all the unstable methods as well as the stable
194    methods. The implementation of the library covers the functionality of the
195    R library as much as possible whilst still being Pythonic.
196
197    References
198    ----------
199    .. [1] Hyndman, Rob J., and George Athanasopoulos. Forecasting: principles
200        and practice. OTexts, 2014.
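
    Examples
    --------
    A minimal sketch on a synthetic monthly series (the data below are purely
    illustrative):

    >>> import numpy as np
    >>> import pandas as pd
    >>> from statsmodels.tsa.holtwinters import ExponentialSmoothing
    >>> index = pd.date_range("2000-01-01", periods=48, freq="MS")
    >>> y = pd.Series(
    ...     10 + 0.5 * np.arange(48) + 3 * np.sin(np.arange(48) * np.pi / 6),
    ...     index=index,
    ... )
    >>> mod = ExponentialSmoothing(
    ...     y,
    ...     trend="add",
    ...     seasonal="add",
    ...     seasonal_periods=12,
    ...     initialization_method="estimated",
    ... )
    >>> res = mod.fit()
    >>> forecast = res.forecast(12)

    Bounds for individual smoothing parameters can be supplied through
    ``bounds``:

    >>> bounded = ExponentialSmoothing(
    ...     y,
    ...     trend="add",
    ...     seasonal="add",
    ...     seasonal_periods=12,
    ...     bounds={"smoothing_level": (0.1, 0.9)},
    ...     initialization_method="estimated",
    ... ).fit()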
201    """
202
203    @deprecate_kwarg("damped", "damped_trend")
204    def __init__(
205        self,
206        endog,
207        trend=None,
208        damped_trend=False,
209        seasonal=None,
210        *,
211        seasonal_periods=None,
212        initialization_method="estimated",
213        initial_level=None,
214        initial_trend=None,
215        initial_seasonal=None,
216        use_boxcox=False,
217        bounds=None,
218        dates=None,
219        freq=None,
220        missing="none",
221    ):
222        super().__init__(endog, None, dates, freq, missing=missing)
223        self._y = self._data = array_like(
224            endog, "endog", ndim=1, contiguous=True, order="C"
225        )
226        options = ("add", "mul", "additive", "multiplicative")
227        trend = string_like(trend, "trend", options=options, optional=True)
228        if trend in ["additive", "multiplicative"]:
229            trend = {"additive": "add", "multiplicative": "mul"}[trend]
230        self.trend = trend
231        self.damped_trend = bool_like(damped_trend, "damped_trend")
232        seasonal = string_like(
233            seasonal, "seasonal", options=options, optional=True
234        )
235        if seasonal in ["additive", "multiplicative"]:
236            seasonal = {"additive": "add", "multiplicative": "mul"}[seasonal]
237        self.seasonal = seasonal
238        self.has_trend = trend in ["mul", "add"]
239        self.has_seasonal = seasonal in ["mul", "add"]
240        if (self.trend == "mul" or self.seasonal == "mul") and not np.all(
241            self._data > 0.0
242        ):
243            raise ValueError(
                "endog must be strictly positive when using "
                "multiplicative trend or seasonal components."
246            )
247        if self.damped_trend and not self.has_trend:
248            raise ValueError("Can only dampen the trend component")
249        if self.has_seasonal:
250            self.seasonal_periods = int_like(
251                seasonal_periods, "seasonal_periods", optional=True
252            )
253            if seasonal_periods is None:
254                try:
255                    self.seasonal_periods = freq_to_period(self._index_freq)
256                except Exception:
257                    raise ValueError(
258                        "seasonal_periods has not been provided and index "
259                        "does not have a known freq. You must provide "
260                        "seasonal_periods"
261                    )
262            if self.seasonal_periods <= 1:
263                raise ValueError("seasonal_periods must be larger than 1.")
264            assert self.seasonal_periods is not None
265        else:
266            self.seasonal_periods = 0
267        self.nobs = len(self.endog)
268        options = ("known", "estimated", "heuristic", "legacy-heuristic")
269        self._initialization_method = string_like(
270            initialization_method,
271            "initialization_method",
272            optional=False,
273            options=options,
274        )
275        self._initial_level = float_like(
276            initial_level, "initial_level", optional=True
277        )
278        self._initial_trend = float_like(
279            initial_trend, "initial_trend", optional=True
280        )
281        self._initial_seasonal = array_like(
282            initial_seasonal, "initial_seasonal", optional=True
283        )
284        estimated = self._initialization_method == "estimated"
285        self._estimate_level = estimated
286        self._estimate_trend = estimated and self.trend
287        self._estimate_seasonal = estimated and self.seasonal
288        self._bounds = self._check_bounds(bounds)
289        self._use_boxcox = use_boxcox
290        self._lambda = np.nan
291        self._y = self._boxcox()
292        self._initialize()
293        self._fixed_parameters = {}
294
295    def _check_bounds(self, bounds):
296        bounds = dict_like(bounds, "bounds", optional=True)
297        if bounds is None:
298            return
299        msg = (
300            "bounds must be a dictionary of 2-element tuples of the form"
301            " (lb, ub) where lb < ub, lb>=0 and ub<=1"
302        )
303        variables = self._ordered_names()
304        for key in bounds:
305            if key not in variables:
306                supported = ", ".join(variables[:-1])
307                supported += ", and " + variables[-1]
308                raise KeyError(
309                    f"{key} does not match the list of supported variables "
310                    f"names: {supported}."
311                )
312            bound = bounds[key]
            if not isinstance(bound, tuple):
                raise TypeError(msg)
            if len(bound) != 2:
                raise ValueError(msg)
            lb = bound[0] if bound[0] is not None else -np.inf
            ub = bound[1] if bound[1] is not None else np.inf
            if lb >= ub:
                raise ValueError(msg)
319            if ("smoothing" in key or "damp" in key) and (
320                bound[0] < 0.0 or bound[1] > 1.0
321            ):
                raise ValueError(
                    f"{key} must have a lower bound >= 0.0 and an upper "
                    "bound <= 1.0"
                )
325        return bounds
326
327    def _boxcox(self):
328        if self._use_boxcox is None or self._use_boxcox is False:
329            self._lambda = np.nan
330            return self._y
        if self._use_boxcox is True:
            y, self._lambda = boxcox(self._y)
        elif self._use_boxcox == "log":
            # Box-Cox with lambda=0 is the log transform
            self._lambda = 0.0
            y = boxcox(self._y, self._lambda)
        elif isinstance(self._use_boxcox, (int, float)):
            self._lambda = float(self._use_boxcox)
            y = boxcox(self._y, self._use_boxcox)
        else:
            raise TypeError(
                "use_boxcox must be True, False, 'log' or a float."
            )
338        return y
339
340    @contextlib.contextmanager
341    def fix_params(self, values):
342        """
343        Temporarily fix parameters for estimation.
344
345        Parameters
346        ----------
347        values : dict
348            Values to fix. The key is the parameter name and the value is the
349            fixed value.
350
351        Yields
352        ------
353        None
354            No value returned.
355
356        Examples
357        --------
358        >>> from statsmodels.datasets.macrodata import load_pandas
359        >>> data = load_pandas()
360        >>> import statsmodels.tsa.api as tsa
361        >>> mod = tsa.ExponentialSmoothing(data.data.realcons, trend="add",
362        ...                                initialization_method="estimated")
363        >>> with mod.fix_params({"smoothing_level": 0.2}):
364        ...     mod.fit()
365        """
366        values = dict_like(values, "values")
367        valid_keys = ("smoothing_level",)
368        if self.has_trend:
369            valid_keys += ("smoothing_trend",)
370        if self.has_seasonal:
371            valid_keys += ("smoothing_seasonal",)
372            m = self.seasonal_periods
373            valid_keys += tuple([f"initial_seasonal.{i}" for i in range(m)])
374        if self.damped_trend:
375            valid_keys += ("damping_trend",)
376        if self._initialization_method in ("estimated", None):
377            extra_keys = [
378                key.replace("smoothing_", "initial_")
379                for key in valid_keys
380                if "smoothing_" in key
381            ]
382            valid_keys += tuple(extra_keys)
383
384        for key in values:
385            if key not in valid_keys:
386                valid = ", ".join(valid_keys[:-1]) + ", and " + valid_keys[-1]
387                raise KeyError(
                    f"{key} is not allowed. Only {valid} are supported in "
389                    f"this specification."
390                )
391
392        if "smoothing_level" in values:
393            alpha = values["smoothing_level"]
394            if alpha <= 0.0:
395                raise ValueError("smoothing_level must be in (0, 1)")
396            beta = values.get("smoothing_trend", 0.0)
397            if beta > alpha:
398                raise ValueError("smoothing_trend must be <= smoothing_level")
399            gamma = values.get("smoothing_seasonal", 0.0)
400            if gamma > 1 - alpha:
401                raise ValueError(
402                    "smoothing_seasonal must be <= 1 - smoothing_level"
403                )
404
405        try:
406            self._fixed_parameters = values
407            yield
408        finally:
409            self._fixed_parameters = {}
410
411    def _initialize(self):
412        if self._initialization_method == "known":
413            return self._initialize_known()
414        msg = (
415            f"initialization method is {self._initialization_method} but "
416            "initial_{0} has been set."
417        )
418        if self._initial_level is not None:
419            raise ValueError(msg.format("level"))
420        if self._initial_trend is not None:
421            raise ValueError(msg.format("trend"))
422        if self._initial_seasonal is not None:
423            raise ValueError(msg.format("seasonal"))
424        if self._initialization_method == "legacy-heuristic":
425            return self._initialize_legacy()
426        elif self._initialization_method == "heuristic":
427            return self._initialize_heuristic()
428        elif self._initialization_method == "estimated":
429            if self.nobs < 10 + 2 * (self.seasonal_periods // 2):
430                return self._initialize_simple()
431            else:
432                return self._initialize_heuristic()
433
434    def _initialize_simple(self):
435        trend = self.trend if self.has_trend else False
436        seasonal = self.seasonal if self.has_seasonal else False
437        lvl, trend, seas = _initialization_simple(
438            self._y, trend, seasonal, self.seasonal_periods
439        )
440        self._initial_level = lvl
441        self._initial_trend = trend
442        self._initial_seasonal = seas
443
444    def _initialize_heuristic(self):
445        trend = self.trend if self.has_trend else False
446        seasonal = self.seasonal if self.has_seasonal else False
447        lvl, trend, seas = _initialization_heuristic(
448            self._y, trend, seasonal, self.seasonal_periods
449        )
450        self._initial_level = lvl
451        self._initial_trend = trend
452        self._initial_seasonal = seas
453
454    def _initialize_legacy(self):
455        lvl, trend, seasonal = self.initial_values(force=True)
456        self._initial_level = lvl
457        self._initial_trend = trend
458        self._initial_seasonal = seasonal
459
460    def _initialize_known(self):
461        msg = "initialization is 'known' but initial_{0} not given"
462        if self._initial_level is None:
463            raise ValueError(msg.format("level"))
464        excess = "initial_{0} set but model has no {0} component"
465        if self.has_trend and self._initial_trend is None:
466            raise ValueError(msg.format("trend"))
467        elif not self.has_trend and self._initial_trend is not None:
468            raise ValueError(excess.format("trend"))
469        if self.has_seasonal and self._initial_seasonal is None:
470            raise ValueError(msg.format("seasonal"))
471        elif not self.has_seasonal and self._initial_seasonal is not None:
472            raise ValueError(excess.format("seasonal"))
473
474    def predict(self, params, start=None, end=None):
475        """
476        In-sample and out-of-sample prediction.
477
478        Parameters
479        ----------
480        params : ndarray
481            The fitted model parameters.
482        start : int, str, or datetime
            Zero-indexed observation number at which to start forecasting,
            i.e., the first forecast is start. Can also be a date string to
            parse or a datetime type.
486        end : int, str, or datetime
            Zero-indexed observation number at which to end forecasting,
            i.e., the last forecast is end. Can also be a date string to
            parse or a datetime type.
490
491        Returns
492        -------
493        ndarray
494            The predicted values.
495        """
496        if start is None:
497            freq = getattr(self._index, "freq", 1)
498            if isinstance(freq, int):
499                start = self._index.shape[0]
500            else:
501                start = self._index[-1] + freq
502        start, end, out_of_sample, _ = self._get_prediction_index(
503            start=start, end=end
504        )
505        if out_of_sample > 0:
506            res = self._predict(h=out_of_sample, **params)
507        else:
508            res = self._predict(h=0, **params)
509        return res.fittedfcast[start : end + out_of_sample + 1]
510
511    def _enforce_bounds(self, p, sel, lb, ub):
512        initial_p = p[sel]
513
514        # Ensure strictly inbounds
515        loc = initial_p <= lb
516        upper = ub[loc].copy()
517        upper[~np.isfinite(upper)] = 100.0
518        eps = 1e-4
519        initial_p[loc] = lb[loc] + eps * (upper - lb[loc])
520
521        loc = initial_p >= ub
522        lower = lb[loc].copy()
523        lower[~np.isfinite(lower)] = -100.0
524        eps = 1e-4
525        initial_p[loc] = ub[loc] - eps * (ub[loc] - lower)
526
527        return initial_p
528
529    @staticmethod
530    def _check_blocked_keywords(
531        d: dict, keys: Sequence[Hashable], name="kwargs"
532    ):
533        for key in keys:
534            if key in d:
535                raise ValueError(f"{name} must not contain '{key}'")
536
537    def _check_bound_feasibility(self, bounds):
538        if bounds[1][0] > bounds[0][1]:
539            raise ValueError(
540                "The bounds for smoothing_trend and smoothing_level are "
541                "incompatible since smoothing_trend <= smoothing_level."
542            )
543        if bounds[2][0] > (1 - bounds[0][1]):
544            raise ValueError(
545                "The bounds for smoothing_seasonal and smoothing_level "
546                "are incompatible since smoothing_seasonal <= "
547                "1 - smoothing_level."
548            )
549
550    @staticmethod
551    def _setup_brute(sel, bounds, alpha):
552        # More points when fewer parameters
553        ns = 87 // sel[:3].sum()
554
555        if not sel[0]:
556            # Easy case since no cross-constraints
557            nparams = int(sel[1]) + int(sel[2])
558            args = []
559            for i in range(1, 3):
560                if sel[i]:
561                    bound = bounds[i]
562                    step = bound[1] - bound[0]
563                    lb = bound[0] + 0.005 * step
564                    if i == 1:
565                        ub = min(bound[1], alpha) - 0.005 * step
566                    else:
567                        ub = min(bound[1], 1 - alpha) - 0.005 * step
568                    args.append(np.linspace(lb, ub, ns))
569            points = np.stack(np.meshgrid(*args))
570            points = points.reshape((nparams, -1)).T
571            return np.ascontiguousarray(points)
572
573        bound = bounds[0]
574        step = 0.005 * (bound[1] - bound[0])
575        points = np.linspace(bound[0] + step, bound[1] - step, ns)
576        if not sel[1] and not sel[2]:
577            return points[:, None]
578
579        combined = []
580        b_bounds = bounds[1]
581        g_bounds = bounds[2]
582        if sel[1] and sel[2]:
583            for a in points:
584                b_lb = b_bounds[0]
585                b_ub = min(b_bounds[1], a)
586                g_lb = g_bounds[0]
587                g_ub = min(g_bounds[1], 1 - a)
588                if b_lb > b_ub or g_lb > g_ub:
589                    # infeasible point
590                    continue
591                nb = int(np.ceil(ns * np.sqrt(a)))
592                ng = int(np.ceil(ns * np.sqrt(1 - a)))
593                b = np.linspace(b_lb, b_ub, nb)
594                g = np.linspace(g_lb, g_ub, ng)
595                both = np.stack(np.meshgrid(b, g)).reshape(2, -1).T
596                final = np.empty((both.shape[0], 3))
597                final[:, 0] = a
598                final[:, 1:] = both
599                combined.append(final)
600        elif sel[1]:
601            for a in points:
602                b_lb = b_bounds[0]
603                b_ub = min(b_bounds[1], a)
604                if b_lb > b_ub:
605                    # infeasible point
606                    continue
607                nb = int(np.ceil(ns * np.sqrt(a)))
608                final = np.empty((nb, 2))
609                final[:, 0] = a
610                final[:, 1] = np.linspace(b_lb, b_ub, nb)
611                combined.append(final)
612        else:  # sel[2]
613            for a in points:
614                g_lb = g_bounds[0]
615                g_ub = min(g_bounds[1], 1 - a)
616                if g_lb > g_ub:
617                    # infeasible point
618                    continue
619                ng = int(np.ceil(ns * np.sqrt(1 - a)))
620                final = np.empty((ng, 2))
621                final[:, 1] = np.linspace(g_lb, g_ub, ng)
622                final[:, 0] = a
623                combined.append(final)
624
625        return np.vstack(combined)
626
627    def _ordered_names(self):
628        names = (
629            "smoothing_level",
630            "smoothing_trend",
631            "smoothing_seasonal",
632            "initial_level",
633            "initial_trend",
634            "damping_trend",
635        )
636        m = self.seasonal_periods
637        names += tuple([f"initial_seasonal.{i}" for i in range(m)])
638        return names
639
640    def _update_for_fixed(self, sel, alpha, beta, gamma, phi, l0, b0, s0):
641        if self._fixed_parameters:
642            fixed = self._fixed_parameters
643            names = self._ordered_names()
644            not_fixed = np.array([name not in fixed for name in names])
645            if (~sel[~not_fixed]).any():
646                invalid = []
647                for name, s, nf in zip(names, sel, not_fixed):
648                    if not s and not nf:
649                        invalid.append(name)
650                invalid_names = ", ".join(invalid)
651                raise ValueError(
652                    "Cannot fix a parameter that is not being "
653                    f"estimated: {invalid_names}"
654                )
655
656            sel &= not_fixed
657            alpha = fixed.get("smoothing_level", alpha)
658            beta = fixed.get("smoothing_trend", beta)
659            gamma = fixed.get("smoothing_seasonal", gamma)
660            phi = fixed.get("damping_trend", phi)
661            l0 = fixed.get("initial_level", l0)
            b0 = fixed.get("initial_trend", b0)
663            for i in range(self.seasonal_periods):
664                s0[i] = fixed.get(f"initial_seasonal.{i}", s0[i])
665        return sel, alpha, beta, gamma, phi, l0, b0, s0
666
667    def _construct_bounds(self):
668        trend_lb = 0.0 if self.trend == "mul" else None
669        season_lb = 0.0 if self.seasonal == "mul" else None
670        lvl_lb = None if trend_lb is None and season_lb is None else 0.0
671        bounds = [
672            (0.0, 1.0),  # alpha
673            (0.0, 1.0),  # beta
674            (0.0, 1.0),  # gamma
675            (lvl_lb, None),  # level
676            (trend_lb, None),  # trend
677            (0.8, 0.995),  # phi
678        ]
679        bounds += [(season_lb, None)] * self.seasonal_periods
680        if self._bounds is not None:
681            assert isinstance(self._bounds, dict)
682            for i, name in enumerate(self._ordered_names()):
683                bounds[i] = self._bounds.get(name, bounds[i])
684        # Update bounds to account for fixed parameters
685        fixed = self._fixed_parameters
686        if "smoothing_level" in fixed:
687            # Update bounds if fixed alpha
688            alpha = fixed["smoothing_level"]
689            # beta <= alpha
690            if bounds[1][1] > alpha:
691                bounds[1] = (bounds[1][0], alpha)
692            # gamma <= 1 - alpha
693            if bounds[2][1] > (1 - alpha):
694                bounds[2] = (bounds[2][0], 1 - alpha)
695            # gamma <= 1 - alpha
696        if "smoothing_trend" in fixed:
697            # beta <= alpha
698            beta = fixed["smoothing_trend"]
699            bounds[0] = (max(beta, bounds[0][0]), bounds[0][1])
700        if "smoothing_seasonal" in fixed:
701            gamma = fixed["smoothing_seasonal"]
702            # gamma <= 1 - alpha => alpha <= 1 - gamma
703            bounds[0] = (bounds[0][0], min(1 - gamma, bounds[0][1]))
704        # Ensure bounds are feasible
705        for i, name in enumerate(self._ordered_names()):
706            lb = bounds[i][0] if bounds[i][0] is not None else -np.inf
707            ub = bounds[i][1] if bounds[i][1] is not None else np.inf
708            if lb >= ub:
709                raise ValueError(
                    "After adjusting for user-provided bounds and fixed "
                    f"values, the resulting set of bounds for {name}, "
                    f"{bounds[i]}, is infeasible."
713                )
714        self._check_bound_feasibility(bounds)
715        return bounds
716
717    def _get_starting_values(
718        self,
719        params,
720        start_params,
721        use_brute,
722        sel,
723        hw_args,
724        bounds,
725        alpha,
726        func,
727    ):
728        if start_params is None and use_brute and np.any(sel[:3]):
            # Have a quick look in the region for a good starting place for
            # alpha, beta & gamma using fixed values for the initial states
731            m = self.seasonal_periods
732            sv_sel = np.array([False] * (6 + m))
733            sv_sel[:3] = True
734            sv_sel &= sel
735            hw_args.xi = sv_sel.astype(int)
736            hw_args.transform = False
737            # Setup the grid points, respecting constraints
738            points = self._setup_brute(sv_sel, bounds, alpha)
739            opt = opt_wrapper(func)
740            best_val = np.inf
741            best_params = points[0]
742            for point in points:
743                val = opt(point, hw_args)
744                if val < best_val:
745                    best_params = point
746                    best_val = val
747            params[sv_sel] = best_params
748        elif start_params is not None:
749            if len(start_params) != sel.sum():
750                msg = "start_params must have {0} values but has {1}."
                nxi, nsp = int(sel.sum()), len(start_params)
752                raise ValueError(msg.format(nxi, nsp))
753            params[sel] = start_params
754        return params
755
756    def _optimize_parameters(
757        self, data: _OptConfig, use_brute, method, kwargs
758    ) -> _OptConfig:
759        # Prepare starting values
760        alpha = data.alpha
761        beta = data.beta
762        phi = data.phi
763        gamma = data.gamma
764        initial_level = data.level
765        initial_trend = data.trend
766        y = data.y
767        start_params = data.params
768
769        has_seasonal = self.has_seasonal
770        has_trend = self.has_trend
771        trend = self.trend
772        seasonal = self.seasonal
773        damped_trend = self.damped_trend
774
775        m = self.seasonal_periods
776        params = np.zeros(6 + m)
777        l0, b0, s0 = self.initial_values(
778            initial_level=data.level, initial_trend=data.trend
779        )
780
781        init_alpha = alpha if alpha is not None else 0.5 / max(m, 1)
782        init_beta = beta
783        if beta is None and has_trend:
784            init_beta = 0.1 * init_alpha
785        init_gamma = gamma
786        if has_seasonal and gamma is None:
787            init_gamma = 0.05 * (1 - init_alpha)
788        init_phi = phi if phi is not None else 0.99
789        # Selection of parameters to optimize
790        sel = np.array(
791            [
792                alpha is None,
793                has_trend and beta is None,
794                has_seasonal and gamma is None,
795                initial_level is None,
796                has_trend and initial_trend is None,
797                damped_trend and phi is None,
798            ]
799            + [has_seasonal] * m,
800        )
801        (
802            sel,
803            init_alpha,
804            init_beta,
805            init_gamma,
806            init_phi,
807            l0,
808            b0,
809            s0,
810        ) = self._update_for_fixed(
811            sel, init_alpha, init_beta, init_gamma, init_phi, l0, b0, s0
812        )
813
814        func = SMOOTHERS[(seasonal, trend)]
815        params[:6] = [init_alpha, init_beta, init_gamma, l0, b0, init_phi]
816        if m:
817            params[-m:] = s0
818        if not np.any(sel):
819            from statsmodels.tools.sm_exceptions import EstimationWarning
820
821            message = (
822                "Model has no free parameters to estimate. Set "
823                "optimized=False to suppress this warning"
824            )
825            warnings.warn(message, EstimationWarning)
826            data = data.unpack_parameters(params)
827            data.params = params
828            data.mask = sel
829
830            return data
831        orig_bounds = self._construct_bounds()
832
833        bounds = np.array(orig_bounds[:3], dtype=float)
834        hw_args = HoltWintersArgs(
835            sel.astype(int), params, bounds, y, m, self.nobs
836        )
837        params = self._get_starting_values(
838            params,
839            start_params,
840            use_brute,
841            sel,
842            hw_args,
843            bounds,
844            init_alpha,
845            func,
846        )
847
848        # We always use [0, 1] for a, b and g and handle transform inside
849        mod_bounds = [(0, 1)] * 3 + orig_bounds[3:]
850        relevant_bounds = [bnd for bnd, flag in zip(mod_bounds, sel) if flag]
851        bounds = np.array(relevant_bounds, dtype=float)
852        lb, ub = bounds.T
853        lb[np.isnan(lb)] = -np.inf
854        ub[np.isnan(ub)] = np.inf
855        hw_args.xi = sel.astype(int)
856
857        # Ensure strictly inbounds
858        initial_p = self._enforce_bounds(params, sel, lb, ub)
859        # Transform to unrestricted space
860        params[sel] = initial_p
861        params[:3] = to_unrestricted(params, sel, hw_args.bounds)
862        initial_p = params[sel]
863        # Ensure parameters are transformed internally
864        hw_args.transform = True
865        if method in ("least_squares", "ls"):
866            # Least squares uses a different format for bounds
867            ls_bounds = lb, ub
868            self._check_blocked_keywords(kwargs, ("args", "bounds"))
869            res = least_squares(
870                func, initial_p, bounds=ls_bounds, args=(hw_args,), **kwargs
871            )
872            success = res.success
873        elif method in ("basinhopping", "bh"):
            # Take a deeper look in the local minimum we are in to find the
            # best parameter values, and maybe hop around to try to escape
            # the local minimum we may be in.
877            minimizer_kwargs = {"args": (hw_args,), "bounds": relevant_bounds}
878            kwargs = kwargs.copy()
879            if "minimizer_kwargs" in kwargs:
880                self._check_blocked_keywords(
881                    kwargs["minimizer_kwargs"],
882                    ("args", "bounds"),
883                    name="kwargs['minimizer_kwargs']",
884                )
885                minimizer_kwargs.update(kwargs["minimizer_kwargs"])
886                del kwargs["minimizer_kwargs"]
887            default_kwargs = {
888                "minimizer_kwargs": minimizer_kwargs,
889                "stepsize": 0.01,
890            }
891            default_kwargs.update(kwargs)
892            obj = opt_wrapper(func)
893            res = basinhopping(obj, initial_p, **default_kwargs)
894            success = res.lowest_optimization_result.success
895        else:
896            obj = opt_wrapper(func)
897            self._check_blocked_keywords(kwargs, ("args", "bounds", "method"))
898            res = minimize(
899                obj,
900                initial_p,
901                args=(hw_args,),
902                bounds=relevant_bounds,
903                method=method,
904                **kwargs,
905            )
906            success = res.success
907        # finally transform to restricted space
908        params[sel] = res.x
909        params[:3] = to_restricted(params, sel, hw_args.bounds)
910        res.x = params[sel]
911
912        if not success:
913            from statsmodels.tools.sm_exceptions import ConvergenceWarning
914
915            warnings.warn(
916                "Optimization failed to converge. Check mle_retvals.",
917                ConvergenceWarning,
918            )
919        params[sel] = res.x
920
921        data.unpack_parameters(params)
922        data.params = params
923        data.mask = sel
924        data.mle_retvals = res
925
926        return data
927
928    @deprecate_kwarg("smoothing_slope", "smoothing_trend")
929    @deprecate_kwarg("initial_slope", "initial_trend")
930    @deprecate_kwarg("damping_slope", "damping_trend")
931    def fit(
932        self,
933        smoothing_level=None,
934        smoothing_trend=None,
935        smoothing_seasonal=None,
936        damping_trend=None,
937        *,
938        optimized=True,
939        remove_bias=False,
940        start_params=None,
941        method=None,
942        minimize_kwargs=None,
943        use_brute=True,
944        use_boxcox=None,
945        use_basinhopping=None,
946        initial_level=None,
947        initial_trend=None,
948    ):
949        """
950        Fit the model
951
952        Parameters
953        ----------
        smoothing_level : float, optional
            The alpha value of the simple exponential smoothing. If set, this
            value is used and the parameter is not estimated.
        smoothing_trend : float, optional
            The beta value of Holt's trend method. If set, this value is used
            and the parameter is not estimated.
        smoothing_seasonal : float, optional
            The gamma value of the Holt-Winters seasonal method. If set, this
            value is used and the parameter is not estimated.
        damping_trend : float, optional
            The phi value of the damped method. If set, this value is used
            and the parameter is not estimated.
966        optimized : bool, optional
967            Estimate model parameters by maximizing the log-likelihood.
968        remove_bias : bool, optional
969            Remove bias from forecast values and fitted values by enforcing
970            that the average residual is equal to zero.
971        start_params : array_like, optional
            Starting values to use when optimizing the fit.  If not provided,
973            starting values are determined using a combination of grid search
974            and reasonable values based on the initial values of the data. See
975            the notes for the structure of the model parameters.
        method : str, default "SLSQP"
            The minimizer used. Valid options are "L-BFGS-B", "TNC",
            "SLSQP" (default), "Powell", "trust-constr", "basinhopping" (also
            "bh") and "least_squares" (also "ls"). basinhopping tries multiple
            starting values in an attempt to find a global minimizer in
            non-convex problems, and so is slower than the others.
982        minimize_kwargs : dict[str, Any]
983            A dictionary of keyword arguments passed to SciPy's minimize
984            function if method is one of "L-BFGS-B", "TNC",
985            "SLSQP", "Powell", or "trust-constr", or SciPy's basinhopping
986            or least_squares functions. The valid keywords are optimizer
987            specific. Consult SciPy's documentation for the full set of
988            options.
989        use_brute : bool, optional
990            Search for good starting values using a brute force (grid)
991            optimizer. If False, a naive set of starting values is used.
992        use_boxcox : {True, False, 'log', float}, optional
993            Should the Box-Cox transform be applied to the data first? If 'log'
994            then apply the log. If float then use the value as lambda.
995
996            .. deprecated:: 0.12
997
998               Set use_boxcox when constructing the model
999
        use_basinhopping : bool, optional
            Deprecated. If True, the Basin Hopping optimizer is used to find
            optimal values. Use ``method`` instead.
1003
1004            .. deprecated:: 0.12
1005
1006               Use ``method`` instead.
1007
1008        initial_level : float, optional
1009            Value to use when initializing the fitted level.
1010
1011            .. deprecated:: 0.12
1012
1013               Set initial_level when constructing the model
1014
1015        initial_trend : float, optional
1016            Value to use when initializing the fitted trend.
1017
1018            .. deprecated:: 0.12
1019
1020               Set initial_trend when constructing the model
1021               or set initialization_method.
1022
1023        Returns
1024        -------
1025        HoltWintersResults
1026            See statsmodels.tsa.holtwinters.HoltWintersResults.
1027
1028        Notes
1029        -----
        This is a full implementation of the Holt-Winters exponential
        smoothing as per [1]_. This includes all the unstable methods as well
        as the
1032        stable methods. The implementation of the library covers the
1033        functionality of the R library as much as possible whilst still
1034        being Pythonic.
1035
1036        The parameters are ordered
1037
1038        [alpha, beta, gamma, initial_level, initial_trend, phi]
1039
1040        which are then followed by m seasonal values if the model contains
1041        a seasonal smoother. Any parameter not relevant for the model is
1042        omitted. For example, a model that has a level and a seasonal
1043        component, but no trend and is not damped, would have starting
1044        values
1045
1046        [alpha, gamma, initial_level, s0, s1, ..., s<m-1>]
1047
1048        where sj is the initial value for seasonal component j.
1049
1050        References
1051        ----------
        .. [1] Hyndman, Rob J., and George Athanasopoulos. Forecasting:
            principles and practice. OTexts, 2014.
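
        Examples
        --------
        A minimal sketch on a short synthetic series (the values are purely
        illustrative):

        >>> import numpy as np
        >>> from statsmodels.tsa.holtwinters import ExponentialSmoothing
        >>> rng = np.random.default_rng(0)
        >>> y = 10 + 0.5 * np.arange(60) + rng.standard_normal(60)
        >>> mod = ExponentialSmoothing(
        ...     y, trend="add", initialization_method="estimated"
        ... )
        >>> res = mod.fit(method="least_squares")
        >>> fixed_alpha = mod.fit(smoothing_level=0.2)

        With this specification the free parameters are ordered
        [alpha, beta, initial_level, initial_trend], so ``start_params``
        needs four values:

        >>> res2 = mod.fit(start_params=[0.5, 0.05, 10.0, 0.5])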
1054        """
        # Variable renames to alpha, beta, etc as this helps with following
        # the mathematical notation in general
1057        alpha = float_like(smoothing_level, "smoothing_level", True)
1058        beta = float_like(smoothing_trend, "smoothing_trend", True)
1059        gamma = float_like(smoothing_seasonal, "smoothing_seasonal", True)
1060        phi = float_like(damping_trend, "damping_trend", True)
1061        initial_level = float_like(initial_level, "initial_level", True)
1062        initial_trend = float_like(initial_trend, "initial_trend", True)
1063        start_params = array_like(start_params, "start_params", optional=True)
1064        minimize_kwargs = dict_like(
1065            minimize_kwargs, "minimize_kwargs", optional=True
1066        )
1067        minimize_kwargs = {} if minimize_kwargs is None else minimize_kwargs
1068        use_basinhopping = bool_like(
1069            use_basinhopping, "use_basinhopping", optional=True
1070        )
1071        supported_methods = ("basinhopping", "bh")
1072        supported_methods += ("least_squares", "ls")
1073        supported_methods += (
1074            "L-BFGS-B",
1075            "TNC",
1076            "SLSQP",
1077            "Powell",
1078            "trust-constr",
1079        )
1080        method = string_like(
1081            method,
1082            "method",
1083            options=supported_methods,
1084            lower=False,
1085            optional=True,
1086        )
1087        # TODO: Deprecate initial_level and related parameters from fit
1088        if initial_level is not None or initial_trend is not None:
1089            raise ValueError(
1090                "Initial values were set during model construction. These "
1091                "cannot be changed during fit."
1092            )
1093        if use_boxcox is not None:
1094            raise ValueError(
1095                "use_boxcox was set at model initialization and cannot "
1096                "be changed"
1097            )
1098        elif self._use_boxcox is None:
1099            use_boxcox = False
1100        else:
1101            use_boxcox = self._use_boxcox
1102
1103        if use_basinhopping is not None:
1104            raise ValueError(
1105                "use_basinhopping is deprecated. Set optimization method "
1106                "using 'method'."
1107            )
1108
1109        data = self._data
1110        damped = self.damped_trend
1111        phi = phi if damped else 1.0
1112        if self._use_boxcox is None:
1113            if use_boxcox == "log":
1114                lamda = 0.0
1115                y = boxcox(data, lamda)
1116            elif isinstance(use_boxcox, float):
1117                lamda = use_boxcox
1118                y = boxcox(data, lamda)
1119            elif use_boxcox:
1120                y, lamda = boxcox(data)
1121                # use_boxcox = lamda
1122            else:
1123                y = data.squeeze()
1124        else:
1125            y = self._y
1126
1127        self._y = y
1128        res = _OptConfig()
1129        res.alpha = alpha
1130        res.beta = beta
1131        res.phi = phi
1132        res.gamma = gamma
1133        res.level = initial_level
1134        res.trend = initial_trend
1135        res.seasonal = None
1136        res.y = y
1137        res.params = start_params
1138        res.mle_retvals = res.mask = None
1139        method = "SLSQP" if method is None else method
1140        if optimized:
1141            res = self._optimize_parameters(
1142                res, use_brute, method, minimize_kwargs
1143            )
1144        else:
1145            l0, b0, s0 = self.initial_values(
1146                initial_level=initial_level, initial_trend=initial_trend
1147            )
1148            res.level = l0
1149            res.trend = b0
1150            res.seasonal = s0
1151            if self._fixed_parameters:
1152                fp = self._fixed_parameters
1153                res.alpha = fp.get("smoothing_level", res.alpha)
1154                res.beta = fp.get("smoothing_trend", res.beta)
1155                res.gamma = fp.get("smoothing_seasonal", res.gamma)
1156                res.phi = fp.get("damping_trend", res.phi)
1157                res.level = fp.get("initial_level", res.level)
1158                res.trend = fp.get("initial_trend", res.trend)
1159                res.seasonal = fp.get("initial_seasonal", res.seasonal)
1160
1161        hwfit = self._predict(
1162            h=0,
1163            smoothing_level=res.alpha,
1164            smoothing_trend=res.beta,
1165            smoothing_seasonal=res.gamma,
1166            damping_trend=res.phi,
1167            initial_level=res.level,
1168            initial_trend=res.trend,
1169            initial_seasons=res.seasonal,
1170            use_boxcox=use_boxcox,
1171            remove_bias=remove_bias,
1172            is_optimized=res.mask,
1173        )
1174        hwfit._results.mle_retvals = res.mle_retvals
1175        return hwfit
1176
1177    def initial_values(
1178        self, initial_level=None, initial_trend=None, force=False
1179    ):
1180        """
1181        Compute initial values used in the exponential smoothing recursions.
1182
1183        Parameters
1184        ----------
1185        initial_level : {float, None}
1186            The initial value used for the level component.
1187        initial_trend : {float, None}
1188            The initial value used for the trend component.
1189        force : bool
1190            Force the calculation even if initial values exist.
1191
1192        Returns
1193        -------
1194        initial_level : float
1195            The initial value used for the level component.
1196        initial_trend : {float, None}
1197            The initial value used for the trend component.
1198        initial_seasons : list
1199            The initial values used for the seasonal components.
1200
1201        Notes
1202        -----
        Convenience function that exposes the values used to initialize the
1204        recursions. When optimizing parameters these are used as starting
1205        values.
1206
        The method used to compute the initial values depends on which
        components are included in the model.  In a simple exponential
        smoothing model without trend or seasonal components, the initial
        value is set to the first observation. When a trend is added, the
        trend is initialized either using y[1]/y[0], if multiplicative, or
        y[1]-y[0]. When the
1212        seasonal component is added the initialization adapts to account for
1213        the modified structure.
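
        Examples
        --------
        A small illustration on a synthetic trending series (the values are
        purely illustrative):

        >>> import numpy as np
        >>> from statsmodels.tsa.holtwinters import ExponentialSmoothing
        >>> y = 10.0 + 0.5 * np.arange(24)
        >>> mod = ExponentialSmoothing(
        ...     y, trend="add", initialization_method="estimated"
        ... )
        >>> level, trend, seasonal = mod.initial_values()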
1214        """
1215        if self._initialization_method is not None and not force:
1216            return (
1217                self._initial_level,
1218                self._initial_trend,
1219                self._initial_seasonal,
1220            )
1221        y = self._y
1222        trend = self.trend
1223        seasonal = self.seasonal
1224        has_seasonal = self.has_seasonal
1225        has_trend = self.has_trend
1226        m = self.seasonal_periods
1227        l0 = initial_level
1228        b0 = initial_trend
1229        if has_seasonal:
1230            l0 = y[np.arange(self.nobs) % m == 0].mean() if l0 is None else l0
1231            if b0 is None and has_trend:
1232                # TODO: Fix for short m
1233                lead, lag = y[m : m + m], y[:m]
1234                if trend == "mul":
1235                    b0 = np.exp((np.log(lead.mean()) - np.log(lag.mean())) / m)
1236                else:
1237                    b0 = ((lead - lag) / m).mean()
1238            s0 = list(y[:m] / l0) if seasonal == "mul" else list(y[:m] - l0)
1239        elif has_trend:
1240            l0 = y[0] if l0 is None else l0
1241            if b0 is None:
1242                b0 = y[1] / y[0] if trend == "mul" else y[1] - y[0]
1243            s0 = []
1244        else:
1245            if l0 is None:
1246                l0 = y[0]
1247            b0 = None
1248            s0 = []
1249
1250        return l0, b0, s0
1251
1252    @deprecate_kwarg("smoothing_slope", "smoothing_trend")
1253    @deprecate_kwarg("damping_slope", "damping_trend")
1254    def _predict(
1255        self,
1256        h=None,
1257        smoothing_level=None,
1258        smoothing_trend=None,
1259        smoothing_seasonal=None,
1260        initial_level=None,
1261        initial_trend=None,
1262        damping_trend=None,
1263        initial_seasons=None,
1264        use_boxcox=None,
1265        lamda=None,
1266        remove_bias=None,
1267        is_optimized=None,
1268    ):
1269        """
1270        Helper prediction function
1271
1272        Parameters
1273        ----------
1274        h : int, optional
1275            The number of time steps to forecast ahead.
1276        """
1277        # Variable renames to alpha, beta, etc as this helps with following the
1278        # mathematical notation in general
1279        alpha = smoothing_level
1280        beta = smoothing_trend
1281        gamma = smoothing_seasonal
1282        phi = damping_trend
1283
1284        # Start in sample and out of sample predictions
1285        data = self.endog
1286        damped = self.damped_trend
1287        has_seasonal = self.has_seasonal
1288        has_trend = self.has_trend
1289        trend = self.trend
1290        seasonal = self.seasonal
1291        m = self.seasonal_periods
1292        phi = phi if damped else 1.0
1293        if use_boxcox == "log":
1294            lamda = 0.0
1295            y = boxcox(data, 0.0)
1296        elif isinstance(use_boxcox, float):
1297            lamda = use_boxcox
1298            y = boxcox(data, lamda)
1299        elif use_boxcox:
1300            y, lamda = boxcox(data)
1301        else:
1302            lamda = None
1303            y = data.squeeze()
1304            if np.ndim(y) != 1:
1305                raise NotImplementedError("Only 1 dimensional data supported")
1306        y_alpha = np.zeros((self.nobs,))
1307        y_gamma = np.zeros((self.nobs,))
1308        alphac = 1 - alpha
1309        y_alpha[:] = alpha * y
1310        betac = 1 - beta if beta is not None else 0
1311        gammac = 1 - gamma if gamma is not None else 0
1312        if has_seasonal:
1313            y_gamma[:] = gamma * y
1314        lvls = np.zeros((self.nobs + h + 1,))
1315        b = np.zeros((self.nobs + h + 1,))
1316        s = np.zeros((self.nobs + h + m + 1,))
1317        lvls[0] = initial_level
1318        b[0] = initial_trend
1319        s[:m] = initial_seasons
1320        phi_h = (
1321            np.cumsum(np.repeat(phi, h + 1) ** np.arange(1, h + 1 + 1))
1322            if damped
1323            else np.arange(1, h + 1 + 1)
1324        )
1325        trended = {"mul": np.multiply, "add": np.add, None: lambda l, b: l}[
1326            trend
1327        ]
1328        detrend = {"mul": np.divide, "add": np.subtract, None: lambda l, b: 0}[
1329            trend
1330        ]
1331        dampen = {"mul": np.power, "add": np.multiply, None: lambda b, phi: 0}[
1332            trend
1333        ]
1334        nobs = self.nobs
1335        if seasonal == "mul":
1336            for i in range(1, nobs + 1):
1337                lvls[i] = y_alpha[i - 1] / s[i - 1] + (
1338                    alphac * trended(lvls[i - 1], dampen(b[i - 1], phi))
1339                )
1340                if has_trend:
1341                    b[i] = (beta * detrend(lvls[i], lvls[i - 1])) + (
1342                        betac * dampen(b[i - 1], phi)
1343                    )
1344                s[i + m - 1] = y_gamma[i - 1] / trended(
1345                    lvls[i - 1], dampen(b[i - 1], phi)
1346                ) + (gammac * s[i - 1])
1347            _trend = b[1 : nobs + 1].copy()
1348            season = s[m : nobs + m].copy()
1349            lvls[nobs:] = lvls[nobs]
1350            if has_trend:
1351                b[:nobs] = dampen(b[:nobs], phi)
1352                b[nobs:] = dampen(b[nobs], phi_h)
1353            trend = trended(lvls, b)
1354            s[nobs + m - 1 :] = [
1355                s[(nobs - 1) + j % m] for j in range(h + 1 + 1)
1356            ]
1357            fitted = trend * s[:-m]
1358        elif seasonal == "add":
1359            for i in range(1, nobs + 1):
1360                lvls[i] = (
1361                    y_alpha[i - 1]
1362                    - (alpha * s[i - 1])
1363                    + (alphac * trended(lvls[i - 1], dampen(b[i - 1], phi)))
1364                )
1365                if has_trend:
1366                    b[i] = (beta * detrend(lvls[i], lvls[i - 1])) + (
1367                        betac * dampen(b[i - 1], phi)
1368                    )
1369                s[i + m - 1] = (
1370                    y_gamma[i - 1]
1371                    - (gamma * trended(lvls[i - 1], dampen(b[i - 1], phi)))
1372                    + (gammac * s[i - 1])
1373                )
1374            _trend = b[1 : nobs + 1].copy()
1375            season = s[m : nobs + m].copy()
1376            lvls[nobs:] = lvls[nobs]
1377            if has_trend:
1378                b[:nobs] = dampen(b[:nobs], phi)
1379                b[nobs:] = dampen(b[nobs], phi_h)
1380            trend = trended(lvls, b)
1381            s[nobs + m - 1 :] = [
1382                s[(nobs - 1) + j % m] for j in range(h + 1 + 1)
1383            ]
1384            fitted = trend + s[:-m]
1385        else:
1386            for i in range(1, nobs + 1):
1387                lvls[i] = y_alpha[i - 1] + (
1388                    alphac * trended(lvls[i - 1], dampen(b[i - 1], phi))
1389                )
1390                if has_trend:
1391                    b[i] = (beta * detrend(lvls[i], lvls[i - 1])) + (
1392                        betac * dampen(b[i - 1], phi)
1393                    )
1394            _trend = b[1 : nobs + 1].copy()
1395            season = s[m : nobs + m].copy()
1396            lvls[nobs:] = lvls[nobs]
1397            if has_trend:
1398                b[:nobs] = dampen(b[:nobs], phi)
1399                b[nobs:] = dampen(b[nobs], phi_h)
1400            trend = trended(lvls, b)
1401            fitted = trend
1402        level = lvls[1 : nobs + 1].copy()
1403        if use_boxcox or use_boxcox == "log" or isinstance(use_boxcox, float):
1404            fitted = inv_boxcox(fitted, lamda)
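        # The last h + 1 values of ``fitted`` are out-of-sample forecasts, so
        # exclude them when computing the in-sample errors.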
1405        err = fitted[: -h - 1] - data
1406        sse = err.T @ err
1407        # (s0 + gamma) + (b0 + beta) + (l0 + alpha) + phi
1408        k = m * has_seasonal + 2 * has_trend + 2 + 1 * damped
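        # Information criteria based on the Gaussian likelihood, expressed in
        # terms of the SSE.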
1409        aic = self.nobs * np.log(sse / self.nobs) + k * 2
1410        if self.nobs - k - 3 > 0:
1411            aicc_penalty = (2 * (k + 2) * (k + 3)) / (self.nobs - k - 3)
1412        else:
1413            aicc_penalty = np.inf
1414        aicc = aic + aicc_penalty
1415        bic = self.nobs * np.log(sse / self.nobs) + k * np.log(self.nobs)
1416        resid = data - fitted[: -h - 1]
1417        if remove_bias:
1418            fitted += resid.mean()
1419        self.params = {
1420            "smoothing_level": alpha,
1421            "smoothing_trend": beta,
1422            "smoothing_seasonal": gamma,
1423            "damping_trend": phi if damped else np.nan,
1424            "initial_level": lvls[0],
1425            "initial_trend": b[0] / phi if phi > 0 else 0,
1426            "initial_seasons": s[:m],
1427            "use_boxcox": use_boxcox,
1428            "lamda": lamda,
1429            "remove_bias": remove_bias,
1430        }
1431
1432        # Format parameters into a DataFrame
1433        codes = ["alpha", "beta", "gamma", "l.0", "b.0", "phi"]
1434        codes += ["s.{0}".format(i) for i in range(m)]
1435        idx = [
1436            "smoothing_level",
1437            "smoothing_trend",
1438            "smoothing_seasonal",
1439            "initial_level",
1440            "initial_trend",
1441            "damping_trend",
1442        ]
1443        idx += ["initial_seasons.{0}".format(i) for i in range(m)]
1444
1445        formatted = [alpha, beta, gamma, lvls[0], b[0], phi]
1446        formatted += s[:m].tolist()
1447        formatted = list(map(lambda v: np.nan if v is None else v, formatted))
1448        formatted = np.array(formatted)
1449        if is_optimized is None:
1450            optimized = np.zeros(len(codes), dtype=bool)
1451        else:
1452            optimized = is_optimized.astype(bool)
1453        included = [True, has_trend, has_seasonal, True, has_trend, damped]
1454        included += [True] * m
1455        formatted = pd.DataFrame(
1456            [[c, f, o] for c, f, o in zip(codes, formatted, optimized)],
1457            columns=["name", "param", "optimized"],
1458            index=idx,
1459        )
1460        formatted = formatted.loc[included]
1461
1462        hwfit = HoltWintersResults(
1463            self,
1464            self.params,
1465            fittedfcast=fitted,
1466            fittedvalues=fitted[: -h - 1],
1467            fcastvalues=fitted[-h - 1 :],
1468            sse=sse,
1469            level=level,
1470            trend=_trend,
1471            season=season,
1472            aic=aic,
1473            bic=bic,
1474            aicc=aicc,
1475            resid=resid,
1476            k=k,
1477            params_formatted=formatted,
1478            optimized=optimized,
1479        )
1480        return HoltWintersResultsWrapper(hwfit)
1481
1482
1483class SimpleExpSmoothing(ExponentialSmoothing):
1484    """
1485    Simple Exponential Smoothing
1486
1487    Parameters
1488    ----------
1489    endog : array_like
1490        The time series to model.
1491    initialization_method : str, optional
        Method for initializing the recursions. One of:
1493
1494        * None
1495        * 'estimated'
1496        * 'heuristic'
1497        * 'legacy-heuristic'
1498        * 'known'
1499
1500        None defaults to the pre-0.12 behavior where initial values
1501        are passed as part of ``fit``. If any of the other values are
1502        passed, then the initial values must also be set when constructing
1503        the model. If 'known' initialization is used, then `initial_level`
1504        must be passed, as well as `initial_trend` and `initial_seasonal` if
1505        applicable. Default is 'estimated'. "legacy-heuristic" uses the same
1506        values that were used in statsmodels 0.11 and earlier.
1507    initial_level : float, optional
1508        The initial level component. Required if estimation method is "known".
1509        If set using either "estimated" or "heuristic" this value is used.
1510        This allows one or more of the initial values to be set while
1511        deferring to the heuristic for others or estimating the unset
1512        parameters.
1513
1514    See Also
1515    --------
1516    ExponentialSmoothing
1517        Exponential smoothing with trend and seasonal components.
1518    Holt
1519        Exponential smoothing with a trend component.
1520
1521    Notes
1522    -----
1523    This is a full implementation of the simple exponential smoothing as
1524    per [1]_.  `SimpleExpSmoothing` is a restricted version of
1525    :class:`ExponentialSmoothing`.
1526
1527    References
1528    ----------
1529    .. [1] Hyndman, Rob J., and George Athanasopoulos. Forecasting: principles
1530        and practice. OTexts, 2014.
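
    Examples
    --------
    A minimal sketch of typical usage; the series below is made up purely
    for illustration:

    >>> import pandas as pd
    >>> from statsmodels.tsa.holtwinters import SimpleExpSmoothing
    >>> data = pd.Series([3.0, 4.1, 3.6, 4.4, 4.0, 4.6, 4.2, 4.9, 4.5, 5.1])
    >>> mod = SimpleExpSmoothing(data, initialization_method="estimated")
    >>> res = mod.fit()
    >>> forecast = res.forecast(5)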
1531    """
1532
1533    def __init__(
1534        self,
1535        endog,
1536        initialization_method=None,  # Future: 'estimated',
1537        initial_level=None,
1538    ):
1539        super().__init__(
1540            endog,
1541            initialization_method=initialization_method,
1542            initial_level=initial_level,
1543        )
1544
1545    def fit(
1546        self,
1547        smoothing_level=None,
1548        *,
1549        optimized=True,
1550        start_params=None,
1551        initial_level=None,
1552        use_brute=True,
1553        use_boxcox=None,
1554        remove_bias=False,
1555        method=None,
1556        minimize_kwargs=None,
1557    ):
1558        """
1559        Fit the model
1560
1561        Parameters
1562        ----------
1563        smoothing_level : float, optional
            The smoothing_level value of the simple exponential smoothing. If
            set, this value is used and is not estimated.
1566        optimized : bool, optional
1567            Estimate model parameters by maximizing the log-likelihood.
1568        start_params : ndarray, optional
            Starting values to use when optimizing the fit.  If not provided,
1570            starting values are determined using a combination of grid search
1571            and reasonable values based on the initial values of the data.
1572        initial_level : float, optional
1573            Value to use when initializing the fitted level.
1574        use_brute : bool, optional
1575            Search for good starting values using a brute force (grid)
1576            optimizer. If False, a naive set of starting values is used.
1577        use_boxcox : {True, False, 'log', float}, optional
1578            Should the Box-Cox transform be applied to the data first? If 'log'
1579            then apply the log. If float then use the value as lambda.
1580        remove_bias : bool, optional
1581            Remove bias from forecast values and fitted values by enforcing
1582            that the average residual is equal to zero.
1583        method : str, default "L-BFGS-B"
1584            The minimizer used. Valid options are "L-BFGS-B" (default), "TNC",
1585            "SLSQP", "Powell", "trust-constr", "basinhopping" (also "bh") and
1586            "least_squares" (also "ls"). basinhopping tries multiple starting
1587            values in an attempt to find a global minimizer in non-convex
1588            problems, and so is slower than the others.
1589        minimize_kwargs : dict[str, Any]
1590            A dictionary of keyword arguments passed to SciPy's minimize
1591            function if method is one of "L-BFGS-B" (default), "TNC",
1592            "SLSQP", "Powell", or "trust-constr", or SciPy's basinhopping
1593            or least_squares. The valid keywords are optimizer specific.
1594            Consult SciPy's documentation for the full set of options.
1595
1596        Returns
1597        -------
1598        HoltWintersResults
1599            See statsmodels.tsa.holtwinters.HoltWintersResults.
1600
1601        Notes
1602        -----
        This is a full implementation of the simple exponential smoothing as
        per [1]_.

        References
        ----------
        .. [1] Hyndman, Rob J., and George Athanasopoulos. Forecasting:
            principles and practice. OTexts, 2014.
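
        Examples
        --------
        A minimal sketch on a made-up series; fixing ``smoothing_level``
        leaves only the initial level to be estimated:

        >>> import pandas as pd
        >>> from statsmodels.tsa.holtwinters import SimpleExpSmoothing
        >>> data = pd.Series([3.0, 4.1, 3.6, 4.4, 4.0, 4.6, 4.2, 4.9])
        >>> mod = SimpleExpSmoothing(data, initialization_method="estimated")
        >>> res_fixed = mod.fit(smoothing_level=0.2)
        >>> res_auto = mod.fit()  # alpha chosen by the optimizer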
1610        """
1611        return super().fit(
1612            smoothing_level=smoothing_level,
1613            optimized=optimized,
1614            start_params=start_params,
1615            initial_level=initial_level,
1616            use_brute=use_brute,
1617            remove_bias=remove_bias,
1618            use_boxcox=use_boxcox,
1619            method=method,
1620            minimize_kwargs=minimize_kwargs,
1621        )
1622
1623
1624class Holt(ExponentialSmoothing):
1625    """
1626    Holt's Exponential Smoothing
1627
1628    Parameters
1629    ----------
1630    endog : array_like
1631        The time series to model.
    exponential : bool, optional
        If True, use a multiplicative (exponential) trend rather than the
        default additive trend.
1634    damped_trend : bool, optional
1635        Should the trend component be damped.
1636    initialization_method : str, optional
        Method for initializing the recursions. One of:
1638
1639        * None
1640        * 'estimated'
1641        * 'heuristic'
1642        * 'legacy-heuristic'
1643        * 'known'
1644
1645        None defaults to the pre-0.12 behavior where initial values
1646        are passed as part of ``fit``. If any of the other values are
1647        passed, then the initial values must also be set when constructing
1648        the model. If 'known' initialization is used, then `initial_level`
1649        must be passed, as well as `initial_trend` and `initial_seasonal` if
1650        applicable. Default is 'estimated'. "legacy-heuristic" uses the same
1651        values that were used in statsmodels 0.11 and earlier.
1652    initial_level : float, optional
1653        The initial level component. Required if estimation method is "known".
1654        If set using either "estimated" or "heuristic" this value is used.
1655        This allows one or more of the initial values to be set while
1656        deferring to the heuristic for others or estimating the unset
1657        parameters.
1658    initial_trend : float, optional
1659        The initial trend component. Required if estimation method is "known".
1660        If set using either "estimated" or "heuristic" this value is used.
1661        This allows one or more of the initial values to be set while
1662        deferring to the heuristic for others or estimating the unset
1663        parameters.
1664
1665    See Also
1666    --------
1667    ExponentialSmoothing
1668        Exponential smoothing with trend and seasonal components.
1669    SimpleExpSmoothing
1670        Basic exponential smoothing with only a level component.
1671
1672    Notes
1673    -----
    This is a full implementation of Holt's exponential smoothing as
    per [1]_. `Holt` is a restricted version of :class:`ExponentialSmoothing`.
1676
1677    References
1678    ----------
1679    .. [1] Hyndman, Rob J., and George Athanasopoulos. Forecasting: principles
1680        and practice. OTexts, 2014.
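
    Examples
    --------
    A minimal sketch on a short, trending series made up purely for
    illustration:

    >>> import numpy as np
    >>> import pandas as pd
    >>> from statsmodels.tsa.holtwinters import Holt
    >>> data = pd.Series(np.arange(20.0) + np.sin(np.arange(20.0)))
    >>> mod = Holt(data, damped_trend=True, initialization_method="estimated")
    >>> res = mod.fit()
    >>> forecast = res.forecast(5)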
1681    """
1682
1683    @deprecate_kwarg("damped", "damped_trend")
1684    def __init__(
1685        self,
1686        endog,
1687        exponential=False,
1688        damped_trend=False,
1689        initialization_method=None,  # Future: 'estimated',
1690        initial_level=None,
1691        initial_trend=None,
1692    ):
1693        trend = "mul" if exponential else "add"
1694        super().__init__(
1695            endog,
1696            trend=trend,
1697            damped_trend=damped_trend,
1698            initialization_method=initialization_method,
1699            initial_level=initial_level,
1700            initial_trend=initial_trend,
1701        )
1702
1703    @deprecate_kwarg("smoothing_slope", "smoothing_trend")
1704    @deprecate_kwarg("initial_slope", "initial_trend")
1705    @deprecate_kwarg("damping_slope", "damping_trend")
1706    def fit(
1707        self,
1708        smoothing_level=None,
1709        smoothing_trend=None,
1710        *,
1711        damping_trend=None,
1712        optimized=True,
1713        start_params=None,
1714        initial_level=None,
1715        initial_trend=None,
1716        use_brute=True,
1717        use_boxcox=None,
1718        remove_bias=False,
1719        method=None,
1720        minimize_kwargs=None,
1721    ):
1722        """
1723        Fit the model
1724
1725        Parameters
1726        ----------
        smoothing_level : float, optional
            The alpha value of the simple exponential smoothing. If set, this
            value is used and is not estimated.
        smoothing_trend : float, optional
            The beta value of Holt's trend method. If set, this value is used
            and is not estimated.
        damping_trend : float, optional
            The phi value of the damped method. If set, this value is used
            and is not estimated.
1736        optimized : bool, optional
1737            Estimate model parameters by maximizing the log-likelihood.
1738        start_params : ndarray, optional
            Starting values to use when optimizing the fit.  If not provided,
1740            starting values are determined using a combination of grid search
1741            and reasonable values based on the initial values of the data.
1742        initial_level : float, optional
1743            Value to use when initializing the fitted level.
1744
1745            .. deprecated:: 0.12
1746
1747               Set initial_level when constructing the model
1748
1749        initial_trend : float, optional
1750            Value to use when initializing the fitted trend.
1751
1752            .. deprecated:: 0.12
1753
1754               Set initial_trend when constructing the model
1755
1756        use_brute : bool, optional
1757            Search for good starting values using a brute force (grid)
1758            optimizer. If False, a naive set of starting values is used.
1759        use_boxcox : {True, False, 'log', float}, optional
1760            Should the Box-Cox transform be applied to the data first? If 'log'
1761            then apply the log. If float then use the value as lambda.
1762        remove_bias : bool, optional
1763            Remove bias from forecast values and fitted values by enforcing
1764            that the average residual is equal to zero.
1765        method : str, default "L-BFGS-B"
1766            The minimizer used. Valid options are "L-BFGS-B" (default), "TNC",
1767            "SLSQP", "Powell", "trust-constr", "basinhopping" (also "bh") and
1768            "least_squares" (also "ls"). basinhopping tries multiple starting
1769            values in an attempt to find a global minimizer in non-convex
1770            problems, and so is slower than the others.
1771        minimize_kwargs : dict[str, Any]
1772            A dictionary of keyword arguments passed to SciPy's minimize
1773            function if method is one of "L-BFGS-B" (default), "TNC",
1774            "SLSQP", "Powell", or "trust-constr", or SciPy's basinhopping
1775            or least_squares. The valid keywords are optimizer specific.
1776            Consult SciPy's documentation for the full set of options.
1777
1778        Returns
1779        -------
1780        HoltWintersResults
1781            See statsmodels.tsa.holtwinters.HoltWintersResults.
1782
1783        Notes
1784        -----
        This is a full implementation of Holt's exponential smoothing as
        per [1]_.

        References
        ----------
        .. [1] Hyndman, Rob J., and George Athanasopoulos. Forecasting:
            principles and practice. OTexts, 2014.
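
        Examples
        --------
        A minimal sketch on a made-up trending series; the smoothing
        parameters are fixed while the initial level and trend are still
        estimated:

        >>> import numpy as np
        >>> import pandas as pd
        >>> from statsmodels.tsa.holtwinters import Holt
        >>> data = pd.Series(np.arange(20.0) + np.sin(np.arange(20.0)))
        >>> mod = Holt(data, initialization_method="estimated")
        >>> res = mod.fit(smoothing_level=0.8, smoothing_trend=0.2)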
1792        """
1793        return super().fit(
1794            smoothing_level=smoothing_level,
1795            smoothing_trend=smoothing_trend,
1796            damping_trend=damping_trend,
1797            optimized=optimized,
1798            start_params=start_params,
1799            initial_level=initial_level,
1800            initial_trend=initial_trend,
1801            use_brute=use_brute,
1802            use_boxcox=use_boxcox,
1803            remove_bias=remove_bias,
1804            method=method,
1805            minimize_kwargs=minimize_kwargs,
1806        )
1807