from __future__ import annotations

from statsmodels.compat.python import lrange, Literal

import warnings

import numpy as np
import pandas as pd
from pandas import DataFrame
from pandas.tseries import offsets
from pandas.tseries.frequencies import to_offset

from statsmodels.tools.data import _is_recarray, _is_using_pandas
from statsmodels.tools.sm_exceptions import ValueWarning
from statsmodels.tools.validation import (
    array_like,
    bool_like,
    int_like,
    string_like,
)

__all__ = [
    "lagmat",
    "lagmat2ds",
    "add_trend",
    "duplication_matrix",
    "elimination_matrix",
    "commutation_matrix",
    "vec",
    "vech",
    "unvec",
    "unvech",
    "freq_to_period",
    "rename_trend",
]


def add_trend(x, trend="c", prepend=False, has_constant="skip"):
    """
    Add a trend and/or constant to an array.

    Parameters
    ----------
    x : array_like
        Original array of data.
    trend : str {'n', 'c', 't', 'ct', 'ctt'}
        The trend to add.

        * 'n' add no trend.
        * 'c' add constant only.
        * 't' add trend only.
        * 'ct' add constant and linear trend.
        * 'ctt' add constant and linear and quadratic trend.
    prepend : bool
        If True, prepends the new data to the columns of x.
    has_constant : str {'raise', 'add', 'skip'}
        Controls what happens when trend is 'c' and a constant column already
        exists in x. 'raise' will raise an error. 'add' will add a column of
        1s. 'skip' will return the data without change. 'skip' is the default.

    Returns
    -------
    array_like
        The original data with the additional trend columns.  If x is a
        pandas Series or DataFrame, then the trend column names are 'const',
        'trend' and 'trend_squared'.

    See Also
    --------
    statsmodels.tools.tools.add_constant
        Add a constant column to an array.

    Notes
    -----
    The added columns are always ordered ['const', 'trend', 'trend_squared'],
    dropping any that were not requested. There is currently no checking for
    an existing trend.
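
    Examples
    --------
    A minimal illustrative usage; the data values are arbitrary and only the
    shape of the result is shown.

    >>> import numpy as np
    >>> from statsmodels.tsa.tsatools import add_trend
    >>> x = np.random.standard_normal((100, 2))
    >>> add_trend(x, trend="ct").shape  # appends 'const' and 'trend' columns
    (100, 4)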
    """
    prepend = bool_like(prepend, "prepend")
    trend = string_like(trend, "trend", options=("n", "c", "t", "ct", "ctt"))
    has_constant = string_like(
        has_constant, "has_constant", options=("raise", "add", "skip")
    )

    # TODO: could be generalized for a trend of arbitrary order
    columns = ["const", "trend", "trend_squared"]
    if trend == "n":
        return x.copy()
    elif trend == "c":  # handles structured arrays
        columns = columns[:1]
        trendorder = 0
    elif trend == "ct" or trend == "t":
        columns = columns[:2]
        if trend == "t":
            columns = columns[1:2]
        trendorder = 1
    elif trend == "ctt":
        trendorder = 2

    if _is_recarray(x):
        from statsmodels.tools.sm_exceptions import recarray_exception

        raise NotImplementedError(recarray_exception)

    is_pandas = _is_using_pandas(x, None)
    if is_pandas:
        if isinstance(x, pd.Series):
            x = pd.DataFrame(x)
        else:
            x = x.copy()
    else:
        x = np.asanyarray(x)

    nobs = len(x)
    trendarr = np.vander(
        np.arange(1, nobs + 1, dtype=np.float64), trendorder + 1
    )
    # put in order ctt
    trendarr = np.fliplr(trendarr)
    if trend == "t":
        trendarr = trendarr[:, 1]

    if "c" in trend:
        if is_pandas:
            # Mixed type protection
            def safe_is_const(s):
                try:
                    return np.ptp(s) == 0.0 and np.any(s != 0.0)
                except Exception:
                    return False

            col_const = x.apply(safe_is_const, 0)
        else:
            ptp0 = np.ptp(np.asanyarray(x), axis=0)
            col_is_const = ptp0 == 0
            nz_const = col_is_const & (x[0] != 0)
            col_const = nz_const

        if np.any(col_const):
            if has_constant == "raise":
                if x.ndim == 1:
                    base_err = "x is constant."
                else:
                    columns = np.arange(x.shape[1])[col_const]
                    if isinstance(x, pd.DataFrame):
                        columns = x.columns[np.asarray(col_const)]
                    const_cols = ", ".join([str(c) for c in columns])
                    base_err = (
                        "x contains one or more constant columns. Column(s) "
                        f"{const_cols} are constant."
                    )
                msg = (
                    f"{base_err} Adding a constant with trend='{trend}' is "
                    "not allowed."
                )
                raise ValueError(msg)
            elif has_constant == "skip":
                columns = columns[1:]
                trendarr = trendarr[:, 1:]

    order = 1 if prepend else -1
    if is_pandas:
        trendarr = pd.DataFrame(trendarr, index=x.index, columns=columns)
        x = [trendarr, x]
        x = pd.concat(x[::order], axis=1)
    else:
        x = [trendarr, x]
        x = np.column_stack(x[::order])

    return x


def add_lag(x, col=None, lags=1, drop=False, insert=True):
    """
    Returns an array with lags included given an array.

    Parameters
    ----------
    x : array_like
        An array or NumPy ndarray subclass. Can be either a 1d or 2d array
        with observations in columns.
    col : int or None
        The zero-based index of the column to lag. If x is 1d (a single
        column), `col` can be None.
    lags : int
        The number of lags desired.
    drop : bool
        If True, drops the contemporaneous (lag 0) values of the lagged
        variable from the returned array.
    insert : bool or int
        If True, inserts the lagged values after `col`. If False, appends
        the data. If int, inserts the lags at int.

    Returns
    -------
    array : ndarray
        Array with lags

    Notes
    -----
    Trims the array both forward and backward, so that the length of the
    returned array is len(x) - lags. The lags are returned in increasing
    order, i.e., t-1, t-2, ..., t-lags.

    Examples
    --------
    >>> import statsmodels.api as sm
    >>> data = sm.datasets.macrodata.load().data
    >>> data = data[['year', 'quarter', 'realgdp', 'cpi']]
    >>> data = sm.tsa.add_lag(data, col=2, lags=2)  # lag 'realgdp'
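
    A small numeric illustration of the trimming and the lag ordering; the
    values below are arbitrary and shown only for demonstration:

    >>> import numpy as np
    >>> from statsmodels.tsa.tsatools import add_lag
    >>> x = np.arange(6.0)[:, None]
    >>> add_lag(x, lags=2)
    array([[2., 1., 0.],
           [3., 2., 1.],
           [4., 3., 2.],
           [5., 4., 3.]])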
    """
    lags = int_like(lags, "lags")
    drop = bool_like(drop, "drop")
    x = array_like(x, "x", ndim=2)
    if col is None:
        col = 0

    # handle negative index
    if col < 0:
        col = x.shape[1] + col
    if x.ndim == 1:
        x = x[:, None]
    contemp = x[:, col]

    if insert is True:
        ins_idx = col + 1
    elif insert is False:
        ins_idx = x.shape[1]
    else:
        if insert < 0:  # handle negative index
            insert = x.shape[1] + insert + 1
        if insert > x.shape[1]:
            insert = x.shape[1]

            warnings.warn(
                "insert > number of variables, inserting at the"
                " last position",
                ValueWarning,
            )
        ins_idx = insert

    ndlags = lagmat(contemp, lags, trim="both")
    first_cols = lrange(ins_idx)
    last_cols = lrange(ins_idx, x.shape[1])
    if drop:
        if col in first_cols:
            first_cols.pop(first_cols.index(col))
        else:
            last_cols.pop(last_cols.index(col))
    return np.column_stack((x[lags:, first_cols], ndlags, x[lags:, last_cols]))


def detrend(x, order=1, axis=0):
    """
    Detrend an array with a trend of given order along axis 0 or 1.

    Parameters
    ----------
    x : array_like, 1d or 2d
        Data; if 2d, each column (axis=0) or row (axis=1) is detrended
        independently using the same trend order but a separately estimated
        trend.
    order : int
        The polynomial order of the trend, zero is constant, one is
        linear trend, two is quadratic trend.
    axis : int
        Axis can be either 0, observations by rows, or 1, observations by
        columns.

    Returns
    -------
    ndarray
        The detrended series is the residual of the linear regression of the
        data on the trend of given order.
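
    Examples
    --------
    A short illustrative check; the data are an arbitrary exact linear trend,
    so detrending with order=1 leaves residuals of (numerically) zero:

    >>> import numpy as np
    >>> from statsmodels.tsa.tsatools import detrend
    >>> x = 3.0 + 2.0 * np.arange(10.0)
    >>> np.allclose(detrend(x, order=1), 0.0)
    True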
    """
    order = int_like(order, "order")
    axis = int_like(axis, "axis")

    if x.ndim == 2 and int(axis) == 1:
        x = x.T
    elif x.ndim > 2:
        raise NotImplementedError(
            "x.ndim > 2 is not implemented until it is needed"
        )

    nobs = x.shape[0]
    if order == 0:
        # Special case demean
        resid = x - x.mean(axis=0)
    else:
        trends = np.vander(np.arange(float(nobs)), N=order + 1)
        beta = np.linalg.pinv(trends).dot(x)
        resid = x - np.dot(trends, beta)

    if x.ndim == 2 and int(axis) == 1:
        resid = resid.T

    return resid


def lagmat(
    x,
    maxlag: int,
    trim: Literal["forward", "backward", "both", "none"] = "forward",
    original: Literal["ex", "sep", "in"] = "ex",
    use_pandas: bool = False,
):
    """
    Create 2d array of lags.

    Parameters
    ----------
    x : array_like
        Data; if 2d, observation in rows and variables in columns.
    maxlag : int
        All lags from zero to maxlag are included.
    trim : {'forward', 'backward', 'both', 'none', None}
        The trimming method to use.

        * 'forward' : trim invalid observations in front.
        * 'backward' : trim invalid initial observations.
        * 'both' : trim invalid observations on both sides.
        * 'none', None : no trimming of observations.
    original : {'ex','sep','in'}
        How the original is treated.

        * 'ex' : drops the original array returning only the lagged values.
        * 'in' : returns the original array and the lagged values as a single
          array.
        * 'sep' : returns a tuple (original array, lagged values). The original
          array is truncated to have the same number of rows as
          the returned lagmat.
    use_pandas : bool
        If true, returns a DataFrame when the input is a pandas
        Series or DataFrame.  If false, return numpy ndarrays.

    Returns
    -------
    lagmat : ndarray
        The array with lagged observations.
    y : ndarray, optional
        Only returned if original == 'sep'.

    Notes
    -----
    When using a pandas DataFrame or Series with use_pandas=True, trim can only
    be 'forward' or 'both' since it is not possible to consistently extend
    index values.

    Examples
    --------
    >>> from statsmodels.tsa.tsatools import lagmat
    >>> import numpy as np
    >>> X = np.arange(1,7).reshape(-1,2)
    >>> lagmat(X, maxlag=2, trim="forward", original='in')
    array([[ 1.,  2.,  0.,  0.,  0.,  0.],
       [ 3.,  4.,  1.,  2.,  0.,  0.],
       [ 5.,  6.,  3.,  4.,  1.,  2.]])

    >>> lagmat(X, maxlag=2, trim="backward", original='in')
    array([[ 5.,  6.,  3.,  4.,  1.,  2.],
       [ 0.,  0.,  5.,  6.,  3.,  4.],
       [ 0.,  0.,  0.,  0.,  5.,  6.]])

    >>> lagmat(X, maxlag=2, trim="both", original='in')
    array([[ 5.,  6.,  3.,  4.,  1.,  2.]])

    >>> lagmat(X, maxlag=2, trim="none", original='in')
    array([[ 1.,  2.,  0.,  0.,  0.,  0.],
       [ 3.,  4.,  1.,  2.,  0.,  0.],
       [ 5.,  6.,  3.,  4.,  1.,  2.],
       [ 0.,  0.,  5.,  6.,  3.,  4.],
       [ 0.,  0.,  0.,  0.,  5.,  6.]])
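
    When use_pandas=True and the input is a pandas object, the result keeps
    the original index and lag-suffixed column names. An illustrative sketch
    reusing X from above:

    >>> import pandas as pd
    >>> df = pd.DataFrame(X, columns=["a", "b"])
    >>> lags = lagmat(df, maxlag=1, trim="both", use_pandas=True)
    >>> list(lags.columns)
    ['a.L.1', 'b.L.1']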
    """
    maxlag = int_like(maxlag, "maxlag")
    use_pandas = bool_like(use_pandas, "use_pandas")
    trim = string_like(
        trim,
        "trim",
        optional=True,
        options=("forward", "backward", "both", "none"),
    )
    original = string_like(original, "original", options=("ex", "sep", "in"))

    # TODO:  allow list of lags additional to maxlag
    orig = x
    x = array_like(x, "x", ndim=2, dtype=None)
    is_pandas = _is_using_pandas(orig, None) and use_pandas
    trim = "none" if trim is None else trim
    trim = trim.lower()
    if is_pandas and trim in ("none", "backward"):
        raise ValueError(
            "trim cannot be 'none' or 'backward' when used on "
            "Series or DataFrames"
        )

    dropidx = 0
    nobs, nvar = x.shape
    if original in ["ex", "sep"]:
        dropidx = nvar
    if maxlag >= nobs:
        raise ValueError("maxlag should be < nobs")
    lm = np.zeros((nobs + maxlag, nvar * (maxlag + 1)))
    for k in range(0, int(maxlag + 1)):
        lm[
            maxlag - k : nobs + maxlag - k,
            nvar * (maxlag - k) : nvar * (maxlag - k + 1),
        ] = x

    if trim in ("none", "forward"):
        startobs = 0
    elif trim in ("backward", "both"):
        startobs = maxlag
    else:
        raise ValueError("trim option not valid")

    if trim in ("none", "backward"):
        stopobs = len(lm)
    else:
        stopobs = nobs

    if is_pandas:
        x = orig
        x_columns = x.columns if isinstance(x, DataFrame) else [x.name]
        columns = [str(col) for col in x_columns]
        for lag in range(maxlag):
            lag_str = str(lag + 1)
            columns.extend([str(col) + ".L." + lag_str for col in x_columns])
        lm = DataFrame(lm[:stopobs], index=x.index, columns=columns)
        lags = lm.iloc[startobs:]
        if original in ("sep", "ex"):
            leads = lags[x_columns]
            lags = lags.drop(x_columns, axis=1)
    else:
        lags = lm[startobs:stopobs, dropidx:]
        if original == "sep":
            leads = lm[startobs:stopobs, :dropidx]

    if original == "sep":
        return lags, leads
    else:
        return lags


def lagmat2ds(
    x, maxlag0, maxlagex=None, dropex=0, trim="forward", use_pandas=False
):
    """
    Generate lagmatrix for 2d array, columns arranged by variables.

    Parameters
    ----------
    x : array_like
        Data, 2d. Observations in rows and variables in columns.
    maxlag0 : int
        The maximum lag of the first variable; all lags from zero to maxlag0
        are included.
    maxlagex : {None, int}
        The maximum lag of all other variables; all lags from zero to
        maxlagex are included. If None, maxlag0 is used.
    dropex : int
        Exclude the first dropex lags of the other variables. For all
        variables except the first, lags from dropex to maxlagex are included.
    trim : str
        The trimming method to use.

        * 'forward' : trim invalid observations in front.
        * 'backward' : trim invalid initial observations.
        * 'both' : trim invalid observations on both sides.
        * 'none' : no trimming of observations.
    use_pandas : bool
        If true, returns a DataFrame when the input is a pandas
        Series or DataFrame.  If false, return numpy ndarrays.

    Returns
    -------
    ndarray
        The array with lagged observations, columns ordered by variable.

    Notes
    -----
    Inefficient implementation for unequal lags, implemented for convenience.
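
    Examples
    --------
    An illustrative call; the data values are arbitrary and only the shape of
    the result is shown (2 variables, 3 lag columns each including lag 0):

    >>> import numpy as np
    >>> from statsmodels.tsa.tsatools import lagmat2ds
    >>> x = np.column_stack([np.arange(10.0), np.arange(10.0, 20.0)])
    >>> lagmat2ds(x, maxlag0=2).shape
    (10, 6)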
    """
    maxlag0 = int_like(maxlag0, "maxlag0")
    maxlagex = int_like(maxlagex, "maxlagex", optional=True)
    trim = string_like(
        trim,
        "trim",
        optional=True,
        options=("forward", "backward", "both", "none"),
    )
    if maxlagex is None:
        maxlagex = maxlag0
    maxlag = max(maxlag0, maxlagex)
    is_pandas = _is_using_pandas(x, None)

    if x.ndim == 1:
        if is_pandas:
            x = pd.DataFrame(x)
        else:
            x = x[:, None]
    elif x.ndim == 0 or x.ndim > 2:
        raise ValueError("Only supports 1 and 2-dimensional data.")

    nobs, nvar = x.shape

    if is_pandas and use_pandas:
        lags = lagmat(
            x.iloc[:, 0], maxlag, trim=trim, original="in", use_pandas=True
        )
        lagsli = [lags.iloc[:, : maxlag0 + 1]]
        for k in range(1, nvar):
            lags = lagmat(
                x.iloc[:, k], maxlag, trim=trim, original="in", use_pandas=True
            )
            lagsli.append(lags.iloc[:, dropex : maxlagex + 1])
        return pd.concat(lagsli, axis=1)
    elif is_pandas:
        x = np.asanyarray(x)

    lagsli = [
        lagmat(x[:, 0], maxlag, trim=trim, original="in")[:, : maxlag0 + 1]
    ]
    for k in range(1, nvar):
        lagsli.append(
            lagmat(x[:, k], maxlag, trim=trim, original="in")[
                :, dropex : maxlagex + 1
            ]
        )
    return np.column_stack(lagsli)


def vec(mat):
    """Stack the columns of a matrix into a single vector (column-major)."""
    return mat.ravel("F")


def vech(mat):
    """Stack the lower triangle of a square matrix, column by column."""
    # Gets Fortran-order
    return mat.T.take(_triu_indices(len(mat)))


# Flat (raveled) index helpers for tril/triu/diag, suitable for ndarray.take


def _tril_indices(n):
    rows, cols = np.tril_indices(n)
    return rows * n + cols


def _triu_indices(n):
    rows, cols = np.triu_indices(n)
    return rows * n + cols


def _diag_indices(n):
    rows, cols = np.diag_indices(n)
    return rows * n + cols


def unvec(v):
    """Invert vec: reshape a length k*k vector into a (k, k) matrix."""
    k = int(np.sqrt(len(v)))
    assert k * k == len(v)
    return v.reshape((k, k), order="F")


def unvech(v):
    """Invert vech: rebuild a symmetric matrix from its half-vectorization."""
    # quadratic formula, correct fp error
    rows = 0.5 * (-1 + np.sqrt(1 + 8 * len(v)))
    rows = int(np.round(rows))

    result = np.zeros((rows, rows))
    result[np.triu_indices(rows)] = v
    result = result + result.T

    # divide diagonal elements by 2
    result[np.diag_indices(rows)] /= 2

    return result


def duplication_matrix(n):
    """
    Create duplication matrix D_n which satisfies vec(S) = D_n vech(S) for
    symmetric matrix S.

    Parameters
    ----------
    n : int
        The dimension of the symmetric matrix S.

    Returns
    -------
    D_n : ndarray
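
    Examples
    --------
    A quick check of the defining property; S is an arbitrary symmetric
    matrix used purely for illustration:

    >>> import numpy as np
    >>> from statsmodels.tsa.tsatools import duplication_matrix, vec, vech
    >>> s = np.array([[2.0, 1.0], [1.0, 3.0]])
    >>> np.allclose(duplication_matrix(2) @ vech(s), vec(s))
    True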
    """
    n = int_like(n, "n")
    tmp = np.eye(n * (n + 1) // 2)
    return np.array([unvech(x).ravel() for x in tmp]).T


def elimination_matrix(n):
    """
    Create the elimination matrix L_n which satisfies vech(M) = L_n vec(M) for
    any matrix M.

    Parameters
    ----------
    n : int
        The dimension of the square matrix M.

    Returns
    -------
    L_n : ndarray
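
    Examples
    --------
    A quick check of the defining property; M is an arbitrary square matrix
    used purely for illustration:

    >>> import numpy as np
    >>> from statsmodels.tsa.tsatools import elimination_matrix, vec, vech
    >>> m = np.arange(4.0).reshape(2, 2)
    >>> np.allclose(elimination_matrix(2) @ vec(m), vech(m))
    True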
    """
    n = int_like(n, "n")
    vech_indices = vec(np.tril(np.ones((n, n))))
    return np.eye(n * n)[vech_indices != 0]


def commutation_matrix(p, q):
    """
    Create the commutation matrix K_{p,q} satisfying vec(A') = K_{p,q} vec(A)
    for a (p, q) matrix A.

    Parameters
    ----------
    p : int
        The number of rows of A.
    q : int
        The number of columns of A.

    Returns
    -------
    K : ndarray (pq x pq)
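
    Examples
    --------
    A quick check of the defining property; A is an arbitrary 2 x 3 matrix
    used purely for illustration:

    >>> import numpy as np
    >>> from statsmodels.tsa.tsatools import commutation_matrix, vec
    >>> a = np.arange(6.0).reshape(2, 3)
    >>> np.allclose(commutation_matrix(2, 3) @ vec(a), vec(a.T))
    True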
    """
    p = int_like(p, "p")
    q = int_like(q, "q")

    K = np.eye(p * q)
    indices = np.arange(p * q).reshape((p, q), order="F")
    return K.take(indices.ravel(), axis=0)


def _ar_transparams(params):
    """
    Transforms params to induce stationarity/invertibility.

    Parameters
    ----------
    params : array_like
        The AR coefficients

    References
    ----------
    Jones (1980)
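
    Examples
    --------
    An illustrative round trip; the parameter values are arbitrary and the
    check only shows that _ar_invtransparams undoes this transformation:

    >>> import numpy as np
    >>> from statsmodels.tsa.tsatools import (
    ...     _ar_invtransparams, _ar_transparams)
    >>> p = np.array([0.5, -0.3])
    >>> np.allclose(_ar_invtransparams(_ar_transparams(p)), p)
    True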
    """
    newparams = np.tanh(params / 2)
    tmp = np.tanh(params / 2)
    for j in range(1, len(params)):
        a = newparams[j]
        for kiter in range(j):
            tmp[kiter] -= a * newparams[j - kiter - 1]
        newparams[:j] = tmp[:j]
    return newparams


def _ar_invtransparams(params):
    """
    Inverse of the Jones reparameterization

    Parameters
    ----------
    params : array_like
        The transformed AR coefficients
    """
    params = params.copy()
    tmp = params.copy()
    for j in range(len(params) - 1, 0, -1):
        a = params[j]
        for kiter in range(j):
            tmp[kiter] = (params[kiter] + a * params[j - kiter - 1]) / (
                1 - a ** 2
            )
        params[:j] = tmp[:j]
    invarcoefs = 2 * np.arctanh(params)
    return invarcoefs


def _ma_transparams(params):
    """
    Transforms params to induce stationarity/invertibility.

    Parameters
    ----------
    params : ndarray
        The MA coefficients of an (AR)MA model.

    References
    ----------
    Jones (1980)
    """
    newparams = ((1 - np.exp(-params)) / (1 + np.exp(-params))).copy()
    tmp = ((1 - np.exp(-params)) / (1 + np.exp(-params))).copy()

    # levinson-durbin to get macf
    for j in range(1, len(params)):
        b = newparams[j]
        for kiter in range(j):
            tmp[kiter] += b * newparams[j - kiter - 1]
        newparams[:j] = tmp[:j]
    return newparams


def _ma_invtransparams(macoefs):
    """
    Inverse of the Jones reparameterization

    Parameters
    ----------
    macoefs : ndarray
        The transformed MA coefficients

    Notes
    -----
    The input array is modified in place.
    """
    tmp = macoefs.copy()
    for j in range(len(macoefs) - 1, 0, -1):
        b = macoefs[j]
        for kiter in range(j):
            tmp[kiter] = (macoefs[kiter] - b * macoefs[j - kiter - 1]) / (
                1 - b ** 2
            )
        macoefs[:j] = tmp[:j]  # note: this writes into the caller's array
    invmacoefs = -np.log((1 - macoefs) / (1 + macoefs))
    return invmacoefs


def unintegrate_levels(x, d):
    """
    Returns the successive differences needed to unintegrate the series.

    Parameters
    ----------
    x : array_like
        The original series
    d : int
        The number of differences of the differenced series.

    Returns
    -------
    y : array_like
        The increasing differences from 0 to d-1 of the first d elements
        of x.

    See Also
    --------
    unintegrate
    """
    d = int_like(d, "d")
    x = x[:d]
    return np.asarray([np.diff(x, d - i)[0] for i in range(d, 0, -1)])


def unintegrate(x, levels):
    """
    After taking n-differences of a series, return the original series

    Parameters
    ----------
    x : array_like
        The n-th differenced series
    levels : list
        A list of the first-value in each differenced series, for
        [first-difference, second-difference, ..., n-th difference]

    Returns
    -------
    y : array_like
        The original series de-differenced

    Examples
    --------
    >>> x = np.array([1, 3, 9., 19, 8.])
    >>> levels = unintegrate_levels(x, 2)
    >>> levels
    array([ 1.,  2.])
    >>> unintegrate(np.diff(x, 2), levels)
    array([  1.,   3.,   9.,  19.,   8.])
    """
    levels = list(levels)[:]  # copy
    if len(levels) > 1:
        x0 = levels.pop(-1)
        return unintegrate(np.cumsum(np.r_[x0, x]), levels)
    x0 = levels[0]
    return np.cumsum(np.r_[x0, x])


def freq_to_period(freq):
    """
    Convert a pandas frequency to a periodicity

    Parameters
    ----------
    freq : str or offset
        Frequency to convert

    Returns
    -------
    period : int
        Periodicity of freq

    Notes
    -----
    Annual maps to 1, quarterly maps to 4, monthly to 12, weekly to 52,
    daily to 7, business daily to 5, and hourly to 24.
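
    Examples
    --------
    Illustrative conversions, assuming the pandas frequency aliases used
    here ('W-SUN' and 'B') are available:

    >>> from statsmodels.tsa.tsatools import freq_to_period
    >>> freq_to_period("W-SUN")
    52
    >>> freq_to_period("B")
    5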
    """
    if not isinstance(freq, offsets.DateOffset):
        freq = to_offset(freq)  # go ahead and standardize
    freq = freq.rule_code.upper()

    if freq == "A" or freq.startswith(("A-", "AS-")):
        return 1
    elif freq == "Q" or freq.startswith(("Q-", "QS-")):
        return 4
    elif freq == "M" or freq.startswith(("M-", "MS")):
        return 12
    elif freq == "W" or freq.startswith("W-"):
        return 52
    elif freq == "D":
        return 7
    elif freq == "B":
        return 5
    elif freq == "H":
        return 24
    else:  # pragma : no cover
        raise ValueError(
            "freq {} not understood. Please report if you "
            "think this is in error.".format(freq)
        )


def rename_trend(trend: str):
    """Translate the legacy trend string 'nc' to 'n', with a warning."""
    if trend == "nc":
        warnings.warn(
            "trend 'nc' has been renamed to 'n' and support for 'nc' will "
            "be removed after 0.14 is released. Use 'n' to avoid this "
            "warning.",
            FutureWarning,
        )
        return "n"
    return trend