1"""Internal utilities; not for external use"""
2import contextlib
3import functools
4import io
5import itertools
6import os
7import re
8import sys
9import warnings
10from enum import Enum
11from typing import (
12    TYPE_CHECKING,
13    Any,
14    Callable,
15    Collection,
16    Container,
17    Dict,
18    Hashable,
19    Iterable,
20    Iterator,
21    Mapping,
22    MutableMapping,
23    MutableSet,
24    Optional,
25    Sequence,
26    Tuple,
27    TypeVar,
28    Union,
29    cast,
30)
31
32import numpy as np
33import pandas as pd
34
# Generic type variables shared by the helpers in this module.
K = TypeVar("K")
V = TypeVar("V")
T = TypeVar("T")
38
39
def alias_message(old_name: str, new_name: str) -> str:
    """Return the standard deprecation message for a renamed API."""
    return f"{old_name} has been deprecated. Use {new_name} instead."
42
43
def alias_warning(old_name: str, new_name: str, stacklevel: int = 3) -> None:
    """Emit a FutureWarning pointing users from ``old_name`` to ``new_name``."""
    message = alias_message(old_name, new_name)
    warnings.warn(message, FutureWarning, stacklevel=stacklevel)
48
49
def alias(obj: Callable[..., T], old_name: str) -> Callable[..., T]:
    """Wrap ``obj`` so calling it via ``old_name`` emits a deprecation warning.

    Parameters
    ----------
    obj : callable
        The new, preferred callable.
    old_name : str
        The deprecated name the wrapper will be bound to.

    Returns
    -------
    callable
        A wrapper that warns with a FutureWarning, then delegates to ``obj``.
    """
    # raise rather than assert: asserts are stripped under ``python -O``
    if not isinstance(old_name, str):
        raise TypeError("old_name must be a string")

    @functools.wraps(obj)
    def wrapper(*args, **kwargs):
        alias_warning(old_name, obj.__name__)
        return obj(*args, **kwargs)

    # expose the deprecation notice as the wrapper's docstring
    wrapper.__doc__ = alias_message(old_name, obj.__name__)
    return wrapper
60
61
def _maybe_cast_to_cftimeindex(index: pd.Index) -> pd.Index:
    """Promote an object-dtype, non-empty index to a CFTimeIndex when
    possible; otherwise return the input unchanged.
    """
    from ..coding.cftimeindex import CFTimeIndex

    # only non-empty object-dtype indexes are candidates
    if len(index) == 0 or index.dtype != "O":
        return index
    try:
        return CFTimeIndex(index)
    except (ImportError, TypeError):
        return index
72
73
def maybe_cast_to_coords_dtype(label, coords_dtype):
    """Cast ``label`` to a float coordinate dtype, leaving slices untouched."""
    if isinstance(label, slice) or coords_dtype.kind != "f":
        return label
    return np.asarray(label, dtype=coords_dtype)
78
79
def maybe_coerce_to_str(index, original_coords):
    """maybe coerce a pandas Index back to a numpy array of type str

    pd.Index uses object-dtype to store str - try to avoid this for coords
    """
    from . import dtypes

    try:
        result_type = dtypes.result_type(*original_coords)
    except TypeError:
        # no common dtype for the original coords: leave the index as-is
        pass
    else:
        if result_type.kind in "SU":
            # common dtype is a (byte)string: cast back from object dtype
            index = np.asarray(index, dtype=result_type.type)

    return index
96
97
def safe_cast_to_index(array: Any) -> pd.Index:
    """Given an array, safely cast it to a pandas.Index.

    If it is already a pandas.Index, return it unchanged.

    Unlike pandas.Index, if the array has dtype=object or dtype=timedelta64,
    this function will not attempt to do automatic type conversion but will
    always return an index with dtype=object.
    """
    if isinstance(array, pd.Index):
        index = array
    elif hasattr(array, "to_index"):
        # presumably an xarray object exposing its own index conversion
        # (e.g. IndexVariable) — TODO confirm which callers hit this path
        index = array.to_index()
    elif hasattr(array, "to_pandas_index"):
        index = array.to_pandas_index()
    else:
        kwargs = {}
        if hasattr(array, "dtype") and array.dtype.kind == "O":
            # pin dtype=object so pandas does not infer a "better" dtype
            kwargs["dtype"] = object
        index = pd.Index(np.asarray(array), **kwargs)
    return _maybe_cast_to_cftimeindex(index)
119
120
def multiindex_from_product_levels(
    levels: Sequence[pd.Index], names: Optional[Sequence[str]] = None
) -> pd.MultiIndex:
    """Creating a MultiIndex from a product without refactorizing levels.

    Keeping levels the same gives back the original labels when we unstack.

    Parameters
    ----------
    levels : sequence of pd.Index
        Values for each MultiIndex level.
    names : sequence of str, optional
        Names for each level.

    Returns
    -------
    pandas.MultiIndex
    """
    if any(not isinstance(lev, pd.Index) for lev in levels):
        raise TypeError("levels must be a list of pd.Index objects")

    # factorize each level once so the MultiIndex reuses the original values
    split_labels, levels = zip(*[lev.factorize() for lev in levels])
    labels_mesh = np.meshgrid(*split_labels, indexing="ij")
    labels = [x.ravel() for x in labels_mesh]
    # explicit keywords: the positional arg was renamed labels -> codes in pandas
    return pd.MultiIndex(levels=levels, codes=labels, sortorder=0, names=names)
146
147
def maybe_wrap_array(original, new_array):
    """Wrap a transformed array with __array_wrap__ if it can be done safely.

    This lets us treat arbitrary functions that take and return ndarray objects
    like ufuncs, as long as they return an array with the same shape.
    """
    # wrapping is only safe when the result is a same-shaped ndarray,
    # since the transformation may have discarded the original's metadata
    safe_to_wrap = (
        isinstance(new_array, np.ndarray) and new_array.shape == original.shape
    )
    if not safe_to_wrap:
        return new_array
    return original.__array_wrap__(new_array)
159
160
def equivalent(first: T, second: T) -> bool:
    """Compare two objects for equivalence (identity or equality), using
    array_equiv if either object is an ndarray. If both objects are lists,
    equivalent is sequentially called on all the elements.
    """
    # TODO: refactor to avoid circular import
    from . import duck_array_ops

    if isinstance(first, np.ndarray) or isinstance(second, np.ndarray):
        return duck_array_ops.array_equiv(first, second)
    elif isinstance(first, list) or isinstance(second, list):
        return list_equiv(first, second)
    else:
        # scalars: identical, equal, or both null-like (NaN/NaT/None) all
        # count as equivalent
        return (
            (first is second)
            or (first == second)
            or (pd.isnull(first) and pd.isnull(second))
        )
179
180
def list_equiv(first, second):
    """Return True if two sequences have equal length and pairwise
    ``equivalent`` elements.

    Short-circuits on the first non-equivalent pair instead of scanning
    the entire sequence.
    """
    if len(first) != len(second):
        return False
    # all() stops at the first mismatch, unlike the previous accumulate-and-
    # continue implementation
    return all(equivalent(f, s) for f, s in zip(first, second))
189
190
def peek_at(iterable: Iterable[T]) -> Tuple[T, Iterator[T]]:
    """Return the first element of ``iterable`` together with an iterator
    that replays the full sequence, first element included.
    """
    iterator = iter(iterable)
    head = next(iterator)
    # re-attach the consumed element in front of the remaining items
    return head, itertools.chain([head], iterator)
198
199
def update_safety_check(
    first_dict: Mapping[K, V],
    second_dict: Mapping[K, V],
    compat: Callable[[V, V], bool] = equivalent,
) -> None:
    """Raise if updating ``first_dict`` with ``second_dict`` would replace a
    key's value with an incompatible one.

    Parameters
    ----------
    first_dict, second_dict : dict-like
        Every item of the second dictionary is checked for conflicts against
        the first.
    compat : function, optional
        Binary operator to determine if two values are compatible. By default,
        checks for equivalence.

    Raises
    ------
    ValueError
        If any shared key maps to incompatible values.
    """
    for key, new_value in second_dict.items():
        if key in first_dict and not compat(new_value, first_dict[key]):
            raise ValueError(
                "unsafe to merge dictionaries without "
                f"overriding values; conflicting key {key!r}"
            )
226
227
def remove_incompatible_items(
    first_dict: MutableMapping[K, V],
    second_dict: Mapping[K, V],
    compat: Callable[[V, V], bool] = equivalent,
) -> None:
    """Prune ``first_dict`` in-place, keeping only items whose key exists in
    ``second_dict`` with a compatible value.

    Parameters
    ----------
    first_dict, second_dict : dict-like
        Mappings to merge.
    compat : function, optional
        Binary operator to determine if two values are compatible. By default,
        checks for equivalence.
    """
    # collect doomed keys first so we never mutate while iterating
    doomed = [
        k
        for k in first_dict
        if k not in second_dict or not compat(first_dict[k], second_dict[k])
    ]
    for k in doomed:
        del first_dict[k]
249
250
def is_dict_like(value: Any) -> bool:
    """True when ``value`` exposes the minimal mapping interface
    (``keys`` plus ``__getitem__``)."""
    return hasattr(value, "keys") and hasattr(value, "__getitem__")
253
254
def is_full_slice(value: Any) -> bool:
    """True only for the all-inclusive slice ``slice(None)`` (i.e. ``[:]``)."""
    return isinstance(value, slice) and value == slice(None)
257
258
def is_list_like(value: Any) -> bool:
    """True for plain Python lists and tuples (not strings or sets)."""
    return isinstance(value, (list, tuple))
261
262
def is_duck_array(value: Any) -> bool:
    """True for numpy arrays and objects implementing the full duck-array
    protocol (ndim/shape/dtype plus __array_function__/__array_ufunc__)."""
    if isinstance(value, np.ndarray):
        return True
    required = (
        "ndim",
        "shape",
        "dtype",
        "__array_function__",
        "__array_ufunc__",
    )
    return all(hasattr(value, attr) for attr in required)
273
274
def either_dict_or_kwargs(
    pos_kwargs: Optional[Mapping[Any, T]],
    kw_kwargs: Mapping[str, T],
    func_name: str,
) -> Mapping[Hashable, T]:
    """Return whichever of a positional mapping / keyword arguments was
    supplied, raising when both are given or the positional one is invalid.
    """
    if pos_kwargs is None:
        # cast needed because Mapping's key type is invariant; see
        # https://github.com/python/mypy/issues/6228
        return cast(Mapping[Hashable, T], kw_kwargs)

    if not is_dict_like(pos_kwargs):
        raise ValueError(f"the first argument to .{func_name} must be a dictionary")
    if kw_kwargs:
        raise ValueError(
            f"cannot specify both keyword and positional arguments to .{func_name}"
        )
    return pos_kwargs
292
293
def _is_scalar(value, include_0d):
    # Shared implementation behind the two ``is_scalar`` variants defined
    # below (TypeGuard vs plain bool).
    from .variable import NON_NUMPY_SUPPORTED_ARRAY_TYPES

    if include_0d:
        # only treat 0-d duck arrays as scalars when the caller opted in
        include_0d = getattr(value, "ndim", None) == 0
    return (
        include_0d
        or isinstance(value, (str, bytes))
        # anything that is neither iterable, a supported array type, nor a
        # duck array is considered scalar
        or not (
            isinstance(value, (Iterable,) + NON_NUMPY_SUPPORTED_ARRAY_TYPES)
            or hasattr(value, "__array_function__")
        )
    )
307
308
# See GH5624, this is a convoluted way to allow type-checking to use `TypeGuard` without
# requiring typing_extensions as a required dependency to _run_ the code (it is required
# to type-check).
try:
    if sys.version_info >= (3, 10):
        # TypeGuard is in the stdlib from Python 3.10 onwards
        from typing import TypeGuard
    else:
        from typing_extensions import TypeGuard
except ImportError:
    if TYPE_CHECKING:
        # when type-checking, typing_extensions must be importable
        raise
    else:
        # at runtime, fall back to a plain-bool is_scalar

        def is_scalar(value: Any, include_0d: bool = True) -> bool:
            """Whether to treat a value as a scalar.

            Any non-iterable, string, or 0-D array
            """
            return _is_scalar(value, include_0d)


else:
    # TypeGuard available: the annotation narrows the argument's type

    def is_scalar(value: Any, include_0d: bool = True) -> TypeGuard[Hashable]:
        """Whether to treat a value as a scalar.

        Any non-iterable, string, or 0-D array
        """
        return _is_scalar(value, include_0d)
338
339
def is_valid_numpy_dtype(dtype: Any) -> bool:
    """True if ``dtype`` is acceptable to the ``np.dtype`` constructor."""
    try:
        np.dtype(dtype)
        return True
    except (TypeError, ValueError):
        return False
347
348
def to_0d_object_array(value: Any) -> np.ndarray:
    """Given a value, wrap it in a 0-D numpy.ndarray with dtype=object."""
    wrapped = np.empty((), dtype=object)
    # assign via the empty-tuple index so sequences are stored whole
    wrapped[()] = value
    return wrapped
354
355
def to_0d_array(value: Any) -> np.ndarray:
    """Wrap ``value`` in a 0-D numpy.ndarray, keeping a numeric dtype for
    scalars and existing 0-d arrays, and dtype=object otherwise."""
    if isinstance(value, np.ndarray) and value.ndim == 0:
        return np.array(value)
    if np.isscalar(value):
        return np.array(value)
    return to_0d_object_array(value)
362
363
def dict_equiv(
    first: Mapping[K, V],
    second: Mapping[K, V],
    compat: Callable[[V, V], bool] = equivalent,
) -> bool:
    """Test whether two dict-like objects hold the same keys with compatible
    values; numpy-array values are handled correctly via ``compat``.

    Parameters
    ----------
    first, second : dict-like
        Dictionaries to compare for equality
    compat : function, optional
        Binary operator to determine if two values are compatible. By default,
        checks for equivalence.

    Returns
    -------
    equals : bool
        True if the dictionaries are equal
    """
    for key in first:
        if key not in second or not compat(first[key], second[key]):
            return False
    # second must not contain any keys that first lacks
    return all(key in first for key in second)
389
390
def compat_dict_intersection(
    first_dict: Mapping[K, V],
    second_dict: Mapping[K, V],
    compat: Callable[[V, V], bool] = equivalent,
) -> MutableMapping[K, V]:
    """Intersect two dict-likes into a new dict.

    An item survives only when its key appears in both inputs and the two
    values are compatible according to ``compat``.

    Parameters
    ----------
    first_dict, second_dict : dict-like
        Mappings to merge.
    compat : function, optional
        Binary operator to determine if two values are compatible. By default,
        checks for equivalence.

    Returns
    -------
    intersection : dict
        Intersection of the contents.
    """
    intersection = dict(first_dict)
    remove_incompatible_items(intersection, second_dict, compat)
    return intersection
417
418
def compat_dict_union(
    first_dict: Mapping[K, V],
    second_dict: Mapping[K, V],
    compat: Callable[[V, V], bool] = equivalent,
) -> MutableMapping[K, V]:
    """Union two dict-likes into a new dict, raising on conflicting values.

    Parameters
    ----------
    first_dict, second_dict : dict-like
        Mappings to merge.
    compat : function, optional
        Binary operator to determine if two values are compatible. By default,
        checks for equivalence.

    Returns
    -------
    union : dict
        union of the contents.

    Raises
    ------
    ValueError
        If a shared key maps to incompatible values.
    """
    # validate before mutating anything
    update_safety_check(first_dict, second_dict, compat)
    merged = dict(first_dict)
    merged.update(second_dict)
    return merged
446
447
class Frozen(Mapping[K, V]):
    """Read-only view over an arbitrary mapping.

    Mutating operations are simply absent; the wrapped (possibly mutable)
    mapping stays reachable through the ``mapping`` attribute for callers
    that really need to modify it.
    """

    __slots__ = ("mapping",)

    def __init__(self, mapping: Mapping[K, V]):
        self.mapping = mapping

    def __getitem__(self, key: K) -> V:
        return self.mapping[key]

    def __iter__(self) -> Iterator[K]:
        return iter(self.mapping)

    def __len__(self) -> int:
        return len(self.mapping)

    def __contains__(self, key: object) -> bool:
        return key in self.mapping

    def __repr__(self) -> str:
        return f"{type(self).__name__}({self.mapping!r})"
473
474
def FrozenDict(*args, **kwargs) -> Frozen:
    """Build a ``Frozen`` wrapping a dict constructed from the arguments."""
    return Frozen(dict(*args, **kwargs))
477
478
class HybridMappingProxy(Mapping[K, V]):
    """Mapping whose iteration order/length come from a separate key
    collection while item lookup goes through the wrapped mapping.

    Useful to build a mapping from another dict-like object without eagerly
    touching its items, or when only key iteration is actually needed.

    Note: consistency between ``keys`` and ``mapping`` is NOT validated here;
    that is entirely the caller's responsibility.
    """

    __slots__ = ("_keys", "mapping")

    def __init__(self, keys: Collection[K], mapping: Mapping[K, V]):
        self._keys = keys
        self.mapping = mapping

    def __getitem__(self, key: K) -> V:
        # lookups bypass _keys entirely
        return self.mapping[key]

    def __len__(self) -> int:
        return len(self._keys)

    def __iter__(self) -> Iterator[K]:
        return iter(self._keys)
506
507
class OrderedSet(MutableSet[T]):
    """A simple ordered set.

    The API matches the builtin set, but it preserves insertion order of elements, like
    a dict. Note that, unlike in an OrderedDict, equality tests are not order-sensitive.
    """

    # backing dict: keys are the members, values are always None
    _d: Dict[T, None]

    __slots__ = ("_d",)

    def __init__(self, values: Optional[Iterable[T]] = None):
        self._d = {}
        if values is not None:
            self.update(values)

    # Required methods for MutableSet

    def __contains__(self, value: Hashable) -> bool:
        return value in self._d

    def __iter__(self) -> Iterator[T]:
        return iter(self._d)

    def __len__(self) -> int:
        return len(self._d)

    def add(self, value: T) -> None:
        self._d[value] = None

    def discard(self, value: T) -> None:
        # per the set API, discard must NOT raise when the element is absent
        # (remove, provided by MutableSet, is the raising variant)
        self._d.pop(value, None)

    # Additional methods

    def update(self, values: Iterable[T]) -> None:
        for v in values:
            self._d[v] = None

    def __repr__(self) -> str:
        return "{}({!r})".format(type(self).__name__, list(self))
549
550
class NdimSizeLenMixin:
    """Derive ``ndim``, ``size`` and ``__len__`` for any class that already
    exposes a ``shape`` tuple.
    """

    __slots__ = ()

    @property
    def ndim(self: Any) -> int:
        # the number of axes equals the length of the shape tuple
        return len(self.shape)

    @property
    def size(self: Any) -> int:
        # int() so an empty shape () yields 1, matching numpy semantics
        return int(np.prod(self.shape))

    def __len__(self: Any) -> int:
        try:
            return self.shape[0]
        except IndexError:
            raise TypeError("len() of unsized object")
572
573
class NDArrayMixin(NdimSizeLenMixin):
    """Mixin turning an ``array``-holding wrapper into an ndarray-like object
    suitable as the data argument to Variable objects.

    Subclasses set the ``array`` attribute and may override any of ``dtype``,
    ``shape`` and ``__getitem__``.
    """

    __slots__ = ()

    @property
    def dtype(self: Any) -> np.dtype:
        return self.array.dtype

    @property
    def shape(self: Any) -> Tuple[int]:
        return self.array.shape

    def __getitem__(self: Any, key):
        # defer indexing straight to the wrapped array
        return self.array[key]

    def __repr__(self: Any) -> str:
        return f"{type(self).__name__}(array={self.array!r})"
597
598
class ReprObject:
    """Sentinel object that renders as a fixed string, for use in reprs."""

    __slots__ = ("_value",)

    def __init__(self, value: str):
        self._value = value

    def __repr__(self) -> str:
        return self._value

    def __eq__(self, other) -> bool:
        # only other ReprObjects with the same display value compare equal
        return isinstance(other, ReprObject) and self._value == other._value

    def __hash__(self) -> int:
        # hash on (class, value) so subclasses with equal values don't collide
        return hash((type(self), self._value))

    def __dask_tokenize__(self):
        from dask.base import normalize_token

        return normalize_token((type(self), self._value))
622
623
@contextlib.contextmanager
def close_on_error(f):
    """Context manager that closes ``f`` when the wrapped body raises, so a
    file opened by xarray is not leaked before the user ever sees it.
    """
    try:
        yield
    except Exception:
        # close, then let the original exception propagate
        f.close()
        raise
634
635
def is_remote_uri(path: str) -> bool:
    """Finds URLs of the form protocol:// or protocol::

    This also matches for http[s]://, which were the only remote URLs
    supported in <=v0.16.2.
    """
    match = re.search(r"^[a-z][a-z0-9]*(\://|\:\:)", path)
    return match is not None
643
644
def read_magic_number_from_file(filename_or_obj, count=8) -> bytes:
    """Return the first ``count`` bytes of ``filename_or_obj`` to determine
    its file type.

    Accepts raw ``bytes`` or a file-like object positioned at the start;
    file-like objects are rewound to offset 0 afterwards.

    Raises
    ------
    ValueError
        If a file-like object's read pointer is not at the start.
    TypeError
        If the input is neither bytes nor a file-like object.
    """
    if isinstance(filename_or_obj, bytes):
        magic_number = filename_or_obj[:count]
    elif isinstance(filename_or_obj, io.IOBase):
        if filename_or_obj.tell() != 0:
            raise ValueError(
                "cannot guess the engine, "
                "file-like object read/write pointer not at the start of the file, "
                "please close and reopen, or use a context manager"
            )
        magic_number = filename_or_obj.read(count)  # type: ignore
        # rewind so callers can still read the full stream
        filename_or_obj.seek(0)
    else:
        # fix typo in original message: "form" -> "from"
        raise TypeError(f"cannot read the magic number from {type(filename_or_obj)}")
    return magic_number
661
662
def try_read_magic_number_from_path(pathlike, count=8) -> Optional[bytes]:
    """Best-effort read of the first ``count`` bytes behind a path-like
    object; returns None when the input is not a path or cannot be opened.
    """
    if not (isinstance(pathlike, str) or hasattr(pathlike, "__fspath__")):
        return None
    path = os.fspath(pathlike)
    try:
        with open(path, "rb") as f:
            return read_magic_number_from_file(f, count)
    except (FileNotFoundError, TypeError):
        return None
672
673
def try_read_magic_number_from_file_or_path(
    filename_or_obj, count=8
) -> Optional[bytes]:
    """Try ``filename_or_obj`` first as a path, then as bytes/file-like;
    return None if neither attempt yields a magic number.
    """
    magic_number = try_read_magic_number_from_path(filename_or_obj, count)
    if magic_number is not None:
        return magic_number
    try:
        return read_magic_number_from_file(filename_or_obj, count)
    except TypeError:
        return None
684
685
def is_uniform_spaced(arr, **kwargs) -> bool:
    """Return True if values of an array are uniformly spaced and sorted.

    >>> is_uniform_spaced(range(5))
    True
    >>> is_uniform_spaced([-4, 0, 100])
    False

    kwargs are additional arguments to ``np.isclose``
    """
    steps = np.diff(np.array(arr, dtype=float))
    # uniform spacing <=> smallest and largest step are (approximately) equal
    return bool(np.isclose(steps.min(), steps.max(), **kwargs))
699
700
def hashable(v: Any) -> bool:
    """Determine whether `v` can be hashed."""
    try:
        hash(v)
        return True
    except TypeError:
        return False
708
709
def decode_numpy_dict_values(attrs: Mapping[K, V]) -> Dict[K, V]:
    """Return a copy of ``attrs`` with numpy values converted to native
    Python objects (arrays -> lists, numpy scalars -> Python scalars),
    for use in to_dict.
    """
    decoded: Dict[K, V] = {}
    for key, value in attrs.items():
        if isinstance(value, np.ndarray):
            value = value.tolist()
        elif isinstance(value, np.generic):
            value = value.item()
        decoded[key] = value
    return decoded
721
722
def ensure_us_time_resolution(val):
    """Convert numpy datetime64/timedelta64 values to microsecond resolution,
    for use in to_dict. Needed because of numpy bug GH#7619.
    """
    dtype = val.dtype
    if np.issubdtype(dtype, np.datetime64):
        return val.astype("datetime64[us]")
    if np.issubdtype(dtype, np.timedelta64):
        return val.astype("timedelta64[us]")
    # non-time values pass through untouched
    return val
731
732
class HiddenKeyDict(MutableMapping[K, V]):
    """Mutable-mapping wrapper that makes a fixed set of keys inaccessible."""

    __slots__ = ("_data", "_hidden_keys")

    # ``__init__`` method required to create instance from class.

    def __init__(self, data: MutableMapping[K, V], hidden_keys: Iterable[K]):
        self._data = data
        self._hidden_keys = frozenset(hidden_keys)

    def _raise_if_hidden(self, key: K) -> None:
        # every access path funnels through this guard
        if key in self._hidden_keys:
            raise KeyError(f"Key `{key!r}` is hidden.")

    # The next five methods are requirements of the ABC.
    def __setitem__(self, key: K, value: V) -> None:
        self._raise_if_hidden(key)
        self._data[key] = value

    def __getitem__(self, key: K) -> V:
        self._raise_if_hidden(key)
        return self._data[key]

    def __delitem__(self, key: K) -> None:
        self._raise_if_hidden(key)
        del self._data[key]

    def __iter__(self) -> Iterator[K]:
        # hidden keys are skipped during iteration
        yield from (k for k in self._data if k not in self._hidden_keys)

    def __len__(self) -> int:
        # visible length = underlying length minus hidden keys actually present
        return len(self._data) - len(self._hidden_keys & self._data.keys())
769
770
def infix_dims(
    dims_supplied: Collection, dims_all: Collection, missing_dims: str = "raise"
) -> Iterator:
    """
    Resolves a supplied list containing an ellipsis representing other items, to
    a generator with the 'realized' list of all items
    """
    if ... in dims_supplied:
        if len(set(dims_all)) != len(dims_all):
            raise ValueError("Cannot use ellipsis with repeated dims")
        if list(dims_supplied).count(...) > 1:
            raise ValueError("More than one ellipsis supplied")
        # dims not mentioned explicitly are substituted, in original order,
        # at the position of the ellipsis
        other_dims = [d for d in dims_all if d not in dims_supplied]
        existing_dims = drop_missing_dims(dims_supplied, dims_all, missing_dims)
        for d in existing_dims:
            if d is ...:
                yield from other_dims
            else:
                yield d
    else:
        # without an ellipsis the supplied dims must cover dims_all exactly
        existing_dims = drop_missing_dims(dims_supplied, dims_all, missing_dims)
        if set(existing_dims) ^ set(dims_all):
            raise ValueError(
                f"{dims_supplied} must be a permuted list of {dims_all}, unless `...` is included"
            )
        yield from existing_dims
797
798
def get_temp_dimname(dims: Container[Hashable], new_dim: Hashable) -> Hashable:
    """Return a dimension name based on ``new_dim`` that does not collide with
    any existing name in ``dims``, prepending underscores until unique.

    Example1:
        dims: ['a', 'b', 'c']
        new_dim: ['_rolling']
        -> ['_rolling']
    Example2:
        dims: ['a', 'b', 'c', '_rolling']
        new_dim: ['_rolling']
        -> ['__rolling']
    """
    candidate = new_dim
    while candidate in dims:
        candidate = "_" + str(candidate)
    return candidate
815
816
def drop_dims_from_indexers(
    indexers: Mapping[Any, Any],
    dims: Union[list, Mapping[Any, int]],
    missing_dims: str,
) -> Mapping[Hashable, Any]:
    """Depending on the setting of missing_dims, drop any dimensions from indexers that
    are not present in dims.

    Parameters
    ----------
    indexers : dict
    dims : sequence
    missing_dims : {"raise", "warn", "ignore"}
    """
    if missing_dims == "ignore":
        return {key: val for key, val in indexers.items() if key in dims}

    if missing_dims not in ("raise", "warn"):
        raise ValueError(
            f"Unrecognised option {missing_dims} for missing_dims argument"
        )

    invalid = indexers.keys() - set(dims)

    if missing_dims == "raise":
        if invalid:
            raise ValueError(
                f"Dimensions {invalid} do not exist. Expected one or more of {dims}"
            )
        return indexers

    # "warn": emit a warning and return a pruned copy, leaving input untouched
    pruned = dict(indexers)
    if invalid:
        warnings.warn(
            f"Dimensions {invalid} do not exist. Expected one or more of {dims}"
        )
    for key in invalid:
        pruned.pop(key)
    return pruned
863
864
def drop_missing_dims(
    supplied_dims: Collection, dims: Collection, missing_dims: str
) -> Collection:
    """Depending on the setting of missing_dims, drop any dimensions from supplied_dims that
    are not present in dims.

    Parameters
    ----------
    supplied_dims : dict
    dims : sequence
    missing_dims : {"raise", "warn", "ignore"}
    """
    if missing_dims == "raise":
        # an ellipsis placeholder is always allowed
        unknown = {val for val in supplied_dims if val is not ...} - set(dims)
        if unknown:
            raise ValueError(
                f"Dimensions {unknown} do not exist. Expected one or more of {dims}"
            )
        return supplied_dims

    if missing_dims == "warn":
        unknown = set(supplied_dims) - set(dims)
        if unknown:
            warnings.warn(
                f"Dimensions {unknown} do not exist. Expected one or more of {dims}"
            )
        return [val for val in supplied_dims if val in dims or val is ...]

    if missing_dims == "ignore":
        return [val for val in supplied_dims if val in dims or val is ...]

    raise ValueError(
        f"Unrecognised option {missing_dims} for missing_dims argument"
    )
905
906
class UncachedAccessor:
    """Descriptor behaving like ``property`` on instances, but returning the
    raw accessor when looked up on the class itself.

    Necessary because some tools (e.g. pydoc and sphinx) inspect classes,
    and a plain property would hide the accessor from them.
    """

    def __init__(self, accessor):
        self._accessor = accessor

    def __get__(self, obj, cls):
        # class-level access -> the accessor itself;
        # instance-level access -> a fresh accessor bound to the instance
        return self._accessor if obj is None else self._accessor(obj)
923
924
# Singleton type, as per https://github.com/python/typing/pull/240
class Default(Enum):
    # the single member acts as an "argument not supplied" sentinel
    token = 0


# canonical sentinel instance used as a default argument value
_default = Default.token
931
932
def iterate_nested(nested_list):
    """Yield the non-list leaves of arbitrarily nested lists, depth-first."""
    for element in nested_list:
        if isinstance(element, list):
            # recurse into sub-lists, flattening as we go
            yield from iterate_nested(element)
        else:
            yield element
939