1"""
2This class is defined to override standard pickle functionality
3
4The goals of it follow:
5-Serialize lambdas and nested functions to compiled byte code
6-Deal with main module correctly
7-Deal with other non-serializable objects
8
9It does not include an unpickler, as standard python unpickling suffices.
10
11This module was extracted from the `cloud` package, developed by `PiCloud, Inc.
12<https://web.archive.org/web/20140626004012/http://www.picloud.com/>`_.
13
14Copyright (c) 2012, Regents of the University of California.
15Copyright (c) 2009 `PiCloud, Inc. <https://web.archive.org/web/20140626004012/http://www.picloud.com/>`_.
16All rights reserved.
17
18Redistribution and use in source and binary forms, with or without
19modification, are permitted provided that the following conditions
20are met:
21    * Redistributions of source code must retain the above copyright
22      notice, this list of conditions and the following disclaimer.
23    * Redistributions in binary form must reproduce the above copyright
24      notice, this list of conditions and the following disclaimer in the
25      documentation and/or other materials provided with the distribution.
26    * Neither the name of the University of California, Berkeley nor the
27      names of its contributors may be used to endorse or promote
28      products derived from this software without specific prior written
29      permission.
30
31THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS
32"AS IS" AND ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT
33LIMITED TO, THE IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS FOR
34A PARTICULAR PURPOSE ARE DISCLAIMED. IN NO EVENT SHALL THE COPYRIGHT
35HOLDER OR CONTRIBUTORS BE LIABLE FOR ANY DIRECT, INDIRECT, INCIDENTAL,
36SPECIAL, EXEMPLARY, OR CONSEQUENTIAL DAMAGES (INCLUDING, BUT NOT LIMITED
37TO, PROCUREMENT OF SUBSTITUTE GOODS OR SERVICES; LOSS OF USE, DATA, OR
38PROFITS; OR BUSINESS INTERRUPTION) HOWEVER CAUSED AND ON ANY THEORY OF
39LIABILITY, WHETHER IN CONTRACT, STRICT LIABILITY, OR TORT (INCLUDING
40NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY OUT OF THE USE OF THIS
41SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE.
42"""
43from __future__ import print_function
44
45import dis
46import io
47import itertools
48import logging
49import opcode
50import operator
51import pickle
52import platform
53import struct
54import sys
55import types
56import weakref
57import uuid
58import threading
59
60
61try:
62    from enum import Enum
63except ImportError:
64    Enum = None
65
# cloudpickle is meant for inter process communication: we expect all
# communicating processes to run the same Python version hence we favor
# communication speed over compatibility:
DEFAULT_PROTOCOL = pickle.HIGHEST_PROTOCOL

# Track the provenance of reconstructed dynamic classes to make it possible to
# reconstruct instances from the matching singleton class definition when
# appropriate and preserve the usual "isinstance" semantics of Python objects.
# BY_CLASS weakly maps a class to its tracker id, BY_ID weakly maps the id
# back to the canonical class; both are guarded by the lock below.
_DYNAMIC_CLASS_TRACKER_BY_CLASS = weakref.WeakKeyDictionary()
_DYNAMIC_CLASS_TRACKER_BY_ID = weakref.WeakValueDictionary()
_DYNAMIC_CLASS_TRACKER_LOCK = threading.Lock()

PYPY = platform.python_implementation() == "PyPy"

builtin_code_type = None
if PYPY:
    # builtin-code objects only exist in pypy
    builtin_code_type = type(float.__new__.__code__)

# Python 2 / Python 3 compatibility aliases: Pickler, StringIO and
# string_types resolve to the interpreter-appropriate implementations.
if sys.version_info[0] < 3:  # pragma: no branch
    from pickle import Pickler

    try:
        from cStringIO import StringIO
    except ImportError:
        from StringIO import StringIO
    string_types = (basestring,)  # noqa
    PY3 = False
    PY2 = True
else:
    # Python 3 has no old-style classes; alias ClassType so code paths shared
    # with Python 2 keep working.  The pure-Python _Pickler is imported so the
    # class-level `dispatch` table can be extended (see CloudPickler below).
    types.ClassType = type
    from pickle import _Pickler as Pickler
    from io import BytesIO as StringIO

    string_types = (str,)
    PY3 = True
    PY2 = False
    from importlib._bootstrap import _find_spec

# Weak cache for _extract_code_globals: code object -> set of global names.
_extract_code_globals_cache = weakref.WeakKeyDictionary()
106
107
def _ensure_tracking(class_def):
    """Return the tracker id of ``class_def``, registering it if unseen.

    The lookup and registration happen under the tracker lock so that
    concurrent picklers agree on a single id per dynamic class.
    """
    with _DYNAMIC_CLASS_TRACKER_LOCK:
        tracker_id = _DYNAMIC_CLASS_TRACKER_BY_CLASS.get(class_def)
        if tracker_id is None:
            # First encounter: mint a fresh id and record the mapping in
            # both directions.
            tracker_id = uuid.uuid4().hex
            _DYNAMIC_CLASS_TRACKER_BY_CLASS[class_def] = tracker_id
            _DYNAMIC_CLASS_TRACKER_BY_ID[tracker_id] = class_def
    return tracker_id
116
117
def _lookup_class_or_track(class_tracker_id, class_def):
    """Return the canonical class for a tracker id, registering it if new.

    When ``class_tracker_id`` is None the class is untracked and returned
    unchanged; otherwise the first class registered under that id wins and
    is returned for every subsequent call.
    """
    if class_tracker_id is None:
        return class_def
    with _DYNAMIC_CLASS_TRACKER_LOCK:
        canonical = _DYNAMIC_CLASS_TRACKER_BY_ID.setdefault(
            class_tracker_id, class_def
        )
        _DYNAMIC_CLASS_TRACKER_BY_CLASS[canonical] = class_tracker_id
    return canonical
126
127
if sys.version_info[:2] >= (3, 5):
    # Python 3.5+: pickle._getattribute already returns (attribute, parent).
    from pickle import _getattribute
elif sys.version_info[:2] >= (3, 4):
    from pickle import _getattribute as _py34_getattribute

    #  pickle._getattribute does not return the parent under Python 3.4
    def _getattribute(obj, name):
        # Normalize to the 3.5+ (attribute, parent) shape; the parent cannot
        # be recovered here, so it is reported as None.
        return _py34_getattribute(obj, name), None


else:
    # pickle._getattribute is a python3 addition and enhancement of getattr,
    # that can handle dotted attribute names. In cloudpickle for python2,
    # handling dotted names is not needed, so we simply define _getattribute as
    # a wrapper around getattr.
    def _getattribute(obj, name):
        return getattr(obj, name, None), None
145
146
147def _whichmodule(obj, name):
148    """Find the module an object belongs to.
149
150    This function differs from ``pickle.whichmodule`` in two ways:
151    - it does not mangle the cases where obj's module is __main__ and obj was
152      not found in any module.
153    - Errors arising during module introspection are ignored, as those errors
154      are considered unwanted side effects.
155    """
156    module_name = getattr(obj, "__module__", None)
157    if module_name is not None:
158        return module_name
159    # Protect the iteration by using a list copy of sys.modules against dynamic
160    # modules that trigger imports of other modules upon calls to getattr.
161    for module_name, module in list(sys.modules.items()):
162        if module_name == "__main__" or module is None:
163            continue
164        try:
165            if _getattribute(module, name)[0] is obj:
166                return module_name
167        except Exception:
168            pass
169    return None
170
171
def _is_global(obj, name=None):
    """Determine if obj can be pickled as attribute of a file-backed module"""
    if name is None:
        name = getattr(obj, "__qualname__", None)
    if name is None:
        name = getattr(obj, "__name__", None)

    module_name = _whichmodule(obj, name)
    if module_name is None:
        # obj.__module__ is None AND obj was not found in any imported
        # module: treat it as dynamic.
        return False
    if module_name == "__main__":
        return False

    module = sys.modules.get(module_name, None)
    if module is None:
        # The module is most likely dynamically created (e.g. with
        # types.ModuleType) and was never imported.  A module removed from
        # sys.modules after obj's creation is also unsupported, matching
        # standard pickle behavior.
        return False
    if _is_dynamic(module):
        # Registered in sys.modules, but not file-backed.
        return False

    try:
        found, _parent = _getattribute(module, name)
    except AttributeError:
        # obj was not found inside the module it points to
        return False
    return found is obj
208
209
def _extract_code_globals(co):
    """Return the set of global names read or written by code object ``co``.

    Results are memoized in a weak-keyed module cache so repeated pickling
    of functions sharing code objects stays cheap.
    """
    cached = _extract_code_globals_cache.get(co)
    if cached is not None:
        return cached

    names = co.co_names
    out_names = {names[oparg] for _, oparg in _walk_global_ops(co)}

    # A nested "def ..." statement stores the inner function's code object
    # in co_consts.  That inner code may reference globals of its own, so
    # recurse into every constant that is itself a code object and merge
    # its globals into the result.
    for const in co.co_consts or ():
        if isinstance(const, types.CodeType):
            out_names |= _extract_code_globals(const)

    _extract_code_globals_cache[co] = out_names
    return out_names
233
234
235def _find_imported_submodules(code, top_level_dependencies):
236    """
237    Find currently imported submodules used by a function.
238
239    Submodules used by a function need to be detected and referenced for the
240    function to work correctly at depickling time. Because submodules can be
241    referenced as attribute of their parent package (``package.submodule``), we
242    need a special introspection technique that does not rely on GLOBAL-related
243    opcodes to find references of them in a code object.
244
245    Example:
246    ```
247    import concurrent.futures
248    import cloudpickle
249    def func():
250        x = concurrent.futures.ThreadPoolExecutor
251    if __name__ == '__main__':
252        cloudpickle.dumps(func)
253    ```
254    The globals extracted by cloudpickle in the function's state include the
255    concurrent package, but not its submodule (here, concurrent.futures), which
256    is the module used by func. Find_imported_submodules will detect the usage
257    of concurrent.futures. Saving this module alongside with func will ensure
258    that calling func once depickled does not fail due to concurrent.futures
259    not being imported
260    """
261
262    subimports = []
263    # check if any known dependency is an imported package
264    for x in top_level_dependencies:
265        if (
266            isinstance(x, types.ModuleType)
267            and hasattr(x, "__package__")
268            and x.__package__
269        ):
270            # check if the package has any currently loaded sub-imports
271            prefix = x.__name__ + "."
272            # A concurrent thread could mutate sys.modules,
273            # make sure we iterate over a copy to avoid exceptions
274            for name in list(sys.modules):
275                # Older versions of pytest will add a "None" module to
276                # sys.modules.
277                if name is not None and name.startswith(prefix):
278                    # check whether the function can address the sub-module
279                    tokens = set(name[len(prefix) :].split("."))
280                    if not tokens - set(code.co_names):
281                        subimports.append(sys.modules[name])
282    return subimports
283
284
def cell_set(cell, value):
    """Set the value of a closure cell.

    This installs ``cell_contents`` after the cell already exists, which is
    needed when a cell must reference the very function being reconstructed:
    ``types.FunctionType(code, globals, name, argdefs, closure)`` cannot
    receive a closure containing the not-yet-created function.

    On Python 3.7+ ``cell_contents`` is directly writeable.  On older
    interpreters the attribute is read-only, so we instead build a function
    from a specially crafted code object whose single STORE_DEREF targets a
    free variable, and pass ``(cell,)`` as its closure: calling it stores
    ``value`` straight into the caller's cell.  See
    ``_make_cell_set_template_code`` for how that code object is produced
    (the template approach avoids ``nonlocal`` syntax, which would be a
    syntax error if this file were parsed by a Python 2 interpreter).
    """
    if sys.version_info[:2] >= (3, 7):  # pragma: no branch
        cell.cell_contents = value
        return
    setter = types.FunctionType(
        _cell_set_template_code, {}, "_cell_set", (), (cell,)
    )
    setter(value)
349
350
def _make_cell_set_template_code():
    """Build a code object whose execution performs ``cell = value``.

    ``_cell_set_factory`` below contains exactly the STORE_DEREF opcode we
    need, but there ``cell`` is a *cell* variable (the factory owns it).  We
    clone its code object while moving ``co_cellvars`` into ``co_freevars``
    so that ``cell`` becomes a free variable instead: a function created from
    the clone with an explicit one-cell closure then stores directly into the
    caller-supplied cell (see ``cell_set``).
    """

    def _cell_set_factory(value):
        lambda: cell
        cell = value

    co = _cell_set_factory.__code__

    if PY2:  # pragma: no branch
        # Python 2 CodeType signature: no co_kwonlyargcount slot.
        _cell_set_template_code = types.CodeType(
            co.co_argcount,
            co.co_nlocals,
            co.co_stacksize,
            co.co_flags,
            co.co_code,
            co.co_consts,
            co.co_names,
            co.co_varnames,
            co.co_filename,
            co.co_name,
            co.co_firstlineno,
            co.co_lnotab,
            co.co_cellvars,  # co_freevars is initialized with co_cellvars
            (),  # co_cellvars is made empty
        )
    else:
        _cell_set_template_code = types.CodeType(
            co.co_argcount,
            co.co_kwonlyargcount,  # Python 3 only argument
            co.co_nlocals,
            co.co_stacksize,
            co.co_flags,
            co.co_code,
            co.co_consts,
            co.co_names,
            co.co_varnames,
            co.co_filename,
            co.co_name,
            co.co_firstlineno,
            co.co_lnotab,
            co.co_cellvars,  # co_freevars is initialized with co_cellvars
            (),  # co_cellvars is made empty
        )
    return _cell_set_template_code
394
395
# The STORE_DEREF template is only needed on interpreters where
# cell_contents is read-only (see cell_set).
if sys.version_info[:2] < (3, 7):
    _cell_set_template_code = _make_cell_set_template_code()

# relevant opcodes
STORE_GLOBAL = opcode.opmap["STORE_GLOBAL"]
DELETE_GLOBAL = opcode.opmap["DELETE_GLOBAL"]
LOAD_GLOBAL = opcode.opmap["LOAD_GLOBAL"]
GLOBAL_OPS = (STORE_GLOBAL, DELETE_GLOBAL, LOAD_GLOBAL)
HAVE_ARGUMENT = dis.HAVE_ARGUMENT
EXTENDED_ARG = dis.EXTENDED_ARG


# Map every type object exposed by the `types` module back to its attribute
# name there, so such types can be pickled by name (see _builtin_type).
_BUILTIN_TYPE_NAMES = {}
for k, v in types.__dict__.items():
    if type(v) is type:
        _BUILTIN_TYPE_NAMES[v] = k
412
413
414def _builtin_type(name):
415    return getattr(types, name)
416
417
if sys.version_info < (3, 4):  # pragma: no branch

    def _walk_global_ops(code):
        """
        Yield (opcode, argument number) tuples for all
        global-referencing instructions in *code*.

        Manual bytecode walk for interpreters that predate
        dis.get_instructions; handles EXTENDED_ARG prefixes explicitly.
        """
        code = getattr(code, "co_code", b"")
        if PY2:  # pragma: no branch
            # Python 2 co_code is a str: convert to a list of ints so the
            # indexing below matches Python 3 bytes behavior.
            code = map(ord, code)

        n = len(code)
        i = 0
        extended_arg = 0
        while i < n:
            op = code[i]
            i += 1
            if op >= HAVE_ARGUMENT:
                # Pre-3.6 wordcode: two little-endian argument bytes follow
                # the opcode, combined with any pending EXTENDED_ARG prefix.
                oparg = code[i] + code[i + 1] * 256 + extended_arg
                extended_arg = 0
                i += 2
                if op == EXTENDED_ARG:
                    extended_arg = oparg * 65536
                if op in GLOBAL_OPS:
                    yield op, oparg


else:

    def _walk_global_ops(code):
        """
        Yield (opcode, argument number) tuples for all
        global-referencing instructions in *code*.
        """
        # dis.get_instructions (3.4+) already resolves EXTENDED_ARG for us.
        for instr in dis.get_instructions(code):
            op = instr.opcode
            if op in GLOBAL_OPS:
                yield op, instr.arg
456
457
458def _extract_class_dict(cls):
459    """Retrieve a copy of the dict of a class without the inherited methods"""
460    clsdict = dict(cls.__dict__)  # copy dict proxy to a dict
461    if len(cls.__bases__) == 1:
462        inherited_dict = cls.__bases__[0].__dict__
463    else:
464        inherited_dict = {}
465        for base in reversed(cls.__bases__):
466            inherited_dict.update(base.__dict__)
467    to_remove = []
468    for name, value in clsdict.items():
469        try:
470            base_value = inherited_dict[name]
471            if value is base_value:
472                to_remove.append(name)
473        except KeyError:
474            pass
475    for name in to_remove:
476        clsdict.pop(name)
477    return clsdict
478
479
480class CloudPickler(Pickler):
481
482    dispatch = Pickler.dispatch.copy()
483
484    def __init__(self, file, protocol=None):
485        if protocol is None:
486            protocol = DEFAULT_PROTOCOL
487        Pickler.__init__(self, file, protocol=protocol)
488        # map ids to dictionary. used to ensure that functions can share global env
489        self.globals_ref = {}
490
491    def dump(self, obj):
492        self.inject_addons()
493        try:
494            return Pickler.dump(self, obj)
495        except RuntimeError as e:
496            if "recursion" in e.args[0]:
497                msg = """Could not pickle object as excessively deep recursion required."""
498                raise pickle.PicklingError(msg)
499            else:
500                raise
501
502    def save_memoryview(self, obj):
503        self.save(obj.tobytes())
504
505    dispatch[memoryview] = save_memoryview
506
    if PY2:  # pragma: no branch

        def save_buffer(self, obj):
            # Python 2 only: pickle the legacy `buffer` type through its
            # string copy, analogous to save_memoryview above.
            self.save(str(obj))

        dispatch[buffer] = save_buffer  # noqa: F821 'buffer' was removed in Python 3
513
514    def save_module(self, obj):
515        """
516        Save a module as an import
517        """
518        if _is_dynamic(obj):
519            self.save_reduce(dynamic_subimport, (obj.__name__, vars(obj)), obj=obj)
520        else:
521            self.save_reduce(subimport, (obj.__name__,), obj=obj)
522
523    dispatch[types.ModuleType] = save_module
524
525    def save_codeobject(self, obj):
526        """
527        Save a code object
528        """
529        if PY3:  # pragma: no branch
530            if hasattr(obj, "co_posonlyargcount"):  # pragma: no branch
531                args = (
532                    obj.co_argcount,
533                    obj.co_posonlyargcount,
534                    obj.co_kwonlyargcount,
535                    obj.co_nlocals,
536                    obj.co_stacksize,
537                    obj.co_flags,
538                    obj.co_code,
539                    obj.co_consts,
540                    obj.co_names,
541                    obj.co_varnames,
542                    obj.co_filename,
543                    obj.co_name,
544                    obj.co_firstlineno,
545                    obj.co_lnotab,
546                    obj.co_freevars,
547                    obj.co_cellvars,
548                )
549            else:
550                args = (
551                    obj.co_argcount,
552                    obj.co_kwonlyargcount,
553                    obj.co_nlocals,
554                    obj.co_stacksize,
555                    obj.co_flags,
556                    obj.co_code,
557                    obj.co_consts,
558                    obj.co_names,
559                    obj.co_varnames,
560                    obj.co_filename,
561                    obj.co_name,
562                    obj.co_firstlineno,
563                    obj.co_lnotab,
564                    obj.co_freevars,
565                    obj.co_cellvars,
566                )
567        else:
568            args = (
569                obj.co_argcount,
570                obj.co_nlocals,
571                obj.co_stacksize,
572                obj.co_flags,
573                obj.co_code,
574                obj.co_consts,
575                obj.co_names,
576                obj.co_varnames,
577                obj.co_filename,
578                obj.co_name,
579                obj.co_firstlineno,
580                obj.co_lnotab,
581                obj.co_freevars,
582                obj.co_cellvars,
583            )
584        self.save_reduce(types.CodeType, args, obj=obj)
585
586    dispatch[types.CodeType] = save_codeobject
587
588    def save_function(self, obj, name=None):
589        """ Registered with the dispatch to handle all function types.
590
591        Determines what kind of function obj is (e.g. lambda, defined at
592        interactive prompt, etc) and handles the pickling appropriately.
593        """
594        if _is_global(obj, name=name):
595            return Pickler.save_global(self, obj, name=name)
596        elif PYPY and isinstance(obj.__code__, builtin_code_type):
597            return self.save_pypy_builtin_func(obj)
598        else:
599            return self.save_function_tuple(obj)
600
601    dispatch[types.FunctionType] = save_function
602
603    def save_pypy_builtin_func(self, obj):
604        """Save pypy equivalent of builtin functions.
605
606        PyPy does not have the concept of builtin-functions. Instead,
607        builtin-functions are simple function instances, but with a
608        builtin-code attribute.
609        Most of the time, builtin functions should be pickled by attribute. But
610        PyPy has flaky support for __qualname__, so some builtin functions such
611        as float.__new__ will be classified as dynamic. For this reason only,
612        we created this special routine. Because builtin-functions are not
613        expected to have closure or globals, there is no additional hack
614        (compared the one already implemented in pickle) to protect ourselves
615        from reference cycles. A simple (reconstructor, newargs, obj.__dict__)
616        tuple is save_reduced.
617
618        Note also that PyPy improved their support for __qualname__ in v3.6, so
619        this routing should be removed when cloudpickle supports only PyPy 3.6
620        and later.
621        """
622        rv = (
623            types.FunctionType,
624            (obj.__code__, {}, obj.__name__, obj.__defaults__, obj.__closure__),
625            obj.__dict__,
626        )
627        self.save_reduce(*rv, obj=obj)
628
629    def _save_dynamic_enum(self, obj, clsdict):
630        """Special handling for dynamic Enum subclasses
631
632        Use a dedicated Enum constructor (inspired by EnumMeta.__call__) as the
633        EnumMeta metaclass has complex initialization that makes the Enum
634        subclasses hold references to their own instances.
635        """
636        members = dict((e.name, e.value) for e in obj)
637
638        # Python 2.7 with enum34 can have no qualname:
639        qualname = getattr(obj, "__qualname__", None)
640
641        self.save_reduce(
642            _make_skeleton_enum,
643            (
644                obj.__bases__,
645                obj.__name__,
646                qualname,
647                members,
648                obj.__module__,
649                _ensure_tracking(obj),
650                None,
651            ),
652            obj=obj,
653        )
654
655        # Cleanup the clsdict that will be passed to _rehydrate_skeleton_class:
656        # Those attributes are already handled by the metaclass.
657        for attrname in [
658            "_generate_next_value_",
659            "_member_names_",
660            "_member_map_",
661            "_member_type_",
662            "_value2member_map_",
663        ]:
664            clsdict.pop(attrname, None)
665        for member in members:
666            clsdict.pop(member)
667
    def save_dynamic_class(self, obj):
        """Save a class that can't be stored as module global.

        This method is used to serialize classes that are defined inside
        functions, or that otherwise can't be serialized as attribute lookups
        from global modules.

        The emitted stream is effectively
        ``_rehydrate_skeleton_class(skeleton_class, clsdict)``, written with
        explicit MARK/TUPLE/REDUCE opcodes so the skeleton class can be
        memoized before its __dict__ is saved (see the comment below).
        """
        clsdict = _extract_class_dict(obj)
        clsdict.pop("__weakref__", None)

        # For ABCMeta in python3.7+, remove _abc_impl as it is not picklable.
        # This is a fix which breaks the cache but this only makes the first
        # calls to issubclass slower.
        if "_abc_impl" in clsdict:
            import abc

            (registry, _, _, _) = abc._get_dump(obj)
            clsdict["_abc_impl"] = [subclass_weakref() for subclass_weakref in registry]

        # On PyPy, __doc__ is a readonly attribute, so we need to include it in
        # the initial skeleton class.  This is safe because we know that the
        # doc can't participate in a cycle with the original class.
        type_kwargs = {"__doc__": clsdict.pop("__doc__", None)}

        if hasattr(obj, "__slots__"):
            type_kwargs["__slots__"] = obj.__slots__
            # pickle string length optimization: member descriptors of obj are
            # created automatically from obj's __slots__ attribute, no need to
            # save them in obj's state
            if isinstance(obj.__slots__, string_types):
                # A bare string __slots__ declares a single slot of that name.
                clsdict.pop(obj.__slots__)
            else:
                for k in obj.__slots__:
                    clsdict.pop(k, None)

        # If type overrides __dict__ as a property, include it in the type
        # kwargs. In Python 2, we can't set this attribute after construction.
        __dict__ = clsdict.pop("__dict__", None)
        if isinstance(__dict__, property):
            type_kwargs["__dict__"] = __dict__

        save = self.save
        write = self.write

        # We write pickle instructions explicitly here to handle the
        # possibility that the type object participates in a cycle with its own
        # __dict__. We first write an empty "skeleton" version of the class and
        # memoize it before writing the class' __dict__ itself. We then write
        # instructions to "rehydrate" the skeleton class by restoring the
        # attributes from the __dict__.
        #
        # A type can appear in a cycle with its __dict__ if an instance of the
        # type appears in the type's __dict__ (which happens for the stdlib
        # Enum class), or if the type defines methods that close over the name
        # of the type, (which is common for Python 2-style super() calls).

        # Push the rehydration function.
        save(_rehydrate_skeleton_class)

        # Mark the start of the args tuple for the rehydration function.
        write(pickle.MARK)

        # Create and memoize a skeleton class with obj's name and bases.
        if Enum is not None and issubclass(obj, Enum):
            # Special handling of Enum subclasses.  Note that this also prunes
            # metaclass-managed entries from clsdict before it is saved below.
            self._save_dynamic_enum(obj, clsdict)
        else:
            # "Regular" class definition:
            tp = type(obj)
            self.save_reduce(
                _make_skeleton_class,
                (
                    tp,
                    obj.__name__,
                    obj.__bases__,
                    type_kwargs,
                    _ensure_tracking(obj),
                    None,
                ),
                obj=obj,
            )

        # Now save the rest of obj's __dict__. Any references to obj
        # encountered while saving will point to the skeleton class.
        save(clsdict)

        # Write a tuple of (skeleton_class, clsdict).
        write(pickle.TUPLE)

        # Call _rehydrate_skeleton_class(skeleton_class, clsdict)
        write(pickle.REDUCE)
759
    def save_function_tuple(self, func):
        """  Pickles an actual func object.

        A func comprises: code, globals, defaults, closure, and dict.  We
        extract and save these, injecting reducing functions at certain points
        to recreate the func object.  Keep in mind that some of these pieces
        can contain a ref to the func itself.  Thus, a naive save on these
        pieces could trigger an infinite loop of save's.  To get around that,
        we first create a skeleton func object using just the code (this is
        safe, since this won't contain a ref to the func), and memoize it as
        soon as it's created.  The other stuff can then be filled in later.
        """
        if is_tornado_coroutine(func):
            # Tornado coroutines are rebuilt from their wrapped function.
            self.save_reduce(_rebuild_tornado_coroutine, (func.__wrapped__,), obj=func)
            return

        save = self.save
        write = self.write

        code, f_globals, defaults, closure_values, dct, base_globals = self.extract_func_data(
            func
        )

        save(_fill_function)  # skeleton function updater
        write(pickle.MARK)  # beginning of tuple that _fill_function expects

        # Extract currently-imported submodules used by func. Storing these
        # modules in a synthetic _cloudpickle_subimports attribute of the
        # object's state will trigger the side effect of importing these
        # modules at unpickling time (which is necessary for func to work
        # correctly once depickled)
        submodules = _find_imported_submodules(
            code, itertools.chain(f_globals.values(), closure_values or ())
        )

        # create a skeleton function object and memoize it.  Memoizing before
        # saving the state below is what breaks func-referencing cycles.
        save(_make_skel_func)
        save(
            (
                code,
                # -1 is the sentinel for "no closure at all" (as opposed to
                # an empty closure of length 0).
                len(closure_values) if closure_values is not None else -1,
                base_globals,
            )
        )
        write(pickle.REDUCE)
        self.memoize(func)

        # save the rest of the func data needed by _fill_function
        state = {
            "globals": f_globals,
            "defaults": defaults,
            "dict": dct,
            "closure_values": closure_values,
            "module": func.__module__,
            "name": func.__name__,
            "doc": func.__doc__,
            "_cloudpickle_submodules": submodules,
        }
        if hasattr(func, "__annotations__") and sys.version_info >= (3, 7):
            # Although annotations were added in Python3.4, It is not possible
            # to properly pickle them until Python3.7. (See #193)
            state["annotations"] = func.__annotations__
        if hasattr(func, "__qualname__"):
            state["qualname"] = func.__qualname__
        if hasattr(func, "__kwdefaults__"):
            state["kwdefaults"] = func.__kwdefaults__
        save(state)
        write(pickle.TUPLE)
        write(pickle.REDUCE)  # applies _fill_function on the tuple
829
830    def extract_func_data(self, func):
831        """
832        Turn the function into a tuple of data necessary to recreate it:
833            code, globals, defaults, closure_values, dict
834        """
835        code = func.__code__
836
837        # extract all global ref's
838        func_global_refs = _extract_code_globals(code)
839
840        # process all variables referenced by global environment
841        f_globals = {}
842        for var in func_global_refs:
843            if var in func.__globals__:
844                f_globals[var] = func.__globals__[var]
845
846        # defaults requires no processing
847        defaults = func.__defaults__
848
849        # process closure
850        closure = (
851            list(map(_get_cell_contents, func.__closure__))
852            if func.__closure__ is not None
853            else None
854        )
855
856        # save the dict
857        dct = func.__dict__
858
859        # base_globals represents the future global namespace of func at
860        # unpickling time. Looking it up and storing it in globals_ref allow
861        # functions sharing the same globals at pickling time to also
862        # share them once unpickled, at one condition: since globals_ref is
863        # an attribute of a Cloudpickler instance, and that a new CloudPickler is
864        # created each time pickle.dump or pickle.dumps is called, functions
865        # also need to be saved within the same invokation of
866        # cloudpickle.dump/cloudpickle.dumps (for example: cloudpickle.dumps([f1, f2])). There
867        # is no such limitation when using Cloudpickler.dump, as long as the
868        # multiple invokations are bound to the same Cloudpickler.
869        base_globals = self.globals_ref.setdefault(id(func.__globals__), {})
870
871        if base_globals == {}:
872            # Add module attributes used to resolve relative imports
873            # instructions inside func.
874            for k in ["__package__", "__name__", "__path__", "__file__"]:
875                # Some built-in functions/methods such as object.__new__  have
876                # their __globals__ set to None in PyPy
877                if func.__globals__ is not None and k in func.__globals__:
878                    base_globals[k] = func.__globals__[k]
879
880        return (code, f_globals, defaults, closure, dct, base_globals)
881
    if not PY3:  # pragma: no branch
        # Python3 comes with native reducers that allow builtin functions and
        # methods pickling as module/class attributes.  The following method
        # extends this for python2.
        # Please note that currently, neither pickle nor cloudpickle support
        # dynamically created builtin functions/method pickling.
        def save_builtin_function_or_method(self, obj):
            # Reduce a builtin callable to a getattr() from the object (or
            # class) that owns it, so it is re-fetched by name at load time.
            is_bound = getattr(obj, "__self__", None) is not None
            if is_bound:
                # obj is a bound builtin method.
                rv = (getattr, (obj.__self__, obj.__name__))
                return self.save_reduce(obj=obj, *rv)

            is_unbound = hasattr(obj, "__objclass__")
            if is_unbound:
                # obj is an unbound builtin method (accessed from its class)
                rv = (getattr, (obj.__objclass__, obj.__name__))
                return self.save_reduce(obj=obj, *rv)

            # Otherwise, obj is not a method, but a function. Fallback to
            # default pickling by attribute.
            return Pickler.save_global(self, obj)

        dispatch[types.BuiltinFunctionType] = save_builtin_function_or_method

        # A comprehensive summary of the various kinds of builtin methods can
        # be found in PEP 579: https://www.python.org/dev/peps/pep-0579/
        classmethod_descriptor_type = type(float.__dict__["fromhex"])
        wrapper_descriptor_type = type(float.__repr__)
        method_wrapper_type = type(1.5 .__repr__)

        dispatch[classmethod_descriptor_type] = save_builtin_function_or_method
        dispatch[wrapper_descriptor_type] = save_builtin_function_or_method
        dispatch[method_wrapper_type] = save_builtin_function_or_method

    if sys.version_info[:2] < (3, 4):
        # NOTE(review): on Python 3.0-3.3 this branch would reference
        # save_builtin_function_or_method, which is only defined in the
        # `not PY3` branch above, and so would raise NameError. Presumably
        # only Python 2 reaches here in practice -- confirm supported versions.
        method_descriptor = type(str.upper)
        dispatch[method_descriptor] = save_builtin_function_or_method
920
921    def save_getset_descriptor(self, obj):
922        return self.save_reduce(getattr, (obj.__objclass__, obj.__name__))
923
924    dispatch[types.GetSetDescriptorType] = save_getset_descriptor
925
926    def save_global(self, obj, name=None, pack=struct.pack):
927        """
928        Save a "global".
929
930        The name of this method is somewhat misleading: all types get
931        dispatched here.
932        """
933        if obj is type(None):  # noqa
934            return self.save_reduce(type, (None,), obj=obj)
935        elif obj is type(Ellipsis):
936            return self.save_reduce(type, (Ellipsis,), obj=obj)
937        elif obj is type(NotImplemented):
938            return self.save_reduce(type, (NotImplemented,), obj=obj)
939        elif obj in _BUILTIN_TYPE_NAMES:
940            return self.save_reduce(_builtin_type, (_BUILTIN_TYPE_NAMES[obj],), obj=obj)
941        elif name is not None:
942            Pickler.save_global(self, obj, name=name)
943        elif not _is_global(obj, name=name):
944            self.save_dynamic_class(obj)
945        else:
946            Pickler.save_global(self, obj, name=name)
947
948    dispatch[type] = save_global
949    dispatch[types.ClassType] = save_global
950
951    def save_instancemethod(self, obj):
952        # Memoization rarely is ever useful due to python bounding
953        if obj.__self__ is None:
954            self.save_reduce(getattr, (obj.im_class, obj.__name__))
955        else:
956            if PY3:  # pragma: no branch
957                self.save_reduce(
958                    types.MethodType, (obj.__func__, obj.__self__), obj=obj
959                )
960            else:
961                self.save_reduce(
962                    types.MethodType,
963                    (obj.__func__, obj.__self__, type(obj.__self__)),
964                    obj=obj,
965                )
966
967    dispatch[types.MethodType] = save_instancemethod
968
    def save_inst(self, obj):
        """Inner logic to save instance. Based off pickle.save_inst

        Python 2 only (see the registration below): serializes old-style
        class instances using the MARK/OBJ/INST/BUILD pickle opcodes,
        mirroring the stdlib implementation but honoring this pickler's
        extended dispatch table for the class itself.
        """
        cls = obj.__class__

        # Try the dispatch table (pickle module doesn't do it)
        f = self.dispatch.get(cls)
        if f:
            f(self, obj)  # Call unbound method with explicit self
            return

        memo = self.memo
        write = self.write
        save = self.save

        if hasattr(obj, "__getinitargs__"):
            args = obj.__getinitargs__()
            len(args)  # XXX Assert it's a sequence
            # keep args alive so their memo ids are not recycled mid-pickle
            pickle._keep_alive(args, memo)
        else:
            args = ()

        write(pickle.MARK)

        if self.bin:
            # binary protocol: push class + init args, then OBJ
            save(cls)
            for arg in args:
                save(arg)
            write(pickle.OBJ)
        else:
            # text protocol: INST opcode carries module and class name inline
            for arg in args:
                save(arg)
            write(pickle.INST + cls.__module__ + "\n" + cls.__name__ + "\n")

        self.memoize(obj)

        try:
            getstate = obj.__getstate__
        except AttributeError:
            stuff = obj.__dict__
        else:
            stuff = getstate()
            pickle._keep_alive(stuff, memo)
        # BUILD applies the state to the instance created above
        save(stuff)
        write(pickle.BUILD)

    if PY2:  # pragma: no branch
        # Old-style class instances only exist on Python 2.
        dispatch[types.InstanceType] = save_inst
1016
1017    def save_property(self, obj):
1018        # properties not correctly saved in python
1019        self.save_reduce(property, (obj.fget, obj.fset, obj.fdel, obj.__doc__), obj=obj)
1020
1021    dispatch[property] = save_property
1022
1023    def save_classmethod(self, obj):
1024        orig_func = obj.__func__
1025        self.save_reduce(type(obj), (orig_func,), obj=obj)
1026
    # Both wrapper kinds share the reducer; type(obj) disambiguates at load.
    dispatch[classmethod] = save_classmethod
    dispatch[staticmethod] = save_classmethod
1029
1030    def save_itemgetter(self, obj):
1031        """itemgetter serializer (needed for namedtuple support)"""
1032
1033        class Dummy:
1034            def __getitem__(self, item):
1035                return item
1036
1037        items = obj(Dummy())
1038        if not isinstance(items, tuple):
1039            items = (items,)
1040        return self.save_reduce(operator.itemgetter, items)
1041
    # Only register when operator.itemgetter is a real type (it may be a
    # plain function on some alternative implementations).
    if type(operator.itemgetter) is type:
        dispatch[operator.itemgetter] = save_itemgetter
1044
1045    def save_attrgetter(self, obj):
1046        """attrgetter serializer"""
1047
1048        class Dummy(object):
1049            def __init__(self, attrs, index=None):
1050                self.attrs = attrs
1051                self.index = index
1052
1053            def __getattribute__(self, item):
1054                attrs = object.__getattribute__(self, "attrs")
1055                index = object.__getattribute__(self, "index")
1056                if index is None:
1057                    index = len(attrs)
1058                    attrs.append(item)
1059                else:
1060                    attrs[index] = ".".join([attrs[index], item])
1061                return type(self)(attrs, index)
1062
1063        attrs = []
1064        obj(Dummy(attrs))
1065        return self.save_reduce(operator.attrgetter, tuple(attrs))
1066
    # Only register when operator.attrgetter is a real type (it may be a
    # plain function on some alternative implementations).
    if type(operator.attrgetter) is type:
        dispatch[operator.attrgetter] = save_attrgetter
1069
    def save_file(self, obj):
        """Save a readable file by snapshotting its name and contents into an
        in-memory StringIO replacement.

        Standard output/error are pickled by reference (they rebind to the
        receiving process's streams); stdin, closed files, ttys and
        write-only files are rejected.
        """
        try:
            import StringIO as pystringIO  # we can't use cStringIO as it lacks the name attribute
        except ImportError:
            import io as pystringIO

        if not hasattr(obj, "name") or not hasattr(obj, "mode"):
            raise pickle.PicklingError(
                "Cannot pickle files that do not map to an actual file"
            )
        if obj is sys.stdout:
            return self.save_reduce(getattr, (sys, "stdout"), obj=obj)
        if obj is sys.stderr:
            return self.save_reduce(getattr, (sys, "stderr"), obj=obj)
        if obj is sys.stdin:
            raise pickle.PicklingError("Cannot pickle standard input")
        if obj.closed:
            raise pickle.PicklingError("Cannot pickle closed files")
        if hasattr(obj, "isatty") and obj.isatty():
            raise pickle.PicklingError("Cannot pickle files that map to tty objects")
        if "r" not in obj.mode and "+" not in obj.mode:
            raise pickle.PicklingError(
                "Cannot pickle files that are not opened for reading: %s" % obj.mode
            )

        name = obj.name

        retval = pystringIO.StringIO()

        try:
            # Read the whole file, restoring the caller's position afterwards
            curloc = obj.tell()
            obj.seek(0)
            contents = obj.read()
            obj.seek(curloc)
        except IOError:
            raise pickle.PicklingError(
                "Cannot pickle file %s as it cannot be read" % name
            )
        retval.write(contents)
        # position the replacement at the same offset as the original
        retval.seek(curloc)

        retval.name = name
        # pickle the in-memory replacement, then memoize the original file
        # object against it so later references reuse the same pickled value
        self.save(retval)
        self.memoize(obj)
1116
    def save_ellipsis(self, obj):
        # Rebuild the Ellipsis singleton via the module-level helper.
        self.save_reduce(_gen_ellipsis, ())
1119
    def save_not_implemented(self, obj):
        # Rebuild the NotImplemented singleton via the module-level helper.
        self.save_reduce(_gen_not_implemented, ())
1122
    # `file` only exists on Python 2; Python 3 text files are io.TextIOWrapper.
    try:  # Python 2
        dispatch[file] = save_file
    except NameError:  # Python 3  # pragma: no branch
        dispatch[io.TextIOWrapper] = save_file

    dispatch[type(Ellipsis)] = save_ellipsis
    dispatch[type(NotImplemented)] = save_not_implemented
1130
1131    def save_weakset(self, obj):
1132        self.save_reduce(weakref.WeakSet, (list(obj),))
1133
1134    dispatch[weakref.WeakSet] = save_weakset
1135
1136    def save_logger(self, obj):
1137        self.save_reduce(logging.getLogger, (obj.name,), obj=obj)
1138
1139    dispatch[logging.Logger] = save_logger
1140
1141    def save_root_logger(self, obj):
1142        self.save_reduce(logging.getLogger, (), obj=obj)
1143
1144    dispatch[logging.RootLogger] = save_root_logger
1145
    if hasattr(types, "MappingProxyType"):  # pragma: no branch

        def save_mappingproxy(self, obj):
            # A mappingproxy is rebuilt from a plain-dict copy of its contents.
            self.save_reduce(types.MappingProxyType, (dict(obj),), obj=obj)

        dispatch[types.MappingProxyType] = save_mappingproxy
1152
1153    """Special functions for Add-on libraries"""
1154
1155    def inject_addons(self):
1156        """Plug in system. Register additional pickling functions if modules already loaded"""
1157        pass
1158
1159
1160# Tornado support
1161
1162
def is_tornado_coroutine(func):
    """
    Return whether *func* is a Tornado coroutine function.
    Running coroutines are not supported.
    """
    gen = sys.modules.get("tornado.gen")
    if gen is None:
        # tornado.gen was never imported, so func cannot be one of its coroutines
        return False
    checker = getattr(gen, "is_coroutine_function", None)
    if checker is None:
        # Tornado version is too old to expose the predicate
        return False
    return checker(func)
1175
1176
def _rebuild_tornado_coroutine(func):
    """Re-wrap the unwrapped *func* with tornado's @gen.coroutine at load time."""
    from tornado import gen

    return gen.coroutine(func)
1181
1182
1183# Shorthands for legacy support
1184
1185
def dump(obj, file, protocol=None):
    """Serialize obj as bytes streamed into file

    protocol defaults to cloudpickle.DEFAULT_PROTOCOL which is an alias to
    pickle.HIGHEST_PROTOCOL. This setting favors maximum communication speed
    between processes running the same Python version.

    Set protocol=pickle.DEFAULT_PROTOCOL instead if you need to ensure
    compatibility with older versions of Python.
    """
    pickler = CloudPickler(file, protocol=protocol)
    pickler.dump(obj)
1197
1198
def dumps(obj, protocol=None):
    """Serialize obj as a string of bytes allocated in memory

    protocol defaults to cloudpickle.DEFAULT_PROTOCOL which is an alias to
    pickle.HIGHEST_PROTOCOL. This setting favors maximum communication speed
    between processes running the same Python version.

    Set protocol=pickle.DEFAULT_PROTOCOL instead if you need to ensure
    compatibility with older versions of Python.
    """
    buffer = StringIO()
    try:
        pickler = CloudPickler(buffer, protocol=protocol)
        pickler.dump(obj)
        return buffer.getvalue()
    finally:
        buffer.close()
1216
1217
# Re-export the standard unpickling entry points: cloudpickle output is
# readable by the stock pickle module, so no custom unpickler is needed.
load = pickle.load
loads = pickle.loads
1221
1222
# hack for __import__ not working as desired
def subimport(name):
    """Import *name* and return the module object itself.

    ``__import__`` returns the top-level package for dotted names, so the
    actual (sub)module is fetched from ``sys.modules`` instead.
    """
    __import__(name)
    return sys.modules[name]
1227
1228
def dynamic_subimport(name, vars):
    """Recreate a dynamic (non-importable) module *name*, populating its
    namespace from the mapping *vars*."""
    module = types.ModuleType(name)
    module.__dict__.update(vars)
    return module
1233
1234
def _gen_ellipsis():
    """Reconstructor for the Ellipsis singleton (used by save_ellipsis)."""
    return Ellipsis


def _gen_not_implemented():
    """Reconstructor for the NotImplemented singleton (used by save_not_implemented)."""
    return NotImplemented
1241
1242
1243def _get_cell_contents(cell):
1244    try:
1245        return cell.cell_contents
1246    except ValueError:
1247        # sentinel used by ``_fill_function`` which will leave the cell empty
1248        return _empty_cell_value
1249
1250
def instance(cls):
    """Create a new instance of a class.

    Parameters
    ----------
    cls : type
        The class to create an instance of.

    Returns
    -------
    instance : cls
        A new instance of ``cls``.

    Notes
    -----
    Also usable as a class decorator, replacing the class statement's name
    with a singleton instance of that class.
    """
    return cls()
1265
1266
@instance
class _empty_cell_value(object):
    """Singleton sentinel marking an empty closure cell."""

    @classmethod
    def __reduce__(cls):
        # Pickle as a bare global name so unpickling resolves back to this
        # exact singleton (cls is the class: @instance replaced the class
        # statement with an instance, and __reduce__ is a classmethod).
        return cls.__name__
1275
1276
def _fill_function(*args):
    """Fills in the rest of function data into the skeleton function object

    The skeleton itself is created by _make_skel_func().
    """
    # Dispatch on arity to stay loadable for pickles produced by older
    # cloudpickle versions, which passed the state positionally.
    if len(args) == 2:
        func = args[0]
        state = args[1]
    elif len(args) == 5:
        # Backwards compat for cloudpickle v0.4.0, after which the `module`
        # argument was introduced
        func = args[0]
        keys = ["globals", "defaults", "dict", "closure_values"]
        state = dict(zip(keys, args[1:]))
    elif len(args) == 6:
        # Backwards compat for cloudpickle v0.4.1, after which the function
        # state was passed as a dict to the _fill_function it-self.
        func = args[0]
        keys = ["globals", "defaults", "dict", "module", "closure_values"]
        state = dict(zip(keys, args[1:]))
    else:
        raise ValueError("Unexpected _fill_value arguments: %r" % (args,))

    # - At pickling time, any dynamic global variable used by func is
    #   serialized by value (in state['globals']).
    # - At unpickling time, func's __globals__ attribute is initialized by
    #   first retrieving an empty isolated namespace that will be shared
    #   with other functions pickled from the same original module
    #   by the same CloudPickler instance and then updated with the
    #   content of state['globals'] to populate the shared isolated
    #   namespace with all the global variables that are specifically
    #   referenced for this function.
    func.__globals__.update(state["globals"])

    func.__defaults__ = state["defaults"]
    func.__dict__ = state["dict"]
    # Optional attributes: only present when the pickling side recorded them.
    if "annotations" in state:
        func.__annotations__ = state["annotations"]
    if "doc" in state:
        func.__doc__ = state["doc"]
    if "name" in state:
        func.__name__ = state["name"]
    if "module" in state:
        func.__module__ = state["module"]
    if "qualname" in state:
        func.__qualname__ = state["qualname"]
    if "kwdefaults" in state:
        func.__kwdefaults__ = state["kwdefaults"]
    # _cloudpickle_subimports is a set of submodules that must be loaded for
    # the pickled function to work correctly at unpickling time. Now that these
    # submodules are depickled (hence imported), they can be removed from the
    # object's state (the object state only served as a reference holder to
    # these submodules)
    if "_cloudpickle_submodules" in state:
        state.pop("_cloudpickle_submodules")

    # Fill the (pre-created, empty) closure cells with the pickled values.
    cells = func.__closure__
    if cells is not None:
        for cell, value in zip(cells, state["closure_values"]):
            if value is not _empty_cell_value:
                cell_set(cell, value)

    return func
1340
1341
1342def _make_empty_cell():
1343    if False:
1344        # trick the compiler into creating an empty cell in our lambda
1345        cell = None
1346        raise AssertionError("this route should not be executed")
1347
1348    return (lambda: cell).__closure__[0]
1349
1350
1351def _make_skel_func(code, cell_count, base_globals=None):
1352    """ Creates a skeleton function object that contains just the provided
1353        code and the correct number of cells in func_closure.  All other
1354        func attributes (e.g. func_globals) are empty.
1355    """
1356    # This is backward-compatibility code: for cloudpickle versions between
1357    # 0.5.4 and 0.7, base_globals could be a string or None. base_globals
1358    # should now always be a dictionary.
1359    if base_globals is None or isinstance(base_globals, str):
1360        base_globals = {}
1361
1362    base_globals["__builtins__"] = __builtins__
1363
1364    closure = (
1365        tuple(_make_empty_cell() for _ in range(cell_count))
1366        if cell_count >= 0
1367        else None
1368    )
1369    return types.FunctionType(code, base_globals, None, None, closure)
1370
1371
def _make_skeleton_class(
    type_constructor, name, bases, type_kwargs, class_tracker_id, extra
):
    """Build dynamic class with an empty __dict__ to be filled once memoized

    If class_tracker_id is not None, try to lookup an existing class definition
    matching that id. If none is found, track a newly reconstructed class
    definition under that id so that other instances stemming from the same
    class id will also reuse this class definition.

    The "extra" variable is meant to be a dict (or None) that can be used for
    forward compatibility shall the need arise.
    """
    skeleton_class = type_constructor(name, bases, type_kwargs)
    # Deduplicate against any previously reconstructed class sharing this id.
    return _lookup_class_or_track(class_tracker_id, skeleton_class)
1387
1388
1389def _rehydrate_skeleton_class(skeleton_class, class_dict):
1390    """Put attributes from `class_dict` back on `skeleton_class`.
1391
1392    See CloudPickler.save_dynamic_class for more info.
1393    """
1394    registry = None
1395    for attrname, attr in class_dict.items():
1396        if attrname == "_abc_impl":
1397            registry = attr
1398        else:
1399            setattr(skeleton_class, attrname, attr)
1400    if registry is not None:
1401        for subclass in registry:
1402            skeleton_class.register(subclass)
1403
1404    return skeleton_class
1405
1406
def _make_skeleton_enum(
    bases, name, qualname, members, module, class_tracker_id, extra
):
    """Build dynamic enum with an empty __dict__ to be filled once memoized

    The creation of the enum class is inspired by the code of
    EnumMeta._create_.

    If class_tracker_id is not None, try to lookup an existing enum definition
    matching that id. If none is found, track a newly reconstructed enum
    definition under that id so that other instances stemming from the same
    class id will also reuse this enum definition.

    The "extra" variable is meant to be a dict (or None) that can be used for
    forward compatibility shall the need arise.
    """
    # enums always inherit from their base Enum class at the last position in
    # the list of base classes:
    enum_base = bases[-1]
    metacls = enum_base.__class__
    # __prepare__ supplies the metaclass's special member-collecting mapping
    classdict = metacls.__prepare__(name, bases)

    for member_name, member_value in members.items():
        classdict[member_name] = member_value
    enum_class = metacls.__new__(metacls, name, bases, classdict)
    enum_class.__module__ = module

    # Python 2.7 compat
    if qualname is not None:
        enum_class.__qualname__ = qualname

    return _lookup_class_or_track(class_tracker_id, enum_class)
1439
1440
def _is_dynamic(module):
    """
    Return True if the module is special module that cannot be imported by its
    name.
    """
    # Quick check: module that have __file__ attribute are not dynamic modules.
    if hasattr(module, "__file__"):
        return False

    if hasattr(module, "__spec__"):
        # A non-None spec means the import machinery knows how to re-import it.
        if module.__spec__ is not None:
            return False

        # In PyPy, Some built-in modules such as _codecs can have their
        # __spec__ attribute set to None despite being imported.  For such
        # modules, the ``_find_spec`` utility of the standard library is used.
        parent_name = module.__name__.rpartition(".")[0]
        if parent_name:  # pragma: no cover
            # This code handles the case where an imported package (and not
            # module) remains with __spec__ set to None. It is however untested
            # as no package in the PyPy stdlib has __spec__ set to None after
            # it is imported.
            try:
                parent = sys.modules[parent_name]
            except KeyError:
                msg = "parent {!r} not in sys.modules"
                raise ImportError(msg.format(parent_name))
            else:
                # submodule search locations come from the parent package
                pkgpath = parent.__path__
        else:
            pkgpath = None
        return _find_spec(module.__name__, pkgpath, module) is None

    else:
        # Backward compat for Python 2
        import imp

        try:
            # Walk each dotted component; find_module only takes one level.
            path = None
            for part in module.__name__.split("."):
                if path is not None:
                    path = [path]
                f, path, description = imp.find_module(part, path)
                if f is not None:
                    f.close()
        except ImportError:
            # not findable on disk => dynamic module
            return True
        return False
1489