1#-----------------------------------------------------------------------------
2# Copyright (c) 2005-2019, PyInstaller Development Team.
3#
4# Distributed under the terms of the GNU General Public License with exception
5# for distributing bootloader.
6#
7# The full license is in the file COPYING.txt, distributed with this software.
8#-----------------------------------------------------------------------------
9
10from __future__ import print_function
11
12"""
13Build packages using spec files.
14
NOTE: All global variables, classes and imported modules make up the API
      available to .spec files.
17"""
18
19
20import glob
21import os
22import pprint
23import shutil
24import sys
25
26
27# Relative imports to PyInstaller modules.
28from .. import HOMEPATH, DEFAULT_DISTPATH, DEFAULT_WORKPATH
29from .. import compat
30from .. import log as logging
31from ..utils.misc import absnormpath, compile_py_files
32from ..compat import is_py2, is_win, PYDYLIB_NAMES, VALID_MODULE_TYPES, \
33    open_file, text_type, unicode_writer
34from ..depend import bindepend
35from ..depend.analysis import initialize_modgraph
36from .api import PYZ, EXE, COLLECT, MERGE
37from .datastruct import TOC, Target, Tree, _check_guts_eq
38from .imphook import AdditionalFilesCache, ModuleHookCache
39from .osx import BUNDLE
40from .toc_conversion import DependencyProcessor
41from .utils import _check_guts_toc_mtime, format_binaries_and_datas
42from ..depend.utils import create_py3_base_library, scan_code_for_ctypes
43from ..archive import pyz_crypto
44from ..utils.misc import get_path_to_toplevel_modules, get_unicode_modules, mtime
45from ..configure import get_importhooks_dir
46
47if is_win:
48    from ..utils.win32 import winmanifest
49
50logger = logging.getLogger(__name__)
51
52STRINGTYPE = type('')
53TUPLETYPE = type((None,))
54
55rthooks = {}
56
57# place where the loader modules and initialization scripts live
58_init_code_path = os.path.join(HOMEPATH, 'PyInstaller', 'loader')
59
60IMPORT_TYPES = ['top-level', 'conditional', 'delayed', 'delayed, conditional',
61                'optional', 'conditional, optional', 'delayed, optional',
62                'delayed, conditional, optional']
63
64WARNFILE_HEADER = """\
65
This file lists modules PyInstaller was not able to find. This does not
necessarily mean this module is required for running your program. Python and
Python 3rd-party packages include a lot of conditional or optional modules. For
example the module 'ntpath' only exists on Windows, whereas the module
'posixpath' only exists on Posix systems.
71
Types of import:
73* top-level: imported at the top-level - look at these first
74* conditional: imported within an if-statement
75* delayed: imported from within a function
76* optional: imported within a try-except-statement
77
IMPORTANT: Do NOT post this list to the issue-tracker. Use it as a basis for
           tracking down the missing modules yourself. Thanks!
80
81"""
82
83
84def _old_api_error(obj_name):
85    """
    Cause PyInstaller to exit when the .spec file uses the old API.

    :param obj_name: Name of the old API object that is no longer supported.
88    """
89    raise SystemExit('%s has been removed in PyInstaller 2.0. '
90                     'Please update your spec-file. See '
91                     'http://www.pyinstaller.org/wiki/MigrateTo2.0 '
92                     'for details' % obj_name)
93
94
# TODO find a better place for this function.
96def setupUPXFlags():
97    f = compat.getenv("UPX", "")
98    if is_win:
99        # Binaries built with Visual Studio 7.1 require --strip-loadconf
100        # or they won't compress. Configure.py makes sure that UPX is new
101        # enough to support --strip-loadconf.
102        f = "--strip-loadconf " + f
103    # Do not compress any icon, so that additional icons in the executable
104    # can still be externally bound
105    f = "--compress-icons=0 " + f
106    f = "--best " + f
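    # The flags above are prepended to any user-supplied $UPX value, so on
    # Windows the variable ends up as
    # "--best --compress-icons=0 --strip-loadconf <original $UPX>".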
107    compat.setenv("UPX", f)
108
109
110class Analysis(Target):
111    """
    Class that performs analysis of the user's main Python scripts.
113
114    An Analysis has five outputs, all TOCs (Table of Contents) accessed as
115    attributes of the analysis.
116
117    scripts
118            The scripts you gave Analysis as input, with any runtime hook scripts
119            prepended.
120    pure
121            The pure Python modules.
122    binaries
            The extension modules and their dependencies. The secondary dependencies
            are filtered. On Windows files from C:\\Windows are excluded by default.
            On Linux/Unix only system libraries from /lib or /usr/lib are excluded.
    datas
            Data-file dependencies. These are data files that are found to be needed
128            by modules. They can be anything: plugins, font files, images, translations,
129            etc.
130    zipfiles
131            The zipfiles dependencies (usually .egg files).
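
    In a .spec file an Analysis instance is typically combined with PYZ, EXE
    and COLLECT. A minimal, purely illustrative sketch (the script and name
    below are hypothetical):

        a = Analysis(['myscript.py'])
        pyz = PYZ(a.pure, a.zipped_data)
        exe = EXE(pyz, a.scripts, a.binaries, a.zipfiles, a.datas, name='myscript')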
132    """
133    _old_scripts = set((
134        absnormpath(os.path.join(HOMEPATH, "support", "_mountzlib.py")),
135        absnormpath(os.path.join(HOMEPATH, "support", "useUnicode.py")),
136        absnormpath(os.path.join(HOMEPATH, "support", "useTK.py")),
137        absnormpath(os.path.join(HOMEPATH, "support", "unpackTK.py")),
138        absnormpath(os.path.join(HOMEPATH, "support", "removeTK.py")),
139        ))
140
141    def __init__(self, scripts, pathex=None, binaries=None, datas=None,
142                 hiddenimports=None, hookspath=None, excludes=None, runtime_hooks=None,
143                 cipher=None, win_no_prefer_redirects=False, win_private_assemblies=False,
144                 noarchive=False):
145        """
146        scripts
147                A list of scripts specified as file names.
148        pathex
149                An optional list of paths to be searched before sys.path.
150        binaries
151                An optional list of additional binaries (dlls, etc.) to include.
152        datas
153                An optional list of additional data files to include.
    hiddenimports
155                An optional list of additional (hidden) modules to include.
156        hookspath
157                An optional list of additional paths to search for hooks.
158                (hook-modules).
159        excludes
160                An optional list of module or package names (their Python names,
161                not path names) that will be ignored (as though they were not found).
162        runtime_hooks
163                An optional list of scripts to use as users' runtime hooks. Specified
164                as file names.
165        win_no_prefer_redirects
166                If True, prefers not to follow version redirects when searching for
167                Windows SxS Assemblies.
168        win_private_assemblies
169                If True, changes all bundled Windows SxS Assemblies into Private
170                Assemblies to enforce assembly versions.
171        noarchive
                If True, don't place source files in an archive, but keep them as
173                individual files.
174        """
175        super(Analysis, self).__init__()
176        from ..config import CONF
177
178        self.inputs = []
179        spec_dir = os.path.dirname(CONF['spec'])
180        for script in scripts:
181            # If path is relative, it is relative to the location of .spec file.
182            if not os.path.isabs(script):
183                script = os.path.join(spec_dir, script)
184            if absnormpath(script) in self._old_scripts:
185                logger.warning('Ignoring obsolete auto-added script %s', script)
186                continue
187            # Normalize script path.
188            script = os.path.normpath(script)
189            if not os.path.exists(script):
190                raise ValueError("script '%s' not found" % script)
191            self.inputs.append(script)
192
193        # Django hook requires this variable to find the script manage.py.
194        CONF['main_script'] = self.inputs[0]
195
196        self.pathex = self._extend_pathex(pathex, self.inputs)
197        # Set global config variable 'pathex' to make it available for
198        # PyInstaller.utils.hooks and import hooks. Path extensions for module
199        # search.
200        CONF['pathex'] = self.pathex
        # Extend sys.path so PyInstaller can find all necessary modules.
202        logger.info('Extending PYTHONPATH with paths\n' + pprint.pformat(self.pathex))
203        sys.path.extend(self.pathex)
204
205        # Set global variable to hold assembly binding redirects
206        CONF['binding_redirects'] = []
207
208        self.hiddenimports = hiddenimports or []
209        # Include modules detected when parsing options, like 'codecs' and encodings.
210        self.hiddenimports.extend(CONF['hiddenimports'])
211
212        self.hookspath = hookspath
213
214        # Custom runtime hook files that should be included and started before
215        # any existing PyInstaller runtime hooks.
216        self.custom_runtime_hooks = runtime_hooks or []
217
218        if cipher:
219            logger.info('Will encrypt Python bytecode with key: %s', cipher.key)
220            # Create a Python module which contains the decryption key which will
221            # be used at runtime by pyi_crypto.PyiBlockCipher.
222            pyi_crypto_key_path = os.path.join(CONF['workpath'], 'pyimod00_crypto_key.py')
223            with open_file(pyi_crypto_key_path, 'w', encoding='utf-8') as f:
224                f.write(text_type('# -*- coding: utf-8 -*-\n'
225                                  'key = %r\n' % cipher.key))
226            logger.info('Adding dependencies on pyi_crypto.py module')
227            self.hiddenimports.append(pyz_crypto.get_crypto_hiddenimports())
228
229        self.excludes = excludes or []
230        self.scripts = TOC()
231        self.pure = TOC()
232        self.binaries = TOC()
233        self.zipfiles = TOC()
234        self.zipped_data = TOC()
235        self.datas = TOC()
236        self.dependencies = TOC()
237        self.binding_redirects = CONF['binding_redirects'] = []
238        self.win_no_prefer_redirects = win_no_prefer_redirects
239        self.win_private_assemblies = win_private_assemblies
240        self._python_version = sys.version
241        self.noarchive = noarchive
242
243        self.__postinit__()
244
245
246        # TODO create function to convert datas/binaries from 'hook format' to TOC.
        # Initialise 'binaries' and 'datas' with the lists specified in the .spec file.
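        # These are given as hook-style (source, dest_dir) 2-tuples, e.g.
        # (hypothetically) binaries=[('libfoo.so', '.')] or
        # datas=[('assets/*.png', 'assets')]. format_binaries_and_datas()
        # expands globs in the source part and yields TOC-style
        # (dest_name, source_path) pairs.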
248        if binaries:
249            logger.info("Appending 'binaries' from .spec")
250            for name, pth in format_binaries_and_datas(binaries, workingdir=spec_dir):
251                self.binaries.append((name, pth, 'BINARY'))
252        if datas:
253            logger.info("Appending 'datas' from .spec")
254            for name, pth in format_binaries_and_datas(datas, workingdir=spec_dir):
255                self.datas.append((name, pth, 'DATA'))
256
257    _GUTS = (# input parameters
258            ('inputs', _check_guts_eq),  # parameter `scripts`
259            ('pathex', _check_guts_eq),
260            ('hiddenimports', _check_guts_eq),
261            ('hookspath', _check_guts_eq),
262            ('excludes', _check_guts_eq),
263            ('custom_runtime_hooks', _check_guts_eq),
264            ('win_no_prefer_redirects', _check_guts_eq),
265            ('win_private_assemblies', _check_guts_eq),
266
267            #'cipher': no need to check as it is implied by an
268            # additional hidden import
269
270            #calculated/analysed values
271            ('_python_version', _check_guts_eq),
272            ('scripts', _check_guts_toc_mtime),
273            ('pure', lambda *args: _check_guts_toc_mtime(*args, **{'pyc': 1})),
274            ('binaries', _check_guts_toc_mtime),
275            ('zipfiles', _check_guts_toc_mtime),
276            ('zipped_data', None),  # TODO check this, too
277            ('datas', _check_guts_toc_mtime),
278            # TODO: Need to add "dependencies"?
279
280            # cached binding redirects - loaded into CONF for PYZ/COLLECT to find.
281            ('binding_redirects', None),
282            )
283
284    def _extend_pathex(self, spec_pathex, scripts):
285        """
        Normalize additional paths where PyInstaller will look for modules and
        add the scripts' top-level module directories to the list of paths.

        :param spec_pathex: Additional paths defined in the .spec file.
        :param scripts: Scripts to create the executable from.
        :return: list of updated paths
292        """
        # Based on the main supplied script, add the top-level modules directory to PYTHONPATH.
        # Sometimes the main app script is not a top-level module but a submodule like 'mymodule.mainscript.py'.
        # In that case PyInstaller will not be able to find modules in the directory containing 'mymodule'.
        # Add this directory to PYTHONPATH so PyInstaller can find it.
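        # For example (hypothetical layout): for a script 'src/mypkg/main.py'
        # where 'src/mypkg/__init__.py' exists, 'src' is added so that 'mypkg'
        # is importable as a package.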
297        pathex = []
298        # Add scripts paths first.
299        for script in scripts:
            logger.debug('script: %s', script)
301            script_toplevel_dir = get_path_to_toplevel_modules(script)
302            if script_toplevel_dir:
303                pathex.append(script_toplevel_dir)
304        # Append paths from .spec.
305        if spec_pathex is not None:
306            pathex.extend(spec_pathex)
307        # Normalize paths in pathex and make them absolute.
308        return [absnormpath(p) for p in pathex]
309
310    def _check_guts(self, data, last_build):
311        if Target._check_guts(self, data, last_build):
312            return True
313        for fnm in self.inputs:
314            if mtime(fnm) > last_build:
315                logger.info("Building because %s changed", fnm)
316                return True
        # Now we know that none of the input parameters and none of
        # the input files have changed. So take the values calculated
        # or analysed in the last run and store them in `self`.
320        self.scripts = TOC(data['scripts'])
321        self.pure = TOC(data['pure'])
322        self.binaries = TOC(data['binaries'])
323        self.zipfiles = TOC(data['zipfiles'])
324        self.zipped_data = TOC(data['zipped_data'])
325        self.datas = TOC(data['datas'])
326
327        # Store previously found binding redirects in CONF for later use by PKG/COLLECT
328        from ..config import CONF
329        self.binding_redirects = CONF['binding_redirects'] = data['binding_redirects']
330
331        return False
332
333    def assemble(self):
334        """
335        This method is the MAIN method for finding all necessary files to be bundled.
336        """
337        from ..config import CONF
338
        # Either instantiate a ModuleGraph object or, for tests, reuse the
        # dependency graph that was already created.
        # Do not reuse the dependency graph when the --exclude-module option was used.
342        if 'tests_modgraph' in CONF and not self.excludes:
343            logger.info('Reusing basic module graph object.')
344            self.graph = CONF['tests_modgraph']
345        else:
346            for m in self.excludes:
                logger.debug("Excluding module '%s'", m)
348            self.graph = initialize_modgraph(
349                excludes=self.excludes, user_hook_dirs=self.hookspath)
350
        # TODO Find a better place where to put 'base_library.zip' and when to create it.
        # For Python 3 it is necessary to create the file 'base_library.zip'
        # containing core Python modules. In Python 3 some built-in modules
        # are written in pure Python. base_library.zip is a way to have
        # those modules available as "built-in".
356        if not is_py2:
357            libzip_filename = os.path.join(CONF['workpath'], 'base_library.zip')
358            create_py3_base_library(libzip_filename, graph=self.graph)
359            # Bundle base_library.zip as data file.
360            # Data format of TOC item:   ('relative_path_in_dist_dir', 'absolute_path_on_disk', 'DATA')
361            self.datas.append((os.path.basename(libzip_filename), libzip_filename, 'DATA'))
362
        # Expand the sys.path of the module graph.
        # The attribute is the set of paths to use for imports: sys.path,
        # plus our loader, plus other paths (e.g. from the --path option).
366        self.graph.path = self.pathex + self.graph.path
367        self.graph.set_setuptools_nspackages()
368
369        # Analyze the script's hidden imports (named on the command line)
370        self.graph.add_hiddenimports(self.hiddenimports)
371
372
373        logger.info("running Analysis %s", self.tocbasename)
        # Get paths to Python and, on Windows, the manifest.
375        python = sys.executable
376        if not is_win:
377            # Linux/MacOS: get a real, non-link path to the running Python executable.
378            while os.path.islink(python):
379                python = os.path.join(os.path.dirname(python), os.readlink(python))
380            depmanifest = None
381        else:
382            # Windows: Create a manifest to embed into built .exe, containing the same
383            # dependencies as python.exe.
384            depmanifest = winmanifest.Manifest(type_="win32", name=CONF['specnm'],
385                                               processorArchitecture=winmanifest.processor_architecture(),
386                                               version=(1, 0, 0, 0))
387            depmanifest.filename = os.path.join(CONF['workpath'],
388                                                CONF['specnm'] + ".exe.manifest")
389
390        # We record "binaries" separately from the modulegraph, as there
391        # is no way to record those dependencies in the graph. These include
392        # the python executable and any binaries added by hooks later.
393        # "binaries" are not the same as "extensions" which are .so or .dylib
394        # that are found and recorded as extension nodes in the graph.
        # Reset the 'seen' variable before running bindepend. We use bindepend only
        # for the python executable here.
397        bindepend.seen.clear()
398
399        # Add binary and assembly dependencies of Python.exe.
        # This also ensures that its assembly dependencies under Windows get added to the
401        # built .exe's manifest. Python 2.7 extension modules have no assembly
402        # dependencies, and rely on the app-global dependencies set by the .exe.
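        # The [1:] slice below drops the ('', python, '') placeholder entry we
        # pass in, so only its discovered dependencies are added.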
403        self.binaries.extend(bindepend.Dependencies([('', python, '')],
404                                                    manifest=depmanifest,
405                                                    redirects=self.binding_redirects)[1:])
406        if is_win:
407            depmanifest.writeprettyxml()
408
409
410        #FIXME: For simplicity, move the following hook caching into a new
411        #PyiModuleGraph.cache_module_hooks() method and have the current
412        #"PyiModuleGraph" instance own the current "ModuleHookCache" instance.
413
414        ### Hook cache.
415        logger.info('Caching module hooks...')
416
417        # List of all directories containing hook scripts. Default hooks are
        # listed first and hence take precedence over custom hooks.
419        module_hook_dirs = [get_importhooks_dir()]
420        if self.hookspath:
421            module_hook_dirs.extend(self.hookspath)
422
423        # Hook cache prepopulated with these lazy loadable hook scripts.
424        module_hook_cache = ModuleHookCache(
425            module_graph=self.graph, hook_dirs=module_hook_dirs)
426
427
428        ### Module graph.
429        #
430        # Construct the module graph of import relationships between modules
431        # required by this user's application. For each entry point (top-level
432        # user-defined Python script), all imports originating from this entry
433        # point are recursively parsed into a subgraph of the module graph. This
434        # subgraph is then connected to this graph's root node, ensuring
        # imported module nodes will be reachable from the root node -- which
        # is (arbitrarily) chosen to be the first entry point's node.
437
438        # List to hold graph nodes of scripts and runtime hooks in use order.
439        priority_scripts = []
440
        # Assume that if the script does not exist, Modulegraph will raise an error.
442        # Save the graph nodes of each in sequence.
443        for script in self.inputs:
444            logger.info("Analyzing %s", script)
445            priority_scripts.append(self.graph.run_script(script))
446
447
448        ### Post-graph hooks.
449        #
450        # Run post-graph hooks for all modules imported by this user's
451        # application. For each iteration of the infinite "while" loop below:
452        #
453        # 1. All hook() functions defined in cached hooks for imported modules
454        #    are called. This may result in new modules being imported (e.g., as
455        #    hidden imports) that were ignored earlier in the current iteration:
456        #    if this is the case, all hook() functions defined in cached hooks
457        #    for these modules will be called by the next iteration.
458        # 2. All cached hooks whose hook() functions were called are removed
459        #    from this cache. If this cache is empty, no hook() functions will
460        #    be called by the next iteration and this loop will be terminated.
461        # 3. If no hook() functions were called, this loop is terminated.
462        logger.info('Loading module hooks...')
463
464        # Cache of all external dependencies (e.g., binaries, datas) listed in
465        # hook scripts for imported modules.
466        additional_files_cache = AdditionalFilesCache()
467
468        #FIXME: For orthogonality, move the following "while" loop into a new
469        #PyiModuleGraph.post_graph_hooks() method. The "PyiModuleGraph" class
470        #already handles all other hook types. Moreover, the graph node
471        #retrieval and type checking performed below are low-level operations
472        #best isolated into the "PyiModuleGraph" class itself.
473
474        # For each imported module, run this module's post-graph hooks if any.
475        while True:
476            # Set of the names of all imported modules whose post-graph hooks
477            # are run by this iteration, preventing the next iteration from re-
478            # running these hooks. If still empty at the end of this iteration,
479            # no post-graph hooks were run; thus, this loop will be terminated.
480            hooked_module_names = set()
481
482            # For each remaining hookable module and corresponding hooks...
483            for module_name, module_hooks in module_hook_cache.items():
484                # Graph node for this module if imported or "None" otherwise.
485                module_node = self.graph.findNode(
486                    module_name, create_nspkg=False)
487
488                # If this module has not been imported, temporarily ignore it.
489                # This module is retained in the cache, as a subsequently run
490                # post-graph hook could import this module as a hidden import.
491                if module_node is None:
492                    continue
493
494                # If this module is unimportable, permanently ignore it.
495                if type(module_node).__name__ not in VALID_MODULE_TYPES:
496                    hooked_module_names.add(module_name)
497                    continue
498
499                # For each hook script for this module...
500                for module_hook in module_hooks:
501                    # Run this script's post-graph hook if any.
502                    module_hook.post_graph()
503
504                    # Cache all external dependencies listed by this script
505                    # after running this hook, which could add dependencies.
506                    additional_files_cache.add(
507                        module_name,
508                        module_hook.binaries,
509                        module_hook.datas)
510
511                # Prevent this module's hooks from being run again.
512                hooked_module_names.add(module_name)
513
514            # Prevent all post-graph hooks run above from being run again by the
515            # next iteration.
516            module_hook_cache.remove_modules(*hooked_module_names)
517
518            # If no post-graph hooks were run, terminate iteration.
519            if not hooked_module_names:
520                break
521
522        # Update 'binaries' TOC and 'datas' TOC.
523        deps_proc = DependencyProcessor(self.graph, additional_files_cache)
524        self.binaries.extend(deps_proc.make_binaries_toc())
525        self.datas.extend(deps_proc.make_datas_toc())
526        self.zipped_data.extend(deps_proc.make_zipped_data_toc())
527        # Note: zipped eggs are collected below
528
529
530        ### Look for dlls that are imported by Python 'ctypes' module.
531        # First get code objects of all modules that import 'ctypes'.
532        logger.info('Looking for ctypes DLLs')
533        ctypes_code_objs = self.graph.get_co_using_ctypes()  # dict like:  {'module1': code_obj, 'module2': code_obj}
534        for name, co in ctypes_code_objs.items():
535            # Get dlls that might be needed by ctypes.
536            logger.debug('Scanning %s for shared libraries or dlls', name)
537            ctypes_binaries = scan_code_for_ctypes(co)
538            self.binaries.extend(set(ctypes_binaries))
539
540        # Analyze run-time hooks.
        # Run-time hooks have to be executed before user scripts. Add them
542        # to the beginning of 'priority_scripts'.
543        priority_scripts = self.graph.analyze_runtime_hooks(self.custom_runtime_hooks) + priority_scripts
544
545        # 'priority_scripts' is now a list of the graph nodes of custom runtime
546        # hooks, then regular runtime hooks, then the PyI loader scripts.
547        # Further on, we will make sure they end up at the front of self.scripts
548
549        ### Extract the nodes of the graph as TOCs for further processing.
550
551        # Initialize the scripts list with priority scripts in the proper order.
552        self.scripts = self.graph.nodes_to_toc(priority_scripts)
553
554        # Extend the binaries list with all the Extensions modulegraph has found.
555        self.binaries = self.graph.make_binaries_toc(self.binaries)
556        # Fill the "pure" list with pure Python modules.
557        assert len(self.pure) == 0
558        self.pure = self.graph.make_pure_toc()
559        # And get references to module code objects constructed by ModuleGraph
560        # to avoid writing .pyc/pyo files to hdd.
561        self.pure._code_cache = self.graph.get_code_objects()
562
563        # Add remaining binary dependencies - analyze Python C-extensions and what
564        # DLLs they depend on.
565        logger.info('Looking for dynamic libraries')
566        self.binaries.extend(bindepend.Dependencies(self.binaries,
567                                                    redirects=self.binding_redirects))
568
569        ### Include zipped Python eggs.
570        logger.info('Looking for eggs')
571        self.zipfiles.extend(deps_proc.make_zipfiles_toc())
572
        # Verify that the Python dynamic library can be found.
        # Without the dynamic Python library PyInstaller cannot continue.
575        self._check_python_library(self.binaries)
576
577        if is_win:
578            # Remove duplicate redirects
579            self.binding_redirects[:] = list(set(self.binding_redirects))
580            logger.info("Found binding redirects: \n%s", self.binding_redirects)
581
582        # Place Python source in data files for the noarchive case.
583        if self.noarchive:
584            # Create a new TOC of ``(dest path for .pyc, source for .py, type)``.
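            # E.g. (illustrative) module 'mypkg.util' maps to 'mypkg/util.pyc'
            # (or .pyo when optimizing) and a package's '__init__.py' maps to
            # 'mypkg/__init__.pyc', using os.sep as the separator.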
585            new_toc = TOC()
586            for name, path, typecode in self.pure:
587                assert typecode == 'PYMODULE'
588                # Transform a python module name into a file name.
589                name = name.replace('.', os.sep)
590                # Special case: modules have an implied filename to add.
591                if os.path.splitext(os.path.basename(path))[0] == '__init__':
592                    name += os.sep + '__init__'
593                # Append the extension for the compiled result.
594                name += '.py' + ('o' if sys.flags.optimize else 'c')
595                new_toc.append((name, path, typecode))
596            # Put the result of byte-compiling this TOC in datas. Mark all entries as data.
597            for name, path, typecode in compile_py_files(new_toc, CONF['workpath']):
598                self.datas.append((name, path, 'DATA'))
599            # Store no source in the archive.
600            self.pure = TOC()
601
602        # Write warnings about missing modules.
603        self._write_warnings()
        # Write debug information about the graph.
605        self._write_graph_debug()
606
607    def _write_warnings(self):
608        """
609        Write warnings about missing modules. Get them from the graph
610        and use the graph to figure out who tried to import them.
611        """
612        def dependency_description(name, depInfo):
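            # depInfo's flags form a 3-bit index into IMPORT_TYPES:
            # conditional -> 1, function (delayed) -> 2, tryexcept (optional) -> 4.
            # E.g. an import inside a try-block within a function yields
            # index 6, i.e. 'delayed, optional'.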
613            if not depInfo or depInfo == 'direct':
614                imptype = 0
615            else:
616                imptype = (depInfo.conditional
617                           + 2 * depInfo.function
618                           + 4 * depInfo.tryexcept)
619            return '%s (%s)' % (name, IMPORT_TYPES[imptype])
620
621        from ..config import CONF
622        miss_toc = self.graph.make_missing_toc()
623        with open_file(CONF['warnfile'], 'w', encoding='utf-8') as wf:
624            wf_unicode = unicode_writer(wf)
625            wf_unicode.write(WARNFILE_HEADER)
626            for (n, p, status) in miss_toc:
627                importers = self.graph.get_importers(n)
628                print(status, 'module named', n, '- imported by',
629                      ', '.join(dependency_description(name, data)
630                                for name, data in importers),
631                      file=wf_unicode)
632        logger.info("Warnings written to %s", CONF['warnfile'])
633
634    def _write_graph_debug(self):
635        """Write a xref (in html) and with `--log-level DEBUG` a dot-drawing
636        of the graph.
637        """
638        from ..config import CONF
639        with open_file(CONF['xref-file'], 'w', encoding='utf-8') as fh:
640            self.graph.create_xref(unicode_writer(fh))
641            logger.info("Graph cross-reference written to %s", CONF['xref-file'])
642        if logger.getEffectiveLevel() > logging.DEBUG:
643            return
644        # The `DOT language's <https://www.graphviz.org/doc/info/lang.html>`_
645        # default character encoding (see the end of the linked page) is UTF-8.
646        with open_file(CONF['dot-file'], 'w', encoding='utf-8') as fh:
647            self.graph.graphreport(unicode_writer(fh))
648            logger.info("Graph drawing written to %s", CONF['dot-file'])
649
650
651    def _check_python_library(self, binaries):
652        """
653        Verify presence of the Python dynamic library in the binary dependencies.
        The Python library is an essential piece that always has to be included.
655        """
656        # First check that libpython is in resolved binary dependencies.
657        for (nm, filename, typ) in binaries:
658            if typ == 'BINARY' and nm in PYDYLIB_NAMES:
659                # Just print its filename and return.
660                logger.info('Using Python library %s', filename)
661                # Checking was successful - end of function.
662                return
663
664        # Python lib not in dependencies - try to find it.
665        logger.info('Python library not in binary dependencies. Doing additional searching...')
666        python_lib = bindepend.get_python_library_path()
667        if python_lib:
668            logger.debug('Adding Python library to binary dependencies')
669            binaries.append((os.path.basename(python_lib), python_lib, 'BINARY'))
670            logger.info('Using Python library %s', python_lib)
671        else:
672            msg = """Python library not found: %s
673This would mean your Python installation doesn't come with proper library files.
This usually happens due to a missing development package, or unsuitable build parameters of the Python installation.
675
676* On Debian/Ubuntu, you would need to install Python development packages
677  * apt-get install python3-dev
678  * apt-get install python-dev
679* If you're building Python by yourself, please rebuild your Python with `--enable-shared` (or, `--enable-framework` on Darwin)
680""" % (", ".join(PYDYLIB_NAMES),)
681            raise IOError(msg)
682
683
684class ExecutableBuilder(object):
685    """
686    Class that constructs the executable.
687    """
688    # TODO wrap the 'main' and 'build' function into this class.
689
690
691def build(spec, distpath, workpath, clean_build):
692    """
693    Build the executable according to the created SPEC file.
694    """
695    from ..config import CONF
696
    # For compatibility with Python < 2.7.9 we cannot use `lambda`,
    # but need to declare _old_api_error as being global; see issue #1408.
699    def TkPKG(*args, **kwargs):
700        global _old_api_error
701        _old_api_error('TkPKG')
702
703    def TkTree(*args, **kwargs):
704        global _old_api_error
705        _old_api_error('TkTree')
706
707    # Ensure starting tilde and environment variables get expanded in distpath / workpath.
708    # '~/path/abc', '${env_var_name}/path/abc/def'
709    distpath = compat.expand_path(distpath)
710    workpath = compat.expand_path(workpath)
711    CONF['spec'] = compat.expand_path(spec)
712
713    CONF['specpath'], CONF['specnm'] = os.path.split(spec)
714    CONF['specnm'] = os.path.splitext(CONF['specnm'])[0]
715
716    # Add 'specname' to workpath and distpath if they point to PyInstaller homepath.
717    if os.path.dirname(distpath) == HOMEPATH:
718        distpath = os.path.join(HOMEPATH, CONF['specnm'], os.path.basename(distpath))
719    CONF['distpath'] = distpath
720    if os.path.dirname(workpath) == HOMEPATH:
721        workpath = os.path.join(HOMEPATH, CONF['specnm'], os.path.basename(workpath), CONF['specnm'])
722    else:
723        workpath = os.path.join(workpath, CONF['specnm'])
724
725    CONF['warnfile'] = os.path.join(workpath, 'warn-%s.txt' % CONF['specnm'])
726    CONF['dot-file'] = os.path.join(workpath, 'graph-%s.dot' % CONF['specnm'])
727    CONF['xref-file'] = os.path.join(workpath, 'xref-%s.html' % CONF['specnm'])
728
729    # Clean PyInstaller cache (CONF['cachedir']) and temporary files (workpath)
    # to be able to start a clean build.
731    if clean_build:
732        logger.info('Removing temporary files and cleaning cache in %s', CONF['cachedir'])
733        for pth in (CONF['cachedir'], workpath):
734            if os.path.exists(pth):
735                # Remove all files in 'pth'.
736                for f in glob.glob(pth + '/*'):
737                    # Remove dirs recursively.
738                    if os.path.isdir(f):
739                        shutil.rmtree(f)
740                    else:
741                        os.remove(f)
742
    # Create DISTPATH and workpath if they do not exist.
744    for pth in (CONF['distpath'], workpath):
745        if not os.path.exists(pth):
746            os.makedirs(pth)
747
748    # Construct NAMESPACE for running the Python code from .SPEC file.
    # NOTE: Passing NAMESPACE allows us to avoid having global variables in this
    #       module and creates an isolated environment for running tests.
    # NOTE: Defining NAMESPACE allows mapping any class to a specific name for .SPEC.
752    # FIXME: Some symbols might be missing. Add them if there are some failures.
753    # TODO: What from this .spec API is deprecated and could be removed?
754    spec_namespace = {
755        # Set of global variables that can be used while processing .spec file.
756        # Some of them act as configuration options.
757        'DISTPATH': CONF['distpath'],
758        'HOMEPATH': HOMEPATH,
759        'SPEC': CONF['spec'],
760        'specnm': CONF['specnm'],
761        'SPECPATH': CONF['specpath'],
762        'WARNFILE': CONF['warnfile'],
763        'workpath': workpath,
764        # PyInstaller classes for .spec.
765        'TOC': TOC,
766        'Analysis': Analysis,
767        'BUNDLE': BUNDLE,
768        'COLLECT': COLLECT,
769        'EXE': EXE,
770        'MERGE': MERGE,
771        'PYZ': PYZ,
772        'Tree': Tree,
773        # Old classes for .spec - raise Exception for user.
774        'TkPKG': TkPKG,
775        'TkTree': TkTree,
776        # Python modules available for .spec.
777        'os': os,
778        'pyi_crypto': pyz_crypto,
779    }
780
781    # Set up module PyInstaller.config for passing some arguments to 'exec'
782    # function.
783    from ..config import CONF
784    CONF['workpath'] = workpath
785
786    # Execute the specfile. Read it as a binary file...
787    with open(spec, 'rU' if is_py2 else 'rb') as f:
788        # ... then let Python determine the encoding, since ``compile`` accepts
789        # byte strings.
790        code = compile(f.read(), spec, 'exec')
791    exec(code, spec_namespace)
792
793def __add_options(parser):
794    parser.add_argument("--distpath", metavar="DIR",
795                        default=DEFAULT_DISTPATH,
796                        help=('Where to put the bundled app (default: %s)' %
797                              os.path.join(os.curdir, 'dist')))
798    parser.add_argument('--workpath', default=DEFAULT_WORKPATH,
799                        help=('Where to put all the temporary work files, '
                              '.log, .pyz, etc. (default: %s)' %
801                              os.path.join(os.curdir, 'build')))
802    parser.add_argument('-y', '--noconfirm',
803                        action="store_true", default=False,
804                        help='Replace output directory (default: %s) without '
805                        'asking for confirmation' % os.path.join('SPECPATH', 'dist', 'SPECNAME'))
806    parser.add_argument('--upx-dir', default=None,
807                        help='Path to UPX utility (default: search the execution path)')
808    parser.add_argument("-a", "--ascii", action="store_true",
809                        help="Do not include unicode encoding support "
810                        "(default: included if available)")
811    parser.add_argument('--clean', dest='clean_build', action='store_true',
812                        default=False,
813                        help='Clean PyInstaller cache and remove temporary '
814                        'files before building.')
815
816
817def main(pyi_config, specfile, noconfirm, ascii=False, **kw):
818
819    from ..config import CONF
820    CONF['noconfirm'] = noconfirm
821
822    # Some modules are included if they are detected at build-time or
    # if a command-line argument is specified (e.g. --ascii).
824    if CONF.get('hiddenimports') is None:
825        CONF['hiddenimports'] = []
826    # Test unicode support.
827    if not ascii:
828        CONF['hiddenimports'].extend(get_unicode_modules())
829
830    # FIXME: this should be a global import, but can't due to recursive imports
    # If a configuration dict is supplied, skip the configuration step.
832    if pyi_config is None:
833        import PyInstaller.configure as configure
834        CONF.update(configure.get_config(kw.get('upx_dir')))
835    else:
836        CONF.update(pyi_config)
837
838    if CONF['hasUPX']:
839        setupUPXFlags()
840
841    CONF['ui_admin'] = kw.get('ui_admin', False)
842    CONF['ui_access'] = kw.get('ui_uiaccess', False)
843
844    build(specfile, kw.get('distpath'), kw.get('workpath'), kw.get('clean_build'))
845