1# coding: utf-8
2"""JupyterLab command handler"""
3
4# Copyright (c) Jupyter Development Team.
5# Distributed under the terms of the Modified BSD License.
6import contextlib
7import errno
8import hashlib
9import itertools
10import json
11import logging
12import os
13import os.path as osp
14import re
15import shutil
16import stat
17import site
18import subprocess
19import sys
20import tarfile
21import warnings
22from copy import deepcopy
23from glob import glob
24from pathlib import Path
25from tempfile import TemporaryDirectory
26from threading import Event
27from urllib.error import URLError
28from urllib.request import Request, quote, urljoin, urlopen
29
30from jupyter_core.paths import jupyter_config_path
31from jupyter_server.extension.serverextension import GREEN_ENABLED, GREEN_OK, RED_DISABLED, RED_X
32from jupyterlab_server.config import (LabConfig, get_federated_extensions,
33                                      get_package_url, get_page_config,
34                                      get_static_page_config,
35                                      write_page_config)
36from jupyterlab_server.process import Process, WatchHelper, list2cmdline, which
37from packaging.version import Version
38from traitlets import Bool, Dict, HasTraits, Instance, List, Unicode, default
39
40from jupyterlab.coreconfig import CoreConfig
41from jupyterlab.jlpmapp import HERE, YARN_PATH
42from jupyterlab.semver import Range, gt, gte, lt, lte, make_semver
43from jupyterlab._version import __version__
44
45# The regex for expecting the webpack output.
46WEBPACK_EXPECT = re.compile(r'.*theme-light-extension/style/theme.css')
47
48
49# The repo root directory
50REPO_ROOT = osp.abspath(osp.join(HERE, '..'))
51
52
53# The dev mode directory.
54DEV_DIR = osp.join(REPO_ROOT, 'dev_mode')
55
56
57# If we are pinning the package, rename it `pin@<alias>`
58PIN_PREFIX = 'pin@'
59
60
61# Default Yarn registry used in default yarn.lock
62YARN_DEFAULT_REGISTRY = 'https://registry.yarnpkg.com'
63
64
class ProgressProcess(Process):
    """A ``Process`` that shows a console spinner while it runs."""

    def __init__(self, cmd, logger=None, cwd=None, kill_event=None,
                 env=None):
        """Start a subprocess that can be run asynchronously.

        Parameters
        ----------
        cmd: list
            The command to run.
        logger: :class:`~logger.Logger`, optional
            The logger instance.
        cwd: string, optional
            The cwd of the process.
        kill_event: :class:`~threading.Event`, optional
            An event used to kill the process operation.
        env: dict, optional
            The environment for the process.

        Raises
        ------
        ValueError
            If ``cmd`` is not a list/tuple, or if ``kill_event`` is
            already set before the process starts.
        """
        if not isinstance(cmd, (list, tuple)):
            raise ValueError('Command must be given as a list')

        # Bail out before spawning anything if cancellation was requested.
        if kill_event and kill_event.is_set():
            raise ValueError('Process aborted')

        self.logger = _ensure_logger(logger)
        self._last_line = ''
        self.cmd = cmd
        self.logger.debug('> ' + list2cmdline(cmd))

        # Merge stderr into stdout so all output arrives on a single pipe.
        self.proc = self._create_process(
            cwd=cwd,
            env=env,
            stderr=subprocess.STDOUT,
            stdout=subprocess.PIPE,
            universal_newlines=True,
            encoding='utf-8',
        )
        self._kill_event = kill_event or Event()

        Process._procs.add(self)

    def wait(self):
        """Block until the process exits, animating a spinner on stdout.

        Raises ``ValueError`` if the kill event is set while waiting.
        Returns the value of ``self.terminate()`` (the ``Process`` base
        class API — presumably the exit code; confirm against ``Process``).
        """
        cache = []
        proc = self.proc
        kill_event = self._kill_event
        spinner = itertools.cycle(['-', '\\', '|', '/'])
        while proc.poll() is None:
            sys.stdout.write(next(spinner))   # write the next character
            sys.stdout.flush()                # flush stdout buffer (actual character display)
            sys.stdout.write('\b')
            if kill_event.is_set():
                self.terminate()
                raise ValueError('Process was aborted')
            try:
                # Drain output in 100ms slices so the spinner keeps moving.
                out, _ = proc.communicate(timeout=.1)
                cache.append(out)
            except subprocess.TimeoutExpired:
                continue
        self.logger.debug('\n'.join(cache))
        sys.stdout.flush()
        return self.terminate()
127
128
def pjoin(*args):
    """Join path components and normalize the result to an absolute path."""
    combined = osp.join(*args)
    return osp.abspath(combined)
133
134
def get_user_settings_dir():
    """Return the absolute path of the JupyterLab user settings directory.

    The ``JUPYTERLAB_SETTINGS_DIR`` environment variable takes precedence;
    otherwise the first Jupyter config path is used.
    """
    configured = os.environ.get('JUPYTERLAB_SETTINGS_DIR')
    if not configured:
        configured = pjoin(jupyter_config_path()[0], 'lab', 'user-settings')
    return osp.abspath(configured)
143
144
def get_workspaces_dir():
    """Return the absolute path of the JupyterLab workspaces directory.

    The ``JUPYTERLAB_WORKSPACES_DIR`` environment variable takes
    precedence; otherwise the first Jupyter config path is used.
    """
    configured = os.environ.get('JUPYTERLAB_WORKSPACES_DIR')
    if not configured:
        configured = pjoin(jupyter_config_path()[0], 'lab', 'workspaces')
    return osp.abspath(configured)
153
154
def get_app_dir():
    """Get the configured JupyterLab app directory.

    Resolution order:

    1. The ``JUPYTERLAB_DIR`` environment variable, if set.
    2. A user-level install (when running from the ``site`` user base).
    3. A system install under ``/usr/local/share/jupyter/lab``.
    4. The default ``<sys.prefix>/share/jupyter/lab`` location.

    Returns the resolved (canonical-case) path as a string.
    """
    # Default to the override environment variable.
    if os.environ.get('JUPYTERLAB_DIR'):
        # We must resolve the path to get the canonical case of the path for
        # case-sensitive systems
        return str(Path(os.environ['JUPYTERLAB_DIR']).resolve())

    # Use the default locations for data_files.
    app_dir = pjoin(sys.prefix, 'share', 'jupyter', 'lab')

    # Check for a user level install.
    # Ensure that USER_BASE is defined
    if hasattr(site, 'getuserbase'):
        site.getuserbase()
    userbase = getattr(site, 'USER_BASE', None)
    # Guard against USER_BASE being None (e.g. when user site-packages are
    # disabled); ``str.startswith(None)`` would raise a TypeError.
    if userbase and HERE.startswith(userbase) and not app_dir.startswith(userbase):
        app_dir = pjoin(userbase, 'share', 'jupyter', 'lab')

    # Check for a system install in '/usr/local/share'.
    elif (sys.prefix.startswith('/usr') and not
          osp.exists(app_dir) and
          osp.exists('/usr/local/share/jupyter/lab')):
        app_dir = '/usr/local/share/jupyter/lab'

    # We must resolve the path to get the canonical case of the path for
    # case-sensitive systems
    return str(Path(app_dir).resolve())
184
185
def dedupe_yarn(path, logger=None):
    """Run `yarn-deduplicate` with the `fewer` strategy in *path*.

    This minimizes the total number of packages installed in a staging
    directory.  It means an extension (or dependency) _could_ be downgraded
    below the version expected at publication time, so core should
    aggressively pin above known-bad versions.
    """
    checker = ProgressProcess(
        ['node', YARN_PATH, 'yarn-deduplicate', '-s', 'fewer', '--fail'],
        cwd=path, logger=logger
    )
    if checker.wait() != 0:
        # Duplicates were found and collapsed; re-run yarn to refresh the tree.
        ProgressProcess(['node', YARN_PATH], cwd=path, logger=logger).wait()
202
203
def ensure_node_modules(cwd, logger=None):
    """Ensure that node_modules is up to date.

    Returns true if the node_modules was updated.
    """
    logger = _ensure_logger(logger)
    checker = ProgressProcess(
        ['node', YARN_PATH, 'check', '--verify-tree'], cwd=cwd, logger=logger)
    needs_update = checker.wait() != 0

    # A failed verify-tree means the install is stale; refresh and dedupe.
    if needs_update:
        ProgressProcess(['node', YARN_PATH], cwd=cwd, logger=logger).wait()
        dedupe_yarn(REPO_ROOT, logger)

    return needs_update
220
221
def ensure_dev(logger=None):
    """Ensure that the dev assets are available."""
    logger = _ensure_logger(logger)
    target = pjoin(DEV_DIR, 'static')

    # Rebuild when node_modules changed or the static assets are missing.
    modules_updated = ensure_node_modules(REPO_ROOT, logger)
    if modules_updated or not osp.exists(target):
        ProgressProcess(['node', YARN_PATH, 'build'],
                        cwd=REPO_ROOT, logger=logger).wait()
233
234
def ensure_core(logger=None):
    """Ensure that the core assets are available."""
    staging = pjoin(HERE, 'staging')
    logger = _ensure_logger(logger)

    # Only build when the generated index.html is missing.
    index_file = pjoin(HERE, 'static', 'index.html')
    if not osp.exists(index_file):
        ensure_node_modules(staging, logger)
        ProgressProcess(['node', YARN_PATH, 'build'],
                        cwd=staging, logger=logger).wait()
248
249
def ensure_app(app_dir):
    """Ensure that an application directory is available.

    If it does not exist, return a list of messages to prompt the user.
    """
    index_file = pjoin(app_dir, 'static', 'index.html')
    if osp.exists(index_file):
        return None

    return ['JupyterLab application assets not found in "%s"' % app_dir,
            'Please run `jupyter lab build` or use a different app directory']
261
262
def watch_packages(logger=None):
    """Run watch mode for the source packages.

    Parameters
    ----------
    logger: :class:`~logger.Logger`, optional
        The logger instance.

    Returns
    -------
    A list of `WatchHelper` objects.
    """
    logger = _ensure_logger(logger)
    ensure_node_modules(REPO_ROOT, logger)

    meta_dir = osp.abspath(osp.join(REPO_ROOT, 'packages', 'metapackage'))

    # Wait until tsc reports a clean incremental compile before returning.
    ts_regex = r'.* Found 0 errors\. Watching for file changes\.'
    watcher = WatchHelper(['node', YARN_PATH, 'run', 'watch'],
                          cwd=meta_dir, logger=logger, startup_regex=ts_regex)

    return [watcher]
286
287
def watch_dev(logger=None):
    """Run watch mode in a given directory.

    Parameters
    ----------
    logger: :class:`~logger.Logger`, optional
        The logger instance.

    Returns
    -------
    A list of `WatchHelper` objects.
    """
    logger = _ensure_logger(logger)

    procs = list(watch_packages(logger))

    # Start webpack in watch mode and wait for the first compilation.
    procs.append(WatchHelper(['node', YARN_PATH, 'run', 'watch'],
                             cwd=DEV_DIR, logger=logger,
                             startup_regex=WEBPACK_EXPECT))

    return procs
310
311
class AppOptions(HasTraits):
    """Options object for build system"""

    def __init__(self, logger=None, core_config=None, **kwargs):
        """Create the options, accepting ``logger`` and ``core_config``
        as explicit arguments for convenience.
        """
        # Fold the explicit arguments into kwargs so traitlets validates
        # them like any other trait value.
        if core_config is not None:
            kwargs['core_config'] = core_config
        if logger is not None:
            kwargs['logger'] = logger

        # use the default if app_dir is empty
        if 'app_dir' in kwargs and not kwargs['app_dir']:
            kwargs.pop('app_dir')

        super(AppOptions, self).__init__(**kwargs)

    # Defaults to get_app_dir() via the dynamic default below.
    app_dir = Unicode(help='The application directory')

    use_sys_dir = Bool(
        True,
        help=('Whether to shadow the default app_dir if that is set to a '
             'non-default value'))

    logger = Instance(logging.Logger, help='The logger to use')

    core_config = Instance(CoreConfig, help='Configuration for core data')

    kill_event = Instance(Event, args=(), help='Event for aborting call')

    labextensions_path = List(Unicode(), help='The paths to look in for prebuilt JupyterLab extensions')

    registry = Unicode(help="NPM packages registry URL")

    splice_source = Bool(False, help="Splice source packages into app directory.")

    @default('logger')
    def _default_logger(self):
        return logging.getLogger('jupyterlab')

    # These defaults need to be federated to pick up
    # any changes to env vars:
    @default('app_dir')
    def _default_app_dir(self):
        return get_app_dir()

    @default('core_config')
    def _default_core_config(self):
        return CoreConfig()

    @default('registry')
    def _default_registry(self):
        # Fall back to the standard Yarn registry when the user's yarn
        # config does not specify one.
        config = _yarn_config(self.logger)["yarn config"]
        return config.get("registry", YARN_DEFAULT_REGISTRY)
364
365
def _ensure_options(options):
    """Coerce *options* into an :class:`AppOptions` instance.

    ``None`` yields a default ``AppOptions``; an existing ``AppOptions``
    (or subclass) instance is passed through; anything else is treated as
    a mapping of keyword arguments (deprecated usage).
    """
    if options is None:
        return AppOptions()
    # isinstance is the idiomatic (and equivalent) form of
    # issubclass(options.__class__, AppOptions).
    if isinstance(options, AppOptions):
        return options
    return AppOptions(**options)
374
375
def watch(app_options=None):
    """Watch the application.

    Parameters
    ----------
    app_options: :class:`AppOptions`, optional
        The application options.

    Returns
    -------
    A list of processes to run asynchronously.
    """
    options = _ensure_options(app_options)
    _node_check(options.logger)
    handler = _AppHandler(options)

    # When splicing source packages, watch them alongside the app.
    package_procs = (watch_packages(options.logger)
                     if options.splice_source else [])

    return package_procs + handler.watch()
398
399
400
def install_extension(extension, app_options=None, pin=None):
    """Install an extension package into JupyterLab.

    The extension is first validated.

    Returns `True` if a rebuild is recommended, `False` otherwise.
    """
    options = _ensure_options(app_options)
    _node_check(options.logger)
    return _AppHandler(options).install_extension(extension, pin=pin)
412
413
def uninstall_extension(name=None, app_options=None, all_=False):
    """Uninstall an extension by name or path.

    Returns `True` if a rebuild is recommended, `False` otherwise.
    """
    options = _ensure_options(app_options)
    _node_check(options.logger)
    handler = _AppHandler(options)
    if all_ is True:
        return handler.uninstall_all_extensions()
    return handler.uninstall_extension(name)
425
426
def update_extension(name=None, all_=False, app_dir=None, app_options=None):
    """Update an extension by name, or all extensions.

    Either `name` must be given as a string, or `all_` must be `True`.
    If `all_` is `True`, the value of `name` is ignored.
    The `app_dir` argument is unused here and kept for backward
    compatibility.

    Returns `True` if a rebuild is recommended, `False` otherwise.
    """
    options = _ensure_options(app_options)
    _node_check(options.logger)
    handler = _AppHandler(options)
    if all_ is True:
        return handler.update_all_extensions()
    return handler.update_extension(name)
439
440
def clean(app_options=None):
    """Clean the JupyterLab application directory."""
    options = _ensure_options(app_options)
    # Constructing the handler validates the app dir as a side effect.
    _AppHandler(options)
    logger = options.logger
    app_dir = options.app_dir

    logger.info('Cleaning %s...', app_dir)
    if app_dir == pjoin(HERE, 'dev'):
        raise ValueError('Cannot clean the dev app')
    if app_dir == pjoin(HERE, 'core'):
        raise ValueError('Cannot clean the core app')

    if getattr(options, 'all', False):
        logger.info('Removing everything in %s...', app_dir)
        _rmtree_star(app_dir, logger)
    else:
        # Remove only the sub-directories the caller enabled via options.
        for name in ['extensions', 'settings', 'staging', 'static']:
            if not getattr(options, name):
                continue
            target = pjoin(app_dir, name)
            if osp.exists(target):
                logger.info('Removing %s...', name)
                _rmtree(target, logger)
            else:
                logger.info('%s not present, skipping...', name)

    logger.info('Success!')
    if getattr(options, 'all', False) or getattr(options, 'extensions', False):
        logger.info('All of your extensions have been removed, and will need to be reinstalled')
472
473
def build(name=None, version=None, static_url=None,
          kill_event=None,
          clean_staging=False, app_options=None, production=True, minimize=True):
    """Build the JupyterLab application."""
    options = _ensure_options(app_options)
    _node_check(options.logger)
    handler = _AppHandler(options)
    return handler.build(
        name=name, version=version, static_url=static_url,
        production=production, minimize=minimize,
        clean_staging=clean_staging)
484
485
def get_app_info(app_options=None):
    """Get a dictionary of information about the app."""
    app_handler = _AppHandler(app_options)
    # Populate the 'disabled' portion of the info before returning it.
    app_handler._ensure_disabled_info()
    return app_handler.info
492
493
def enable_extension(extension, app_options=None, level='sys_prefix'):
    """Enable a JupyterLab extension.

    Returns `True` if a rebuild is recommended, `False` otherwise.
    """
    # toggle_extension(..., False) clears the disabled flag.
    return _AppHandler(app_options).toggle_extension(extension, False, level=level)
501
502
def disable_extension(extension, app_options=None, level='sys_prefix'):
    """Disable a JupyterLab package.

    Returns `True` if a rebuild is recommended, `False` otherwise.
    """
    # toggle_extension(..., True) sets the disabled flag.
    return _AppHandler(app_options).toggle_extension(extension, True, level=level)
510
511
def check_extension(extension, installed=False, app_options=None):
    """Check if a JupyterLab extension is enabled or disabled."""
    return _AppHandler(app_options).check_extension(extension, installed)
517
518
def build_check(app_options=None):
    """Determine whether JupyterLab should be built.

    Returns a list of messages.
    """
    options = _ensure_options(app_options)
    _node_check(options.logger)
    return _AppHandler(options).build_check()
528
529
def list_extensions(app_options=None):
    """List the extensions."""
    return _AppHandler(app_options).list_extensions()
535
536
def link_package(path, app_options=None):
    """Link a package against the JupyterLab build.

    Returns `True` if a rebuild is recommended, `False` otherwise.
    """
    return _AppHandler(app_options).link_package(path)
544
545
def unlink_package(package, app_options=None):
    """Unlink a package from JupyterLab by path or name.

    Returns `True` if a rebuild is recommended, `False` otherwise.
    """
    return _AppHandler(app_options).unlink_package(package)
553
554
def get_app_version(app_options=None):
    """Get the application version."""
    return _AppHandler(app_options).info['version']
559
560
def get_latest_compatible_package_versions(names, app_options=None):
    """Get the latest compatible version of a list of packages."""
    return _AppHandler(app_options).latest_compatible_package_versions(names)
566
567
def read_package(target):
    """Read the package data in a given target tarball.

    Parameters
    ----------
    target: str
        Path to the tarball (as produced by ``npm pack``).

    Returns
    -------
    The parsed ``package/package.json`` data, augmented with a
    ``jupyterlab_extracted_files`` key listing the archive members
    relative to the ``package/`` prefix.
    """
    # Use a context manager so the tarfile is closed even when the archive
    # is malformed or package.json is missing/invalid.
    with tarfile.open(target, "r") as tar:
        f = tar.extractfile('package/package.json')
        data = json.loads(f.read().decode('utf8'))
        data['jupyterlab_extracted_files'] = [
            member.name[len('package/'):] for member in tar.getmembers()
        ]
    return data
579
580
581# ----------------------------------------------------------------------
582# Implementation details
583# ----------------------------------------------------------------------
584
585
586class _AppHandler(object):
587
588    def __init__(self, options):
589        """Create a new _AppHandler object
590        """
591        options = _ensure_options(options)
592        self._options = options
593        self.app_dir = options.app_dir
594        self.sys_dir = get_app_dir() if options.use_sys_dir else self.app_dir
595        self.logger = options.logger
596        # Make a deep copy of the core data so we don't influence the original copy
597        self.core_data = deepcopy(options.core_config._data)
598        self.labextensions_path = options.labextensions_path
599        self.kill_event = options.kill_event
600        self.registry = options.registry
601
602        # Do this last since it relies on other attributes
603        self.info = self._get_app_info()
604
605
606
    def install_extension(self, extension, existing=None, pin=None):
        """Install an extension package into JupyterLab.

        The extension is first validated.

        Parameters
        ----------
        extension: str
            The npm package name, local path, or URL of the extension.
        existing: optional
            Unused; kept for backward compatibility.
        pin: str, optional
            An alias to pin the package to (see ``PIN_PREFIX``).

        Returns `True` if a rebuild is recommended, `False` otherwise.
        """
        extension = _normalize_path(extension)
        extensions = self.info['extensions']

        # Check for a core extensions.
        if extension in self.info['core_extensions']:
            # "Installing" a core extension just removes it from the
            # uninstalled list in the build config.
            config = self._read_build_config()
            uninstalled = config.get('uninstalled_core_extensions', [])
            if extension in uninstalled:
                self.logger.info('Installing core extension %s' % extension)
                uninstalled.remove(extension)
                config['uninstalled_core_extensions'] = uninstalled
                self._write_build_config(config)
                return True
            return False

        # Create the app dirs if needed.
        self._ensure_app_dirs()

        # Install the package using a temporary directory.
        with TemporaryDirectory() as tempdir:
            info = self._install_extension(extension, tempdir, pin=pin)

        name = info['name']

        # Local directories get name mangled and stored in metadata.
        if info['is_dir']:
            config = self._read_build_config()
            local = config.setdefault('local_extensions', dict())
            local[name] = info['source']
            self._write_build_config(config)

        # Remove an existing extension with the same name and different path
        if name in extensions:
            other = extensions[name]
            if other['path'] != info['path'] and other['location'] == 'app':
                os.remove(other['path'])

        return True
652
    def build(self, name=None, version=None, static_url=None,
              clean_staging=False, production=True, minimize=True):
        """Build the application.

        Parameters
        ----------
        name, version, static_url: optional
            Overrides passed through to ``_populate_staging``.
        clean_staging: bool, optional
            Whether to wipe the staging directory before populating it.
        production: bool or None, optional
            Build mode; ``None`` selects development mode automatically
            when linked packages or local extensions are present.
        minimize: bool, optional
            Whether to minimize the bundle (production builds only).

        Raises
        ------
        RuntimeError
            If dependency installation or the webpack build fails.
        """
        if production is None:
            production = not (self.info['linked_packages'] or self.info['local_extensions'])

        # Minimization only applies to production builds.
        if not production:
            minimize = False

        # If splicing, make sure the source packages are built
        if self._options.splice_source:
            ensure_node_modules(REPO_ROOT, logger=self.logger)
            self._run(['node', YARN_PATH, 'build:packages'], cwd=REPO_ROOT)

        info = ['production' if production else 'development']
        if production:
            info.append('minimized' if minimize else 'not minimized')
        self.logger.info(f'Building jupyterlab assets ({", ".join(info)})')

        # Set up the build directory.
        app_dir = self.app_dir

        self._populate_staging(
            name=name, version=version, static_url=static_url,
            clean=clean_staging
        )

        staging = pjoin(app_dir, 'staging')

        # Make sure packages are installed.
        ret = self._run(['node', YARN_PATH, 'install', '--non-interactive'], cwd=staging)
        if ret != 0:
            msg = 'npm dependencies failed to install'
            self.logger.debug(msg)
            raise RuntimeError(msg)

        # Build the app.
        dedupe_yarn(staging, self.logger)
        # The yarn script name encodes the mode, e.g. build:prod:minimize.
        command = f'build:{"prod" if production else "dev"}{":minimize" if minimize else ""}'
        ret = self._run(['node', YARN_PATH, 'run', command], cwd=staging)
        if ret != 0:
            msg = 'JupyterLab failed to build'
            self.logger.debug(msg)
            raise RuntimeError(msg)
698
699    def watch(self):
700        """Start the application watcher and then run the watch in
701        the background.
702        """
703        staging = pjoin(self.app_dir, 'staging')
704
705        self._populate_staging()
706
707        # Make sure packages are installed.
708        self._run(['node', YARN_PATH, 'install'], cwd=staging)
709        dedupe_yarn(staging, self.logger)
710
711        proc = WatchHelper(['node', YARN_PATH, 'run', 'watch'],
712                           cwd=pjoin(self.app_dir, 'staging'),
713                           startup_regex=WEBPACK_EXPECT,
714                           logger=self.logger)
715        return [proc]
716
    def list_extensions(self):
        """Print an output of the extensions.

        Logs, in order: version, federated (prebuilt) extensions,
        source extensions, local extensions, linked packages,
        uninstalled core extensions, disabled extensions, shadowing
        problems, and finally any build recommendations.
        """
        self._ensure_disabled_info()
        logger = self.logger
        info = self.info

        logger.info('JupyterLab v%s' % info['version'])

        # Compatibility errors are only needed when something is installed.
        if info['federated_extensions'] or info['extensions']:
            info['compat_errors'] = self._get_extension_compat()

        if info['federated_extensions']:
            self._list_federated_extensions()

        if info['extensions']:
            logger.info('Other labextensions (built into JupyterLab)')
            self._list_extensions(info, 'app')
            self._list_extensions(info, 'sys')

        local = info['local_extensions']
        if local:
            logger.info('\n   local extensions:')
            for name in sorted(local):
                logger.info('        %s: %s' % (name, local[name]))

        linked_packages = info['linked_packages']
        if linked_packages:
            logger.info('\n   linked packages:')
            for key in sorted(linked_packages):
                source = linked_packages[key]['source']
                logger.info('        %s: %s' % (key, source))

        uninstalled_core = info['uninstalled_core']
        if uninstalled_core:
            logger.info('\nUninstalled core extensions:')
            [logger.info('    %s' % item) for item in sorted(uninstalled_core)]

        all_exts = list(info['federated_extensions']) + list(info['extensions']) + list(info['core_extensions'])
        # Ignore disabled extensions that are not installed
        # (entries may be "<extension>" or "<extension>:<plugin>").
        disabled = [i for i in info['disabled'] if i.partition(':')[0] in all_exts]
        if disabled:
            logger.info('\nDisabled extensions:')
            for item in sorted(disabled):
                # Show that all plugins will be disabled if the whole extension matches
                if item in all_exts:
                    item += ' (all plugins)'
                logger.info('    %s' % item)

        # Here check if modules are improperly shadowed
        # (a prebuilt extension hiding a newer source extension).
        improper_shadowed = []
        for ext_name in self.info['shadowed_exts']:
           source_version = self.info['extensions'][ext_name]['version']
           prebuilt_version = self.info['federated_extensions'][ext_name]['version']
           if not gte(prebuilt_version, source_version, True):
               improper_shadowed.append(ext_name)

        if improper_shadowed:
            logger.info('\nThe following source extensions are overshadowed by older prebuilt extensions:')
            [logger.info('    %s' % name) for name in sorted(improper_shadowed)]

        messages = self.build_check(fast=True)
        if messages:
            logger.info('\nBuild recommended, please run `jupyter lab build`:')
            [logger.info('    %s' % item) for item in messages]
782
783    def build_check(self, fast=False):
784        """Determine whether JupyterLab should be built.
785
786        Returns a list of messages.
787        """
788        app_dir = self.app_dir
789        local = self.info['local_extensions']
790        linked = self.info['linked_packages']
791        messages = []
792
793        # Check for no application.
794        pkg_path = pjoin(app_dir, 'static', 'package.json')
795        if not osp.exists(pkg_path):
796            return ['No built application']
797
798        static_data = self.info['static_data']
799        old_jlab = static_data['jupyterlab']
800        old_deps = static_data.get('dependencies', dict())
801
802        # Look for mismatched version.
803        static_version = old_jlab.get('version', '')
804        if not static_version.endswith('-spliced'):
805            core_version = old_jlab['version']
806            if Version(static_version) != Version(core_version):
807                msg = 'Version mismatch: %s (built), %s (current)'
808                return [msg % (static_version, core_version)]
809
810        shadowed_exts = self.info['shadowed_exts']
811
812        # Look for mismatched extensions.
813        new_package = self._get_package_template(silent=fast)
814        new_jlab = new_package['jupyterlab']
815        new_deps = new_package.get('dependencies', dict())
816
817        for ext_type in ['extensions', 'mimeExtensions']:
818            # Extensions that were added.
819            for ext in new_jlab[ext_type]:
820                if ext in shadowed_exts:
821                    continue
822                if ext not in old_jlab[ext_type]:
823                    messages.append('%s needs to be included in build' % ext)
824
825            # Extensions that were removed.
826            for ext in old_jlab[ext_type]:
827                if ext in shadowed_exts:
828                    continue
829                if ext not in new_jlab[ext_type]:
830                    messages.append('%s needs to be removed from build' % ext)
831
832        # Look for mismatched dependencies
833        src_pkg_dir = pjoin(REPO_ROOT, 'packages')
834        for (pkg, dep) in new_deps.items():
835            if old_deps.get(pkg, '').startswith(src_pkg_dir):
836                continue
837            if pkg not in old_deps:
838                continue
839            # Skip local and linked since we pick them up separately.
840            if pkg in local or pkg in linked:
841                continue
842            if old_deps[pkg] != dep:
843                msg = '%s changed from %s to %s'
844                messages.append(msg % (pkg, old_deps[pkg], new_deps[pkg]))
845
846        # Look for updated local extensions.
847        for (name, source) in local.items():
848            if fast or name in shadowed_exts:
849                continue
850            dname = pjoin(app_dir, 'extensions')
851            if self._check_local(name, source, dname):
852                messages.append('%s content changed' % name)
853
854        # Look for updated linked packages.
855        for (name, item) in linked.items():
856            if fast or name in shadowed_exts:
857                continue
858            dname = pjoin(app_dir, 'staging', 'linked_packages')
859            if self._check_local(name, item['source'], dname):
860                messages.append('%s content changed' % name)
861
862        return messages
863
864    def uninstall_extension(self, name):
865        """Uninstall an extension by name.
866
867        Returns `True` if a rebuild is recommended, `False` otherwise.
868        """
869        info = self.info
870        logger = self.logger
871
872        if name in info['federated_extensions']:
873            if info['federated_extensions'][name].get('install', dict()).get('uninstallInstructions', None):
874                logger.error('JupyterLab cannot uninstall this extension. %s' % info['federated_extensions'][name]['install']['uninstallInstructions'])
875            else:
876                logger.error('JupyterLab cannot uninstall %s since it was installed outside of JupyterLab. Use the same method used to install this extension to uninstall this extension.' % name)
877            return False
878
879        # Allow for uninstalled core extensions.
880        if name in info['core_extensions']:
881            config = self._read_build_config()
882            uninstalled = config.get('uninstalled_core_extensions', [])
883            if name not in uninstalled:
884                logger.info('Uninstalling core extension %s' % name)
885                uninstalled.append(name)
886                config['uninstalled_core_extensions'] = uninstalled
887                self._write_build_config(config)
888                return True
889            return False
890
891        local = info['local_extensions']
892
893        for (extname, data) in info['extensions'].items():
894            path = data['path']
895            if extname == name:
896                msg = 'Uninstalling %s from %s' % (name, osp.dirname(path))
897                logger.info(msg)
898                os.remove(path)
899                # Handle local extensions.
900                if extname in local:
901                    config = self._read_build_config()
902                    data = config.setdefault('local_extensions', dict())
903                    del data[extname]
904                    self._write_build_config(config)
905                return True
906
907        logger.warn('No labextension named "%s" installed' % name)
908        return False
909
910    def uninstall_all_extensions(self):
911        """Uninstalls all extensions
912
913        Returns `True` if a rebuild is recommended, `False` otherwise
914        """
915        should_rebuild = False
916        for (extname, _) in self.info['extensions'].items():
917            uninstalled = self.uninstall_extension(extname)
918            should_rebuild = should_rebuild or uninstalled
919        return should_rebuild
920
921    def update_all_extensions(self):
922        """Update all non-local extensions.
923
924        Returns `True` if a rebuild is recommended, `False` otherwise.
925        """
926        should_rebuild = False
927        for (extname, _) in self.info['extensions'].items():
928            if extname in self.info['local_extensions']:
929                continue
930            updated = self._update_extension(extname)
931            # Rebuild if at least one update happens:
932            should_rebuild = should_rebuild or updated
933        return should_rebuild
934
935    def update_extension(self, name):
936        """Update an extension by name.
937
938        Returns `True` if a rebuild is recommended, `False` otherwise.
939        """
940        if name not in self.info['extensions']:
941            self.logger.warning('No labextension named "%s" installed' % name)
942            return False
943        return self._update_extension(name)
944
945    def _update_extension(self, name):
946        """Update an extension by name.
947
948        Returns `True` if a rebuild is recommended, `False` otherwise.
949        """
950        data = self.info['extensions'][name]
951        if data["alias_package_source"]:
952            self.logger.warn("Skipping updating pinned extension '%s'." % name)
953            return False
954        try:
955            latest = self._latest_compatible_package_version(name)
956        except URLError:
957            return False
958        if latest is None:
959            self.logger.warn('No compatible version found for %s!' % (name,))
960            return False
961        if latest == data['version']:
962            self.logger.info('Extension %r already up to date' % name)
963            return False
964        self.logger.info('Updating %s to version %s' % (name, latest))
965        return self.install_extension('%s@%s' % (name, latest))
966
967    def link_package(self, path):
968        """Link a package at the given path.
969
970        Returns `True` if a rebuild is recommended, `False` otherwise.
971        """
972        path = _normalize_path(path)
973        if not osp.exists(path) or not osp.isdir(path):
974            msg = 'Cannot install "%s" only link local directories'
975            raise ValueError(msg % path)
976
977        with TemporaryDirectory() as tempdir:
978            info = self._extract_package(path, tempdir)
979
980        messages = _validate_extension(info['data'])
981        if not messages:
982            return self.install_extension(path)
983
984        # Warn that it is a linked package.
985        self.logger.warning('Installing %s as a linked package because it does not have extension metadata:', path)
986        [self.logger.warning('   %s' % m) for m in messages]
987
988        # Add to metadata.
989        config = self._read_build_config()
990        linked = config.setdefault('linked_packages', dict())
991        linked[info['name']] = info['source']
992        self._write_build_config(config)
993
994        return True
995
996    def unlink_package(self, path):
997        """Unlink a package by name or at the given path.
998
999        A ValueError is raised if the path is not an unlinkable package.
1000
1001        Returns `True` if a rebuild is recommended, `False` otherwise.
1002        """
1003        path = _normalize_path(path)
1004        config = self._read_build_config()
1005        linked = config.setdefault('linked_packages', dict())
1006
1007        found = None
1008        for (name, source) in linked.items():
1009            if name == path or source == path:
1010                found = name
1011
1012        if found:
1013            del linked[found]
1014        else:
1015            local = config.setdefault('local_extensions', dict())
1016            for (name, source) in local.items():
1017                if name == path or source == path:
1018                    found = name
1019            if found:
1020                del local[found]
1021                path = self.info['extensions'][found]['path']
1022                os.remove(path)
1023
1024        if not found:
1025            raise ValueError('No linked package for %s' % path)
1026
1027        self._write_build_config(config)
1028        return True
1029
1030    def toggle_extension(self, extension, value, level='sys_prefix'):
1031        """Enable or disable a lab extension.
1032
1033        Returns `True` if a rebuild is recommended, `False` otherwise.
1034        """
1035        lab_config = LabConfig()
1036        app_settings_dir = osp.join(self.app_dir, 'settings')
1037
1038        page_config = get_static_page_config(app_settings_dir=app_settings_dir, logger=self.logger, level=level)
1039
1040        disabled = page_config.get('disabledExtensions', {})
1041        did_something = False
1042        is_disabled = disabled.get(extension, False)
1043        if value and not is_disabled:
1044            disabled[extension] = True
1045            did_something = True
1046        elif not value and is_disabled:
1047            disabled[extension] = False
1048            did_something = True
1049
1050        if did_something:
1051            page_config['disabledExtensions'] = disabled
1052            write_page_config(page_config, level=level)
1053        return did_something
1054
1055    def check_extension(self, extension, check_installed_only=False):
1056        """Check if a lab extension is enabled or disabled
1057        """
1058        self._ensure_disabled_info()
1059        info = self.info
1060
1061        if extension in info["core_extensions"]:
1062            return self._check_core_extension(
1063                extension, info, check_installed_only)
1064
1065        if extension in info["linked_packages"]:
1066            self.logger.info('%s:%s' % (extension, GREEN_ENABLED))
1067            return True
1068
1069        return self._check_common_extension(
1070            extension, info, check_installed_only)
1071
1072    def _check_core_extension(self, extension, info, check_installed_only):
1073        """Check if a core extension is enabled or disabled
1074        """
1075        if extension in info['uninstalled_core']:
1076            self.logger.info('%s:%s' % (extension, RED_X))
1077            return False
1078        if check_installed_only:
1079            self.logger.info('%s: %s' % (extension, GREEN_OK))
1080            return True
1081        if extension in info['disabled_core']:
1082            self.logger.info('%s: %s' % (extension, RED_DISABLED))
1083            return False
1084        self.logger.info('%s:%s' % (extension, GREEN_ENABLED))
1085        return True
1086
1087    def _check_common_extension(self, extension, info, check_installed_only):
1088        """Check if a common (non-core) extension is enabled or disabled
1089        """
1090        if extension not in info['extensions']:
1091            self.logger.info('%s:%s' % (extension, RED_X))
1092            return False
1093
1094        errors = self._get_extension_compat()[extension]
1095        if errors:
1096            self.logger.info('%s:%s (compatibility errors)' %
1097                             (extension, RED_X))
1098            return False
1099
1100        if check_installed_only:
1101            self.logger.info('%s: %s' % (extension, GREEN_OK))
1102            return True
1103
1104        if _is_disabled(extension, info['disabled']):
1105            self.logger.info('%s: %s' % (extension, RED_DISABLED))
1106            return False
1107
1108        self.logger.info('%s:%s' % (extension, GREEN_ENABLED))
1109        return True
1110
1111    def _get_app_info(self):
1112        """Get information about the app.
1113        """
1114
1115        info = dict()
1116        info['core_data'] = core_data = self.core_data
1117        info['extensions'] = extensions = self._get_extensions(core_data)
1118
1119        info['local_extensions'] = self._get_local_extensions()
1120        info['linked_packages'] = self._get_linked_packages()
1121        info['app_extensions'] = app = []
1122        info['sys_extensions'] = sys = []
1123        for (name, data) in extensions.items():
1124            data['is_local'] = name in info['local_extensions']
1125            if data['location'] == 'app':
1126                app.append(name)
1127            else:
1128                sys.append(name)
1129
1130        info['uninstalled_core'] = self._get_uninstalled_core_extensions()
1131
1132        info['static_data'] = _get_static_data(self.app_dir)
1133        app_data = info['static_data'] or core_data
1134        info['version'] = app_data['jupyterlab']['version']
1135        info['staticUrl'] = app_data['jupyterlab'].get('staticUrl', '')
1136
1137        info['sys_dir'] = self.sys_dir
1138        info['app_dir'] = self.app_dir
1139
1140        info['core_extensions'] = _get_core_extensions(self.core_data)
1141
1142        info['federated_extensions'] = get_federated_extensions(self.labextensions_path)
1143        info['shadowed_exts'] = [ext for ext in info['extensions'] if ext in info['federated_extensions']]
1144        return info
1145
1146    def _ensure_disabled_info(self):
1147        info = self.info
1148        if 'disabled' in info:
1149            return
1150
1151        labextensions_path = self.labextensions_path
1152        app_settings_dir = osp.join(self.app_dir, 'settings')
1153
1154        page_config = get_page_config(labextensions_path, app_settings_dir=app_settings_dir, logger=self.logger)
1155
1156        info['disabled'] = page_config.get('disabledExtensions', [])
1157
1158        disabled_core = []
1159        for key in info['core_extensions']:
1160            if key in info['disabled']:
1161                disabled_core.append(key)
1162
1163        info['disabled_core'] = disabled_core
1164
1165    def _populate_staging(self, name=None, version=None, static_url=None,
1166                          clean=False):
1167        """Set up the assets in the staging directory.
1168        """
1169        app_dir = self.app_dir
1170        staging = pjoin(app_dir, 'staging')
1171        if clean and osp.exists(staging):
1172            self.logger.info("Cleaning %s", staging)
1173            _rmtree(staging, self.logger)
1174
1175        self._ensure_app_dirs()
1176        if not version:
1177            version = self.info['core_data']['jupyterlab']['version']
1178
1179        splice_source = self._options.splice_source
1180        if splice_source:
1181            self.logger.debug('Splicing dev packages into app directory.')
1182            source_dir = DEV_DIR
1183            version = __version__ + '-spliced'
1184        else:
1185            source_dir = pjoin(HERE, 'staging')
1186
1187        # Look for mismatched version.
1188        pkg_path = pjoin(staging, 'package.json')
1189
1190        if osp.exists(pkg_path):
1191            with open(pkg_path) as fid:
1192                data = json.load(fid)
1193            if data['jupyterlab'].get('version', '') != version:
1194                _rmtree(staging, self.logger)
1195                os.makedirs(staging)
1196
1197        for fname in ['index.js', 'bootstrap.js', 'publicpath.js',
1198                      'webpack.config.js',
1199                      'webpack.prod.config.js',
1200                      'webpack.prod.minimize.config.js']:
1201            target = pjoin(staging, fname)
1202            shutil.copy(pjoin(source_dir, fname), target)
1203
1204        for fname in ['.yarnrc', 'yarn.js']:
1205            target = pjoin(staging, fname)
1206            shutil.copy(pjoin(HERE, 'staging', fname), target)
1207
1208        # Ensure a clean templates directory
1209        templates = pjoin(staging, 'templates')
1210        if osp.exists(templates):
1211            _rmtree(templates, self.logger)
1212
1213        try:
1214            shutil.copytree(pjoin(source_dir, 'templates'), templates)
1215        except shutil.Error as error:
1216            # `copytree` throws an error if copying to + from NFS even though
1217            # the copy is successful (see https://bugs.python.org/issue24564
1218            # and https://github.com/jupyterlab/jupyterlab/issues/5233)
1219
1220            real_error = '[Errno 22]' not in str(error) and '[Errno 5]' not in str(error)
1221            if real_error or not osp.exists(templates):
1222                raise
1223
1224        # Ensure a clean linked packages directory.
1225        linked_dir = pjoin(staging, 'linked_packages')
1226        if osp.exists(linked_dir):
1227            _rmtree(linked_dir, self.logger)
1228        os.makedirs(linked_dir)
1229
1230        # Template the package.json file.
1231        # Update the local extensions.
1232        extensions = self.info['extensions']
1233        removed = False
1234        for (key, source) in self.info['local_extensions'].items():
1235            # Handle a local extension that was removed.
1236            if key not in extensions:
1237                config = self._read_build_config()
1238                data = config.setdefault('local_extensions', dict())
1239                del data[key]
1240                self._write_build_config(config)
1241                removed = True
1242                continue
1243            dname = pjoin(app_dir, 'extensions')
1244            self._update_local(key, source, dname, extensions[key],
1245                               'local_extensions')
1246
1247        # Update the list of local extensions if any were removed.
1248        if removed:
1249            self.info['local_extensions'] = self._get_local_extensions()
1250
1251        # Update the linked packages.
1252        linked = self.info['linked_packages']
1253        for (key, item) in linked.items():
1254            dname = pjoin(staging, 'linked_packages')
1255            self._update_local(key, item['source'], dname, item,
1256                'linked_packages')
1257
1258        # Then get the package template.
1259        data = self._get_package_template()
1260        jlab = data['jupyterlab']
1261
1262        if version:
1263            jlab['version'] = version
1264
1265        if name:
1266            jlab['name'] = name
1267
1268        if static_url:
1269            jlab['staticUrl'] = static_url
1270
1271        # Handle splicing of packages
1272        if splice_source:
1273            # Splice workspace tree as linked dependencies
1274            for path in glob(pjoin(REPO_ROOT, 'packages', '*', 'package.json')):
1275                local_path = osp.dirname(osp.abspath(path))
1276                pkg_data = json.loads(Path(path).read_text(encoding='utf-8'))
1277                name = pkg_data['name']
1278                if name in data['dependencies']:
1279                    data['dependencies'][name] = local_path
1280                    jlab['linkedPackages'][name] = local_path
1281                if name in data['resolutions']:
1282                    data['resolutions'][name] = local_path
1283
1284            # splice the builder as well
1285            local_path = osp.abspath(pjoin(REPO_ROOT, 'builder'))
1286            data['devDependencies']['@jupyterlab/builder'] = local_path
1287            target = osp.join(staging, 'node_modules', '@jupyterlab', 'builder')
1288
1289            # Remove node_modules so it gets re-populated
1290            node_modules = pjoin(staging, 'node_modules')
1291            if osp.exists(node_modules):
1292                shutil.rmtree(node_modules, ignore_errors=True)
1293
1294        # Write the package file
1295        pkg_path = pjoin(staging, 'package.json')
1296        with open(pkg_path, 'w') as fid:
1297            json.dump(data, fid, indent=4)
1298
1299        # copy known-good yarn.lock if missing
1300        lock_path = pjoin(staging, 'yarn.lock')
1301        lock_template = pjoin(HERE, 'staging', 'yarn.lock')
1302        if self.registry != YARN_DEFAULT_REGISTRY:  # Replace on the fly the yarn repository see #3658
1303            with open(lock_template, encoding='utf-8') as f:
1304                template = f.read()
1305            template = template.replace(YARN_DEFAULT_REGISTRY, self.registry.strip("/"))
1306            with open(lock_path, 'w', encoding='utf-8') as f:
1307                f.write(template)
1308        elif not osp.exists(lock_path):
1309            shutil.copy(lock_template, lock_path)
1310            os.chmod(lock_path, stat.S_IWRITE | stat.S_IREAD)
1311
    def _get_package_template(self, silent=False):
        """Get the template for the staging package.json file.

        Parameters
        ----------
        silent: bool
            If `True`, suppress logging of per-extension compatibility
            errors (default `False`).
        """
        logger = self.logger
        # make a deep copy of the data so we don't influence the core data
        data = deepcopy(self.info['core_data'])
        local = self.info['local_extensions']
        linked = self.info['linked_packages']
        extensions = self.info['extensions']
        shadowed_exts = self.info['shadowed_exts']
        jlab = data['jupyterlab']

        def format_path(path):
            # Turn an absolute path into a `file:` reference relative to
            # the staging dir, using forward slashes.
            path = osp.relpath(path, pjoin(self.app_dir, 'staging'))
            path = 'file:' + path.replace(os.sep, '/')
            if os.name == 'nt':
                # Lowercase on Windows so paths compare consistently.
                path = path.lower()
            return path

        jlab['linkedPackages'] = dict()

        # Handle local extensions.
        for (key, source) in local.items():
            if key in shadowed_exts:
                continue
            jlab['linkedPackages'][key] = source
            data['resolutions'][key] = 'file:' + self.info['extensions'][key]['path']

        # Handle linked packages.
        for (key, item) in linked.items():
            if key in shadowed_exts:
                continue
            path = pjoin(self.app_dir, 'staging', 'linked_packages')
            path = pjoin(path, item['filename'])
            data['dependencies'][key] = format_path(path)
            jlab['linkedPackages'][key] = item['source']
            data['resolutions'][key] = format_path(path)

        data['jupyterlab']['extensionMetadata'] = dict()

        # Handle extensions
        compat_errors = self._get_extension_compat()
        for (key, value) in extensions.items():
            # Reject incompatible extensions with a message.
            errors = compat_errors[key]
            if errors:
                if not silent:
                    _log_single_compat_errors(
                        logger, key, value['version'], errors
                    )
                continue

            data['dependencies'][key] = format_path(value['path'])

            jlab_data = value['jupyterlab']
            for item in ['extension', 'mimeExtension']:
                ext = jlab_data.get(item, False)
                if not ext:
                    continue
                if ext is True:
                    ext = ''
                jlab[item + 's'][key] = ext

                # Add metadata for the extension
                # NOTE(review): this assignment sits inside the loop over
                # ['extension', 'mimeExtension'], so metadata is recorded
                # only for packages that declare at least one of them —
                # presumably intentional, but worth confirming.
                data['jupyterlab']['extensionMetadata'][key] = jlab_data

        # Handle uninstalled core extensions.
        for item in self.info['uninstalled_core']:
            if item in jlab['extensions']:
                data['jupyterlab']['extensions'].pop(item)
            elif item in jlab['mimeExtensions']:
                data['jupyterlab']['mimeExtensions'].pop(item)
            # Remove from dependencies as well.
            if item in data['dependencies']:
                data['dependencies'].pop(item)

        return data
1389
1390    def _check_local(self, name, source, dname):
1391        """Check if a local package has changed.
1392
1393        `dname` is the directory name of existing package tar archives.
1394        """
1395        # Extract the package in a temporary directory.
1396        with TemporaryDirectory() as tempdir:
1397            info = self._extract_package(source, tempdir)
1398            # Test if the file content has changed.
1399            # This relies on `_extract_package` adding the hashsum
1400            # to the filename, allowing a simple exist check to
1401            # compare the hash to the "cache" in dname.
1402            target = pjoin(dname, info['filename'])
1403            return not osp.exists(target)
1404
1405    def _update_local(self, name, source, dname, data, dtype):
1406        """Update a local dependency.  Return `True` if changed.
1407        """
1408        # Extract the package in a temporary directory.
1409        existing = data['filename']
1410        if not osp.exists(pjoin(dname, existing)):
1411            existing = ''
1412
1413        with TemporaryDirectory() as tempdir:
1414            info = self._extract_package(source, tempdir)
1415
1416            # Bail if the file content has not changed.
1417            if info['filename'] == existing:
1418                return existing
1419
1420            shutil.move(info['path'], pjoin(dname, info['filename']))
1421
1422        # Remove the previous tarball and return the new file name.
1423        if existing:
1424            os.remove(pjoin(dname, existing))
1425
1426        data['filename'] = info['filename']
1427        data['path'] = pjoin(data['tar_dir'], data['filename'])
1428        return info['filename']
1429
1430    def _get_extensions(self, core_data):
1431        """Get the extensions for the application.
1432        """
1433        app_dir = self.app_dir
1434        extensions = dict()
1435
1436        # Get system level packages.
1437        sys_path = pjoin(self.sys_dir, 'extensions')
1438        app_path = pjoin(self.app_dir, 'extensions')
1439
1440        extensions = self._get_extensions_in_dir(self.sys_dir, core_data)
1441
1442        # Look in app_dir if different.
1443        app_path = pjoin(app_dir, 'extensions')
1444        if app_path == sys_path or not osp.exists(app_path):
1445            return extensions
1446
1447        extensions.update(self._get_extensions_in_dir(app_dir, core_data))
1448
1449        return extensions
1450
1451    def _get_extensions_in_dir(self, dname, core_data):
1452        """Get the extensions in a given directory.
1453        """
1454        extensions = dict()
1455        location = 'app' if dname == self.app_dir else 'sys'
1456        for target in glob(pjoin(dname, 'extensions', '*.tgz')):
1457            data = read_package(target)
1458            deps = data.get('dependencies', dict())
1459            name = data['name']
1460            jlab = data.get('jupyterlab', dict())
1461            path = osp.abspath(target)
1462
1463            filename = osp.basename(target)
1464            if filename.startswith(PIN_PREFIX):
1465                alias = filename[len(PIN_PREFIX):-len(".tgz")]
1466            else:
1467                alias = None
1468            url = get_package_url(data)
1469            extensions[alias or name] = dict(path=path,
1470                                    filename=osp.basename(path),
1471                                    url=url,
1472                                    version=data['version'],
1473                                    # Only save the package name if the extension name is an alias
1474                                    alias_package_source=name if alias else None,
1475                                    jupyterlab=jlab,
1476                                    dependencies=deps,
1477                                    tar_dir=osp.dirname(path),
1478                                    location=location)
1479        return extensions
1480
1481    def _get_extension_compat(self):
1482        """Get the extension compatibility info.
1483        """
1484        compat = dict()
1485        core_data = self.info['core_data']
1486        seen = set()
1487        for (name, data) in self.info['federated_extensions'].items():
1488            deps = data['dependencies']
1489            compat[name] = _validate_compatibility(name, deps, core_data)
1490            seen.add(name)
1491        for (name, data) in self.info['extensions'].items():
1492            if name in seen:
1493                continue
1494            deps = data['dependencies']
1495            compat[name] = _validate_compatibility(name, deps, core_data)
1496        return compat
1497
1498    def _get_local_extensions(self):
1499        """Get the locally installed extensions.
1500        """
1501        return self._get_local_data('local_extensions')
1502
1503    def _get_linked_packages(self):
1504        """Get the linked packages.
1505        """
1506        info = self._get_local_data('linked_packages')
1507        dname = pjoin(self.app_dir, 'staging', 'linked_packages')
1508        for (name, source) in info.items():
1509            info[name] = dict(source=source, filename='', tar_dir=dname)
1510
1511        if not osp.exists(dname):
1512            return info
1513
1514        for path in glob(pjoin(dname, '*.tgz')):
1515            path = osp.abspath(path)
1516            data = read_package(path)
1517            name = data['name']
1518            if name not in info:
1519                self.logger.warn('Removing orphaned linked package %s' % name)
1520                os.remove(path)
1521                continue
1522            item = info[name]
1523            item['filename'] = osp.basename(path)
1524            item['path'] = path
1525            item['version'] = data['version']
1526            item['data'] = data
1527        return info
1528
1529    def _get_uninstalled_core_extensions(self):
1530        """Get the uninstalled core extensions.
1531        """
1532        config = self._read_build_config()
1533        return config.get('uninstalled_core_extensions', [])
1534
1535    def _ensure_app_dirs(self):
1536        """Ensure that the application directories exist"""
1537        dirs = ['extensions', 'settings', 'staging', 'schemas', 'themes']
1538        for dname in dirs:
1539            path = pjoin(self.app_dir, dname)
1540            if not osp.exists(path):
1541                try:
1542                    os.makedirs(path)
1543                except OSError as e:
1544                    if e.errno != errno.EEXIST:
1545                        raise
1546
1547    def _list_extensions(self, info, ext_type):
1548        """List the extensions of a given type.
1549        """
1550        self._ensure_disabled_info()
1551        logger = self.logger
1552        names = info['%s_extensions' % ext_type]
1553        if not names:
1554            return
1555
1556        dname = info['%s_dir' % ext_type]
1557
1558        error_accumulator = {}
1559
1560        logger.info('   %s dir: %s' % (ext_type, dname))
1561        for name in sorted(names):
1562            if name in info['federated_extensions']:
1563                continue
1564            data = info['extensions'][name]
1565            version = data['version']
1566            errors = info['compat_errors'][name]
1567            extra = ''
1568            if _is_disabled(name, info['disabled']):
1569                extra += ' %s' % RED_DISABLED
1570            else:
1571                extra += ' %s' % GREEN_ENABLED
1572            if errors:
1573                extra += ' %s' % RED_X
1574            else:
1575                extra += ' %s' % GREEN_OK
1576            if data['is_local']:
1577                extra += '*'
1578            # If we have the package name in the data, this means this extension's name is the alias name
1579            alias_package_source = data['alias_package_source']
1580            if alias_package_source:
1581                logger.info('        %s %s v%s%s' % (name, alias_package_source, version, extra))
1582            else:
1583                logger.info('        %s v%s%s' % (name, version, extra))
1584            if errors:
1585                error_accumulator[name] = (version, errors)
1586
1587        # Write all errors at end:
1588        _log_multiple_compat_errors(logger, error_accumulator)
1589
1590        # Write a blank line separator
1591        logger.info('')
1592
    def _list_federated_extensions(self):
        """Log the installed federated (prebuilt) extensions, grouped by
        their install directory, then log accumulated compatibility errors.
        """
        self._ensure_disabled_info()
        info = self.info
        logger = self.logger

        # name -> (version, errors) for everything that failed compat checks.
        error_accumulator = {}

        # Track which labextensions paths actually contain extensions.
        ext_dirs = dict((p, False) for p in self.labextensions_path)
        for value in info['federated_extensions'].values():
            ext_dirs[value['ext_dir']] = True

        for ext_dir, has_exts in ext_dirs.items():
            if not has_exts:
                continue
            logger.info(ext_dir)
            for name in info['federated_extensions']:
                data = info['federated_extensions'][name]
                if data['ext_dir'] != ext_dir:
                    continue
                version = data['version']
                errors = info['compat_errors'][name]
                extra = ''
                # Enabled/disabled marker.
                if _is_disabled(name, info['disabled']):
                    extra += ' %s' % RED_DISABLED
                else:
                    extra += ' %s' % GREEN_ENABLED
                # Compatibility marker.
                if errors:
                    extra += ' %s' % RED_X
                else:
                    extra += ' %s' % GREEN_OK
                # '*' marks locally installed extensions.
                if data['is_local']:
                    extra += '*'

                # Show how the extension was installed, when recorded.
                install = data.get('install')
                if install:
                    extra += ' (%s, %s)' % (
                        install['packageManager'],
                        install['packageName']
                    )
                logger.info('        %s v%s%s' % (name, version, extra))
                if errors:
                    error_accumulator[name] = (version, errors)
            # Add a spacer line after
            logger.info('')

        # Write all errors at end:
        _log_multiple_compat_errors(logger, error_accumulator)
1640
1641    def _read_build_config(self):
1642        """Get the build config data for the app dir.
1643        """
1644        target = pjoin(self.app_dir, 'settings', 'build_config.json')
1645        if not osp.exists(target):
1646            return {}
1647        else:
1648            with open(target) as fid:
1649                return json.load(fid)
1650
1651    def _write_build_config(self, config):
1652        """Write the build config to the app dir.
1653        """
1654        self._ensure_app_dirs()
1655        target = pjoin(self.app_dir, 'settings', 'build_config.json')
1656        with open(target, 'w') as fid:
1657            json.dump(config, fid, indent=4)
1658
1659    def _get_local_data(self, source):
1660        """Get the local data for extensions or linked packages.
1661        """
1662        config = self._read_build_config()
1663
1664        data = config.setdefault(source, dict())
1665        dead = []
1666        for (name, source) in data.items():
1667            if not osp.exists(source):
1668                dead.append(name)
1669
1670        for name in dead:
1671            link_type = source.replace('_', ' ')
1672            msg = '**Note: Removing dead %s "%s"' % (link_type, name)
1673            self.logger.warn(msg)
1674            del data[name]
1675
1676        if dead:
1677            self._write_build_config(config)
1678
1679        return data
1680
    def _install_extension(self, extension, tempdir, pin=None):
        """Install an extension with validation and return the name and path.

        Parameters
        ----------
        extension: str
            Package spec (optionally with version) or a local path.
        tempdir: str
            Scratch directory used for ``npm pack``.
        pin: str, optional
            Alias used to rename the packed tarball as ``pin@<alias>``.

        Returns the info dict from ``_extract_package`` with ``path``
        pointing at the final location in the app extensions directory.
        Raises ValueError when the package is invalid or incompatible
        and no compatible fallback version can be installed.
        """
        info = self._extract_package(extension, tempdir, pin=pin)
        data = info['data']

        # Check for compatible version unless:
        # - A specific version was requested (@ in name,
        #   but after first char to allow for scope marker).
        # - Package is locally installed.
        allow_fallback = '@' not in extension[1:] and not info['is_dir']
        name = info['name']

        # Verify that the package is an extension.
        messages = _validate_extension(data)
        if messages:
            msg = '"%s" is not a valid extension:\n%s'
            msg = msg % (extension, '\n'.join(messages))
            if allow_fallback:
                try:
                    # NOTE(review): when a compatible fallback version is
                    # found, the invalid-extension error is dropped and
                    # processing continues with the original package --
                    # confirm this is intended.
                    version = self._latest_compatible_package_version(name)
                except URLError:
                    raise ValueError(msg)
            else:
                raise ValueError(msg)

        # Verify package compatibility.
        deps = data.get('dependencies', dict())
        errors = _validate_compatibility(extension, deps, self.core_data)
        if errors:
            msg = _format_compatibility_errors(
                data['name'], data['version'], errors
            )
            if allow_fallback:
                try:
                    version = self._latest_compatible_package_version(name)
                except URLError:
                    # We cannot add any additional information to error message
                    raise ValueError(msg)

                if version and name:
                    self.logger.debug('Incompatible extension:\n%s', name)
                    self.logger.debug('Found compatible version: %s', version)
                    # Recurse with the explicit compatible version pinned.
                    with TemporaryDirectory() as tempdir2:
                        return self._install_extension(
                            '%s@%s' % (name, version), tempdir2)

                # Extend message to better guide the user what to do:
                conflicts = '\n'.join(msg.splitlines()[2:])
                msg = ''.join((
                    self._format_no_compatible_package_version(name),
                    "\n\n",
                    conflicts))

            raise ValueError(msg)

        # Move the file to the app directory.
        target = pjoin(self.app_dir, 'extensions', info['filename'])
        if osp.exists(target):
            os.remove(target)

        shutil.move(info['path'], target)

        info['path'] = target
        return info
1746
    def _extract_package(self, source, tempdir, pin=None):
        """Call `npm pack` for an extension.

        The pack command will download the package tar if `source` is
        a package name, or run `npm pack` locally if `source` is a
        directory.

        Returns a dict with keys: source, is_dir, data (parsed package
        metadata), path (tarball inside ``tempdir``), filename, name,
        version, and -- for directory sources -- sha.
        Raises ValueError if ``npm pack`` exits non-zero.
        """
        is_dir = osp.exists(source) and osp.isdir(source)
        # A local source needs its node_modules installed before packing.
        if is_dir and not osp.exists(pjoin(source, 'node_modules')):
            self._run(['node', YARN_PATH, 'install'], cwd=source)

        info = dict(source=source, is_dir=is_dir)

        ret = self._run([which('npm'), 'pack', source], cwd=tempdir)
        if ret != 0:
            msg = '"%s" is not a valid npm package'
            raise ValueError(msg % source)

        # npm pack produces a single tarball in the scratch directory.
        path = glob(pjoin(tempdir, '*.tgz'))[0]
        info['data'] = read_package(path)
        if is_dir:
            # Fingerprint local packages so content changes are detectable.
            info['sha'] = sha = _tarsum(path)
            target = path.replace('.tgz', '-%s.tgz' % sha)
            shutil.move(path, target)
            info['path'] = target
        else:
            info['path'] = path
        if pin:
            # Rename to ``pin@<alias>.tgz`` for a stable pinned filename.
            old_path = info['path']
            new_path = pjoin(osp.dirname(old_path), '{}{}.tgz'.format(PIN_PREFIX, pin))
            shutil.move(old_path, new_path)
            info['path'] = new_path

        info['filename'] = osp.basename(info['path'])
        info['name'] = info['data']['name']
        info['version'] = info['data']['version']

        return info
1785
1786
    def _latest_compatible_package_version(self, name):
        """Get the latest compatible version of a package.

        Returns the version string, or None when no compatible,
        non-deprecated, valid-extension version exists or the registry
        is unreachable.
        """
        core_data = self.info['core_data']
        try:
            metadata = _fetch_package_metadata(self.registry, name, self.logger)
        except URLError:
            return
        versions = metadata.get('versions', {})

        # Sort pre-release first, as we will reverse the sort:
        def sort_key(key_value):
            return _semver_key(key_value[0], prerelease_first=True)

        # Walk versions newest-first; return the first acceptable one.
        for version, data in sorted(versions.items(),
                                    key=sort_key,
                                    reverse=True):
            deps = data.get('dependencies', {})
            errors = _validate_compatibility(name, deps, core_data)
            if not errors:
                # Found a compatible version
                # skip deprecated versions
                if 'deprecated' in data:
                    self.logger.debug(
                        'Disregarding compatible version of package as it is deprecated: %s@%s'
                        % (name, version)
                    )
                    continue
                # Verify that the version is a valid extension.
                with TemporaryDirectory() as tempdir:
                    info = self._extract_package(
                        '%s@%s' % (name, version), tempdir)
                if _validate_extension(info['data']):
                    # Invalid, do not consider other versions
                    return
                # Valid
                return version
1823
    def latest_compatible_package_versions(self, names):
        """Get the latest compatible versions of several packages

        Like _latest_compatible_package_version, but optimized for
        retrieving the latest version for several packages in one go.

        Returns a dict of name -> version for the packages where a
        compatible, valid-extension version was found.
        """
        core_data = self.info['core_data']

        keys = []
        for name in names:
            try:
                metadata = _fetch_package_metadata(self.registry, name, self.logger)
            except URLError:
                # Unreachable packages are silently skipped.
                continue
            versions = metadata.get('versions', {})

            # Sort pre-release first, as we will reverse the sort:
            def sort_key(key_value):
                return _semver_key(key_value[0], prerelease_first=True)

            for version, data in sorted(versions.items(),
                                        key=sort_key,
                                        reverse=True):

                # skip deprecated versions
                if 'deprecated' in data:
                    continue

                deps = data.get('dependencies', {})
                errors = _validate_compatibility(name, deps, core_data)
                if not errors:
                    # Found a compatible version
                    keys.append('%s@%s' % (name, version))
                    break  # break inner for


        versions = {}
        if not keys:
            return versions
        # Fetch all candidate tarballs with a single `npm pack` call.
        with TemporaryDirectory() as tempdir:
            ret = self._run([which('npm'), 'pack'] + keys, cwd=tempdir)
            if ret != 0:
                msg = '"%s" is not a valid npm package'
                raise ValueError(msg % keys)

            for key in keys:
                # Reconstruct npm's tarball naming: strip a leading '@',
                # then map remaining '@' and '/' to '-'
                # (e.g. @scope/pkg@1.0.0 -> scope-pkg-1.0.0.tgz).
                fname = key[0].replace('@', '') + key[1:].replace('@', '-').replace('/', '-') + '.tgz'
                data = read_package(osp.join(tempdir, fname))
                # Verify that the version is a valid extension.
                if not _validate_extension(data):
                    # Valid
                    versions[data['name']] = data['version']
        return versions
1877
    def _format_no_compatible_package_version(self, name):
        """Build an error message explaining why no compatible version of
        ``name`` exists, hinting whether JupyterLab is too old or too new.
        """
        core_data = self.info['core_data']
        # Whether lab version is too new:
        lab_newer_than_latest = False
        # Whether the latest version of the extension depend on a "future" version
        # of a singleton package (from the perspective of current lab version):
        latest_newer_than_lab = False
        try:
            metadata = _fetch_package_metadata(self.registry, name, self.logger)
        except URLError:
            # Registry unreachable: fall through with both flags False.
            pass
        else:
            versions = metadata.get('versions', {})

            # Sort pre-release first, as we will reverse the sort:
            def sort_key(key_value):
                return _semver_key(key_value[0], prerelease_first=True)

            store = tuple(sorted(versions.items(), key=sort_key, reverse=True))
            latest_deps = store[0][1].get('dependencies', {})
            core_deps = core_data['resolutions']
            singletons = core_data['jupyterlab']['singletonPackages']

            for (key, value) in latest_deps.items():
                if key in singletons:
                    # Drop prereleases in comparisons to allow extension authors
                    # to not have to update their versions for each
                    # Jupyterlab prerelease version.
                    # NOTE(review): _compare_ranges may return None, which
                    # would make ``c < 0`` raise TypeError -- confirm.
                    c = _compare_ranges(core_deps[key], value, drop_prerelease1=True)
                    lab_newer_than_latest = lab_newer_than_latest or c < 0
                    latest_newer_than_lab = latest_newer_than_lab or c > 0

        if lab_newer_than_latest:
            # All singleton deps in current version of lab are newer than those
            # in the latest version of the extension
            return ("The extension \"%s\" does not yet support the current version of "
                    "JupyterLab.\n" % name)


        parts = ["No version of {extension} could be found that is compatible with "
                 "the current version of JupyterLab."]
        if latest_newer_than_lab:
            parts.extend(("However, it seems to support a new version of JupyterLab.",
                          "Consider upgrading JupyterLab."))

        return " ".join(parts).format(extension=name)
1925
1926    def _run(self, cmd, **kwargs):
1927        """Run the command using our logger and abort callback.
1928
1929        Returns the exit code.
1930        """
1931        if self.kill_event.is_set():
1932            raise ValueError('Command was killed')
1933
1934        kwargs['logger'] = self.logger
1935        kwargs['kill_event'] = self.kill_event
1936        proc = ProgressProcess(cmd, **kwargs)
1937        return proc.wait()
1938
1939
def _node_check(logger):
    """Check for the existence of nodejs with the correct version.

    Delegates the version test to node-version-check.js. Raises
    ValueError with an installation hint when node is missing or
    the check fails; the original failure is chained as the cause.
    """
    node = which('node')
    try:
        output = subprocess.check_output([node, 'node-version-check.js'], cwd=HERE)
        logger.debug(output.decode('utf-8'))
    except Exception as e:
        data = CoreConfig()._data
        ver = data['engines']['node']
        msg = 'Please install nodejs %s before continuing. nodejs may be installed using conda or directly from the nodejs website.' % ver
        # Chain the original exception so the root cause is not lost.
        raise ValueError(msg) from e
1952
def _yarn_config(logger):
    """Get the yarn configuration.

    Returns
    -------
    {"yarn config": dict, "npm config": dict}; the subdictionaries are
    empty when the configuration could not be retrieved.
    """
    configuration = {"yarn config": {}, "npm config": {}}
    try:
        node = which('node')
    except ValueError:  # Node not found == user with no need for building jupyterlab
        logger.debug("NodeJS was not found. Yarn user configuration is ignored.")
        return configuration

    try:
        output_binary = subprocess.check_output([node, YARN_PATH, 'config', 'list', '--json'], stderr=subprocess.PIPE, cwd=HERE)
        output = output_binary.decode('utf-8')
        lines = iter(output.splitlines())
        try:
            # Yarn emits alternating {"type": "info"} / {"type": "inspect"}
            # JSON lines; pair them up as key -> data.
            for line in lines:
                info = json.loads(line)
                if info["type"] == "info":
                    key = info["data"]
                    inspect = json.loads(next(lines))
                    if inspect["type"] == "inspect":
                        configuration[key] = inspect["data"]
        except StopIteration:
            pass
        logger.debug("Yarn configuration loaded.")
    except subprocess.CalledProcessError as e:
        logger.error("Fail to get yarn configuration. {!s}{!s}".format(e.stderr.decode('utf-8'), e.output.decode('utf-8')))
    except Exception as e:
        logger.error("Fail to get yarn configuration. {!s}".format(e))
    # BUGFIX: return moved out of a ``finally`` block. ``return`` in
    # ``finally`` silently swallowed BaseExceptions (e.g.
    # KeyboardInterrupt) raised in the try body.
    return configuration
1988
1989
1990def _ensure_logger(logger=None):
1991    """Ensure that we have a logger"""
1992    return logger or logging.getLogger('jupyterlab')
1993
1994
1995def _normalize_path(extension):
1996    """Normalize a given extension if it is a path.
1997    """
1998    extension = osp.expanduser(extension)
1999    if osp.exists(extension):
2000        extension = osp.abspath(extension)
2001    return extension
2002
2003
2004def _rmtree(path, logger):
2005    """Remove a tree, logging errors"""
2006    def onerror(*exc_info):
2007        logger.debug('Error in shutil.rmtree', exc_info=exc_info)
2008
2009    shutil.rmtree(path, onerror=onerror)
2010
2011
2012def _unlink(path, logger):
2013    """Remove a file, logging errors"""
2014    try:
2015        os.unlink(path)
2016    except Exception:
2017        logger.debug('Error in os.unlink', exc_info=sys.exc_info())
2018
2019
def _rmtree_star(path, logger):
    """Empty the directory *path*: remove every file, link and subtree in it."""
    for entry in os.listdir(path):
        entry_path = osp.join(path, entry)
        if osp.isfile(entry_path) or osp.islink(entry_path):
            _unlink(entry_path, logger)
        elif osp.isdir(entry_path):
            _rmtree(entry_path, logger)
2028
2029
2030def _validate_extension(data):
2031    """Detect if a package is an extension using its metadata.
2032
2033    Returns any problems it finds.
2034    """
2035    jlab = data.get('jupyterlab', None)
2036    if jlab is None:
2037        return ['No `jupyterlab` key']
2038    if not isinstance(jlab, dict):
2039        return ['The `jupyterlab` key must be a JSON object']
2040    extension = jlab.get('extension', False)
2041    mime_extension = jlab.get('mimeExtension', False)
2042    themePath = jlab.get('themePath', '')
2043    schemaDir = jlab.get('schemaDir', '')
2044
2045    messages = []
2046    if not extension and not mime_extension:
2047        messages.append('No `extension` or `mimeExtension` key present')
2048
2049    if extension == mime_extension:
2050        msg = '`mimeExtension` and `extension` must point to different modules'
2051        messages.append(msg)
2052
2053    files = data['jupyterlab_extracted_files']
2054    main = data.get('main', 'index.js')
2055    if not main.endswith('.js'):
2056        main += '.js'
2057
2058    if extension is True:
2059        extension = main
2060    elif extension and not extension.endswith('.js'):
2061        extension += '.js'
2062
2063    if mime_extension is True:
2064        mime_extension = main
2065    elif mime_extension and not mime_extension.endswith('.js'):
2066        mime_extension += '.js'
2067
2068    if extension and extension not in files:
2069        messages.append('Missing extension module "%s"' % extension)
2070
2071    if mime_extension and mime_extension not in files:
2072        messages.append('Missing mimeExtension module "%s"' % mime_extension)
2073
2074    if themePath and not any(f.startswith(themePath) for f in files):
2075        messages.append('themePath is empty: "%s"' % themePath)
2076
2077    if schemaDir and not any(f.startswith(schemaDir) for f in files):
2078        messages.append('schemaDir is empty: "%s"' % schemaDir)
2079
2080    return messages
2081
2082
2083def _tarsum(input_file):
2084    """
2085    Compute the recursive sha sum of a tar file.
2086    """
2087    tar = tarfile.open(input_file, "r")
2088    chunk_size = 100 * 1024
2089    h = hashlib.new("sha1")
2090
2091    for member in tar:
2092        if not member.isfile():
2093            continue
2094        f = tar.extractfile(member)
2095        data = f.read(chunk_size)
2096        while data:
2097            h.update(data)
2098            data = f.read(chunk_size)
2099    return h.hexdigest()
2100
2101
2102def _get_static_data(app_dir):
2103    """Get the data for the app static dir.
2104    """
2105    target = pjoin(app_dir, 'static', 'package.json')
2106    if osp.exists(target):
2107        with open(target) as fid:
2108            return json.load(fid)
2109    else:
2110        return None
2111
2112
def _validate_compatibility(extension, deps, core_data):
    """Validate the compatibility of an extension.

    Returns a list of (package, lab_spec, ext_spec) tuples for each
    singleton dependency whose range does not overlap JupyterLab's.
    """
    core_deps = core_data['resolutions']
    singletons = core_data['jupyterlab']['singletonPackages']

    errors = []
    for pkg, ext_range in deps.items():
        if pkg not in singletons:
            continue
        # Drop prereleases in comparisons so extension authors do not
        # have to bump their ranges for every JupyterLab prerelease.
        if _test_overlap(core_deps[pkg], ext_range, drop_prerelease1=True) is False:
            errors.append((pkg, core_deps[pkg], ext_range))

    return errors
2131
2132
def _test_overlap(spec1, spec2, drop_prerelease1=False, drop_prerelease2=False):
    """Test whether two version specs overlap.

    Returns `None` if we cannot determine compatibility,
    otherwise whether there is an overlap
    """
    result = _compare_ranges(
        spec1, spec2,
        drop_prerelease1=drop_prerelease1,
        drop_prerelease2=drop_prerelease2,
    )
    return None if result is None else result == 0
2144
2145
def _compare_ranges(spec1, spec2, drop_prerelease1=False, drop_prerelease2=False):
    """Test whether two version specs overlap.

    Returns `None` if we cannot determine compatibility,
    otherwise return 0 if there is an overlap, 1 if
    spec1 is lower/older than spec2, and -1 if spec1
    is higher/newer than spec2.
    """
    # Test for overlapping semver ranges.
    r1 = Range(spec1, True)
    r2 = Range(spec2, True)

    # If either range is empty, we cannot verify.
    if not r1.range or not r2.range:
        return

    # Set return_value to a sentinel value
    return_value = False

    # r1.set may be a list of ranges if the range involved an ||, so we need to test for overlaps between each pair.
    for r1set, r2set in itertools.product(r1.set, r2.set):
        x1 = r1set[0].semver
        x2 = r1set[-1].semver
        y1 = r2set[0].semver
        y2 = r2set[-1].semver

        if x1.prerelease and drop_prerelease1:
            x1 = x1.inc('patch')

        if y1.prerelease and drop_prerelease2:
            y1 = y1.inc('patch')

        o1 = r1set[0].operator
        o2 = r2set[0].operator

        # We do not handle (<) specifiers.
        if (o1.startswith('<') or o2.startswith('<')):
            continue

        # Handle single value specifiers.
        lx = lte if x1 == x2 else lt
        ly = lte if y1 == y2 else lt
        gx = gte if x1 == x2 else gt
        # BUGFIX: gy must be derived from the y endpoints (mirroring ly);
        # the original tested ``x1 == x2`` here.
        gy = gte if y1 == y2 else gt

        # Handle unbounded (>) specifiers.
        def noop(x, y, z):
            return True

        if x1 == x2 and o1.startswith('>'):
            lx = noop
        if y1 == y2 and o2.startswith('>'):
            ly = noop

        # Check for overlap.
        if (gte(x1, y1, True) and ly(x1, y2, True) or
            gy(x2, y1, True) and ly(x2, y2, True) or
            gte(y1, x1, True) and lx(y1, x2, True) or
            gx(y2, x1, True) and lx(y2, x2, True)
        ):
            # if we ever find an overlap, we can return immediately
            return 0

        if gte(y1, x2, True):
            if return_value is False:
                # We can possibly return 1
                return_value = 1
            elif return_value == -1:
                # conflicting information, so we must return None
                return_value = None
            continue

        if gte(x1, y2, True):
            if return_value is False:
                return_value = -1
            elif return_value == 1:
                # conflicting information, so we must return None
                return_value = None
            continue

        raise AssertionError('Unexpected case comparing version ranges')

    if return_value is False:
        return_value = None
    return return_value
2231
2232
2233def _is_disabled(name, disabled=[]):
2234    """Test whether the package is disabled.
2235    """
2236    for pattern in disabled:
2237        if name == pattern:
2238            return True
2239        if re.compile(pattern).match(name) is not None:
2240            return True
2241    return False
2242
2243
def _format_compatibility_errors(name, version, errors):
    """Format a message for compatibility errors.

    Builds a small aligned table of conflicting dependency ranges.
    NOTE(review): the column widths are computed from the package name
    and lab-range strings but applied to different columns; preserved
    as-is so the output is unchanged.
    """
    rows = []
    col0 = 10
    col1 = 10
    for pkg, jlab_spec, ext_spec in errors:
        jlab_spec = str(Range(jlab_spec, True))
        ext_spec = str(Range(ext_spec, True))
        rows.append((pkg, jlab_spec, ext_spec))
        col0 = max(col0, len(pkg) + 1)
        col1 = max(col1, len(jlab_spec) + 1)

    msg = ''.join((
        '\n"%s@%s" is not compatible with the current JupyterLab' % (name, version),
        '\nConflicting Dependencies:\n',
        'JupyterLab'.ljust(col0),
        'Extension'.ljust(col1),
        'Package\n',
    ))

    for pkg, jlab_spec, ext_spec in rows:
        msg += jlab_spec.ljust(col0) + ext_spec.ljust(col1) + pkg + '\n'

    return msg
2269
2270
def _log_multiple_compat_errors(logger, errors_map):
    """Log compatibility errors for multiple extensions at once.

    Merely-outdated extensions are summarized in one warning; the rest
    get a full per-extension error table.
    """
    outdated = []
    others = []

    for name, (version, errors) in errors_map.items():
        if _compat_error_age(errors) > 0:
            outdated.append(name)
        else:
            others.append(name)

    if outdated:
        # Use logger.warning: ``warn`` is a deprecated alias.
        # Message typo fixed: "extension are" -> "extensions are".
        logger.warning('\n        '.join(
            ['\n   The following extensions are outdated:'] +
            outdated +
            ['\n   Consider running "jupyter labextension update --all" '
             'to check for updates.\n']
        ))

    for name in others:
        version, errors = errors_map[name]
        msg = _format_compatibility_errors(name, version, errors)
        logger.warning(msg + '\n')
2296
2297
def _log_single_compat_errors(logger, name, version, errors):
    """Log compatibility errors for a single extension."""
    # Use logger.warning: ``warn`` is a deprecated alias.
    if _compat_error_age(errors) > 0:
        logger.warning('The extension "%s" is outdated.\n', name)
    else:
        msg = _format_compatibility_errors(name, version, errors)
        logger.warning(msg + '\n')
2307
2308
def _compat_error_age(errors):
    """Compare all incompatibilities for an extension.

    Returns a number > 0 if all extensions are older than that supported by lab.
    Returns a number < 0 if all extensions are newer than that supported by lab.
    Returns 0 otherwise (i.e. a mix, or indeterminate).
    """
    # Do any extensions depend on too old lab packages?
    any_older = False
    # Do any extensions depend on too new lab packages?
    any_newer = False

    for _, jlab, ext in errors:
        # Drop prereleases in comparisons to allow extension authors
        # to not have to update their versions for each
        # Jupyterlab prerelease version.
        c = _compare_ranges(ext, jlab, drop_prerelease1=True)
        # BUGFIX: _compare_ranges may return None (indeterminate); the
        # original ``c < 0`` would raise TypeError on Python 3.
        if c is None:
            continue
        any_newer = any_newer or c < 0
        any_older = any_older or c > 0
    if any_older and not any_newer:
        return 1
    elif any_newer and not any_older:
        return -1
    return 0
2333
2334
2335def _get_core_extensions(core_data):
2336    """Get the core extensions.
2337    """
2338    data = core_data['jupyterlab']
2339    return list(data['extensions']) + list(data['mimeExtensions'])
2340
2341
2342def _semver_prerelease_key(prerelease):
2343    """Sort key for prereleases.
2344
2345    Precedence for two pre-release versions with the same
2346    major, minor, and patch version MUST be determined by
2347    comparing each dot separated identifier from left to
2348    right until a difference is found as follows:
2349    identifiers consisting of only digits are compare
2350    numerically and identifiers with letters or hyphens
2351    are compared lexically in ASCII sort order. Numeric
2352    identifiers always have lower precedence than non-
2353    numeric identifiers. A larger set of pre-release
2354    fields has a higher precedence than a smaller set,
2355    if all of the preceding identifiers are equal.
2356    """
2357    for entry in prerelease:
2358        if isinstance(entry, int):
2359            # Assure numerics always sort before string
2360            yield ('', entry)
2361        else:
2362            # Use ASCII compare:
2363            yield (entry,)
2364
2365
def _semver_key(version, prerelease_first=False):
    """A sort key-function for sorting semver version string.

    The default sorting order is ascending (0.x -> 1.x -> 2.x).

    If `prerelease_first`, pre-releases will come before
    ALL other semver keys (not just those with same version).
    I.e (1.0-pre, 2.0-pre -> 0.x -> 1.x -> 2.x).

    Otherwise it will sort in the standard way that it simply
    comes before any release with shared version string
    (0.x -> 1.0-pre -> 1.x -> 2.0-pre -> 2.x).
    """
    v = make_semver(version, True)
    if prerelease_first:
        key = (0,) if v.prerelease else (1,)
    else:
        key = ()
    key = key + (v.major, v.minor, v.patch)
    if not prerelease_first:
        #  NOT having a prerelease is > having one
        # BUGFIX: parenthesize the conditional expression. The original
        # ``key = key + (0,) if v.prerelease else (1,)`` parsed as
        # ``(key + (0,)) if v.prerelease else (1,)``, discarding the
        # version components for non-prerelease versions.
        key = key + ((0,) if v.prerelease else (1,))
    if v.prerelease:
        key = key + tuple(_semver_prerelease_key(
            v.prerelease))

    return key
2393
2394
def _fetch_package_metadata(registry, name, logger):
    """Fetch the metadata for a package from the npm registry.

    Returns the parsed JSON metadata. Logs a warning and re-raises
    URLError on network failure.
    """
    req = Request(
        urljoin(registry, quote(name, safe='@')),
        headers={
            # Prefer the abbreviated install metadata; fall back to full JSON.
            'Accept': ('application/vnd.npm.install-v1+json;'
                        ' q=1.0, application/json; q=0.8, */*')
        }
    )
    # ``Request.full_url`` has existed since Python 3.4; the historical
    # ``get_full_url()`` AttributeError fallback (Python 2) is removed.
    logger.debug('Fetching URL: %s' % (req.full_url))
    try:
        with contextlib.closing(urlopen(req)) as response:
            return json.loads(response.read().decode('utf-8'))
    except URLError as exc:
        logger.warning(
            'Failed to fetch package metadata for %r: %r',
            name, exc)
        raise
2416
2417
# Allow running this module directly to watch the dev-mode build.
if __name__ == '__main__':
    watch_dev(HERE)
2420