
Add extension manager server handlers

Vidar Tonaas Fauske, 7 years ago
parent commit 61594615f7
4 changed files with 370 additions and 38 deletions
  1. jupyterlab/commands.py  +103 -37
  2. jupyterlab/extension.py  +11 -1
  3. jupyterlab/extension_manager_handler.py  +255 -0
  4. packages/services/src/kernel/index.ts  +1 -0

jupyterlab/commands.py  +103 -37

@@ -32,7 +32,7 @@ if sys.version_info.major < 3:
     from urllib2 import Request, urlopen, quote
     from urllib2 import URLError, HTTPError
     from urlparse import urljoin
-
+    import contextlib
 else:
     from urllib.request import Request, urlopen, urljoin, quote
     from urllib.error import URLError, HTTPError
@@ -352,6 +352,27 @@ def get_app_version(app_dir=None):
     return handler.info['version']
 
 
+def get_latest_compatible_package_versions(names, app_dir=None, logger=None):
+    """Get the latest compatible version of a list of packages.
+    """
+    app_dir = app_dir or get_app_dir()
+    handler = _AppHandler(app_dir, logger)
+    return handler.latest_compatible_package_versions(names)
+
+
+def read_package(target):
+    """Read the package data in a given target tarball.
+    """
+    tar = tarfile.open(target, "r")
+    f = tar.extractfile('package/package.json')
+    data = json.loads(f.read().decode('utf8'))
+    data['jupyterlab_extracted_files'] = [
+        f.path[len('package/'):] for f in tar.getmembers()
+    ]
+    tar.close()
+    return data
+
+
 # ----------------------------------------------------------------------
 # Implementation details
 # ----------------------------------------------------------------------
@@ -1043,7 +1064,7 @@ class _AppHandler(object):
         extensions = dict()
         location = 'app' if dname == self.app_dir else 'sys'
         for target in glob.glob(pjoin(dname, 'extensions', '*.tgz')):
-            data = _read_package(target)
+            data = read_package(target)
             deps = data.get('dependencies', dict())
             name = data['name']
             jlab = data.get('jupyterlab', dict())
@@ -1085,7 +1106,7 @@ class _AppHandler(object):
 
         for path in glob.glob(pjoin(dname, '*.tgz')):
             path = osp.realpath(path)
-            data = _read_package(path)
+            data = read_package(path)
             name = data['name']
             if name not in info:
                 self.logger.warn('Removing orphaned linked package %s' % name)
@@ -1285,7 +1306,7 @@ class _AppHandler(object):
             raise ValueError(msg % source)
 
         path = glob.glob(pjoin(tempdir, '*.tgz'))[0]
-        info['data'] = _read_package(path)
+        info['data'] = read_package(path)
         if is_dir:
             info['sha'] = sha = _tarsum(path)
             target = path.replace('.tgz', '-%s.tgz' % sha)
@@ -1304,7 +1325,10 @@ class _AppHandler(object):
     def _latest_compatible_package_version(self, name):
         """Get the latest compatible version of a package"""
         core_data = self.info['core_data']
-        metadata = _fetch_package_metadata(self.registry, name, self.logger)
+        try:
+            metadata = _fetch_package_metadata(self.registry, name, self.logger)
+        except URLError:
+            return
         versions = metadata.get('versions', [])
 
         # Sort pre-release first, as we will reverse the sort:
@@ -1328,32 +1352,84 @@ class _AppHandler(object):
                 # Valid
                 return version
 
-    def _format_no_compatible_package_version(self, name):
-        """Get the latest compatible version of a package"""
-        core_data = self.info['core_data']
-        metadata = _fetch_package_metadata(self.registry, name, self.logger)
-        versions = metadata.get('versions', [])
+    def latest_compatible_package_versions(self, names):
+        """Get the latest compatible versions of several packages
 
-        # Sort pre-release first, as we will reverse the sort:
-        def sort_key(key_value):
-            return _semver_key(key_value[0], prerelease_first=True)
+        Like _latest_compatible_package_version, but optimized for
+        retrieving the latest version for several packages in one go.
+        """
+        core_data = self.info['core_data']
 
-        store = tuple(sorted(versions.items(), key=sort_key, reverse=True))
-        latest_deps = store[0][1].get('dependencies', {})
-        core_deps = core_data['dependencies']
-        singletons = core_data['jupyterlab']['singletonPackages']
+        keys = []
+        for name in names:
+            try:
+                metadata = _fetch_package_metadata(self.registry, name, self.logger)
+            except URLError:
+                continue
+            versions = metadata.get('versions', [])
+
+            # Sort pre-release first, as we will reverse the sort:
+            def sort_key(key_value):
+                return _semver_key(key_value[0], prerelease_first=True)
+
+            for version, data in sorted(versions.items(),
+                                        key=sort_key,
+                                        reverse=True):
+                deps = data.get('dependencies', {})
+                errors = _validate_compatibility(name, deps, core_data)
+                if not errors:
+                    # Found a compatible version
+                    keys.append('%s@%s' % (name, version))
+                    break  # break inner for
+
+
+        versions = {}
+        if not keys:
+            return versions
+        with TemporaryDirectory() as tempdir:
+            ret = self._run([which('npm'), 'pack'] + keys, cwd=tempdir, quiet=True)
+            if ret != 0:
+                msg = '"%s" is not a valid npm package'
+                raise ValueError(msg % keys)
+
+            for key in keys:
+                fname = key[0].replace('@', '') + key[1:].replace('@', '-').replace('/', '-') + '.tgz'
+                data = read_package(os.path.join(tempdir, fname))
+                # Verify that the version is a valid extension.
+                if not _validate_extension(data):
+                    # Valid extension; key by package name for lookup
+                    versions[data['name']] = data['version']
+        return versions
 
+    def _format_no_compatible_package_version(self, name):
+        """Format an error message for a package without a compatible version"""
+        core_data = self.info['core_data']
         # Whether lab version is too new:
         lab_newer_than_latest = False
         # Whether the latest version of the extension depend on a "future" version
         # of a singleton package (from the perspective of current lab version):
         latest_newer_than_lab = False
+        try:
+            metadata = _fetch_package_metadata(self.registry, name, self.logger)
+        except URLError:
+            pass
+        else:
+            versions = metadata.get('versions', [])
+
+            # Sort pre-release first, as we will reverse the sort:
+            def sort_key(key_value):
+                return _semver_key(key_value[0], prerelease_first=True)
+
+            store = tuple(sorted(versions.items(), key=sort_key, reverse=True))
+            latest_deps = store[0][1].get('dependencies', {})
+            core_deps = core_data['dependencies']
+            singletons = core_data['jupyterlab']['singletonPackages']
 
-        for (key, value) in latest_deps.items():
-            if key in singletons:
-                c = _compare_ranges(core_deps[key], value)
-                lab_newer_than_latest = lab_newer_than_latest or c < 0
-                latest_newer_than_lab = latest_newer_than_lab or c > 0
+            for (key, value) in latest_deps.items():
+                if key in singletons:
+                    c = _compare_ranges(core_deps[key], value)
+                    lab_newer_than_latest = lab_newer_than_latest or c < 0
+                    latest_newer_than_lab = latest_newer_than_lab or c > 0
 
         if lab_newer_than_latest:
             # All singleton deps in current version of lab are newer than those
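
The tarball filename built in latest_compatible_package_versions above mirrors the naming scheme npm pack uses for the files it writes. A worked example with a hypothetical scoped package:

    key = '@jupyterlab/foo-extension@0.1.0'   # hypothetical 'name@version' key
    fname = key[0].replace('@', '') + key[1:].replace('@', '-').replace('/', '-') + '.tgz'
    # fname == 'jupyterlab-foo-extension-0.1.0.tgz', the file npm pack produces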
@@ -1404,19 +1480,6 @@ def _normalize_path(extension):
     return extension
 
 
-def _read_package(target):
-    """Read the package data in a given target tarball.
-    """
-    tar = tarfile.open(target, "r")
-    f = tar.extractfile('package/package.json')
-    data = json.loads(f.read().decode('utf8'))
-    data['jupyterlab_extracted_files'] = [
-        f.path[len('package/'):] for f in tar.getmembers()
-    ]
-    tar.close()
-    return data
-
-
 def _validate_extension(data):
     """Detect if a package is an extension using its metadata.
 
@@ -1700,9 +1763,12 @@ def _fetch_package_metadata(registry, name, logger):
                         ' q=1.0, application/json; q=0.8, */*')
         }
     )
-    logger.debug('Fetching URL: %s' % (req.full_url))
     try:
-        with urlopen(req) as response:
+        logger.debug('Fetching URL: %s' % (req.full_url))
+    except AttributeError:
+        logger.debug('Fetching URL: %s' % (req.get_full_url()))
+    try:
+        with contextlib.closing(urlopen(req)) as response:
             return json.load(response)
     except URLError as exc:
         logger.warning(
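
The helpers exposed above (read_package, renamed from the private _read_package, and the new get_latest_compatible_package_versions) can also be called directly from Python. A minimal sketch, assuming an existing JupyterLab application directory; the tarball path and package names are hypothetical:

    from jupyterlab.commands import (
        read_package, get_latest_compatible_package_versions
    )

    # Inspect a packed extension tarball (hypothetical path)
    data = read_package('/tmp/jupyterlab-foo-extension-0.1.0.tgz')
    print(data['name'], data['version'])
    print(data['jupyterlab_extracted_files'][:3])   # paths relative to package/

    # Query the registry for the newest versions still compatible with the
    # current lab install; packages whose metadata cannot be fetched are skipped
    latest = get_latest_compatible_package_versions(
        ['@jupyterlab/foo-extension', 'jupyterlab-bar'])
    print(latest)   # latest compatible version per resolvable package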

jupyterlab/extension.py  +11 -1

@@ -36,6 +36,9 @@ def load_jupyter_server_extension(nbapp):
     from tornado.ioloop import IOLoop
     from markupsafe import Markup
     from .build_handler import build_path, Builder, BuildHandler
+    from .extension_manager_handler import (
+        extensions_handler_path, ExtensionManager, ExtensionHandler
+    )
     from .commands import (
         get_app_dir, get_user_settings_dir, watch, ensure_dev, watch_dev,
         pjoin, DEV_DIR, HERE, get_app_info, ensure_core, get_workspaces_dir
@@ -148,8 +151,15 @@ def load_jupyter_server_extension(nbapp):
     build_url = ujoin(base_url, build_path)
     builder = Builder(logger, core_mode, app_dir)
     build_handler = (build_url, BuildHandler, {'builder': builder})
+    handlers = [build_handler]
+
+    if not core_mode:
+        ext_url = ujoin(base_url, extensions_handler_path)
+        ext_manager = ExtensionManager(logger, app_dir)
+        ext_handler = (ext_url, ExtensionHandler, {'manager': ext_manager})
+        handlers.append(ext_handler)
 
     # Must add before the launcher handlers to avoid shadowing.
-    web_app.add_handlers('.*$', [build_handler])
+    web_app.add_handlers('.*$', handlers)
 
     add_handlers(web_app, config)

jupyterlab/extension_manager_handler.py  +255 -0

@@ -0,0 +1,255 @@
+"""Tornado handlers for extension management."""
+
+# Copyright (c) Jupyter Development Team.
+# Distributed under the terms of the Modified BSD License.
+
+import json
+import os
+import re
+
+from concurrent.futures import ThreadPoolExecutor
+
+from notebook.base.handlers import APIHandler
+from tornado import gen, web
+from tornado.ioloop import IOLoop
+
+from .commands import (
+    get_app_info, install_extension, uninstall_extension,
+    enable_extension, disable_extension, read_package,
+    _AppHandler, get_latest_compatible_package_versions
+)
+
+
+def _make_extension_entry(name, description, enabled, core, latest_version,
+                          installed_version, status, installed=None):
+    """Create an extension entry that can be sent to the client"""
+    ret = dict(
+        name=name,
+        description=description,
+        enabled=enabled,
+        core=core,
+        latest_version=latest_version,
+        installed_version=installed_version,
+        status=status,
+    )
+    if installed is not None:
+        ret['installed'] = installed
+    return ret
+
+
+def _ensure_compat_errors(info, app_dir, logger):
+    """Ensure that the app info has compat_errors field"""
+    handler = _AppHandler(app_dir, logger)
+    info['compat_errors'] = handler._get_extension_compat()
+
+
+_message_map = {
+    'install': re.compile(r'(?P<name>.*) needs to be included in build'),
+    'uninstall': re.compile(r'(?P<name>.*) needs to be removed from build'),
+    'update': re.compile(r'(?P<name>.*) changed from (?P<oldver>.*) to (?P<newver>.*)'),
+}
+
+def _build_check_info(app_dir, logger):
+    """Get info about packages scheduled for (un)install/update"""
+    handler = _AppHandler(app_dir, logger)
+    messages = handler.build_check(fast=True)
+    # Decode the messages into a dict:
+    status = {'install': [], 'uninstall': [], 'update': []}
+    for msg in messages:
+        for key, pattern in _message_map.items():
+            match = pattern.match(msg)
+            if match:
+                status[key].append(match.group('name'))
+    return status
+
+
+class ExtensionManager(object):
+    executor = ThreadPoolExecutor(max_workers=1)
+
+    def __init__(self, log, app_dir):
+        self.log = log
+        self.app_dir = app_dir
+        self._outdated = None
+        # Start fetching data on outdated extensions immediately
+        IOLoop.current().spawn_callback(self._get_outdated)
+
+    @gen.coroutine
+    def list_extensions(self):
+        """Handle a request for all installed extensions"""
+        info = get_app_info(app_dir=self.app_dir, logger=self.log)
+        build_check_info = _build_check_info(self.app_dir, self.log)
+        _ensure_compat_errors(info, self.app_dir, self.log)
+        extensions = []
+        # TODO: Ensure loops can run in parallel
+        for name, data in info['extensions'].items():
+            status = 'ok'
+            pkg_info = yield self._get_pkg_info(name, data)
+            if info['compat_errors'].get(name, None):
+                status = 'error'
+            else:
+                for packages in build_check_info.values():
+                    if name in packages:
+                        status = 'warning'
+            extensions.append(_make_extension_entry(
+                name=name,
+                description=pkg_info['description'],
+                enabled=(name not in info['disabled']),
+                core=False,
+                # Use wanted version to ensure we limit ourselves
+                # within semver restrictions
+                latest_version=pkg_info['latest_version'],
+                installed_version=data['version'],
+                status=status,
+            ))
+        for name in build_check_info['uninstall']:
+            data = yield self._get_scheduled_uninstall_info(name)
+            if data is not None:
+                extensions.append(_make_extension_entry(
+                    name=name,
+                    description=data['description'],
+                    installed=False,
+                    enabled=False,
+                    core=False,
+                    latest_version=data['version'],
+                    installed_version=data['version'],
+                    status='warning',
+                ))
+        raise gen.Return(extensions)
+
+    @gen.coroutine
+    def install(self, extension):
+        """Handle an install/update request"""
+        try:
+            install_extension(extension, app_dir=self.app_dir, logger=self.log)
+        except ValueError as e:
+            raise gen.Return(dict(status='error', message=str(e)))
+        raise gen.Return(dict(status='ok',))
+
+    @gen.coroutine
+    def uninstall(self, extension):
+        """Handle an uninstall request"""
+        did_uninstall = uninstall_extension(extension, app_dir=self.app_dir, logger=self.log)
+        raise gen.Return(dict(status='ok' if did_uninstall else 'error',))
+
+    @gen.coroutine
+    def enable(self, extension):
+        """Handle an enable request"""
+        enable_extension(extension, app_dir=self.app_dir, logger=self.log)
+        raise gen.Return(dict(status='ok',))
+
+    @gen.coroutine
+    def disable(self, extension):
+        """Handle a disable request"""
+        disable_extension(extension, app_dir=self.app_dir, logger=self.log)
+        raise gen.Return(dict(status='ok',))
+
+    @gen.coroutine
+    def _get_pkg_info(self, name, data):
+        """Get information about a package"""
+        info = read_package(data['path'])
+
+        # Get latest version that is compatible with current lab:
+        outdated = yield self._get_outdated()
+        if outdated and name in outdated:
+            info['latest_version'] = outdated[name]
+        else:
+            # Fallback to indicating that current is latest
+            info['latest_version'] = info['version']
+
+        raise gen.Return(info)
+
+    def _get_outdated(self):
+        """Get a Future to information from `npm/yarn outdated`.
+
+        This will cache the results. To refresh the cache, set
+        self._outdated to None before calling. To bypass the cache,
+        call self._load_outdated directly.
+        """
+        # Ensure self._outdated is a Future for data on outdated extensions
+        if self._outdated is None:
+            self._outdated = self._load_outdated()
+        # Return the Future
+        return self._outdated
+
+    def refresh_outdated(self):
+        self._outdated = self._load_outdated()
+        return self._outdated
+
+    @gen.coroutine
+    def _load_outdated(self):
+        """Get the latest compatible version"""
+        info = get_app_info(app_dir=self.app_dir, logger=self.log)
+        names = tuple(info['extensions'].keys())
+        data = yield self.executor.submit(
+            get_latest_compatible_package_versions,
+            names,
+            app_dir=self.app_dir,
+            logger=self.log,
+        )
+        raise gen.Return(data)
+
+    @gen.coroutine
+    def _get_scheduled_uninstall_info(self, name):
+        """Get information about a package that is scheduled for uninstallation"""
+        target = os.path.join(
+            self.app_dir, 'staging', 'node_modules', name, 'package.json')
+        if os.path.exists(target):
+            with open(target) as fid:
+                raise gen.Return(json.load(fid))
+        else:
+            raise gen.Return(None)
+
+
+class ExtensionHandler(APIHandler):
+
+    def initialize(self, manager):
+        self.manager = manager
+
+    @web.authenticated
+    @gen.coroutine
+    def get(self):
+        """GET query returns info on all installed extensions"""
+        if self.get_argument('refresh', False) == '1':
+            yield self.manager.refresh_outdated()
+        extensions = yield self.manager.list_extensions()
+        self.finish(json.dumps(extensions))
+
+    @web.authenticated
+    @gen.coroutine
+    def post(self):
+        """POST query performs an action on a specific extension"""
+        data = self.get_json_body()
+        cmd = data['cmd']
+        name = data['extension_name']
+        if (cmd not in ('install', 'uninstall', 'enable', 'disable') or
+                not name):
+            raise web.HTTPError(
+                422, 'Could not process instruction %r with extension name %r' % (
+                    cmd, name))
+
+        # TODO: Can we trust extension_name? Does it need sanitation?
+        #       It comes from an authenticated session, but its name is
+        #       ultimately from the NPM repository.
+        ret_value = None
+        try:
+            if cmd == 'install':
+                ret_value = yield self.manager.install(name)
+            elif cmd == 'uninstall':
+                ret_value = yield self.manager.uninstall(name)
+            elif cmd == 'enable':
+                ret_value = yield self.manager.enable(name)
+            elif cmd == 'disable':
+                ret_value = yield self.manager.disable(name)
+        except gen.Return as e:
+            ret_value = e.value
+        except Exception as e:
+            raise web.HTTPError(500, str(e))
+
+        if ret_value is None:
+            self.set_status(200)
+        else:
+            self.finish(json.dumps(ret_value))
+
+
+# The path for lab extensions handler.
+extensions_handler_path = r"/lab/api/extensions"
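
The handler above is served at /lab/api/extensions (extensions_handler_path), joined with the server's base URL in extension.py. A rough client-side sketch, assuming a notebook server address, API token, and extension name that are all hypothetical:

    import requests

    base = 'http://localhost:8888'                      # hypothetical address
    headers = {'Authorization': 'token <your-token>'}   # hypothetical token

    # List installed extensions; refresh=1 re-fetches outdated-version info
    r = requests.get(base + '/lab/api/extensions',
                     params={'refresh': '1'}, headers=headers)
    for ext in r.json():
        print(ext['name'], ext['installed_version'], '->', ext['latest_version'])

    # Perform an action on one extension: install, uninstall, enable or disable
    r = requests.post(base + '/lab/api/extensions', headers=headers,
                      json={'cmd': 'install',
                            'extension_name': '@jupyterlab/foo-extension'})
    print(r.json())   # e.g. {'status': 'ok'}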

packages/services/src/kernel/index.ts  +1 -0

@@ -1,5 +1,6 @@
 // Copyright (c) Jupyter Development Team.
 // Distributed under the terms of the Modified BSD License.
+'use strict';
 
 export * from './kernel';
 export * from './manager';