commands.py 67 KB

12345678910111213141516171819202122232425262728293031323334353637383940414243444546474849505152535455565758596061626364656667686970717273747576777879808182838485868788899091929394959697989910010110210310410510610710810911011111211311411511611711811912012112212312412512612712812913013113213313413513613713813914014114214314414514614714814915015115215315415515615715815916016116216316416516616716816917017117217317417517617717817918018118218318418518618718818919019119219319419519619719819920020120220320420520620720820921021121221321421521621721821922022122222322422522622722822923023123223323423523623723823924024124224324424524624724824925025125225325425525625725825926026126226326426526626726826927027127227327427527627727827928028128228328428528628728828929029129229329429529629729829930030130230330430530630730830931031131231331431531631731831932032132232332432532632732832933033133233333433533633733833934034134234334434534634734834935035135235335435535635735835936036136236336436536636736836937037137237337437537637737837938038138238338438538638738838939039139239339439539639739839940040140240340440540640740840941041141241341441541641741841942042142242342442542642742842943043143243343443543643743843944044144244344444544644744844945045145245345445545645745845946046146246346446546646746846947047147247347447547647747847948048148248348448548648748848949049149249349449549649749849950050150250350450550650750850951051151251351451551651751851952052152252352452552652752852953053153253353453553653753853954054154254354454554654754854955055155255355455555655755855956056156256356456556656756856957057157257357457557657757857958058158258358458558658758858959059159259359459559659759859960060160260360460560660760860961061161261361461561661761861962062162262362462562662762862963063163263363463563663763863964064164264364464564664764864965065165265365465565665765865966066166266366466566666766866967067167267367467567667767867968068168268368468568668768868969069169269369469569669769869970070170270
37047057067077087097107117127137147157167177187197207217227237247257267277287297307317327337347357367377387397407417427437447457467477487497507517527537547557567577587597607617627637647657667677687697707717727737747757767777787797807817827837847857867877887897907917927937947957967977987998008018028038048058068078088098108118128138148158168178188198208218228238248258268278288298308318328338348358368378388398408418428438448458468478488498508518528538548558568578588598608618628638648658668678688698708718728738748758768778788798808818828838848858868878888898908918928938948958968978988999009019029039049059069079089099109119129139149159169179189199209219229239249259269279289299309319329339349359369379389399409419429439449459469479489499509519529539549559569579589599609619629639649659669679689699709719729739749759769779789799809819829839849859869879889899909919929939949959969979989991000100110021003100410051006100710081009101010111012101310141015101610171018101910201021102210231024102510261027102810291030103110321033103410351036103710381039104010411042104310441045104610471048104910501051105210531054105510561057105810591060106110621063106410651066106710681069107010711072107310741075107610771078107910801081108210831084108510861087108810891090109110921093109410951096109710981099110011011102110311041105110611071108110911101111111211131114111511161117111811191120112111221123112411251126112711281129113011311132113311341135113611371138113911401141114211431144114511461147114811491150115111521153115411551156115711581159116011611162116311641165116611671168116911701171117211731174117511761177117811791180118111821183118411851186118711881189119011911192119311941195119611971198119912001201120212031204120512061207120812091210121112121213121412151216121712181219122012211222122312241225122612271228122912301231123212331234123512361237123812391240124112421243124412451246124712481249125012511252125312541255125612571258125912601261126212631264126512661267126812691270127112721273127412751276127
71278127912801281128212831284128512861287128812891290129112921293129412951296129712981299130013011302130313041305130613071308130913101311131213131314131513161317131813191320132113221323132413251326132713281329133013311332133313341335133613371338133913401341134213431344134513461347134813491350135113521353135413551356135713581359136013611362136313641365136613671368136913701371137213731374137513761377137813791380138113821383138413851386138713881389139013911392139313941395139613971398139914001401140214031404140514061407140814091410141114121413141414151416141714181419142014211422142314241425142614271428142914301431143214331434143514361437143814391440144114421443144414451446144714481449145014511452145314541455145614571458145914601461146214631464146514661467146814691470147114721473147414751476147714781479148014811482148314841485148614871488148914901491149214931494149514961497149814991500150115021503150415051506150715081509151015111512151315141515151615171518151915201521152215231524152515261527152815291530153115321533153415351536153715381539154015411542154315441545154615471548154915501551155215531554155515561557155815591560156115621563156415651566156715681569157015711572157315741575157615771578157915801581158215831584158515861587158815891590159115921593159415951596159715981599160016011602160316041605160616071608160916101611161216131614161516161617161816191620162116221623162416251626162716281629163016311632163316341635163616371638163916401641164216431644164516461647164816491650165116521653165416551656165716581659166016611662166316641665166616671668166916701671167216731674167516761677167816791680168116821683168416851686168716881689169016911692169316941695169616971698169917001701170217031704170517061707170817091710171117121713171417151716171717181719172017211722172317241725172617271728172917301731173217331734173517361737173817391740174117421743174417451746174717481749175017511752175317541755175617571758175917601761176217631764176517661767176817691770177117721773177417751776177
717781779178017811782178317841785178617871788178917901791179217931794179517961797179817991800180118021803180418051806180718081809181018111812181318141815181618171818181918201821182218231824182518261827182818291830183118321833183418351836183718381839184018411842184318441845184618471848184918501851185218531854185518561857185818591860186118621863186418651866186718681869187018711872187318741875187618771878187918801881188218831884188518861887188818891890189118921893189418951896189718981899190019011902190319041905190619071908190919101911191219131914191519161917191819191920192119221923192419251926192719281929193019311932193319341935193619371938193919401941194219431944194519461947194819491950195119521953195419551956195719581959196019611962196319641965196619671968196919701971197219731974197519761977197819791980198119821983198419851986198719881989199019911992199319941995
  1. # coding: utf-8
  2. """JupyterLab command handler"""
  3. # Copyright (c) Jupyter Development Team.
  4. # Distributed under the terms of the Modified BSD License.
  5. import contextlib
  6. from distutils.version import LooseVersion
  7. import errno
  8. import glob
  9. import hashlib
  10. import json
  11. import logging
  12. import os
  13. import os.path as osp
  14. import re
  15. import shutil
  16. import site
  17. import subprocess
  18. import sys
  19. import tarfile
  20. from tempfile import TemporaryDirectory
  21. from threading import Event
  22. from urllib.request import Request, urlopen, urljoin, quote
  23. from urllib.error import URLError
  24. from jupyter_core.paths import jupyter_config_path
  25. from jupyterlab_server.process import which, Process, WatchHelper, list2cmdline
  26. from notebook.nbextensions import GREEN_ENABLED, GREEN_OK, RED_DISABLED, RED_X
  27. from .semver import Range, gte, lt, lte, gt, make_semver
  28. from .jlpmapp import YARN_PATH, HERE
# Regex matched against webpack watch output to detect a finished build
# (the emitted bundle path ends with ``index.out.js``).
WEBPACK_EXPECT = re.compile(r'.*/index.out.js')

# Absolute path of the repository-level ``dev_mode`` application directory.
DEV_DIR = osp.abspath(os.path.join(HERE, '..', 'dev_mode'))
class ProgressProcess(Process):
    """A ``Process`` subclass that shows a console spinner while waiting."""

    def __init__(self, cmd, logger=None, cwd=None, kill_event=None,
                 env=None):
        """Start a subprocess that can be run asynchronously.

        Parameters
        ----------
        cmd: list
            The command to run.
        logger: :class:`~logger.Logger`, optional
            The logger instance.
        cwd: string, optional
            The cwd of the process.
        kill_event: :class:`~threading.Event`, optional
            An event used to kill the process operation.
        env: dict, optional
            The environment for the process.
        """
        if not isinstance(cmd, (list, tuple)):
            raise ValueError('Command must be given as a list')
        # Refuse to start at all if the caller already aborted.
        if kill_event and kill_event.is_set():
            raise ValueError('Process aborted')
        self.logger = logger = logger or logging.getLogger('jupyterlab')
        self._last_line = ''
        self.cmd = cmd
        self.logger.debug('> ' + list2cmdline(cmd))
        # Merge stderr into stdout so a single stream is cached below.
        self.proc = self._create_process(
            cwd=cwd,
            env=env,
            stderr=subprocess.STDOUT,
            stdout=subprocess.PIPE,
            universal_newlines=True,
        )
        self._kill_event = kill_event or Event()
        # Register with the Process class so stray children can be reaped.
        Process._procs.add(self)

    def wait(self):
        """Block until the child exits, drawing a spinner on stdout.

        Polls the child every 0.1s; raises ``ValueError`` if the kill
        event is set mid-run.  Captured output is logged at debug level.
        Returns the result of ``self.terminate()``.
        """
        cache = []
        proc = self.proc
        kill_event = self._kill_event
        import itertools
        spinner = itertools.cycle(['-', '/', '|', '\\'])
        while proc.poll() is None:
            sys.stdout.write(next(spinner))  # write the next character
            sys.stdout.flush()  # flush stdout buffer (actual character display)
            sys.stdout.write('\b')  # back up so the next frame overwrites
            if kill_event.is_set():
                self.terminate()
                raise ValueError('Process was aborted')
            try:
                # Short timeout doubles as the spinner frame interval.
                out, _ = proc.communicate(timeout=.1)
                cache.append(out)
            except subprocess.TimeoutExpired:
                continue
        self.logger.debug('\n'.join(cache))
        sys.stdout.flush()
        return self.terminate()
  88. def pjoin(*args):
  89. """Join paths to create a real path.
  90. """
  91. return osp.abspath(osp.join(*args))
  92. def get_user_settings_dir():
  93. """Get the configured JupyterLab user settings directory.
  94. """
  95. settings_dir = os.environ.get('JUPYTERLAB_SETTINGS_DIR')
  96. settings_dir = settings_dir or pjoin(
  97. jupyter_config_path()[0], 'lab', 'user-settings'
  98. )
  99. return osp.abspath(settings_dir)
  100. def get_workspaces_dir():
  101. """Get the configured JupyterLab workspaces directory.
  102. """
  103. workspaces_dir = os.environ.get('JUPYTERLAB_WORKSPACES_DIR')
  104. workspaces_dir = workspaces_dir or pjoin(
  105. jupyter_config_path()[0], 'lab', 'workspaces'
  106. )
  107. return osp.abspath(workspaces_dir)
  108. def get_app_dir():
  109. """Get the configured JupyterLab app directory.
  110. """
  111. # Default to the override environment variable.
  112. if os.environ.get('JUPYTERLAB_DIR'):
  113. return osp.abspath(os.environ['JUPYTERLAB_DIR'])
  114. # Use the default locations for data_files.
  115. app_dir = pjoin(sys.prefix, 'share', 'jupyter', 'lab')
  116. # Check for a user level install.
  117. # Ensure that USER_BASE is defined
  118. if hasattr(site, 'getuserbase'):
  119. site.getuserbase()
  120. userbase = getattr(site, 'USER_BASE', None)
  121. if HERE.startswith(userbase) and not app_dir.startswith(userbase):
  122. app_dir = pjoin(userbase, 'share', 'jupyter', 'lab')
  123. # Check for a system install in '/usr/local/share'.
  124. elif (sys.prefix.startswith('/usr') and not
  125. osp.exists(app_dir) and
  126. osp.exists('/usr/local/share/jupyter/lab')):
  127. app_dir = '/usr/local/share/jupyter/lab'
  128. return osp.abspath(app_dir)
  129. def dedupe_yarn(path, logger=None):
  130. """ `yarn-deduplicate` with the `fewer` strategy to minimize total
  131. packages installed in a given staging directory
  132. This means a extension (or dependency) _could_ cause a downgrade of an
  133. version expected at publication time, but core should aggressively set
  134. pins above, for example, known-bad versions
  135. """
  136. had_dupes = ProgressProcess(
  137. ['node', YARN_PATH, 'yarn-deduplicate', '-s', 'fewer', '--fail'],
  138. cwd=path, logger=logger
  139. ).wait() != 0
  140. if had_dupes:
  141. yarn_proc = ProgressProcess(['node', YARN_PATH], cwd=path, logger=logger)
  142. yarn_proc.wait()
  143. def ensure_node_modules(cwd, logger=None):
  144. """Ensure that node_modules is up to date.
  145. Returns true if the node_modules was updated.
  146. """
  147. logger = _ensure_logger(logger)
  148. yarn_proc = ProgressProcess(['node', YARN_PATH, 'check', '--verify-tree'], cwd=cwd, logger=logger)
  149. ret = yarn_proc.wait()
  150. # Update node_modules if needed.
  151. if ret != 0:
  152. yarn_proc = ProgressProcess(['node', YARN_PATH], cwd=cwd, logger=logger)
  153. yarn_proc.wait()
  154. parent = pjoin(HERE, '..')
  155. dedupe_yarn(parent, logger)
  156. return ret != 0
  157. def ensure_dev(logger=None):
  158. """Ensure that the dev assets are available.
  159. """
  160. parent = pjoin(HERE, '..')
  161. logger = _ensure_logger(logger)
  162. target = pjoin(parent, 'dev_mode', 'static')
  163. # Determine whether to build.
  164. if ensure_node_modules(parent, logger) or not osp.exists(target):
  165. yarn_proc = ProgressProcess(['node', YARN_PATH, 'build'], cwd=parent,
  166. logger=logger)
  167. yarn_proc.wait()
  168. def ensure_core(logger=None):
  169. """Ensure that the core assets are available.
  170. """
  171. staging = pjoin(HERE, 'staging')
  172. logger = _ensure_logger(logger)
  173. # Determine whether to build.
  174. target = pjoin(HERE, 'static', 'index.html')
  175. if not osp.exists(target):
  176. ensure_node_modules(staging, logger)
  177. yarn_proc = ProgressProcess(['node', YARN_PATH, 'build'], cwd=staging,
  178. logger=logger)
  179. yarn_proc.wait()
  180. def ensure_app(app_dir):
  181. """Ensure that an application directory is available.
  182. If it does not exist, return a list of messages to prompt the user.
  183. """
  184. if osp.exists(pjoin(app_dir, 'static', 'index.html')):
  185. return
  186. msgs = ['JupyterLab application assets not found in "%s"' % app_dir,
  187. 'Please run `jupyter lab build` or use a different app directory']
  188. return msgs
  189. def watch_packages(logger=None):
  190. """Run watch mode for the source packages.
  191. Parameters
  192. ----------
  193. logger: :class:`~logger.Logger`, optional
  194. The logger instance.
  195. Returns
  196. -------
  197. A list of `WatchHelper` objects.
  198. """
  199. parent = pjoin(HERE, '..')
  200. logger = _ensure_logger(logger)
  201. ensure_node_modules(parent, logger)
  202. ts_dir = osp.abspath(osp.join(HERE, '..', 'packages', 'metapackage'))
  203. # Run typescript watch and wait for the string indicating it is done.
  204. ts_regex = r'.* Found 0 errors\. Watching for file changes\.'
  205. ts_proc = WatchHelper(['node', YARN_PATH, 'run', 'watch'],
  206. cwd=ts_dir, logger=logger, startup_regex=ts_regex)
  207. return [ts_proc]
  208. def watch_dev(logger=None):
  209. """Run watch mode in a given directory.
  210. Parameters
  211. ----------
  212. logger: :class:`~logger.Logger`, optional
  213. The logger instance.
  214. Returns
  215. -------
  216. A list of `WatchHelper` objects.
  217. """
  218. logger = _ensure_logger(logger)
  219. package_procs = watch_packages(logger)
  220. # Run webpack watch and wait for compilation.
  221. wp_proc = WatchHelper(['node', YARN_PATH, 'run', 'watch'],
  222. cwd=DEV_DIR, logger=logger,
  223. startup_regex=WEBPACK_EXPECT)
  224. return package_procs + [wp_proc]
  225. def watch(app_dir=None, logger=None):
  226. """Watch the application.
  227. Parameters
  228. ----------
  229. app_dir: string, optional
  230. The application directory.
  231. logger: :class:`~logger.Logger`, optional
  232. The logger instance.
  233. Returns
  234. -------
  235. A list of processes to run asynchronously.
  236. """
  237. logger = _ensure_logger(logger)
  238. _node_check(logger)
  239. handler = _AppHandler(app_dir, logger)
  240. return handler.watch()
  241. def install_extension(extension, app_dir=None, logger=None):
  242. """Install an extension package into JupyterLab.
  243. The extension is first validated.
  244. Returns `True` if a rebuild is recommended, `False` otherwise.
  245. """
  246. logger = _ensure_logger(logger)
  247. _node_check(logger)
  248. handler = _AppHandler(app_dir, logger)
  249. return handler.install_extension(extension)
  250. def uninstall_extension(name=None, app_dir=None, logger=None, all_=False):
  251. """Uninstall an extension by name or path.
  252. Returns `True` if a rebuild is recommended, `False` otherwise.
  253. """
  254. logger = _ensure_logger(logger)
  255. _node_check(logger)
  256. handler = _AppHandler(app_dir, logger)
  257. if all_ is True:
  258. return handler.uninstall_all_extensions()
  259. return handler.uninstall_extension(name)
  260. def update_extension(name=None, all_=False, app_dir=None, logger=None):
  261. """Update an extension by name, or all extensions.
  262. Either `name` must be given as a string, or `all_` must be `True`.
  263. If `all_` is `True`, the value of `name` is ignored.
  264. Returns `True` if a rebuild is recommended, `False` otherwise.
  265. """
  266. logger = _ensure_logger(logger)
  267. _node_check(logger)
  268. handler = _AppHandler(app_dir, logger)
  269. if all_ is True:
  270. return handler.update_all_extensions()
  271. return handler.update_extension(name)
  272. def clean(app_dir=None, logger=None):
  273. """Clean the JupyterLab application directory."""
  274. logger = _ensure_logger(logger)
  275. app_dir = app_dir or get_app_dir()
  276. logger.info('Cleaning %s...', app_dir)
  277. if app_dir == pjoin(HERE, 'dev'):
  278. raise ValueError('Cannot clean the dev app')
  279. if app_dir == pjoin(HERE, 'core'):
  280. raise ValueError('Cannot clean the core app')
  281. for name in ['staging']:
  282. target = pjoin(app_dir, name)
  283. if osp.exists(target):
  284. _rmtree(target, logger)
  285. logger.info('Success!')
  286. def build(app_dir=None, name=None, version=None, static_url=None,
  287. logger=None, command='build:prod', kill_event=None,
  288. clean_staging=False):
  289. """Build the JupyterLab application.
  290. """
  291. logger = _ensure_logger(logger)
  292. _node_check(logger)
  293. handler = _AppHandler(app_dir, logger, kill_event=kill_event)
  294. return handler.build(name=name, version=version, static_url=static_url,
  295. command=command, clean_staging=clean_staging)
  296. def get_app_info(app_dir=None, logger=None):
  297. """Get a dictionary of information about the app.
  298. """
  299. handler = _AppHandler(app_dir, logger)
  300. return handler.info
  301. def enable_extension(extension, app_dir=None, logger=None):
  302. """Enable a JupyterLab extension.
  303. Returns `True` if a rebuild is recommended, `False` otherwise.
  304. """
  305. handler = _AppHandler(app_dir, logger)
  306. return handler.toggle_extension(extension, False)
  307. def disable_extension(extension, app_dir=None, logger=None):
  308. """Disable a JupyterLab package.
  309. Returns `True` if a rebuild is recommended, `False` otherwise.
  310. """
  311. handler = _AppHandler(app_dir, logger)
  312. return handler.toggle_extension(extension, True)
  313. def check_extension(extension, app_dir=None, installed=False, logger=None):
  314. """Check if a JupyterLab extension is enabled or disabled.
  315. """
  316. handler = _AppHandler(app_dir, logger)
  317. return handler.check_extension(extension, installed)
  318. def build_check(app_dir=None, logger=None):
  319. """Determine whether JupyterLab should be built.
  320. Returns a list of messages.
  321. """
  322. logger = _ensure_logger(logger)
  323. _node_check(logger)
  324. handler = _AppHandler(app_dir, logger)
  325. return handler.build_check()
  326. def list_extensions(app_dir=None, logger=None):
  327. """List the extensions.
  328. """
  329. handler = _AppHandler(app_dir, logger)
  330. return handler.list_extensions()
  331. def link_package(path, app_dir=None, logger=None):
  332. """Link a package against the JupyterLab build.
  333. Returns `True` if a rebuild is recommended, `False` otherwise.
  334. """
  335. handler = _AppHandler(app_dir, logger)
  336. return handler.link_package(path)
  337. def unlink_package(package, app_dir=None, logger=None):
  338. """Unlink a package from JupyterLab by path or name.
  339. Returns `True` if a rebuild is recommended, `False` otherwise.
  340. """
  341. handler = _AppHandler(app_dir, logger)
  342. return handler.unlink_package(package)
  343. def get_app_version(app_dir=None):
  344. """Get the application version."""
  345. app_dir = app_dir or get_app_dir()
  346. handler = _AppHandler(app_dir)
  347. return handler.info['version']
  348. def get_latest_compatible_package_versions(names, app_dir=None, logger=None):
  349. """Get the latest compatible version of a list of packages.
  350. """
  351. app_dir = app_dir or get_app_dir()
  352. handler = _AppHandler(app_dir, logger)
  353. return handler.latest_compatible_package_versions(names)
  354. def read_package(target):
  355. """Read the package data in a given target tarball.
  356. """
  357. tar = tarfile.open(target, "r")
  358. f = tar.extractfile('package/package.json')
  359. data = json.loads(f.read().decode('utf8'))
  360. data['jupyterlab_extracted_files'] = [
  361. f.path[len('package/'):] for f in tar.getmembers()
  362. ]
  363. tar.close()
  364. return data
  365. # ----------------------------------------------------------------------
  366. # Implementation details
  367. # ----------------------------------------------------------------------
class _AppHandler(object):
    """Implementation backend for the module-level app commands."""

    def __init__(self, app_dir, logger=None, kill_event=None):
        """Create a new _AppHandler object.

        Parameters
        ----------
        app_dir: string or None
            The application directory; falls back to :func:`get_app_dir`.
        logger: :class:`~logging.Logger`, optional
            The logger instance; a default is created when not given.
        kill_event: :class:`~threading.Event`, optional
            Event used to abort long-running subprocess operations.
        """
        self.app_dir = app_dir or get_app_dir()
        # The system-wide app dir, used to classify extension locations.
        self.sys_dir = get_app_dir()
        self.logger = _ensure_logger(logger)
        # Snapshot of app state (extensions, versions, ...) taken up front.
        self.info = self._get_app_info()
        self.kill_event = kill_event or Event()
        # TODO: Make this configurable
        self.registry = 'https://registry.npmjs.org'
    def install_extension(self, extension, existing=None):
        """Install an extension package into JupyterLab.

        The extension is first validated.

        Returns `True` if a rebuild is recommended, `False` otherwise.

        (``existing`` is accepted but not used in this method body.)
        """
        extension = _normalize_path(extension)
        extensions = self.info['extensions']
        # Check for a core extensions.
        if extension in self.info['core_extensions']:
            config = self._read_build_config()
            uninstalled = config.get('uninstalled_core_extensions', [])
            if extension in uninstalled:
                # Re-enabling a previously removed core extension only
                # requires updating the build config — no download needed.
                self.logger.info('Installing core extension %s' % extension)
                uninstalled.remove(extension)
                config['uninstalled_core_extensions'] = uninstalled
                self._write_build_config(config)
                return True
            return False
        # Create the app dirs if needed.
        self._ensure_app_dirs()
        # Install the package using a temporary directory.
        with TemporaryDirectory() as tempdir:
            info = self._install_extension(extension, tempdir)
        name = info['name']
        # Local directories get name mangled and stored in metadata.
        if info['is_dir']:
            config = self._read_build_config()
            local = config.setdefault('local_extensions', dict())
            local[name] = info['source']
            self._write_build_config(config)
        # Remove an existing extension with the same name and different path
        if name in extensions:
            other = extensions[name]
            if other['path'] != info['path'] and other['location'] == 'app':
                os.remove(other['path'])
        return True
    def build(self, name=None, version=None, static_url=None,
              command='build:prod', clean_staging=False):
        """Build the application.

        Parameters
        ----------
        name: string, optional
            App name override passed to the staging template.
        version: string, optional
            App version override passed to the staging template.
        static_url: string, optional
            Static URL override passed to the staging template.
        command: string, optional
            The yarn run target to execute (default ``build:prod``).
        clean_staging: bool, optional
            Whether to clear the staging directory before populating it.

        Raises
        ------
        RuntimeError
            If the dependency install or the build command fails.
        """
        self.logger.info('Building jupyterlab assets')
        # Set up the build directory.
        app_dir = self.app_dir
        self._populate_staging(
            name=name, version=version, static_url=static_url,
            clean=clean_staging
        )
        staging = pjoin(app_dir, 'staging')
        # Make sure packages are installed.
        ret = self._run(['node', YARN_PATH, 'install', '--non-interactive'], cwd=staging)
        if ret != 0:
            msg = 'npm dependencies failed to install'
            self.logger.debug(msg)
            raise RuntimeError(msg)
        dedupe_yarn(staging, self.logger)
        # Build the app.
        ret = self._run(['node', YARN_PATH, 'run', command], cwd=staging)
        if ret != 0:
            msg = 'JupyterLab failed to build'
            self.logger.debug(msg)
            raise RuntimeError(msg)
  440. def watch(self):
  441. """Start the application watcher and then run the watch in
  442. the background.
  443. """
  444. staging = pjoin(self.app_dir, 'staging')
  445. self._populate_staging()
  446. # Make sure packages are installed.
  447. self._run(['node', YARN_PATH, 'install'], cwd=staging)
  448. dedupe_yarn(staging, self.logger)
  449. proc = WatchHelper(['node', YARN_PATH, 'run', 'watch'],
  450. cwd=pjoin(self.app_dir, 'staging'),
  451. startup_regex=WEBPACK_EXPECT,
  452. logger=self.logger)
  453. return [proc]
    def list_extensions(self):
        """Print an output of the extensions.
        """
        logger = self.logger
        info = self.info
        print('JupyterLab v%s' % info['version'])
        if info['extensions']:
            # Attach compat errors so _list_extensions can flag problems.
            info['compat_errors'] = self._get_extension_compat()
            print('Known labextensions:')
            self._list_extensions(info, 'app')
            self._list_extensions(info, 'sys')
        else:
            print('No installed extensions')
        local = info['local_extensions']
        if local:
            print('\n local extensions:')
            for name in sorted(local):
                print(' %s: %s' % (name, local[name]))
        linked_packages = info['linked_packages']
        if linked_packages:
            print('\n linked packages:')
            for key in sorted(linked_packages):
                source = linked_packages[key]['source']
                print(' %s: %s' % (key, source))
        uninstalled_core = info['uninstalled_core']
        if uninstalled_core:
            print('\nUninstalled core extensions:')
            [print(' %s' % item) for item in sorted(uninstalled_core)]
        disabled_core = info['disabled_core']
        if disabled_core:
            print('\nDisabled core extensions:')
            [print(' %s' % item) for item in sorted(disabled_core)]
        # Advise a rebuild when the fast build check finds changes.
        messages = self.build_check(fast=True)
        if messages:
            print('\nBuild recommended, please run `jupyter lab build`:')
            [print(' %s' % item) for item in messages]
    def build_check(self, fast=False):
        """Determine whether JupyterLab should be built.

        Parameters
        ----------
        fast: bool, optional
            When True, skip the (slow) content checks for local
            extensions and linked packages.

        Returns a list of messages.
        """
        app_dir = self.app_dir
        local = self.info['local_extensions']
        linked = self.info['linked_packages']
        messages = []
        # Check for no application.
        pkg_path = pjoin(app_dir, 'static', 'package.json')
        if not osp.exists(pkg_path):
            return ['No built application']
        static_data = self.info['static_data']
        old_jlab = static_data['jupyterlab']
        old_deps = static_data.get('dependencies', dict())
        # Look for mismatched version.
        static_version = old_jlab.get('version', '')
        core_version = old_jlab['version']
        # NOTE(review): static_version and core_version are both read from
        # the same ``old_jlab`` dict, so this comparison can never report a
        # mismatch; core_version presumably should come from
        # ``self.info['version']`` — confirm before changing.
        if LooseVersion(static_version) != LooseVersion(core_version):
            msg = 'Version mismatch: %s (built), %s (current)'
            return [msg % (static_version, core_version)]
        # Look for mismatched extensions.
        new_package = self._get_package_template(silent=fast)
        new_jlab = new_package['jupyterlab']
        new_deps = new_package.get('dependencies', dict())
        for ext_type in ['extensions', 'mimeExtensions']:
            # Extensions that were added.
            for ext in new_jlab[ext_type]:
                if ext not in old_jlab[ext_type]:
                    messages.append('%s needs to be included in build' % ext)
            # Extensions that were removed.
            for ext in old_jlab[ext_type]:
                if ext not in new_jlab[ext_type]:
                    messages.append('%s needs to be removed from build' % ext)
        # Look for mismatched dependencies
        for (pkg, dep) in new_deps.items():
            if pkg not in old_deps:
                continue
            # Skip local and linked since we pick them up separately.
            if pkg in local or pkg in linked:
                continue
            if old_deps[pkg] != dep:
                msg = '%s changed from %s to %s'
                messages.append(msg % (pkg, old_deps[pkg], new_deps[pkg]))
        # Look for updated local extensions.
        for (name, source) in local.items():
            if fast:
                continue
            dname = pjoin(app_dir, 'extensions')
            if self._check_local(name, source, dname):
                messages.append('%s content changed' % name)
        # Look for updated linked packages.
        for (name, item) in linked.items():
            if fast:
                continue
            dname = pjoin(app_dir, 'staging', 'linked_packages')
            if self._check_local(name, item['source'], dname):
                messages.append('%s content changed' % name)
        return messages
  549. def uninstall_extension(self, name):
  550. """Uninstall an extension by name.
  551. Returns `True` if a rebuild is recommended, `False` otherwise.
  552. """
  553. # Allow for uninstalled core extensions.
  554. data = self.info['core_data']
  555. if name in self.info['core_extensions']:
  556. config = self._read_build_config()
  557. uninstalled = config.get('uninstalled_core_extensions', [])
  558. if name not in uninstalled:
  559. self.logger.info('Uninstalling core extension %s' % name)
  560. uninstalled.append(name)
  561. config['uninstalled_core_extensions'] = uninstalled
  562. self._write_build_config(config)
  563. return True
  564. return False
  565. local = self.info['local_extensions']
  566. for (extname, data) in self.info['extensions'].items():
  567. path = data['path']
  568. if extname == name:
  569. msg = 'Uninstalling %s from %s' % (name, osp.dirname(path))
  570. self.logger.info(msg)
  571. os.remove(path)
  572. # Handle local extensions.
  573. if extname in local:
  574. config = self._read_build_config()
  575. data = config.setdefault('local_extensions', dict())
  576. del data[extname]
  577. self._write_build_config(config)
  578. return True
  579. self.logger.warn('No labextension named "%s" installed' % name)
  580. return False
  581. def uninstall_all_extensions(self):
  582. """Uninstalls all extensions
  583. Returns `True` if a rebuild is recommended, `False` otherwise
  584. """
  585. should_rebuild = False
  586. for (extname, _) in self.info['extensions'].items():
  587. uninstalled = self.uninstall_extension(extname)
  588. should_rebuild = should_rebuild or uninstalled
  589. return should_rebuild
  590. def update_all_extensions(self):
  591. """Update all non-local extensions.
  592. Returns `True` if a rebuild is recommended, `False` otherwise.
  593. """
  594. should_rebuild = False
  595. for (extname, _) in self.info['extensions'].items():
  596. if extname in self.info['local_extensions']:
  597. continue
  598. updated = self._update_extension(extname)
  599. # Rebuild if at least one update happens:
  600. should_rebuild = should_rebuild or updated
  601. return should_rebuild
  602. def update_extension(self, name):
  603. """Update an extension by name.
  604. Returns `True` if a rebuild is recommended, `False` otherwise.
  605. """
  606. if name not in self.info['extensions']:
  607. self.logger.warn('No labextension named "%s" installed' % name)
  608. return False
  609. return self._update_extension(name)
    def _update_extension(self, name):
        """Update an extension by name.

        Returns `True` if a rebuild is recommended, `False` otherwise.
        """
        try:
            latest = self._latest_compatible_package_version(name)
        except URLError:
            # Registry unreachable; treat as "nothing to update".
            return False
        if latest is None:
            return False
        if latest == self.info['extensions'][name]['version']:
            self.logger.info('Extension %r already up to date' % name)
            return False
        self.logger.info('Updating %s to version %s' % (name, latest))
        # Reinstall at the pinned latest-compatible version.
        return self.install_extension('%s@%s' % (name, latest))
  625. def link_package(self, path):
  626. """Link a package at the given path.
  627. Returns `True` if a rebuild is recommended, `False` otherwise.
  628. """
  629. path = _normalize_path(path)
  630. if not osp.exists(path) or not osp.isdir(path):
  631. msg = 'Can install "%s" only link local directories'
  632. raise ValueError(msg % path)
  633. with TemporaryDirectory() as tempdir:
  634. info = self._extract_package(path, tempdir)
  635. messages = _validate_extension(info['data'])
  636. if not messages:
  637. return self.install_extension(path)
  638. # Warn that it is a linked package.
  639. self.logger.warn('Installing %s as a linked package:', path)
  640. [self.logger.warn(m) for m in messages]
  641. # Add to metadata.
  642. config = self._read_build_config()
  643. linked = config.setdefault('linked_packages', dict())
  644. linked[info['name']] = info['source']
  645. self._write_build_config(config)
  646. return True
def unlink_package(self, path):
    """Unlink a package by name or at the given path.

    A ValueError is raised if the path is not an unlinkable package.

    Returns `True` if a rebuild is recommended, `False` otherwise.
    """
    path = _normalize_path(path)
    config = self._read_build_config()
    linked = config.setdefault('linked_packages', dict())

    found = None
    # `path` may be either a package name or its source path; match both.
    for (name, source) in linked.items():
        if name == path or source == path:
            found = name

    if found:
        del linked[found]
    else:
        # Not a linked package; it may be a local extension instead.
        local = config.setdefault('local_extensions', dict())
        for (name, source) in local.items():
            if name == path or source == path:
                found = name
        if found:
            del local[found]
            # Local extensions also leave an installed tarball behind;
            # remove it from the app extensions directory.
            path = self.info['extensions'][found]['path']
            os.remove(path)

    if not found:
        raise ValueError('No linked package for %s' % path)

    self._write_build_config(config)
    return True
  674. def toggle_extension(self, extension, value):
  675. """Enable or disable a lab extension.
  676. Returns `True` if a rebuild is recommended, `False` otherwise.
  677. """
  678. config = self._read_page_config()
  679. disabled = config.setdefault('disabledExtensions', [])
  680. did_something = False
  681. if value and extension not in disabled:
  682. disabled.append(extension)
  683. did_something = True
  684. elif not value and extension in disabled:
  685. disabled.remove(extension)
  686. did_something = True
  687. if did_something:
  688. self._write_page_config(config)
  689. return did_something
  690. def check_extension(self, extension, check_installed_only=False):
  691. """Check if a lab extension is enabled or disabled
  692. """
  693. info = self.info
  694. if extension in info["core_extensions"]:
  695. return self._check_core_extension(
  696. extension, info, check_installed_only)
  697. if extension in info["linked_packages"]:
  698. self.logger.info('%s:%s' % (extension, GREEN_ENABLED))
  699. return True
  700. return self._check_common_extension(
  701. extension, info, check_installed_only)
def _check_core_extension(self, extension, info, check_installed_only):
    """Check if a core extension is enabled or disabled

    Logs a one-line status for the extension and returns `True` when
    it is installed (and, unless `check_installed_only`, enabled).
    """
    # Core extensions can be explicitly uninstalled via build config.
    if extension in info['uninstalled_core']:
        self.logger.info('%s:%s' % (extension, RED_X))
        return False
    # Installed; stop here if that is all the caller asked about.
    if check_installed_only:
        self.logger.info('%s: %s' % (extension, GREEN_OK))
        return True
    if extension in info['disabled_core']:
        self.logger.info('%s: %s' % (extension, RED_DISABLED))
        return False
    self.logger.info('%s:%s' % (extension, GREEN_ENABLED))
    return True
def _check_common_extension(self, extension, info, check_installed_only):
    """Check if a common (non-core) extension is enabled or disabled

    Logs a one-line status and returns `True` when the extension is
    installed, compatible and (unless `check_installed_only`) enabled.
    """
    if extension not in info['extensions']:
        # Not installed at all.
        self.logger.info('%s:%s' % (extension, RED_X))
        return False

    # Incompatible extensions are reported as broken even if installed.
    errors = self._get_extension_compat()[extension]
    if errors:
        self.logger.info('%s:%s (compatibility errors)' %
                         (extension, RED_X))
        return False

    if check_installed_only:
        self.logger.info('%s: %s' % (extension, GREEN_OK))
        return True

    if _is_disabled(extension, info['disabled']):
        self.logger.info('%s: %s' % (extension, RED_DISABLED))
        return False

    self.logger.info('%s:%s' % (extension, GREEN_ENABLED))
    return True
def _get_app_info(self):
    """Get information about the app.

    Builds the aggregate `info` dict used throughout this handler:
    core data, installed/local/linked extensions, disabled lists,
    directories and version metadata.
    """
    info = dict()
    info['core_data'] = core_data = _get_core_data()
    info['extensions'] = extensions = self._get_extensions(core_data)
    page_config = self._read_page_config()
    info['disabled'] = page_config.get('disabledExtensions', [])
    info['local_extensions'] = self._get_local_extensions()
    info['linked_packages'] = self._get_linked_packages()

    # Partition extension names by install location (app dir vs sys dir).
    info['app_extensions'] = app = []
    info['sys_extensions'] = sys = []  # NOTE: local name shadows the `sys` builtin module name
    for (name, data) in extensions.items():
        data['is_local'] = name in info['local_extensions']
        if data['location'] == 'app':
            app.append(name)
        else:
            sys.append(name)

    info['uninstalled_core'] = self._get_uninstalled_core_extensions()

    # Prefer metadata from an existing static build; fall back to core data.
    info['static_data'] = _get_static_data(self.app_dir)
    app_data = info['static_data'] or core_data
    info['version'] = app_data['jupyterlab']['version']
    info['staticUrl'] = app_data['jupyterlab'].get('staticUrl', '')

    info['sys_dir'] = self.sys_dir
    info['app_dir'] = self.app_dir

    # Record which core extensions are currently disabled.
    info['core_extensions'] = core_extensions = _get_core_extensions()
    disabled_core = []
    for key in core_extensions:
        if key in info['disabled']:
            disabled_core.append(key)
    info['disabled_core'] = disabled_core
    return info
def _populate_staging(self, name=None, version=None, static_url=None,
                      clean=False):
    """Set up the assets in the staging directory.

    Copies the bundled staging assets, refreshes the templates and
    linked-package tarballs, and writes the templated package.json
    (plus a known-good yarn.lock when missing).
    """
    app_dir = self.app_dir
    staging = pjoin(app_dir, 'staging')
    if clean and osp.exists(staging):
        self.logger.info("Cleaning %s", staging)
        _rmtree(staging, self.logger)

    self._ensure_app_dirs()
    if not version:
        version = self.info['core_data']['jupyterlab']['version']

    # Look for mismatched version.
    pkg_path = pjoin(staging, 'package.json')
    if osp.exists(pkg_path):
        with open(pkg_path) as fid:
            data = json.load(fid)
        if data['jupyterlab'].get('version', '') != version:
            # Stale staging from a different JupyterLab version: rebuild it.
            _rmtree(staging, self.logger)
            os.makedirs(staging)

    for fname in ['index.js', 'webpack.config.js',
                  'webpack.prod.config.js',
                  '.yarnrc', 'yarn.js']:
        target = pjoin(staging, fname)
        shutil.copy(pjoin(HERE, 'staging', fname), target)

    # Ensure a clean templates directory
    templates = pjoin(staging, 'templates')
    if osp.exists(templates):
        _rmtree(templates, self.logger)

    try:
        shutil.copytree(pjoin(HERE, 'staging', 'templates'), templates)
    except shutil.Error as error:
        # `copytree` throws an error if copying to + from NFS even though
        # the copy is successful (see https://bugs.python.org/issue24564
        # and https://github.com/jupyterlab/jupyterlab/issues/5233)
        real_error = '[Errno 22]' not in str(error) and '[Errno 5]' not in str(error)
        if real_error or not osp.exists(templates):
            raise

    # Ensure a clean linked packages directory.
    linked_dir = pjoin(staging, 'linked_packages')
    if osp.exists(linked_dir):
        _rmtree(linked_dir, self.logger)
    os.makedirs(linked_dir)

    # Template the package.json file.
    # Update the local extensions.
    extensions = self.info['extensions']
    removed = False
    for (key, source) in self.info['local_extensions'].items():
        # Handle a local extension that was removed.
        if key not in extensions:
            config = self._read_build_config()
            data = config.setdefault('local_extensions', dict())
            del data[key]
            self._write_build_config(config)
            removed = True
            continue
        dname = pjoin(app_dir, 'extensions')
        self._update_local(key, source, dname, extensions[key],
                           'local_extensions')

    # Update the list of local extensions if any were removed.
    if removed:
        self.info['local_extensions'] = self._get_local_extensions()

    # Update the linked packages.
    linked = self.info['linked_packages']
    for (key, item) in linked.items():
        dname = pjoin(staging, 'linked_packages')
        self._update_local(key, item['source'], dname, item,
                           'linked_packages')

    # Then get the package template.
    data = self._get_package_template()

    if version:
        data['jupyterlab']['version'] = version

    if name:
        data['jupyterlab']['name'] = name

    if static_url:
        data['jupyterlab']['staticUrl'] = static_url

    pkg_path = pjoin(staging, 'package.json')
    with open(pkg_path, 'w') as fid:
        json.dump(data, fid, indent=4)

    # copy known-good yarn.lock if missing
    lock_path = pjoin(staging, 'yarn.lock')
    if not osp.exists(lock_path):
        shutil.copy(pjoin(HERE, 'staging', 'yarn.lock'), lock_path)
def _get_package_template(self, silent=False):
    """Get the template the for staging package.json file.

    Starts from the core data and splices in local extensions, linked
    packages and installed extensions (skipping incompatible ones,
    logging unless `silent`).  Uninstalled core extensions are removed.
    """
    logger = self.logger
    # NOTE: `data` aliases the shared core_data dict; the mutations
    # below are intentional and feed directly into staging.
    data = self.info['core_data']
    local = self.info['local_extensions']
    linked = self.info['linked_packages']
    extensions = self.info['extensions']
    jlab = data['jupyterlab']

    def format_path(path):
        # Dependencies are referenced as `file:` paths relative to staging.
        path = osp.relpath(path, pjoin(self.app_dir, 'staging'))
        path = 'file:' + path.replace(os.sep, '/')
        if os.name == 'nt':
            path = path.lower()
        return path

    jlab['linkedPackages'] = dict()

    # Handle local extensions.
    for (key, source) in local.items():
        jlab['linkedPackages'][key] = source

    # Handle linked packages.
    for (key, item) in linked.items():
        path = pjoin(self.app_dir, 'staging', 'linked_packages')
        path = pjoin(path, item['filename'])
        data['dependencies'][key] = format_path(path)
        jlab['linkedPackages'][key] = item['source']

    # Handle extensions
    compat_errors = self._get_extension_compat()
    for (key, value) in extensions.items():
        # Reject incompatible extensions with a message.
        errors = compat_errors[key]
        if errors:
            if not silent:
                _log_single_compat_errors(
                    logger, key, value['version'], errors
                )
            continue

        data['dependencies'][key] = format_path(value['path'])

        jlab_data = value['jupyterlab']
        for item in ['extension', 'mimeExtension']:
            ext = jlab_data.get(item, False)
            if not ext:
                continue
            if ext is True:
                # `True` means "use the package main module".
                ext = ''
            jlab[item + 's'][key] = ext

    # Handle uninstalled core extensions.
    for item in self.info['uninstalled_core']:
        if item in jlab['extensions']:
            data['jupyterlab']['extensions'].pop(item)
        else:
            data['jupyterlab']['mimeExtensions'].pop(item)
        # Remove from dependencies as well.
        data['dependencies'].pop(item)

    return data
  904. def _check_local(self, name, source, dname):
  905. """Check if a local package has changed.
  906. `dname` is the directory name of existing package tar archives.
  907. """
  908. # Extract the package in a temporary directory.
  909. with TemporaryDirectory() as tempdir:
  910. info = self._extract_package(source, tempdir)
  911. # Test if the file content has changed.
  912. # This relies on `_extract_package` adding the hashsum
  913. # to the filename, allowing a simple exist check to
  914. # compare the hash to the "cache" in dname.
  915. target = pjoin(dname, info['filename'])
  916. return not osp.exists(target)
  917. def _update_local(self, name, source, dname, data, dtype):
  918. """Update a local dependency. Return `True` if changed.
  919. """
  920. # Extract the package in a temporary directory.
  921. existing = data['filename']
  922. with TemporaryDirectory() as tempdir:
  923. info = self._extract_package(source, tempdir)
  924. # Bail if the file content has not changed.
  925. if info['filename'] == existing:
  926. return existing
  927. shutil.move(info['path'], pjoin(dname, info['filename']))
  928. # Remove the existing tarball and return the new file name.
  929. if existing:
  930. os.remove(pjoin(dname, existing))
  931. data['filename'] = info['filename']
  932. data['path'] = pjoin(data['tar_dir'], data['filename'])
  933. return info['filename']
  934. def _get_extensions(self, core_data):
  935. """Get the extensions for the application.
  936. """
  937. app_dir = self.app_dir
  938. extensions = dict()
  939. # Get system level packages.
  940. sys_path = pjoin(self.sys_dir, 'extensions')
  941. app_path = pjoin(self.app_dir, 'extensions')
  942. extensions = self._get_extensions_in_dir(self.sys_dir, core_data)
  943. # Look in app_dir if different.
  944. app_path = pjoin(app_dir, 'extensions')
  945. if app_path == sys_path or not osp.exists(app_path):
  946. return extensions
  947. extensions.update(self._get_extensions_in_dir(app_dir, core_data))
  948. return extensions
  949. def _get_extensions_in_dir(self, dname, core_data):
  950. """Get the extensions in a given directory.
  951. """
  952. extensions = dict()
  953. location = 'app' if dname == self.app_dir else 'sys'
  954. for target in glob.glob(pjoin(dname, 'extensions', '*.tgz')):
  955. data = read_package(target)
  956. deps = data.get('dependencies', dict())
  957. name = data['name']
  958. jlab = data.get('jupyterlab', dict())
  959. path = osp.abspath(target)
  960. # homepage, repository are optional
  961. if 'homepage' in data:
  962. url = data['homepage']
  963. elif 'repository' in data and isinstance(data['repository'], dict):
  964. url = data['repository'].get('url', '')
  965. else:
  966. url = ''
  967. extensions[name] = dict(path=path,
  968. filename=osp.basename(path),
  969. url=url,
  970. version=data['version'],
  971. jupyterlab=jlab,
  972. dependencies=deps,
  973. tar_dir=osp.dirname(path),
  974. location=location)
  975. return extensions
  976. def _get_extension_compat(self):
  977. """Get the extension compatibility info.
  978. """
  979. compat = dict()
  980. core_data = self.info['core_data']
  981. for (name, data) in self.info['extensions'].items():
  982. deps = data['dependencies']
  983. compat[name] = _validate_compatibility(name, deps, core_data)
  984. return compat
  985. def _get_local_extensions(self):
  986. """Get the locally installed extensions.
  987. """
  988. return self._get_local_data('local_extensions')
def _get_linked_packages(self):
    """Get the linked packages.

    Returns a dict of name -> metadata (source, filename, tar_dir and,
    when a cached tarball exists, path/version/data).  Orphaned cached
    tarballs with no config entry are deleted as a side effect.
    """
    info = self._get_local_data('linked_packages')
    dname = pjoin(self.app_dir, 'staging', 'linked_packages')
    # Normalize config entries (plain source strings) into dicts.
    for (name, source) in info.items():
        info[name] = dict(source=source, filename='', tar_dir=dname)

    if not osp.exists(dname):
        return info

    # Fill in metadata from cached tarballs in the staging dir.
    for path in glob.glob(pjoin(dname, '*.tgz')):
        path = osp.abspath(path)
        data = read_package(path)
        name = data['name']
        if name not in info:
            # Tarball no longer has a config entry: clean it up.
            self.logger.warn('Removing orphaned linked package %s' % name)
            os.remove(path)
            continue
        item = info[name]
        item['filename'] = osp.basename(path)
        item['path'] = path
        item['version'] = data['version']
        item['data'] = data
    return info
  1012. def _get_uninstalled_core_extensions(self):
  1013. """Get the uninstalled core extensions.
  1014. """
  1015. config = self._read_build_config()
  1016. return config.get('uninstalled_core_extensions', [])
  1017. def _ensure_app_dirs(self):
  1018. """Ensure that the application directories exist"""
  1019. dirs = ['extensions', 'settings', 'staging', 'schemas', 'themes']
  1020. for dname in dirs:
  1021. path = pjoin(self.app_dir, dname)
  1022. if not osp.exists(path):
  1023. try:
  1024. os.makedirs(path)
  1025. except OSError as e:
  1026. if e.errno != errno.EEXIST:
  1027. raise
def _list_extensions(self, info, ext_type):
    """List the extensions of a given type.

    `ext_type` is 'app' or 'sys'.  Prints one status line per
    extension (enabled/disabled, OK/errors, '*' for local) and a
    combined compatibility-error report at the end.
    """
    logger = self.logger
    names = info['%s_extensions' % ext_type]
    if not names:
        return

    dname = info['%s_dir' % ext_type]

    # Collect per-extension errors so they can be reported together.
    error_accumulator = {}

    logger.info(' %s dir: %s' % (ext_type, dname))
    for name in sorted(names):
        data = info['extensions'][name]
        version = data['version']
        errors = info['compat_errors'][name]
        extra = ''
        if _is_disabled(name, info['disabled']):
            extra += ' %s' % RED_DISABLED
        else:
            extra += ' %s' % GREEN_ENABLED
        if errors:
            extra += ' %s' % RED_X
        else:
            extra += ' %s' % GREEN_OK
        if data['is_local']:
            # A trailing '*' marks a locally installed (linked) extension.
            extra += '*'
        logger.info(' %s v%s%s' % (name, version, extra))
        if errors:
            error_accumulator[name] = (version, errors)

    # Write all errors at end:
    _log_multiple_compat_errors(logger, error_accumulator)
  1058. def _read_build_config(self):
  1059. """Get the build config data for the app dir.
  1060. """
  1061. target = pjoin(self.app_dir, 'settings', 'build_config.json')
  1062. if not osp.exists(target):
  1063. return {}
  1064. else:
  1065. with open(target) as fid:
  1066. return json.load(fid)
  1067. def _write_build_config(self, config):
  1068. """Write the build config to the app dir.
  1069. """
  1070. self._ensure_app_dirs()
  1071. target = pjoin(self.app_dir, 'settings', 'build_config.json')
  1072. with open(target, 'w') as fid:
  1073. json.dump(config, fid, indent=4)
  1074. def _read_page_config(self):
  1075. """Get the page config data for the app dir.
  1076. """
  1077. target = pjoin(self.app_dir, 'settings', 'page_config.json')
  1078. if not osp.exists(target):
  1079. return {}
  1080. else:
  1081. with open(target) as fid:
  1082. return json.load(fid)
def _write_page_config(self, config):
    """Write the page config to the app dir.

    Persists `config` (e.g. the disabledExtensions list) as JSON in
    settings/page_config.json, creating the app dirs if needed.
    """
    self._ensure_app_dirs()
    target = pjoin(self.app_dir, 'settings', 'page_config.json')
    with open(target, 'w') as fid:
        json.dump(config, fid, indent=4)
  1090. def _get_local_data(self, source):
  1091. """Get the local data for extensions or linked packages.
  1092. """
  1093. config = self._read_build_config()
  1094. data = config.setdefault(source, dict())
  1095. dead = []
  1096. for (name, source) in data.items():
  1097. if not osp.exists(source):
  1098. dead.append(name)
  1099. for name in dead:
  1100. link_type = source.replace('_', ' ')
  1101. msg = '**Note: Removing dead %s "%s"' % (link_type, name)
  1102. self.logger.warn(msg)
  1103. del data[name]
  1104. if dead:
  1105. self._write_build_config(config)
  1106. return data
def _install_extension(self, extension, tempdir):
    """Install an extension with validation and return the name and path.

    Raises ValueError when the package is not a valid extension or is
    incompatible with the current JupyterLab (after attempting to fall
    back to the latest compatible registry version).
    """
    info = self._extract_package(extension, tempdir)
    data = info['data']

    # Verify that the package is an extension.
    messages = _validate_extension(data)
    if messages:
        msg = '"%s" is not a valid extension:\n%s'
        raise ValueError(msg % (extension, '\n'.join(messages)))

    # Verify package compatibility.
    core_data = _get_core_data()
    deps = data.get('dependencies', dict())
    errors = _validate_compatibility(extension, deps, core_data)
    if errors:
        msg = _format_compatibility_errors(
            data['name'], data['version'], errors
        )
        # Check for compatible version unless:
        # - A specific version was requested (@ in name,
        #   but after first char to allow for scope marker).
        # - Package is locally installed.
        if '@' not in extension[1:] and not info['is_dir']:
            name = info['name']
            try:
                version = self._latest_compatible_package_version(name)
            except URLError:
                # We cannot add any additional information to error message
                raise ValueError(msg)

            if version and name:
                # Retry with the compatible version, recursively.
                self.logger.debug('Incompatible extension:\n%s', name)
                self.logger.debug('Found compatible version: %s', version)
                with TemporaryDirectory() as tempdir2:
                    return self._install_extension(
                        '%s@%s' % (name, version), tempdir2)

            # Extend message to better guide the user what to do:
            conflicts = '\n'.join(msg.splitlines()[2:])
            msg = ''.join((
                self._format_no_compatible_package_version(name),
                "\n\n",
                conflicts))

        raise ValueError(msg)

    # Move the file to the app directory.
    target = pjoin(self.app_dir, 'extensions', info['filename'])
    if osp.exists(target):
        os.remove(target)

    shutil.move(info['path'], target)

    info['path'] = target
    return info
  1156. def _extract_package(self, source, tempdir):
  1157. """Call `npm pack` for an extension.
  1158. The pack command will download the package tar if `source` is
  1159. a package name, or run `npm pack` locally if `source` is a
  1160. directory.
  1161. """
  1162. is_dir = osp.exists(source) and osp.isdir(source)
  1163. if is_dir and not osp.exists(pjoin(source, 'node_modules')):
  1164. self._run(['node', YARN_PATH, 'install'], cwd=source)
  1165. info = dict(source=source, is_dir=is_dir)
  1166. ret = self._run([which('npm'), 'pack', source], cwd=tempdir)
  1167. if ret != 0:
  1168. msg = '"%s" is not a valid npm package'
  1169. raise ValueError(msg % source)
  1170. path = glob.glob(pjoin(tempdir, '*.tgz'))[0]
  1171. info['data'] = read_package(path)
  1172. if is_dir:
  1173. info['sha'] = sha = _tarsum(path)
  1174. target = path.replace('.tgz', '-%s.tgz' % sha)
  1175. shutil.move(path, target)
  1176. info['path'] = target
  1177. else:
  1178. info['path'] = path
  1179. info['filename'] = osp.basename(info['path'])
  1180. info['name'] = info['data']['name']
  1181. info['version'] = info['data']['version']
  1182. return info
  1183. def _latest_compatible_package_version(self, name):
  1184. """Get the latest compatible version of a package"""
  1185. core_data = self.info['core_data']
  1186. try:
  1187. metadata = _fetch_package_metadata(self.registry, name, self.logger)
  1188. except URLError:
  1189. return
  1190. versions = metadata.get('versions', [])
  1191. # Sort pre-release first, as we will reverse the sort:
  1192. def sort_key(key_value):
  1193. return _semver_key(key_value[0], prerelease_first=True)
  1194. for version, data in sorted(versions.items(),
  1195. key=sort_key,
  1196. reverse=True):
  1197. deps = data.get('dependencies', {})
  1198. errors = _validate_compatibility(name, deps, core_data)
  1199. if not errors:
  1200. # Found a compatible version
  1201. # Verify that the version is a valid extension.
  1202. with TemporaryDirectory() as tempdir:
  1203. info = self._extract_package(
  1204. '%s@%s' % (name, version), tempdir, quiet=True)
  1205. if _validate_extension(info['data']):
  1206. # Invalid, do not consider other versions
  1207. return
  1208. # Valid
  1209. return version
  1210. def latest_compatible_package_versions(self, names):
  1211. """Get the latest compatible versions of several packages
  1212. Like _latest_compatible_package_version, but optimized for
  1213. retrieving the latest version for several packages in one go.
  1214. """
  1215. core_data = self.info['core_data']
  1216. keys = []
  1217. for name in names:
  1218. try:
  1219. metadata = _fetch_package_metadata(self.registry, name, self.logger)
  1220. except URLError:
  1221. continue
  1222. versions = metadata.get('versions', [])
  1223. # Sort pre-release first, as we will reverse the sort:
  1224. def sort_key(key_value):
  1225. return _semver_key(key_value[0], prerelease_first=True)
  1226. for version, data in sorted(versions.items(),
  1227. key=sort_key,
  1228. reverse=True):
  1229. deps = data.get('dependencies', {})
  1230. errors = _validate_compatibility(name, deps, core_data)
  1231. if not errors:
  1232. # Found a compatible version
  1233. keys.append('%s@%s' % (name, version))
  1234. break # break inner for
  1235. versions = {}
  1236. if not keys:
  1237. return versions
  1238. with TemporaryDirectory() as tempdir:
  1239. ret = self._run([which('npm'), 'pack'] + keys, cwd=tempdir)
  1240. if ret != 0:
  1241. msg = '"%s" is not a valid npm package'
  1242. raise ValueError(msg % keys)
  1243. for key in keys:
  1244. fname = key[0].replace('@', '') + key[1:].replace('@', '-').replace('/', '-') + '.tgz'
  1245. data = read_package(os.path.join(tempdir, fname))
  1246. # Verify that the version is a valid extension.
  1247. if not _validate_extension(data):
  1248. # Valid
  1249. versions[key] = data['version']
  1250. return versions
  1251. def _format_no_compatible_package_version(self, name):
  1252. """Get the latest compatible version of a package"""
  1253. core_data = self.info['core_data']
  1254. # Whether lab version is too new:
  1255. lab_newer_than_latest = False
  1256. # Whether the latest version of the extension depend on a "future" version
  1257. # of a singleton package (from the perspective of current lab version):
  1258. latest_newer_than_lab = False
  1259. try:
  1260. metadata = _fetch_package_metadata(self.registry, name, self.logger)
  1261. except URLError:
  1262. pass
  1263. else:
  1264. versions = metadata.get('versions', [])
  1265. # Sort pre-release first, as we will reverse the sort:
  1266. def sort_key(key_value):
  1267. return _semver_key(key_value[0], prerelease_first=True)
  1268. store = tuple(sorted(versions.items(), key=sort_key, reverse=True))
  1269. latest_deps = store[0][1].get('dependencies', {})
  1270. core_deps = core_data['dependencies']
  1271. singletons = core_data['jupyterlab']['singletonPackages']
  1272. for (key, value) in latest_deps.items():
  1273. if key in singletons:
  1274. c = _compare_ranges(core_deps[key], value)
  1275. lab_newer_than_latest = lab_newer_than_latest or c < 0
  1276. latest_newer_than_lab = latest_newer_than_lab or c > 0
  1277. if lab_newer_than_latest:
  1278. # All singleton deps in current version of lab are newer than those
  1279. # in the latest version of the extension
  1280. return ("This extension does not yet support the current version of "
  1281. "JupyterLab.\n")
  1282. parts = ["No version of {extension} could be found that is compatible with "
  1283. "the current version of JupyterLab."]
  1284. if latest_newer_than_lab:
  1285. parts.extend(("However, it seems to support a new version of JupyterLab.",
  1286. "Consider upgrading JupyterLab."))
  1287. return " ".join(parts).format(extension=name)
def _run(self, cmd, **kwargs):
    """Run the command using our logger and abort callback.

    Returns the exit code.

    Extra keyword arguments are forwarded to `ProgressProcess`
    (e.g. `cwd`); `logger` and `kill_event` are always injected.
    """
    if self.kill_event.is_set():
        # A prior abort request means no further commands may run.
        raise ValueError('Command was killed')

    kwargs['logger'] = self.logger
    kwargs['kill_event'] = self.kill_event
    proc = ProgressProcess(cmd, **kwargs)
    return proc.wait()
def _node_check(logger):
    """Check for the existence of nodejs with the correct version.

    Raises ValueError with an install hint when node is missing or the
    bundled version-check script fails.
    """
    node = which('node')
    try:
        output = subprocess.check_output([node, 'node-version-check.js'], cwd=HERE)
        logger.debug(output.decode('utf-8'))
    except Exception:
        # Any failure (missing binary, wrong version, script error) maps
        # to a single actionable message with the required version range.
        data = _get_core_data()
        ver = data['engines']['node']
        msg = 'Please install nodejs %s before continuing. nodejs may be installed using conda or directly from the nodejs website.' % ver
        raise ValueError(msg)
  1310. def _ensure_logger(logger=None):
  1311. """Ensure that we have a logger"""
  1312. return logger or logging.getLogger('jupyterlab')
  1313. def _normalize_path(extension):
  1314. """Normalize a given extension if it is a path.
  1315. """
  1316. extension = osp.expanduser(extension)
  1317. if osp.exists(extension):
  1318. extension = osp.abspath(extension)
  1319. return extension
  1320. def _rmtree(path, logger):
  1321. """Remove a tree, logging errors"""
  1322. def onerror(*exc_info):
  1323. logger.debug('Error in rmtree', exc_info=exc_info)
  1324. shutil.rmtree(path, onerror=onerror)
  1325. def _validate_extension(data):
  1326. """Detect if a package is an extension using its metadata.
  1327. Returns any problems it finds.
  1328. """
  1329. jlab = data.get('jupyterlab', None)
  1330. if jlab is None:
  1331. return ['No `jupyterlab` key']
  1332. if not isinstance(jlab, dict):
  1333. return ['The `jupyterlab` key must be a JSON object']
  1334. extension = jlab.get('extension', False)
  1335. mime_extension = jlab.get('mimeExtension', False)
  1336. themePath = jlab.get('themePath', '')
  1337. schemaDir = jlab.get('schemaDir', '')
  1338. messages = []
  1339. if not extension and not mime_extension:
  1340. messages.append('No `extension` or `mimeExtension` key present')
  1341. if extension == mime_extension:
  1342. msg = '`mimeExtension` and `extension` must point to different modules'
  1343. messages.append(msg)
  1344. files = data['jupyterlab_extracted_files']
  1345. main = data.get('main', 'index.js')
  1346. if not main.endswith('.js'):
  1347. main += '.js'
  1348. if extension is True:
  1349. extension = main
  1350. elif extension and not extension.endswith('.js'):
  1351. extension += '.js'
  1352. if mime_extension is True:
  1353. mime_extension = main
  1354. elif mime_extension and not mime_extension.endswith('.js'):
  1355. mime_extension += '.js'
  1356. if extension and extension not in files:
  1357. messages.append('Missing extension module "%s"' % extension)
  1358. if mime_extension and mime_extension not in files:
  1359. messages.append('Missing mimeExtension module "%s"' % mime_extension)
  1360. if themePath and not any(f.startswith(themePath) for f in files):
  1361. messages.append('themePath is empty: "%s"' % themePath)
  1362. if schemaDir and not any(f.startswith(schemaDir) for f in files):
  1363. messages.append('schemaDir is empty: "%s"' % schemaDir)
  1364. return messages
  1365. def _tarsum(input_file):
  1366. """
  1367. Compute the recursive sha sum of a tar file.
  1368. """
  1369. tar = tarfile.open(input_file, "r")
  1370. chunk_size = 100 * 1024
  1371. h = hashlib.new("sha1")
  1372. for member in tar:
  1373. if not member.isfile():
  1374. continue
  1375. f = tar.extractfile(member)
  1376. data = f.read(chunk_size)
  1377. while data:
  1378. h.update(data)
  1379. data = f.read(chunk_size)
  1380. return h.hexdigest()
  1381. def _get_core_data():
  1382. """Get the data for the app template.
  1383. """
  1384. with open(pjoin(HERE, 'staging', 'package.json')) as fid:
  1385. return json.load(fid)
  1386. def _get_static_data(app_dir):
  1387. """Get the data for the app static dir.
  1388. """
  1389. target = pjoin(app_dir, 'static', 'package.json')
  1390. if os.path.exists(target):
  1391. with open(target) as fid:
  1392. return json.load(fid)
  1393. else:
  1394. return None
  1395. def _validate_compatibility(extension, deps, core_data):
  1396. """Validate the compatibility of an extension.
  1397. """
  1398. core_deps = core_data['dependencies']
  1399. singletons = core_data['jupyterlab']['singletonPackages']
  1400. errors = []
  1401. for (key, value) in deps.items():
  1402. if key in singletons:
  1403. overlap = _test_overlap(core_deps[key], value)
  1404. if overlap is False:
  1405. errors.append((key, core_deps[key], value))
  1406. return errors
  1407. def _test_overlap(spec1, spec2):
  1408. """Test whether two version specs overlap.
  1409. Returns `None` if we cannot determine compatibility,
  1410. otherwise whether there is an overlap
  1411. """
  1412. cmp = _compare_ranges(spec1, spec2)
  1413. if cmp is None:
  1414. return
  1415. return cmp == 0
  1416. def _compare_ranges(spec1, spec2):
  1417. """Test whether two version specs overlap.
  1418. Returns `None` if we cannot determine compatibility,
  1419. otherwise return 0 if there is an overlap, 1 if
  1420. spec1 is lower/older than spec2, and -1 if spec1
  1421. is higher/newer than spec2.
  1422. """
  1423. # Test for overlapping semver ranges.
  1424. r1 = Range(spec1, True)
  1425. r2 = Range(spec2, True)
  1426. # If either range is empty, we cannot verify.
  1427. if not r1.range or not r2.range:
  1428. return
  1429. x1 = r1.set[0][0].semver
  1430. x2 = r1.set[0][-1].semver
  1431. y1 = r2.set[0][0].semver
  1432. y2 = r2.set[0][-1].semver
  1433. o1 = r1.set[0][0].operator
  1434. o2 = r2.set[0][0].operator
  1435. # We do not handle (<) specifiers.
  1436. if (o1.startswith('<') or o2.startswith('<')):
  1437. return
  1438. # Handle single value specifiers.
  1439. lx = lte if x1 == x2 else lt
  1440. ly = lte if y1 == y2 else lt
  1441. gx = gte if x1 == x2 else gt
  1442. gy = gte if x1 == x2 else gt
  1443. # Handle unbounded (>) specifiers.
  1444. def noop(x, y, z):
  1445. return True
  1446. if x1 == x2 and o1.startswith('>'):
  1447. lx = noop
  1448. if y1 == y2 and o2.startswith('>'):
  1449. ly = noop
  1450. # Check for overlap.
  1451. if (gte(x1, y1, True) and ly(x1, y2, True) or
  1452. gy(x2, y1, True) and ly(x2, y2, True) or
  1453. gte(y1, x1, True) and lx(y1, x2, True) or
  1454. gx(y2, x1, True) and lx(y2, x2, True)
  1455. ):
  1456. return 0
  1457. if gte(y1, x2, True):
  1458. return 1
  1459. if gte(x1, y2, True):
  1460. return -1
  1461. raise AssertionError('Unexpected case comparing version ranges')
  1462. def _is_disabled(name, disabled=[]):
  1463. """Test whether the package is disabled.
  1464. """
  1465. for pattern in disabled:
  1466. if name == pattern:
  1467. return True
  1468. if re.compile(pattern).match(name) is not None:
  1469. return True
  1470. return False
  1471. def _format_compatibility_errors(name, version, errors):
  1472. """Format a message for compatibility errors.
  1473. """
  1474. msgs = []
  1475. l0 = 10
  1476. l1 = 10
  1477. for error in errors:
  1478. pkg, jlab, ext = error
  1479. jlab = str(Range(jlab, True))
  1480. ext = str(Range(ext, True))
  1481. msgs.append((pkg, jlab, ext))
  1482. l0 = max(l0, len(pkg) + 1)
  1483. l1 = max(l1, len(jlab) + 1)
  1484. msg = '\n"%s@%s" is not compatible with the current JupyterLab'
  1485. msg = msg % (name, version)
  1486. msg += '\nConflicting Dependencies:\n'
  1487. msg += 'JupyterLab'.ljust(l0)
  1488. msg += 'Extension'.ljust(l1)
  1489. msg += 'Package\n'
  1490. for (pkg, jlab, ext) in msgs:
  1491. msg += jlab.ljust(l0) + ext.ljust(l1) + pkg + '\n'
  1492. return msg
  1493. def _log_multiple_compat_errors(logger, errors_map):
  1494. """Log compatability errors for multiple extensions at once"""
  1495. outdated = []
  1496. others = []
  1497. for name, (version, errors) in errors_map.items():
  1498. age = _compat_error_age(errors)
  1499. if age > 0:
  1500. outdated.append(name)
  1501. else:
  1502. others.append(name)
  1503. if outdated:
  1504. logger.warn('\n '.join(
  1505. ['\n The following extension are outdated:'] +
  1506. outdated +
  1507. ['\n Consider running "jupyter labextension update --all" '
  1508. 'to check for updates.\n']
  1509. ))
  1510. for name in others:
  1511. version, errors = errors_map[name]
  1512. msg = _format_compatibility_errors(name, version, errors)
  1513. logger.warn(msg + '\n')
  1514. def _log_single_compat_errors(logger, name, version, errors):
  1515. """Log compatability errors for a single extension"""
  1516. age = _compat_error_age(errors)
  1517. if age > 0:
  1518. logger.warn('The extension "%s" is outdated.\n', name)
  1519. else:
  1520. msg = _format_compatibility_errors(name, version, errors)
  1521. logger.warn(msg + '\n')
  1522. def _compat_error_age(errors):
  1523. """Compare all incompatabilites for an extension.
  1524. Returns a number > 0 if all extensions are older than that supported by lab.
  1525. Returns a number < 0 if all extensions are newer than that supported by lab.
  1526. Returns 0 otherwise (i.e. a mix).
  1527. """
  1528. # Do any extensions depend on too old lab packages?
  1529. any_older = False
  1530. # Do any extensions depend on too new lab packages?
  1531. any_newer = False
  1532. for _, jlab, ext in errors:
  1533. c = _compare_ranges(ext, jlab)
  1534. any_newer = any_newer or c < 0
  1535. any_older = any_older or c > 0
  1536. if any_older and not any_newer:
  1537. return 1
  1538. elif any_newer and not any_older:
  1539. return -1
  1540. return 0
  1541. def _get_core_extensions():
  1542. """Get the core extensions.
  1543. """
  1544. data = _get_core_data()['jupyterlab']
  1545. return list(data['extensions']) + list(data['mimeExtensions'])
  1546. def _semver_prerelease_key(prerelease):
  1547. """Sort key for prereleases.
  1548. Precedence for two pre-release versions with the same
  1549. major, minor, and patch version MUST be determined by
  1550. comparing each dot separated identifier from left to
  1551. right until a difference is found as follows:
  1552. identifiers consisting of only digits are compare
  1553. numerically and identifiers with letters or hyphens
  1554. are compared lexically in ASCII sort order. Numeric
  1555. identifiers always have lower precedence than non-
  1556. numeric identifiers. A larger set of pre-release
  1557. fields has a higher precedence than a smaller set,
  1558. if all of the preceding identifiers are equal.
  1559. """
  1560. for entry in prerelease:
  1561. if isinstance(entry, int):
  1562. # Assure numerics always sort before string
  1563. yield ('', entry)
  1564. else:
  1565. # Use ASCII compare:
  1566. yield (entry,)
  1567. def _semver_key(version, prerelease_first=False):
  1568. """A sort key-function for sorting semver version string.
  1569. The default sorting order is ascending (0.x -> 1.x -> 2.x).
  1570. If `prerelease_first`, pre-releases will come before
  1571. ALL other semver keys (not just those with same version).
  1572. I.e (1.0-pre, 2.0-pre -> 0.x -> 1.x -> 2.x).
  1573. Otherwise it will sort in the standard way that it simply
  1574. comes before any release with shared version string
  1575. (0.x -> 1.0-pre -> 1.x -> 2.0-pre -> 2.x).
  1576. """
  1577. v = make_semver(version, True)
  1578. if prerelease_first:
  1579. key = (0,) if v.prerelease else (1,)
  1580. else:
  1581. key = ()
  1582. key = key + (v.major, v.minor, v.patch)
  1583. if not prerelease_first:
  1584. # NOT having a prerelease is > having one
  1585. key = key + (0,) if v.prerelease else (1,)
  1586. if v.prerelease:
  1587. key = key + tuple(_semver_prerelease_key(
  1588. v.prerelease))
  1589. return key
  1590. def _fetch_package_metadata(registry, name, logger):
  1591. """Fetch the metadata for a package from the npm registry"""
  1592. req = Request(
  1593. urljoin(registry, quote(name, safe='@')),
  1594. headers={
  1595. 'Accept': ('application/vnd.npm.install-v1+json;'
  1596. ' q=1.0, application/json; q=0.8, */*')
  1597. }
  1598. )
  1599. try:
  1600. logger.debug('Fetching URL: %s' % (req.full_url))
  1601. except AttributeError:
  1602. logger.debug('Fetching URL: %s' % (req.get_full_url()))
  1603. try:
  1604. with contextlib.closing(urlopen(req)) as response:
  1605. return json.loads(response.read().decode('utf-8'))
  1606. except URLError as exc:
  1607. logger.warning(
  1608. 'Failed to fetch package metadata for %r: %r',
  1609. name, exc)
  1610. raise
# Allow running this module directly to start a dev-mode watch build.
if __name__ == '__main__':
    watch_dev(HERE)