# jupyterlab-sparkmagic-cm.yaml — Helm template: sparkmagic configuration ConfigMap for JupyterLab
  1. apiVersion: v1
  2. kind: ConfigMap
  3. metadata:
  4. name: {{ .Release.Name }}-sparkmagic-config
  5. labels:
  6. tier: aihub-spark
  7. component: jupyterlab
  8. release: {{ .Release.Name }}
  9. data:
  10. config.json: |
  11. {
  12. "kernel_python_credentials": {
  13. "username": "",
  14. "password": "",
  15. "url": "{{ .Values.jupyterlab.livyUrl }}",
  16. "auth": "None"
  17. },
  18. "kernel_scala_credentials": {
  19. "username": "",
  20. "password": "",
  21. "url": "{{ .Values.jupyterlab.livyUrl }}",
  22. "auth": "None"
  23. },
  24. "kernel_r_credentials": {
  25. "username": "",
  26. "password": "",
  27. "url": "{{ .Values.jupyterlab.livyUrl }}"
  28. },
  29. "logging_config": {
  30. "version": 1,
  31. "formatters": {
  32. "magicsFormatter": {
  33. "format": "%(asctime)s\t%(levelname)s\t%(message)s",
  34. "datefmt": ""
  35. }
  36. },
  37. "handlers": {
  38. "magicsHandler": {
  39. "class": "hdijupyterutils.filehandler.MagicsFileHandler",
  40. "formatter": "magicsFormatter",
  41. "home_path": "~/.sparkmagic"
  42. }
  43. },
  44. "loggers": {
  45. "magicsLogger": {
  46. "handlers": [
  47. "magicsHandler"
  48. ],
  49. "level": "DEBUG",
  50. "propagate": 0
  51. }
  52. }
  53. },
  54. "authenticators": {
  55. "Kerberos": "sparkmagic.auth.kerberos.Kerberos",
  56. "None": "sparkmagic.auth.customauth.Authenticator",
  57. "Basic_Access": "sparkmagic.auth.basic.Basic"
  58. },
  59. "wait_for_idle_timeout_seconds": 15,
  60. "livy_session_startup_timeout_seconds": 60,
  61. "fatal_error_suggestion": "The code failed because of a fatal error:\n\t{}.\n\nSome things to try:\na) Make sure Spark has enough available resources for Jupyter to create a Spark context.\nb) Contact your Jupyter administrator to make sure the Spark magics library is configured correctly.\nc) Restart the kernel.",
  62. "ignore_ssl_errors": false,
  63. "session_configs": {
  64. "driverMemory": "1000M",
  65. "executorCores": 2
  66. },
  67. "use_auto_viz": true,
  68. "coerce_dataframe": true,
  69. "max_results_sql": 2500,
  70. "pyspark_dataframe_encoding": "utf-8",
  71. "heartbeat_refresh_seconds": 30,
  72. "livy_server_heartbeat_timeout_seconds": 0,
  73. "heartbeat_retry_seconds": 10,
  74. "server_extension_default_kernel_name": "pysparkkernel",
  75. "custom_headers": {},
  76. "retry_policy": "configurable",
  77. "retry_seconds_to_sleep_list": [
  78. 0.2,
  79. 0.5,
  80. 1,
  81. 3,
  82. 5
  83. ],
  84. "configurable_retry_policy_max_retries": 8
  85. }