Zhang Li 1 年間 前
コミット
5dcd71b0cc
3 ファイル変更、100 行追加、0 行削除
  1. Dockerfile.dev（18 行追加、0 行削除）
  2. Makefile（7 行追加、0 行削除）
  3. confs/idcprod/config.json（75 行追加、0 行削除）

+ 18 - 0
Dockerfile.dev

@@ -169,3 +169,21 @@ redirect_stderr=true\n\
 stdout_logfile=/var/log/jupyter.log\n\
 stdout_logfile_maxbytes=50MB\n\
 " > /etc/supervisor/conf.d/jupyter.conf
+
+
+# 电信云生产
+FROM builder3 as image-idcprod
+COPY --from=builder-fe /src/dist  dist
+RUN /opt/conda/bin/pip install dist/*.whl  -i https://mirror.baidu.com/pypi/simple
+ADD confs/idcprod/config.json .
+RUN mkdir -p $HOME/.sparkmagic && cp config.json $HOME/.sparkmagic
+RUN echo "\
+[program:jupyter]\n\
+directory=/workspace\n\
+command=/bin/bash -c '/opt/conda/bin/jupyter lab --ip 0.0.0.0 --port 8888 --allow-root --no-browser --NotebookApp.allow_origin=* --NotebookApp.allow_remote_access=1' \n\
+autorestart=true\n\
+startretries=0\n\
+redirect_stderr=true\n\
+stdout_logfile=/var/log/jupyter.log\n\
+stdout_logfile_maxbytes=50MB\n\
+" > /etc/supervisor/conf.d/jupyter.conf

+ 7 - 0
Makefile

@@ -25,6 +25,13 @@ idctest:
 	@docker build -f Dockerfile.dev  --target image-idctest -t registry.cn-hangzhou.aliyuncs.com/sxtest/jupyterlab:idctest .
 	@docker push registry.cn-hangzhou.aliyuncs.com/sxtest/jupyterlab:idctest
 
+idcprod:
+	@sed 's#http.*\.com#http://aihub-dag-idc\.digitalyili\.com#' -i  packages/yili-dag/src/request.ts
+	@sed 's#http.*\.com#http://aihub-dag-idc\.digitalyili\.com#' -i  packages/jldbq-extenison/src/api/config.ts
+	@sed 's#http.*\.com#http://aihub-dag-idc\.digitalyili\.com#' -i  packages/filebrowser/src/api/config.ts
+	@docker build -f Dockerfile.dev  --target image-idcprod -t registry.cn-hangzhou.aliyuncs.com/sxtest/jupyterlab:idcprod .
+	@docker push registry.cn-hangzhou.aliyuncs.com/sxtest/jupyterlab:idcprod
+
 
 txtest:
 	@sed 's#http.*\.com#http://aihub-dag-test\.digitalyili\.com#' -i  packages/yili-dag/src/request.ts

+ 75 - 0
confs/idcprod/config.json

@@ -0,0 +1,75 @@
+{
+    "kernel_python_credentials": {
+        "username": "",
+        "password": "",
+        "url": "http://10.138.130.94:8998",
+        "auth": "None"
+    },
+    "kernel_scala_credentials": {
+        "username": "",
+        "password": "",
+        "url": "http://10.138.130.94:8998",
+        "auth": "None"
+    },
+    "kernel_r_credentials": {
+        "username": "",
+        "password": "",
+        "url": "http://10.138.130.94:8998"
+    },
+    "logging_config": {
+        "version": 1,
+        "formatters": {
+            "magicsFormatter": {
+                "format": "%(asctime)s\t%(levelname)s\t%(message)s",
+                "datefmt": ""
+            }
+        },
+        "handlers": {
+            "magicsHandler": {
+                "class": "hdijupyterutils.filehandler.MagicsFileHandler",
+                "formatter": "magicsFormatter",
+                "home_path": "~/.sparkmagic"
+            }
+        },
+        "loggers": {
+            "magicsLogger": {
+                "handlers": [
+                    "magicsHandler"
+                ],
+                "level": "DEBUG",
+                "propagate": 0
+            }
+        }
+    },
+    "authenticators": {
+        "Kerberos": "sparkmagic.auth.kerberos.Kerberos",
+        "None": "sparkmagic.auth.customauth.Authenticator",
+        "Basic_Access": "sparkmagic.auth.basic.Basic"
+    },
+    "wait_for_idle_timeout_seconds": 15,
+    "livy_session_startup_timeout_seconds": 60,
+    "fatal_error_suggestion": "The code failed because of a fatal error:\n\t{}.\n\nSome things to try:\na) Make sure Spark has enough available resources for Jupyter to create a Spark context.\nb) Contact your Jupyter administrator to make sure the Spark magics library is configured correctly.\nc) Restart the kernel.",
+    "ignore_ssl_errors": false,
+    "session_configs": {
+        "driverMemory": "1000M",
+        "executorCores": 2
+    },
+    "use_auto_viz": true,
+    "coerce_dataframe": true,
+    "max_results_sql": 2500,
+    "pyspark_dataframe_encoding": "utf-8",
+    "heartbeat_refresh_seconds": 30,
+    "livy_server_heartbeat_timeout_seconds": 0,
+    "heartbeat_retry_seconds": 10,
+    "server_extension_default_kernel_name": "pysparkkernel",
+    "custom_headers": {},
+    "retry_policy": "configurable",
+    "retry_seconds_to_sleep_list": [
+        0.2,
+        0.5,
+        1,
+        3,
+        5
+    ],
+    "configurable_retry_policy_max_retries": 8
+}