Browse Source

Merge branch 'yili' of http://gogsb.soaringnova.com/ylproj/jupyterlab into yili

kilnonedre 1 year ago
parent
commit
561e4cfd8e

+ 22 - 5
Dockerfile.idc

@@ -15,10 +15,24 @@ ENV PYTHONPATH=/src
 RUN cd /src && chmod a+x  ./bdist_wheel.sh && ./bdist_wheel.sh
 
 
-FROM  gettyimages/spark:2.4.0-hadoop-3.0  as builder
-RUN sed -i "s@http://\(deb\|security\).debian.org@http://mirrors.163.com@g" /etc/apt/sources.list
-RUN apt update && apt install -y --no-install-recommends \
-    krb5-user zip unzip procps tar curl supervisor net-tools telnet vim iputils-ping netcat jq wget zsh
+
+FROM  gettyimages/spark:2.4.0-hadoop-3.0  as builder-cdh
+
+
+FROM ubuntu:18.04 AS builder
+
+RUN sed -i 's#archive.ubuntu.com#mirrors.aliyun.com#g' /etc/apt/sources.list  \
+    && sed -i 's#security.ubuntu.com#mirrors.aliyun.com#g' /etc/apt/sources.list
+
+COPY --from=builder-cdh /usr/hadoop-3.0.0/  /usr/hadoop-3.0.0/
+# JAVA_HOME
+COPY --from=builder-cdh /usr/jdk1.8.0_202  /usr/jdk1.8.0_202
+ENV JAVA_HOME=/usr/jdk1.8.0_202
+
+# FROM  gettyimages/spark:2.4.0-hadoop-3.0  as builder
+# RUN sed -i "s@http://\(deb\|security\).debian.org@http://mirrors.163.com@g" /etc/apt/sources.list
+# RUN apt update && apt install -y --no-install-recommends \
+#     krb5-user zip unzip procps tar curl supervisor net-tools telnet vim iputils-ping netcat jq wget zsh
 # 目录准备
 ENV WORKSPACE /hadoop
 WORKDIR ${WORKSPACE}
@@ -53,11 +67,12 @@ RUN apt-get update && apt-get install -y --no-install-recommends \
     libpng-dev \
     openssh-server \
     autojump \
+    language-pack-zh-hans \
     ttf-wqy-zenhei \
     libgl1-mesa-glx  \
     libglib2.0-0 \
     locales \
-    krb5-user net-tools telnet && \
+    krb5-user net-tools telnet procps jq tar && \
     rm -rf /var/lib/apt/lists/*
 
 
@@ -113,6 +128,7 @@ ADD confs/idctest/config.json .
 RUN mkdir -p $HOME/.sparkmagic && cp config.json $HOME/.sparkmagic
 COPY confs/idctest/ydtk /ydtk
 COPY confs/idctest/conf ${WORKSPACE}/conf
+COPY livysession.py /opt/conda/lib/python3.7/site-packages/sparkmagic/livyclientlib
 RUN echo "\
 [program:jupyter]\n\
 directory=/workspace\n\
@@ -153,6 +169,7 @@ ADD confs/idcprod/config.json .
 RUN mkdir -p $HOME/.sparkmagic && cp config.json $HOME/.sparkmagic
 COPY confs/idcprod/ydtk /ydtk
 COPY confs/idcprod/conf ${WORKSPACE}/conf
+COPY livysession.py /opt/conda/lib/python3.7/site-packages/sparkmagic/livyclientlib
 RUN echo "\
 [program:jupyter]\n\
 directory=/workspace\n\

+ 1 - 1
Dockerfile.tx

@@ -19,7 +19,7 @@ RUN cd /src && chmod a+x  ./bdist_wheel.sh && ./bdist_wheel.sh
 FROM openjdk:8-jre-slim as builder
 
 RUN sed -i "s@http://\(deb\|security\).debian.org@https://mirrors.aliyun.com@g" /etc/apt/sources.list
-#RUN sed -i 's#archive.ubuntu.com#mirrors.aliyun.com#g' /etc/apt/sources.list  \
+# RUN sed -i 's#archive.ubuntu.com#mirrors.aliyun.com#g' /etc/apt/sources.list  \
 #    && sed -i 's#security.ubuntu.com#mirrors.aliyun.com#g' /etc/apt/sources.list
 
 ENV LANG=zh_CN.UTF-8 LANGUAGE=zh_CN:zh LC_ALL=zh_CN.UTF-8 DEBIAN_FRONTEND=noninteractive

+ 1 - 1
Makefile

@@ -52,5 +52,5 @@ txprod:
 
 
 rsync:
-	@rsync -azP --exclude ".*/"  --exclude "tmp/" `pwd` sxkj@192.168.199.109:/home/sxkj/zhangli/
+	@rsync -azP --exclude ".*/"  --exclude "tmp/" `pwd` sxkj@192.168.199.107:/home/sxkj/zhangli/
 

+ 2 - 2
confs/idcprod/config.json

@@ -46,8 +46,8 @@
         "None": "sparkmagic.auth.customauth.Authenticator",
         "Basic_Access": "sparkmagic.auth.basic.Basic"
     },
-    "wait_for_idle_timeout_seconds": 15,
-    "livy_session_startup_timeout_seconds": 60,
+    "wait_for_idle_timeout_seconds": 30,
+    "livy_session_startup_timeout_seconds": 80,
     "fatal_error_suggestion": "The code failed because of a fatal error:\n\t{}.\n\nSome things to try:\na) Make sure Spark has enough available resources for Jupyter to create a Spark context.\nb) Contact your Jupyter administrator to make sure the Spark magics library is configured correctly.\nc) Restart the kernel.",
     "ignore_ssl_errors": false,
     "session_configs": {

+ 2 - 2
confs/idctest/config.json

@@ -46,8 +46,8 @@
         "None": "sparkmagic.auth.customauth.Authenticator",
         "Basic_Access": "sparkmagic.auth.basic.Basic"
     },
-    "wait_for_idle_timeout_seconds": 15,
-    "livy_session_startup_timeout_seconds": 60,
+    "wait_for_idle_timeout_seconds": 30,
+    "livy_session_startup_timeout_seconds": 80,
     "fatal_error_suggestion": "The code failed because of a fatal error:\n\t{}.\n\nSome things to try:\na) Make sure Spark has enough available resources for Jupyter to create a Spark context.\nb) Contact your Jupyter administrator to make sure the Spark magics library is configured correctly.\nc) Restart the kernel.",
     "ignore_ssl_errors": false,
     "session_configs": {

+ 3 - 3
confs/txprod/config.json

@@ -2,19 +2,19 @@
   "kernel_python_credentials": {
     "username": "",
     "password": "",
-    "url": "http://172.23.7.140:30998",
+    "url": "http://172.23.7.140:8998",
     "auth": "None"
   },
   "kernel_scala_credentials": {
     "username": "",
     "password": "",
-    "url": "http://172.23.7.140:30998",
+    "url": "http://172.23.7.140:8998",
     "auth": "None"
   },
   "kernel_r_credentials": {
     "username": "",
     "password": "",
-    "url": "http://172.23.7.140:30998"
+    "url": "http://172.23.7.140:8998"
   },
   "logging_config": {
     "version": 1,

+ 5 - 3
deploy/Jenkinsfile

@@ -9,6 +9,8 @@ pipeline {
     IMAGE_REPO = "registry.cn-hangzhou.aliyuncs.com/sxtest"
     url1 = "http://aihub-dag-idctest.digitalyili.com"
     url2 = "http://aihub-dag-idc.digitalyili.com"
+    url3 = "http://aihub-dag-test.digitalyili.com"
+    url4 = "http://aihub-dag.digitalyili.com"
     TAB = "\n   \n  "
     BUILD_TASKS = "\n   \n  "
 
@@ -19,9 +21,9 @@ pipeline {
         container('docker') {
           echo "构建 Docker 镜像阶段"
           sh 'echo "nameserver 114.114.114.114" >> /etc/resolv.conf'
-          sh "sed '/^http/c'$url1'' -i  packages/yili-dag/src/request.ts"
-          sh "sed '/^http/c'$url1'' -i  packages/jldbq-extenison/src/api/config.ts"
-          sh "sed '/^http/c'$url1'' -i  packages/filebrowser/src/api/config.ts"
+          sh "sed '/^http/c'$url'' -i  packages/yili-dag/src/request.ts"
+          sh "sed '/^http/c'$url'' -i  packages/jldbq-extenison/src/api/config.ts"
+          sh "sed '/^http/c'$url'' -i  packages/filebrowser/src/api/config.ts"
           sh 'docker build -f ${DOCKERFILE} --target ${ENV_TARGET0} -t ${IMAGE_REPO}/jupyterlab0:${ENV_APP} --output type=docker .'
           sh 'docker build -f ${DOCKERFILE} --target ${ENV_TARGET} -t ${IMAGE_REPO}/jupyterlab:${ENV_APP}  --output type=docker .'
           echo "build success"

+ 427 - 0
livysession.py

@@ -0,0 +1,427 @@
+# Distributed under the terms of the Modified BSD License.
+import threading
+from time import sleep, time
+
+from hdijupyterutils.guid import ObjectWithGuid
+
+import sparkmagic.utils.configuration as conf
+import sparkmagic.utils.constants as constants
+from sparkmagic.utils.sparklogger import SparkLog
+from sparkmagic.utils.sparkevents import SparkEvents
+from sparkmagic.utils.utils import get_sessions_info_html
+from .configurableretrypolicy import ConfigurableRetryPolicy
+from .command import Command
+from .exceptions import (
+    LivyClientTimeoutException,
+    LivyUnexpectedStatusException,
+    BadUserDataException,
+    SqlContextNotFoundException,
+)
+
+
+class _HeartbeatThread(threading.Thread):
+    def __init__(self, livy_session, refresh_seconds, retry_seconds, run_at_most=None):
+        """
+        Parameters
+        ----------
+        livy_session : LivySession
+        refresh_seconds: int
+            The seconds to sleep between refreshing the Livy session status and info
+        retry_seconds: int
+            The seconds to sleep before retrying on a failed refresh
+        run_at_most: int, optional
+            The max number of loops to execute before ending this thread
+        """
+        super(_HeartbeatThread, self).__init__()
+
+        self.livy_session = livy_session
+        self.refresh_seconds = refresh_seconds
+        self.retry_seconds = retry_seconds
+        if run_at_most is None:
+            # a billion iterations should be sufficient
+            run_at_most = int(1e9)
+        self.run_at_most = run_at_most
+
+    def run(self):
+        loop_counter = 0
+        if self.livy_session is None:
+            print("Will not start heartbeat thread because self.livy_session is None")
+            return
+
+        self.livy_session.logger.info(
+            "Starting heartbeat for session {}".format(self.livy_session.id)
+        )
+
+        while self.livy_session is not None and loop_counter < self.run_at_most:
+            loop_counter += 1
+
+            try:
+                sleep_time = self.refresh_seconds
+                self.livy_session.refresh_status_and_info()
+            except Exception as e:
+                sleep_time = self.retry_seconds
+                # The built-in python logger has exception handling built in. If you expose
+                # the "exception" function in the SparkLog class then you could just make this
+                # self.livy_session.logger.exception("some useful message") and it'll print
+                # out the stack trace too.
+                self.livy_session.logger.error("{}".format(e))
+
+            sleep(sleep_time)
+
+    def stop(self):
+        if self.livy_session is not None:
+            self.livy_session.logger.info(
+                "Stopping heartbeat for session {}".format(self.livy_session.id)
+            )
+
+        self.livy_session = None
+        self.join()
+
+
+class LivySession(ObjectWithGuid):
+    def __init__(
+        self,
+        http_client,
+        properties,
+        ipython_display,
+        session_id=-1,
+        spark_events=None,
+        heartbeat_timeout=0,
+        heartbeat_thread=None,
+    ):
+        super(LivySession, self).__init__()
+        assert constants.LIVY_KIND_PARAM in list(properties.keys())
+        kind = properties[constants.LIVY_KIND_PARAM]
+
+        should_heartbeat = False
+        if heartbeat_timeout > 0:
+            should_heartbeat = True
+            properties[constants.LIVY_HEARTBEAT_TIMEOUT_PARAM] = heartbeat_timeout
+        elif constants.LIVY_HEARTBEAT_TIMEOUT_PARAM in list(properties.keys()):
+            properties.pop(constants.LIVY_HEARTBEAT_TIMEOUT_PARAM)
+
+        self.properties = properties
+        self.ipython_display = ipython_display
+        self._should_heartbeat = should_heartbeat
+        self._user_passed_heartbeat_thread = heartbeat_thread
+
+        if spark_events is None:
+            spark_events = SparkEvents()
+        self._spark_events = spark_events
+
+        self._policy = ConfigurableRetryPolicy(
+            retry_seconds_to_sleep_list=[0.2, 0.5, 0.5, 1, 1, 2], max_retries=5000
+        )
+        wait_for_idle_timeout_seconds = conf.wait_for_idle_timeout_seconds()
+
+        assert wait_for_idle_timeout_seconds > 0
+
+        self.logger = SparkLog("LivySession")
+
+        kind = kind.lower()
+        if kind not in constants.SESSION_KINDS_SUPPORTED:
+            raise BadUserDataException(
+                "Session of kind '{}' not supported. Session must be of kinds {}.".format(
+                    kind, ", ".join(constants.SESSION_KINDS_SUPPORTED)
+                )
+            )
+
+        self._app_id = None
+        self._user = None
+        self._logs = ""
+        self._http_client = http_client
+        self._wait_for_idle_timeout_seconds = wait_for_idle_timeout_seconds
+        self._printed_resource_warning = False
+
+        self.kind = kind
+        self.id = session_id
+        self.session_info = ""
+
+        self._heartbeat_thread = None
+        if session_id == -1:
+            self.status = constants.NOT_STARTED_SESSION_STATUS
+        else:
+            self.status = constants.BUSY_SESSION_STATUS
+            self._start_heartbeat_thread()
+
+    def __str__(self):
+        return "Session id: {}\tYARN id: {}\tKind: {}\tState: {}\n\tSpark UI: {}\n\tDriver Log: {}".format(
+            self.id,
+            self.get_app_id(),
+            self.kind,
+            self.status,
+            self.get_spark_ui_url(),
+            self.get_driver_log_url(),
+        )
+
+    def start(self):
+        """Start the session against actual livy server."""
+        self._spark_events.emit_session_creation_start_event(self.guid, self.kind)
+        self._printed_resource_warning = False
+        count = 0
+        while True:
+            try:
+                r = self._http_client.post_session(self.properties)
+                self.id = r["id"]
+                self.status = str(r["state"])
+
+                self.ipython_display.writeln("Starting Spark application")
+
+                # Start heartbeat thread to keep Livy interactive session alive.
+                self._start_heartbeat_thread()
+
+                # We wait for livy_session_startup_timeout_seconds() for the session to start up.
+                try:
+                    # self.ipython_display.writeln('startup timeout: {} seconds'.format(conf.livy_session_startup_timeout_seconds()))
+                    self.wait_for_idle(conf.livy_session_startup_timeout_seconds())
+                except LivyClientTimeoutException:
+                    raise LivyClientTimeoutException(
+                        "Session {} did not start up in {} seconds.".format(
+                            self.id, conf.livy_session_startup_timeout_seconds()
+                        )
+                    )
+
+                html = get_sessions_info_html([self], self.id)
+                self.ipython_display.html(html)
+
+                command = Command("spark")
+                (success, out, mimetype) = command.execute(self)
+
+                if success:
+                    self.ipython_display.writeln("SparkSession available as 'spark'.")
+                    self.sql_context_variable_name = "spark"
+                else:
+                    command = Command("sqlContext")
+                    (success, out, mimetype) = command.execute(self)
+                    if success:
+                        self.ipython_display.writeln("SparkContext available as 'sc'.")
+                        if "hive" in out.lower():
+                            self.ipython_display.writeln(
+                                "HiveContext available as 'sqlContext'."
+                            )
+                        else:
+                            self.ipython_display.writeln(
+                                "SqlContext available as 'sqlContext'."
+                            )
+                        self.sql_context_variable_name = "sqlContext"
+                    else:
+                        raise SqlContextNotFoundException(
+                            "Neither SparkSession nor HiveContext/SqlContext is available."
+                        )
+            except Exception as e:
+                # print("Error starting the Spark Session. Error: {}, status: {}, type: {}".format(str(e), self.status, type(e)))
+                if count < 10 and (isinstance(e, LivyUnexpectedStatusException) or isinstance(e, LivyClientTimeoutException)):
+                    count += 1
+                    continue
+                self._spark_events.emit_session_creation_end_event(
+                    self.guid,
+                    self.kind,
+                    self.id,
+                    self.status,
+                    False,
+                    e.__class__.__name__,
+                    str(e),
+                )
+                raise
+            else:
+                self._spark_events.emit_session_creation_end_event(
+                    self.guid, self.kind, self.id, self.status, True, "", ""
+                )
+                break
+
+    def get_app_id(self):
+        if self._app_id is None:
+            self._app_id = self._http_client.get_session(self.id).get("appId")
+        return self._app_id
+
+    def get_app_info(self):
+        appInfo = self._http_client.get_session(self.id).get("appInfo")
+        return appInfo if appInfo is not None else {}
+
+    def get_app_info_member(self, member_name):
+        return self.get_app_info().get(member_name)
+
+    def get_driver_log_url(self):
+        return self.get_app_info_member("driverLogUrl")
+
+    def get_logs(self):
+        log_array = self._http_client.get_all_session_logs(self.id)["log"]
+        self._logs = "\n".join(log_array)
+        return self._logs
+
+    def get_spark_ui_url(self):
+        return self.get_app_info_member("sparkUiUrl")
+
+    def get_user(self):
+        if self._user is None:
+            session = self._http_client.get_session(self.id)
+            self._user = session.get("proxyUser", session.get("owner"))
+        return self._user
+
+    @property
+    def http_client(self):
+        return self._http_client
+
+    @property
+    def endpoint(self):
+        return self._http_client.endpoint
+
+    @staticmethod
+    def is_final_status(status):
+        return status in constants.FINAL_STATUS
+
+    def is_posted(self):
+        return self.status != constants.NOT_STARTED_SESSION_STATUS
+
+    def delete(self):
+        session_id = self.id
+        self._spark_events.emit_session_deletion_start_event(
+            self.guid, self.kind, session_id, self.status
+        )
+
+        try:
+            self.logger.debug("Deleting session '{}'".format(session_id))
+
+            if self.status != constants.NOT_STARTED_SESSION_STATUS:
+                self._http_client.delete_session(session_id)
+                self._stop_heartbeat_thread()
+                self.status = constants.DEAD_SESSION_STATUS
+                self.id = -1
+            else:
+                self.ipython_display.send_error(
+                    "Cannot delete session {} that is in state '{}'.".format(
+                        session_id, self.status
+                    )
+                )
+
+        except Exception as e:
+            self._spark_events.emit_session_deletion_end_event(
+                self.guid,
+                self.kind,
+                session_id,
+                self.status,
+                False,
+                e.__class__.__name__,
+                str(e),
+            )
+            raise
+        else:
+            self._spark_events.emit_session_deletion_end_event(
+                self.guid, self.kind, session_id, self.status, True, "", ""
+            )
+
+    def wait_for_idle(self, seconds_to_wait=None):
+        """Wait for session to go to idle status. Sleep meanwhile.
+
+        Parameters:
+            seconds_to_wait : number of seconds to wait before giving up.
+        """
+        if seconds_to_wait is None:
+            seconds_to_wait = self._wait_for_idle_timeout_seconds
+
+        retries = 1
+        while True:
+            self.refresh_status_and_info()
+            # self.ipython_display.writeln(
+            #     "Session {} status '{}', seconds_to_wait: {}".format(self.id, self.status, seconds_to_wait)
+            # )
+            if self.status == constants.IDLE_SESSION_STATUS:
+                return
+
+            if self.status in constants.FINAL_STATUS:
+                error = "Session {} unexpectedly reached final status '{}'.".format(
+                    self.id, self.status
+                )
+                self.logger.error(error)
+                raise LivyUnexpectedStatusException(
+                    "{} See logs:\n{}".format(error, self.get_logs())
+                )
+
+            if seconds_to_wait <= 0.0:
+                error = "Session {} did not reach idle status in time. Current status is {}.".format(
+                    self.id, self.status
+                )
+                self.logger.error(error)
+                raise LivyClientTimeoutException(error)
+
+            if (
+                constants.YARN_RESOURCE_LIMIT_MSG in self.session_info
+                and not self._printed_resource_warning
+            ):
+                self.ipython_display.send_error(
+                    constants.RESOURCE_LIMIT_WARNING.format(
+                        conf.resource_limit_mitigation_suggestion()
+                    )
+                )
+                self._printed_resource_warning = True
+
+            start_time = time()
+            sleep_time = self._policy.seconds_to_sleep(retries)
+            retries += 1
+
+            self.logger.debug(
+                "Session {} in state {}. Sleeping {} seconds.".format(
+                    self.id, self.status, sleep_time
+                )
+            )
+            sleep(sleep_time)
+            seconds_to_wait -= time() - start_time
+
+    def sleep(self, retries):
+        sleep(self._policy.seconds_to_sleep(retries))
+
+    # This function will refresh the status and get the logs in a single call.
+    # Only the status will be returned as the return value.
+    def refresh_status_and_info(self):
+        response = self._http_client.get_session(self.id)
+        status = response["state"]
+        log_array = response["log"]
+
+        if status in constants.POSSIBLE_SESSION_STATUS:
+            self.status = status
+            self.session_info = "\n".join(log_array)
+        else:
+            raise LivyUnexpectedStatusException(
+                "Status '{}' not supported by session.".format(status)
+            )
+
+    def _start_heartbeat_thread(self):
+        if self._should_heartbeat and self._heartbeat_thread is None:
+            refresh_seconds = conf.heartbeat_refresh_seconds()
+            retry_seconds = conf.heartbeat_retry_seconds()
+
+            if self._user_passed_heartbeat_thread is None:
+                self._heartbeat_thread = _HeartbeatThread(
+                    self, refresh_seconds, retry_seconds
+                )
+            else:
+                self._heartbeat_thread = self._user_passed_heartbeat_thread
+
+            self._heartbeat_thread.daemon = True
+            self._heartbeat_thread.start()
+
+    def _stop_heartbeat_thread(self):
+        if self._heartbeat_thread is not None:
+            self._heartbeat_thread.stop()
+            self._heartbeat_thread = None
+
+    def get_row_html(self, current_session_id):
+        return """<tr><td>{0}</td><td>{1}</td><td>{2}</td><td>{3}</td><td>{4}</td><td>{5}</td><td>{6}</td><td>{7}</td></tr>""".format(
+            self.id,
+            self.get_app_id(),
+            self.kind,
+            self.status,
+            self.get_html_link("Link", self.get_spark_ui_url()),
+            self.get_html_link("Link", self.get_driver_log_url()),
+            self.get_user(),
+            ""
+            if current_session_id is None or current_session_id != self.id
+            else "\u2714",
+        )
+
+    @staticmethod
+    def get_html_link(text, url):
+        if url is not None:
+            return """<a target="_blank" href="{1}">{0}</a>""".format(text, url)
+        else:
+            return ""

+ 27 - 27
packages/help-extension/src/index.tsx

@@ -167,32 +167,32 @@ const about: JupyterFrontEndPlugin<void> = {
 /**
  * A plugin to add a command to open the Classic Notebook interface.
  */
-const launchClassic: JupyterFrontEndPlugin<void> = {
-  id: '@jupyterlab/help-extension:launch-classic',
-  autoStart: true,
-  requires: [ITranslator],
-  optional: [ICommandPalette],
-  activate: (
-    app: JupyterFrontEnd,
-    translator: ITranslator,
-    palette: ICommandPalette | null
-  ): void => {
-    const { commands } = app;
-    const trans = translator.load('jupyterlab');
-    const category = trans.__('Help');
-
-    commands.addCommand(CommandIDs.launchClassic, {
-      label: trans.__('Launch Classic Notebook'),
-      execute: () => {
-        window.open(PageConfig.getBaseUrl() + 'tree');
-      }
-    });
-
-    if (palette) {
-      palette.addItem({ command: CommandIDs.launchClassic, category });
-    }
-  }
-};
+// const launchClassic: JupyterFrontEndPlugin<void> = {
+//   id: '@jupyterlab/help-extension:launch-classic',
+//   autoStart: true,
+//   requires: [ITranslator],
+//   optional: [ICommandPalette],
+//   activate: (
+//     app: JupyterFrontEnd,
+//     translator: ITranslator,
+//     palette: ICommandPalette | null
+//   ): void => {
+//     const { commands } = app;
+//     const trans = translator.load('jupyterlab');
+//     const category = trans.__('Help');
+
+//     commands.addCommand(CommandIDs.launchClassic, {
+//       label: trans.__('Launch Classic Notebook'),
+//       execute: () => {
+//         window.open(PageConfig.getBaseUrl() + 'tree');
+//       }
+//     });
+
+//     if (palette) {
+//       palette.addItem({ command: CommandIDs.launchClassic, category });
+//     }
+//   }
+// };
 
 /**
  * A plugin to add a command to open the Jupyter Forum.
@@ -636,7 +636,7 @@ const licenses: JupyterFrontEndPlugin<void> = {
 
 const plugins: JupyterFrontEndPlugin<any>[] = [
   about,
-  launchClassic,
+  // launchClassic,
   jupyterForum,
   resources,
   licenses

+ 2 - 0
packages/yili-dag/src/AlgoNode.tsx

@@ -10,6 +10,7 @@ import undoneNode from '../style/img/default_node.png';
 import runningNode from '../style/img/running.png';
 import outputLogo from '../style/img/output.png';
 
+// 节点状态
 interface NodeStatus {
   id: string;
   status: 'default' | 'success' | 'failed' | 'running' | 'undone';
@@ -39,6 +40,7 @@ export default class AlgoNode extends React.Component<{ node?: Node }> {
     return false;
   }
 
+  // 初始化
   render() {
     const { node } = this.props;
     const data = node?.getData() as NodeStatus;

+ 1 - 0
packages/yili-dag/src/ContextMenu.tsx

@@ -14,6 +14,7 @@ interface IProps {
   handleOpenLog: () => void;
 }
 
+// 菜单
 const ContextMenu: React.FC<IProps> = props => {
   const {
     contextStatus,

+ 9 - 0
packages/yili-dag/src/Dag.tsx

@@ -247,6 +247,7 @@ export default class Dag extends React.Component<any, any> {
     });
   };
 
+  // 格式化数据源
   formatSourceTable = (node_id: string) => {
     const allEdges = this.state.dagGraph.getEdges();
     const input_nodes = {} as any;
@@ -273,6 +274,7 @@ export default class Dag extends React.Component<any, any> {
     this.setState({ scriptInputNodes: input_nodes });
   };
 
+  // 保存数据
   saveData = () => {
     const text = this.props.context.current;
     text.ready.then(() => {
@@ -283,6 +285,7 @@ export default class Dag extends React.Component<any, any> {
     });
   };
 
+  // 执行dag数据
   executeDagData = async (script_nodes: any, script_edges: any) => {
     const dag_script = JSON.stringify({
       sub_nodes: script_nodes,
@@ -314,6 +317,7 @@ export default class Dag extends React.Component<any, any> {
     }
   };
 
+  // 格式化节点状态
   formatNodeStatus = (script_nodes: any) => {
     const runNodes = script_nodes.filter((item: any) => !item.skip);
     const runNodeIds = runNodes.map((item: any) => item.id);
@@ -407,6 +411,7 @@ export default class Dag extends React.Component<any, any> {
     this.setState({ contextMenu: null });
   };
 
+  //  格式化表格数据
   formatTableData = (data: any) => {
     const col = data.header.map((item: any) => ({
       title: item,
@@ -428,6 +433,7 @@ export default class Dag extends React.Component<any, any> {
     };
   };
 
+  // 获取表格数据
   fetchTableData = async (out_pin: any) => {
     const menuNode = this.state.contextMenuNode;
     const { nodeId } = menuNode.getData();
@@ -440,6 +446,7 @@ export default class Dag extends React.Component<any, any> {
     this.fetchResultTable(params);
   };
 
+  //  获取表格信息
   fetchTableInfo = async (params: any) => {
     this.setState({
       resultTableName: '',
@@ -461,6 +468,7 @@ export default class Dag extends React.Component<any, any> {
     }
   };
 
+  // 获取结果
   fetchResultTable = async (params: any) => {
     this.setState({
       tableLoading: true,
@@ -875,6 +883,7 @@ export default class Dag extends React.Component<any, any> {
     this.stencilContainer = container;
   };
 
+  // 保存表格数据
   saveTableData = async () => {
     if (this.state.table_name && !this.state.table_name.includes(' ')) {
       const menuNode = this.state.contextMenuNode;

+ 1 - 0
packages/yili-dag/src/DagEditorWidget.tsx

@@ -86,6 +86,7 @@ const DagWrapper: React.FC<IProps> = ({
     currentContext.ready.then(() => {});
   }, [contextRef]);
 
+  // 保存画布
   const saveGraph = (data: any) => {
     contextRef.current.model.fromJSON(data);
     contextRef.current.save();

+ 17 - 11
packages/yili-dag/src/DatasourceNodeInfo.tsx

@@ -16,17 +16,18 @@ import { CheckboxChangeEvent } from 'antd/es/checkbox';
 export default class DatasourceNodeInfo extends React.Component<any, any> {
   formRef = React.createRef();
 
-constructor(props: any) {
-super(props);
-this.state = {
-  debugTable: [],
-  localDebugTable: [],
-  currentTablePath: 'node',
-  nodeName: '',
-  checkAll: false
-};
-}
+  constructor(props: any) {
+    super(props);
+    this.state = {
+      debugTable: [],
+      localDebugTable: [],
+      currentTablePath: 'node',
+      nodeName: '',
+      checkAll: false
+    };
+  }
 
+  // 获取调试数据
   async getDebugTableList() {
     const { data } = await getDebugTable({
       user_id: 'test',
@@ -38,6 +39,8 @@ this.state = {
       message.error('获取调试数据表失败');
     }
   }
+
+  // 获取本地调试数据
   async getLocalDebugTableList() {
     const { data } = await getLocalDebugTable();
     if (data.code === 200) {
@@ -49,7 +52,6 @@ this.state = {
       message.error('获取本地调试数据表失败');
     }
   }
-
   async getTableSchema(table_name: string) {
     const { data } = await getSchema(table_name);
     if (data.code === 200) {
@@ -70,6 +72,7 @@ this.state = {
     }
   }
 
+  // 选择调试表路径
   changeTablePath(e: any) {
     this.setState({ currentTablePath: e.target.value });
     (this.formRef.current as any).setFieldsValue({
@@ -87,6 +90,7 @@ this.state = {
     this.getLocalDebugTableList();
   }
 
+  // 全选
   onCheckAll(e: CheckboxChangeEvent) {
     const val = (this.formRef.current as any).getFieldValue('inputDatasource');
     const checkData = val.map((item: any) => ({
@@ -101,6 +105,7 @@ this.state = {
     });
   }
 
+  // 提交
   submit() {
     const { inputDatasource, nodeName, tablePath, dataTable } = (this.formRef
       .current as any).getFieldsValue();
@@ -126,6 +131,7 @@ this.state = {
     }
   }
 
+  // 重置
   onReset() {
     this.props.nodeInfo.nodeName = this.state.nodeName;
     this.props.node.setData({ ...this.props.nodeInfo });

+ 7 - 0
packages/yili-dag/src/OutputNodeInfo.tsx

@@ -30,6 +30,7 @@ export default class OutputNodeInfo extends React.Component<any, any> {
     };
   }
 
+  // 获取调试数据
   async getTableSchema(table_name: string) {
     const { data } = await getSchema(table_name);
     if (data.code === 200) {
@@ -48,6 +49,7 @@ export default class OutputNodeInfo extends React.Component<any, any> {
     }
   }
 
+  // 选择调试表路径
   changeTablePath(e: any) {
     this.setState({ currentTablePath: e.target.value });
     (this.formRef.current as any).setFieldsValue({
@@ -56,6 +58,7 @@ export default class OutputNodeInfo extends React.Component<any, any> {
     });
   }
 
+  // 获取调试数据
   async getDebugTableList() {
     const { data } = await getDebugTable({
       user_id: 'test',
@@ -91,6 +94,7 @@ export default class OutputNodeInfo extends React.Component<any, any> {
     this.getLocalDebugTableList();
   }
 
+  // 提交
   submit() {
     const { outputSource, nodeName, tablePath, dataTable } = (this.formRef
       .current as any).getFieldsValue();
@@ -107,12 +111,14 @@ export default class OutputNodeInfo extends React.Component<any, any> {
     }
   }
 
+  // 重置
   onReset() {
     this.props.nodeInfo.nodeName = this.state.nodeName;
     this.props.node.setData({ ...this.props.nodeInfo });
     this.props.close();
   }
 
+  // 输入字段
   enterOutputField(e: any, index: number) {
     const outputSource = (this.formRef.current as any).getFieldValue(
       'outputSource'
@@ -124,6 +130,7 @@ export default class OutputNodeInfo extends React.Component<any, any> {
     }
   }
 
+  // 选择字段类型
   changeOutputFieldType(val: string, index: number) {
     const outputSource = (this.formRef.current as any).getFieldValue(
       'outputSource'

+ 1 - 0
packages/yili-dag/src/ScriptEditor.tsx

@@ -38,6 +38,7 @@ export default class ScriptEditor extends React.Component<any, any> {
     updateLinting(cm, options.error_list);
   }
 
+  // 代码校验
   async checkSyntax() {
     message.loading({ content: '正在校验代码,请稍等...', key: 'check' });
     const editor = this.state.editor;

+ 5 - 0
packages/yili-dag/src/ScriptNodeInfo.tsx

@@ -41,6 +41,7 @@ export default class ScriptNodeInfo extends React.Component<any, any> {
 
   editorRef = React.createRef();
 
+  // 脚本编辑弹窗
   showScriptModal = () => {
     this.setState({ scriptModalVisible: true });
   };
@@ -123,10 +124,12 @@ export default class ScriptNodeInfo extends React.Component<any, any> {
     this.setState({ inputNumber });
   };
 
+  // 更新输出数量
   updateOutputNumber = (outputNumber: number) => {
     this.setState({ outputNumber });
   };
 
+  // 更新输出节点
   updateOutputPorts = (number: number) => {
     const id = this.props.nodeInfo.id;
     const node = this.props.graph.getCellById(id);
@@ -143,6 +146,7 @@ export default class ScriptNodeInfo extends React.Component<any, any> {
     }
   };
 
+  // Collapse切换
   changeCollapse = async (val: any) => {
     this.setState({
       columns: [],
@@ -182,6 +186,7 @@ export default class ScriptNodeInfo extends React.Component<any, any> {
     });
   };
 
+  // 格式化表格数据
   formatTableData = (data: any) => {
     const col = data.header.map((item: any) => ({
       title: item,

+ 4 - 0
packages/yili-dag/src/ToolBar.tsx

@@ -34,6 +34,7 @@ export default class ToolBar extends React.Component<any, any> {
     this.setState({ modalVisible: true });
   };
 
+  // 提交编辑
   submitEdit = async () => {
     const { data } = await postRequirements({
       requirements: this.state.packageData,
@@ -76,6 +77,7 @@ export default class ToolBar extends React.Component<any, any> {
     }
   };
 
+  // 上传dag文件
   uploadDagFile() {
     const text = this.props.context.current;
     const filename = text.path.split('/').pop();
@@ -101,6 +103,7 @@ export default class ToolBar extends React.Component<any, any> {
     });
   }
 
+  // 获取依赖
   openRequirements() {
     const text = this.props.context.current;
     text.ready.then(async () => {
@@ -129,6 +132,7 @@ export default class ToolBar extends React.Component<any, any> {
     });
   }
 
+  // menu下拉事件
   DropDown(toolType: string) {
     const MenuItem = Menu.Item; // eslint-disable-line
 

+ 1 - 0
packages/yili-dag/src/request.ts

@@ -64,6 +64,7 @@ export const getDebugTable = (params: any) =>
     params
   });
 
+// 获取本地调试数据表
 export const getLocalDebugTable = () =>
   request({
     url: `/jpt/datasource/ailab_table`,

+ 2 - 0
packages/yili-dag/src/utils.ts

@@ -130,6 +130,7 @@ const getScriptEdgesAndNodes = (graph: any) => {
   return { script_nodes, script_edges };
 };
 
+// 转换为sql语句
 const datasourceToSql = (nodeData: any) => {
   const { inputSource, dataTableName } = nodeData;
   console.log(nodeData, dataTableName);
@@ -162,6 +163,7 @@ const datasourceToSql = (nodeData: any) => {
   return []
 } */
 
+// 输入框生成
 const generateInputs = (graph: any, id: any) => {
   const inputsResult: any = {};
   const edges = graph.getIncomingEdges(id);