[DATABASE]
user = sxwl
pwd = sxwldba
db_name = aihub-dag
host = mysql.default
port = 3306
ssl_disabled = false

[MINIO]
k8s_url = aihub-dag-minio:9000
url = aihub-dag-minio:9000
access_key = minioadmin
secret_key = minioadmin

[AF_BACKEND]
uri = aihub-dag-backend-af:8080
host = aihub-dag-backend-af
port = 8080
dag_files_dir = /dags/

[K8S]
image_pull_key =
enable_kerberos = false

[BACKEND]
url = aihub-dag-backend:8080

[AIRFLOW]
uri = airflow-webserver:8080
api_token = YWRtaW46YWRtaW4=

[HIVE]
host = 192.168.199.27
port = 10000
username = hive
password = hive
database_name = default
kerberos = 0
keytab = assets/test/user.keytab
krb5config = assets/test/krb5.conf
kerberos_service_name = hadoop
principal = ailab@EMR-5XJSY31F
base_path = /home/sxkj/bigdata/apache-hive-2.3.9-bin/warehouse/

[HIVE_METASTORE]
uris = thrift://192.168.199.27:9083

[TASK_IMAGES]
datax = SXKJ:32775/pod_datax:0.9
python = SXKJ:32775/pod_python:1.1
java = SXKJ:32775/java:1.0
sparks = SXKJ:32775/jupyter:0.981

[HADOOP_INNER]
default_fs = hdfs://192.168.199.27:9000
hadoop_config = { }
kerberos_config = {
    "haveKerberos": "false",
    "kerberosKeytabFilePath": "/workspace/confs/test/user.keytab",
    "kerberosPrincipal": "ailab@EMR-5XJSY31F"
    }

[LAKE_HADOOP_INNER]
default_fs = hdfs://192.168.199.27:9000
hadoop_config = { }
kerberos_config = {
    "haveKerberos": "false",
    "kerberosKeytabFilePath": "/workspace/confs/test/user.keytab",
    "kerberosPrincipal": "ailab@EMR-5XJSY31F"
    }

[HOST_ALIAS]
enable = false
host_alias = {
    "192.168.199.31": "SXKJ"
    }

[PROGRAMME]
url = aihub-dag-helmbe:8080
namespace = airflow
super_image = SXKJ:32775/jupyterlab
ordinary_image = SXKJ:32775/jupyterlab
tag = sxkj
host = aihub-dag.sxkj.com
chart = aihub-dag-jupyter.tgz
path_type = ImplementationSpecific
ingress_class =
image_pull_secret = test
node_selector = h107

[CRON_CONFIG]
hour_min = 4
hour_max = 22
enable = 1

[REQUIREMENTS_CONFIG]
prefix = hdfs://192.168.199.27:9000
path = /user/sxkj/pys

[JOB_CONFIG]
timeout = 5
auth_token = d0b52538-712a-11ed-8873-87519e90e821

; Tencent Cloud test environment address
[AI_YIQI]
url = ailab-test.digitalyili.com
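; ---------------------------------------------------------------
; Usage note (assumption): this file appears to be in Python
; configparser INI syntax, and values such as kerberos_config hold
; JSON blobs that the consuming code presumably decodes with
; json.loads. A minimal sketch of reading it, with the filename
; "config.ini" assumed for illustration:
;
;   import configparser
;   import json
;
;   cp = configparser.ConfigParser()
;   cp.read("config.ini")
;
;   hive_host = cp.get("HIVE", "host")        # "192.168.199.27"
;   hive_port = cp.getint("HIVE", "port")     # 10000
;
;   # Multi-line JSON values: indented continuation lines are
;   # joined by configparser, then parsed as JSON.
;   krb = json.loads(cp.get("HADOOP_INNER", "kerberos_config"))
;   if krb.get("haveKerberos") == "true":     # stored as a string, not a bool
;       keytab = krb["kerberosKeytabFilePath"]
;
; Note that the [AIRFLOW] api_token is base64 of "admin:admin",
; i.e. HTTP basic-auth credentials for the Airflow webserver.
; ---------------------------------------------------------------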