; aihub-dag service configuration
; NOTE(review): plaintext credentials/tokens are stored below (DB password, MinIO
; keys, Hive password, API tokens) — consider moving these to a secrets store.
; NOTE(review): boolean styles are mixed (false / 0 / 1); left as-is because the
; consuming application may compare the raw strings — verify before normalizing.

[DATABASE]
user = sxwl
pwd = sxwldba
db_name = aihub-dag
host = 192.168.199.110
port = 32306
ssl_disabled = false

[MINIO]
url = minio-api.sxkj.com
k8s_url = minio.default:9000
access_key = admin
secret_key = sxkjadmin

[AF_BACKEND]
uri = aihub-dag-backend-af:8080
; host = aihub-dag-backend-af
; port = 8080
host = aihub-dag.sxkj.com
port = 80
dag_files_dir = /dags/

[BACKEND]
url = 192.168.199.107:18082

[K8S]
; image_pull_key = codingregistrykey
enable_kerberos = false

[AIRFLOW]
uri = 192.168.199.109
host_in_header = airflow-web.sxkj.com
api_token = YWRtaW46YWRtaW4=

[HIVE]
host = 192.168.199.27
port = 10000
username = hive
password = hive
database_name = default
kerberos = 0
base_path = /home/sxkj/bigdata/apache-hive-2.3.9-bin/warehouse/

[HIVE_METASTORE]
uris = thrift://192.168.199.27:9083

[TASK_IMAGES]
datax = SXKJ:32775/pod_datax:0.9
python = SXKJ:32775/pod_python:1.1
java = SXKJ:32775/java:1.0
sparks = SXKJ:32775/jupyter:0.981

[HADOOP_INNER]
default_fs = hdfs://192.168.199.27:9000
hadoop_config = { }
; NOTE(review): value below is a JSON blob kept on one line so any INI parser
; reads it intact; the application presumably parses it as JSON — confirm.
kerberos_config = { "haveKerberos": "false", "kerberosKeytabFilePath": "/workspace/confs/test/user.keytab", "kerberosPrincipal": "ailab@EMR-5XJSY31F" }

[LAKE_HADOOP_INNER]
default_fs = hdfs://192.168.199.27:9000
hadoop_config = { }
kerberos_config = { "haveKerberos": "false", "kerberosKeytabFilePath": "/workspace/confs/test/user.keytab", "kerberosPrincipal": "ailab@EMR-5XJSY31F" }

[PERMISSIONS]
; 超级管理员 (super-admin role name)
super_admin_role = TENANT_ADMIN
; 项目管理员 (project-admin role name)
project_admin_role = PROJECT_ADMIN
; 算法组项目id (algorithm-team project id)
special_project_id = sfzxm

[PROGRAMME]
url = aihub-dag.sxkj.com
namespace = airflow
super_image = SXKJ:32775/jupyterlab
ordinary_image = SXKJ:32775/jupyterlab
tag = sxkj
host = aihub-dag.sxkj.com
chart = aihub-dag-jupyter.tgz
path_type = ImplementationSpecific
; NOTE(review): quotes are kept literally by configparser ('' is a 2-char value)
; but stripped by some parsers — confirm which the consumer expects.
ingress_class = ''

[CRON_CONFIG]
; allowed scheduling window, hours of day (inclusive) — TODO confirm units
hour_min = 4
hour_max = 22
enable = 1

[REQUIREMENTS_CONFIG]
prefix = hdfs://192.168.199.27:9000
path = /user/sxkj/pys

[JOB_CONFIG]
; presumably seconds — TODO confirm against consumer
timeout = 5
auth_token = d0b52538-712a-11ed-8873-87519e90e821