; Service configuration (database, MinIO, Airflow backend, Hive, K8s task images).
; NOTE(review): multi-line JSON values below rely on indented continuation lines,
; which implies a Python-configparser-compatible reader — confirm against consumer.
; NOTE(review): plaintext credentials/tokens are stored here — consider a secret store.

[DATABASE]
user = root
pwd = happylay
db_name = datax_web_dev
host = 192.168.199.107
port = 10086
ssl_disabled = true

[MINIO]
url = minio-api.sxkj.com
k8s_url = minio.default:9000
access_key = admin
secret_key = sxkjadmin

[AF_BACKEND]
uri = 192.168.199.109:18082
host = 192.168.199.109
port = 18082
dag_files_dir = /dags/

[BACKEND]
url = 192.168.199.107:18082

[K8S]
;image_pull_key = codingregistrykey
enable_kerberos = false

[AIRFLOW]
uri = 192.168.199.109
host_in_header = airflow-web.sxkj.com
api_token = YWRtaW46YWRtaW4=

[HIVE]
host = 192.168.199.27
port = 10000
username = hive
password = hive
database_name = default
kerberos = 0

[HIVE_METASTORE]
uris = thrift://192.168.199.27:9083

[TASK_IMAGES]
datax = SXKJ:32775/pod_datax:0.9
python = SXKJ:32775/pod_python:1.1
java = SXKJ:32775/java:1.0
sparks = SXKJ:32775/jupyter:0.981

[HADOOP_INNER]
datasource = -1
default_fs = hdfs://HDFS8000912
hadoop_config = {
    "dfs.nameservices": "HDFS8000912",
    "dfs.ha.namenodes.HDFS8000912": "nn1,nn2",
    "dfs.namenode.rpc-address.HDFS8000912.nn1": "10.254.20.18:4007",
    "dfs.namenode.rpc-address.HDFS8000912.nn2": "10.254.20.22:4007",
    "dfs.client.failover.proxy.provider.HDFS8000912": "org.apache.hadoop.hdfs.server.namenode.ha.ConfiguredFailoverProxyProvider"
    }
kerberos_config = {
    "haveKerberos": "true",
    "kerberosKeytabFilePath": "/workspace/confs/test/user.keytab",
    "kerberosPrincipal": "ailab@EMR-5XJSY31F"
    }