; aihub-dag production configuration
; NOTE(review): this file stores plaintext credentials (pwd, secret_key,
; api_token, auth_token) — consider moving secrets to a secrets manager.
; NOTE(review): boolean flags are inconsistent (true/false vs 1/0); values
; kept as-is since the consuming parser's convention is unknown — verify
; before normalizing.

[DATABASE]
user = aihub_prod
pwd = RttN4RbjQBfv
db_name = aihub-dag-prod
host = 172.23.12.194
port = 3306
ssl_disabled = true

[MINIO]
k8s_url = aihub-dag-minio:9000
url = aihub-dag-minio:9000
access_key = minioadmin
secret_key = minioadmin

[AF_BACKEND]
uri = aihub-dag-backend-af:8080
host = aihub-dag-backend-af
port = 8080
dag_files_dir = /dags/

[K8S]
image_pull_key = codingregistrykey
enable_kerberos = true
node_selectors = aihub-dag

[BACKEND]
url = aihub-dag-backend:8080

[AIRFLOW]
uri = airflow-webserver:8080
api_token = YWRtaW46YWRtaW4=

[HIVE]
host = 172.23.21.7
port = 7001
username = hive
password = hive
database_name = ailab
kerberos = 1
keytab = assets/txprod/user.keytab
krb5config = assets/txprod/krb5.conf
kerberos_service_name = hadoop
principal = ylaiuser@EMR-56L6ZNTS
base_path = /user/hive/warehouse/ailab.db/
zookeeper_enable = 0
zookeeper_hosts = 172.23.21.10:2181,172.23.21.15:2181,172.23.21.17:2181
zookeeper_namespace = hiveserver2

[HIVE_METASTORE]
uris = thrift://172.23.21.7:7004,thrift://172.23.21.8:7004

[TASK_IMAGES]
datax = registry.cn-hangzhou.aliyuncs.com/sxtest/txprod:datax
python = registry.cn-hangzhou.aliyuncs.com/sxtest/txprod:python
java = registry.cn-hangzhou.aliyuncs.com/sxtest/txprod:java
sparks = registry.cn-hangzhou.aliyuncs.com/sxtest/txprod:spark

[HADOOP_INNER]
default_fs = hdfs://HDFS84854
; JSON blob consumed by the application; kept on one line for portability
hadoop_config = { "dfs.nameservices": "HDFS84854", "dfs.ha.namenodes.HDFS84854": "nn1,nn2", "dfs.namenode.rpc-address.HDFS84854.nn1": "172.23.21.7:4007", "dfs.namenode.rpc-address.HDFS84854.nn2": "172.23.21.8:4007", "dfs.client.failover.proxy.provider.HDFS84854": "org.apache.hadoop.hdfs.server.namenode.ha.ConfiguredFailoverProxyProvider" }
kerberos_config = { "haveKerberos": "true", "kerberosKeytabFilePath": "/workspace/confs/user.keytab", "kerberosPrincipal": "ylaiuser@EMR-56L6ZNTS" }

[LAKE_HADOOP_INNER]
default_fs = hdfs://HDFS84854
; NOTE(review): identical to [HADOOP_INNER] — confirm whether the duplication
; is intentional (separate lake cluster) or should reference one source
hadoop_config = { "dfs.nameservices": "HDFS84854", "dfs.ha.namenodes.HDFS84854": "nn1,nn2", "dfs.namenode.rpc-address.HDFS84854.nn1": "172.23.21.7:4007", "dfs.namenode.rpc-address.HDFS84854.nn2": "172.23.21.8:4007", "dfs.client.failover.proxy.provider.HDFS84854": "org.apache.hadoop.hdfs.server.namenode.ha.ConfiguredFailoverProxyProvider" }
kerberos_config = { "haveKerberos": "true", "kerberosKeytabFilePath": "/workspace/confs/user.keytab", "kerberosPrincipal": "ylaiuser@EMR-56L6ZNTS" }

[HOST_ALIAS]
enable = false
host_alias = {}

[PERMISSIONS]
; 超级管理员 (super administrator role)
super_admin_role = TENANT_ADMIN
; 项目管理员 (project administrator role)
project_admin_role = PROJECT_ADMIN
; 算法组项目id (algorithm-group project id)
special_project_id = sfzxm

[PROGRAMME]
url = aihub-dag-helmbe:8080
namespace = aihub-prod
super_image = registry.cn-hangzhou.aliyuncs.com/sxtest/jupyterlab
ordinary_image = registry.cn-hangzhou.aliyuncs.com/sxtest/jupyterlab0
tag = txprod
host = ailab.digitalyili.com
chart = aihub-dag-jupyter.tgz
path_type = ImplementationSpecific
ingress_class = prod-pri-ingress
image_pull_secret = codingregistrykey
; NOTE(review): reconstructed from a run-on line "node_selector = cpu = 2000
; memory = 8192"; assumed node_selector is intentionally empty and cpu/memory
; are separate keys — confirm against the consumer
node_selector =
cpu = 2000
memory = 8192

[CRON_CONFIG]
hour_min = 4
hour_max = 22
enable = 1

[REQUIREMENTS_CONFIG]
prefix = hdfs://HDFS84854
path = /user/ylaiuser/pys

[JOB_CONFIG]
timeout = 5
auth_token = d0b52538-712a-11ed-8873-87519e90e821

[AI_YIQI]
url = ailab.digitalyili.com
structuring_data_dbid = 639c2ff123fb35008e7d1c52