[DATABASE]
user = test_dev
; NOTE(review): plaintext credentials stored in config — consider a secret store
pwd = Yldve35@!
db_name = aihubtest_dag_admin_db
host = 10.138.143.44
port = 3306
ssl_disabled = true

[MINIO]
k8s_url = aihub-dag-minio:9000
url = aihub-dag-minio:9000
access_key = minioadmin
secret_key = minioadmin

[AF_BACKEND]
uri = aihub-dag-backend-af:8080
host = aihub-dag-backend-af
port = 8080
dag_files_dir = /dags/

[K8S]
; intentionally empty: no image pull secret configured
image_pull_key =
enable_kerberos = false
node_selectors = aihub-dag

[BACKEND]
url = aihub-dag-backend:8080

[AIRFLOW]
uri = airflow-webserver:8080
api_token = YWRtaW46YWRtaW4=

[HIVE]
host = 10.116.1.75
port = 10000
; infa
username = aiuser
password = aiuser
; database_name = default
database_name = dataming
kerberos = 0
keytab = assets/test/user.keytab
krb5config = assets/test/krb5.conf
kerberos_service_name = hadoop
principal = ailab@EMR-5XJSY31F
base_path = /user/hive/warehouse/dataming
zookeeper_enable = 1
zookeeper_hosts = cdhtestdn06.yili.com:2181,cdhtestdn07.yili.com:2181,cdhtestdn08.yili.com:2181
; zookeeper_hosts = 10.116.1.76:2181,10.116.1.77:2181,10.116.1.78:2181
zookeeper_namespace = hiveserver2

[HIVE_METASTORE]
uris = thrift://10.116.1.72:9083

[TASK_IMAGES]
datax = registry.cn-hangzhou.aliyuncs.com/sxtest/idctest:datax
; python = registry.cn-hangzhou.aliyuncs.com/sxtest/idctest:python
python = registry.cn-hangzhou.aliyuncs.com/sxtest/idctest:spark
java = registry.cn-hangzhou.aliyuncs.com/sxtest/idctest:java
sparks = registry.cn-hangzhou.aliyuncs.com/sxtest/idctest:spark

[HADOOP_INNER]
default_fs = hdfs://nameservice1
; JSON blob consumed by the application; kept on one line so it remains a single INI value
hadoop_config = { "dfs.nameservices": "nameservice1", "dfs.ha.namenodes.nameservice1": "nn1,nn2", "dfs.namenode.rpc-address.nameservice1.nn1": "10.119.14.67:8020", "dfs.namenode.rpc-address.nameservice1.nn2": "10.119.14.68:8020", "dfs.client.failover.proxy.provider.nameservice1": "org.apache.hadoop.hdfs.server.namenode.ha.ConfiguredFailoverProxyProvider" }
kerberos_config = { "haveKerberos": "false", "kerberosKeytabFilePath": "/workspace/confs/test/user.keytab", "kerberosPrincipal": "ailab@EMR-5XJSY31F" }

[LAKE_HADOOP_INNER]
default_fs = hdfs://nameservice1
; JSON blob consumed by the application; kept on one line so it remains a single INI value
hadoop_config = { "dfs.nameservices": "nameservice1", "dfs.ha.namenodes.nameservice1": "nn1,nn2", "dfs.namenode.rpc-address.nameservice1.nn1": "10.119.14.67:8020", "dfs.namenode.rpc-address.nameservice1.nn2": "10.119.14.68:8020", "dfs.client.failover.proxy.provider.nameservice1": "org.apache.hadoop.hdfs.server.namenode.ha.ConfiguredFailoverProxyProvider" }
kerberos_config = { "haveKerberos": "false", "kerberosKeytabFilePath": "/workspace/confs/test/user.keytab", "kerberosPrincipal": "ailab@EMR-5XJSY31F" }

[HOST_ALIAS]
enable = true
; IP -> hostname mapping (JSON blob); kept on one line so it remains a single INI value
host_alias = { "10.116.1.78": "cdhtestdn08.yili.com", "10.116.1.77": "cdhtestdn07.yili.com", "10.116.1.76": "cdhtestdn06.yili.com", "10.116.1.75": "cdhtestdn05.yili.com", "10.116.1.74": "cdhtestdn04.yili.com", "10.116.1.73": "cdhtestdn03.yili.com", "10.116.1.72": "cdhtestdn02.yili.com", "10.116.1.71": "cdhtestdn01.yili.com", "10.119.14.67": "cdhtestnm01.yili.com", "10.119.14.68": "cdhtestnm02.yili.com" }

[PERMISSIONS]
; super administrator role
super_admin_role = TENANT_ADMIN
; project administrator role
project_admin_role = PROJECT_ADMIN
; algorithm group project id
special_project_id = sfzxm

[PROGRAMME]
url = aihub-dag-helmbe:8080
namespace = ns-aihub-dag
super_image = registry.cn-hangzhou.aliyuncs.com/sxtest/jupyterlab
ordinary_image = registry.cn-hangzhou.aliyuncs.com/sxtest/jupyterlab0
tag = idctest
host = ailab-idctest.digitalyili.com
chart = aihub-dag-jupyter.tgz
path_type = ImplementationSpecific
ingress_class = public
image_pull_secret = codingregistrykey
node_selector = aihub-dag
cpu = 2000
memory = 8192

[CRON_CONFIG]
hour_min = 4
hour_max = 22
enable = 1

[REQUIREMENTS_CONFIG]
prefix = hdfs://nameservice1
path = /user/aiuser/pys

[JOB_CONFIG]
timeout = 5
auth_token = d0b52538-712a-11ed-8873-87519e90e821

[AI_YIQI]
url = ailab-idctest.digitalyili.com
structuring_data_dbid = 639c3cdc909e1600b1dc27e3