[DATABASE]
user = dag
pwd = Dag@2022
db_name = aihub_dag
host = 10.138.130.36
port = 3306
ssl_disabled = true

[MINIO]
k8s_url = aihub-dag-minio:9000
url = aihub-dag-minio:9000
access_key = minioadmin
secret_key = minioadmin

[AF_BACKEND]
uri = aihub-dag-backend-af:8080
host = aihub-dag-backend-af
port = 8080
dag_files_dir = /dags/

[K8S]
image_pull_key =
enable_kerberos = false

[BACKEND]
url = aihub-dag-backend:8080

[AIRFLOW]
uri = airflow-webserver:8080
api_token = YWRtaW46YWRtaW4=

[HIVE]
host = 10.150.5.20
port = 10000
username = aiuser
password = qaz2wsx#
database_name = dataming
kerberos = 0
keytab = assets/test/user.keytab
krb5config = assets/test/krb5.conf
kerberos_service_name = hadoop
principal = ailab@EMR-5XJSY31F
base_path = /user/hive/warehouse/dataming
zookeeper_enable = 0
zookeeper_hosts = cdhproddn03.yili.com:2181,cdhproddn04.yili.com:2181,cdhproddn05.yili.com:2181,cdhproddn06.yili.com:2181,cdhproddn07.yili.com:2181
zookeeper_namespace = hiveserver2

[HIVE_METASTORE]
uris = thrift://cdhproddn02.yili.com:9083

[TASK_IMAGES]
datax = registry.cn-hangzhou.aliyuncs.com/sxtest/idcprod:datax
python = registry.cn-hangzhou.aliyuncs.com/sxtest/idcprod:python
java = registry.cn-hangzhou.aliyuncs.com/sxtest/idcprod:java
sparks = registry.cn-hangzhou.aliyuncs.com/sxtest/idcprod:spark

[HADOOP_INNER]
default_fs = hdfs://nameservice1
hadoop_config = {
    "dfs.nameservices": "nameservice1",
    "dfs.ha.namenodes.nameservice1": "nn1,nn2",
    "dfs.namenode.rpc-address.nameservice1.nn1": "10.150.4.50:8020",
    "dfs.namenode.rpc-address.nameservice1.nn2": "10.150.4.51:8020",
    "dfs.client.failover.proxy.provider.nameservice1": "org.apache.hadoop.hdfs.server.namenode.ha.ConfiguredFailoverProxyProvider"
    }
kerberos_config = {
    "haveKerberos": "false",
    "kerberosKeytabFilePath": "/workspace/confs/test/user.keytab",
    "kerberosPrincipal": "ailab@EMR-5XJSY31F"
    }

[LAKE_HADOOP_INNER]
default_fs = hdfs://nameservice1
hadoop_config = {
    "dfs.nameservices": "nameservice1",
    "dfs.ha.namenodes.nameservice1": "nn1,nn2",
    "dfs.namenode.rpc-address.nameservice1.nn1": "10.150.4.50:8020",
    "dfs.namenode.rpc-address.nameservice1.nn2": "10.150.4.51:8020",
    "dfs.client.failover.proxy.provider.nameservice1": "org.apache.hadoop.hdfs.server.namenode.ha.ConfiguredFailoverProxyProvider"
    }
kerberos_config = {
    "haveKerberos": "false",
    "kerberosKeytabFilePath": "/workspace/confs/test/user.keytab",
    "kerberosPrincipal": "ailab@EMR-5XJSY31F"
    }

[HOST_ALIAS]
enable = true
host_alias = {
    "10.150.4.25": "YILIGKS01",
    "10.150.4.47": "cdhprodcm01.yili.com",
    "10.150.4.48": "cdhprodcm02.yili.com",
    "10.150.4.49": "cdhprodcm03.yili.com",
    "10.150.5.10": "cdhproddn01.yili.com",
    "10.150.5.11": "cdhproddn02.yili.com",
    "10.150.5.12": "cdhproddn03.yili.com",
    "10.150.5.13": "cdhproddn04.yili.com",
    "10.150.5.14": "cdhproddn05.yili.com",
    "10.150.5.15": "cdhproddn06.yili.com",
    "10.150.5.16": "cdhproddn07.yili.com",
    "10.150.5.17": "cdhproddn08.yili.com",
    "10.150.5.18": "cdhproddn09.yili.com",
    "10.150.5.19": "cdhproddn10.yili.com",
    "10.150.5.20": "cdhproddn11.yili.com",
    "10.150.5.21": "cdhproddn12.yili.com",
    "10.150.5.22": "cdhproddn13.yili.com",
    "10.150.5.23": "cdhproddn14.yili.com",
    "10.150.5.24": "cdhproddn15.yili.com",
    "10.150.5.25": "cdhproddn16.yili.com",
    "10.150.5.26": "cdhproddn17.yili.com",
    "10.150.5.27": "cdhproddn18.yili.com",
    "10.150.5.28": "cdhproddn19.yili.com",
    "10.150.5.29": "cdhproddn20.yili.com",
    "10.150.5.30": "cdhproddn21.yili.com",
    "10.150.5.31": "cdhproddn22.yili.com",
    "10.150.5.32": "cdhproddn23.yili.com",
    "10.150.5.33": "cdhproddn24.yili.com",
    "10.150.5.34": "cdhproddn25.yili.com",
    "10.150.5.35": "cdhproddn26.yili.com",
    "10.150.5.36": "cdhproddn27.yili.com",
    "10.150.5.37": "cdhproddn28.yili.com",
    "10.150.5.38": "cdhproddn29.yili.com",
    "10.150.5.39": "cdhproddn30.yili.com",
    "10.150.5.40": "cdhproddn31.yili.com",
    "10.150.5.41": "cdhproddn32.yili.com",
    "10.150.5.42": "cdhproddn33.yili.com",
    "10.150.5.43": "cdhproddn34.yili.com",
    "10.150.5.44": "cdhproddn35.yili.com",
    "10.150.5.45": "cdhproddn36.yili.com",
    "10.150.5.46": "cdhproddn37.yili.com",
    "10.150.5.47": "cdhproddn38.yili.com",
    "10.150.5.48": "cdhproddn39.yili.com",
    "10.150.5.49": "cdhproddn40.yili.com",
    "10.150.5.50": "cdhproddn41.yili.com",
    "10.150.5.51": "cdhproddn42.yili.com",
    "10.150.4.50": "cdhprodnm01.yili.com",
    "10.150.4.51": "cdhprodnm02.yili.com",
    "10.150.4.26": "etlds1.yili.com",
    "10.150.4.27": "etlds2.yili.com",
    "10.150.4.28": "etlds3.yili.com",
    "10.150.4.29": "etlds4.yili.com"
    }

[PERMISSIONS]
; super administrator role
super_admin_role = TENANT_ADMIN
; project administrator role
project_admin_role = PROJECT_ADMIN
; algorithm team project id
special_project_id = sfzxm

[PROGRAMME]
url = aihub-dag-helmbe:8080
namespace = ns-aihub-dag
super_image = registry.cn-hangzhou.aliyuncs.com/sxtest/jupyterlab
ordinary_image = registry.cn-hangzhou.aliyuncs.com/sxtest/jupyterlab0
tag = idcprod
host = ailab-idc.digitalyili.com
chart = aihub-dag-jupyter.tgz
path_type = ImplementationSpecific
ingress_class = public
image_pull_secret = codingregistrykey
node_selector =

[CRON_CONFIG]
hour_min = 4
hour_max = 22
enable = 1

[REQUIREMENTS_CONFIG]
prefix = hdfs://nameservice1
path = /user/AIuser/pys

[JOB_CONFIG]
timeout = 5
auth_token = d0b52538-712a-11ed-8873-87519e90e821

[AI_YIQI]
url = ailab-idc.digitalyili.com
# to be updated
structuring_data_dbid = 639ae17ff57861009e2c87fd