```ini
[DATABASE]
user = aihubtest
pwd = q9WBYDynEy@jh#5N
db_name = aihubtest_dag_admin_db
host = 10.254.12.7
port = 3306
ssl_disabled = true

[MINIO]
k8s_url = aihub-dag-minio:9000
url = aihub-dag-minio:9000
access_key = minioadmin
secret_key = minioadmin

[AF_BACKEND]
uri=aihub-dag-backend-af:8080
host=aihub-dag-backend-af
port=8080
dag_files_dir=/dags/

[K8S]
image_pull_key=codingregistrykey
enable_kerberos=true

[BACKEND]
url=aihub-dag-backend:8080

[AIRFLOW]
uri=airflow-webserver:8080
api_token=YWRtaW46YWRtaW4=

[HIVE]
host = 10.254.20.22
port = 7001
username = hive
password = hive
database_name = ailab
kerberos = 1
keytab = assets/test/user.keytab
krb5config = assets/test/krb5.conf
kerberos_service_name = hadoop
principal = ailab@EMR-5XJSY31F
base_path = /usr/hive/warehouse/ailab.db/
zookeeper_enable=1
zookeeper_hosts = 10.254.20.23:2181,10.254.20.26:2181,10.254.20.29:2181
zookeeper_namespace = hiveserver2

[HIVE_METASTORE]
uris=thrift://10.254.20.18:7004,thrift://10.254.20.22:7004

[TASK_IMAGES]
datax=yldc-docker.pkg.coding.yili.com/aiplatform/docker/aihub-datax-yili:latest
python=yldc-docker.pkg.coding.yili.com/aiplatform/docker/aihub-minio-yili-test:python
java=yldc-docker.pkg.coding.yili.com/aiplatform/docker/aihub-minio-yili-test:java
sparks=yldc-docker.pkg.coding.yili.com/aiplatform/docker/aihub-minio-yili-test:spark

[HADOOP_INNER]
default_fs = hdfs://HDFS8000912
hadoop_config={
    "dfs.nameservices": "HDFS8000912",
    "dfs.ha.namenodes.HDFS8000912": "nn1,nn2",
    "dfs.namenode.rpc-address.HDFS8000912.nn1": "10.254.20.18:4007",
    "dfs.namenode.rpc-address.HDFS8000912.nn2": "10.254.20.22:4007",
    "dfs.client.failover.proxy.provider.HDFS8000912": "org.apache.hadoop.hdfs.server.namenode.ha.ConfiguredFailoverProxyProvider"
    }
kerberos_config = {
    "haveKerberos": "true",
    "kerberosKeytabFilePath": "/workspace/confs/test/user.keytab",
    "kerberosPrincipal": "ailab@EMR-5XJSY31F"
    }

[LAKE_HADOOP_INNER]
default_fs = hdfs://HDFS8000912
hadoop_config={
    "dfs.nameservices": "HDFS8000912",
    "dfs.ha.namenodes.HDFS8000912": "nn1,nn2",
    "dfs.namenode.rpc-address.HDFS8000912.nn1": "10.254.20.18:4007",
    "dfs.namenode.rpc-address.HDFS8000912.nn2": "10.254.20.22:4007",
    "dfs.client.failover.proxy.provider.HDFS8000912": "org.apache.hadoop.hdfs.server.namenode.ha.ConfiguredFailoverProxyProvider"
    }
kerberos_config = {
    "haveKerberos": "true",
    "kerberosKeytabFilePath": "/workspace/confs/test/user.keytab",
    "kerberosPrincipal": "ailab@EMR-5XJSY31F"
    }

[HOST_ALIAS]
enable = false
host_alias = {}

[PERMISSIONS]
; super administrator
super_admin_role = TENANT_ADMIN
; project administrator
project_admin_role = PROJECT_ADMIN
; algorithm-group project id
special_project_id = sfz

[PROGRAMME]
url = aihub-dag-helmbe:8080
namespace = aihub-test
super_image = registry.cn-hangzhou.aliyuncs.com/sxtest/jupyterlab
ordinary_image = registry.cn-hangzhou.aliyuncs.com/sxtest/jupyterlab
tag = txtest
host = ailab-test.digitalyili.com
chart = aihub-dag-jupyter.tgz
path_type = ImplementationSpecific
ingress_class = test-pri-ingress
image_pull_secret=codingregistrykey
node_selector =

[CRON_CONFIG]
hour_min = 4
hour_max = 22
enable = 1

[REQUIREMENTS_CONFIG]
prefix = hdfs://HDFS8000912
path = /user/ailab/pys

[JOB_CONFIG]
timeout = 5
auth_token = d0b52538-712a-11ed-8873-87519e90e821
```
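The file is plain INI except for `hadoop_config` / `kerberos_config`, whose values are multi-line JSON blobs (the continuation lines are indented, so a standard INI parser keeps them as part of the value). A minimal loading sketch, assuming the file is saved as `config.ini` and read with Python's `configparser`; the project's real loader may differ:

```python
import configparser
import json

# Load the INI file shown above (the config.ini path is illustrative).
cfg = configparser.ConfigParser()
cfg.read("config.ini")

# Scalar values come back as strings; cast where needed.
db_host = cfg["DATABASE"]["host"]
db_port = cfg.getint("DATABASE", "port")
ssl_disabled = cfg.getboolean("DATABASE", "ssl_disabled")

# hadoop_config / kerberos_config are JSON embedded in the INI value;
# the indented continuation lines are preserved, so json.loads can decode them.
hadoop_conf = json.loads(cfg["HADOOP_INNER"]["hadoop_config"])
kerberos_conf = json.loads(cfg["HADOOP_INNER"]["kerberos_config"])

print(db_host, db_port, ssl_disabled)
print(hadoop_conf["dfs.nameservices"], kerberos_conf["kerberosPrincipal"])
```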
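For `[MINIO]`, the `url`/`k8s_url` value is a bare host:port, which suggests plain HTTP inside the cluster. A sketch with the official `minio` Python SDK; the choice of SDK and `secure=False` are assumptions, not something the config itself states:

```python
from minio import Minio

# Client for the in-cluster MinIO service from the [MINIO] section.
client = Minio(
    "aihub-dag-minio:9000",
    access_key="minioadmin",
    secret_key="minioadmin",
    secure=False,  # assumption: plain HTTP inside the cluster
)
print([bucket.name for bucket in client.list_buckets()])
```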
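The `[AIRFLOW]` `api_token` is base64 of `user:password` (`YWRtaW46YWRtaW4=` decodes to `admin:admin`), i.e. the value of an HTTP Basic `Authorization` header. A sketch against the Airflow 2.x stable REST API, assuming the `basic_auth` API backend is enabled on the webserver:

```python
import requests

AIRFLOW_URI = "airflow-webserver:8080"
API_TOKEN = "YWRtaW46YWRtaW4="  # base64("admin:admin")

# List DAGs via the stable REST API, authenticating with the pre-encoded token.
resp = requests.get(
    f"http://{AIRFLOW_URI}/api/v1/dags",
    headers={"Authorization": f"Basic {API_TOKEN}"},
    timeout=10,
)
resp.raise_for_status()
print([dag["dag_id"] for dag in resp.json()["dags"]])
```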
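`[HIVE]` enables both Kerberos (`kerberos = 1`) and ZooKeeper service discovery (`zookeeper_enable=1`), so clients would normally resolve a live HiveServer2 from the `hiveserver2` namespace rather than hard-coding `host`/`port`. A sketch using `kazoo` for discovery and PyHive for the connection; the libraries, the znode naming convention, and the assumption that a ticket for `ailab@EMR-5XJSY31F` has already been obtained from the keytab (e.g. `kinit -kt assets/test/user.keytab ailab@EMR-5XJSY31F`) are illustrative, not necessarily what this project does:

```python
from kazoo.client import KazooClient
from pyhive import hive

ZK_HOSTS = "10.254.20.23:2181,10.254.20.26:2181,10.254.20.29:2181"
ZK_NAMESPACE = "hiveserver2"

# HiveServer2 instances usually register znodes named like
# "serverUri=host:port;version=...;sequence=..." under /<namespace>
# (assumed convention).
zk = KazooClient(hosts=ZK_HOSTS)
zk.start()
try:
    node = sorted(zk.get_children("/" + ZK_NAMESPACE))[0]
    server_uri = dict(part.split("=", 1) for part in node.split(";"))["serverUri"]
    hs2_host, hs2_port = server_uri.split(":")
finally:
    zk.stop()

# Kerberos auth relies on a ticket already being in the credential cache;
# kerberos_service_name matches the value in the config above.
conn = hive.Connection(
    host=hs2_host,
    port=int(hs2_port),
    database="ailab",
    auth="KERBEROS",
    kerberos_service_name="hadoop",
)
cursor = conn.cursor()
cursor.execute("SHOW TABLES")
print(cursor.fetchall())
cursor.close()
conn.close()
```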