- [DATABASE]
- user = aihubtest
- pwd = q9WBYDynEy@jh#5N
- db_name = aihubtest_dag_admin_db
- host = 10.254.12.7
- port = 3306
- ssl_disabled = true
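A minimal sketch of how the `[DATABASE]` section might be loaded and used, assuming Python's `configparser` and the PyMySQL driver; the filename `settings.cfg` and the driver choice are assumptions, not stated in this file:

```python
import configparser

import pymysql  # assumption: MySQL is reached via PyMySQL; the real driver is not named here

cfg = configparser.ConfigParser()
cfg.read("settings.cfg")  # hypothetical filename for this config file

db = cfg["DATABASE"]
conn = pymysql.connect(
    host=db["host"],
    port=db.getint("port"),
    user=db["user"],
    password=db["pwd"],
    database=db["db_name"],
)
# db.getboolean("ssl_disabled") would additionally drive the driver's SSL options.
with conn.cursor() as cur:
    cur.execute("SELECT VERSION()")
    print(cur.fetchone())
```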
- [MINIO]
- k8s_url = aihub-minio-yili-test:9000
- url = aihub-minio-yili-test:9000
- access_key = minioadmin
- secret_key = minioadmin
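A sketch of an object-storage client built from the `[MINIO]` keys, assuming the MinIO Python SDK; `secure=False` is an assumption based on the plain in-cluster host:port endpoint:

```python
from minio import Minio

client = Minio(
    "aihub-minio-yili-test:9000",  # url / k8s_url
    access_key="minioadmin",
    secret_key="minioadmin",
    secure=False,  # assumption: no TLS on the in-cluster endpoint
)
print([bucket.name for bucket in client.list_buckets()])
```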
- [AF_BACKEND]
- uri=aihub-backend-af-yili-test:8080
- host=aihub-backend-af-yili-test
- port=8080
- dag_files_dir=/dags/
- [K8S]
- image_pull_key=codingregistrykey
- enable_kerberos=true
- [BACKEND]
- url=aihub-backend-yili-test:8080
- [AIRFLOW]
- uri=airflow-webserver:8080
- api_token=YWRtaW46YWRtaW4=
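`api_token` is a base64-encoded `user:password` pair, so it can be sent as an HTTP Basic `Authorization` header. A sketch against Airflow's stable REST API, assuming Airflow 2.x and the `requests` library:

```python
import requests

AIRFLOW_URI = "airflow-webserver:8080"
API_TOKEN = "YWRtaW46YWRtaW4="  # base64("user:password") for HTTP Basic auth

resp = requests.get(
    f"http://{AIRFLOW_URI}/api/v1/dags",
    headers={"Authorization": f"Basic {API_TOKEN}"},
    timeout=10,
)
resp.raise_for_status()
print([dag["dag_id"] for dag in resp.json()["dags"]])
```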
- [HIVE]
- host = 10.254.20.22
- port = 7001
- username = hive
- password = hive
- database_name = ailab
- kerberos = 1
- keytab = assets/test/user.keytab
- krb5config = assets/test/krb5.conf
- kerberos_service_name = hadoop
- principal = ailab@EMR-5XJSY31F
- base_path = /usr/hive/warehouse/ailab.db/
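A sketch of a Kerberos-authenticated Hive connection built from the `[HIVE]` keys, assuming PyHive; obtaining a ticket from the listed keytab (e.g. with `kinit`) is a prerequisite that this file does not show:

```python
import os

from pyhive import hive  # assumption: Hive is queried through PyHive

os.environ["KRB5_CONFIG"] = "assets/test/krb5.conf"
# A ticket for ailab@EMR-5XJSY31F must already be in the credential cache,
# e.g. obtained beforehand with: kinit -kt assets/test/user.keytab ailab@EMR-5XJSY31F

conn = hive.connect(
    host="10.254.20.22",
    port=7001,
    database="ailab",
    auth="KERBEROS",
    kerberos_service_name="hadoop",  # first component of the service principal
)
cur = conn.cursor()
cur.execute("SHOW TABLES")
print(cur.fetchall())
```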
- [HIVE_METASTORE]
- uris=thrift://10.254.20.18:7004,thrift://10.254.20.22:7004
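The metastore URIs form a comma-separated failover pair. A sketch of pointing a Spark session at them, assuming PySpark is one of the consumers (suggested by the `sparks` task image below, but still an assumption):

```python
from pyspark.sql import SparkSession

spark = (
    SparkSession.builder
    .appName("metastore-smoke-test")
    .config("hive.metastore.uris",
            "thrift://10.254.20.18:7004,thrift://10.254.20.22:7004")
    .enableHiveSupport()
    .getOrCreate()
)
spark.sql("SHOW DATABASES").show()
```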
- [TASK_IMAGES]
- datax=yldc-docker.pkg.coding.yili.com/aiplatform/docker/aihub-datax-yili:latest
- python=yldc-docker.pkg.coding.yili.com/aiplatform/docker/aihub-minio-yili-test:python
- java=yldc-docker.pkg.coding.yili.com/aiplatform/docker/aihub-minio-yili-test:java
- sparks=yldc-docker.pkg.coding.yili.com/aiplatform/docker/aihub-minio-yili-test:spark
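These task images are typically combined with the `[K8S]` pull secret when task pods are created. A sketch using the official Kubernetes Python client; the pod layout itself is an assumption, since the platform's actual launcher is not shown here:

```python
from kubernetes import client

container = client.V1Container(
    name="datax",
    image="yldc-docker.pkg.coding.yili.com/aiplatform/docker/aihub-datax-yili:latest",
)
pod_spec = client.V1PodSpec(
    containers=[container],
    restart_policy="Never",
    image_pull_secrets=[client.V1LocalObjectReference(name="codingregistrykey")],
)
```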
- [HADOOP_INNER]
- datasource = 8,10,11
- default_fs = hdfs://HDFS8000912
- hadoop_config={
- "dfs.nameservices": "HDFS8000912",
- "dfs.ha.namenodes.HDFS8000912": "nn1,nn2",
- "dfs.namenode.rpc-address.HDFS8000912.nn1": "10.254.20.18:4007",
- "dfs.namenode.rpc-address.HDFS8000912.nn2": "10.254.20.22:4007",
- "dfs.client.failover.proxy.provider.HDFS8000912": "org.apache.hadoop.hdfs.server.namenode.ha.ConfiguredFailoverProxyProvider"
- }
- kerberos_config = {
- "haveKerberos": "true",
- "kerberosKeytabFilePath": "/workspace/confs/test/user.keytab",
- "kerberosPrincipal": "ailab@EMR-5XJSY31F"
- }
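`hadoop_config` and `kerberos_config` are JSON objects embedded as values (HDFS HA NameNode settings plus DataX-style Kerberos keys), so a consumer would parse them with `json.loads`. A sketch under that assumption, which also assumes the continuation lines are indented in the real file so `configparser` reads each as one multi-line value:

```python
import configparser
import json

cfg = configparser.ConfigParser()
cfg.read("settings.cfg")  # hypothetical filename for this config file

section = cfg["HADOOP_INNER"]
hadoop_conf = json.loads(section["hadoop_config"])      # HA NameNode addresses and failover proxy
kerberos_conf = json.loads(section["kerberos_config"])  # keytab path and principal
datasource_ids = [int(x) for x in section["datasource"].split(",")]

print(section["default_fs"], hadoop_conf["dfs.nameservices"], datasource_ids)
print(kerberos_conf["kerberosPrincipal"])
```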
- [LAKE_HADOOP_INNER]
- datasource = 8,10,11
- default_fs = hdfs://HDFS8000912
- hadoop_config={
- "dfs.nameservices": "HDFS8000912",
- "dfs.ha.namenodes.HDFS8000912": "nn1,nn2",
- "dfs.namenode.rpc-address.HDFS8000912.nn1": "10.254.20.18:4007",
- "dfs.namenode.rpc-address.HDFS8000912.nn2": "10.254.20.22:4007",
- "dfs.client.failover.proxy.provider.HDFS8000912": "org.apache.hadoop.hdfs.server.namenode.ha.ConfiguredFailoverProxyProvider"
- }
- kerberos_config = {
- "haveKerberos": "true",
- "kerberosKeytabFilePath": "/workspace/confs/test/user.keytab",
- "kerberosPrincipal": "ailab@EMR-5XJSY31F"
- }
- [HOST_ALIAS]
- enable = false
- host_alias = {}
- [PROGRAMME]
- url = aihub-helmbe-yili-test:8080
- namespace = aihub-test
- super_image = yldc-docker.pkg.coding.yili.com/aiplatform/docker/aihub-jupyter-yili-test
- ordinary_image = yldc-docker.pkg.coding.yili.com/aiplatform/docker/aihub-jupyter-yili-test
- tag = test
- host = ailab-test.digitalyili.com
- chart = aihub-dag-jupyter.tgz
- path_type = ImplementationSpecific
- [CRON_CONFIG]
- hour_min = 4
- hour_max = 22
- enable = 1
- [REQUIREMENTS_CONFIG]
- prefix = hdfs://HDFS8000912
- path = /user/sxkj/pys  # to be modified
- [JOB_CONFIG]
- timeout = 5
- auth_token = d0b52538-712a-11ed-8873-87519e90e821
- ; Tencent Cloud test environment address
- [AI_YIQI]
- url = ailab-test.digitalyili.com