; txtest.ini — AIHub DAG test-environment configuration
; NOTE(review): presumably read by Python configparser (multi-line JSON values
; below use indented continuation lines) — verify against the consuming code.
; WARNING: contains plaintext credentials and tokens; consider moving secrets
; to environment variables or a secret store.
[DATABASE]
user = aihubtest
pwd = q9WBYDynEy@jh#5N
db_name = aihubtest_dag_admin_db
host = 10.254.12.7
port = 3306
ssl_disabled = true

[MINIO]
k8s_url = aihub-dag-minio:9000
url = aihub-dag-minio:9000
access_key = minioadmin
secret_key = minioadmin

[AF_BACKEND]
uri = aihub-dag-backend-af:8080
host = aihub-dag-backend-af
port = 8080
dag_files_dir = /dags/

[K8S]
image_pull_key = codingregistrykey
enable_kerberos = true

[BACKEND]
url = aihub-dag-backend:8080

[AIRFLOW]
uri = airflow-webserver:8080
api_token = YWRtaW46YWRtaW4=

[HIVE]
host = 10.254.20.22
port = 7001
username = hive
password = hive
database_name = ailab
kerberos = 1
keytab = assets/test/user.keytab
krb5config = assets/test/krb5.conf
kerberos_service_name = hadoop
principal = ailab@EMR-5XJSY31F
base_path = /usr/hive/warehouse/ailab.db/
zookeeper_enable = 1
zookeeper_hosts = 10.254.20.23:2181,10.254.20.26:2181,10.254.20.29:2181
zookeeper_namespace = hiveserver2

[HIVE_METASTORE]
uris = thrift://10.254.20.18:7004,thrift://10.254.20.22:7004

[TASK_IMAGES]
datax = yldc-docker.pkg.coding.yili.com/aiplatform/docker/aihub-datax-yili:latest
python = yldc-docker.pkg.coding.yili.com/aiplatform/docker/aihub-minio-yili-test:python
java = yldc-docker.pkg.coding.yili.com/aiplatform/docker/aihub-minio-yili-test:java
sparks = yldc-docker.pkg.coding.yili.com/aiplatform/docker/aihub-minio-yili-test:spark

[HADOOP_INNER]
default_fs = hdfs://HDFS8000912
; JSON blob; continuation lines are indented so configparser treats them as
; part of the value.
hadoop_config = {
    "dfs.nameservices": "HDFS8000912",
    "dfs.ha.namenodes.HDFS8000912": "nn1,nn2",
    "dfs.namenode.rpc-address.HDFS8000912.nn1": "10.254.20.18:4007",
    "dfs.namenode.rpc-address.HDFS8000912.nn2": "10.254.20.22:4007",
    "dfs.client.failover.proxy.provider.HDFS8000912": "org.apache.hadoop.hdfs.server.namenode.ha.ConfiguredFailoverProxyProvider"
    }
kerberos_config = {
    "haveKerberos": "true",
    "kerberosKeytabFilePath": "/workspace/confs/test/user.keytab",
    "kerberosPrincipal": "ailab@EMR-5XJSY31F"
    }

[LAKE_HADOOP_INNER]
default_fs = hdfs://HDFS8000912
; JSON blob; continuation lines are indented so configparser treats them as
; part of the value.
hadoop_config = {
    "dfs.nameservices": "HDFS8000912",
    "dfs.ha.namenodes.HDFS8000912": "nn1,nn2",
    "dfs.namenode.rpc-address.HDFS8000912.nn1": "10.254.20.18:4007",
    "dfs.namenode.rpc-address.HDFS8000912.nn2": "10.254.20.22:4007",
    "dfs.client.failover.proxy.provider.HDFS8000912": "org.apache.hadoop.hdfs.server.namenode.ha.ConfiguredFailoverProxyProvider"
    }
kerberos_config = {
    "haveKerberos": "true",
    "kerberosKeytabFilePath": "/workspace/confs/test/user.keytab",
    "kerberosPrincipal": "ailab@EMR-5XJSY31F"
    }

[HOST_ALIAS]
enable = false
host_alias = {}

[PROGRAMME]
url = aihub-dag-helmbe:8080
namespace = aihub-test
super_image = registry.cn-hangzhou.aliyuncs.com/sxtest/jupyterlab
ordinary_image = registry.cn-hangzhou.aliyuncs.com/sxtest/jupyterlab
tag = txtest
host = aihub-dag-test.digitalyili.com
chart = aihub-dag-jupyter.tgz
path_type = ImplementationSpecific
ingress_class = test-pri-ingress

[CRON_CONFIG]
; allowed scheduling window, hours in 0-23
hour_min = 4
hour_max = 22
enable = 1

[REQUIREMENTS_CONFIG]
prefix = hdfs://HDFS8000912
path = /user/ailab/pys

[JOB_CONFIG]
; timeout unit not stated here — confirm (seconds vs minutes) against consumer
timeout = 5
auth_token = d0b52538-712a-11ed-8873-87519e90e821