; txprod.ini — production environment configuration

[DATABASE]
user = aihub_prod
pwd = RttN4RbjQBfv
db_name = aihub-dag-prod
host = 172.23.12.194
port = 3306
ssl_disabled = true

[MINIO]
k8s_url = aihub-dag-minio:9000
url = aihub-dag-minio:9000
access_key = minioadmin
secret_key = minioadmin

[AF_BACKEND]
uri = aihub-dag-backend-af:8080
host = aihub-dag-backend-af
port = 8080
dag_files_dir = /dags/

[K8S]
image_pull_key = codingregistrykey
enable_kerberos = true

[BACKEND]
url = aihub-dag-backend:8080

[AIRFLOW]
uri = airflow-webserver:8080
api_token = YWRtaW46YWRtaW4=

[HIVE]
host = 172.23.21.7
port = 7001
username = hive
password = hive
database_name = ailab
kerberos = 1
keytab = assets/txprod/user.keytab
krb5config = assets/txprod/krb5.conf
kerberos_service_name = hadoop
principal = ylaiuser@EMR-56L6ZNTS
base_path = /user/hive/warehouse/ailab.db/

[HIVE_METASTORE]
uris = thrift://172.23.21.7:7004,thrift://172.23.21.8:7004

[TASK_IMAGES]
datax = registry.cn-hangzhou.aliyuncs.com/sxtest/txprod:datax
python = registry.cn-hangzhou.aliyuncs.com/sxtest/txprod:python
java = registry.cn-hangzhou.aliyuncs.com/sxtest/txprod:java
sparks = registry.cn-hangzhou.aliyuncs.com/sxtest/txprod:spark

[HADOOP_INNER]
default_fs = hdfs://HDFS84854
; multi-line JSON value: continuation lines must stay indented so the
; parser treats them as part of hadoop_config
hadoop_config = {
    "dfs.nameservices": "HDFS84854",
    "dfs.ha.namenodes.HDFS84854": "nn1,nn2",
    "dfs.namenode.rpc-address.HDFS84854.nn1": "172.23.21.7:4007",
    "dfs.namenode.rpc-address.HDFS84854.nn2": "172.23.21.8:4007",
    "dfs.client.failover.proxy.provider.HDFS84854": "org.apache.hadoop.hdfs.server.namenode.ha.ConfiguredFailoverProxyProvider"
    }
kerberos_config = {
    "haveKerberos": "true",
    "kerberosKeytabFilePath": "/workspace/confs/user.keytab",
    "kerberosPrincipal": "ylaiuser@EMR-56L6ZNTS"
    }

[LAKE_HADOOP_INNER]
default_fs = hdfs://HDFS84854
; multi-line JSON value: continuation lines must stay indented so the
; parser treats them as part of hadoop_config
hadoop_config = {
    "dfs.nameservices": "HDFS84854",
    "dfs.ha.namenodes.HDFS84854": "nn1,nn2",
    "dfs.namenode.rpc-address.HDFS84854.nn1": "172.23.21.7:4007",
    "dfs.namenode.rpc-address.HDFS84854.nn2": "172.23.21.8:4007",
    "dfs.client.failover.proxy.provider.HDFS84854": "org.apache.hadoop.hdfs.server.namenode.ha.ConfiguredFailoverProxyProvider"
    }
kerberos_config = {
    "haveKerberos": "true",
    "kerberosKeytabFilePath": "/workspace/confs/user.keytab",
    "kerberosPrincipal": "ylaiuser@EMR-56L6ZNTS"
    }

[HOST_ALIAS]
enable = false
host_alias = {}

[PERMISSIONS]
; super administrator role
super_admin_role = TENANT_ADMIN
; project administrator role
project_admin_role = PROJECT_ADMIN
; project id of the algorithm group
special_project_id = sfzxm

[PROGRAMME]
url = aihub-dag-helmbe:8080
namespace = aihub-prod
super_image = registry.cn-hangzhou.aliyuncs.com/sxtest/jupyterlab
ordinary_image = registry.cn-hangzhou.aliyuncs.com/sxtest/jupyterlab
tag = txprod
host = ailab.digitalyili.com
chart = aihub-dag-jupyter.tgz
path_type = ImplementationSpecific
ingress_class = prod-pri-ingress
image_pull_secret = codingregistrykey
node_selector =

[CRON_CONFIG]
hour_min = 4
hour_max = 22
enable = 1

[REQUIREMENTS_CONFIG]
prefix = hdfs://HDFS84854
path = /user/ylaiuser/pys

[JOB_CONFIG]
timeout = 5
auth_token = d0b52538-712a-11ed-8873-87519e90e821