; production.ini — test-environment configuration for the AI Hub DAG services
[DATABASE]
user = aihubtest
; NOTE: value contains '#'; ensure the consuming parser does not treat it as an inline comment
pwd = q9WBYDynEy@jh#5N
db_name = aihubtest_dag_admin_db
host = 10.254.12.7
port = 3306
ssl_disabled = true
[MINIO]
k8s_url = aihub-minio-yili-test:9000
url = aihub-minio-yili-test:9000
access_key = minioadmin
secret_key = minioadmin
[AF_BACKEND]
uri=aihub-backend-af-yili-test:8080
host=aihub-backend-af-yili-test
port=8080
dag_files_dir=/dags/
[K8S]
image_pull_key=codingregistrykey
enable_kerberos=true
[BACKEND]
url=aihub-backend-yili-test:8080
[AIRFLOW]
uri=airflow-webserver:8080
api_token=YWRtaW46YWRtaW4=
[HIVE]
host = 10.254.20.22
port = 7001
username = hive
password = hive
database_name = ailab
kerberos = 1
keytab = assets/test/user.keytab
krb5config = assets/test/krb5.conf
kerberos_service_name = hadoop
principal = ailab@EMR-5XJSY31F
base_path = /usr/hive/warehouse/ailab.db/
[HIVE_METASTORE]
uris=thrift://10.254.20.18:7004,thrift://10.254.20.22:7004
[TASK_IMAGES]
datax=yldc-docker.pkg.coding.yili.com/aiplatform/docker/aihub-datax-yili:latest
python=yldc-docker.pkg.coding.yili.com/aiplatform/docker/aihub-minio-yili-test:python
java=yldc-docker.pkg.coding.yili.com/aiplatform/docker/aihub-minio-yili-test:java
sparks=yldc-docker.pkg.coding.yili.com/aiplatform/docker/aihub-minio-yili-test:spark
[HADOOP_INNER]
datasource = 8,10,11
default_fs = hdfs://HDFS8000912
; multi-line JSON value: continuation lines must stay indented (configparser)
hadoop_config={
  "dfs.nameservices": "HDFS8000912",
  "dfs.ha.namenodes.HDFS8000912": "nn1,nn2",
  "dfs.namenode.rpc-address.HDFS8000912.nn1": "10.254.20.18:4007",
  "dfs.namenode.rpc-address.HDFS8000912.nn2": "10.254.20.22:4007",
  "dfs.client.failover.proxy.provider.HDFS8000912": "org.apache.hadoop.hdfs.server.namenode.ha.ConfiguredFailoverProxyProvider"
  }
kerberos_config = {
  "haveKerberos": "true",
  "kerberosKeytabFilePath": "/workspace/confs/test/user.keytab",
  "kerberosPrincipal": "ailab@EMR-5XJSY31F"
  }
[LAKE_HADOOP_INNER]
datasource = 8,10,11
default_fs = hdfs://HDFS8000912
; multi-line JSON value: continuation lines must stay indented (configparser)
hadoop_config={
  "dfs.nameservices": "HDFS8000912",
  "dfs.ha.namenodes.HDFS8000912": "nn1,nn2",
  "dfs.namenode.rpc-address.HDFS8000912.nn1": "10.254.20.18:4007",
  "dfs.namenode.rpc-address.HDFS8000912.nn2": "10.254.20.22:4007",
  "dfs.client.failover.proxy.provider.HDFS8000912": "org.apache.hadoop.hdfs.server.namenode.ha.ConfiguredFailoverProxyProvider"
  }
kerberos_config = {
  "haveKerberos": "true",
  "kerberosKeytabFilePath": "/workspace/confs/test/user.keytab",
  "kerberosPrincipal": "ailab@EMR-5XJSY31F"
  }
[HOST_ALIAS]
enable = false
host_alias = {}
[PROGRAMME]
url = aihub-helmbe-yili-test:8080
namespace = aihub-test
super_image = registry.cn-hangzhou.aliyuncs.com/sxtest/jupyterlab
ordinary_image = registry.cn-hangzhou.aliyuncs.com/sxtest/jupyterlab
tag = txtest
host = aihub-dag-test.digitalyili.com
chart = aihub-dag-jupyter.tgz
path_type = ImplementationSpecific
ingress_class = public
[CRON_CONFIG]
hour_min = 4
hour_max = 22
enable = 1
[REQUIREMENTS_CONFIG]
prefix = hdfs://HDFS8000912
; TODO: pending change (待修改) — moved off the value line; inline comments
; after a value are kept as part of the value by most INI parsers
path = /user/sxkj/pys
[JOB_CONFIG]
timeout = 5
auth_token = d0b52538-712a-11ed-8873-87519e90e821