datax.py

import time
from app import crud, models
from app.utils.send_util import *
from app.utils.utils import get_cmd_parameter
from sqlalchemy.orm import Session
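

# NOTE (assumption based on the calls below): the wildcard import from
# app.utils.send_util is expected to provide send_post, send_put, send_get,
# send_submit, send_pause and get_job_last_parsed_time, i.e. the HTTP helpers
# used against the /af/* endpoints.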

def datax_create_job(job_info: models.JobInfo, db: Session):
    """Create a datax job on the scheduler side and bind it to the local job_info record."""
    af_task = datax_create_task(job_info)
    # str.replace returns a new string, so the result must be reassigned
    # (the original code discarded it); '?' fields are mapped to '*'.
    cron: str = job_info.job_cron
    cron = cron.replace('?', '*')
    af_job = {
        "tasks": [af_task],
        "name": job_info.job_desc,
        "dependence": [],
        "cron": cron,
        "desc": job_info.job_desc,
        "route_strategy": job_info.executor_route_strategy,
        "block_strategy": job_info.executor_block_strategy,
        "executor_timeout": job_info.executor_timeout,
        "executor_fail_retry_count": job_info.executor_fail_retry_count,
        "trigger_status": job_info.trigger_status,
        "job_mode": 1,
        "job_type": 0,
        "user_id": 0,
    }
    res = send_post('/af/af_job', af_job)
    af_job = res['data']
    # Remember the mapping between the local job and the remote af_job,
    # then submit it and apply the requested on/off state.
    crud.create_relation(db, job_info.id, 'datax', af_job['id'])
    send_submit(af_job['id'])
    on_off_control(af_job['id'], job_info.trigger_status)

def datax_create_task(job_info: models.JobInfo):
    """Create the datax task that carries the job script and incremental-sync envs."""
    cmd_parameter = get_cmd_parameter(job_info.jvm_param)
    partition_list = []
    if job_info.partition_info is not None and job_info.partition_info != '':
        # partition_info is expected as "partition_word,partition_diff,partition_format"
        partition_list = job_info.partition_info.split(',')
    envs = {}
    if job_info.inc_start_time and job_info.last_time and len(partition_list) > 0 and job_info.current_time:
        envs = {
            "first_begin_time": job_info.inc_start_time,
            "last_key": job_info.last_time,
            "current_key": job_info.current_time,
            "partition_key": "partition",
            "partition_word": partition_list[0] if len(partition_list) > 0 else '',
            "partition_format": partition_list[2] if len(partition_list) > 2 else '',
            "partition_diff": partition_list[1] if len(partition_list) > 1 else '',
        }
    af_task = {
        "name": job_info.job_desc,
        "file_urls": [],
        "script": job_info.job_json,
        "cmd": "",
        "cmd_parameters": cmd_parameter,
        "envs": envs,
        "run_image": "",
        "task_type": "datax",
        "user_id": 0,
    }
    res = send_post('/af/af_task', af_task)
    af_task = res['data']
    return af_task

def datax_update_job(job_info: models.JobInfo, db: Session):
    """Update the scheduler-side job (and its single task) already bound to job_info."""
    relation = crud.get_af_id(db, job_info.id, 'datax')
    af_job_id = relation.af_id
    res = send_get("/af/af_job/getOnce", af_job_id)
    old_af_job = res['data']
    old_af_task = old_af_job['tasks'][0]
    af_task = datax_put_task(job_info, old_af_task)
    cron: str = job_info.job_cron
    cron = cron.replace('?', '*')  # str.replace returns a new string; reassign it
    af_job = {
        "tasks": [af_task],
        "name": job_info.job_desc,
        "dependence": [],
        "cron": cron,
        "desc": job_info.job_desc,
        "route_strategy": job_info.executor_route_strategy,
        "block_strategy": job_info.executor_block_strategy,
        "executor_timeout": job_info.executor_timeout,
        "executor_fail_retry_count": job_info.executor_fail_retry_count,
        "trigger_status": job_info.trigger_status,
    }
    res = send_put('/af/af_job', old_af_job['id'], af_job)
    af_job = res['data']
    send_submit(af_job['id'])
    on_off_control(af_job['id'], job_info.trigger_status)

def datax_put_task(job_info: models.JobInfo, old_af_task):
    """Rebuild the task payload from job_info and push it over the existing task."""
    cmd_parameter = get_cmd_parameter(job_info.jvm_param)
    partition_list = []
    if job_info.partition_info is not None and job_info.partition_info != '':
        # same "word,diff,format" layout as in datax_create_task
        partition_list = job_info.partition_info.split(',')
    envs = {}
    if job_info.inc_start_time and job_info.last_time and len(partition_list) > 0 and job_info.current_time:
        envs = {
            "first_begin_time": job_info.inc_start_time,
            "last_key": job_info.last_time,
            "current_key": job_info.current_time,
            "partition_key": "partition",
            "partition_word": partition_list[0] if len(partition_list) > 0 else '',
            "partition_format": partition_list[2] if len(partition_list) > 2 else '',
            "partition_diff": partition_list[1] if len(partition_list) > 1 else '',
        }
    af_task = {
        "name": job_info.job_desc,
        "file_urls": [],
        "script": job_info.job_json,
        "cmd": "",
        "cmd_parameters": cmd_parameter,
        "envs": envs,
        "run_image": "",
    }
    res = send_put('/af/af_task', old_af_task['id'], af_task)
    af_task = res['data']
    return af_task

def datax_job_submit(job_info: models.JobInfo, db: Session):
    """Entry point: create the job on first submit, update it on later submits."""
    relation = crud.get_af_id(db, job_info.id, 'datax')
    if not relation:
        datax_create_job(job_info, db)
    else:
        datax_update_job(job_info, db)

def on_off_control(af_job_id: int, status: int):
    """Wait until the scheduler has parsed the job, then switch it on or off."""
    for i in range(0, 11):
        parsed_res = get_job_last_parsed_time(af_job_id)
        last_parsed_time = parsed_res['data']['last_parsed_time']
        if last_parsed_time:
            send_pause(af_job_id, status)
            print(f"{af_job_id}<==status updated successfully==>{last_parsed_time}")
            break
        if i >= 10:
            raise Exception(f"{af_job_id}==>failed to update status")
        time.sleep(2)
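

# Usage sketch (illustrative only, not part of the original module): callers are
# expected to go through datax_job_submit, which picks create vs. update based on
# whether a 'datax' relation already exists. The session factory and job id below
# are hypothetical placeholders.
#
#   from app.models.database import SessionLocal  # hypothetical session factory
#
#   db = SessionLocal()
#   job_info = db.query(models.JobInfo).filter(models.JobInfo.id == 1).first()
#   if job_info is not None:
#       datax_job_submit(job_info, db)
#   db.close()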