datax.py

"""Helpers that create, update and control DataX jobs through the /af scheduler API."""
import time

from sqlalchemy.orm import Session

from app import crud, models
from app.utils.send_util import *  # send_post/send_put/send_get, send_submit, send_pause, send_execute, get_job_last_parsed_time
from app.utils.utils import get_cmd_parameter

def datax_create_job(job_info: models.JobInfo, db: Session):
    """Create an af job wrapping a single DataX task, then submit it to the scheduler."""
    af_task = datax_create_task(job_info)
    # Normalize the cron expression: the af scheduler does not accept '?'.
    cron: str = job_info.job_cron
    cron = cron.replace('?', '*')
    af_job = {
        "tasks": [af_task],
        "name": job_info.job_desc,
        "dependence": [],
        "cron": cron,
        "desc": job_info.job_desc,
        "route_strategy": job_info.executor_route_strategy,
        "block_strategy": job_info.executor_block_strategy,
        "executor_timeout": job_info.executor_timeout,
        "executor_fail_retry_count": job_info.executor_fail_retry_count,
        "trigger_status": job_info.trigger_status,
        "job_mode": 1,
        "job_type": 0,
        "user_id": 0,
    }
    res = send_post('/af/af_job', af_job)
    af_job = res['data']
    send_submit(af_job['id'])
    return af_job

def datax_create_task(job_info: models.JobInfo):
    """Create the af task that carries the DataX script plus incremental/partition envs."""
    cmd_parameter = get_cmd_parameter(job_info.jvm_param)
    # partition_info is a comma-separated triple: word, diff, format.
    partition_list = []
    if job_info.partition_info is not None and job_info.partition_info != '':
        partition_list = job_info.partition_info.split(',')
    # Default the incremental start time to "now" when none is configured.
    first_begin_time = int(time.time())
    if job_info.inc_start_time is not None and job_info.inc_start_time != '':
        first_begin_time = job_info.inc_start_time
    last_key = 'lastTime'
    if job_info.last_time is not None and job_info.last_time != '':
        last_key = job_info.last_time
    current_key = 'currentTime'
    if job_info.current_time is not None and job_info.current_time != '':
        current_key = job_info.current_time
    envs = {
        "first_begin_time": first_begin_time,
        "last_key": last_key,
        "current_key": current_key,
        "partition_key": "partition",
        "partition_word": partition_list[0] if len(partition_list) > 0 else '',
        "partition_format": partition_list[2] if len(partition_list) > 2 else '',
        "partition_diff": partition_list[1] if len(partition_list) > 1 else '',
    }
    af_task = {
        "name": job_info.job_desc,
        "file_urls": [],
        "script": job_info.job_json,
        "cmd": "",
        "cmd_parameters": cmd_parameter,
        "envs": envs,
        "run_image": "",
        "task_type": "datax",
        "user_id": 0,
    }
    res = send_post('/af/af_task', af_task)
    af_task = res['data']
    return af_task
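
# Example (illustrative values only): with partition_info = "dt,0,yyyy-MM-dd" the envs built
# above carry partition_word="dt", partition_diff="0" and partition_format="yyyy-MM-dd",
# alongside first_begin_time / last_key / current_key used for incremental extraction.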

def datax_update_job(job_info: models.JobInfo, db: Session):
    """Update the af job (and its single task) mapped to this DataX job, then re-submit it."""
    relation = crud.get_af_id(db, job_info.id, 'datax')
    af_job_id = relation.af_id
    res = send_get("/af/af_job/getOnce", af_job_id)
    old_af_job = res['data']
    old_af_task = old_af_job['tasks'][0]
    af_task = datax_put_task(job_info, old_af_task)
    # Normalize the cron expression: the af scheduler does not accept '?'.
    cron: str = job_info.job_cron
    cron = cron.replace('?', '*')
    af_job = {
        "tasks": [af_task],
        "name": job_info.job_desc,
        "dependence": [],
        "cron": cron,
        "desc": job_info.job_desc,
        "route_strategy": job_info.executor_route_strategy,
        "block_strategy": job_info.executor_block_strategy,
        "executor_timeout": job_info.executor_timeout,
        "executor_fail_retry_count": job_info.executor_fail_retry_count,
        "trigger_status": job_info.trigger_status,
    }
    res = send_put('/af/af_job', old_af_job['id'], af_job)
    af_job = res['data']
    send_submit(af_job['id'])
    return af_job

def datax_put_task(job_info: models.JobInfo, old_af_task):
    """Build the updated af task payload and PUT it over the existing task."""
    cmd_parameter = get_cmd_parameter(job_info.jvm_param)
    partition_list = []
    if job_info.partition_info is not None and job_info.partition_info != '':
        partition_list = job_info.partition_info.split(',')
    envs = {}
    # Incremental envs are only rebuilt when every incremental field is configured.
    if job_info.inc_start_time and job_info.last_time and len(partition_list) > 0 and job_info.current_time:
        envs = {
            "first_begin_time": job_info.inc_start_time,
            "last_key": job_info.last_time,
            "current_key": job_info.current_time,
            "partition_key": "partition",
            "partition_word": partition_list[0] if len(partition_list) > 0 else '',
            "partition_format": partition_list[2] if len(partition_list) > 2 else '',
            "partition_diff": partition_list[1] if len(partition_list) > 1 else '',
        }
    af_task = {
        "name": job_info.job_desc,
        "file_urls": [],
        "script": job_info.job_json,
        "cmd": "",
        "cmd_parameters": cmd_parameter,
        "envs": envs,
        "run_image": "",
    }
    res = send_put('/af/af_task', old_af_task['id'], af_task)
    af_task = res['data']
    return af_task

def on_off_control(af_job_id: int, status: int):
    """Toggle an af job on/off once its DAG has been parsed, polling for up to ~20 seconds."""
    for i in range(0, 11):
        parsed_res = get_job_last_parsed_time(af_job_id)
        last_parsed_time = parsed_res['data']['last_parsed_time']
        if last_parsed_time:
            send_pause(af_job_id, status)
            print(f"{af_job_id}<==status updated successfully==>{last_parsed_time}")
            break
        if i >= 10:
            raise Exception(f"{af_job_id}==>failed to update status")
        time.sleep(2)

def execute_job(af_job_id: int):
    """Re-submit an af job and trigger a run once the scheduler has re-parsed its DAG."""
    res = get_job_last_parsed_time(af_job_id)
    current_time = res['data'].get('last_parsed_time')
    send_submit(af_job_id)
    # Poll (up to ~40 seconds) until the parsed time moves past the previous value.
    for i in range(0, 21):
        parsed_res = get_job_last_parsed_time(af_job_id)
        last_parsed_time = parsed_res['data']['last_parsed_time']
        if last_parsed_time and last_parsed_time != current_time:
            res = send_execute(af_job_id)
            print(f"{af_job_id}<==job executed successfully==>{last_parsed_time}")
            return res
        if i >= 20:
            raise Exception(f"{af_job_id}==>the job file is still being parsed")
        time.sleep(2)
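
# Usage sketch (illustrative, not part of the service): these helpers are normally called
# from the CRUD/API layer. Assuming `job_info` is an existing models.JobInfo row and `db`
# is a live SQLAlchemy Session, a create -> enable -> run flow would look like:
#
#     af_job = datax_create_job(job_info, db)   # create the af job and submit its DAG
#     on_off_control(af_job['id'], 1)           # enable the job once the DAG is parsed
#     execute_job(af_job['id'])                 # trigger an immediate run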