# datax.py
  1. import time
  2. from app import crud, models
  3. from app.utils.send_util import *
  4. from app.utils.utils import get_cmd_parameter
  5. from sqlalchemy.orm import Session
  6. def datax_create_job(job_info: models.JobInfo, db: Session):
  7. af_task = datax_create_task(job_info)
  8. cron: str = job_info.job_cron
  9. cron.replace('?','*')
  10. af_job = {
  11. "tasks": [af_task],
  12. "name": job_info.job_desc,
  13. "dependence": [],
  14. "cron": cron,
  15. "desc": job_info.job_desc,
  16. "route_strategy": job_info.executor_route_strategy,
  17. "block_strategy": job_info.executor_block_strategy,
  18. "executor_timeout": job_info.executor_timeout,
  19. "executor_fail_retry_count": job_info.executor_fail_retry_count,
  20. "trigger_status": job_info.trigger_status,
  21. "job_mode":1,
  22. "job_type": 0,
  23. "user_id": 0,
  24. }
  25. res = send_post('/af/af_job', af_job)
  26. af_job = res['data']
  27. send_submit(af_job['id'])
  28. return af_job
  29. def datax_create_task(job_info: models.JobInfo):
  30. cmd_parameter = get_cmd_parameter(job_info.jvm_param)
  31. partition_list = []
  32. if job_info.partition_info is not None and job_info.partition_info != '':
  33. partition_list = job_info.partition_info.split(',')
  34. envs = {}
  35. if job_info.inc_start_time and job_info.last_time and len(partition_list) > 0 and job_info.current_time:
  36. envs = {
  37. "first_begin_time": job_info.inc_start_time,
  38. "last_key": job_info.last_time,
  39. "current_key": job_info.current_time,
  40. "partition_key": "partition",
  41. "partition_word": partition_list[0] if len(partition_list) > 0 else '',
  42. "partition_format": partition_list[2] if len(partition_list) > 0 else '',
  43. "partition_diff": partition_list[1] if len(partition_list) > 0 else ''
  44. }
  45. af_task = {
  46. "name": job_info.job_desc,
  47. "file_urls": [],
  48. "script": job_info.job_json,
  49. "cmd": "",
  50. "cmd_parameters": cmd_parameter,
  51. "envs": envs,
  52. "run_image": "",
  53. "task_type": "datax",
  54. "user_id": 0,
  55. }
  56. res = send_post('/af/af_task', af_task)
  57. af_task = res['data']
  58. return af_task
  59. def datax_update_job(job_info: models.JobInfo, db: Session):
  60. relation = crud.get_af_id(db, job_info.id, 'datax')
  61. af_job_id = relation.af_id
  62. res = send_get("/af/af_job/getOnce",af_job_id)
  63. old_af_job = res['data']
  64. old_af_task = old_af_job['tasks'][0]
  65. af_task = datax_put_task(job_info,old_af_task)
  66. cron: str = job_info.job_cron
  67. cron.replace('?','*')
  68. af_job = {
  69. "tasks": [af_task],
  70. "name": job_info.job_desc,
  71. "dependence": [],
  72. "cron": cron,
  73. "desc": job_info.job_desc,
  74. "route_strategy": job_info.executor_route_strategy,
  75. "block_strategy": job_info.executor_block_strategy,
  76. "executor_timeout": job_info.executor_timeout,
  77. "executor_fail_retry_count": job_info.executor_fail_retry_count,
  78. "trigger_status": job_info.trigger_status,
  79. }
  80. res = send_put('/af/af_job', old_af_job['id'], af_job)
  81. af_job = res['data']
  82. send_submit(af_job['id'])
  83. return af_job
  84. def datax_put_task(job_info: models.JobInfo,old_af_task):
  85. cmd_parameter = get_cmd_parameter(job_info.jvm_param)
  86. partition_list = []
  87. if job_info.partition_info is not None and job_info.partition_info != '':
  88. partition_list = job_info.partition_info.split(',')
  89. envs = {}
  90. if job_info.inc_start_time and job_info.last_time and len(partition_list) > 0 and job_info.current_time:
  91. envs = {
  92. "first_begin_time": job_info.inc_start_time,
  93. "last_key": job_info.last_time,
  94. "current_key": job_info.current_time,
  95. "partition_key": "partition",
  96. "partition_word": partition_list[0] if len(partition_list) > 0 else '',
  97. "partition_format": partition_list[2] if len(partition_list) > 0 else '',
  98. "partition_diff": partition_list[1] if len(partition_list) > 0 else ''
  99. }
  100. af_task = {
  101. "name": job_info.job_desc,
  102. "file_urls": [],
  103. "script": job_info.job_json,
  104. "cmd": "",
  105. "cmd_parameters": cmd_parameter,
  106. "envs": envs,
  107. "run_image": "",
  108. }
  109. res = send_put('/af/af_task', old_af_task['id'],af_task)
  110. af_task = res['data']
  111. return af_task
  112. def on_off_control(af_job_id: int,status: int):
  113. for i in range(0,11):
  114. parsed_res = get_job_last_parsed_time(af_job_id)
  115. last_parsed_time = parsed_res['data']['last_parsed_time']
  116. if last_parsed_time:
  117. send_pause(af_job_id,status)
  118. print(f"{af_job_id}<==状态修改成功==>{last_parsed_time}")
  119. break
  120. if i >= 10:
  121. raise Exception(f"{af_job_id}==>状态修改失败")
  122. time.sleep(2)
  123. def execute_job(af_job_id: int):
  124. current_time = int(time.time())
  125. send_submit(af_job_id)
  126. for i in range(0,21):
  127. parsed_res = get_job_last_parsed_time(af_job_id)
  128. last_parsed_time = parsed_res['data']['last_parsed_time']
  129. if last_parsed_time and int(last_parsed_time) > current_time:
  130. res = send_execute(af_job_id)
  131. print(f"{af_job_id}<==任务执行成功==>{last_parsed_time}")
  132. return res
  133. if i >= 20:
  134. raise Exception(f"{af_job_id}==>文件正在转化中")
  135. time.sleep(2)