# dag.py — DAG management API routes
  1. import json
  2. from sqlalchemy.orm import Session
  3. from fastapi import Depends
  4. from fastapi import APIRouter
  5. from app.services.dag import dag_job_submit, get_tmp_table_name
  6. from app import crud, models, schemas
  7. from app.utils.send_util import get_job_run_status, get_task_log
  8. from constants.constants import RUN_STATUS
  9. from utils.sx_time import sxtimeit
  10. from utils.sx_web import web_try
  11. from app.common.hive import hiveDs
  12. from app import get_db
  13. router = APIRouter(
  14. prefix="/jpt/dag",
  15. tags=["dag-dag管理"],
  16. )
  17. @router.post("/execute")
  18. @web_try()
  19. @sxtimeit
  20. def execute_dag(dag: schemas.Dag,db: Session = Depends(get_db)):
  21. af_job = dag_job_submit(dag.dag_uuid, dag.dag_script,db)
  22. return af_job
  23. @router.get("/debug_status")
  24. @web_try()
  25. @sxtimeit
  26. def get_dag_debug_status(dag_uuid: str, db: Session = Depends(get_db)):
  27. relation = crud.get_dag_af_id(db,dag_uuid, 'debug')
  28. af_job_run = crud.get_airflow_run_once_debug_mode(db,relation.af_id)
  29. job_run_res = get_job_run_status(af_job_run.id)
  30. job_run_status = job_run_res['data']['status']
  31. af_job = crud.get_airflow_job_once(db, af_job_run.job_id)
  32. task = list(af_job.tasks)[0] if len(list(af_job.tasks))>0 else None
  33. nodes = {}
  34. if task:
  35. task_script = json.loads(task['script'])
  36. for node in task_script['sub_nodes']:
  37. task_id = str(task['id'])+'_'+node['id']
  38. task_log_res = get_task_log(af_job.id, af_job_run.af_run_id, task_id)
  39. task_log = task_log_res['data'] if 'data' in task_log_res.keys() else None
  40. if task_log:
  41. nodes.update({node['id']:task_log['status'] if task_log['status'] else 'running'})
  42. res = {
  43. "job":job_run_status if job_run_status else af_job_run.status,
  44. "nodes": nodes
  45. }
  46. return res
  47. @router.get("/node_log")
  48. @web_try()
  49. @sxtimeit
  50. def get_dag_debug_status(dag_uuid: str, node_id: str,db: Session = Depends(get_db)):
  51. relation = crud.get_dag_af_id(db,dag_uuid, 'debug')
  52. af_job_run = crud.get_airflow_run_once_debug_mode(db,relation.af_id)
  53. af_job = crud.get_airflow_job_once(db, af_job_run.job_id)
  54. task = list(af_job.tasks)[0] if len(list(af_job.tasks))>0 else None
  55. if task:
  56. task_id = str(task['id'])+'_'+node_id
  57. task_log_res = get_task_log(af_job.id, af_job_run.af_run_id, task_id)
  58. task_log = task_log_res['data'] if 'data' in task_log_res.keys() else None
  59. if task_log:
  60. return task_log['log']
  61. return None
  62. @router.get("/node_result")
  63. @web_try()
  64. @sxtimeit
  65. def get_dag_debug_result(dag_uuid: str,node_id: str,out_pin: int ,db: Session = Depends(get_db)):
  66. table_name = get_tmp_table_name(dag_uuid, node_id, str(out_pin), db)
  67. result = hiveDs.get_preview_data(table_name,500)
  68. return result