# dag.py — DAG management API routes (execute / debug status / node log / node result)
  1. import json
  2. from sqlalchemy.orm import Session
  3. from fastapi import Depends
  4. from fastapi import APIRouter
  5. from app.common.decorators import verify_all
  6. from app.services.dag import dag_job_submit, get_tmp_table_name
  7. from app import crud, models, schemas
  8. from app.utils.send_util import get_job_run_status, get_task_log
  9. from constants.constants import RUN_STATUS
  10. from utils.sx_time import sxtimeit
  11. from utils.sx_web import web_try
  12. from app.common.hive import hiveDs
  13. from app import get_db
  14. router = APIRouter(
  15. prefix="/jpt/dag",
  16. tags=["dag-dag管理"],
  17. )
  18. # , dependencies=[Depends(verify_all)]
  19. @router.post("/execute")
  20. @web_try()
  21. @sxtimeit
  22. def execute_dag(dag: schemas.Dag, db: Session = Depends(get_db)):
  23. af_job = dag_job_submit(dag.dag_uuid, dag.dag_script,db)
  24. return af_job
  25. @router.get("/debug_execute")
  26. @web_try()
  27. @sxtimeit
  28. def debug_execute(dag_uuid: str, db: Session = Depends(get_db)):
  29. relation = crud.get_dag_af_id(db,dag_uuid, 'debug')
  30. if relation is None:
  31. return False
  32. return True
  33. @router.get("/debug_status")
  34. @web_try()
  35. @sxtimeit
  36. def get_dag_debug_status(dag_uuid: str, db: Session = Depends(get_db)):
  37. relation = crud.get_dag_af_id(db,dag_uuid, 'debug')
  38. af_job_run = crud.get_airflow_run_once_debug_mode(db,relation.af_id)
  39. job_run_res = get_job_run_status(af_job_run.id)
  40. job_run_status = job_run_res['data']['status']
  41. af_job = crud.get_airflow_job_once(db, af_job_run.job_id)
  42. task = list(af_job.tasks)[0] if len(list(af_job.tasks))>0 else None
  43. nodes = {}
  44. if task:
  45. task_script = json.loads(task['script'])
  46. for node in task_script['sub_nodes']:
  47. task_id = str(task['id'])+'_'+node['id']
  48. task_log_res = get_task_log(af_job.id, af_job_run.af_run_id, task_id)
  49. task_log = task_log_res['data'] if 'data' in task_log_res.keys() else None
  50. if task_log:
  51. nodes.update({node['id']:task_log['status'] if task_log['status'] else 'running'})
  52. res = {
  53. "job":job_run_status if job_run_status else af_job_run.status,
  54. "nodes": nodes
  55. }
  56. return res
  57. @router.get("/node_log")
  58. @web_try()
  59. @sxtimeit
  60. def get_dag_debug_status(dag_uuid: str, node_id: str,db: Session = Depends(get_db)):
  61. relation = crud.get_dag_af_id(db,dag_uuid, 'debug')
  62. af_job_run = crud.get_airflow_run_once_debug_mode(db,relation.af_id)
  63. af_job = crud.get_airflow_job_once(db, af_job_run.job_id)
  64. task = list(af_job.tasks)[0] if len(list(af_job.tasks))>0 else None
  65. if task:
  66. task_id = str(task['id'])+'_'+node_id
  67. task_log_res = get_task_log(af_job.id, af_job_run.af_run_id, task_id)
  68. task_log = task_log_res['data'] if 'data' in task_log_res.keys() else None
  69. if task_log:
  70. return task_log['log']
  71. return None
  72. @router.get("/node_result")
  73. @web_try()
  74. @sxtimeit
  75. def get_dag_debug_result(dag_uuid: str,node_id: str,out_pin: int ,db: Session = Depends(get_db)):
  76. table_name = get_tmp_table_name(dag_uuid, node_id, str(out_pin), db)
  77. result = hiveDs.get_preview_data(table_name,500)
  78. return result