@@ -1,3 +1,4 @@
+import json
 from typing import List, Optional
 from fastapi import APIRouter
@@ -28,6 +29,7 @@ router = APIRouter(
 @sxtimeit
 def get_job_logs(job_id: int = None, params: Params=Depends(get_page), db: Session = Depends(get_db)):
     jm_job_list = []
+    # check whether a job filter was supplied
     if job_id is not None:
         jm_job_list = [crud.get_jm_job_info(db, job_id)]
     else:
@@ -35,84 +37,79 @@ def get_job_logs(job_id: int = None, params: Params=Depends(get_page), db: Sessi
     id_to_job = {job.id:job for job in jm_job_list}
     relations = crud.get_af_ids(db,id_to_job.keys(), 'job')
    af_to_datax = {relation.af_id:relation.se_id for relation in relations}
+    # fetch the run records for these jobs
     af_job_runs = crud.get_airflow_runs_by_af_job_ids(db, af_to_datax.keys())
+    # sort runs by start time, newest first
     af_job_runs.sort(key=lambda x: x.start_time, reverse=True)
     total = len(af_job_runs)
+    # apply pagination
     af_job_runs = af_job_runs[(params['page'] - 1) * params['size']:params['page'] * params['size']]
     res = []
     for af_job_run in af_job_runs:
-        tasks = list(af_job_run.details['tasks'].values()) if len(list(af_job_run.details['tasks'].values()))>0 else []
-        if len(tasks) > 0:
-            task = tasks[-1]
-            task.pop('log',None)
-        job_id = af_to_datax[int(af_job_run.job_id)]
-        execute_result = None
-        if af_job_run.status <= 1:
-            run_status = get_job_run_status(af_job_run.id)
-            execute_result = run_status['data']['status']
-        log = {
-            "id": af_job_run.id,
-            "job_id": job_id,
-            "job_name": id_to_job[job_id].name,
-            "job_type": id_to_job[job_id].type,
-            "job_tag": id_to_job[job_id].tag,
-            "af_job_id": int(af_job_run.job_id),
-            "run_id": af_job_run.af_run_id,
-            "trigger_time": af_job_run.start_time,
-            "trigger_result": 1,
-            "execute_time": task['start_time'] if task else 0,
-            "execute_result": execute_result if execute_result else af_job_run.status,
-            "end_time": task['end_time'] if task else 0,
-        }
-        res.append(log)
-    return page_help(res,params['page'],params['size'],total)
-
-@router.get("/logs")
-@web_try()
-@sxtimeit
-def get_job_log_once(run_id: str, db: Session = Depends(get_db)):
-    af_job_run = crud.get_airflow_run_once(db, run_id)
-    tasks = list(af_job_run.details['tasks'].values()) if len(list(af_job_run.details['tasks'].values()))>0 else []
-    res = []
-    for task in tasks:
+        job_id = af_to_datax[int(af_job_run.job_id)]
+        execute_result = None
+        # if this run has not finished, fetch its current status
+        if af_job_run.status <= 1:
+            run_status = get_job_run_status(af_job_run.id)
+            execute_result = run_status['data']['status']
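+        # build one log entry for this run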
         log = {
             "id": af_job_run.id,
+ "job_id": job_id,
|
|
|
|
+ "job_name": id_to_job[job_id].name,
|
|
|
|
+ "job_type": id_to_job[job_id].type,
|
|
|
|
+ "job_tag": id_to_job[job_id].tag,
|
|
"af_job_id": int(af_job_run.job_id),
|
|
"af_job_id": int(af_job_run.job_id),
|
|
"run_id": af_job_run.af_run_id,
|
|
"run_id": af_job_run.af_run_id,
|
|
- "trigger_time": af_job_run.start_time,
|
|
|
|
- "trigger_result": 1,
|
|
|
|
- "execute_time": task['start_time'] if task else 0,
|
|
|
|
- "execute_result": af_job_run.status,
|
|
|
|
- "end_time": task['end_time'] if task else 0,
|
|
|
|
- "log": task['log'] if task else None
|
|
|
|
|
|
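+            # fall back to the stored run status when no live status was fetched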
+ "start_time": af_job_run.start_time,
|
|
|
|
+ "result": execute_result if execute_result else af_job_run.status,
|
|
         }
         res.append(log)
-    res.sort(key=lambda x: x['trigger_time'], reverse=True)
-    return res
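+    # return the current page together with the total row count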
+    return page_help(res, params['page'], params['size'], total)
@router.get("/all_task")
|
|
@router.get("/all_task")
|
|
@web_try()
|
|
@web_try()
|
|
@sxtimeit
|
|
@sxtimeit
|
|
def get_job_all_task(run_id: str, db: Session = Depends(get_db)):
|
|
def get_job_all_task(run_id: str, db: Session = Depends(get_db)):
|
|
af_job_run = crud.get_airflow_run_once(db, run_id)
|
|
af_job_run = crud.get_airflow_run_once(db, run_id)
|
|
-    af_job_id = af_job_run.job_id
-    af_job = crud.get_airflow_job_once(db, af_job_id)
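+    # look up the Airflow job definition and its task list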
+    af_job = crud.get_airflow_job_once(db, af_job_run.job_id)
+    tasks = list(af_job.tasks) if len(list(af_job.tasks)) > 0 else []
     res = []
-    for task in af_job.tasks:
-        task.update({
-            'job_id':af_job_id,
-            'af_run_id':af_job_run.af_run_id,
-            'task_id':task['id'],
-        })
-        task_log_res = get_task_log(af_job_id, af_job_run.af_run_id, task['id'])
-        task_log = task_log_res['data'] if 'data' in task_log_res else None
-        if task_log:
-            task.update({
-                'execute_result':task_log['status'] if 'status' in task_log else None,
-                'execute_time':task_log['start_time'] if 'start_time' in task_log else None,
-                'log': task_log['log'] if 'log' in task_log else None
-            })
-        res.append(task)
+    for task in tasks:
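+        # "sparks" tasks expand into sub-nodes; report one entry per sub-node,
+        # keyed by the composed id "<task_id>_<node_id>"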
+        if task['task_type'] == 'sparks':
+            task_script = json.loads(task['script'])
+            for node in task_script['sub_nodes']:
+                task_id = str(task['id']) + '_' + node['id']
+                log = {
+                    'name': task['name'] + '-' + node['name'],
+                    'job_id': af_job.id,
+                    'af_run_id': af_job_run.af_run_id,
+                    'task_id': task_id,
+                }
+                task_log_res = get_task_log(af_job.id, af_job_run.af_run_id, task_id)
+                task_log = task_log_res['data'] if 'data' in task_log_res else None
+                if task_log:
+                    log.update({
+                        'execute_result': RUN_STATUS[task_log['status']] if task_log['status'] else 0,
+                        'execute_time': task_log['start_time'],
+                        'log': task_log['log']
+                    })
+                res.append(log)
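+        # plain task: a single log entry keyed by the task id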
+        else:
+            log = {
+                'name': task['name'],
+                'job_id': af_job.id,
+                'af_run_id': af_job_run.af_run_id,
+                'task_id': task['id'],
+            }
+            task_log_res = get_task_log(af_job.id, af_job_run.af_run_id, task['id'])
+            task_log = task_log_res['data'] if 'data' in task_log_res else None
+            if task_log:
+                log.update({
+                    'execute_result': RUN_STATUS[task_log['status']] if task_log['status'] else 0,
+                    'execute_time': task_log['start_time'],
+                    'log': task_log['log']
+                })
+            res.append(log)
     return res
@@ -121,7 +118,8 @@ def get_job_all_task(run_id: str, db: Session = Depends(get_db)):
 @sxtimeit
 def get_job_task_log(job_id: str, af_run_id: str, task_id: str, db: Session = Depends(get_db)):
     res = get_task_log(job_id, af_run_id, task_id)
-    return res['data']
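+    # tolerate a response without a "data" field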
+    log = res['data'] if 'data' in res else None
+    return log
@router.get("/logs_status/{ids}")
|
|
@router.get("/logs_status/{ids}")
|
|
@web_try()
|
|
@web_try()
|