|
@@ -24,50 +24,57 @@ router = APIRouter(
|
|
|
@web_try()
|
|
|
@sxtimeit
|
|
|
def get_job_logs(job_id: int = None, db: Session = Depends(get_db)):
|
|
|
- jm_job_infos = []
|
|
|
+ jm_job_list = []
|
|
|
if job_id is not None:
|
|
|
- jm_job_infos = [crud.get_jm_job_info(db, job_id)]
|
|
|
+ jm_job_list = [crud.get_jm_job_info(db, job_id)]
|
|
|
else:
|
|
|
- jm_job_infos = crud.get_jm_job_infos(db)
|
|
|
- job_id_to_job = {jm_job.id:jm_job for jm_job in jm_job_infos}
|
|
|
- jm_job_id_list = job_id_to_job.keys()
|
|
|
- job_history_list = crud.get_historys_by_job_ids(db,jm_job_id_list)
|
|
|
+ jm_job_list = crud.get_jm_job_infos(db)
|
|
|
+ id_to_job = {job.id:job for job in jm_job_list}
|
|
|
+ relations = crud.get_af_ids(db,id_to_job.keys(), 'job')
|
|
|
+ af_to_datax = {relation.af_id:relation.se_id for relation in relations}
|
|
|
+ af_job_runs = crud.get_airflow_runs_by_af_job_ids(db, af_to_datax.keys())
|
|
|
res = []
|
|
|
- for job_history in job_history_list:
|
|
|
- jm_job = job_id_to_job[job_history.job_id]
|
|
|
- job_history_dict = job_history.__dict__
|
|
|
- job_history_dict.update({"job_name":jm_job.name})
|
|
|
- job_history_dict.update({"job_type":jm_job.type})
|
|
|
- job_history_dict.update({"job_tag":jm_job.tag})
|
|
|
- res.append(job_history_dict)
|
|
|
+ for af_job_run in af_job_runs:
|
|
|
+ tasks = list(af_job_run.details['tasks'].values()) if len(list(af_job_run.details['tasks'].values()))>0 else []
|
|
|
+ task = tasks[-1] if len(tasks) > 0 else None
|
|
|
+ if task:
|
|
|
+ task.pop('log',None)
|
|
|
+ job_id = af_to_datax[int(af_job_run.job_id)]
|
|
|
+ log = {
|
|
|
+ "id": af_job_run.id,
|
|
|
+ "job_id": job_id,
|
|
|
+ "job_name": id_to_job[job_id].name,
|
|
|
+ "job_type": id_to_job[job_id].type,
|
|
|
+ "job_tag": id_to_job[job_id].tag,
|
|
|
+ "af_job_id": int(af_job_run.job_id),
|
|
|
+ "run_id": af_job_run.run_id,
|
|
|
+ "trigger_time": af_job_run.start_time,
|
|
|
+ "trigger_result": 1 if task else 0,
|
|
|
+ "execute_time": task['start_time'] if task else 0,
|
|
|
+ "execute_result": 1 if task and task['status'] == 'success' else 0,
|
|
|
+ "end_time": task['end_time'] if task else 0,
|
|
|
+ }
|
|
|
+ res.append(log)
|
|
|
return res
|
|
|
|
|
|
@router.get("/logs")
|
|
|
@web_try()
|
|
|
@sxtimeit
|
|
|
-def get_job_logs(job_history_id: int,db: Session = Depends(get_db)):
|
|
|
- job_history_info = crud.get_jm_job_history_info(db,job_history_id)
|
|
|
- job_info = crud.get_jm_job_info(db,job_history_info.job_id)
|
|
|
- job_logs = crud.get_jm_job_logs_by_history_id(db,job_history_id)
|
|
|
- if len(job_logs) <= 0:
|
|
|
- raise Exception("未找到该任务此次运行的日志")
|
|
|
- if job_info.type == '单作业离线任务':
|
|
|
- return {
|
|
|
- 'job_type': job_info.type,
|
|
|
- 'logs': job_logs,
|
|
|
+def get_job_logs(run_id: str, job_id: int, db: Session = Depends(get_db)):
|
|
|
+ af_job_run = crud.get_airflow_run_once(db, run_id, job_id)
|
|
|
+ tasks = list(af_job_run.details['tasks'].values()) if len(list(af_job_run.details['tasks'].values()))>0 else []
|
|
|
+ res = []
|
|
|
+ for task in tasks:
|
|
|
+ log = {
|
|
|
+ "id": af_job_run.id,
|
|
|
+ "af_job_id": int(af_job_run.job_id),
|
|
|
+ "run_id": af_job_run.run_id,
|
|
|
+ "trigger_time": af_job_run.start_time,
|
|
|
+ "trigger_result": 1 if task else 0,
|
|
|
+ "execute_time": task['start_time'] if task else 0,
|
|
|
+ "execute_result": 1 if task and task['status'] == 'success' else 0,
|
|
|
+ "end_time": task['end_time'] if task else 0,
|
|
|
+ "log": task['log'] if task else None
|
|
|
}
|
|
|
- res = {}
|
|
|
- for job_log in job_logs:
|
|
|
- if job_log.homework_id in res.keys():
|
|
|
- res[job_log.homework_id]['nodes'].append(job_log)
|
|
|
- else:
|
|
|
- res.update({job_log.homework_id:{
|
|
|
- "homework_name":job_log.homework_name,
|
|
|
- "nodes": [job_log]
|
|
|
- }})
|
|
|
-
|
|
|
- logs = [v for k, v in res.items()]
|
|
|
- return {
|
|
|
- 'job_type': job_info.type,
|
|
|
- 'logs': logs,
|
|
|
- }
|
|
|
+ res.append(log)
|
|
|
+ return res
|