@@ -10,7 +10,6 @@ from pydantic import BaseModel
 from sqlalchemy.orm import Session
 from app import schemas, get_db, crud
 from app.core.airflow.af_util import get_airflow_api_info, call_airflow_api, datetime2timestamp
-from app.core.k8s.k8s_client import KubernetesTools
 from utils import web_try, sxtimeit
 
 
@@ -133,10 +132,11 @@ def add_notification(item: Item, db: Session = Depends(get_db)):
     if run is not None:
         update_run = schemas.AirflowRunUpdate(
             **{"details": run.details, "status": run.status, "af_run_id": item.data['af_run_id']})
-        update_run.details['tasks'][item.data['task_id']] = {"log": logs,
-                                                             "start_time": item.data["start_time"],
-                                                             "end_time": item.data["end_time"],
-                                                             "status": item.data['status']}
+        update_run.details['tasks'][item.data['task_id']].update({"log": logs,
+                                                                  "start_time": item.data["start_time"],
+                                                                  "end_time": item.data["end_time"],
+                                                                  "status": item.data['status']})
         crud.update_airflow_run(db=db, item_id=run.id, update_item=update_run)
 
 
@@ -168,22 +167,17 @@ def get_airflow_dagrun(job_id: int, af_run_id: str, db: Session = Depends(get_db
 @router_af_run.get("/running_status/{job_id}/{af_run_id}")
 @web_try()
 @sxtimeit
-def get_airflow_dagrun_running_status(job_id: int, af_run_id: str, db: Session = Depends(get_db)):
-    job_info = call_airflow_api(method='get', uri=f'dags/dag_{job_id}/dagRuns/{af_run_id}', args_dict={})
-    tasks_info = call_airflow_api(method='get', uri=f'dags/dag_{job_id}/dagRuns/{af_run_id}/taskInstances',
-                                  args_dict={})
-
-    details = defaultdict(dict)
-    for task in tasks_info.json()['task_instances']:
-        details['tasks'][task['task_id']] = {
-            # "log": logs,
-            "start_time": datetime2timestamp(task['start_date']),
-            "end_time": datetime2timestamp(task['end_date']),
-            "status": task['state']
-        }
-        # print(f"{task['task_id']}:{task['duration']}")
-
-
+def get_airflow_dagrun_running_status(job_id: int, af_run_id: str):
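+    # Run-level status only; per-task details are served by the /task_log endpoint below.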
+    job_uri = f'dags/dag_{job_id}/dagRuns/{af_run_id}'
+    job_ret = call_airflow_api(method='get', uri=job_uri, args_dict={})
+    if job_ret.status_code != 200:
+        raise Exception(f"can't find this job run, please check your input: job uri is {job_uri}")
+    return {
+        "start_time": datetime2timestamp(job_ret.json()['start_date']),
+        "end_time": datetime2timestamp(job_ret.json()['end_date']),
+        "status": job_ret.json()['state']
+    }
 
 
 @router_af_run.get("/task_log/{job_id}/{af_run_id}/{task_id}")
@@ -206,11 +199,15 @@ def get_airflow_dagrun_task_log(job_id: int, af_run_id: str, task_id: str, db: S
     state_ret = call_airflow_api(method='get', uri=state_uri, args_dict={})
     log_ret = call_airflow_api(method='get', uri=log_uri, args_dict={})
     if state_ret.status_code != 200 or log_ret.status_code != 200:
-        raise Exception(f'cant found the information of this task,please check your input.log uri is {log_uri} ')
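+        # A missing task is no longer fatal; returning None lets the caller treat the log as "not available yet".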
+        return None
     update_run = schemas.AirflowRunUpdate(
         **{"details": run.details, "status": run.status, "af_run_id": af_run_id})
+    print(f'state is {state_ret.json()}')
     task_info = {
-        "log": log_ret.text, "status": state_ret.json()['state'],
+        "log": log_ret.text,
+        "status": state_ret.json()['state'],
+        "execution_time": datetime2timestamp(state_ret.json()['execution_date']),
         "start_time": datetime2timestamp(state_ret.json()['start_date']),
         "end_time": datetime2timestamp(state_ret.json()['end_date']),
     }
@@ -219,3 +215,21 @@ def get_airflow_dagrun_task_log(job_id: int, af_run_id: str, task_id: str, db: S
         return task_info
     else:
         return run.details['tasks'][task_id]
+
+
+@router_af_run.get("/data_transfer_log/{af_run_id}")
+@web_try()
+@sxtimeit
+def get_airflow_data_transfer_log(af_run_id: str):
+    state_uri = f"dags/dag_0/dagRuns/{af_run_id}/taskInstances/0"
+    log_uri = f"{state_uri}/logs/1"
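+    # NOTE: dag_0, task instance "0" and log try 1 appear to be the fixed coordinates of the data-transfer DAG.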
+    state_ret = call_airflow_api(method='get', uri=state_uri, args_dict={})
+    log_ret = call_airflow_api(method='get', uri=log_uri, args_dict={})
+    return {
+        "log": log_ret.text,
+        "status": state_ret.json()['state'],
+        "execution_time": datetime2timestamp(state_ret.json()['execution_date']),
+        "start_time": datetime2timestamp(state_ret.json()['start_date']),
+        "end_time": datetime2timestamp(state_ret.json()['end_date']),
+    }