@@ -9,7 +9,7 @@ from fastapi_pagination import paginate, Params
 from pydantic import BaseModel
 from sqlalchemy.orm import Session
 from app import schemas, get_db, crud
-from app.core.airflow.uri import get_airflow_api_info, call_airflow_api
+from app.core.airflow.af_util import get_airflow_api_info, call_airflow_api, datetime2timestamp
 from app.core.k8s.k8s_client import KubernetesTools
 from utils import web_try, sxtimeit
@@ -148,7 +148,7 @@ def get_airflow_dagrun(job_id: int, af_run_id: str, db: Session = Depends(get_db
     for task in ret.json()['task_instances']:
         details['tasks'][task['task_id']] = {
             # "log": logs,
-            "start_time": datetime.datetime.strptime(task['start_date'], '%Y-%m-%dT%H:%M:%S.%f%z').timestamp(),
+            "start_time": datetime.datetime.strptime(task['start_date'], '%Y-%m-%dT%H:%M:%S.%f%z').timestamp(),
             "end_time": datetime.datetime.strptime(task['end_date'], '%Y-%m-%dT%H:%M:%S.%f%z').timestamp(),
             "status": task['state']
         }
@@ -160,20 +160,38 @@ def get_airflow_dagrun(job_id: int, af_run_id: str, db: Session = Depends(get_db
 @web_try()
 @sxtimeit
 def get_airflow_dagrun_running_status(job_id: int, af_run_id: str, db: Session = Depends(get_db)):
-    ret = call_airflow_api(method='get', uri=f'dags/dag_{job_id}/dagRuns/{af_run_id}', args_dict={})
-    ret = call_airflow_api(method='get', uri=f'dags/dag_{job_id}/dagRuns/{af_run_id}/taskInstances', args_dict={})
+    job_info = call_airflow_api(method='get', uri=f'dags/dag_{job_id}/dagRuns/{af_run_id}', args_dict={})
+    tasks_info = call_airflow_api(method='get', uri=f'dags/dag_{job_id}/dagRuns/{af_run_id}/taskInstances', args_dict={})
+
+
+    details = defaultdict(dict)
+    for task in tasks_info.json()['task_instances']:
+        details['tasks'][task['task_id']] = {
+            # "log": logs,
+            "start_time": datetime2timestamp(task['start_date']),
+            "end_time": datetime2timestamp(task['end_date']),
+            "status": task['state']
+        }
+        # print(f"{task['task_id']}:{task['duration']}")
 
-    # details = defaultdict(dict)
-
-    # for task in ret.json()['task_instances']:
-    #     details['tasks'][task['task_id']] = {
-    #         # "log": logs,
-    #         "start_time": datetime.datetime.strptime(task['start_date'], '%Y-%m-%dT%H:%M:%S.%f%z').timestamp(),
-    #         "end_time": datetime.datetime.strptime(task['end_date'], '%Y-%m-%dT%H:%M:%S.%f%z').timestamp(),
-    #         "status": task['state']
-    #     }
-    #     # print(f"{task['task_id']}:{task['duration']}")
-    return ret.json()
+    # item = schemas.AirflowRunUpdate(**{#"start_time": item.data["start_time"],
+    #                                     #"job_id": int(job_id),
+    #                                     # "run_ts": item.data['run_ts'],
+    #                                     # "af_run_id": item.data['af_run_id'],
+    #                                     "end_time":datetime2timestamp()
+    #                                     "details": {"tasks": {}, "dependence": {"tasks": job_item.dependence,
+    #                                                                             "sparks": sparks_dependence}},
+    #                                     "status": 0},
+
+    # item = schemas.AirflowRunCreate(**{"start_time": item.data["start_time"],
+    #                                    "job_id": int(job_id),
+    #                                    # "run_ts": item.data['run_ts'],
+    #                                    # "af_run_id": item.data['af_run_id'],
+    #                                    "details": {"tasks": {}, "dependence": {"tasks": job_item.dependence,
+    #                                                                            "sparks": sparks_dependence}},
+    #                                    "status": 0},
+
+    # return ret.json()
 
 
 @router_af_run.get("/task_log/{job_id}/{af_run_id}/{task_id}")
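
Note: the datetime2timestamp helper imported above from app.core.airflow.af_util is not part of this diff. A minimal sketch of what it is assumed to do, mirroring the strptime(...).timestamp() call it replaces:

    # Minimal sketch (assumption): the af_util helper used above. It reproduces the
    # datetime.datetime.strptime(...).timestamp() conversion of the old code and,
    # as an added assumption, tolerates None for tasks that have not run yet.
    import datetime
    from typing import Optional

    def datetime2timestamp(date_str: Optional[str]) -> Optional[float]:
        """Convert an Airflow REST API datetime string to a Unix timestamp."""
        if date_str is None:
            return None
        return datetime.datetime.strptime(date_str, '%Y-%m-%dT%H:%M:%S.%f%z').timestamp()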