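# Data source management routes: JDBC data sources, imported data lake tables,
# and AILab (Hive) tables for the current project.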
from typing import Optional

from fastapi import APIRouter, Depends
from fastapi_pagination import add_pagination, paginate, Params
from sqlalchemy.orm import Session

import app.crud as crud
from app import get_db, schemas
from app.common.decorators import verify_all, verify_special, verify_super_admin
from configs.globals import g
from configs.settings import config
from utils.sx_time import sxtimeit
from utils.sx_web import web_try

# Hive database that backs the built-in AILab data source.
DATABASE_NAME = config.get('HIVE', 'DATABASE_NAME')

router = APIRouter(
    prefix="/jpt/datasource",
    tags=["datasource-数据源管理"],
)


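# ---- JDBC data source connectivity and metadata ----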
@router.post("/test", dependencies=[Depends(verify_super_admin)])
@web_try()
@sxtimeit
def test_datasource_connection(ds: schemas.JobJdbcDatasourceCreate, db: Session = Depends(get_db)):
    return crud.test_datasource_connection(db, ds)


@router.post("/preview", dependencies=[Depends(verify_special)])
@web_try()
@sxtimeit
def get_preview_data(ds_id: int, table_name: str, limit: int = 100, db: Session = Depends(get_db)):
    return crud.get_preview_data(db, ds_id, table_name, limit)


@router.post("/table_names", description="Get all table names", dependencies=[Depends(verify_special)])
@web_try()
@sxtimeit
def get_table_names(ds_id: int, db: Session = Depends(get_db)):
    return crud.get_table_names(db, ds_id)


@router.post("/table_schema", description="Get table schema information", dependencies=[Depends(verify_special)])
@web_try()
@sxtimeit
def get_table_schema(ds_id: int, table_name: str, db: Session = Depends(get_db)):
    return crud.get_table_schema(db, ds_id, table_name)


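# ---- JDBC data source CRUD ----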
@router.post("/", dependencies=[Depends(verify_super_admin)])
@web_try()
@sxtimeit
def create_datasource(ds: schemas.JobJdbcDatasourceCreate, db: Session = Depends(get_db)):
    return crud.create_job_jdbc_datasource(db, ds)


@router.get("/", dependencies=[Depends(verify_special)])
@web_try()
@sxtimeit
def get_datasources(datasource_type: Optional[str] = None, params: Params = Depends(), db: Session = Depends(get_db)):
    return paginate(crud.get_job_jdbc_datasources(db, datasource_type), params)


@router.get("/info", dependencies=[Depends(verify_special)])
@web_try()
@sxtimeit
def get_datasources_info(ds_id: int, db: Session = Depends(get_db)):
    return crud.get_job_jdbc_datasources_info(db, ds_id)


@router.put("/{ds_id}", dependencies=[Depends(verify_super_admin)])
@web_try()
@sxtimeit
def update_datasource(ds_id: int, ds: schemas.JobJdbcDatasourceUpdate, db: Session = Depends(get_db)):
    return crud.update_job_jdbc_datasources(db, ds_id, ds)


@router.delete("/{ds_id}", dependencies=[Depends(verify_super_admin)])
@web_try()
@sxtimeit
def delete_job_jdbc_datasource(ds_id: int, db: Session = Depends(get_db)):
    return crud.delete_job_jdbc_datasource(db, ds_id)


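# ---- Data lake table import / update / delete ----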
@router.post("/import_datalake", dependencies=[Depends(verify_super_admin)])
@web_try()
@sxtimeit
def import_datalake(item: schemas.ImportDataLake, db: Session = Depends(get_db)):
    return crud.import_datalake(db, item)


@router.put("/update_datalake/{dl_id}", dependencies=[Depends(verify_super_admin)])
@web_try()
@sxtimeit
def update_datalake(dl_id: int, item: schemas.ImportDataLake, db: Session = Depends(get_db)):
    return crud.update_datalake(db, dl_id, item)


@router.delete("/delete_datalake/{dl_id}", dependencies=[Depends(verify_super_admin)])
@web_try()
@sxtimeit
def delete_datalake(dl_id: int, db: Session = Depends(get_db)):
    return crud.delete_datalake(db, dl_id)


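# ---- AILab (Hive) tables: sharing, creation, and read-only access ----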
@router.post("/share_ailab", dependencies=[Depends(verify_special)])
@web_try()
@sxtimeit
def share_ailab(item: schemas.ShareAilab, db: Session = Depends(get_db)):
    return crud.share_ailab(db, item)


@router.post("/create_table", dependencies=[Depends(verify_all)])
@web_try()
@sxtimeit
def create_table(item: schemas.CreateAilab, db: Session = Depends(get_db)):
    return crud.create_table(db, item)


@router.get("/ailab_source", dependencies=[Depends(verify_all)])
@web_try()
@sxtimeit
def get_ailab_source():
    # The built-in AILab Hive database is exposed as a pseudo data source with id -1.
    return [{
        'database_name': DATABASE_NAME,
        'datasource': "hive",
        'datasource_name': DATABASE_NAME,
        'id': -1,
    }]


@router.get("/ailab_table", dependencies=[Depends(verify_all)])
@web_try()
@sxtimeit
def get_ailab_table(db: Session = Depends(get_db)):
    return crud.get_ailab_table(db, g.project_id)


@router.get("/ailab_table_schema", dependencies=[Depends(verify_all)])
@web_try()
@sxtimeit
def get_ailab_table_schema(table_name: str, db: Session = Depends(get_db)):
    return crud.get_ailab_table_schema(db, table_name)


@router.get("/preview_ailab_table", dependencies=[Depends(verify_all)])
@web_try()
@sxtimeit
def get_preview_ailab_table(table_name: str, db: Session = Depends(get_db)):
    return crud.get_preview_ailab_table(db, table_name)


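# ---- Data lake table browsing ----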
@router.get("/lake_table", dependencies=[Depends(verify_special)])
@web_try()
@sxtimeit
def get_lake_table(db: Session = Depends(get_db)):
    return crud.get_lake_table(db, g.project_id)


@router.get("/lake_table_info", dependencies=[Depends(verify_special)])
@web_try()
@sxtimeit
def get_lake_table_info(dl_id: int, db: Session = Depends(get_db)):
    return crud.get_lake_table_info(db, dl_id)


@router.get("/lake_table_schema", dependencies=[Depends(verify_special)])
@web_try()
@sxtimeit
def get_lake_table_schema(db_name: str, table_name: str, db: Session = Depends(get_db)):
    return crud.get_lake_table_schema(db, db_name, table_name)


@router.get("/preview_lake_table", dependencies=[Depends(verify_special)])
@web_try()
@sxtimeit
def get_preview_lake_table(db_name: str, table_name: str, db: Session = Depends(get_db)):
    return crud.get_preview_lake_table(db, db_name, table_name)


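# ---- Table storage location ----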
@router.get("/table_location", dependencies=[Depends(verify_all)])
@web_try()
@sxtimeit
def get_table_location(db_name: str, table_name: str, ds_id: Optional[int] = None, db: Session = Depends(get_db)):
    def find_n_sub_str(src, sub, pos, start):
        # Return the index of the (pos + 1)-th occurrence of `sub` in `src`,
        # searching from `start`; -1 if there are not enough occurrences.
        index = src.find(sub, start)
        if index != -1 and pos > 0:
            return find_n_sub_str(src, sub, pos - 1, index + 1)
        return index

    if ds_id is None:
        res = crud.get_table_info(db, db_name, table_name)
    else:
        res = crud.get_job_jdbc_datasource_table_location(db, db_name, table_name, ds_id)

    location = ''
    hdfs = ''
    for line_list in res[0]:
        if line_list[0].find('Location') >= 0:
            location = line_list[1]
            # Split at the third '/' so that, e.g., 'hdfs://host:8020/path/to/table'
            # becomes hdfs='hdfs://host:8020' and location='/path/to/table'.
            index = int(find_n_sub_str(location, '/', 2, 0))
            if index > 0:
                hdfs, location = location[0:index], location[index:]
            break
    return {'location': location, 'hdfs': hdfs}


add_pagination(router)