from typing import List, Optional

from pydantic import BaseModel


class JobJdbcDatasourceBase(BaseModel):
    # data source name
    datasource_name: str
    # data source type
    datasource: str
    # database name
    database_name: str
    # database username
    jdbc_username: Optional[str]
    # database password
    jdbc_password: Optional[str]
    # jdbc url
    jdbc_url: str
    # remarks
    comments: str
    # tag
    tag: str
    # kerberos authentication (0: disabled, 1: enabled)
    kerberos: Optional[int] = 0
    # keytab file
    keytab: Optional[str]
    # krb5 config file
    krb5config: Optional[str]
    # kerberos service name
    kerberos_service_name: Optional[str]
    # principal
    principal: Optional[str]
    # use_ssl
    use_ssl: Optional[int] = 0

    class Config:
        schema_extra = {
            # "example": {
            #     "datasource_name": 'test',
            #     "datasource": "mysql",
            #     "database_name": 'datax_web',
            #     "jdbc_username": 'root',
            #     "jdbc_password": 'happylay',
            #     "jdbc_url": '192.168.199.107:10086',
            #     "comments": 'This is a very nice Item',
            #     "tag": '线下'
            # }
            "example": {
                "datasource_name": 'testhive',
                "datasource": "hive",
                "database_name": 'default',
                "jdbc_username": '',
                "jdbc_password": '',
                "jdbc_url": '192.168.199.107:10000',
                "comments": 'This is a very nice Item',
                "tag": '线下',
                "kerberos": 0,
                "keytab": "test/kerberos/user.keytab",
                "krb5config": "test/kerberos/user.conf",
                "kerberos_service_name": "hadoop",
                "principal": "ailab@EMR-5XJSY31F",
                "use_ssl": 0,
            }
        }


class JobJdbcDatasourceCreate(JobJdbcDatasourceBase):
    pass


class JobJdbcDatasourceUpdate(JobJdbcDatasourceBase):
    status: int = 1

    class Config:
        schema_extra = {
            # "example": {
            #     "datasource_name": 'test',
            #     "datasource": "mysql",
            #     "database_name": 'datax_web',
            #     "jdbc_username": 'root',
            #     "jdbc_password": 'happylay',
            #     "jdbc_url": '192.168.199.107:10086',
            #     "comments": 'This is a very nice Item',
            #     "tag": '线下',
            #     "status": 1,
            # }
            "example": {
                "datasource_name": 'testhive',
                "datasource": "hive",
                "database_name": 'default',
                "jdbc_username": '',
                "jdbc_password": '',
                "jdbc_url": '192.168.199.107:10000',
                "comments": 'This is a very nice Item',
                "tag": '线下',
                "kerberos": 0,
                "keytab": "test/kerberos/user.keytab",
                "krb5config": "test/kerberos/user.conf",
                "kerberos_service_name": "hadoop",
                "principal": "ailab@EMR-5XJSY31F",
                "use_ssl": 0,
                "status": 1,
            }
        }


class JobJdbcDatasource(JobJdbcDatasourceBase):
    id: int
    status: int
    create_time: int
    create_by: str
    update_time: int
    update_by: str
    jdbc_url: str
    jdbc_driver_class: str

    class Config:
        orm_mode = True


class ImportDataLake(BaseModel):
    database_name: str
    table_name: str
    table_path: str

    class Config:
        schema_extra = {
            "example": {
                "database_name": 'test',
                "table_name": 'test',
                "table_path": '/users/ailaab/test_1',
            }
        }


class ShareAilab(BaseModel):
    table_names: List[str]
    project_ids: List[int]

    class Config:
        schema_extra = {
            "example": {
                "table_names": ['test'],
                "project_ids": [3],
            }
        }


class Column(BaseModel):
    column_name: str
    Column_type: str


class CreateAilab(BaseModel):
    table_name: str
    columns: List[Column]
    partition_column: Optional[str]
    project_id: int

    class Config:
        schema_extra = {
            "example": {
                "table_name": 'test',
                "columns": [
                    {
                        'column_name': 'id',
                        'Column_type': 'int',
                    },
                    {
                        'column_name': 'name',
                        'Column_type': 'string',
                    },
                ],
                "project_id": 1,
                "partition_column": 'data_ct',
            }
        }
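

# ---------------------------------------------------------------------------
# Usage sketch (illustrative only, not part of the schema module): it simply
# validates the hive "example" payload from JobJdbcDatasourceBase.Config
# against the create model, assuming Pydantic v1 (.dict() API).
# ---------------------------------------------------------------------------
if __name__ == "__main__":
    ds = JobJdbcDatasourceCreate(
        datasource_name="testhive",
        datasource="hive",
        database_name="default",
        jdbc_username="",
        jdbc_password="",
        jdbc_url="192.168.199.107:10000",
        comments="This is a very nice Item",
        tag="线下",
    )
    # Unset optional fields (keytab, krb5config, ...) default to None;
    # kerberos and use_ssl fall back to 0.
    print(ds.dict())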