from typing import List, Optional

from pydantic import BaseModel


class HiveReaderParam(BaseModel):
    """Settings for reading files from HDFS (Hive warehouse paths)."""
    reader_path: str
    reader_default_fs: Optional[str]
    reader_file_type: str
    reader_field_delimiter: Optional[str]
    reader_skip_header: Optional[str]


class HiveWriterParam(BaseModel):
    """Settings for writing files to HDFS (Hive warehouse paths)."""
    writer_default_fs: Optional[str]
    writer_file_type: str
    writer_path: str
    writer_filename: str
    writer_mode: Optional[str] = 'append'
    writer_field_delimiter: Optional[str]


class RdbmsReaderParam(BaseModel):
    """Settings for reading from a relational database."""
    reader_split_pk: Optional[str] = ''
    where_param: Optional[str] = ''
    query_sql: Optional[str]


class RdbmsWriterParam(BaseModel):
    """SQL hooks run before/after writing to a relational database."""
    pre_sql: Optional[str] = ''
    post_sql: Optional[str] = ''


class DataXJsonParam(BaseModel):
    """Top-level parameters used to build a DataX job JSON."""
    reader_datasource_id: Optional[int]
    reader_type: str  # datasource / datalake / ailab
    reader_tables: List[str] = []
    reader_columns: List[str] = []
    writer_datasource_id: Optional[int]
    writer_type: str  # datasource / ailab
    writer_tables: List[str] = []
    writer_columns: List[str] = []
    hive_reader: Optional[HiveReaderParam]
    hive_writer: Optional[HiveWriterParam]
    rdbms_reader: Optional[RdbmsReaderParam]
    rdbms_writer: Optional[RdbmsWriterParam]

    class Config:
        schema_extra = {
            'examples': {
                'mysql2mysql': {
                    "reader_datasource_id": 18,
                    "reader_type": "datasource",
                    "reader_tables": ["job_group_copy1"],
                    "reader_columns": ["0:id:int", "1:app_name:varchar(20)",
                                       "2:title:varchar(20)", "3:address_type:varchar(20)"],
                    "writer_datasource_id": 18,
                    "writer_type": "datasource",
                    "writer_tables": ["job_group_copy2"],
                    "writer_columns": ["0:id:int", "1:app_name:varchar(20)",
                                       "2:title:varchar(20)", "3:address_type:varchar(20)"],
                    "rdbms_reader": {
                        "reader_split_pk": "",
                        "where_param": "",
                        "query_sql": ""
                    },
                    "rdbms_writer": {
                        "pre_sql": "delete from job_group_copy2",
                        "post_sql": ""
                    }
                },
                'mysql2hive': {
                    "reader_datasource_id": 18,
                    "reader_type": "datasource",
                    "reader_tables": ["grades"],
                    "reader_columns": ["id", "ssn", "test2"],
                    "writer_datasource_id": 17,
                    "writer_type": "datasource",
                    "writer_columns": ["0:id:int", "1:ssn:varchar", "2:test2:int"],
                    "writer_tables": [],
                    "rdbms_reader": {
                        "reader_split_pk": "",
                        "where_param": "",
                        "query_sql": ""
                    },
                    "hive_writer": {
                        "writer_default_fs": "hdfs://192.168.199.107:9000",
                        "writer_file_type": "text",
                        "writer_path": "/usr/hive/warehouse/test_1",
                        "writer_filename": "test_1",
                        "writer_mode": "append",
                        "writer_field_delimiter": "|"
                    }
                },
                'hive2mysql': {
                    "reader_datasource_id": 17,
                    "reader_type": "datasource",
                    "reader_tables": ["grades"],
                    "reader_columns": ["0:id:int", "3:ssn:varchar", "5:test2:int"],
                    "writer_datasource_id": 18,
                    "writer_type": "datasource",
                    "writer_tables": ["grades"],
                    "writer_columns": ["0:id:int", "1:ssn:varchar", "2:test2:int"],
                    "hive_reader": {
                        "reader_default_fs": "hdfs://192.168.199.107:9000",
                        "reader_file_type": "csv",
                        "reader_path": "/usr/hive/warehouse/grades/*",
                        "reader_field_delimiter": ",",
                        "reader_skip_header": "true"
                    },
                    "rdbms_writer": {
                        "pre_sql": "delete from grades;",
                        "post_sql": ""
                    }
                }
            }
        }

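
# A minimal usage sketch (not part of the original module): the helper below
# is hypothetical and simply rebuilds the 'mysql2hive' schema_extra example
# as a validated model, which is convenient in tests. Field values are copied
# verbatim from the example above; pydantic v1 semantics are assumed, since
# the Config.schema_extra style above is pydantic v1.
def build_mysql2hive_example() -> DataXJsonParam:
    return DataXJsonParam(
        reader_datasource_id=18,
        reader_type="datasource",
        reader_tables=["grades"],
        reader_columns=["id", "ssn", "test2"],
        writer_datasource_id=17,
        writer_type="datasource",
        writer_tables=[],
        writer_columns=["0:id:int", "1:ssn:varchar", "2:test2:int"],
        rdbms_reader=RdbmsReaderParam(reader_split_pk="", where_param="", query_sql=""),
        hive_writer=HiveWriterParam(
            writer_default_fs="hdfs://192.168.199.107:9000",
            writer_file_type="text",
            writer_path="/usr/hive/warehouse/test_1",
            writer_filename="test_1",
            writer_mode="append",
            writer_field_delimiter="|",
        ),
    )
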
["id", "ssn", "test2"], # "writer_datasource_id": 17, # "writer_columns": ["id:int", "ssn:string", "test2:int"], # "writer_tables": ["grades"], # "rdbms_reader": { # "reader_split_pk": "", # "where_param": "", # }, # "hive_writer": { # "writer_default_fs": "hdfs://192.168.199.107:9000", # "writer_file_type": "text", # "writer_path": "/usr/hive/warehouse/test_1", # "writer_filename": "test_1", # "write_mode": "append", # "write_field_delimiter": "|" # } # } # } class RWDataSource(BaseModel): id: Optional[int] datasource: str # musql/hive rw_type: str # datasource/datalake/ailab jdbc_url: Optional[str] jdbc_username: Optional[str] jdbc_password: Optional[str]