浏览代码

merge 处理冲突

liweiquan 2 年之前
父节点
当前提交
1a5fa11147
共有 8 个文件被更改,包括 39 次插入、10 次删除
  1. 1 1
      Dockerfile
  2. 2 0
      README.md
  3. 2 2
      app/core/datax/rdbms.py
  4. 7 1
      app/models/job_info.py
  5. 3 3
      app/schemas/datax_json.py
  6. 13 1
      app/schemas/job_info.py
  7. 9 2
      data/data.sql
  8. 2 0
      docker-compose.yml

+ 1 - 1
Dockerfile

@@ -71,7 +71,7 @@ startretries=0\n\
 redirect_stderr=true\n\
 stdout_logfile=/var/log/be.log\n\
 stdout_logfile_maxbytes=50MB\n\
-environment=PYTHONUNBUFFERED=1,APP_ENV=production\n\
+environment=PYTHONUNBUFFERED=1\n\
 " > /etc/supervisor/conf.d/be.conf
 
 ADD . ${WORKDIR}

+ 2 - 0
README.md

@@ -17,6 +17,8 @@ python run.py
 
 ## 部署
 
+在 k8s 生产环境设置 pod 的环境变量 `APP_ENV=production`
+
 ```shell
 # 打包镜像
 make image

+ 2 - 2
app/core/datax/rdbms.py

@@ -93,7 +93,7 @@ class RdbmsReader(ReaderBase):
                 raise Exception('表名和字段名不能为空')
             table = param.reader_tables
             connection.append({'jdbcUrl': jdbcUrl, 'table': table})
-            parameter['column'] = param.reader_columns
+            parameter['column'] = list(map(lambda x: x.split(':')[1], param.reader_columns))
             parameter['where'] = param.rdbms_reader.where_param
             parameter['splitPk'] = param.rdbms_reader.reader_split_pk
         parameter['connection'] = connection
@@ -129,7 +129,7 @@ class RdbmsWriter(WriterBase):
         connection = list()
         connection.append({'jdbcUrl': self.ds.jdbc_url, 'table': param.writer_tables})
         parameter['connection'] = connection
-        parameter['column'] = param.writer_columns
+        parameter['column'] = list(map(lambda x: x.split(':')[1], param.writer_columns))
         if is_show:
             parameter['username'] = self.ds.jdbc_username
             parameter['password'] = self.ds.jdbc_password

+ 7 - 1
app/models/job_info.py

@@ -41,5 +41,11 @@ class JobInfo(BaseModel):
     inc_start_time = Column(Integer)
     # 最近一次执行状态
     last_handle_code = Column(Integer)
-    # 数据状态
+    # 数据状态 0-删除 1-正常
     delete_status = Column(Integer, nullable=False)
+    # 增量时间
+    replace_param = Column(String)
+    # 分区信息
+    partition_info = Column(String)
+    # jvm参数
+    jvm_param = Column(String)

+ 3 - 3
app/schemas/datax_json.py

@@ -48,10 +48,10 @@ class DataXJsonParam(BaseModel):
                 'mysql2mysql': {
                     "reader_datasource_id": 18,
                     "reader_tables": ["job_group_copy1"],
-                    "reader_columns": ["id", "app_name", "title", "address_type"],
+                    "reader_columns": ["0:id:int", "1:app_name:varchar(20)", "2:title:varchar(20)", "3:address_type:varchar(20)"],
                     "writer_datasource_id": 18,
                     "writer_tables": ["job_group_copy2"],
-                    "writer_columns": ["id", "app_name", "title", "address_type"],
+                    "writer_columns": ["0:id:int", "1:app_name:varchar(20)", "2:title:varchar(20)", "3:address_type:varchar(20)"],
                     "rdbms_reader": {
                         "reader_split_pk": "",
                         "where_param": "",
@@ -89,7 +89,7 @@ class DataXJsonParam(BaseModel):
                     "reader_columns": ["0:id:int", "3:ssn:varchar", "5:test2:int"],
                     "writer_datasource_id": 18,
                     "writer_tables": ["grades"],
-                    "writer_columns": ["id", "ssn", "test2"],
+                    "writer_columns": ["0:id:int", "1:ssn:varchar", "2:test2:int"],
                     "hive_reader": {
                         "reader_default_fs": "hdfs://192.168.199.107:9000",
                         "reader_file_type": "csv",

+ 13 - 1
app/schemas/job_info.py

@@ -23,6 +23,12 @@ class JobInfoBase(BaseModel):
     inc_start_time: Optional[int]
     # datax运行脚本
     job_json: str
+     # 增量时间
+    replace_param: Optional[str]
+    # 分区信息
+    partition_info: Optional[str]
+    # jvm参数
+    jvm_param: Optional[str]
 
 
 
@@ -39,7 +45,10 @@ class JobInfoCreate(JobInfoBase):
                 "executor_timeout": 60,
                 "executor_fail_retry_count": 2,
                 "inc_start_time": 0,
-                "job_json": ""
+                "job_json": "",
+                "replace_param": "-DlastTime='%s' -DcurrentTime='%s'",
+                "partition_info": "txn_date,0,yyyy-MM-dd",
+                "jvm_param": "",
             }
         }
 
@@ -60,6 +69,9 @@ class JobInfoUpdate(JobInfoBase):
                 "inc_start_time": 0,
                 "job_json": "",
                 "trigger_status": 1,
+                "replace_param": "-DlastTime='%s' -DcurrentTime='%s'",
+                "partition_info": "txn_date,0,yyyy-MM-dd",
+                "jvm_param": "",
             }
         }
 

+ 9 - 2
data/data.sql

@@ -68,6 +68,14 @@ ALTER TABLE `job_info`
 ADD COLUMN `delete_status` tinyint(4) NOT NULL COMMENT '数据状态:0-无效,1-有效' AFTER `last_handle_code`;
 
 
+
+
+ALTER TABLE `job_info`
+ADD COLUMN `replace_param` varchar(100) NULL COMMENT '增量时间' AFTER `delete_status`,
+ADD COLUMN `partition_info` varchar(100) NULL COMMENT '分区信息' AFTER `replace_param`,
+ADD COLUMN `jvm_param` varchar(100) NULL COMMENT 'jvm参数' AFTER `partition_info`;
+
+
 -- ----------------------------
 -- Table structure for job_log
 -- ----------------------------
@@ -94,7 +102,6 @@ CREATE TABLE `job_log` (
   KEY `I_handle_code` (`handle_code`) USING BTREE
 ) ENGINE=InnoDB AUTO_INCREMENT=1581 DEFAULT CHARSET=utf8mb4 ROW_FORMAT=DYNAMIC;
 
-
 -- ----------------------------
 -- Table structure for data_management
 -- ----------------------------
@@ -113,4 +120,4 @@ CREATE TABLE `data_management` (
   `user_id` varchar(50) COLLATE utf8_unicode_ci NOT NULL COMMENT '创建人编号',
   `project_id` varchar(50) COLLATE utf8_unicode_ci NOT NULL COMMENT '项目编号',
   PRIMARY KEY (`id`)
-) ENGINE=InnoDB AUTO_INCREMENT=2 DEFAULT CHARSET=utf8 COLLATE=utf8_unicode_ci COMMENT='数据管理';
+) ENGINE=InnoDB AUTO_INCREMENT=2 DEFAULT CHARSET=utf8mb4 COLLATE=utf8_unicode_ci COMMENT='数据管理' ROW_FORMAT=DYNAMIC;

+ 2 - 0
docker-compose.yml

@@ -14,5 +14,7 @@ services:
       - '18224:22'
     extra_hosts:
       - 'minio-api.sxkj.com:192.168.199.109'
+    environment:
+      - APP_ENV=development
     # volumes:
     #   - ./:/workspace