
mysql2hive hive2mysql

qill committed 2 years ago
commit 20d2becaf2
22 changed files with 1538 additions and 0 deletions
  1. app/datax/job/mysql2hive.json.template (+43 -0)
  2. app/datax/log/2022-08-08/2hive1659925783_json-10_29_44.075.log (+153 -0)
  3. app/datax/log/2022-08-08/2hive1659949960_json-17_12_40.466.log (+112 -0)
  4. app/datax/log/2022-08-08/2hive1659950348_json-17_19_09.002.log (+149 -0)
  5. app/datax/log/2022-08-08/2hive1659953224_json-18_07_04.969.log (+125 -0)
  6. app/datax/log/2022-08-08/2hive1659953258_json-18_07_38.612.log (+149 -0)
  7. app/datax/log/2022-08-08/2hive1659953282_json-18_08_02.318.log (+149 -0)
  8. app/datax/log/2022-08-08/mysql1659946080_json-16_08_00.769.log (+165 -0)
  9. app/datax/log/2022-08-08/mysql1659946358_json-16_12_38.517.log (+139 -0)
  10. app/datax/log/2022-08-08/mysql1659947482_json-16_31_23.163.log (+97 -0)
  11. app/datax/log/2022-08-08/mysql1659947523_json-16_32_03.353.log (+139 -0)
  12. app/datax/log_perf/2022-08-08/2hive1659925783_json-10_29_44.075.log (+0 -0)
  13. app/datax/log_perf/2022-08-08/2hive1659949960_json-17_12_40.466.log (+0 -0)
  14. app/datax/log_perf/2022-08-08/2hive1659950348_json-17_19_09.002.log (+0 -0)
  15. app/datax/log_perf/2022-08-08/2hive1659953224_json-18_07_04.969.log (+0 -0)
  16. app/datax/log_perf/2022-08-08/2hive1659953258_json-18_07_38.612.log (+0 -0)
  17. app/datax/log_perf/2022-08-08/2hive1659953282_json-18_08_02.318.log (+0 -0)
  18. app/datax/log_perf/2022-08-08/mysql1659946080_json-16_08_00.769.log (+0 -0)
  19. app/datax/log_perf/2022-08-08/mysql1659946358_json-16_12_38.517.log (+0 -0)
  20. app/datax/log_perf/2022-08-08/mysql1659947482_json-16_31_23.163.log (+0 -0)
  21. app/datax/log_perf/2022-08-08/mysql1659947523_json-16_32_03.353.log (+0 -0)
  22. app/run.py (+118 -0)

+ 43 - 0
app/datax/job/mysql2hive.json.template

@@ -0,0 +1,43 @@
+{
+    "job": {
+        "content": [
+            {
+                "reader": {
+                    "name": "mysqlreader", 
+                    "parameter": {
+                        "connection": [
+                            {
+                                "jdbcUrl": ["${jdbcReader}"],
+                                "querySql": ["${querySql}"]
+                            }
+                        ], 
+                        "password": "${passwordReader}",
+                        "username": "${usernameReader}"
+                    }
+                }, 
+                "writer": {
+                    "name": "hdfswriter",
+                    "parameter": {
+                        "column": "${columns}",
+                        "compress": "",
+                        "defaultFS": "${WriterdefaultFS}",
+                        "fieldDelimiter": "${WriterfieldDelimiter}",
+                        "fileName": "${WriterfileName}",
+                        "fileType": "${WriterfileType}",
+                        "path": "${Writerpath}",
+                        "writeMode": "${WriteMode}"
+                    }
+                }
+            }
+        ],
+        "setting": {
+            "speed": {
+                "channel": "${channel}"
+            }
+        }
+    }
+}
+
+
+
+
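The template above leaves every reader/writer setting as a ${...} parameter. Below is a minimal sketch of how such a template could be rendered and handed to DataX; it is not the app/run.py added in this commit, and the datax.py location, the helper names and the log-naming details are assumptions inferred from the template and from the log paths in this commit (app/datax/log/<date>/<prefix><unix-ts>_json-<HH_MM_SS.mmm>.log).

    # Hypothetical sketch (not the actual app/run.py): render mysql2hive.json.template
    # and run it with DataX, writing console output to app/datax/log/<date>/.
    import json
    import subprocess
    import time
    from datetime import datetime
    from pathlib import Path
    from string import Template

    DATAX_PY = Path("/opt/datax/bin/datax.py")   # assumed DataX install location
    APP_DATAX = Path("app/datax")

    def render_job(template_path: Path, params: dict, columns: list) -> Path:
        text = template_path.read_text()
        # "${columns}" is quoted in the template, so splice the JSON array over the
        # quoted token before substituting the scalar parameters.
        text = text.replace('"${columns}"', json.dumps(columns))
        text = Template(text).safe_substitute(params)
        json.loads(text)                         # fail fast if the result is not valid JSON
        job_file = APP_DATAX / "job" / "mysql2hive.json"
        job_file.write_text(text)
        return job_file

    def run_job(job_file: Path, prefix: str = "2hive") -> int:
        now = datetime.now()
        log_dir = APP_DATAX / "log" / now.strftime("%Y-%m-%d")
        log_dir.mkdir(parents=True, exist_ok=True)
        # Mimics the naming seen in this commit, e.g. 2hive1659950348_json-17_19_09.002.log
        log_name = f"{prefix}{int(time.time())}_json-{now.strftime('%H_%M_%S.%f')[:-3]}.log"
        with (log_dir / log_name).open("w") as log:
            return subprocess.run(
                ["python", str(DATAX_PY), str(job_file)],
                stdout=log, stderr=subprocess.STDOUT,
            ).returncode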

+ 153 - 0
app/datax/log/2022-08-08/2hive1659925783_json-10_29_44.075.log

@@ -0,0 +1,153 @@
+2022-08-08 10:29:44.242 [main] INFO  VMInfo - VMInfo# operatingSystem class => com.sun.management.internal.OperatingSystemImpl
+2022-08-08 10:29:44.246 [main] INFO  Engine - the machine info  => 
+
+	osInfo:	Ubuntu 11 11.0.16+8-post-Ubuntu-0ubuntu120.04
+	jvmInfo:	Linux amd64 5.13.0-52-generic
+	cpu num:	8
+
+	totalPhysicalMemory:	-0.00G
+	freePhysicalMemory:	-0.00G
+	maxFileDescriptorCount:	-1
+	currentOpenFileDescriptorCount:	-1
+
+	GC Names	[G1 Young Generation, G1 Old Generation]
+
+	MEMORY_NAME                    | allocation_size                | init_size                      
+	CodeHeap 'profiled nmethods'   | 117.21MB                       | 2.44MB                         
+	G1 Old Gen                     | 1,024.00MB                     | 970.00MB                       
+	G1 Survivor Space              | -0.00MB                        | 0.00MB                         
+	CodeHeap 'non-profiled nmethods' | 117.22MB                       | 2.44MB                         
+	Compressed Class Space         | 1,024.00MB                     | 0.00MB                         
+	Metaspace                      | -0.00MB                        | 0.00MB                         
+	G1 Eden Space                  | -0.00MB                        | 54.00MB                        
+	CodeHeap 'non-nmethods'        | 5.57MB                         | 2.44MB                         
+
+
+2022-08-08 10:29:44.258 [main] INFO  Engine - 
+{
+	"content":[
+		{
+			"reader":{
+				"name":"hdfsreader",
+				"parameter":{
+					"column":[
+						"*"
+					],
+					"defaultFS":"hdfs://hadoop03:8020/",
+					"encoding":"UTF-8",
+					"fieldDelimiter":"\t",
+					"fileType":"text",
+					"path":"/user/hive/warehouse/user_info/user_info_data.txt"
+				}
+			},
+			"writer":{
+				"name":"hdfswriter",
+				"parameter":{
+					"column":[
+						{
+							"name":"user_id",
+							"type":"string"
+						},
+						{
+							"name":"area_id",
+							"type":"string"
+						},
+						{
+							"name":"age",
+							"type":"int"
+						},
+						{
+							"name":"occupation",
+							"type":"string"
+						}
+					],
+					"compress":"",
+					"defaultFS":"hdfs://hadoop03:8020/",
+					"fieldDelimiter":"\t",
+					"fileName":"user_info_data_2.txt",
+					"fileType":"text",
+					"path":"/user/hive/warehouse/user_info/",
+					"writeMode":"append"
+				}
+			}
+		}
+	],
+	"setting":{
+		"speed":{
+			"channel":"2"
+		}
+	}
+}
+
+2022-08-08 10:29:44.268 [main] WARN  Engine - prioriy set to 0, because NumberFormatException, the value is: null
+2022-08-08 10:29:44.269 [main] INFO  PerfTrace - PerfTrace traceId=job_-1, isEnable=false, priority=0
+2022-08-08 10:29:44.269 [main] INFO  JobContainer - DataX jobContainer starts job.
+2022-08-08 10:29:44.270 [main] INFO  JobContainer - Set jobId = 0
+2022-08-08 10:29:44.282 [job-0] INFO  HdfsReader$Job - init() begin...
+2022-08-08 10:29:44.475 [job-0] INFO  HdfsReader$Job - hadoopConfig details:{"finalParameters":[]}
+2022-08-08 10:29:44.475 [job-0] INFO  HdfsReader$Job - init() ok and end...
+2022-08-08 10:29:45.020 [job-0] INFO  JobContainer - jobContainer starts to do prepare ...
+2022-08-08 10:29:45.020 [job-0] INFO  JobContainer - DataX Reader.Job [hdfsreader] do prepare work .
+2022-08-08 10:29:45.020 [job-0] INFO  HdfsReader$Job - prepare(), start to getAllFiles...
+2022-08-08 10:29:45.020 [job-0] INFO  HdfsReader$Job - get HDFS all files in path = [/user/hive/warehouse/user_info/user_info_data.txt]
+2022-08-08 10:29:45.400 [job-0] INFO  HdfsReader$Job - [hdfs://hadoop03:8020/user/hive/warehouse/user_info/user_info_data.txt]是[text]类型的文件, 将该文件加入source files列表
+2022-08-08 10:29:45.401 [job-0] INFO  HdfsReader$Job - 您即将读取的文件数为: [1], 列表为: [hdfs://hadoop03:8020/user/hive/warehouse/user_info/user_info_data.txt]
+2022-08-08 10:29:45.401 [job-0] INFO  JobContainer - DataX Writer.Job [hdfswriter] do prepare work .
+2022-08-08 10:29:45.439 [job-0] INFO  HdfsWriter$Job - 由于您配置了writeMode append, 写入前不做清理工作, [/user/hive/warehouse/user_info/] 目录下写入相应文件名前缀  [user_info_data_2.txt] 的文件
+2022-08-08 10:29:45.439 [job-0] INFO  JobContainer - jobContainer starts to do split ...
+2022-08-08 10:29:45.440 [job-0] INFO  JobContainer - Job set Channel-Number to 2 channels.
+2022-08-08 10:29:45.440 [job-0] INFO  HdfsReader$Job - split() begin...
+2022-08-08 10:29:45.440 [job-0] INFO  JobContainer - DataX Reader.Job [hdfsreader] splits to [1] tasks.
+2022-08-08 10:29:45.440 [job-0] INFO  HdfsWriter$Job - begin do split...
+2022-08-08 10:29:45.444 [job-0] INFO  HdfsWriter$Job - splited write file name:[hdfs://hadoop03:8020//user/hive/warehouse/user_info__21f6a4ce_7abf_4682_b9e8_5ba0b537dc5f/user_info_data_2.txt__19cb7817_de74_48ef_b71c_98aab72c3932]
+2022-08-08 10:29:45.444 [job-0] INFO  HdfsWriter$Job - end do split.
+2022-08-08 10:29:45.444 [job-0] INFO  JobContainer - DataX Writer.Job [hdfswriter] splits to [1] tasks.
+2022-08-08 10:29:45.449 [job-0] INFO  JobContainer - jobContainer starts to do schedule ...
+2022-08-08 10:29:45.452 [job-0] INFO  JobContainer - Scheduler starts [1] taskGroups.
+2022-08-08 10:29:45.453 [job-0] INFO  JobContainer - Running by standalone Mode.
+2022-08-08 10:29:45.456 [taskGroup-0] INFO  TaskGroupContainer - taskGroupId=[0] start [1] channels for [1] tasks.
+2022-08-08 10:29:45.461 [taskGroup-0] INFO  Channel - Channel set byte_speed_limit to -1, No bps activated.
+2022-08-08 10:29:45.461 [taskGroup-0] INFO  Channel - Channel set record_speed_limit to -1, No tps activated.
+2022-08-08 10:29:45.467 [taskGroup-0] INFO  TaskGroupContainer - taskGroup[0] taskId[0] attemptCount[1] is started
+2022-08-08 10:29:45.486 [0-0-0-reader] INFO  HdfsReader$Job - hadoopConfig details:{"finalParameters":["mapreduce.job.end-notification.max.retry.interval","mapreduce.job.end-notification.max.attempts"]}
+2022-08-08 10:29:45.487 [0-0-0-reader] INFO  Reader$Task - read start
+2022-08-08 10:29:45.487 [0-0-0-writer] INFO  HdfsWriter$Task - begin do write...
+2022-08-08 10:29:45.487 [0-0-0-reader] INFO  Reader$Task - reading file : [hdfs://hadoop03:8020/user/hive/warehouse/user_info/user_info_data.txt]
+2022-08-08 10:29:45.487 [0-0-0-writer] INFO  HdfsWriter$Task - write to file : [hdfs://hadoop03:8020//user/hive/warehouse/user_info__21f6a4ce_7abf_4682_b9e8_5ba0b537dc5f/user_info_data_2.txt__19cb7817_de74_48ef_b71c_98aab72c3932]
+2022-08-08 10:29:45.497 [0-0-0-reader] INFO  UnstructuredStorageReaderUtil - CsvReader使用默认值[{"captureRawRecord":true,"columnCount":0,"comment":"#","currentRecord":-1,"delimiter":"\t","escapeMode":1,"headerCount":0,"rawRecord":"","recordDelimiter":"\u0000","safetySwitch":false,"skipEmptyRecords":true,"textQualifier":"\"","trimWhitespace":true,"useComments":false,"useTextQualifier":true,"values":[]}],csvReaderConfig值为[null]
+2022-08-08 10:29:45.502 [0-0-0-reader] INFO  Reader$Task - end read source files...
+2022-08-08 10:29:45.608 [0-0-0-writer] INFO  HdfsWriter$Task - end do write
+2022-08-08 10:29:45.667 [taskGroup-0] INFO  TaskGroupContainer - taskGroup[0] taskId[0] is successed, used[201]ms
+2022-08-08 10:29:45.668 [taskGroup-0] INFO  TaskGroupContainer - taskGroup[0] completed it's tasks.
+2022-08-08 10:29:55.463 [job-0] INFO  StandAloneJobContainerCommunicator - Total 4 records, 79 bytes | Speed 7B/s, 0 records/s | Error 0 records, 0 bytes |  All Task WaitWriterTime 0.000s |  All Task WaitReaderTime 0.000s | Percentage 100.00%
+2022-08-08 10:29:55.463 [job-0] INFO  AbstractScheduler - Scheduler accomplished all tasks.
+2022-08-08 10:29:55.463 [job-0] INFO  JobContainer - DataX Writer.Job [hdfswriter] do post work.
+2022-08-08 10:29:55.463 [job-0] INFO  HdfsWriter$Job - start rename file [hdfs://hadoop03:8020//user/hive/warehouse/user_info__21f6a4ce_7abf_4682_b9e8_5ba0b537dc5f/user_info_data_2.txt__19cb7817_de74_48ef_b71c_98aab72c3932] to file [hdfs://hadoop03:8020//user/hive/warehouse/user_info/user_info_data_2.txt__19cb7817_de74_48ef_b71c_98aab72c3932].
+2022-08-08 10:29:55.470 [job-0] INFO  HdfsWriter$Job - finish rename file [hdfs://hadoop03:8020//user/hive/warehouse/user_info__21f6a4ce_7abf_4682_b9e8_5ba0b537dc5f/user_info_data_2.txt__19cb7817_de74_48ef_b71c_98aab72c3932] to file [hdfs://hadoop03:8020//user/hive/warehouse/user_info/user_info_data_2.txt__19cb7817_de74_48ef_b71c_98aab72c3932].
+2022-08-08 10:29:55.471 [job-0] INFO  HdfsWriter$Job - start delete tmp dir [hdfs://hadoop03:8020/user/hive/warehouse/user_info__21f6a4ce_7abf_4682_b9e8_5ba0b537dc5f] .
+2022-08-08 10:29:55.477 [job-0] INFO  HdfsWriter$Job - finish delete tmp dir [hdfs://hadoop03:8020/user/hive/warehouse/user_info__21f6a4ce_7abf_4682_b9e8_5ba0b537dc5f] .
+2022-08-08 10:29:55.477 [job-0] INFO  JobContainer - DataX Reader.Job [hdfsreader] do post work.
+2022-08-08 10:29:55.477 [job-0] INFO  JobContainer - DataX jobId [0] completed successfully.
+2022-08-08 10:29:55.478 [job-0] INFO  HookInvoker - No hook invoked, because base dir not exists or is a file: /home/zhouze/PycharmProjects/yili/yili-portal/app/datax/hook
+2022-08-08 10:29:55.586 [job-0] INFO  JobContainer - 
+	 [total cpu info] => 
+		averageCpu                     | maxDeltaCpu                    | minDeltaCpu                    
+		-1.00%                         | -1.00%                         | -1.00%
+                        
+
+	 [total gc info] => 
+		 NAME                 | totalGCCount       | maxDeltaGCCount    | minDeltaGCCount    | totalGCTime        | maxDeltaGCTime     | minDeltaGCTime     
+		 G1 Young Generation  | 5                  | 5                  | 5                  | 0.036s             | 0.036s             | 0.036s             
+		 G1 Old Generation    | 0                  | 0                  | 0                  | 0.000s             | 0.000s             | 0.000s             
+
+2022-08-08 10:29:55.586 [job-0] INFO  JobContainer - PerfTrace not enable!
+2022-08-08 10:29:55.587 [job-0] INFO  StandAloneJobContainerCommunicator - Total 4 records, 79 bytes | Speed 7B/s, 0 records/s | Error 0 records, 0 bytes |  All Task WaitWriterTime 0.000s |  All Task WaitReaderTime 0.000s | Percentage 100.00%
+2022-08-08 10:29:55.590 [job-0] INFO  JobContainer - 
+任务启动时刻                    : 2022-08-08 10:29:44
+任务结束时刻                    : 2022-08-08 10:29:55
+任务总计耗时                    :                 11s
+任务平均流量                    :                7B/s
+记录写入速度                    :              0rec/s
+读出记录总数                    :                   4
+读写失败总数                    :                   0
+

+ 112 - 0
app/datax/log/2022-08-08/2hive1659949960_json-17_12_40.466.log

@@ -0,0 +1,112 @@
+2022-08-08 17:12:40.638 [main] INFO  VMInfo - VMInfo# operatingSystem class => com.sun.management.internal.OperatingSystemImpl
+2022-08-08 17:12:40.644 [main] INFO  Engine - the machine info  => 
+
+	osInfo:	Ubuntu 11 11.0.16+8-post-Ubuntu-0ubuntu120.04
+	jvmInfo:	Linux amd64 5.13.0-52-generic
+	cpu num:	8
+
+	totalPhysicalMemory:	-0.00G
+	freePhysicalMemory:	-0.00G
+	maxFileDescriptorCount:	-1
+	currentOpenFileDescriptorCount:	-1
+
+	GC Names	[G1 Young Generation, G1 Old Generation]
+
+	MEMORY_NAME                    | allocation_size                | init_size                      
+	CodeHeap 'profiled nmethods'   | 117.21MB                       | 2.44MB                         
+	G1 Old Gen                     | 1,024.00MB                     | 970.00MB                       
+	G1 Survivor Space              | -0.00MB                        | 0.00MB                         
+	CodeHeap 'non-profiled nmethods' | 117.22MB                       | 2.44MB                         
+	Compressed Class Space         | 1,024.00MB                     | 0.00MB                         
+	Metaspace                      | -0.00MB                        | 0.00MB                         
+	G1 Eden Space                  | -0.00MB                        | 54.00MB                        
+	CodeHeap 'non-nmethods'        | 5.57MB                         | 2.44MB                         
+
+
+2022-08-08 17:12:40.655 [main] INFO  Engine - 
+{
+	"content":[
+		{
+			"reader":{
+				"name":"hdfsreader",
+				"parameter":{
+					"column":[
+						"*"
+					],
+					"defaultFS":"${ReaderdefaultFS}",
+					"encoding":"UTF-8",
+					"fieldDelimiter":"${ReaderfieldDelimiter}",
+					"fileType":"${ReaderfileType}",
+					"path":"${ReaderPath}"
+				}
+			},
+			"writer":{
+				"name":"hdfswriter",
+				"parameter":{
+					"column":[
+						{
+							"name":"user_id",
+							"type":"string"
+						},
+						{
+							"name":"area_id",
+							"type":"string"
+						},
+						{
+							"name":"age",
+							"type":"int"
+						},
+						{
+							"name":"occupation",
+							"type":"string"
+						}
+					],
+					"compress":"",
+					"defaultFS":"hdfs://hadoop03:8020/",
+					"fieldDelimiter":"\t",
+					"fileName":"user_info_data_3.txt",
+					"fileType":"text",
+					"path":"/user/hive/warehouse/user_info/",
+					"writeMode":"append"
+				}
+			}
+		}
+	],
+	"setting":{
+		"speed":{
+			"channel":"2"
+		}
+	}
+}
+
+2022-08-08 17:12:40.664 [main] WARN  Engine - prioriy set to 0, because NumberFormatException, the value is: null
+2022-08-08 17:12:40.665 [main] INFO  PerfTrace - PerfTrace traceId=job_-1, isEnable=false, priority=0
+2022-08-08 17:12:40.665 [main] INFO  JobContainer - DataX jobContainer starts job.
+2022-08-08 17:12:40.666 [main] INFO  JobContainer - Set jobId = 0
+2022-08-08 17:12:40.687 [job-0] INFO  HdfsReader$Job - init() begin...
+2022-08-08 17:12:40.690 [job-0] ERROR JobContainer - Exception when job run
+com.alibaba.datax.common.exception.DataXException: Code:[HdfsReader-11], Description:[文件类型配置错误].  - HdfsReader插件目前支持ORC, TEXT, CSV, SEQUENCE, RC五种格式的文件,请将fileType选项的值配置为ORC, TEXT, CSV, SEQUENCE 或者 RC
+	at com.alibaba.datax.common.exception.DataXException.asDataXException(DataXException.java:26) ~[datax-common-0.0.1-SNAPSHOT.jar:na]
+	at com.alibaba.datax.plugin.reader.hdfsreader.HdfsReader$Job.validate(HdfsReader.java:87) ~[hdfsreader-0.0.1-SNAPSHOT.jar:na]
+	at com.alibaba.datax.plugin.reader.hdfsreader.HdfsReader$Job.init(HdfsReader.java:50) ~[hdfsreader-0.0.1-SNAPSHOT.jar:na]
+	at com.alibaba.datax.core.job.JobContainer.initJobReader(JobContainer.java:673) ~[datax-core-0.0.1-SNAPSHOT.jar:na]
+	at com.alibaba.datax.core.job.JobContainer.init(JobContainer.java:303) ~[datax-core-0.0.1-SNAPSHOT.jar:na]
+	at com.alibaba.datax.core.job.JobContainer.start(JobContainer.java:113) ~[datax-core-0.0.1-SNAPSHOT.jar:na]
+	at com.alibaba.datax.core.Engine.start(Engine.java:92) ~[datax-core-0.0.1-SNAPSHOT.jar:na]
+	at com.alibaba.datax.core.Engine.entry(Engine.java:171) ~[datax-core-0.0.1-SNAPSHOT.jar:na]
+	at com.alibaba.datax.core.Engine.main(Engine.java:204) ~[datax-core-0.0.1-SNAPSHOT.jar:na]
+2022-08-08 17:12:40.693 [job-0] INFO  StandAloneJobContainerCommunicator - Total 0 records, 0 bytes | Speed 0B/s, 0 records/s | Error 0 records, 0 bytes |  All Task WaitWriterTime 0.000s |  All Task WaitReaderTime 0.000s | Percentage 0.00%
+2022-08-08 17:12:40.695 [job-0] ERROR Engine - 
+
+经DataX智能分析,该任务最可能的错误原因是:
+com.alibaba.datax.common.exception.DataXException: Code:[HdfsReader-11], Description:[文件类型配置错误].  - HdfsReader插件目前支持ORC, TEXT, CSV, SEQUENCE, RC五种格式的文件,请将fileType选项的值配置为ORC, TEXT, CSV, SEQUENCE 或者 RC
+	at com.alibaba.datax.common.exception.DataXException.asDataXException(DataXException.java:26)
+	at com.alibaba.datax.plugin.reader.hdfsreader.HdfsReader$Job.validate(HdfsReader.java:87)
+	at com.alibaba.datax.plugin.reader.hdfsreader.HdfsReader$Job.init(HdfsReader.java:50)
+	at com.alibaba.datax.core.job.JobContainer.initJobReader(JobContainer.java:673)
+	at com.alibaba.datax.core.job.JobContainer.init(JobContainer.java:303)
+	at com.alibaba.datax.core.job.JobContainer.start(JobContainer.java:113)
+	at com.alibaba.datax.core.Engine.start(Engine.java:92)
+	at com.alibaba.datax.core.Engine.entry(Engine.java:171)
+	at com.alibaba.datax.core.Engine.main(Engine.java:204)
+
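This failed run shows what happens when the template is submitted before its reader parameters are filled in: the echoed config still contains ${ReaderdefaultFS}, ${ReaderfieldDelimiter}, ${ReaderfileType} and ${ReaderPath}, so HdfsReader sees the literal string "${ReaderfileType}" and rejects it with HdfsReader-11 (fileType must be ORC, TEXT, CSV, SEQUENCE or RC). A small guard like the hypothetical sketch below, run on the rendered job file before launching DataX, would catch this class of mistake up front.

    # Hypothetical pre-flight check (not part of this commit): refuse to submit a
    # rendered DataX job while any ${...} placeholder is still unsubstituted.
    import re
    from pathlib import Path

    PLACEHOLDER = re.compile(r"\$\{[A-Za-z_][A-Za-z0-9_]*\}")

    def assert_fully_rendered(job_file: Path) -> None:
        leftovers = sorted(set(PLACEHOLDER.findall(job_file.read_text())))
        if leftovers:
            raise ValueError(f"unsubstituted DataX parameters in {job_file.name}: {leftovers}")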

+ 149 - 0
app/datax/log/2022-08-08/2hive1659950348_json-17_19_09.002.log

@@ -0,0 +1,149 @@
+2022-08-08 17:19:09.160 [main] INFO  VMInfo - VMInfo# operatingSystem class => com.sun.management.internal.OperatingSystemImpl
+2022-08-08 17:19:09.164 [main] INFO  Engine - the machine info  => 
+
+	osInfo:	Ubuntu 11 11.0.16+8-post-Ubuntu-0ubuntu120.04
+	jvmInfo:	Linux amd64 5.13.0-52-generic
+	cpu num:	8
+
+	totalPhysicalMemory:	-0.00G
+	freePhysicalMemory:	-0.00G
+	maxFileDescriptorCount:	-1
+	currentOpenFileDescriptorCount:	-1
+
+	GC Names	[G1 Young Generation, G1 Old Generation]
+
+	MEMORY_NAME                    | allocation_size                | init_size                      
+	CodeHeap 'profiled nmethods'   | 117.21MB                       | 2.44MB                         
+	G1 Old Gen                     | 1,024.00MB                     | 970.00MB                       
+	G1 Survivor Space              | -0.00MB                        | 0.00MB                         
+	CodeHeap 'non-profiled nmethods' | 117.22MB                       | 2.44MB                         
+	Compressed Class Space         | 1,024.00MB                     | 0.00MB                         
+	Metaspace                      | -0.00MB                        | 0.00MB                         
+	G1 Eden Space                  | -0.00MB                        | 54.00MB                        
+	CodeHeap 'non-nmethods'        | 5.57MB                         | 2.44MB                         
+
+
+2022-08-08 17:19:09.175 [main] INFO  Engine - 
+{
+	"content":[
+		{
+			"reader":{
+				"name":"mysqlreader",
+				"parameter":{
+					"connection":[
+						{
+							"jdbcUrl":[
+								"jdbc:mysql://192.168.2.2:3306/gene"
+							],
+							"querySql":[
+								"select * from gene.user_info_transfer"
+							]
+						}
+					],
+					"password":"*********",
+					"username":"root"
+				}
+			},
+			"writer":{
+				"name":"hdfswriter",
+				"parameter":{
+					"column":[
+						{
+							"name":"user_id",
+							"type":"string"
+						},
+						{
+							"name":"area_id",
+							"type":"string"
+						},
+						{
+							"name":"age",
+							"type":"int"
+						},
+						{
+							"name":"occupation",
+							"type":"string"
+						}
+					],
+					"compress":"",
+					"defaultFS":"hdfs://hadoop03:8020/",
+					"fieldDelimiter":"\t",
+					"fileName":"user_info_data_3.txt",
+					"fileType":"text",
+					"path":"/user/hive/warehouse/user_info/",
+					"writeMode":"append"
+				}
+			}
+		}
+	],
+	"setting":{
+		"speed":{
+			"channel":"2"
+		}
+	}
+}
+
+2022-08-08 17:19:09.183 [main] WARN  Engine - prioriy set to 0, because NumberFormatException, the value is: null
+2022-08-08 17:19:09.184 [main] INFO  PerfTrace - PerfTrace traceId=job_-1, isEnable=false, priority=0
+2022-08-08 17:19:09.184 [main] INFO  JobContainer - DataX jobContainer starts job.
+2022-08-08 17:19:09.185 [main] INFO  JobContainer - Set jobId = 0
+2022-08-08 17:19:09.371 [job-0] INFO  OriginalConfPretreatmentUtil - Available jdbcUrl:jdbc:mysql://192.168.2.2:3306/gene?yearIsDateType=false&zeroDateTimeBehavior=convertToNull&tinyInt1isBit=false&rewriteBatchedStatements=true.
+2022-08-08 17:19:09.899 [job-0] INFO  JobContainer - jobContainer starts to do prepare ...
+2022-08-08 17:19:09.899 [job-0] INFO  JobContainer - DataX Reader.Job [mysqlreader] do prepare work .
+2022-08-08 17:19:09.900 [job-0] INFO  JobContainer - DataX Writer.Job [hdfswriter] do prepare work .
+2022-08-08 17:19:09.960 [job-0] INFO  HdfsWriter$Job - 由于您配置了writeMode append, 写入前不做清理工作, [/user/hive/warehouse/user_info/] 目录下写入相应文件名前缀  [user_info_data_3.txt] 的文件
+2022-08-08 17:19:09.960 [job-0] INFO  JobContainer - jobContainer starts to do split ...
+2022-08-08 17:19:09.960 [job-0] INFO  JobContainer - Job set Channel-Number to 2 channels.
+2022-08-08 17:19:09.961 [job-0] INFO  JobContainer - DataX Reader.Job [mysqlreader] splits to [1] tasks.
+2022-08-08 17:19:09.962 [job-0] INFO  HdfsWriter$Job - begin do split...
+2022-08-08 17:19:09.965 [job-0] INFO  HdfsWriter$Job - splited write file name:[hdfs://hadoop03:8020//user/hive/warehouse/user_info__d08e04a5_91f2_4610_ad2f_fec59a7bc14d/user_info_data_3.txt__b7ee6106_5144_4078_be54_6aed202d6de3]
+2022-08-08 17:19:09.965 [job-0] INFO  HdfsWriter$Job - end do split.
+2022-08-08 17:19:09.965 [job-0] INFO  JobContainer - DataX Writer.Job [hdfswriter] splits to [1] tasks.
+2022-08-08 17:19:09.973 [job-0] INFO  JobContainer - jobContainer starts to do schedule ...
+2022-08-08 17:19:09.975 [job-0] INFO  JobContainer - Scheduler starts [1] taskGroups.
+2022-08-08 17:19:09.976 [job-0] INFO  JobContainer - Running by standalone Mode.
+2022-08-08 17:19:09.979 [taskGroup-0] INFO  TaskGroupContainer - taskGroupId=[0] start [1] channels for [1] tasks.
+2022-08-08 17:19:09.983 [taskGroup-0] INFO  Channel - Channel set byte_speed_limit to -1, No bps activated.
+2022-08-08 17:19:09.983 [taskGroup-0] INFO  Channel - Channel set record_speed_limit to -1, No tps activated.
+2022-08-08 17:19:09.989 [taskGroup-0] INFO  TaskGroupContainer - taskGroup[0] taskId[0] attemptCount[1] is started
+2022-08-08 17:19:09.991 [0-0-0-reader] INFO  CommonRdbmsReader$Task - Begin to read record by Sql: [select * from gene.user_info_transfer
+] jdbcUrl:[jdbc:mysql://192.168.2.2:3306/gene?yearIsDateType=false&zeroDateTimeBehavior=convertToNull&tinyInt1isBit=false&rewriteBatchedStatements=true].
+2022-08-08 17:19:10.003 [0-0-0-writer] INFO  HdfsWriter$Task - begin do write...
+2022-08-08 17:19:10.003 [0-0-0-writer] INFO  HdfsWriter$Task - write to file : [hdfs://hadoop03:8020//user/hive/warehouse/user_info__d08e04a5_91f2_4610_ad2f_fec59a7bc14d/user_info_data_3.txt__b7ee6106_5144_4078_be54_6aed202d6de3]
+2022-08-08 17:19:10.006 [0-0-0-reader] INFO  CommonRdbmsReader$Task - Finished read record by Sql: [select * from gene.user_info_transfer
+] jdbcUrl:[jdbc:mysql://192.168.2.2:3306/gene?yearIsDateType=false&zeroDateTimeBehavior=convertToNull&tinyInt1isBit=false&rewriteBatchedStatements=true].
+2022-08-08 17:19:10.139 [0-0-0-writer] INFO  HdfsWriter$Task - end do write
+2022-08-08 17:19:10.189 [taskGroup-0] INFO  TaskGroupContainer - taskGroup[0] taskId[0] is successed, used[201]ms
+2022-08-08 17:19:10.190 [taskGroup-0] INFO  TaskGroupContainer - taskGroup[0] completed it's tasks.
+2022-08-08 17:19:19.989 [job-0] INFO  StandAloneJobContainerCommunicator - Total 8 records, 158 bytes | Speed 15B/s, 0 records/s | Error 0 records, 0 bytes |  All Task WaitWriterTime 0.000s |  All Task WaitReaderTime 0.000s | Percentage 100.00%
+2022-08-08 17:19:19.990 [job-0] INFO  AbstractScheduler - Scheduler accomplished all tasks.
+2022-08-08 17:19:19.991 [job-0] INFO  JobContainer - DataX Writer.Job [hdfswriter] do post work.
+2022-08-08 17:19:19.992 [job-0] INFO  HdfsWriter$Job - start rename file [hdfs://hadoop03:8020//user/hive/warehouse/user_info__d08e04a5_91f2_4610_ad2f_fec59a7bc14d/user_info_data_3.txt__b7ee6106_5144_4078_be54_6aed202d6de3] to file [hdfs://hadoop03:8020//user/hive/warehouse/user_info/user_info_data_3.txt__b7ee6106_5144_4078_be54_6aed202d6de3].
+2022-08-08 17:19:20.006 [job-0] INFO  HdfsWriter$Job - finish rename file [hdfs://hadoop03:8020//user/hive/warehouse/user_info__d08e04a5_91f2_4610_ad2f_fec59a7bc14d/user_info_data_3.txt__b7ee6106_5144_4078_be54_6aed202d6de3] to file [hdfs://hadoop03:8020//user/hive/warehouse/user_info/user_info_data_3.txt__b7ee6106_5144_4078_be54_6aed202d6de3].
+2022-08-08 17:19:20.006 [job-0] INFO  HdfsWriter$Job - start delete tmp dir [hdfs://hadoop03:8020/user/hive/warehouse/user_info__d08e04a5_91f2_4610_ad2f_fec59a7bc14d] .
+2022-08-08 17:19:20.018 [job-0] INFO  HdfsWriter$Job - finish delete tmp dir [hdfs://hadoop03:8020/user/hive/warehouse/user_info__d08e04a5_91f2_4610_ad2f_fec59a7bc14d] .
+2022-08-08 17:19:20.019 [job-0] INFO  JobContainer - DataX Reader.Job [mysqlreader] do post work.
+2022-08-08 17:19:20.019 [job-0] INFO  JobContainer - DataX jobId [0] completed successfully.
+2022-08-08 17:19:20.021 [job-0] INFO  HookInvoker - No hook invoked, because base dir not exists or is a file: /home/zhouze/PycharmProjects/yili/yili-portal/app/datax/hook
+2022-08-08 17:19:20.128 [job-0] INFO  JobContainer - 
+	 [total cpu info] => 
+		averageCpu                     | maxDeltaCpu                    | minDeltaCpu                    
+		-1.00%                         | -1.00%                         | -1.00%
+                        
+
+	 [total gc info] => 
+		 NAME                 | totalGCCount       | maxDeltaGCCount    | minDeltaGCCount    | totalGCTime        | maxDeltaGCTime     | minDeltaGCTime     
+		 G1 Young Generation  | 2                  | 2                  | 2                  | 0.014s             | 0.014s             | 0.014s             
+		 G1 Old Generation    | 0                  | 0                  | 0                  | 0.000s             | 0.000s             | 0.000s             
+
+2022-08-08 17:19:20.129 [job-0] INFO  JobContainer - PerfTrace not enable!
+2022-08-08 17:19:20.130 [job-0] INFO  StandAloneJobContainerCommunicator - Total 8 records, 158 bytes | Speed 15B/s, 0 records/s | Error 0 records, 0 bytes |  All Task WaitWriterTime 0.000s |  All Task WaitReaderTime 0.000s | Percentage 100.00%
+2022-08-08 17:19:20.132 [job-0] INFO  JobContainer - 
+任务启动时刻                    : 2022-08-08 17:19:09
+任务结束时刻                    : 2022-08-08 17:19:20
+任务总计耗时                    :                 10s
+任务平均流量                    :               15B/s
+记录写入速度                    :              0rec/s
+读出记录总数                    :                   8
+读写失败总数                    :                   0
+
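For reference, the successful 17:19 run above corresponds to the mysql2hive template with roughly the following parameter values, read back from the job config that DataX echoes at startup; this is a hypothetical reconstruction, and the password is masked in the log, so a stand-in is used here.

    # Parameter values implied by the 17:19:09 job config above (hypothetical
    # reconstruction; the real password appears only as ********* in the log).
    columns = [
        {"name": "user_id", "type": "string"},
        {"name": "area_id", "type": "string"},
        {"name": "age", "type": "int"},
        {"name": "occupation", "type": "string"},
    ]
    params = {
        "jdbcReader": "jdbc:mysql://192.168.2.2:3306/gene",
        "querySql": "select * from gene.user_info_transfer",
        "usernameReader": "root",
        "passwordReader": "<masked-in-log>",
        "WriterdefaultFS": "hdfs://hadoop03:8020/",
        "WriterfieldDelimiter": "\\t",   # keep escaped so the rendered JSON stays valid
        "WriterfileName": "user_info_data_3.txt",
        "WriterfileType": "text",
        "Writerpath": "/user/hive/warehouse/user_info/",
        "WriteMode": "append",
        "channel": "2",
    }

Fed through a renderer along the lines of the render_job sketch earlier, these values reproduce the reader and writer blocks printed at the start of that log.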

+ 125 - 0
app/datax/log/2022-08-08/2hive1659953224_json-18_07_04.969.log

@@ -0,0 +1,125 @@
+2022-08-08 18:07:05.138 [main] INFO  VMInfo - VMInfo# operatingSystem class => com.sun.management.internal.OperatingSystemImpl
+2022-08-08 18:07:05.143 [main] INFO  Engine - the machine info  => 
+
+	osInfo:	Ubuntu 11 11.0.16+8-post-Ubuntu-0ubuntu120.04
+	jvmInfo:	Linux amd64 5.13.0-52-generic
+	cpu num:	8
+
+	totalPhysicalMemory:	-0.00G
+	freePhysicalMemory:	-0.00G
+	maxFileDescriptorCount:	-1
+	currentOpenFileDescriptorCount:	-1
+
+	GC Names	[G1 Young Generation, G1 Old Generation]
+
+	MEMORY_NAME                    | allocation_size                | init_size                      
+	CodeHeap 'profiled nmethods'   | 117.21MB                       | 2.44MB                         
+	G1 Old Gen                     | 1,024.00MB                     | 970.00MB                       
+	G1 Survivor Space              | -0.00MB                        | 0.00MB                         
+	CodeHeap 'non-profiled nmethods' | 117.22MB                       | 2.44MB                         
+	Compressed Class Space         | 1,024.00MB                     | 0.00MB                         
+	Metaspace                      | -0.00MB                        | 0.00MB                         
+	G1 Eden Space                  | -0.00MB                        | 54.00MB                        
+	CodeHeap 'non-nmethods'        | 5.57MB                         | 2.44MB                         
+
+
+2022-08-08 18:07:05.154 [main] INFO  Engine - 
+{
+	"content":[
+		{
+			"reader":{
+				"name":"mysqlreader",
+				"parameter":{
+					"connection":[
+						{
+							"jdbcUrl":[
+								"jdbc:mysql://192.168.2.2:3306/gene"
+							],
+							"querySql":[
+								"select * from gene.test"
+							]
+						}
+					],
+					"password":"*********",
+					"username":"root"
+				}
+			},
+			"writer":{
+				"name":"mysqlwriter",
+				"parameter":{
+					"column":[
+						{
+							"name":"user_id",
+							"type":"string"
+						},
+						{
+							"name":"area_id",
+							"type":"string"
+						},
+						{
+							"name":"age",
+							"type":"int"
+						},
+						{
+							"name":"occupation",
+							"type":"string"
+						}
+					],
+					"connection":[
+						{
+							"jdbcUrl":"${jdbcWriter}",
+							"table":[
+								"${table}"
+							]
+						}
+					],
+					"password":"*****************",
+					"session":[],
+					"username":"${usernameWriter}",
+					"writeMode":"insert"
+				}
+			}
+		}
+	],
+	"setting":{
+		"speed":{
+			"channel":"2"
+		}
+	}
+}
+
+2022-08-08 18:07:05.164 [main] WARN  Engine - prioriy set to 0, because NumberFormatException, the value is: null
+2022-08-08 18:07:05.165 [main] INFO  PerfTrace - PerfTrace traceId=job_-1, isEnable=false, priority=0
+2022-08-08 18:07:05.165 [main] INFO  JobContainer - DataX jobContainer starts job.
+2022-08-08 18:07:05.167 [main] INFO  JobContainer - Set jobId = 0
+2022-08-08 18:07:05.416 [job-0] INFO  OriginalConfPretreatmentUtil - Available jdbcUrl:jdbc:mysql://192.168.2.2:3306/gene?yearIsDateType=false&zeroDateTimeBehavior=convertToNull&tinyInt1isBit=false&rewriteBatchedStatements=true.
+2022-08-08 18:07:05.439 [job-0] ERROR RetryUtil - Exception when calling callable, 异常Msg:Code:[DBUtilErrorCode-10], Description:[连接数据库失败. 请检查您的 账号、密码、数据库名称、IP、Port或者向 DBA 寻求帮助(注意网络环境).].  -  具体错误信息为:java.sql.SQLException: No suitable driver found for ${jdbcWriter}?yearIsDateType=false&zeroDateTimeBehavior=convertToNull&tinyInt1isBit=false&rewriteBatchedStatements=true
+com.alibaba.datax.common.exception.DataXException: Code:[DBUtilErrorCode-10], Description:[连接数据库失败. 请检查您的 账号、密码、数据库名称、IP、Port或者向 DBA 寻求帮助(注意网络环境).].  -  具体错误信息为:java.sql.SQLException: No suitable driver found for ${jdbcWriter}?yearIsDateType=false&zeroDateTimeBehavior=convertToNull&tinyInt1isBit=false&rewriteBatchedStatements=true
+	at com.alibaba.datax.common.exception.DataXException.asDataXException(DataXException.java:26) ~[datax-common-0.0.1-SNAPSHOT.jar:na]
+	at com.alibaba.datax.plugin.rdbms.util.RdbmsException.asConnException(RdbmsException.java:23) ~[plugin-rdbms-util-0.0.1-SNAPSHOT.jar:na]
+	at com.alibaba.datax.plugin.rdbms.util.DBUtil.connect(DBUtil.java:394) ~[plugin-rdbms-util-0.0.1-SNAPSHOT.jar:na]
+	at com.alibaba.datax.plugin.rdbms.util.DBUtil.connect(DBUtil.java:384) ~[plugin-rdbms-util-0.0.1-SNAPSHOT.jar:na]
+	at com.alibaba.datax.plugin.rdbms.util.DBUtil.access$000(DBUtil.java:22) ~[plugin-rdbms-util-0.0.1-SNAPSHOT.jar:na]
+	at com.alibaba.datax.plugin.rdbms.util.DBUtil$3.call(DBUtil.java:322) ~[plugin-rdbms-util-0.0.1-SNAPSHOT.jar:na]
+	at com.alibaba.datax.plugin.rdbms.util.DBUtil$3.call(DBUtil.java:319) ~[plugin-rdbms-util-0.0.1-SNAPSHOT.jar:na]
+	at com.alibaba.datax.common.util.RetryUtil$Retry.call(RetryUtil.java:164) ~[datax-common-0.0.1-SNAPSHOT.jar:na]
+	at com.alibaba.datax.common.util.RetryUtil$Retry.doRetry(RetryUtil.java:111) ~[datax-common-0.0.1-SNAPSHOT.jar:na]
+	at com.alibaba.datax.common.util.RetryUtil.executeWithRetry(RetryUtil.java:30) ~[datax-common-0.0.1-SNAPSHOT.jar:na]
+	at com.alibaba.datax.plugin.rdbms.util.DBUtil.getConnection(DBUtil.java:319) ~[plugin-rdbms-util-0.0.1-SNAPSHOT.jar:na]
+	at com.alibaba.datax.plugin.rdbms.util.DBUtil.getConnection(DBUtil.java:303) ~[plugin-rdbms-util-0.0.1-SNAPSHOT.jar:na]
+	at com.alibaba.datax.plugin.rdbms.util.JdbcConnectionFactory.getConnecttion(JdbcConnectionFactory.java:27) ~[plugin-rdbms-util-0.0.1-SNAPSHOT.jar:na]
+	at com.alibaba.datax.plugin.rdbms.writer.util.OriginalConfPretreatmentUtil.dealColumnConf(OriginalConfPretreatmentUtil.java:105) ~[plugin-rdbms-util-0.0.1-SNAPSHOT.jar:na]
+	at com.alibaba.datax.plugin.rdbms.writer.util.OriginalConfPretreatmentUtil.dealColumnConf(OriginalConfPretreatmentUtil.java:140) ~[plugin-rdbms-util-0.0.1-SNAPSHOT.jar:na]
+	at com.alibaba.datax.plugin.rdbms.writer.util.OriginalConfPretreatmentUtil.doPretreatment(OriginalConfPretreatmentUtil.java:35) ~[plugin-rdbms-util-0.0.1-SNAPSHOT.jar:na]
+	at com.alibaba.datax.plugin.rdbms.writer.CommonRdbmsWriter$Job.init(CommonRdbmsWriter.java:41) ~[plugin-rdbms-util-0.0.1-SNAPSHOT.jar:na]
+	at com.alibaba.datax.plugin.writer.mysqlwriter.MysqlWriter$Job.init(MysqlWriter.java:31) ~[mysqlwriter-0.0.1-SNAPSHOT.jar:na]
+	at com.alibaba.datax.core.job.JobContainer.initJobWriter(JobContainer.java:704) ~[datax-core-0.0.1-SNAPSHOT.jar:na]
+	at com.alibaba.datax.core.job.JobContainer.init(JobContainer.java:304) ~[datax-core-0.0.1-SNAPSHOT.jar:na]
+	at com.alibaba.datax.core.job.JobContainer.start(JobContainer.java:113) ~[datax-core-0.0.1-SNAPSHOT.jar:na]
+	at com.alibaba.datax.core.Engine.start(Engine.java:92) ~[datax-core-0.0.1-SNAPSHOT.jar:na]
+	at com.alibaba.datax.core.Engine.entry(Engine.java:171) ~[datax-core-0.0.1-SNAPSHOT.jar:na]
+	at com.alibaba.datax.core.Engine.main(Engine.java:204) ~[datax-core-0.0.1-SNAPSHOT.jar:na]
+2022-08-08 18:07:06.441 [job-0] ERROR RetryUtil - Exception when calling callable, 即将尝试执行第1次重试.本次重试计划等待[1000]ms,实际等待[1000]ms, 异常Msg:[Code:[DBUtilErrorCode-10], Description:[连接数据库失败. 请检查您的 账号、密码、数据库名称、IP、Port或者向 DBA 寻求帮助(注意网络环境).].  -  具体错误信息为:java.sql.SQLException: No suitable driver found for ${jdbcWriter}?yearIsDateType=false&zeroDateTimeBehavior=convertToNull&tinyInt1isBit=false&rewriteBatchedStatements=true]
+2022-08-08 18:07:08.443 [job-0] ERROR RetryUtil - Exception when calling callable, 即将尝试执行第2次重试.本次重试计划等待[2000]ms,实际等待[2000]ms, 异常Msg:[Code:[DBUtilErrorCode-10], Description:[连接数据库失败. 请检查您的 账号、密码、数据库名称、IP、Port或者向 DBA 寻求帮助(注意网络环境).].  -  具体错误信息为:java.sql.SQLException: No suitable driver found for ${jdbcWriter}?yearIsDateType=false&zeroDateTimeBehavior=convertToNull&tinyInt1isBit=false&rewriteBatchedStatements=true]
+2022-08-08 18:07:12.445 [job-0] ERROR RetryUtil - Exception when calling callable, 即将尝试执行第3次重试.本次重试计划等待[4000]ms,实际等待[4000]ms, 异常Msg:[Code:[DBUtilErrorCode-10], Description:[连接数据库失败. 请检查您的 账号、密码、数据库名称、IP、Port或者向 DBA 寻求帮助(注意网络环境).].  -  具体错误信息为:java.sql.SQLException: No suitable driver found for ${jdbcWriter}?yearIsDateType=false&zeroDateTimeBehavior=convertToNull&tinyInt1isBit=false&rewriteBatchedStatements=true]
+2022-08-08 18:07:20.447 [job-0] ERROR RetryUtil - Exception when calling callable, 即将尝试执行第4次重试.本次重试计划等待[8000]ms,实际等待[8000]ms, 异常Msg:[Code:[DBUtilErrorCode-10], Description:[连接数据库失败. 请检查您的 账号、密码、数据库名称、IP、Port或者向 DBA 寻求帮助(注意网络环境).].  -  具体错误信息为:java.sql.SQLException: No suitable driver found for ${jdbcWriter}?yearIsDateType=false&zeroDateTimeBehavior=convertToNull&tinyInt1isBit=false&rewriteBatchedStatements=true]

+ 149 - 0
app/datax/log/2022-08-08/2hive1659953258_json-18_07_38.612.log

@@ -0,0 +1,149 @@
+2022-08-08 18:07:38.828 [main] INFO  VMInfo - VMInfo# operatingSystem class => com.sun.management.internal.OperatingSystemImpl
+2022-08-08 18:07:38.836 [main] INFO  Engine - the machine info  => 
+
+	osInfo:	Ubuntu 11 11.0.16+8-post-Ubuntu-0ubuntu120.04
+	jvmInfo:	Linux amd64 5.13.0-52-generic
+	cpu num:	8
+
+	totalPhysicalMemory:	-0.00G
+	freePhysicalMemory:	-0.00G
+	maxFileDescriptorCount:	-1
+	currentOpenFileDescriptorCount:	-1
+
+	GC Names	[G1 Young Generation, G1 Old Generation]
+
+	MEMORY_NAME                    | allocation_size                | init_size                      
+	CodeHeap 'profiled nmethods'   | 117.21MB                       | 2.44MB                         
+	G1 Old Gen                     | 1,024.00MB                     | 970.00MB                       
+	G1 Survivor Space              | -0.00MB                        | 0.00MB                         
+	CodeHeap 'non-profiled nmethods' | 117.22MB                       | 2.44MB                         
+	Compressed Class Space         | 1,024.00MB                     | 0.00MB                         
+	Metaspace                      | -0.00MB                        | 0.00MB                         
+	G1 Eden Space                  | -0.00MB                        | 54.00MB                        
+	CodeHeap 'non-nmethods'        | 5.57MB                         | 2.44MB                         
+
+
+2022-08-08 18:07:38.847 [main] INFO  Engine - 
+{
+	"content":[
+		{
+			"reader":{
+				"name":"mysqlreader",
+				"parameter":{
+					"connection":[
+						{
+							"jdbcUrl":[
+								"jdbc:mysql://192.168.2.2:3306/gene"
+							],
+							"querySql":[
+								"select * from gene.test"
+							]
+						}
+					],
+					"password":"*********",
+					"username":"root"
+				}
+			},
+			"writer":{
+				"name":"hdfswriter",
+				"parameter":{
+					"column":[
+						{
+							"name":"user_id",
+							"type":"string"
+						},
+						{
+							"name":"area_id",
+							"type":"string"
+						},
+						{
+							"name":"age",
+							"type":"int"
+						},
+						{
+							"name":"occupation",
+							"type":"string"
+						}
+					],
+					"compress":"",
+					"defaultFS":"hdfs://hadoop03:8020/",
+					"fieldDelimiter":"\t",
+					"fileName":"user_info_data_4.txt",
+					"fileType":"text",
+					"path":"/user/hive/warehouse/user_info/",
+					"writeMode":"append"
+				}
+			}
+		}
+	],
+	"setting":{
+		"speed":{
+			"channel":"2"
+		}
+	}
+}
+
+2022-08-08 18:07:38.856 [main] WARN  Engine - prioriy set to 0, because NumberFormatException, the value is: null
+2022-08-08 18:07:38.857 [main] INFO  PerfTrace - PerfTrace traceId=job_-1, isEnable=false, priority=0
+2022-08-08 18:07:38.858 [main] INFO  JobContainer - DataX jobContainer starts job.
+2022-08-08 18:07:38.859 [main] INFO  JobContainer - Set jobId = 0
+2022-08-08 18:07:39.062 [job-0] INFO  OriginalConfPretreatmentUtil - Available jdbcUrl:jdbc:mysql://192.168.2.2:3306/gene?yearIsDateType=false&zeroDateTimeBehavior=convertToNull&tinyInt1isBit=false&rewriteBatchedStatements=true.
+2022-08-08 18:07:39.701 [job-0] INFO  JobContainer - jobContainer starts to do prepare ...
+2022-08-08 18:07:39.701 [job-0] INFO  JobContainer - DataX Reader.Job [mysqlreader] do prepare work .
+2022-08-08 18:07:39.701 [job-0] INFO  JobContainer - DataX Writer.Job [hdfswriter] do prepare work .
+2022-08-08 18:07:39.767 [job-0] INFO  HdfsWriter$Job - 由于您配置了writeMode append, 写入前不做清理工作, [/user/hive/warehouse/user_info/] 目录下写入相应文件名前缀  [user_info_data_4.txt] 的文件
+2022-08-08 18:07:39.767 [job-0] INFO  JobContainer - jobContainer starts to do split ...
+2022-08-08 18:07:39.767 [job-0] INFO  JobContainer - Job set Channel-Number to 2 channels.
+2022-08-08 18:07:39.768 [job-0] INFO  JobContainer - DataX Reader.Job [mysqlreader] splits to [1] tasks.
+2022-08-08 18:07:39.769 [job-0] INFO  HdfsWriter$Job - begin do split...
+2022-08-08 18:07:39.772 [job-0] INFO  HdfsWriter$Job - splited write file name:[hdfs://hadoop03:8020//user/hive/warehouse/user_info__c24e31a1_36fc_4584_be8d_033ad6f586c0/user_info_data_4.txt__eddd8ab0_6883_49a1_8bbe_4e09d6d9fe0c]
+2022-08-08 18:07:39.772 [job-0] INFO  HdfsWriter$Job - end do split.
+2022-08-08 18:07:39.772 [job-0] INFO  JobContainer - DataX Writer.Job [hdfswriter] splits to [1] tasks.
+2022-08-08 18:07:39.781 [job-0] INFO  JobContainer - jobContainer starts to do schedule ...
+2022-08-08 18:07:39.783 [job-0] INFO  JobContainer - Scheduler starts [1] taskGroups.
+2022-08-08 18:07:39.784 [job-0] INFO  JobContainer - Running by standalone Mode.
+2022-08-08 18:07:39.787 [taskGroup-0] INFO  TaskGroupContainer - taskGroupId=[0] start [1] channels for [1] tasks.
+2022-08-08 18:07:39.792 [taskGroup-0] INFO  Channel - Channel set byte_speed_limit to -1, No bps activated.
+2022-08-08 18:07:39.792 [taskGroup-0] INFO  Channel - Channel set record_speed_limit to -1, No tps activated.
+2022-08-08 18:07:39.798 [taskGroup-0] INFO  TaskGroupContainer - taskGroup[0] taskId[0] attemptCount[1] is started
+2022-08-08 18:07:39.800 [0-0-0-reader] INFO  CommonRdbmsReader$Task - Begin to read record by Sql: [select * from gene.test
+] jdbcUrl:[jdbc:mysql://192.168.2.2:3306/gene?yearIsDateType=false&zeroDateTimeBehavior=convertToNull&tinyInt1isBit=false&rewriteBatchedStatements=true].
+2022-08-08 18:07:39.815 [0-0-0-writer] INFO  HdfsWriter$Task - begin do write...
+2022-08-08 18:07:39.816 [0-0-0-writer] INFO  HdfsWriter$Task - write to file : [hdfs://hadoop03:8020//user/hive/warehouse/user_info__c24e31a1_36fc_4584_be8d_033ad6f586c0/user_info_data_4.txt__eddd8ab0_6883_49a1_8bbe_4e09d6d9fe0c]
+2022-08-08 18:07:39.818 [0-0-0-reader] INFO  CommonRdbmsReader$Task - Finished read record by Sql: [select * from gene.test
+] jdbcUrl:[jdbc:mysql://192.168.2.2:3306/gene?yearIsDateType=false&zeroDateTimeBehavior=convertToNull&tinyInt1isBit=false&rewriteBatchedStatements=true].
+2022-08-08 18:07:39.952 [0-0-0-writer] INFO  HdfsWriter$Task - end do write
+2022-08-08 18:07:39.998 [taskGroup-0] INFO  TaskGroupContainer - taskGroup[0] taskId[0] is successed, used[201]ms
+2022-08-08 18:07:39.999 [taskGroup-0] INFO  TaskGroupContainer - taskGroup[0] completed it's tasks.
+2022-08-08 18:07:49.797 [job-0] INFO  StandAloneJobContainerCommunicator - Total 3 records, 14 bytes | Speed 1B/s, 0 records/s | Error 0 records, 0 bytes |  All Task WaitWriterTime 0.000s |  All Task WaitReaderTime 0.000s | Percentage 100.00%
+2022-08-08 18:07:49.798 [job-0] INFO  AbstractScheduler - Scheduler accomplished all tasks.
+2022-08-08 18:07:49.799 [job-0] INFO  JobContainer - DataX Writer.Job [hdfswriter] do post work.
+2022-08-08 18:07:49.800 [job-0] INFO  HdfsWriter$Job - start rename file [hdfs://hadoop03:8020//user/hive/warehouse/user_info__c24e31a1_36fc_4584_be8d_033ad6f586c0/user_info_data_4.txt__eddd8ab0_6883_49a1_8bbe_4e09d6d9fe0c] to file [hdfs://hadoop03:8020//user/hive/warehouse/user_info/user_info_data_4.txt__eddd8ab0_6883_49a1_8bbe_4e09d6d9fe0c].
+2022-08-08 18:07:49.813 [job-0] INFO  HdfsWriter$Job - finish rename file [hdfs://hadoop03:8020//user/hive/warehouse/user_info__c24e31a1_36fc_4584_be8d_033ad6f586c0/user_info_data_4.txt__eddd8ab0_6883_49a1_8bbe_4e09d6d9fe0c] to file [hdfs://hadoop03:8020//user/hive/warehouse/user_info/user_info_data_4.txt__eddd8ab0_6883_49a1_8bbe_4e09d6d9fe0c].
+2022-08-08 18:07:49.814 [job-0] INFO  HdfsWriter$Job - start delete tmp dir [hdfs://hadoop03:8020/user/hive/warehouse/user_info__c24e31a1_36fc_4584_be8d_033ad6f586c0] .
+2022-08-08 18:07:49.826 [job-0] INFO  HdfsWriter$Job - finish delete tmp dir [hdfs://hadoop03:8020/user/hive/warehouse/user_info__c24e31a1_36fc_4584_be8d_033ad6f586c0] .
+2022-08-08 18:07:49.827 [job-0] INFO  JobContainer - DataX Reader.Job [mysqlreader] do post work.
+2022-08-08 18:07:49.828 [job-0] INFO  JobContainer - DataX jobId [0] completed successfully.
+2022-08-08 18:07:49.830 [job-0] INFO  HookInvoker - No hook invoked, because base dir not exists or is a file: /home/zhouze/PycharmProjects/yili/yili-portal/app/datax/hook
+2022-08-08 18:07:49.937 [job-0] INFO  JobContainer - 
+	 [total cpu info] => 
+		averageCpu                     | maxDeltaCpu                    | minDeltaCpu                    
+		-1.00%                         | -1.00%                         | -1.00%
+                        
+
+	 [total gc info] => 
+		 NAME                 | totalGCCount       | maxDeltaGCCount    | minDeltaGCCount    | totalGCTime        | maxDeltaGCTime     | minDeltaGCTime     
+		 G1 Young Generation  | 2                  | 2                  | 2                  | 0.031s             | 0.031s             | 0.031s             
+		 G1 Old Generation    | 0                  | 0                  | 0                  | 0.000s             | 0.000s             | 0.000s             
+
+2022-08-08 18:07:49.938 [job-0] INFO  JobContainer - PerfTrace not enable!
+2022-08-08 18:07:49.939 [job-0] INFO  StandAloneJobContainerCommunicator - Total 3 records, 14 bytes | Speed 1B/s, 0 records/s | Error 0 records, 0 bytes |  All Task WaitWriterTime 0.000s |  All Task WaitReaderTime 0.000s | Percentage 100.00%
+2022-08-08 18:07:49.942 [job-0] INFO  JobContainer - 
+任务启动时刻                    : 2022-08-08 18:07:38
+任务结束时刻                    : 2022-08-08 18:07:49
+任务总计耗时                    :                 11s
+任务平均流量                    :                1B/s
+记录写入速度                    :              0rec/s
+读出记录总数                    :                   3
+读写失败总数                    :                   0
+

+ 149 - 0
app/datax/log/2022-08-08/2hive1659953282_json-18_08_02.318.log

@@ -0,0 +1,149 @@
+2022-08-08 18:08:02.508 [main] INFO  VMInfo - VMInfo# operatingSystem class => com.sun.management.internal.OperatingSystemImpl
+2022-08-08 18:08:02.514 [main] INFO  Engine - the machine info  => 
+
+	osInfo:	Ubuntu 11 11.0.16+8-post-Ubuntu-0ubuntu120.04
+	jvmInfo:	Linux amd64 5.13.0-52-generic
+	cpu num:	8
+
+	totalPhysicalMemory:	-0.00G
+	freePhysicalMemory:	-0.00G
+	maxFileDescriptorCount:	-1
+	currentOpenFileDescriptorCount:	-1
+
+	GC Names	[G1 Young Generation, G1 Old Generation]
+
+	MEMORY_NAME                    | allocation_size                | init_size                      
+	CodeHeap 'profiled nmethods'   | 117.21MB                       | 2.44MB                         
+	G1 Old Gen                     | 1,024.00MB                     | 970.00MB                       
+	G1 Survivor Space              | -0.00MB                        | 0.00MB                         
+	CodeHeap 'non-profiled nmethods' | 117.22MB                       | 2.44MB                         
+	Compressed Class Space         | 1,024.00MB                     | 0.00MB                         
+	Metaspace                      | -0.00MB                        | 0.00MB                         
+	G1 Eden Space                  | -0.00MB                        | 54.00MB                        
+	CodeHeap 'non-nmethods'        | 5.57MB                         | 2.44MB                         
+
+
+2022-08-08 18:08:02.524 [main] INFO  Engine - 
+{
+	"content":[
+		{
+			"reader":{
+				"name":"mysqlreader",
+				"parameter":{
+					"connection":[
+						{
+							"jdbcUrl":[
+								"jdbc:mysql://192.168.2.2:3306/gene"
+							],
+							"querySql":[
+								"select * from gene.test"
+							]
+						}
+					],
+					"password":"*********",
+					"username":"root"
+				}
+			},
+			"writer":{
+				"name":"hdfswriter",
+				"parameter":{
+					"column":[
+						{
+							"name":"user_id",
+							"type":"string"
+						},
+						{
+							"name":"area_id",
+							"type":"string"
+						},
+						{
+							"name":"age",
+							"type":"int"
+						},
+						{
+							"name":"occupation",
+							"type":"string"
+						}
+					],
+					"compress":"",
+					"defaultFS":"hdfs://hadoop03:8020/",
+					"fieldDelimiter":"\t",
+					"fileName":"user_info_data_4.txt",
+					"fileType":"text",
+					"path":"/user/hive/warehouse/user_info/",
+					"writeMode":"append"
+				}
+			}
+		}
+	],
+	"setting":{
+		"speed":{
+			"channel":"2"
+		}
+	}
+}
+
+2022-08-08 18:08:02.534 [main] WARN  Engine - prioriy set to 0, because NumberFormatException, the value is: null
+2022-08-08 18:08:02.535 [main] INFO  PerfTrace - PerfTrace traceId=job_-1, isEnable=false, priority=0
+2022-08-08 18:08:02.535 [main] INFO  JobContainer - DataX jobContainer starts job.
+2022-08-08 18:08:02.536 [main] INFO  JobContainer - Set jobId = 0
+2022-08-08 18:08:02.739 [job-0] INFO  OriginalConfPretreatmentUtil - Available jdbcUrl:jdbc:mysql://192.168.2.2:3306/gene?yearIsDateType=false&zeroDateTimeBehavior=convertToNull&tinyInt1isBit=false&rewriteBatchedStatements=true.
+2022-08-08 18:08:03.321 [job-0] INFO  JobContainer - jobContainer starts to do prepare ...
+2022-08-08 18:08:03.321 [job-0] INFO  JobContainer - DataX Reader.Job [mysqlreader] do prepare work .
+2022-08-08 18:08:03.321 [job-0] INFO  JobContainer - DataX Writer.Job [hdfswriter] do prepare work .
+2022-08-08 18:08:03.367 [job-0] INFO  HdfsWriter$Job - 由于您配置了writeMode append, 写入前不做清理工作, [/user/hive/warehouse/user_info/] 目录下写入相应文件名前缀  [user_info_data_4.txt] 的文件
+2022-08-08 18:08:03.368 [job-0] INFO  JobContainer - jobContainer starts to do split ...
+2022-08-08 18:08:03.368 [job-0] INFO  JobContainer - Job set Channel-Number to 2 channels.
+2022-08-08 18:08:03.369 [job-0] INFO  JobContainer - DataX Reader.Job [mysqlreader] splits to [1] tasks.
+2022-08-08 18:08:03.369 [job-0] INFO  HdfsWriter$Job - begin do split...
+2022-08-08 18:08:03.372 [job-0] INFO  HdfsWriter$Job - splited write file name:[hdfs://hadoop03:8020//user/hive/warehouse/user_info__6a13fbbe_487f_4c9f_9243_fdb06295c9ac/user_info_data_4.txt__ca225bcb_69df_4820_8201_6af8a9fda36d]
+2022-08-08 18:08:03.373 [job-0] INFO  HdfsWriter$Job - end do split.
+2022-08-08 18:08:03.373 [job-0] INFO  JobContainer - DataX Writer.Job [hdfswriter] splits to [1] tasks.
+2022-08-08 18:08:03.381 [job-0] INFO  JobContainer - jobContainer starts to do schedule ...
+2022-08-08 18:08:03.383 [job-0] INFO  JobContainer - Scheduler starts [1] taskGroups.
+2022-08-08 18:08:03.384 [job-0] INFO  JobContainer - Running by standalone Mode.
+2022-08-08 18:08:03.387 [taskGroup-0] INFO  TaskGroupContainer - taskGroupId=[0] start [1] channels for [1] tasks.
+2022-08-08 18:08:03.391 [taskGroup-0] INFO  Channel - Channel set byte_speed_limit to -1, No bps activated.
+2022-08-08 18:08:03.391 [taskGroup-0] INFO  Channel - Channel set record_speed_limit to -1, No tps activated.
+2022-08-08 18:08:03.400 [taskGroup-0] INFO  TaskGroupContainer - taskGroup[0] taskId[0] attemptCount[1] is started
+2022-08-08 18:08:03.402 [0-0-0-reader] INFO  CommonRdbmsReader$Task - Begin to read record by Sql: [select * from gene.test
+] jdbcUrl:[jdbc:mysql://192.168.2.2:3306/gene?yearIsDateType=false&zeroDateTimeBehavior=convertToNull&tinyInt1isBit=false&rewriteBatchedStatements=true].
+2022-08-08 18:08:03.417 [0-0-0-writer] INFO  HdfsWriter$Task - begin do write...
+2022-08-08 18:08:03.417 [0-0-0-reader] INFO  CommonRdbmsReader$Task - Finished read record by Sql: [select * from gene.test
+] jdbcUrl:[jdbc:mysql://192.168.2.2:3306/gene?yearIsDateType=false&zeroDateTimeBehavior=convertToNull&tinyInt1isBit=false&rewriteBatchedStatements=true].
+2022-08-08 18:08:03.417 [0-0-0-writer] INFO  HdfsWriter$Task - write to file : [hdfs://hadoop03:8020//user/hive/warehouse/user_info__6a13fbbe_487f_4c9f_9243_fdb06295c9ac/user_info_data_4.txt__ca225bcb_69df_4820_8201_6af8a9fda36d]
+2022-08-08 18:08:03.567 [0-0-0-writer] INFO  HdfsWriter$Task - end do write
+2022-08-08 18:08:03.601 [taskGroup-0] INFO  TaskGroupContainer - taskGroup[0] taskId[0] is successed, used[202]ms
+2022-08-08 18:08:03.601 [taskGroup-0] INFO  TaskGroupContainer - taskGroup[0] completed it's tasks.
+2022-08-08 18:08:13.398 [job-0] INFO  StandAloneJobContainerCommunicator - Total 3 records, 14 bytes | Speed 1B/s, 0 records/s | Error 0 records, 0 bytes |  All Task WaitWriterTime 0.000s |  All Task WaitReaderTime 0.000s | Percentage 100.00%
+2022-08-08 18:08:13.399 [job-0] INFO  AbstractScheduler - Scheduler accomplished all tasks.
+2022-08-08 18:08:13.400 [job-0] INFO  JobContainer - DataX Writer.Job [hdfswriter] do post work.
+2022-08-08 18:08:13.401 [job-0] INFO  HdfsWriter$Job - start rename file [hdfs://hadoop03:8020//user/hive/warehouse/user_info__6a13fbbe_487f_4c9f_9243_fdb06295c9ac/user_info_data_4.txt__ca225bcb_69df_4820_8201_6af8a9fda36d] to file [hdfs://hadoop03:8020//user/hive/warehouse/user_info/user_info_data_4.txt__ca225bcb_69df_4820_8201_6af8a9fda36d].
+2022-08-08 18:08:13.429 [job-0] INFO  HdfsWriter$Job - finish rename file [hdfs://hadoop03:8020//user/hive/warehouse/user_info__6a13fbbe_487f_4c9f_9243_fdb06295c9ac/user_info_data_4.txt__ca225bcb_69df_4820_8201_6af8a9fda36d] to file [hdfs://hadoop03:8020//user/hive/warehouse/user_info/user_info_data_4.txt__ca225bcb_69df_4820_8201_6af8a9fda36d].
+2022-08-08 18:08:13.429 [job-0] INFO  HdfsWriter$Job - start delete tmp dir [hdfs://hadoop03:8020/user/hive/warehouse/user_info__6a13fbbe_487f_4c9f_9243_fdb06295c9ac] .
+2022-08-08 18:08:13.441 [job-0] INFO  HdfsWriter$Job - finish delete tmp dir [hdfs://hadoop03:8020/user/hive/warehouse/user_info__6a13fbbe_487f_4c9f_9243_fdb06295c9ac] .
+2022-08-08 18:08:13.442 [job-0] INFO  JobContainer - DataX Reader.Job [mysqlreader] do post work.
+2022-08-08 18:08:13.443 [job-0] INFO  JobContainer - DataX jobId [0] completed successfully.
+2022-08-08 18:08:13.445 [job-0] INFO  HookInvoker - No hook invoked, because base dir not exists or is a file: /home/zhouze/PycharmProjects/yili/yili-portal/app/datax/hook
+2022-08-08 18:08:13.552 [job-0] INFO  JobContainer - 
+	 [total cpu info] => 
+		averageCpu                     | maxDeltaCpu                    | minDeltaCpu                    
+		-1.00%                         | -1.00%                         | -1.00%
+                        
+
+	 [total gc info] => 
+		 NAME                 | totalGCCount       | maxDeltaGCCount    | minDeltaGCCount    | totalGCTime        | maxDeltaGCTime     | minDeltaGCTime     
+		 G1 Young Generation  | 2                  | 2                  | 2                  | 0.016s             | 0.016s             | 0.016s             
+		 G1 Old Generation    | 0                  | 0                  | 0                  | 0.000s             | 0.000s             | 0.000s             
+
+2022-08-08 18:08:13.553 [job-0] INFO  JobContainer - PerfTrace not enable!
+2022-08-08 18:08:13.554 [job-0] INFO  StandAloneJobContainerCommunicator - Total 3 records, 14 bytes | Speed 1B/s, 0 records/s | Error 0 records, 0 bytes |  All Task WaitWriterTime 0.000s |  All Task WaitReaderTime 0.000s | Percentage 100.00%
+2022-08-08 18:08:13.557 [job-0] INFO  JobContainer - 
+任务启动时刻                    : 2022-08-08 18:08:02
+任务结束时刻                    : 2022-08-08 18:08:13
+任务总计耗时                    :                 11s
+任务平均流量                    :                1B/s
+记录写入速度                    :              0rec/s
+读出记录总数                    :                   3
+读写失败总数                    :                   0
+

+ 165 - 0
app/datax/log/2022-08-08/mysql1659946080_json-16_08_00.769.log

@@ -0,0 +1,165 @@
+2022-08-08 16:08:00.926 [main] INFO  VMInfo - VMInfo# operatingSystem class => com.sun.management.internal.OperatingSystemImpl
+2022-08-08 16:08:00.931 [main] INFO  Engine - the machine info  => 
+
+	osInfo:	Ubuntu 11 11.0.16+8-post-Ubuntu-0ubuntu120.04
+	jvmInfo:	Linux amd64 5.13.0-52-generic
+	cpu num:	8
+
+	totalPhysicalMemory:	-0.00G
+	freePhysicalMemory:	-0.00G
+	maxFileDescriptorCount:	-1
+	currentOpenFileDescriptorCount:	-1
+
+	GC Names	[G1 Young Generation, G1 Old Generation]
+
+	MEMORY_NAME                    | allocation_size                | init_size                      
+	CodeHeap 'profiled nmethods'   | 117.21MB                       | 2.44MB                         
+	G1 Old Gen                     | 1,024.00MB                     | 970.00MB                       
+	G1 Survivor Space              | -0.00MB                        | 0.00MB                         
+	CodeHeap 'non-profiled nmethods' | 117.22MB                       | 2.44MB                         
+	Compressed Class Space         | 1,024.00MB                     | 0.00MB                         
+	Metaspace                      | -0.00MB                        | 0.00MB                         
+	G1 Eden Space                  | -0.00MB                        | 54.00MB                        
+	CodeHeap 'non-nmethods'        | 5.57MB                         | 2.44MB                         
+
+
+2022-08-08 16:08:00.940 [main] INFO  Engine - 
+{
+	"content":[
+		{
+			"reader":{
+				"name":"hdfsreader",
+				"parameter":{
+					"column":[
+						"*"
+					],
+					"defaultFS":"hdfs://hadoop03:8020/",
+					"encoding":"UTF-8",
+					"fieldDelimiter":"\t",
+					"fileType":"text",
+					"path":"/user/hive/warehouse/user_info/user_info_data.txt"
+				}
+			},
+			"writer":{
+				"name":"mysqlwriter",
+				"parameter":{
+					"column":[
+						"*"
+					],
+					"connection":[
+						{
+							"jdbcUrl":"jdbc:mysql://192.168.2.2:3306/gene",
+							"table":[
+								"user_info_transfer"
+							]
+						}
+					],
+					"password":"**********",
+					"session":[],
+					"username":"root",
+					"writeMode":"insert"
+				}
+			}
+		}
+	],
+	"setting":{
+		"speed":{
+			"channel":"2"
+		}
+	}
+}
+
+2022-08-08 16:08:00.949 [main] WARN  Engine - prioriy set to 0, because NumberFormatException, the value is: null
+2022-08-08 16:08:00.949 [main] INFO  PerfTrace - PerfTrace traceId=job_-1, isEnable=false, priority=0
+2022-08-08 16:08:00.950 [main] INFO  JobContainer - DataX jobContainer starts job.
+2022-08-08 16:08:00.951 [main] INFO  JobContainer - Set jobId = 0
+2022-08-08 16:08:00.962 [job-0] INFO  HdfsReader$Job - init() begin...
+2022-08-08 16:08:01.153 [job-0] INFO  HdfsReader$Job - hadoopConfig details:{"finalParameters":[]}
+2022-08-08 16:08:01.153 [job-0] INFO  HdfsReader$Job - init() ok and end...
+2022-08-08 16:08:01.334 [job-0] INFO  OriginalConfPretreatmentUtil - table:[user_info_transfer] all columns:[
+user_id,area_id,age,occupation
+].
+2022-08-08 16:08:01.334 [job-0] WARN  OriginalConfPretreatmentUtil - The column configuration in your job file carries risk: the columns to write are set to *, so any change to the table's column count or types may affect job correctness or even cause errors. Please review and adjust your configuration.
+2022-08-08 16:08:01.335 [job-0] INFO  OriginalConfPretreatmentUtil - Write data [
+insert INTO %s (user_id,area_id,age,occupation) VALUES(?,?,?,?)
+], which jdbcUrl like:[jdbc:mysql://192.168.2.2:3306/gene?yearIsDateType=false&zeroDateTimeBehavior=convertToNull&tinyInt1isBit=false&rewriteBatchedStatements=true]
+2022-08-08 16:08:01.335 [job-0] INFO  JobContainer - jobContainer starts to do prepare ...
+2022-08-08 16:08:01.335 [job-0] INFO  JobContainer - DataX Reader.Job [hdfsreader] do prepare work .
+2022-08-08 16:08:01.335 [job-0] INFO  HdfsReader$Job - prepare(), start to getAllFiles...
+2022-08-08 16:08:01.335 [job-0] INFO  HdfsReader$Job - get HDFS all files in path = [/user/hive/warehouse/user_info/user_info_data.txt]
+2022-08-08 16:08:01.840 [job-0] INFO  HdfsReader$Job - [hdfs://hadoop03:8020/user/hive/warehouse/user_info/user_info_data.txt] is a [text]-type file, adding it to the source files list
+2022-08-08 16:08:01.841 [job-0] INFO  HdfsReader$Job - Number of files about to be read: [1], list: [hdfs://hadoop03:8020/user/hive/warehouse/user_info/user_info_data.txt]
+2022-08-08 16:08:01.841 [job-0] INFO  JobContainer - DataX Writer.Job [mysqlwriter] do prepare work .
+2022-08-08 16:08:01.842 [job-0] INFO  JobContainer - jobContainer starts to do split ...
+2022-08-08 16:08:01.842 [job-0] INFO  JobContainer - Job set Channel-Number to 2 channels.
+2022-08-08 16:08:01.842 [job-0] INFO  HdfsReader$Job - split() begin...
+2022-08-08 16:08:01.842 [job-0] INFO  JobContainer - DataX Reader.Job [hdfsreader] splits to [1] tasks.
+2022-08-08 16:08:01.843 [job-0] INFO  JobContainer - DataX Writer.Job [mysqlwriter] splits to [1] tasks.
+2022-08-08 16:08:01.847 [job-0] INFO  JobContainer - jobContainer starts to do schedule ...
+2022-08-08 16:08:01.850 [job-0] INFO  JobContainer - Scheduler starts [1] taskGroups.
+2022-08-08 16:08:01.851 [job-0] INFO  JobContainer - Running by standalone Mode.
+2022-08-08 16:08:01.854 [taskGroup-0] INFO  TaskGroupContainer - taskGroupId=[0] start [1] channels for [1] tasks.
+2022-08-08 16:08:01.857 [taskGroup-0] INFO  Channel - Channel set byte_speed_limit to -1, No bps activated.
+2022-08-08 16:08:01.857 [taskGroup-0] INFO  Channel - Channel set record_speed_limit to -1, No tps activated.
+2022-08-08 16:08:01.864 [taskGroup-0] INFO  TaskGroupContainer - taskGroup[0] taskId[0] attemptCount[1] is started
+2022-08-08 16:08:01.879 [0-0-0-reader] INFO  HdfsReader$Job - hadoopConfig details:{"finalParameters":["mapreduce.job.end-notification.max.retry.interval","mapreduce.job.end-notification.max.attempts"]}
+2022-08-08 16:08:01.879 [0-0-0-reader] INFO  Reader$Task - read start
+2022-08-08 16:08:01.879 [0-0-0-reader] INFO  Reader$Task - reading file : [hdfs://hadoop03:8020/user/hive/warehouse/user_info/user_info_data.txt]
+2022-08-08 16:08:01.891 [0-0-0-reader] INFO  UnstructuredStorageReaderUtil - CsvReader using default values [{"captureRawRecord":true,"columnCount":0,"comment":"#","currentRecord":-1,"delimiter":"\t","escapeMode":1,"headerCount":0,"rawRecord":"","recordDelimiter":"\u0000","safetySwitch":false,"skipEmptyRecords":true,"textQualifier":"\"","trimWhitespace":true,"useComments":false,"useTextQualifier":true,"values":[]}], csvReaderConfig is [null]
+2022-08-08 16:08:01.896 [0-0-0-reader] INFO  Reader$Task - end read source files...
+2022-08-08 16:08:01.905 [0-0-0-writer] WARN  CommonRdbmsWriter$Task - Rolling back this batch write and committing one row at a time instead, because: #HY000
+2022-08-08 16:08:01.909 [0-0-0-writer] ERROR StdoutPluginCollector - 
+java.sql.SQLException: #HY000
+	at com.mysql.jdbc.SQLError.createSQLException(SQLError.java:996) ~[mysql-connector-java-5.1.34.jar:5.1.34]
+	at com.mysql.jdbc.MysqlIO.checkErrorPacket(MysqlIO.java:3887) ~[mysql-connector-java-5.1.34.jar:5.1.34]
+	at com.mysql.jdbc.MysqlIO.checkErrorPacket(MysqlIO.java:3823) ~[mysql-connector-java-5.1.34.jar:5.1.34]
+	at com.mysql.jdbc.MysqlIO.sendCommand(MysqlIO.java:2435) ~[mysql-connector-java-5.1.34.jar:5.1.34]
+	at com.mysql.jdbc.MysqlIO.sqlQueryDirect(MysqlIO.java:2582) ~[mysql-connector-java-5.1.34.jar:5.1.34]
+	at com.mysql.jdbc.ConnectionImpl.execSQL(ConnectionImpl.java:2530) ~[mysql-connector-java-5.1.34.jar:5.1.34]
+	at com.mysql.jdbc.PreparedStatement.executeInternal(PreparedStatement.java:1907) ~[mysql-connector-java-5.1.34.jar:5.1.34]
+	at com.mysql.jdbc.PreparedStatement.execute(PreparedStatement.java:1199) ~[mysql-connector-java-5.1.34.jar:5.1.34]
+	at com.alibaba.datax.plugin.rdbms.writer.CommonRdbmsWriter$Task.doOneInsert(CommonRdbmsWriter.java:382) ~[plugin-rdbms-util-0.0.1-SNAPSHOT.jar:na]
+	at com.alibaba.datax.plugin.rdbms.writer.CommonRdbmsWriter$Task.doBatchInsert(CommonRdbmsWriter.java:362) ~[plugin-rdbms-util-0.0.1-SNAPSHOT.jar:na]
+	at com.alibaba.datax.plugin.rdbms.writer.CommonRdbmsWriter$Task.startWriteWithConnection(CommonRdbmsWriter.java:297) ~[plugin-rdbms-util-0.0.1-SNAPSHOT.jar:na]
+	at com.alibaba.datax.plugin.rdbms.writer.CommonRdbmsWriter$Task.startWrite(CommonRdbmsWriter.java:319) ~[plugin-rdbms-util-0.0.1-SNAPSHOT.jar:na]
+	at com.alibaba.datax.plugin.writer.mysqlwriter.MysqlWriter$Task.startWrite(MysqlWriter.java:78) ~[mysqlwriter-0.0.1-SNAPSHOT.jar:na]
+	at com.alibaba.datax.core.taskgroup.runner.WriterRunner.run(WriterRunner.java:56) ~[datax-core-0.0.1-SNAPSHOT.jar:na]
+	at java.base/java.lang.Thread.run(Thread.java:829) ~[na:na]
+2022-08-08 16:08:01.910 [0-0-0-writer] ERROR StdoutPluginCollector - Dirty data: 
+{"exception":"#HY000","record":[{"byteSize":8,"index":0,"rawData":"xiaoming","type":"STRING"},{"byteSize":5,"index":1,"rawData":"10000","type":"STRING"},{"byteSize":2,"index":2,"rawData":"18","type":"STRING"},{"byteSize":7,"index":3,"rawData":"student","type":"STRING"}],"type":"writer"}
+2022-08-08 16:08:01.911 [0-0-0-writer] ERROR StdoutPluginCollector - Dirty data: 
+{"exception":"#HY000","record":[{"byteSize":7,"index":0,"rawData":"xiaobai","type":"STRING"},{"byteSize":5,"index":1,"rawData":"10000","type":"STRING"},{"byteSize":2,"index":2,"rawData":"25","type":"STRING"},{"byteSize":5,"index":3,"rawData":"coder","type":"STRING"}],"type":"writer"}
+2022-08-08 16:08:01.912 [0-0-0-writer] ERROR StdoutPluginCollector - Dirty data: 
+{"exception":"#HY000","record":[{"byteSize":8,"index":0,"rawData":"zhangsan","type":"STRING"},{"byteSize":5,"index":1,"rawData":"11000","type":"STRING"},{"byteSize":2,"index":2,"rawData":"28","type":"STRING"},{"byteSize":5,"index":3,"rawData":"coder","type":"STRING"}],"type":"writer"}
+2022-08-08 16:08:01.913 [0-0-0-writer] ERROR StdoutPluginCollector - Dirty data: 
+{"exception":"#HY000","record":[{"byteSize":4,"index":0,"rawData":"lisi","type":"STRING"},{"byteSize":5,"index":1,"rawData":"10000","type":"STRING"},{"byteSize":2,"index":2,"rawData":"30","type":"STRING"},{"byteSize":7,"index":3,"rawData":"teacher","type":"STRING"}],"type":"writer"}
+2022-08-08 16:08:01.964 [taskGroup-0] INFO  TaskGroupContainer - taskGroup[0] taskId[0] is successed, used[101]ms
+2022-08-08 16:08:01.965 [taskGroup-0] INFO  TaskGroupContainer - taskGroup[0] completed it's tasks.
+2022-08-08 16:08:11.861 [job-0] INFO  StandAloneJobContainerCommunicator - Total 4 records, 79 bytes | Speed 7B/s, 0 records/s | Error 4 records, 79 bytes |  All Task WaitWriterTime 0.000s |  All Task WaitReaderTime 0.013s | Percentage 100.00%
+2022-08-08 16:08:11.861 [job-0] INFO  AbstractScheduler - Scheduler accomplished all tasks.
+2022-08-08 16:08:11.862 [job-0] INFO  JobContainer - DataX Writer.Job [mysqlwriter] do post work.
+2022-08-08 16:08:11.862 [job-0] INFO  JobContainer - DataX Reader.Job [hdfsreader] do post work.
+2022-08-08 16:08:11.862 [job-0] INFO  JobContainer - DataX jobId [0] completed successfully.
+2022-08-08 16:08:11.863 [job-0] INFO  HookInvoker - No hook invoked, because base dir not exists or is a file: /home/zhouze/PycharmProjects/yili/yili-portal/app/datax/hook
+2022-08-08 16:08:11.865 [job-0] INFO  JobContainer - 
+	 [total cpu info] => 
+		averageCpu                     | maxDeltaCpu                    | minDeltaCpu                    
+		-1.00%                         | -1.00%                         | -1.00%
+                        
+
+	 [total gc info] => 
+		 NAME                 | totalGCCount       | maxDeltaGCCount    | minDeltaGCCount    | totalGCTime        | maxDeltaGCTime     | minDeltaGCTime     
+		 G1 Young Generation  | 2                  | 2                  | 2                  | 0.013s             | 0.013s             | 0.013s             
+		 G1 Old Generation    | 0                  | 0                  | 0                  | 0.000s             | 0.000s             | 0.000s             
+
+2022-08-08 16:08:11.865 [job-0] INFO  JobContainer - PerfTrace not enable!
+2022-08-08 16:08:11.865 [job-0] INFO  StandAloneJobContainerCommunicator - Total 4 records, 79 bytes | Speed 7B/s, 0 records/s | Error 4 records, 79 bytes |  All Task WaitWriterTime 0.000s |  All Task WaitReaderTime 0.013s | Percentage 100.00%
+2022-08-08 16:08:11.866 [job-0] INFO  JobContainer - 
+Job start time                  : 2022-08-08 16:08:00
+Job end time                    : 2022-08-08 16:08:11
+Total elapsed time              :                 10s
+Average traffic                 :                7B/s
+Record write speed              :              0rec/s
+Total records read              :                   4
+Total read/write failures       :                   4
+
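Note: in the run above all 4 records were rejected by the MySQL writer (#HY000) and collected as dirty data, yet the job still reports "completed successfully" with 4 failures. If such a job should fail instead, a DataX job can carry an errorLimit block under setting; a hedged sketch of patching the rendered job file before launching it (the file name and limit are illustrative, and the errorLimit keys follow DataX's usual job-settings convention):

    import json

    def add_error_limit(job_path, max_error_records=0):
        """Insert setting.errorLimit so the job aborts once dirty records exceed the limit."""
        with open(job_path) as f:
            job = json.load(f)
        job["job"]["setting"]["errorLimit"] = {"record": max_error_records}
        with open(job_path, "w") as f:
            json.dump(job, f, indent=4)

    # usage (hypothetical rendered job file produced by the portal):
    # add_error_limit("hive2mysql1659946080.json", max_error_records=0)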

+ 139 - 0
app/datax/log/2022-08-08/mysql1659946358_json-16_12_38.517.log

@@ -0,0 +1,139 @@
+2022-08-08 16:12:38.678 [main] INFO  VMInfo - VMInfo# operatingSystem class => com.sun.management.internal.OperatingSystemImpl
+2022-08-08 16:12:38.684 [main] INFO  Engine - the machine info  => 
+
+	osInfo:	Ubuntu 11 11.0.16+8-post-Ubuntu-0ubuntu120.04
+	jvmInfo:	Linux amd64 5.13.0-52-generic
+	cpu num:	8
+
+	totalPhysicalMemory:	-0.00G
+	freePhysicalMemory:	-0.00G
+	maxFileDescriptorCount:	-1
+	currentOpenFileDescriptorCount:	-1
+
+	GC Names	[G1 Young Generation, G1 Old Generation]
+
+	MEMORY_NAME                    | allocation_size                | init_size                      
+	CodeHeap 'profiled nmethods'   | 117.21MB                       | 2.44MB                         
+	G1 Old Gen                     | 1,024.00MB                     | 970.00MB                       
+	G1 Survivor Space              | -0.00MB                        | 0.00MB                         
+	CodeHeap 'non-profiled nmethods' | 117.22MB                       | 2.44MB                         
+	Compressed Class Space         | 1,024.00MB                     | 0.00MB                         
+	Metaspace                      | -0.00MB                        | 0.00MB                         
+	G1 Eden Space                  | -0.00MB                        | 54.00MB                        
+	CodeHeap 'non-nmethods'        | 5.57MB                         | 2.44MB                         
+
+
+2022-08-08 16:12:38.694 [main] INFO  Engine - 
+{
+	"content":[
+		{
+			"reader":{
+				"name":"hdfsreader",
+				"parameter":{
+					"column":[
+						"*"
+					],
+					"defaultFS":"hdfs://hadoop03:8020/",
+					"encoding":"UTF-8",
+					"fieldDelimiter":"\t",
+					"fileType":"text",
+					"path":"/user/hive/warehouse/user_info/user_info_data.txt"
+				}
+			},
+			"writer":{
+				"name":"mysqlwriter",
+				"parameter":{
+					"column":[
+						"*"
+					],
+					"connection":[
+						{
+							"jdbcUrl":"jdbc:mysql://192.168.2.2:3306/gene",
+							"table":[
+								"user_info_transfer"
+							]
+						}
+					],
+					"password":"**********",
+					"session":[],
+					"username":"root",
+					"writeMode":"insert"
+				}
+			}
+		}
+	],
+	"setting":{
+		"speed":{
+			"channel":"2"
+		}
+	}
+}
+
+2022-08-08 16:12:38.703 [main] WARN  Engine - prioriy set to 0, because NumberFormatException, the value is: null
+2022-08-08 16:12:38.704 [main] INFO  PerfTrace - PerfTrace traceId=job_-1, isEnable=false, priority=0
+2022-08-08 16:12:38.704 [main] INFO  JobContainer - DataX jobContainer starts job.
+2022-08-08 16:12:38.705 [main] INFO  JobContainer - Set jobId = 0
+2022-08-08 16:12:38.717 [job-0] INFO  HdfsReader$Job - init() begin...
+2022-08-08 16:12:38.896 [job-0] INFO  HdfsReader$Job - hadoopConfig details:{"finalParameters":[]}
+2022-08-08 16:12:38.896 [job-0] INFO  HdfsReader$Job - init() ok and end...
+2022-08-08 16:12:39.071 [job-0] INFO  OriginalConfPretreatmentUtil - table:[user_info_transfer] all columns:[
+user_id,area_id,age,occupation
+].
+2022-08-08 16:12:39.071 [job-0] WARN  OriginalConfPretreatmentUtil - The column configuration in your job file carries risk: the columns to write are set to *, so any change to the table's column count or types may affect job correctness or even cause errors. Please review and adjust your configuration.
+2022-08-08 16:12:39.072 [job-0] INFO  OriginalConfPretreatmentUtil - Write data [
+insert INTO %s (user_id,area_id,age,occupation) VALUES(?,?,?,?)
+], which jdbcUrl like:[jdbc:mysql://192.168.2.2:3306/gene?yearIsDateType=false&zeroDateTimeBehavior=convertToNull&tinyInt1isBit=false&rewriteBatchedStatements=true]
+2022-08-08 16:12:39.072 [job-0] INFO  JobContainer - jobContainer starts to do prepare ...
+2022-08-08 16:12:39.072 [job-0] INFO  JobContainer - DataX Reader.Job [hdfsreader] do prepare work .
+2022-08-08 16:12:39.072 [job-0] INFO  HdfsReader$Job - prepare(), start to getAllFiles...
+2022-08-08 16:12:39.072 [job-0] INFO  HdfsReader$Job - get HDFS all files in path = [/user/hive/warehouse/user_info/user_info_data.txt]
+2022-08-08 16:12:39.525 [job-0] INFO  HdfsReader$Job - [hdfs://hadoop03:8020/user/hive/warehouse/user_info/user_info_data.txt] is a [text]-type file, adding it to the source files list
+2022-08-08 16:12:39.526 [job-0] INFO  HdfsReader$Job - Number of files about to be read: [1], list: [hdfs://hadoop03:8020/user/hive/warehouse/user_info/user_info_data.txt]
+2022-08-08 16:12:39.526 [job-0] INFO  JobContainer - DataX Writer.Job [mysqlwriter] do prepare work .
+2022-08-08 16:12:39.527 [job-0] INFO  JobContainer - jobContainer starts to do split ...
+2022-08-08 16:12:39.527 [job-0] INFO  JobContainer - Job set Channel-Number to 2 channels.
+2022-08-08 16:12:39.527 [job-0] INFO  HdfsReader$Job - split() begin...
+2022-08-08 16:12:39.527 [job-0] INFO  JobContainer - DataX Reader.Job [hdfsreader] splits to [1] tasks.
+2022-08-08 16:12:39.528 [job-0] INFO  JobContainer - DataX Writer.Job [mysqlwriter] splits to [1] tasks.
+2022-08-08 16:12:39.533 [job-0] INFO  JobContainer - jobContainer starts to do schedule ...
+2022-08-08 16:12:39.535 [job-0] INFO  JobContainer - Scheduler starts [1] taskGroups.
+2022-08-08 16:12:39.536 [job-0] INFO  JobContainer - Running by standalone Mode.
+2022-08-08 16:12:39.540 [taskGroup-0] INFO  TaskGroupContainer - taskGroupId=[0] start [1] channels for [1] tasks.
+2022-08-08 16:12:39.544 [taskGroup-0] INFO  Channel - Channel set byte_speed_limit to -1, No bps activated.
+2022-08-08 16:12:39.544 [taskGroup-0] INFO  Channel - Channel set record_speed_limit to -1, No tps activated.
+2022-08-08 16:12:39.550 [taskGroup-0] INFO  TaskGroupContainer - taskGroup[0] taskId[0] attemptCount[1] is started
+2022-08-08 16:12:39.566 [0-0-0-reader] INFO  HdfsReader$Job - hadoopConfig details:{"finalParameters":["mapreduce.job.end-notification.max.retry.interval","mapreduce.job.end-notification.max.attempts"]}
+2022-08-08 16:12:39.567 [0-0-0-reader] INFO  Reader$Task - read start
+2022-08-08 16:12:39.567 [0-0-0-reader] INFO  Reader$Task - reading file : [hdfs://hadoop03:8020/user/hive/warehouse/user_info/user_info_data.txt]
+2022-08-08 16:12:39.578 [0-0-0-reader] INFO  UnstructuredStorageReaderUtil - CsvReader using default values [{"captureRawRecord":true,"columnCount":0,"comment":"#","currentRecord":-1,"delimiter":"\t","escapeMode":1,"headerCount":0,"rawRecord":"","recordDelimiter":"\u0000","safetySwitch":false,"skipEmptyRecords":true,"textQualifier":"\"","trimWhitespace":true,"useComments":false,"useTextQualifier":true,"values":[]}], csvReaderConfig is [null]
+2022-08-08 16:12:39.585 [0-0-0-reader] INFO  Reader$Task - end read source files...
+2022-08-08 16:12:39.951 [taskGroup-0] INFO  TaskGroupContainer - taskGroup[0] taskId[0] is successed, used[402]ms
+2022-08-08 16:12:39.952 [taskGroup-0] INFO  TaskGroupContainer - taskGroup[0] completed it's tasks.
+2022-08-08 16:12:49.549 [job-0] INFO  StandAloneJobContainerCommunicator - Total 4 records, 79 bytes | Speed 7B/s, 0 records/s | Error 0 records, 0 bytes |  All Task WaitWriterTime 0.000s |  All Task WaitReaderTime 0.010s | Percentage 100.00%
+2022-08-08 16:12:49.549 [job-0] INFO  AbstractScheduler - Scheduler accomplished all tasks.
+2022-08-08 16:12:49.550 [job-0] INFO  JobContainer - DataX Writer.Job [mysqlwriter] do post work.
+2022-08-08 16:12:49.550 [job-0] INFO  JobContainer - DataX Reader.Job [hdfsreader] do post work.
+2022-08-08 16:12:49.550 [job-0] INFO  JobContainer - DataX jobId [0] completed successfully.
+2022-08-08 16:12:49.551 [job-0] INFO  HookInvoker - No hook invoked, because base dir not exists or is a file: /home/zhouze/PycharmProjects/yili/yili-portal/app/datax/hook
+2022-08-08 16:12:49.551 [job-0] INFO  JobContainer - 
+	 [total cpu info] => 
+		averageCpu                     | maxDeltaCpu                    | minDeltaCpu                    
+		-1.00%                         | -1.00%                         | -1.00%
+                        
+
+	 [total gc info] => 
+		 NAME                 | totalGCCount       | maxDeltaGCCount    | minDeltaGCCount    | totalGCTime        | maxDeltaGCTime     | minDeltaGCTime     
+		 G1 Young Generation  | 2                  | 2                  | 2                  | 0.011s             | 0.011s             | 0.011s             
+		 G1 Old Generation    | 0                  | 0                  | 0                  | 0.000s             | 0.000s             | 0.000s             
+
+2022-08-08 16:12:49.552 [job-0] INFO  JobContainer - PerfTrace not enable!
+2022-08-08 16:12:49.552 [job-0] INFO  StandAloneJobContainerCommunicator - Total 4 records, 79 bytes | Speed 7B/s, 0 records/s | Error 0 records, 0 bytes |  All Task WaitWriterTime 0.000s |  All Task WaitReaderTime 0.010s | Percentage 100.00%
+2022-08-08 16:12:49.552 [job-0] INFO  JobContainer - 
+Job start time                  : 2022-08-08 16:12:38
+Job end time                    : 2022-08-08 16:12:49
+Total elapsed time              :                 10s
+Average traffic                 :                7B/s
+Record write speed              :              0rec/s
+Total records read              :                   4
+Total read/write failures       :                   0
+

+ 97 - 0
app/datax/log/2022-08-08/mysql1659947482_json-16_31_23.163.log

@@ -0,0 +1,97 @@
+2022-08-08 16:31:23.381 [main] INFO  VMInfo - VMInfo# operatingSystem class => com.sun.management.internal.OperatingSystemImpl
+2022-08-08 16:31:23.387 [main] INFO  Engine - the machine info  => 
+
+	osInfo:	Ubuntu 11 11.0.16+8-post-Ubuntu-0ubuntu120.04
+	jvmInfo:	Linux amd64 5.13.0-52-generic
+	cpu num:	8
+
+	totalPhysicalMemory:	-0.00G
+	freePhysicalMemory:	-0.00G
+	maxFileDescriptorCount:	-1
+	currentOpenFileDescriptorCount:	-1
+
+	GC Names	[G1 Young Generation, G1 Old Generation]
+
+	MEMORY_NAME                    | allocation_size                | init_size                      
+	CodeHeap 'profiled nmethods'   | 117.21MB                       | 2.44MB                         
+	G1 Old Gen                     | 1,024.00MB                     | 970.00MB                       
+	G1 Survivor Space              | -0.00MB                        | 0.00MB                         
+	CodeHeap 'non-profiled nmethods' | 117.22MB                       | 2.44MB                         
+	Compressed Class Space         | 1,024.00MB                     | 0.00MB                         
+	Metaspace                      | -0.00MB                        | 0.00MB                         
+	G1 Eden Space                  | -0.00MB                        | 54.00MB                        
+	CodeHeap 'non-nmethods'        | 5.57MB                         | 2.44MB                         
+
+
+2022-08-08 16:31:23.398 [main] INFO  Engine - 
+{
+	"content":[
+		{
+			"reader":{
+				"name":"hdfsreader",
+				"parameter":{
+					"column":[
+						"*"
+					],
+					"defaultFS":"hdfs://hadoop03:8020/",
+					"encoding":"UTF-8",
+					"fieldDelimiter":"\t",
+					"fileType":"text",
+					"path":"/user/hive/warehouse/user_info/user_info_data.txt"
+				}
+			},
+			"writer":{
+				"name":"hdfswriter",
+				"parameter":{
+					"column":"${columns}",
+					"compress":"",
+					"defaultFS":"${WriterdefaultFS}",
+					"fieldDelimiter":"${WriterfieldDelimiter}",
+					"fileName":"${WriterfileName}",
+					"fileType":"${WriterfileType}",
+					"path":"${Writerpath}",
+					"writeMode":"${WriteMode}"
+				}
+			}
+		}
+	],
+	"setting":{
+		"speed":{
+			"channel":"2"
+		}
+	}
+}
+
+2022-08-08 16:31:23.408 [main] WARN  Engine - prioriy set to 0, because NumberFormatException, the value is: null
+2022-08-08 16:31:23.409 [main] INFO  PerfTrace - PerfTrace traceId=job_-1, isEnable=false, priority=0
+2022-08-08 16:31:23.409 [main] INFO  JobContainer - DataX jobContainer starts job.
+2022-08-08 16:31:23.410 [main] INFO  JobContainer - Set jobId = 0
+2022-08-08 16:31:23.423 [job-0] INFO  HdfsReader$Job - init() begin...
+2022-08-08 16:31:23.637 [job-0] INFO  HdfsReader$Job - hadoopConfig details:{"finalParameters":[]}
+2022-08-08 16:31:23.637 [job-0] INFO  HdfsReader$Job - init() ok and end...
+2022-08-08 16:31:23.647 [job-0] ERROR JobContainer - Exception when job run
+com.alibaba.datax.common.exception.DataXException: Code:[HdfsWriter-02], Description:[The parameter value you provided is invalid.]. - The HdfsWriter plugin currently supports only ORC and TEXT file formats; please set the filetype option to ORC or TEXT
+	at com.alibaba.datax.common.exception.DataXException.asDataXException(DataXException.java:26) ~[datax-common-0.0.1-SNAPSHOT.jar:na]
+	at com.alibaba.datax.plugin.writer.hdfswriter.HdfsWriter$Job.validateParameter(HdfsWriter.java:56) ~[hdfswriter-0.0.1-SNAPSHOT.jar:na]
+	at com.alibaba.datax.plugin.writer.hdfswriter.HdfsWriter$Job.init(HdfsWriter.java:42) ~[hdfswriter-0.0.1-SNAPSHOT.jar:na]
+	at com.alibaba.datax.core.job.JobContainer.initJobWriter(JobContainer.java:704) ~[datax-core-0.0.1-SNAPSHOT.jar:na]
+	at com.alibaba.datax.core.job.JobContainer.init(JobContainer.java:304) ~[datax-core-0.0.1-SNAPSHOT.jar:na]
+	at com.alibaba.datax.core.job.JobContainer.start(JobContainer.java:113) ~[datax-core-0.0.1-SNAPSHOT.jar:na]
+	at com.alibaba.datax.core.Engine.start(Engine.java:92) ~[datax-core-0.0.1-SNAPSHOT.jar:na]
+	at com.alibaba.datax.core.Engine.entry(Engine.java:171) ~[datax-core-0.0.1-SNAPSHOT.jar:na]
+	at com.alibaba.datax.core.Engine.main(Engine.java:204) ~[datax-core-0.0.1-SNAPSHOT.jar:na]
+2022-08-08 16:31:23.650 [job-0] INFO  StandAloneJobContainerCommunicator - Total 0 records, 0 bytes | Speed 0B/s, 0 records/s | Error 0 records, 0 bytes |  All Task WaitWriterTime 0.000s |  All Task WaitReaderTime 0.000s | Percentage 0.00%
+2022-08-08 16:31:23.651 [job-0] ERROR Engine - 
+
+According to DataX's analysis, the most likely cause of this job's failure is:
+com.alibaba.datax.common.exception.DataXException: Code:[HdfsWriter-02], Description:[The parameter value you provided is invalid.]. - The HdfsWriter plugin currently supports only ORC and TEXT file formats; please set the filetype option to ORC or TEXT
+	at com.alibaba.datax.common.exception.DataXException.asDataXException(DataXException.java:26)
+	at com.alibaba.datax.plugin.writer.hdfswriter.HdfsWriter$Job.validateParameter(HdfsWriter.java:56)
+	at com.alibaba.datax.plugin.writer.hdfswriter.HdfsWriter$Job.init(HdfsWriter.java:42)
+	at com.alibaba.datax.core.job.JobContainer.initJobWriter(JobContainer.java:704)
+	at com.alibaba.datax.core.job.JobContainer.init(JobContainer.java:304)
+	at com.alibaba.datax.core.job.JobContainer.start(JobContainer.java:113)
+	at com.alibaba.datax.core.Engine.start(Engine.java:92)
+	at com.alibaba.datax.core.Engine.entry(Engine.java:171)
+	at com.alibaba.datax.core.Engine.main(Engine.java:204)
+
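Note: this job failed because the writer block still contained the raw template placeholders (fileType was literally "${WriterfileType}"), and HdfsWriter only accepts ORC or TEXT. A small guard run on the rendered job JSON before invoking datax.py would catch this earlier; a sketch in Python (the helper name is illustrative, not part of this commit):

    import re

    def assert_fully_rendered(json_str):
        """Fail early if any ${...} template placeholder survived substitution."""
        leftovers = re.findall(r"\$\{[^}]+\}", json_str)
        if leftovers:
            raise ValueError("unsubstituted placeholders in job config: " + ", ".join(sorted(set(leftovers))))

    # usage: call after the chain of json_str.replace(...) calls and before json.loads(json_str)
    # assert_fully_rendered(json_str)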

+ 139 - 0
app/datax/log/2022-08-08/mysql1659947523_json-16_32_03.353.log

@@ -0,0 +1,139 @@
+2022-08-08 16:32:03.555 [main] INFO  VMInfo - VMInfo# operatingSystem class => com.sun.management.internal.OperatingSystemImpl
+2022-08-08 16:32:03.561 [main] INFO  Engine - the machine info  => 
+
+	osInfo:	Ubuntu 11 11.0.16+8-post-Ubuntu-0ubuntu120.04
+	jvmInfo:	Linux amd64 5.13.0-52-generic
+	cpu num:	8
+
+	totalPhysicalMemory:	-0.00G
+	freePhysicalMemory:	-0.00G
+	maxFileDescriptorCount:	-1
+	currentOpenFileDescriptorCount:	-1
+
+	GC Names	[G1 Young Generation, G1 Old Generation]
+
+	MEMORY_NAME                    | allocation_size                | init_size                      
+	CodeHeap 'profiled nmethods'   | 117.21MB                       | 2.44MB                         
+	G1 Old Gen                     | 1,024.00MB                     | 970.00MB                       
+	G1 Survivor Space              | -0.00MB                        | 0.00MB                         
+	CodeHeap 'non-profiled nmethods' | 117.22MB                       | 2.44MB                         
+	Compressed Class Space         | 1,024.00MB                     | 0.00MB                         
+	Metaspace                      | -0.00MB                        | 0.00MB                         
+	G1 Eden Space                  | -0.00MB                        | 54.00MB                        
+	CodeHeap 'non-nmethods'        | 5.57MB                         | 2.44MB                         
+
+
+2022-08-08 16:32:03.573 [main] INFO  Engine - 
+{
+	"content":[
+		{
+			"reader":{
+				"name":"hdfsreader",
+				"parameter":{
+					"column":[
+						"*"
+					],
+					"defaultFS":"hdfs://hadoop03:8020/",
+					"encoding":"UTF-8",
+					"fieldDelimiter":"\t",
+					"fileType":"text",
+					"path":"/user/hive/warehouse/user_info/user_info_data.txt"
+				}
+			},
+			"writer":{
+				"name":"mysqlwriter",
+				"parameter":{
+					"column":[
+						"*"
+					],
+					"connection":[
+						{
+							"jdbcUrl":"jdbc:mysql://192.168.2.2:3306/gene",
+							"table":[
+								"user_info_transfer"
+							]
+						}
+					],
+					"password":"**********",
+					"session":[],
+					"username":"root",
+					"writeMode":"insert"
+				}
+			}
+		}
+	],
+	"setting":{
+		"speed":{
+			"channel":"2"
+		}
+	}
+}
+
+2022-08-08 16:32:03.583 [main] WARN  Engine - prioriy set to 0, because NumberFormatException, the value is: null
+2022-08-08 16:32:03.584 [main] INFO  PerfTrace - PerfTrace traceId=job_-1, isEnable=false, priority=0
+2022-08-08 16:32:03.584 [main] INFO  JobContainer - DataX jobContainer starts job.
+2022-08-08 16:32:03.585 [main] INFO  JobContainer - Set jobId = 0
+2022-08-08 16:32:03.599 [job-0] INFO  HdfsReader$Job - init() begin...
+2022-08-08 16:32:03.807 [job-0] INFO  HdfsReader$Job - hadoopConfig details:{"finalParameters":[]}
+2022-08-08 16:32:03.807 [job-0] INFO  HdfsReader$Job - init() ok and end...
+2022-08-08 16:32:04.024 [job-0] INFO  OriginalConfPretreatmentUtil - table:[user_info_transfer] all columns:[
+user_id,area_id,age,occupation
+].
+2022-08-08 16:32:04.024 [job-0] WARN  OriginalConfPretreatmentUtil - The column configuration in your job file carries risk: the columns to write are set to *, so any change to the table's column count or types may affect job correctness or even cause errors. Please review and adjust your configuration.
+2022-08-08 16:32:04.025 [job-0] INFO  OriginalConfPretreatmentUtil - Write data [
+insert INTO %s (user_id,area_id,age,occupation) VALUES(?,?,?,?)
+], which jdbcUrl like:[jdbc:mysql://192.168.2.2:3306/gene?yearIsDateType=false&zeroDateTimeBehavior=convertToNull&tinyInt1isBit=false&rewriteBatchedStatements=true]
+2022-08-08 16:32:04.026 [job-0] INFO  JobContainer - jobContainer starts to do prepare ...
+2022-08-08 16:32:04.026 [job-0] INFO  JobContainer - DataX Reader.Job [hdfsreader] do prepare work .
+2022-08-08 16:32:04.026 [job-0] INFO  HdfsReader$Job - prepare(), start to getAllFiles...
+2022-08-08 16:32:04.026 [job-0] INFO  HdfsReader$Job - get HDFS all files in path = [/user/hive/warehouse/user_info/user_info_data.txt]
+2022-08-08 16:32:04.557 [job-0] INFO  HdfsReader$Job - [hdfs://hadoop03:8020/user/hive/warehouse/user_info/user_info_data.txt] is a [text]-type file, adding it to the source files list
+2022-08-08 16:32:04.558 [job-0] INFO  HdfsReader$Job - Number of files about to be read: [1], list: [hdfs://hadoop03:8020/user/hive/warehouse/user_info/user_info_data.txt]
+2022-08-08 16:32:04.558 [job-0] INFO  JobContainer - DataX Writer.Job [mysqlwriter] do prepare work .
+2022-08-08 16:32:04.558 [job-0] INFO  JobContainer - jobContainer starts to do split ...
+2022-08-08 16:32:04.558 [job-0] INFO  JobContainer - Job set Channel-Number to 2 channels.
+2022-08-08 16:32:04.559 [job-0] INFO  HdfsReader$Job - split() begin...
+2022-08-08 16:32:04.559 [job-0] INFO  JobContainer - DataX Reader.Job [hdfsreader] splits to [1] tasks.
+2022-08-08 16:32:04.559 [job-0] INFO  JobContainer - DataX Writer.Job [mysqlwriter] splits to [1] tasks.
+2022-08-08 16:32:04.565 [job-0] INFO  JobContainer - jobContainer starts to do schedule ...
+2022-08-08 16:32:04.568 [job-0] INFO  JobContainer - Scheduler starts [1] taskGroups.
+2022-08-08 16:32:04.569 [job-0] INFO  JobContainer - Running by standalone Mode.
+2022-08-08 16:32:04.573 [taskGroup-0] INFO  TaskGroupContainer - taskGroupId=[0] start [1] channels for [1] tasks.
+2022-08-08 16:32:04.577 [taskGroup-0] INFO  Channel - Channel set byte_speed_limit to -1, No bps activated.
+2022-08-08 16:32:04.577 [taskGroup-0] INFO  Channel - Channel set record_speed_limit to -1, No tps activated.
+2022-08-08 16:32:04.584 [taskGroup-0] INFO  TaskGroupContainer - taskGroup[0] taskId[0] attemptCount[1] is started
+2022-08-08 16:32:04.601 [0-0-0-reader] INFO  HdfsReader$Job - hadoopConfig details:{"finalParameters":["mapreduce.job.end-notification.max.retry.interval","mapreduce.job.end-notification.max.attempts"]}
+2022-08-08 16:32:04.602 [0-0-0-reader] INFO  Reader$Task - read start
+2022-08-08 16:32:04.603 [0-0-0-reader] INFO  Reader$Task - reading file : [hdfs://hadoop03:8020/user/hive/warehouse/user_info/user_info_data.txt]
+2022-08-08 16:32:04.613 [0-0-0-reader] INFO  UnstructuredStorageReaderUtil - CsvReader using default values [{"captureRawRecord":true,"columnCount":0,"comment":"#","currentRecord":-1,"delimiter":"\t","escapeMode":1,"headerCount":0,"rawRecord":"","recordDelimiter":"\u0000","safetySwitch":false,"skipEmptyRecords":true,"textQualifier":"\"","trimWhitespace":true,"useComments":false,"useTextQualifier":true,"values":[]}], csvReaderConfig is [null]
+2022-08-08 16:32:04.621 [0-0-0-reader] INFO  Reader$Task - end read source files...
+2022-08-08 16:32:04.985 [taskGroup-0] INFO  TaskGroupContainer - taskGroup[0] taskId[0] is successed, used[402]ms
+2022-08-08 16:32:04.985 [taskGroup-0] INFO  TaskGroupContainer - taskGroup[0] completed it's tasks.
+2022-08-08 16:32:14.582 [job-0] INFO  StandAloneJobContainerCommunicator - Total 4 records, 79 bytes | Speed 7B/s, 0 records/s | Error 0 records, 0 bytes |  All Task WaitWriterTime 0.000s |  All Task WaitReaderTime 0.018s | Percentage 100.00%
+2022-08-08 16:32:14.582 [job-0] INFO  AbstractScheduler - Scheduler accomplished all tasks.
+2022-08-08 16:32:14.583 [job-0] INFO  JobContainer - DataX Writer.Job [mysqlwriter] do post work.
+2022-08-08 16:32:14.584 [job-0] INFO  JobContainer - DataX Reader.Job [hdfsreader] do post work.
+2022-08-08 16:32:14.585 [job-0] INFO  JobContainer - DataX jobId [0] completed successfully.
+2022-08-08 16:32:14.587 [job-0] INFO  HookInvoker - No hook invoked, because base dir not exists or is a file: /home/zhouze/PycharmProjects/yili/yili-portal/app/datax/hook
+2022-08-08 16:32:14.591 [job-0] INFO  JobContainer - 
+	 [total cpu info] => 
+		averageCpu                     | maxDeltaCpu                    | minDeltaCpu                    
+		-1.00%                         | -1.00%                         | -1.00%
+                        
+
+	 [total gc info] => 
+		 NAME                 | totalGCCount       | maxDeltaGCCount    | minDeltaGCCount    | totalGCTime        | maxDeltaGCTime     | minDeltaGCTime     
+		 G1 Young Generation  | 2                  | 2                  | 2                  | 0.026s             | 0.026s             | 0.026s             
+		 G1 Old Generation    | 0                  | 0                  | 0                  | 0.000s             | 0.000s             | 0.000s             
+
+2022-08-08 16:32:14.591 [job-0] INFO  JobContainer - PerfTrace not enable!
+2022-08-08 16:32:14.593 [job-0] INFO  StandAloneJobContainerCommunicator - Total 4 records, 79 bytes | Speed 7B/s, 0 records/s | Error 0 records, 0 bytes |  All Task WaitWriterTime 0.000s |  All Task WaitReaderTime 0.018s | Percentage 100.00%
+2022-08-08 16:32:14.594 [job-0] INFO  JobContainer - 
+Job start time                  : 2022-08-08 16:32:03
+Job end time                    : 2022-08-08 16:32:14
+Total elapsed time              :                 11s
+Average traffic                 :                7B/s
+Record write speed              :              0rec/s
+Total records read              :                   4
+Total read/write failures       :                   0
+

+ 0 - 0
app/datax/log_perf/2022-08-08/2hive1659925783_json-10_29_44.075.log


+ 0 - 0
app/datax/log_perf/2022-08-08/2hive1659949960_json-17_12_40.466.log


+ 0 - 0
app/datax/log_perf/2022-08-08/2hive1659950348_json-17_19_09.002.log


+ 0 - 0
app/datax/log_perf/2022-08-08/2hive1659953224_json-18_07_04.969.log


+ 0 - 0
app/datax/log_perf/2022-08-08/2hive1659953258_json-18_07_38.612.log


+ 0 - 0
app/datax/log_perf/2022-08-08/2hive1659953282_json-18_08_02.318.log


+ 0 - 0
app/datax/log_perf/2022-08-08/mysql1659946080_json-16_08_00.769.log


+ 0 - 0
app/datax/log_perf/2022-08-08/mysql1659946358_json-16_12_38.517.log


+ 0 - 0
app/datax/log_perf/2022-08-08/mysql1659947482_json-16_31_23.163.log


+ 0 - 0
app/datax/log_perf/2022-08-08/mysql1659947523_json-16_32_03.353.log


+ 118 - 0
app/run.py

@@ -252,3 +252,121 @@ def datax_hive2hive():  # put application's code here
         return "successful executed!"
 
 
+"""
+POST body
+{
+    "ReaderdefaultFS":"hdfs://hadoop03:8020/",
+    "ReaderfieldDelimiter":"\\t",
+    "ReaderfileType":"text",
+    "ReaderPath":"/user/hive/warehouse/user_info/user_info_data.txt",
+    "jdbcWriter":"jdbc:mysql://192.168.2.2:3306/gene",
+    "table":"user_info_transfer",
+    "passwordWriter":"iict123456",
+    "usernameWriter":"root",
+    "channel":"2"
+}
+"""
+
+
+@app.route('/datax/hive2mysql', methods=['POST'])
+def datax_hive2mysql():  # render the hive2mysql DataX job template and run it
+    set_config = request.get_data()
+    # request.get_data() returns bytes, so an equality check against "" can never match; test truthiness instead
+    if not set_config:
+        abort(404)
+    # normalize the raw body: single quotes -> double quotes, drop newlines and spaces
+    set_config = str(set_config, "utf8").replace('\'', '\"').replace('\n', '').replace(' ', '')
+    set_config = json.loads(set_config)
+    ReaderdefaultFS = set_config.get('ReaderdefaultFS')
+    ReaderfieldDelimiter = set_config.get("ReaderfieldDelimiter")
+    ReaderfileType = set_config.get("ReaderfileType")
+    ReaderPath = set_config.get("ReaderPath")
+    jdbcWriter = set_config.get("jdbcWriter")
+    table = set_config.get("table")
+    passwordWriter = set_config.get("passwordWriter")
+    usernameWriter = set_config.get("usernameWriter")
+    channel = set_config.get("channel")
+    with open("./app/datax/job/hive2mysql.json.template") as f:
+        json_str = f.read()
+        json_str = json_str.replace('${ReaderdefaultFS}', ReaderdefaultFS)
+        json_str = json_str.replace('${ReaderfieldDelimiter}', ReaderfieldDelimiter)
+        json_str = json_str.replace('${ReaderfileType}', ReaderfileType)
+        json_str = json_str.replace('${ReaderPath}', ReaderPath)
+        json_str = json_str.replace('${jdbcWriter}', jdbcWriter)
+        json_str = json_str.replace('${table}', table)
+        json_str = json_str.replace('${passwordWriter}', passwordWriter)
+        json_str = json_str.replace('${usernameWriter}', usernameWriter)
+        json_str = json_str.replace('${channel}', channel)
+        json_obj = json.loads(json_str)
+        # write the rendered job file to the working directory, then hand it to DataX
+        filename = 'hive2mysql' + str(int(time.time())) + '.json'
+        with open(filename, 'w') as out:
+            json.dump(json_obj, out)
+        cmd = 'python ./app/datax/bin/datax.py ./' + filename
+        print(cmd)
+        import os
+        os.system(cmd)
+        return "successful executed!"
+
+
+"""
+POST body
+  {
+    "jdbcReader":"jdbc:mysql://192.168.2.2:3306/gene",
+    "querySql":"select%2*%2from%2gene.test",
+    "passwordReader":"iict12345",
+    "usernameReader":"root",
+    "columns":[{"name":"user_id","type":"string"},{"name":"area_id","type":"string"},{"name":"age","type":"int"},{"name":"occupation","type":"string"}],
+    "WriterdefaultFS": "hdfs://hadoop03:8020/",
+    "WriterfieldDelimiter": "\\t",
+    "WriterfileName": "user_info_data_4.txt",
+    "WriterfileType": "text",
+    "Writerpath": "/user/hive/warehouse/user_info/",
+    "WriteMode":"append",
+    "channel":"2"
+  }
+  
+"""
+
+
+
+@app.route('/datax/mysql2hive', methods=['POST'])
+def datax_mysql2hive():  # render the mysql2hive DataX job template and run it
+    set_config = request.get_data()
+    # request.get_data() returns bytes, so an equality check against "" can never match; test truthiness instead
+    if not set_config:
+        abort(404)
+    # normalize the raw body: single quotes -> double quotes, drop newlines and spaces
+    set_config = str(set_config, "utf8").replace('\'', '\"').replace('\n', '').replace(' ', '')
+    set_config = json.loads(set_config)
+    jdbcReader = set_config.get('jdbcReader')
+    querySql = set_config.get("querySql")
+    # '%2' serves as a space placeholder in the POST body (the handler strips real spaces); restore spaces here
+    querySql = querySql.replace("%2", " ")
+    passwordReader = set_config.get("passwordReader")
+    usernameReader = set_config.get("usernameReader")
+    columns = set_config.get("columns")
+    WriterdefaultFS = set_config.get("WriterdefaultFS")
+    WriterfieldDelimiter = set_config.get("WriterfieldDelimiter")
+    WriterfileName = set_config.get("WriterfileName")
+    WriterfileType = set_config.get("WriterfileType")
+    Writerpath = set_config.get("Writerpath")
+    WriteMode = set_config.get("WriteMode")
+    channel = set_config.get("channel")
+    with open("./app/datax/job/mysql2hive.json.template") as f:
+        json_str = f.read()
+        json_str = json_str.replace('${jdbcReader}', jdbcReader)
+        json_str = json_str.replace('${querySql}', querySql)
+        json_str = json_str.replace('${passwordReader}', passwordReader)
+        json_str = json_str.replace('${usernameReader}', usernameReader)
+        json_str = json_str.replace('${WriterdefaultFS}', WriterdefaultFS)
+        json_str = json_str.replace('${WriterfieldDelimiter}', WriterfieldDelimiter)
+        json_str = json_str.replace('${WriterfileName}', WriterfileName)
+        json_str = json_str.replace('${WriterfileType}', WriterfileType)
+        json_str = json_str.replace('${Writerpath}', Writerpath)
+        json_str = json_str.replace('${WriteMode}', WriteMode)
+        json_str = json_str.replace('${channel}', channel)
+        json_obj = json.loads(json_str)
+        # the column list is a JSON array, so inject it after parsing rather than via string replace
+        json_obj["job"]["content"][0]["writer"]["parameter"]["column"] = columns
+        # write the rendered job file to the working directory, then hand it to DataX
+        filename = 'mysql2hive' + str(int(time.time())) + '.json'
+        with open(filename, 'w') as out:
+            json.dump(json_obj, out)
+        cmd = 'python ./app/datax/bin/datax.py ./' + filename
+        print(cmd)
+        import os
+        os.system(cmd)
+        return "successful executed!"
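Note: for reference, the two new endpoints accept the POST bodies documented in the docstrings above; a hedged client sketch in Python (the base URL and passwords are placeholders, not values from this commit):

    import json
    import requests

    BASE = "http://127.0.0.1:5000"  # assumed Flask dev-server address

    hive2mysql_body = {
        "ReaderdefaultFS": "hdfs://hadoop03:8020/",
        "ReaderfieldDelimiter": "\\t",
        "ReaderfileType": "text",
        "ReaderPath": "/user/hive/warehouse/user_info/user_info_data.txt",
        "jdbcWriter": "jdbc:mysql://192.168.2.2:3306/gene",
        "table": "user_info_transfer",
        "passwordWriter": "<password>",
        "usernameWriter": "root",
        "channel": "2",
    }

    mysql2hive_body = {
        "jdbcReader": "jdbc:mysql://192.168.2.2:3306/gene",
        "querySql": "select%2*%2from%2gene.test",  # '%2' stands for spaces, as the handler expects
        "passwordReader": "<password>",
        "usernameReader": "root",
        "columns": [{"name": "user_id", "type": "string"}, {"name": "area_id", "type": "string"},
                    {"name": "age", "type": "int"}, {"name": "occupation", "type": "string"}],
        "WriterdefaultFS": "hdfs://hadoop03:8020/",
        "WriterfieldDelimiter": "\\t",
        "WriterfileName": "user_info_data_4.txt",
        "WriterfileType": "text",
        "Writerpath": "/user/hive/warehouse/user_info/",
        "WriteMode": "append",
        "channel": "2",
    }

    # the handlers strip spaces and newlines from the raw body, so send compact JSON
    print(requests.post(BASE + "/datax/hive2mysql", data=json.dumps(hive2mysql_body, separators=(",", ":"))).text)
    print(requests.post(BASE + "/datax/mysql2hive", data=json.dumps(mysql2hive_body, separators=(",", ":"))).text)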