@@ -0,0 +1,165 @@
+2022-08-08 16:08:00.926 [main] INFO VMInfo - VMInfo# operatingSystem class => com.sun.management.internal.OperatingSystemImpl
+2022-08-08 16:08:00.931 [main] INFO Engine - the machine info =>
+
+ osInfo: Ubuntu 11 11.0.16+8-post-Ubuntu-0ubuntu120.04
+ jvmInfo: Linux amd64 5.13.0-52-generic
+ cpu num: 8
+
+ totalPhysicalMemory: -0.00G
+ freePhysicalMemory: -0.00G
+ maxFileDescriptorCount: -1
+ currentOpenFileDescriptorCount: -1
+
+ GC Names [G1 Young Generation, G1 Old Generation]
+
+ MEMORY_NAME | allocation_size | init_size
+ CodeHeap 'profiled nmethods' | 117.21MB | 2.44MB
+ G1 Old Gen | 1,024.00MB | 970.00MB
+ G1 Survivor Space | -0.00MB | 0.00MB
+ CodeHeap 'non-profiled nmethods' | 117.22MB | 2.44MB
+ Compressed Class Space | 1,024.00MB | 0.00MB
+ Metaspace | -0.00MB | 0.00MB
+ G1 Eden Space | -0.00MB | 54.00MB
+ CodeHeap 'non-nmethods' | 5.57MB | 2.44MB
+
+
+2022-08-08 16:08:00.940 [main] INFO Engine -
+{
+    "content":[
+        {
+            "reader":{
+                "name":"hdfsreader",
+                "parameter":{
+                    "column":[
+                        "*"
+                    ],
+                    "defaultFS":"hdfs://hadoop03:8020/",
+                    "encoding":"UTF-8",
+                    "fieldDelimiter":"\t",
+                    "fileType":"text",
+                    "path":"/user/hive/warehouse/user_info/user_info_data.txt"
+                }
+            },
+            "writer":{
+                "name":"mysqlwriter",
+                "parameter":{
+                    "column":[
+                        "*"
+                    ],
+                    "connection":[
+                        {
+                            "jdbcUrl":"jdbc:mysql://192.168.2.2:3306/gene",
+                            "table":[
+                                "user_info_transfer"
+                            ]
+                        }
+                    ],
+                    "password":"**********",
+                    "session":[],
+                    "username":"root",
+                    "writeMode":"insert"
+                }
+            }
+        }
+    ],
+    "setting":{
+        "speed":{
+            "channel":"2"
+        }
+    }
+}
+
+2022-08-08 16:08:00.949 [main] WARN Engine - prioriy set to 0, because NumberFormatException, the value is: null
+2022-08-08 16:08:00.949 [main] INFO PerfTrace - PerfTrace traceId=job_-1, isEnable=false, priority=0
+2022-08-08 16:08:00.950 [main] INFO JobContainer - DataX jobContainer starts job.
+2022-08-08 16:08:00.951 [main] INFO JobContainer - Set jobId = 0
+2022-08-08 16:08:00.962 [job-0] INFO HdfsReader$Job - init() begin...
+2022-08-08 16:08:01.153 [job-0] INFO HdfsReader$Job - hadoopConfig details:{"finalParameters":[]}
+2022-08-08 16:08:01.153 [job-0] INFO HdfsReader$Job - init() ok and end...
+2022-08-08 16:08:01.334 [job-0] INFO OriginalConfPretreatmentUtil - table:[user_info_transfer] all columns:[
+user_id,area_id,age,occupation
+].
+2022-08-08 16:08:01.334 [job-0] WARN OriginalConfPretreatmentUtil - The column configuration in your job file carries a risk: the write-side column list is "*", so any change to the target table's column count or types may affect job correctness or even cause it to fail. Please review and adjust your configuration.
+2022-08-08 16:08:01.335 [job-0] INFO OriginalConfPretreatmentUtil - Write data [
+insert INTO %s (user_id,area_id,age,occupation) VALUES(?,?,?,?)
+], which jdbcUrl like:[jdbc:mysql://192.168.2.2:3306/gene?yearIsDateType=false&zeroDateTimeBehavior=convertToNull&tinyInt1isBit=false&rewriteBatchedStatements=true]
+2022-08-08 16:08:01.335 [job-0] INFO JobContainer - jobContainer starts to do prepare ...
+2022-08-08 16:08:01.335 [job-0] INFO JobContainer - DataX Reader.Job [hdfsreader] do prepare work .
+2022-08-08 16:08:01.335 [job-0] INFO HdfsReader$Job - prepare(), start to getAllFiles...
+2022-08-08 16:08:01.335 [job-0] INFO HdfsReader$Job - get HDFS all files in path = [/user/hive/warehouse/user_info/user_info_data.txt]
+2022-08-08 16:08:01.840 [job-0] INFO HdfsReader$Job - [hdfs://hadoop03:8020/user/hive/warehouse/user_info/user_info_data.txt] is a [text] file; adding it to the source files list
+2022-08-08 16:08:01.841 [job-0] INFO HdfsReader$Job - Number of files to read: [1], list: [hdfs://hadoop03:8020/user/hive/warehouse/user_info/user_info_data.txt]
+2022-08-08 16:08:01.841 [job-0] INFO JobContainer - DataX Writer.Job [mysqlwriter] do prepare work .
+2022-08-08 16:08:01.842 [job-0] INFO JobContainer - jobContainer starts to do split ...
+2022-08-08 16:08:01.842 [job-0] INFO JobContainer - Job set Channel-Number to 2 channels.
+2022-08-08 16:08:01.842 [job-0] INFO HdfsReader$Job - split() begin...
+2022-08-08 16:08:01.842 [job-0] INFO JobContainer - DataX Reader.Job [hdfsreader] splits to [1] tasks.
+2022-08-08 16:08:01.843 [job-0] INFO JobContainer - DataX Writer.Job [mysqlwriter] splits to [1] tasks.
+2022-08-08 16:08:01.847 [job-0] INFO JobContainer - jobContainer starts to do schedule ...
+2022-08-08 16:08:01.850 [job-0] INFO JobContainer - Scheduler starts [1] taskGroups.
+2022-08-08 16:08:01.851 [job-0] INFO JobContainer - Running by standalone Mode.
+2022-08-08 16:08:01.854 [taskGroup-0] INFO TaskGroupContainer - taskGroupId=[0] start [1] channels for [1] tasks.
+2022-08-08 16:08:01.857 [taskGroup-0] INFO Channel - Channel set byte_speed_limit to -1, No bps activated.
+2022-08-08 16:08:01.857 [taskGroup-0] INFO Channel - Channel set record_speed_limit to -1, No tps activated.
+2022-08-08 16:08:01.864 [taskGroup-0] INFO TaskGroupContainer - taskGroup[0] taskId[0] attemptCount[1] is started
+2022-08-08 16:08:01.879 [0-0-0-reader] INFO HdfsReader$Job - hadoopConfig details:{"finalParameters":["mapreduce.job.end-notification.max.retry.interval","mapreduce.job.end-notification.max.attempts"]}
+2022-08-08 16:08:01.879 [0-0-0-reader] INFO Reader$Task - read start
+2022-08-08 16:08:01.879 [0-0-0-reader] INFO Reader$Task - reading file : [hdfs://hadoop03:8020/user/hive/warehouse/user_info/user_info_data.txt]
+2022-08-08 16:08:01.891 [0-0-0-reader] INFO UnstructuredStorageReaderUtil - CsvReader is using default settings [{"captureRawRecord":true,"columnCount":0,"comment":"#","currentRecord":-1,"delimiter":"\t","escapeMode":1,"headerCount":0,"rawRecord":"","recordDelimiter":"\u0000","safetySwitch":false,"skipEmptyRecords":true,"textQualifier":"\"","trimWhitespace":true,"useComments":false,"useTextQualifier":true,"values":[]}], csvReaderConfig is [null]
+2022-08-08 16:08:01.896 [0-0-0-reader] INFO Reader$Task - end read source files...
+2022-08-08 16:08:01.905 [0-0-0-writer] WARN CommonRdbmsWriter$Task - Rolling back this batch write and retrying one row at a time, because: #HY000
+2022-08-08 16:08:01.909 [0-0-0-writer] ERROR StdoutPluginCollector -
+java.sql.SQLException: #HY000
+ at com.mysql.jdbc.SQLError.createSQLException(SQLError.java:996) ~[mysql-connector-java-5.1.34.jar:5.1.34]
+ at com.mysql.jdbc.MysqlIO.checkErrorPacket(MysqlIO.java:3887) ~[mysql-connector-java-5.1.34.jar:5.1.34]
+ at com.mysql.jdbc.MysqlIO.checkErrorPacket(MysqlIO.java:3823) ~[mysql-connector-java-5.1.34.jar:5.1.34]
+ at com.mysql.jdbc.MysqlIO.sendCommand(MysqlIO.java:2435) ~[mysql-connector-java-5.1.34.jar:5.1.34]
+ at com.mysql.jdbc.MysqlIO.sqlQueryDirect(MysqlIO.java:2582) ~[mysql-connector-java-5.1.34.jar:5.1.34]
+ at com.mysql.jdbc.ConnectionImpl.execSQL(ConnectionImpl.java:2530) ~[mysql-connector-java-5.1.34.jar:5.1.34]
+ at com.mysql.jdbc.PreparedStatement.executeInternal(PreparedStatement.java:1907) ~[mysql-connector-java-5.1.34.jar:5.1.34]
+ at com.mysql.jdbc.PreparedStatement.execute(PreparedStatement.java:1199) ~[mysql-connector-java-5.1.34.jar:5.1.34]
+ at com.alibaba.datax.plugin.rdbms.writer.CommonRdbmsWriter$Task.doOneInsert(CommonRdbmsWriter.java:382) ~[plugin-rdbms-util-0.0.1-SNAPSHOT.jar:na]
+ at com.alibaba.datax.plugin.rdbms.writer.CommonRdbmsWriter$Task.doBatchInsert(CommonRdbmsWriter.java:362) ~[plugin-rdbms-util-0.0.1-SNAPSHOT.jar:na]
+ at com.alibaba.datax.plugin.rdbms.writer.CommonRdbmsWriter$Task.startWriteWithConnection(CommonRdbmsWriter.java:297) ~[plugin-rdbms-util-0.0.1-SNAPSHOT.jar:na]
+ at com.alibaba.datax.plugin.rdbms.writer.CommonRdbmsWriter$Task.startWrite(CommonRdbmsWriter.java:319) ~[plugin-rdbms-util-0.0.1-SNAPSHOT.jar:na]
+ at com.alibaba.datax.plugin.writer.mysqlwriter.MysqlWriter$Task.startWrite(MysqlWriter.java:78) ~[mysqlwriter-0.0.1-SNAPSHOT.jar:na]
+ at com.alibaba.datax.core.taskgroup.runner.WriterRunner.run(WriterRunner.java:56) ~[datax-core-0.0.1-SNAPSHOT.jar:na]
+ at java.base/java.lang.Thread.run(Thread.java:829) ~[na:na]
+2022-08-08 16:08:01.910 [0-0-0-writer] ERROR StdoutPluginCollector - Dirty data:
+{"exception":"#HY000","record":[{"byteSize":8,"index":0,"rawData":"xiaoming","type":"STRING"},{"byteSize":5,"index":1,"rawData":"10000","type":"STRING"},{"byteSize":2,"index":2,"rawData":"18","type":"STRING"},{"byteSize":7,"index":3,"rawData":"student","type":"STRING"}],"type":"writer"}
+2022-08-08 16:08:01.911 [0-0-0-writer] ERROR StdoutPluginCollector - Dirty data:
+{"exception":"#HY000","record":[{"byteSize":7,"index":0,"rawData":"xiaobai","type":"STRING"},{"byteSize":5,"index":1,"rawData":"10000","type":"STRING"},{"byteSize":2,"index":2,"rawData":"25","type":"STRING"},{"byteSize":5,"index":3,"rawData":"coder","type":"STRING"}],"type":"writer"}
+2022-08-08 16:08:01.912 [0-0-0-writer] ERROR StdoutPluginCollector - Dirty data:
+{"exception":"#HY000","record":[{"byteSize":8,"index":0,"rawData":"zhangsan","type":"STRING"},{"byteSize":5,"index":1,"rawData":"11000","type":"STRING"},{"byteSize":2,"index":2,"rawData":"28","type":"STRING"},{"byteSize":5,"index":3,"rawData":"coder","type":"STRING"}],"type":"writer"}
+2022-08-08 16:08:01.913 [0-0-0-writer] ERROR StdoutPluginCollector - Dirty data:
+{"exception":"#HY000","record":[{"byteSize":4,"index":0,"rawData":"lisi","type":"STRING"},{"byteSize":5,"index":1,"rawData":"10000","type":"STRING"},{"byteSize":2,"index":2,"rawData":"30","type":"STRING"},{"byteSize":7,"index":3,"rawData":"teacher","type":"STRING"}],"type":"writer"}
+2022-08-08 16:08:01.964 [taskGroup-0] INFO TaskGroupContainer - taskGroup[0] taskId[0] is successed, used[101]ms
+2022-08-08 16:08:01.965 [taskGroup-0] INFO TaskGroupContainer - taskGroup[0] completed it's tasks.
+2022-08-08 16:08:11.861 [job-0] INFO StandAloneJobContainerCommunicator - Total 4 records, 79 bytes | Speed 7B/s, 0 records/s | Error 4 records, 79 bytes | All Task WaitWriterTime 0.000s | All Task WaitReaderTime 0.013s | Percentage 100.00%
+2022-08-08 16:08:11.861 [job-0] INFO AbstractScheduler - Scheduler accomplished all tasks.
+2022-08-08 16:08:11.862 [job-0] INFO JobContainer - DataX Writer.Job [mysqlwriter] do post work.
+2022-08-08 16:08:11.862 [job-0] INFO JobContainer - DataX Reader.Job [hdfsreader] do post work.
+2022-08-08 16:08:11.862 [job-0] INFO JobContainer - DataX jobId [0] completed successfully.
+2022-08-08 16:08:11.863 [job-0] INFO HookInvoker - No hook invoked, because base dir not exists or is a file: /home/zhouze/PycharmProjects/yili/yili-portal/app/datax/hook
+2022-08-08 16:08:11.865 [job-0] INFO JobContainer -
+ [total cpu info] =>
+ averageCpu | maxDeltaCpu | minDeltaCpu
+ -1.00% | -1.00% | -1.00%
+
+
+ [total gc info] =>
+ NAME | totalGCCount | maxDeltaGCCount | minDeltaGCCount | totalGCTime | maxDeltaGCTime | minDeltaGCTime
+ G1 Young Generation | 2 | 2 | 2 | 0.013s | 0.013s | 0.013s
+ G1 Old Generation | 0 | 0 | 0 | 0.000s | 0.000s | 0.000s
+
+2022-08-08 16:08:11.865 [job-0] INFO JobContainer - PerfTrace not enable!
+2022-08-08 16:08:11.865 [job-0] INFO StandAloneJobContainerCommunicator - Total 4 records, 79 bytes | Speed 7B/s, 0 records/s | Error 4 records, 79 bytes | All Task WaitWriterTime 0.000s | All Task WaitReaderTime 0.013s | Percentage 100.00%
+2022-08-08 16:08:11.866 [job-0] INFO JobContainer -
+Job start time            : 2022-08-08 16:08:00
+Job end time              : 2022-08-08 16:08:11
+Total elapsed time        : 10s
+Average throughput        : 7B/s
+Record write speed        : 0rec/s
+Total records read        : 4
+Total read/write failures : 4
+