x_job_hive2hive_json-22_23_09.620.log

2022-07-17 22:23:09.903 [main] INFO VMInfo - VMInfo# operatingSystem class => sun.management.OperatingSystemImpl
2022-07-17 22:23:09.913 [main] INFO Engine - the machine info =>
osInfo: Oracle Corporation 1.8 25.262-b10
jvmInfo: Linux amd64 3.10.0-957.el7.x86_64
cpu num: 128
totalPhysicalMemory: -0.00G
freePhysicalMemory: -0.00G
maxFileDescriptorCount: -1
currentOpenFileDescriptorCount: -1
GC Names [PS MarkSweep, PS Scavenge]
MEMORY_NAME | allocation_size | init_size
PS Eden Space | 256.00MB | 256.00MB
Code Cache | 240.00MB | 2.44MB
Compressed Class Space | 1,024.00MB | 0.00MB
PS Survivor Space | 42.50MB | 42.50MB
PS Old Gen | 683.00MB | 683.00MB
Metaspace | -0.00MB | 0.00MB
2022-07-17 22:23:09.945 [main] INFO Engine -
{
    "content":[
        {
            "reader":{
                "name":"hdfsreader",
                "parameter":{
                    "column":[
                        "*"
                    ],
                    "defaultFS":"hdfs://hadoop01:8020/",
                    "encoding":"UTF-8",
                    "fieldDelimiter":"\t",
                    "fileType":"text",
                    "path":"/user/hive/warehouse/user_info/user_info_data.txt"
                }
            },
            "writer":{
                "name":"hdfswriter",
                "parameter":{
                    "column":[
                        {
                            "name":"user_id",
                            "type":"string"
                        },
                        {
                            "name":"age",
                            "type":"int"
                        }
                    ],
                    "compress":"",
                    "defaultFS":"hdfs://hadoop01:8020/",
                    "fieldDelimiter":"\t",
                    "fileName":"user_info_data_1.txt",
                    "fileType":"text",
                    "path":"/user/hive/warehouse/user_info/",
                    "writeMode":"append"
                }
            }
        }
    ],
    "setting":{
        "speed":{
            "channel":"1"
        }
    }
}
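
Note on this job configuration: hdfsreader's "column":["*"] emits every tab-delimited field of the source file as-is, while hdfswriter declares exactly two output columns (user_id string, age int), so the writer assumes each record carries exactly two fields. A minimal alternative sketch, assuming the writer should only ever receive those two fields, is to select them explicitly on the reader side using hdfsreader's index/type column form; the indexes 0 and 1 are assumptions about the source file's layout, not values taken from this log. This fragment would replace the reader's "column":["*"] entry above:

    "column":[
        {"index":0, "type":"string"},
        {"index":1, "type":"long"}
    ]

(hdfsreader typically declares integral fields as long rather than int; all other reader parameters stay as shown above.)
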
2022-07-17 22:23:09.976 [main] WARN Engine - prioriy set to 0, because NumberFormatException, the value is: null
2022-07-17 22:23:09.980 [main] INFO PerfTrace - PerfTrace traceId=job_-1, isEnable=false, priority=0
2022-07-17 22:23:09.980 [main] INFO JobContainer - DataX jobContainer starts job.
2022-07-17 22:23:09.983 [main] INFO JobContainer - Set jobId = 0
2022-07-17 22:23:10.019 [job-0] INFO HdfsReader$Job - init() begin...
2022-07-17 22:23:10.430 [job-0] INFO HdfsReader$Job - hadoopConfig details:{"finalParameters":[]}
2022-07-17 22:23:10.431 [job-0] INFO HdfsReader$Job - init() ok and end...
2022-07-17 22:23:11.634 [job-0] INFO JobContainer - jobContainer starts to do prepare ...
2022-07-17 22:23:11.634 [job-0] INFO JobContainer - DataX Reader.Job [hdfsreader] do prepare work .
2022-07-17 22:23:11.634 [job-0] INFO HdfsReader$Job - prepare(), start to getAllFiles...
2022-07-17 22:23:11.635 [job-0] INFO HdfsReader$Job - get HDFS all files in path = [/user/hive/warehouse/user_info/user_info_data.txt]
2022-07-17 22:23:12.585 [job-0] INFO HdfsReader$Job - [hdfs://hadoop01:8020/user/hive/warehouse/user_info/user_info_data.txt] is a [text] type file; adding it to the source files list
2022-07-17 22:23:12.587 [job-0] INFO HdfsReader$Job - Number of files to be read: [1], list: [hdfs://hadoop01:8020/user/hive/warehouse/user_info/user_info_data.txt]
2022-07-17 22:23:12.587 [job-0] INFO JobContainer - DataX Writer.Job [hdfswriter] do prepare work .
2022-07-17 22:23:12.677 [job-0] INFO HdfsWriter$Job - Because writeMode is set to append, no cleanup is done before writing; files with the name prefix [user_info_data_1.txt] will be written under the directory [/user/hive/warehouse/user_info/]
2022-07-17 22:23:12.678 [job-0] INFO JobContainer - jobContainer starts to do split ...
2022-07-17 22:23:12.679 [job-0] INFO JobContainer - Job set Channel-Number to 1 channels.
2022-07-17 22:23:12.680 [job-0] INFO HdfsReader$Job - split() begin...
2022-07-17 22:23:12.681 [job-0] INFO JobContainer - DataX Reader.Job [hdfsreader] splits to [1] tasks.
2022-07-17 22:23:12.681 [job-0] INFO HdfsWriter$Job - begin do split...
2022-07-17 22:23:12.689 [job-0] INFO HdfsWriter$Job - splited write file name:[hdfs://hadoop01:8020//user/hive/warehouse/user_info__89e76b8e_4d1d_4098_abd2_b284680d4027/user_info_data_1.txt__97b736bd_0183_4af1_a817_d16087c405ed]
2022-07-17 22:23:12.689 [job-0] INFO HdfsWriter$Job - end do split.
2022-07-17 22:23:12.689 [job-0] INFO JobContainer - DataX Writer.Job [hdfswriter] splits to [1] tasks.
2022-07-17 22:23:12.701 [job-0] INFO JobContainer - jobContainer starts to do schedule ...
2022-07-17 22:23:12.706 [job-0] INFO JobContainer - Scheduler starts [1] taskGroups.
2022-07-17 22:23:12.804 [job-0] INFO JobContainer - Running by standalone Mode.
2022-07-17 22:23:12.811 [taskGroup-0] INFO TaskGroupContainer - taskGroupId=[0] start [1] channels for [1] tasks.
2022-07-17 22:23:12.819 [taskGroup-0] INFO Channel - Channel set byte_speed_limit to -1, No bps activated.
2022-07-17 22:23:12.820 [taskGroup-0] INFO Channel - Channel set record_speed_limit to -1, No tps activated.
2022-07-17 22:23:12.833 [taskGroup-0] INFO TaskGroupContainer - taskGroup[0] taskId[0] attemptCount[1] is started
2022-07-17 22:23:12.889 [0-0-0-writer] INFO HdfsWriter$Task - begin do write...
2022-07-17 22:23:12.889 [0-0-0-writer] INFO HdfsWriter$Task - write to file : [hdfs://hadoop01:8020//user/hive/warehouse/user_info__89e76b8e_4d1d_4098_abd2_b284680d4027/user_info_data_1.txt__97b736bd_0183_4af1_a817_d16087c405ed]
2022-07-17 22:23:12.893 [0-0-0-reader] INFO HdfsReader$Job - hadoopConfig details:{"finalParameters":["mapreduce.job.end-notification.max.retry.interval","mapreduce.job.end-notification.max.attempts"]}
2022-07-17 22:23:12.896 [0-0-0-reader] INFO Reader$Task - read start
2022-07-17 22:23:12.896 [0-0-0-reader] INFO Reader$Task - reading file : [hdfs://hadoop01:8020/user/hive/warehouse/user_info/user_info_data.txt]
2022-07-17 22:23:12.927 [0-0-0-reader] INFO UnstructuredStorageReaderUtil - CsvReader uses default values [{"captureRawRecord":true,"columnCount":0,"comment":"#","currentRecord":-1,"delimiter":"\t","escapeMode":1,"headerCount":0,"rawRecord":"","recordDelimiter":"\u0000","safetySwitch":false,"skipEmptyRecords":true,"textQualifier":"\"","trimWhitespace":true,"useComments":false,"useTextQualifier":true,"values":[]}], csvReaderConfig is [null]
2022-07-17 22:23:12.934 [0-0-0-reader] INFO Reader$Task - end read source files...
2022-07-17 22:23:12.972 [0-0-0-writer] ERROR HdfsWriter$Job - An IO exception occurred while writing file [hdfs://hadoop01:8020//user/hive/warehouse/user_info__89e76b8e_4d1d_4098_abd2_b284680d4027/user_info_data_1.txt__97b736bd_0183_4af1_a817_d16087c405ed]; please check whether your network is working properly!
2022-07-17 22:23:12.972 [0-0-0-writer] INFO HdfsWriter$Job - start delete tmp dir [hdfs://hadoop01:8020/user/hive/warehouse/user_info__89e76b8e_4d1d_4098_abd2_b284680d4027] .
2022-07-17 22:23:12.982 [0-0-0-writer] INFO HdfsWriter$Job - finish delete tmp dir [hdfs://hadoop01:8020/user/hive/warehouse/user_info__89e76b8e_4d1d_4098_abd2_b284680d4027] .
2022-07-17 22:23:12.986 [0-0-0-writer] ERROR WriterRunner - Writer Runner Received Exceptions:
com.alibaba.datax.common.exception.DataXException: Code:[HdfsWriter-04], Description:[An IO exception occurred while writing the configured file.]. - java.lang.IndexOutOfBoundsException: Index: 2, Size: 2
    at java.util.ArrayList.rangeCheck(ArrayList.java:659)
    at java.util.ArrayList.get(ArrayList.java:435)
    at com.alibaba.datax.plugin.writer.hdfswriter.HdfsHelper.transportOneRecord(HdfsHelper.java:495)
    at com.alibaba.datax.plugin.writer.hdfswriter.HdfsHelper.transportOneRecord(HdfsHelper.java:323)
    at com.alibaba.datax.plugin.writer.hdfswriter.HdfsHelper.textFileStartWrite(HdfsHelper.java:306)
    at com.alibaba.datax.plugin.writer.hdfswriter.HdfsWriter$Task.startWrite(HdfsWriter.java:360)
    at com.alibaba.datax.core.taskgroup.runner.WriterRunner.run(WriterRunner.java:56)
    at java.lang.Thread.run(Thread.java:748)
- java.lang.IndexOutOfBoundsException: Index: 2, Size: 2
    at java.util.ArrayList.rangeCheck(ArrayList.java:659)
    at java.util.ArrayList.get(ArrayList.java:435)
    at com.alibaba.datax.plugin.writer.hdfswriter.HdfsHelper.transportOneRecord(HdfsHelper.java:495)
    at com.alibaba.datax.plugin.writer.hdfswriter.HdfsHelper.transportOneRecord(HdfsHelper.java:323)
    at com.alibaba.datax.plugin.writer.hdfswriter.HdfsHelper.textFileStartWrite(HdfsHelper.java:306)
    at com.alibaba.datax.plugin.writer.hdfswriter.HdfsWriter$Task.startWrite(HdfsWriter.java:360)
    at com.alibaba.datax.core.taskgroup.runner.WriterRunner.run(WriterRunner.java:56)
    at java.lang.Thread.run(Thread.java:748)
    at com.alibaba.datax.common.exception.DataXException.asDataXException(DataXException.java:40) ~[datax-common-0.0.1-SNAPSHOT.jar:na]
    at com.alibaba.datax.plugin.writer.hdfswriter.HdfsHelper.textFileStartWrite(HdfsHelper.java:317) ~[hdfswriter-0.0.1-SNAPSHOT.jar:na]
    at com.alibaba.datax.plugin.writer.hdfswriter.HdfsWriter$Task.startWrite(HdfsWriter.java:360) ~[hdfswriter-0.0.1-SNAPSHOT.jar:na]
    at com.alibaba.datax.core.taskgroup.runner.WriterRunner.run(WriterRunner.java:56) ~[datax-core-0.0.1-SNAPSHOT.jar:na]
    at java.lang.Thread.run(Thread.java:748) [na:1.8.0_262]
Caused by: java.lang.IndexOutOfBoundsException: Index: 2, Size: 2
    at java.util.ArrayList.rangeCheck(ArrayList.java:659) ~[na:1.8.0_262]
    at java.util.ArrayList.get(ArrayList.java:435) ~[na:1.8.0_262]
    at com.alibaba.datax.plugin.writer.hdfswriter.HdfsHelper.transportOneRecord(HdfsHelper.java:495) ~[hdfswriter-0.0.1-SNAPSHOT.jar:na]
    at com.alibaba.datax.plugin.writer.hdfswriter.HdfsHelper.transportOneRecord(HdfsHelper.java:323) ~[hdfswriter-0.0.1-SNAPSHOT.jar:na]
    at com.alibaba.datax.plugin.writer.hdfswriter.HdfsHelper.textFileStartWrite(HdfsHelper.java:306) ~[hdfswriter-0.0.1-SNAPSHOT.jar:na]
    ... 3 common frames omitted
2022-07-17 22:23:22.825 [job-0] INFO StandAloneJobContainerCommunicator - Total 4 records, 79 bytes | Speed 7B/s, 0 records/s | Error 0 records, 0 bytes | All Task WaitWriterTime 0.000s | All Task WaitReaderTime 0.000s | Percentage 0.00%
2022-07-17 22:23:22.826 [job-0] ERROR JobContainer - Error running scheduler in [standalone] mode.
2022-07-17 22:23:22.827 [job-0] ERROR JobContainer - Exception when job run
com.alibaba.datax.common.exception.DataXException: Code:[HdfsWriter-04], Description:[An IO exception occurred while writing the configured file.]. - java.lang.IndexOutOfBoundsException: Index: 2, Size: 2
    at java.util.ArrayList.rangeCheck(ArrayList.java:659)
    at java.util.ArrayList.get(ArrayList.java:435)
    at com.alibaba.datax.plugin.writer.hdfswriter.HdfsHelper.transportOneRecord(HdfsHelper.java:495)
    at com.alibaba.datax.plugin.writer.hdfswriter.HdfsHelper.transportOneRecord(HdfsHelper.java:323)
    at com.alibaba.datax.plugin.writer.hdfswriter.HdfsHelper.textFileStartWrite(HdfsHelper.java:306)
    at com.alibaba.datax.plugin.writer.hdfswriter.HdfsWriter$Task.startWrite(HdfsWriter.java:360)
    at com.alibaba.datax.core.taskgroup.runner.WriterRunner.run(WriterRunner.java:56)
    at java.lang.Thread.run(Thread.java:748)
- java.lang.IndexOutOfBoundsException: Index: 2, Size: 2
    at java.util.ArrayList.rangeCheck(ArrayList.java:659)
    at java.util.ArrayList.get(ArrayList.java:435)
    at com.alibaba.datax.plugin.writer.hdfswriter.HdfsHelper.transportOneRecord(HdfsHelper.java:495)
    at com.alibaba.datax.plugin.writer.hdfswriter.HdfsHelper.transportOneRecord(HdfsHelper.java:323)
    at com.alibaba.datax.plugin.writer.hdfswriter.HdfsHelper.textFileStartWrite(HdfsHelper.java:306)
    at com.alibaba.datax.plugin.writer.hdfswriter.HdfsWriter$Task.startWrite(HdfsWriter.java:360)
    at com.alibaba.datax.core.taskgroup.runner.WriterRunner.run(WriterRunner.java:56)
    at java.lang.Thread.run(Thread.java:748)
    at com.alibaba.datax.common.exception.DataXException.asDataXException(DataXException.java:40) ~[datax-common-0.0.1-SNAPSHOT.jar:na]
    at com.alibaba.datax.plugin.writer.hdfswriter.HdfsHelper.textFileStartWrite(HdfsHelper.java:317) ~[na:na]
    at com.alibaba.datax.plugin.writer.hdfswriter.HdfsWriter$Task.startWrite(HdfsWriter.java:360) ~[na:na]
    at com.alibaba.datax.core.taskgroup.runner.WriterRunner.run(WriterRunner.java:56) ~[datax-core-0.0.1-SNAPSHOT.jar:na]
    at java.lang.Thread.run(Thread.java:748) ~[na:1.8.0_262]
Caused by: java.lang.IndexOutOfBoundsException: Index: 2, Size: 2
    at java.util.ArrayList.rangeCheck(ArrayList.java:659) ~[na:1.8.0_262]
    at java.util.ArrayList.get(ArrayList.java:435) ~[na:1.8.0_262]
    at com.alibaba.datax.plugin.writer.hdfswriter.HdfsHelper.transportOneRecord(HdfsHelper.java:495) ~[na:na]
    at com.alibaba.datax.plugin.writer.hdfswriter.HdfsHelper.transportOneRecord(HdfsHelper.java:323) ~[na:na]
    at com.alibaba.datax.plugin.writer.hdfswriter.HdfsHelper.textFileStartWrite(HdfsHelper.java:306) ~[na:na]
    ... 3 common frames omitted
2022-07-17 22:23:22.829 [job-0] INFO StandAloneJobContainerCommunicator - Total 4 records, 79 bytes | Speed 79B/s, 4 records/s | Error 0 records, 0 bytes | All Task WaitWriterTime 0.000s | All Task WaitReaderTime 0.000s | Percentage 0.00%
2022-07-17 22:23:22.957 [job-0] ERROR Engine -
Based on DataX's intelligent analysis, the most likely cause of this task's error is:
com.alibaba.datax.common.exception.DataXException: Code:[HdfsWriter-04], Description:[An IO exception occurred while writing the configured file.]. - java.lang.IndexOutOfBoundsException: Index: 2, Size: 2
    at java.util.ArrayList.rangeCheck(ArrayList.java:659)
    at java.util.ArrayList.get(ArrayList.java:435)
    at com.alibaba.datax.plugin.writer.hdfswriter.HdfsHelper.transportOneRecord(HdfsHelper.java:495)
    at com.alibaba.datax.plugin.writer.hdfswriter.HdfsHelper.transportOneRecord(HdfsHelper.java:323)
    at com.alibaba.datax.plugin.writer.hdfswriter.HdfsHelper.textFileStartWrite(HdfsHelper.java:306)
    at com.alibaba.datax.plugin.writer.hdfswriter.HdfsWriter$Task.startWrite(HdfsWriter.java:360)
    at com.alibaba.datax.core.taskgroup.runner.WriterRunner.run(WriterRunner.java:56)
    at java.lang.Thread.run(Thread.java:748)
- java.lang.IndexOutOfBoundsException: Index: 2, Size: 2
    at java.util.ArrayList.rangeCheck(ArrayList.java:659)
    at java.util.ArrayList.get(ArrayList.java:435)
    at com.alibaba.datax.plugin.writer.hdfswriter.HdfsHelper.transportOneRecord(HdfsHelper.java:495)
    at com.alibaba.datax.plugin.writer.hdfswriter.HdfsHelper.transportOneRecord(HdfsHelper.java:323)
    at com.alibaba.datax.plugin.writer.hdfswriter.HdfsHelper.textFileStartWrite(HdfsHelper.java:306)
    at com.alibaba.datax.plugin.writer.hdfswriter.HdfsWriter$Task.startWrite(HdfsWriter.java:360)
    at com.alibaba.datax.core.taskgroup.runner.WriterRunner.run(WriterRunner.java:56)
    at java.lang.Thread.run(Thread.java:748)
    at com.alibaba.datax.common.exception.DataXException.asDataXException(DataXException.java:40)
    at com.alibaba.datax.plugin.writer.hdfswriter.HdfsHelper.textFileStartWrite(HdfsHelper.java:317)
    at com.alibaba.datax.plugin.writer.hdfswriter.HdfsWriter$Task.startWrite(HdfsWriter.java:360)
    at com.alibaba.datax.core.taskgroup.runner.WriterRunner.run(WriterRunner.java:56)
    at java.lang.Thread.run(Thread.java:748)
Caused by: java.lang.IndexOutOfBoundsException: Index: 2, Size: 2
    at java.util.ArrayList.rangeCheck(ArrayList.java:659)
    at java.util.ArrayList.get(ArrayList.java:435)
    at com.alibaba.datax.plugin.writer.hdfswriter.HdfsHelper.transportOneRecord(HdfsHelper.java:495)
    at com.alibaba.datax.plugin.writer.hdfswriter.HdfsHelper.transportOneRecord(HdfsHelper.java:323)
    at com.alibaba.datax.plugin.writer.hdfswriter.HdfsHelper.textFileStartWrite(HdfsHelper.java:306)
    ... 3 more
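
Note on this error: the Code:[HdfsWriter-04] "IO exception" wraps java.lang.IndexOutOfBoundsException: Index: 2, Size: 2 thrown inside HdfsHelper.transportOneRecord, so the failure is a record/column mismatch rather than a network problem. A plausible reading, though this log alone cannot confirm it: the writer's "column" list has 2 entries (Size: 2) while at least one record read from user_info_data.txt carries three or more tab-separated fields, so the writer looks up a third column definition (Index: 2) that was never configured. Under that assumption, one fix is to declare every source field in the hdfswriter "column" list; the third entry below ("gender") is a hypothetical placeholder for whatever the extra field actually is:

    "column":[
        {"name":"user_id", "type":"string"},
        {"name":"age", "type":"int"},
        {"name":"gender", "type":"string"}
    ]

The opposite approach also works under the same assumption: keep the two writer columns and narrow the reader to the matching field indexes, as sketched after the job configuration above.
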