2hive1659874181_json-20_09_41.611.log 7.7 KB

  1. 2022-08-07 20:09:41.783 [main] INFO VMInfo - VMInfo# operatingSystem class => com.sun.management.internal.OperatingSystemImpl
  2. 2022-08-07 20:09:41.788 [main] INFO Engine - the machine info =>
  3. osInfo: Ubuntu 11 11.0.16+8-post-Ubuntu-0ubuntu120.04
  4. jvmInfo: Linux amd64 5.13.0-52-generic
  5. cpu num: 8
  6. totalPhysicalMemory: -0.00G
  7. freePhysicalMemory: -0.00G
  8. maxFileDescriptorCount: -1
  9. currentOpenFileDescriptorCount: -1
  10. GC Names [G1 Young Generation, G1 Old Generation]
  11. MEMORY_NAME | allocation_size | init_size
  12. CodeHeap 'profiled nmethods' | 117.21MB | 2.44MB
  13. G1 Old Gen | 1,024.00MB | 970.00MB
  14. G1 Survivor Space | -0.00MB | 0.00MB
  15. CodeHeap 'non-profiled nmethods' | 117.22MB | 2.44MB
  16. Compressed Class Space | 1,024.00MB | 0.00MB
  17. Metaspace | -0.00MB | 0.00MB
  18. G1 Eden Space | -0.00MB | 54.00MB
  19. CodeHeap 'non-nmethods' | 5.57MB | 2.44MB
  20. 2022-08-07 20:09:41.800 [main] INFO Engine -
  21. {
  22. "content":[
  23. {
  24. "reader":{
  25. "name":"hdfsreader",
  26. "parameter":{
  27. "column":[
  28. "*"
  29. ],
  30. "defaultFS":"hdfs://hadoop03:8020/",
  31. "encoding":"UTF-8",
  32. "fieldDelimiter":"\t",
  33. "fileType":"text",
  34. "path":"/user/hive/warehouse/user_info/user_info_data.txt"
  35. }
  36. },
  37. "writer":{
  38. "column":[
  39. {
  40. "name":"user_id",
  41. "type":"string"
  42. },
  43. {
  44. "name":"area_id",
  45. "type":"string"
  46. },
  47. {
  48. "name":"age",
  49. "type":"int"
  50. },
  51. {
  52. "name":"occupation",
  53. "type":"string"
  54. }
  55. ],
  56. "name":"hdfswriter",
  57. "parameter":{
  58. "column":"${columns}",
  59. "compress":"",
  60. "defaultFS":"hdfs://hadoop03:8020/",
  61. "fieldDelimiter":"\t",
  62. "fileName":"user_info_data_2.txt",
  63. "fileType":"text",
  64. "path":"/user/hive/warehouse/user_info/",
  65. "writeMode":"append"
  66. }
  67. }
  68. }
  69. ],
  70. "setting":{
  71. "speed":{
  72. "channel":"2"
  73. }
  74. }
  75. }
  76. 2022-08-07 20:09:41.809 [main] WARN Engine - prioriy set to 0, because NumberFormatException, the value is: null
  77. 2022-08-07 20:09:41.810 [main] INFO PerfTrace - PerfTrace traceId=job_-1, isEnable=false, priority=0
  78. 2022-08-07 20:09:41.810 [main] INFO JobContainer - DataX jobContainer starts job.
  79. 2022-08-07 20:09:41.811 [main] INFO JobContainer - Set jobId = 0
  80. 2022-08-07 20:09:41.824 [job-0] INFO HdfsReader$Job - init() begin...
  81. 2022-08-07 20:09:42.016 [job-0] INFO HdfsReader$Job - hadoopConfig details:{"finalParameters":[]}
  82. 2022-08-07 20:09:42.016 [job-0] INFO HdfsReader$Job - init() ok and end...
  83. 2022-08-07 20:09:42.024 [job-0] ERROR JobContainer - Exception when job run
  84. java.lang.ClassCastException: class java.lang.String cannot be cast to class java.util.List (java.lang.String and java.util.List are in module java.base of loader 'bootstrap')
  85. at com.alibaba.datax.common.util.Configuration.getList(Configuration.java:407) ~[datax-common-0.0.1-SNAPSHOT.jar:na]
  86. at com.alibaba.datax.common.util.Configuration.getListConfiguration(Configuration.java:463) ~[datax-common-0.0.1-SNAPSHOT.jar:na]
  87. at com.alibaba.datax.plugin.writer.hdfswriter.HdfsWriter$Job.validateParameter(HdfsWriter.java:72) ~[hdfswriter-0.0.1-SNAPSHOT.jar:na]
  88. at com.alibaba.datax.plugin.writer.hdfswriter.HdfsWriter$Job.init(HdfsWriter.java:42) ~[hdfswriter-0.0.1-SNAPSHOT.jar:na]
  89. at com.alibaba.datax.core.job.JobContainer.initJobWriter(JobContainer.java:704) ~[datax-core-0.0.1-SNAPSHOT.jar:na]
  90. at com.alibaba.datax.core.job.JobContainer.init(JobContainer.java:304) ~[datax-core-0.0.1-SNAPSHOT.jar:na]
  91. at com.alibaba.datax.core.job.JobContainer.start(JobContainer.java:113) ~[datax-core-0.0.1-SNAPSHOT.jar:na]
  92. at com.alibaba.datax.core.Engine.start(Engine.java:92) ~[datax-core-0.0.1-SNAPSHOT.jar:na]
  93. at com.alibaba.datax.core.Engine.entry(Engine.java:171) ~[datax-core-0.0.1-SNAPSHOT.jar:na]
  94. at com.alibaba.datax.core.Engine.main(Engine.java:204) ~[datax-core-0.0.1-SNAPSHOT.jar:na]
  95. 2022-08-07 20:09:42.027 [job-0] INFO StandAloneJobContainerCommunicator - Total 0 records, 0 bytes | Speed 0B/s, 0 records/s | Error 0 records, 0 bytes | All Task WaitWriterTime 0.000s | All Task WaitReaderTime 0.000s | Percentage 0.00%
  96. 2022-08-07 20:09:42.029 [job-0] ERROR Engine -
  97. 经DataX智能分析,该任务最可能的错误原因是:
  98. com.alibaba.datax.common.exception.DataXException: Code:[Framework-02], Description:[DataX引擎运行过程出错,具体原因请参看DataX运行结束时的错误诊断信息 .]. - java.lang.ClassCastException: class java.lang.String cannot be cast to class java.util.List (java.lang.String and java.util.List are in module java.base of loader 'bootstrap')
  99. at com.alibaba.datax.common.util.Configuration.getList(Configuration.java:407)
  100. at com.alibaba.datax.common.util.Configuration.getListConfiguration(Configuration.java:463)
  101. at com.alibaba.datax.plugin.writer.hdfswriter.HdfsWriter$Job.validateParameter(HdfsWriter.java:72)
  102. at com.alibaba.datax.plugin.writer.hdfswriter.HdfsWriter$Job.init(HdfsWriter.java:42)
  103. at com.alibaba.datax.core.job.JobContainer.initJobWriter(JobContainer.java:704)
  104. at com.alibaba.datax.core.job.JobContainer.init(JobContainer.java:304)
  105. at com.alibaba.datax.core.job.JobContainer.start(JobContainer.java:113)
  106. at com.alibaba.datax.core.Engine.start(Engine.java:92)
  107. at com.alibaba.datax.core.Engine.entry(Engine.java:171)
  108. at com.alibaba.datax.core.Engine.main(Engine.java:204)
  109. - java.lang.ClassCastException: class java.lang.String cannot be cast to class java.util.List (java.lang.String and java.util.List are in module java.base of loader 'bootstrap')
  110. at com.alibaba.datax.common.util.Configuration.getList(Configuration.java:407)
  111. at com.alibaba.datax.common.util.Configuration.getListConfiguration(Configuration.java:463)
  112. at com.alibaba.datax.plugin.writer.hdfswriter.HdfsWriter$Job.validateParameter(HdfsWriter.java:72)
  113. at com.alibaba.datax.plugin.writer.hdfswriter.HdfsWriter$Job.init(HdfsWriter.java:42)
  114. at com.alibaba.datax.core.job.JobContainer.initJobWriter(JobContainer.java:704)
  115. at com.alibaba.datax.core.job.JobContainer.init(JobContainer.java:304)
  116. at com.alibaba.datax.core.job.JobContainer.start(JobContainer.java:113)
  117. at com.alibaba.datax.core.Engine.start(Engine.java:92)
  118. at com.alibaba.datax.core.Engine.entry(Engine.java:171)
  119. at com.alibaba.datax.core.Engine.main(Engine.java:204)
  120. at com.alibaba.datax.common.exception.DataXException.asDataXException(DataXException.java:40)
  121. at com.alibaba.datax.core.job.JobContainer.start(JobContainer.java:162)
  122. at com.alibaba.datax.core.Engine.start(Engine.java:92)
  123. at com.alibaba.datax.core.Engine.entry(Engine.java:171)
  124. at com.alibaba.datax.core.Engine.main(Engine.java:204)
  125. Caused by: java.lang.ClassCastException: class java.lang.String cannot be cast to class java.util.List (java.lang.String and java.util.List are in module java.base of loader 'bootstrap')
  126. at com.alibaba.datax.common.util.Configuration.getList(Configuration.java:407)
  127. at com.alibaba.datax.common.util.Configuration.getListConfiguration(Configuration.java:463)
  128. at com.alibaba.datax.plugin.writer.hdfswriter.HdfsWriter$Job.validateParameter(HdfsWriter.java:72)
  129. at com.alibaba.datax.plugin.writer.hdfswriter.HdfsWriter$Job.init(HdfsWriter.java:42)
  130. at com.alibaba.datax.core.job.JobContainer.initJobWriter(JobContainer.java:704)
  131. at com.alibaba.datax.core.job.JobContainer.init(JobContainer.java:304)
  132. at com.alibaba.datax.core.job.JobContainer.start(JobContainer.java:113)
  133. ... 3 more