2hive1659873359_json-19_56_00.031.log 7.6 KB

123456789101112131415161718192021222324252627282930313233343536373839404142434445464748495051525354555657585960616263646566676869707172737475767778798081828384858687888990919293949596979899100101102103104105106107108109110111112113114115116117118119120121122123124125
  1. 2022-08-07 19:56:00.197 [main] INFO VMInfo - VMInfo# operatingSystem class => com.sun.management.internal.OperatingSystemImpl
  2. 2022-08-07 19:56:00.201 [main] INFO Engine - the machine info =>
  3. osInfo: Ubuntu 11 11.0.16+8-post-Ubuntu-0ubuntu120.04
  4. jvmInfo: Linux amd64 5.13.0-52-generic
  5. cpu num: 8
  6. totalPhysicalMemory: -0.00G
  7. freePhysicalMemory: -0.00G
  8. maxFileDescriptorCount: -1
  9. currentOpenFileDescriptorCount: -1
  10. GC Names [G1 Young Generation, G1 Old Generation]
  11. MEMORY_NAME | allocation_size | init_size
  12. CodeHeap 'profiled nmethods' | 117.21MB | 2.44MB
  13. G1 Old Gen | 1,024.00MB | 970.00MB
  14. G1 Survivor Space | -0.00MB | 0.00MB
  15. CodeHeap 'non-profiled nmethods' | 117.22MB | 2.44MB
  16. Compressed Class Space | 1,024.00MB | 0.00MB
  17. Metaspace | -0.00MB | 0.00MB
  18. G1 Eden Space | -0.00MB | 54.00MB
  19. CodeHeap 'non-nmethods' | 5.57MB | 2.44MB
  20. 2022-08-07 19:56:00.211 [main] INFO Engine -
  21. {
  22. "content":[
  23. {
  24. "reader":{
  25. "name":"hdfsreader",
  26. "parameter":{
  27. "column":[
  28. "*"
  29. ],
  30. "defaultFS":"hdfs://hadoop03:8020/",
  31. "encoding":"UTF-8",
  32. "fieldDelimiter":"\t",
  33. "fileType":"text",
  34. "path":"/user/hive/warehouse/user_info/user_info_data.txt"
  35. }
  36. },
  37. "writer":{
  38. "name":"hdfswriter",
  39. "parameter":{
  40. "column":"[{'name': 'user_id', 'type': 'string'}, {'name': 'area_id', 'type': 'string'}, {'name': 'age', 'type': 'int'}, {'name': 'occupation', 'type': 'string'}]",
  41. "compress":"",
  42. "defaultFS":"hdfs://hadoop03:8020/",
  43. "fieldDelimiter":"\t",
  44. "fileName":"user_info_data_2.txt",
  45. "fileType":"text",
  46. "path":"/user/hive/warehouse/user_info/",
  47. "writeMode":"append"
  48. }
  49. }
  50. }
  51. ],
  52. "setting":{
  53. "speed":{
  54. "channel":"2"
  55. }
  56. }
  57. }
  58. 2022-08-07 19:56:00.220 [main] WARN Engine - prioriy set to 0, because NumberFormatException, the value is: null
  59. 2022-08-07 19:56:00.221 [main] INFO PerfTrace - PerfTrace traceId=job_-1, isEnable=false, priority=0
  60. 2022-08-07 19:56:00.221 [main] INFO JobContainer - DataX jobContainer starts job.
  61. 2022-08-07 19:56:00.222 [main] INFO JobContainer - Set jobId = 0
  62. 2022-08-07 19:56:00.234 [job-0] INFO HdfsReader$Job - init() begin...
  63. 2022-08-07 19:56:00.429 [job-0] INFO HdfsReader$Job - hadoopConfig details:{"finalParameters":[]}
  64. 2022-08-07 19:56:00.429 [job-0] INFO HdfsReader$Job - init() ok and end...
  65. 2022-08-07 19:56:00.437 [job-0] ERROR JobContainer - Exception when job run
  66. java.lang.ClassCastException: class java.lang.String cannot be cast to class java.util.List (java.lang.String and java.util.List are in module java.base of loader 'bootstrap')
  67. at com.alibaba.datax.common.util.Configuration.getList(Configuration.java:407) ~[datax-common-0.0.1-SNAPSHOT.jar:na]
  68. at com.alibaba.datax.common.util.Configuration.getListConfiguration(Configuration.java:463) ~[datax-common-0.0.1-SNAPSHOT.jar:na]
  69. at com.alibaba.datax.plugin.writer.hdfswriter.HdfsWriter$Job.validateParameter(HdfsWriter.java:72) ~[hdfswriter-0.0.1-SNAPSHOT.jar:na]
  70. at com.alibaba.datax.plugin.writer.hdfswriter.HdfsWriter$Job.init(HdfsWriter.java:42) ~[hdfswriter-0.0.1-SNAPSHOT.jar:na]
  71. at com.alibaba.datax.core.job.JobContainer.initJobWriter(JobContainer.java:704) ~[datax-core-0.0.1-SNAPSHOT.jar:na]
  72. at com.alibaba.datax.core.job.JobContainer.init(JobContainer.java:304) ~[datax-core-0.0.1-SNAPSHOT.jar:na]
  73. at com.alibaba.datax.core.job.JobContainer.start(JobContainer.java:113) ~[datax-core-0.0.1-SNAPSHOT.jar:na]
  74. at com.alibaba.datax.core.Engine.start(Engine.java:92) ~[datax-core-0.0.1-SNAPSHOT.jar:na]
  75. at com.alibaba.datax.core.Engine.entry(Engine.java:171) ~[datax-core-0.0.1-SNAPSHOT.jar:na]
  76. at com.alibaba.datax.core.Engine.main(Engine.java:204) ~[datax-core-0.0.1-SNAPSHOT.jar:na]
  77. 2022-08-07 19:56:00.439 [job-0] INFO StandAloneJobContainerCommunicator - Total 0 records, 0 bytes | Speed 0B/s, 0 records/s | Error 0 records, 0 bytes | All Task WaitWriterTime 0.000s | All Task WaitReaderTime 0.000s | Percentage 0.00%
  78. 2022-08-07 19:56:00.440 [job-0] ERROR Engine -
  79. 经DataX智能分析,该任务最可能的错误原因是:
  80. com.alibaba.datax.common.exception.DataXException: Code:[Framework-02], Description:[DataX引擎运行过程出错,具体原因请参看DataX运行结束时的错误诊断信息 .]. - java.lang.ClassCastException: class java.lang.String cannot be cast to class java.util.List (java.lang.String and java.util.List are in module java.base of loader 'bootstrap')
  81. at com.alibaba.datax.common.util.Configuration.getList(Configuration.java:407)
  82. at com.alibaba.datax.common.util.Configuration.getListConfiguration(Configuration.java:463)
  83. at com.alibaba.datax.plugin.writer.hdfswriter.HdfsWriter$Job.validateParameter(HdfsWriter.java:72)
  84. at com.alibaba.datax.plugin.writer.hdfswriter.HdfsWriter$Job.init(HdfsWriter.java:42)
  85. at com.alibaba.datax.core.job.JobContainer.initJobWriter(JobContainer.java:704)
  86. at com.alibaba.datax.core.job.JobContainer.init(JobContainer.java:304)
  87. at com.alibaba.datax.core.job.JobContainer.start(JobContainer.java:113)
  88. at com.alibaba.datax.core.Engine.start(Engine.java:92)
  89. at com.alibaba.datax.core.Engine.entry(Engine.java:171)
  90. at com.alibaba.datax.core.Engine.main(Engine.java:204)
  91. - java.lang.ClassCastException: class java.lang.String cannot be cast to class java.util.List (java.lang.String and java.util.List are in module java.base of loader 'bootstrap')
  92. at com.alibaba.datax.common.util.Configuration.getList(Configuration.java:407)
  93. at com.alibaba.datax.common.util.Configuration.getListConfiguration(Configuration.java:463)
  94. at com.alibaba.datax.plugin.writer.hdfswriter.HdfsWriter$Job.validateParameter(HdfsWriter.java:72)
  95. at com.alibaba.datax.plugin.writer.hdfswriter.HdfsWriter$Job.init(HdfsWriter.java:42)
  96. at com.alibaba.datax.core.job.JobContainer.initJobWriter(JobContainer.java:704)
  97. at com.alibaba.datax.core.job.JobContainer.init(JobContainer.java:304)
  98. at com.alibaba.datax.core.job.JobContainer.start(JobContainer.java:113)
  99. at com.alibaba.datax.core.Engine.start(Engine.java:92)
  100. at com.alibaba.datax.core.Engine.entry(Engine.java:171)
  101. at com.alibaba.datax.core.Engine.main(Engine.java:204)
  102. at com.alibaba.datax.common.exception.DataXException.asDataXException(DataXException.java:40)
  103. at com.alibaba.datax.core.job.JobContainer.start(JobContainer.java:162)
  104. at com.alibaba.datax.core.Engine.start(Engine.java:92)
  105. at com.alibaba.datax.core.Engine.entry(Engine.java:171)
  106. at com.alibaba.datax.core.Engine.main(Engine.java:204)
  107. Caused by: java.lang.ClassCastException: class java.lang.String cannot be cast to class java.util.List (java.lang.String and java.util.List are in module java.base of loader 'bootstrap')
  108. at com.alibaba.datax.common.util.Configuration.getList(Configuration.java:407)
  109. at com.alibaba.datax.common.util.Configuration.getListConfiguration(Configuration.java:463)
  110. at com.alibaba.datax.plugin.writer.hdfswriter.HdfsWriter$Job.validateParameter(HdfsWriter.java:72)
  111. at com.alibaba.datax.plugin.writer.hdfswriter.HdfsWriter$Job.init(HdfsWriter.java:42)
  112. at com.alibaba.datax.core.job.JobContainer.initJobWriter(JobContainer.java:704)
  113. at com.alibaba.datax.core.job.JobContainer.init(JobContainer.java:304)
  114. at com.alibaba.datax.core.job.JobContainer.start(JobContainer.java:113)
  115. ... 3 more