2hive1659874333_json-20_12_13.426.log

2022-08-07 20:12:13.605 [main] INFO VMInfo - VMInfo# operatingSystem class => com.sun.management.internal.OperatingSystemImpl
2022-08-07 20:12:13.610 [main] INFO Engine - the machine info =>
osInfo: Ubuntu 11 11.0.16+8-post-Ubuntu-0ubuntu120.04
jvmInfo: Linux amd64 5.13.0-52-generic
cpu num: 8
totalPhysicalMemory: -0.00G
freePhysicalMemory: -0.00G
maxFileDescriptorCount: -1
currentOpenFileDescriptorCount: -1
GC Names [G1 Young Generation, G1 Old Generation]
MEMORY_NAME | allocation_size | init_size
CodeHeap 'profiled nmethods' | 117.21MB | 2.44MB
G1 Old Gen | 1,024.00MB | 970.00MB
G1 Survivor Space | -0.00MB | 0.00MB
CodeHeap 'non-profiled nmethods' | 117.22MB | 2.44MB
Compressed Class Space | 1,024.00MB | 0.00MB
Metaspace | -0.00MB | 0.00MB
G1 Eden Space | -0.00MB | 54.00MB
CodeHeap 'non-nmethods' | 5.57MB | 2.44MB
2022-08-07 20:12:13.622 [main] INFO Engine -
{
    "content":[
        {
            "reader":{
                "name":"hdfsreader",
                "parameter":{
                    "column":[
                        "*"
                    ],
                    "defaultFS":"hdfs://hadoop03:8020/",
                    "encoding":"UTF-8",
                    "fieldDelimiter":"\t",
                    "fileType":"text",
                    "path":"/user/hive/warehouse/user_info/user_info_data.txt"
                }
            },
            "writer":{
                "name":"hdfswriter",
                "parameter":{
                    "column":[
                        {
                            "name":"user_id",
                            "type":"string"
                        },
                        {
                            "name":"area_id",
                            "type":"string"
                        },
                        {
                            "name":"age",
                            "type":"int"
                        },
                        {
                            "name":"occupation",
                            "type":"string"
                        }
                    ],
                    "compress":"",
                    "defaultFS":"hdfs://hadoop03:8020/",
                    "fieldDelimiter":"\t",
                    "fileName":"user_info_data_2.txt",
                    "fileType":"text",
                    "path":"/user/hive/warehouse/user_info/",
                    "writeMode":"${WriteMode}"
                }
            }
        }
    ],
    "setting":{
        "speed":{
            "channel":"2"
        }
    }
}
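
Note: the writer's writeMode above is the unresolved placeholder ${WriteMode}, which only gets a value if one is supplied when the job is launched; otherwise the literal string is passed straight to HdfsWriter, as the error below shows. A minimal corrected writer fragment, assuming append semantics are intended and the variable is not actually needed (sibling keys elided):

    "writer": {
        "name": "hdfswriter",
        "parameter": {
            ...
            "writeMode": "append"
        }
    }
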
2022-08-07 20:12:13.632 [main] WARN Engine - prioriy set to 0, because NumberFormatException, the value is: null
2022-08-07 20:12:13.633 [main] INFO PerfTrace - PerfTrace traceId=job_-1, isEnable=false, priority=0
2022-08-07 20:12:13.633 [main] INFO JobContainer - DataX jobContainer starts job.
2022-08-07 20:12:13.635 [main] INFO JobContainer - Set jobId = 0
2022-08-07 20:12:13.647 [job-0] INFO HdfsReader$Job - init() begin...
2022-08-07 20:12:13.857 [job-0] INFO HdfsReader$Job - hadoopConfig details:{"finalParameters":[]}
2022-08-07 20:12:13.857 [job-0] INFO HdfsReader$Job - init() ok and end...
2022-08-07 20:12:13.881 [job-0] ERROR JobContainer - Exception when job run
com.alibaba.datax.common.exception.DataXException: Code:[HdfsWriter-02], Description:[The parameter value you provided is invalid.]. - Only the append and nonConflict modes are supported; the configured writeMode [${writemode}] is not supported
    at com.alibaba.datax.common.exception.DataXException.asDataXException(DataXException.java:26) ~[datax-common-0.0.1-SNAPSHOT.jar:na]
    at com.alibaba.datax.plugin.writer.hdfswriter.HdfsWriter$Job.validateParameter(HdfsWriter.java:86) ~[hdfswriter-0.0.1-SNAPSHOT.jar:na]
    at com.alibaba.datax.plugin.writer.hdfswriter.HdfsWriter$Job.init(HdfsWriter.java:42) ~[hdfswriter-0.0.1-SNAPSHOT.jar:na]
    at com.alibaba.datax.core.job.JobContainer.initJobWriter(JobContainer.java:704) ~[datax-core-0.0.1-SNAPSHOT.jar:na]
    at com.alibaba.datax.core.job.JobContainer.init(JobContainer.java:304) ~[datax-core-0.0.1-SNAPSHOT.jar:na]
    at com.alibaba.datax.core.job.JobContainer.start(JobContainer.java:113) ~[datax-core-0.0.1-SNAPSHOT.jar:na]
    at com.alibaba.datax.core.Engine.start(Engine.java:92) ~[datax-core-0.0.1-SNAPSHOT.jar:na]
    at com.alibaba.datax.core.Engine.entry(Engine.java:171) ~[datax-core-0.0.1-SNAPSHOT.jar:na]
    at com.alibaba.datax.core.Engine.main(Engine.java:204) ~[datax-core-0.0.1-SNAPSHOT.jar:na]
2022-08-07 20:12:13.884 [job-0] INFO StandAloneJobContainerCommunicator - Total 0 records, 0 bytes | Speed 0B/s, 0 records/s | Error 0 records, 0 bytes | All Task WaitWriterTime 0.000s | All Task WaitReaderTime 0.000s | Percentage 0.00%
2022-08-07 20:12:13.885 [job-0] ERROR Engine -
According to DataX's intelligent analysis, the most likely cause of this task's failure is:
com.alibaba.datax.common.exception.DataXException: Code:[HdfsWriter-02], Description:[The parameter value you provided is invalid.]. - Only the append and nonConflict modes are supported; the configured writeMode [${writemode}] is not supported
    at com.alibaba.datax.common.exception.DataXException.asDataXException(DataXException.java:26)
    at com.alibaba.datax.plugin.writer.hdfswriter.HdfsWriter$Job.validateParameter(HdfsWriter.java:86)
    at com.alibaba.datax.plugin.writer.hdfswriter.HdfsWriter$Job.init(HdfsWriter.java:42)
    at com.alibaba.datax.core.job.JobContainer.initJobWriter(JobContainer.java:704)
    at com.alibaba.datax.core.job.JobContainer.init(JobContainer.java:304)
    at com.alibaba.datax.core.job.JobContainer.start(JobContainer.java:113)
    at com.alibaba.datax.core.Engine.start(Engine.java:92)
    at com.alibaba.datax.core.Engine.entry(Engine.java:171)
    at com.alibaba.datax.core.Engine.main(Engine.java:204)
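
The failure, then, is that the job was launched without a value for the WriteMode variable: HdfsWriter received the literal string ${WriteMode} (printed lowercased in the error) and rejected it, since only append and nonConflict are accepted. A sketch of a launch that fills in the variable, assuming the standard DataX launcher's -p option for variable substitution; the job-file name here is a hypothetical stand-in:

    python datax.py -p "-DWriteMode=append" hive_user_info_job.json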