x_job_hive2hive_json-22_51_12.634.log (11 KB)

  1. 2022-07-17 22:51:12.919 [main] INFO VMInfo - VMInfo# operatingSystem class => sun.management.OperatingSystemImpl
  2. 2022-07-17 22:51:12.929 [main] INFO Engine - the machine info =>
  3. osInfo: Oracle Corporation 1.8 25.262-b10
  4. jvmInfo: Linux amd64 3.10.0-957.el7.x86_64
  5. cpu num: 128
  6. totalPhysicalMemory: -0.00G
  7. freePhysicalMemory: -0.00G
  8. maxFileDescriptorCount: -1
  9. currentOpenFileDescriptorCount: -1
  10. GC Names [PS MarkSweep, PS Scavenge]
  11. MEMORY_NAME | allocation_size | init_size
  12. PS Eden Space | 256.00MB | 256.00MB
  13. Code Cache | 240.00MB | 2.44MB
  14. Compressed Class Space | 1,024.00MB | 0.00MB
  15. PS Survivor Space | 42.50MB | 42.50MB
  16. PS Old Gen | 683.00MB | 683.00MB
  17. Metaspace | -0.00MB | 0.00MB
  18. 2022-07-17 22:51:12.950 [main] INFO Engine -
  19. {
  20. "content":[
  21. {
  22. "reader":{
  23. "name":"hdfsreader",
  24. "parameter":{
  25. "column":[
  26. "*"
  27. ],
  28. "defaultFS":"hdfs://iict/",
  29. "encoding":"UTF-8",
  30. "fieldDelimiter":"\t",
  31. "fileType":"text",
  32. "path":"/user/hive/warehouse/user_info/user_info_data.txt"
  33. }
  34. },
  35. "writer":{
  36. "name":"hdfswriter",
  37. "parameter":{
  38. "column":[
  39. {
  40. "name":"user_id",
  41. "type":"string"
  42. },
  43. {
  44. "name":"age",
  45. "type":"int"
  46. }
  47. ],
  48. "compress":"",
  49. "defaultFS":"hdfs://hadoop03:8020/",
  50. "fieldDelimiter":"\t",
  51. "fileName":"user_info_data_1.txt",
  52. "fileType":"text",
  53. "path":"/user/hive/warehouse/user_info/",
  54. "writeMode":"append"
  55. }
  56. }
  57. }
  58. ],
  59. "setting":{
  60. "speed":{
  61. "channel":"1"
  62. }
  63. }
  64. }
  65. 2022-07-17 22:51:12.970 [main] WARN Engine - priority set to 0, because NumberFormatException, the value is: null
  66. 2022-07-17 22:51:12.972 [main] INFO PerfTrace - PerfTrace traceId=job_-1, isEnable=false, priority=0
  67. 2022-07-17 22:51:12.972 [main] INFO JobContainer - DataX jobContainer starts job.
  68. 2022-07-17 22:51:12.974 [main] INFO JobContainer - Set jobId = 0
  69. 2022-07-17 22:51:12.998 [job-0] INFO HdfsReader$Job - init() begin...
  70. 2022-07-17 22:51:13.328 [job-0] INFO HdfsReader$Job - hadoopConfig details:{"finalParameters":[]}
  71. 2022-07-17 22:51:13.328 [job-0] INFO HdfsReader$Job - init() ok and end...
  72. 2022-07-17 22:51:14.681 [job-0] INFO JobContainer - jobContainer starts to do prepare ...
  73. 2022-07-17 22:51:14.682 [job-0] INFO JobContainer - DataX Reader.Job [hdfsreader] do prepare work .
  74. 2022-07-17 22:51:14.682 [job-0] INFO HdfsReader$Job - prepare(), start to getAllFiles...
  75. 2022-07-17 22:51:14.682 [job-0] INFO HdfsReader$Job - get HDFS all files in path = [/user/hive/warehouse/user_info/user_info_data.txt]
  76. 2022-07-17 22:51:15.146 [job-0] ERROR JobContainer - Exception when job run
  77. java.lang.IllegalArgumentException: java.net.UnknownHostException: iict
  78. at org.apache.hadoop.security.SecurityUtil.buildTokenService(SecurityUtil.java:378) ~[hadoop-common-2.7.1.jar:na]
  79. at org.apache.hadoop.hdfs.NameNodeProxies.createNonHAProxy(NameNodeProxies.java:310) ~[hadoop-hdfs-2.7.1.jar:na]
  80. at org.apache.hadoop.hdfs.NameNodeProxies.createProxy(NameNodeProxies.java:176) ~[hadoop-hdfs-2.7.1.jar:na]
  81. at org.apache.hadoop.hdfs.DFSClient.<init>(DFSClient.java:678) ~[hadoop-hdfs-2.7.1.jar:na]
  82. at org.apache.hadoop.hdfs.DFSClient.<init>(DFSClient.java:619) ~[hadoop-hdfs-2.7.1.jar:na]
  83. at org.apache.hadoop.hdfs.DistributedFileSystem.initialize(DistributedFileSystem.java:149) ~[hadoop-hdfs-2.7.1.jar:na]
  84. at org.apache.hadoop.fs.FileSystem.createFileSystem(FileSystem.java:2653) ~[hadoop-common-2.7.1.jar:na]
  85. at org.apache.hadoop.fs.FileSystem.access$200(FileSystem.java:92) ~[hadoop-common-2.7.1.jar:na]
  86. at org.apache.hadoop.fs.FileSystem$Cache.getInternal(FileSystem.java:2687) ~[hadoop-common-2.7.1.jar:na]
  87. at org.apache.hadoop.fs.FileSystem$Cache.get(FileSystem.java:2669) ~[hadoop-common-2.7.1.jar:na]
  88. at org.apache.hadoop.fs.FileSystem.get(FileSystem.java:371) ~[hadoop-common-2.7.1.jar:na]
  89. at org.apache.hadoop.fs.FileSystem.get(FileSystem.java:170) ~[hadoop-common-2.7.1.jar:na]
  90. at com.alibaba.datax.plugin.reader.hdfsreader.DFSUtil.getHDFSAllFiles(DFSUtil.java:123) ~[hdfsreader-0.0.1-SNAPSHOT.jar:na]
  91. at com.alibaba.datax.plugin.reader.hdfsreader.DFSUtil.getAllFiles(DFSUtil.java:112) ~[hdfsreader-0.0.1-SNAPSHOT.jar:na]
  92. at com.alibaba.datax.plugin.reader.hdfsreader.HdfsReader$Job.prepare(HdfsReader.java:169) ~[hdfsreader-0.0.1-SNAPSHOT.jar:na]
  93. at com.alibaba.datax.core.job.JobContainer.prepareJobReader(JobContainer.java:715) ~[datax-core-0.0.1-SNAPSHOT.jar:na]
  94. at com.alibaba.datax.core.job.JobContainer.prepare(JobContainer.java:308) ~[datax-core-0.0.1-SNAPSHOT.jar:na]
  95. at com.alibaba.datax.core.job.JobContainer.start(JobContainer.java:115) ~[datax-core-0.0.1-SNAPSHOT.jar:na]
  96. at com.alibaba.datax.core.Engine.start(Engine.java:92) [datax-core-0.0.1-SNAPSHOT.jar:na]
  97. at com.alibaba.datax.core.Engine.entry(Engine.java:171) [datax-core-0.0.1-SNAPSHOT.jar:na]
  98. at com.alibaba.datax.core.Engine.main(Engine.java:204) [datax-core-0.0.1-SNAPSHOT.jar:na]
  99. Caused by: java.net.UnknownHostException: iict
  100. ... 21 common frames omitted
  101. 2022-07-17 22:51:15.158 [job-0] INFO StandAloneJobContainerCommunicator - Total 0 records, 0 bytes | Speed 0B/s, 0 records/s | Error 0 records, 0 bytes | All Task WaitWriterTime 0.000s | All Task WaitReaderTime 0.000s | Percentage 0.00%
  102. 2022-07-17 22:51:15.170 [job-0] ERROR Engine -
  103. 经DataX智能分析,该任务最可能的错误原因是:
  104. com.alibaba.datax.common.exception.DataXException: Code:[Framework-02], Description:[DataX引擎运行过程出错,具体原因请参看DataX运行结束时的错误诊断信息 .]. - java.lang.IllegalArgumentException: java.net.UnknownHostException: iict
  105. at org.apache.hadoop.security.SecurityUtil.buildTokenService(SecurityUtil.java:378)
  106. at org.apache.hadoop.hdfs.NameNodeProxies.createNonHAProxy(NameNodeProxies.java:310)
  107. at org.apache.hadoop.hdfs.NameNodeProxies.createProxy(NameNodeProxies.java:176)
  108. at org.apache.hadoop.hdfs.DFSClient.<init>(DFSClient.java:678)
  109. at org.apache.hadoop.hdfs.DFSClient.<init>(DFSClient.java:619)
  110. at org.apache.hadoop.hdfs.DistributedFileSystem.initialize(DistributedFileSystem.java:149)
  111. at org.apache.hadoop.fs.FileSystem.createFileSystem(FileSystem.java:2653)
  112. at org.apache.hadoop.fs.FileSystem.access$200(FileSystem.java:92)
  113. at org.apache.hadoop.fs.FileSystem$Cache.getInternal(FileSystem.java:2687)
  114. at org.apache.hadoop.fs.FileSystem$Cache.get(FileSystem.java:2669)
  115. at org.apache.hadoop.fs.FileSystem.get(FileSystem.java:371)
  116. at org.apache.hadoop.fs.FileSystem.get(FileSystem.java:170)
  117. at com.alibaba.datax.plugin.reader.hdfsreader.DFSUtil.getHDFSAllFiles(DFSUtil.java:123)
  118. at com.alibaba.datax.plugin.reader.hdfsreader.DFSUtil.getAllFiles(DFSUtil.java:112)
  119. at com.alibaba.datax.plugin.reader.hdfsreader.HdfsReader$Job.prepare(HdfsReader.java:169)
  120. at com.alibaba.datax.core.job.JobContainer.prepareJobReader(JobContainer.java:715)
  121. at com.alibaba.datax.core.job.JobContainer.prepare(JobContainer.java:308)
  122. at com.alibaba.datax.core.job.JobContainer.start(JobContainer.java:115)
  123. at com.alibaba.datax.core.Engine.start(Engine.java:92)
  124. at com.alibaba.datax.core.Engine.entry(Engine.java:171)
  125. at com.alibaba.datax.core.Engine.main(Engine.java:204)
  126. Caused by: java.net.UnknownHostException: iict
  127. ... 21 more
  128. - java.lang.IllegalArgumentException: java.net.UnknownHostException: iict
  129. at org.apache.hadoop.security.SecurityUtil.buildTokenService(SecurityUtil.java:378)
  130. at org.apache.hadoop.hdfs.NameNodeProxies.createNonHAProxy(NameNodeProxies.java:310)
  131. at org.apache.hadoop.hdfs.NameNodeProxies.createProxy(NameNodeProxies.java:176)
  132. at org.apache.hadoop.hdfs.DFSClient.<init>(DFSClient.java:678)
  133. at org.apache.hadoop.hdfs.DFSClient.<init>(DFSClient.java:619)
  134. at org.apache.hadoop.hdfs.DistributedFileSystem.initialize(DistributedFileSystem.java:149)
  135. at org.apache.hadoop.fs.FileSystem.createFileSystem(FileSystem.java:2653)
  136. at org.apache.hadoop.fs.FileSystem.access$200(FileSystem.java:92)
  137. at org.apache.hadoop.fs.FileSystem$Cache.getInternal(FileSystem.java:2687)
  138. at org.apache.hadoop.fs.FileSystem$Cache.get(FileSystem.java:2669)
  139. at org.apache.hadoop.fs.FileSystem.get(FileSystem.java:371)
  140. at org.apache.hadoop.fs.FileSystem.get(FileSystem.java:170)
  141. at com.alibaba.datax.plugin.reader.hdfsreader.DFSUtil.getHDFSAllFiles(DFSUtil.java:123)
  142. at com.alibaba.datax.plugin.reader.hdfsreader.DFSUtil.getAllFiles(DFSUtil.java:112)
  143. at com.alibaba.datax.plugin.reader.hdfsreader.HdfsReader$Job.prepare(HdfsReader.java:169)
  144. at com.alibaba.datax.core.job.JobContainer.prepareJobReader(JobContainer.java:715)
  145. at com.alibaba.datax.core.job.JobContainer.prepare(JobContainer.java:308)
  146. at com.alibaba.datax.core.job.JobContainer.start(JobContainer.java:115)
  147. at com.alibaba.datax.core.Engine.start(Engine.java:92)
  148. at com.alibaba.datax.core.Engine.entry(Engine.java:171)
  149. at com.alibaba.datax.core.Engine.main(Engine.java:204)
  150. Caused by: java.net.UnknownHostException: iict
  151. ... 21 more
  152. at com.alibaba.datax.common.exception.DataXException.asDataXException(DataXException.java:40)
  153. at com.alibaba.datax.core.job.JobContainer.start(JobContainer.java:162)
  154. at com.alibaba.datax.core.Engine.start(Engine.java:92)
  155. at com.alibaba.datax.core.Engine.entry(Engine.java:171)
  156. at com.alibaba.datax.core.Engine.main(Engine.java:204)
  157. Caused by: java.lang.IllegalArgumentException: java.net.UnknownHostException: iict
  158. at org.apache.hadoop.security.SecurityUtil.buildTokenService(SecurityUtil.java:378)
  159. at org.apache.hadoop.hdfs.NameNodeProxies.createNonHAProxy(NameNodeProxies.java:310)
  160. at org.apache.hadoop.hdfs.NameNodeProxies.createProxy(NameNodeProxies.java:176)
  161. at org.apache.hadoop.hdfs.DFSClient.<init>(DFSClient.java:678)
  162. at org.apache.hadoop.hdfs.DFSClient.<init>(DFSClient.java:619)
  163. at org.apache.hadoop.hdfs.DistributedFileSystem.initialize(DistributedFileSystem.java:149)
  164. at org.apache.hadoop.fs.FileSystem.createFileSystem(FileSystem.java:2653)
  165. at org.apache.hadoop.fs.FileSystem.access$200(FileSystem.java:92)
  166. at org.apache.hadoop.fs.FileSystem$Cache.getInternal(FileSystem.java:2687)
  167. at org.apache.hadoop.fs.FileSystem$Cache.get(FileSystem.java:2669)
  168. at org.apache.hadoop.fs.FileSystem.get(FileSystem.java:371)
  169. at org.apache.hadoop.fs.FileSystem.get(FileSystem.java:170)
  170. at com.alibaba.datax.plugin.reader.hdfsreader.DFSUtil.getHDFSAllFiles(DFSUtil.java:123)
  171. at com.alibaba.datax.plugin.reader.hdfsreader.DFSUtil.getAllFiles(DFSUtil.java:112)
  172. at com.alibaba.datax.plugin.reader.hdfsreader.HdfsReader$Job.prepare(HdfsReader.java:169)
  173. at com.alibaba.datax.core.job.JobContainer.prepareJobReader(JobContainer.java:715)
  174. at com.alibaba.datax.core.job.JobContainer.prepare(JobContainer.java:308)
  175. at com.alibaba.datax.core.job.JobContainer.start(JobContainer.java:115)
  176. ... 3 more
  177. Caused by: java.net.UnknownHostException: iict
  178. ... 21 more