2hive1659860410_json-16_20_10.526.log 10 KB

2022-08-07 16:20:10.708 [main] INFO VMInfo - VMInfo# operatingSystem class => com.sun.management.internal.OperatingSystemImpl
2022-08-07 16:20:10.713 [main] INFO Engine - the machine info =>
osInfo: Ubuntu 11 11.0.16+8-post-Ubuntu-0ubuntu120.04
jvmInfo: Linux amd64 5.13.0-52-generic
cpu num: 8
totalPhysicalMemory: -0.00G
freePhysicalMemory: -0.00G
maxFileDescriptorCount: -1
currentOpenFileDescriptorCount: -1
GC Names [G1 Young Generation, G1 Old Generation]

MEMORY_NAME                      | allocation_size | init_size
CodeHeap 'profiled nmethods'     | 117.21MB        | 2.44MB
G1 Old Gen                       | 1,024.00MB      | 970.00MB
G1 Survivor Space                | -0.00MB         | 0.00MB
CodeHeap 'non-profiled nmethods' | 117.22MB        | 2.44MB
Compressed Class Space           | 1,024.00MB      | 0.00MB
Metaspace                        | -0.00MB         | 0.00MB
G1 Eden Space                    | -0.00MB         | 54.00MB
CodeHeap 'non-nmethods'          | 5.57MB          | 2.44MB
2022-08-07 16:20:10.723 [main] INFO Engine -
{
    "content":[
        {
            "reader":{
                "name":"hdfsreader",
                "parameter":{
                    "column":[
                        "*"
                    ],
                    "defaultFS":"hdfs://hadoop03:8020/",
                    "encoding":"UTF-8",
                    "fieldDelimiter":"\t",
                    "fileType":"text",
                    "path":"/user/hive/warehouse/user_info/user_info_data.txt"
                }
            },
            "writer":{
                "name":"hdfswriter",
                "parameter":{
                    "column":[
                        {
                            "name":"user_id",
                            "type":"string"
                        },
                        {
                            "name":"area_id",
                            "type":"string"
                        },
                        {
                            "name":"age",
                            "type":"int"
                        },
                        {
                            "name":"occupation",
                            "type":"string"
                        }
                    ],
                    "compress":"",
                    "defaultFS":"hdfs://hadoop03:8020/",
                    "fieldDelimiter":"\t",
                    "fileName":"user_info_data_2.txt",
                    "fileType":"text",
                    "path":"/user/hive/warehouse/user_info/",
                    "writeMode":"append"
                }
            }
        }
    ],
    "setting":{
        "speed":{
            "channel":"2"
        }
    }
}
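
For reference, a config like the one above is normally saved as a standalone job file (with this content/setting block wrapped under a top-level "job" key) and handed to DataX's bundled launcher script. The sketch below is not taken from this log; the DataX install path and the job file name are assumptions to adjust for your environment.

# Minimal sketch: launch a saved DataX job file in standalone mode.
# DATAX_HOME and JOB_FILE are assumed paths, not values from this log.
import subprocess

DATAX_HOME = "/opt/datax"                       # assumed DataX install directory
JOB_FILE = "jobs/user_info_hdfs_to_hdfs.json"   # the config above, wrapped in {"job": ...}

# DataX ships a Python launcher; a standalone run is: python bin/datax.py <job.json>
subprocess.run(
    ["python", f"{DATAX_HOME}/bin/datax.py", JOB_FILE],
    check=True,  # raise CalledProcessError if DataX exits non-zero
)
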
2022-08-07 16:20:10.734 [main] WARN Engine - prioriy set to 0, because NumberFormatException, the value is: null
2022-08-07 16:20:10.735 [main] INFO PerfTrace - PerfTrace traceId=job_-1, isEnable=false, priority=0
2022-08-07 16:20:10.736 [main] INFO JobContainer - DataX jobContainer starts job.
2022-08-07 16:20:10.737 [main] INFO JobContainer - Set jobId = 0
2022-08-07 16:20:10.750 [job-0] INFO HdfsReader$Job - init() begin...
2022-08-07 16:20:10.979 [job-0] INFO HdfsReader$Job - hadoopConfig details:{"finalParameters":[]}
2022-08-07 16:20:10.979 [job-0] INFO HdfsReader$Job - init() ok and end...
2022-08-07 16:20:11.575 [job-0] INFO JobContainer - jobContainer starts to do prepare ...
2022-08-07 16:20:11.576 [job-0] INFO JobContainer - DataX Reader.Job [hdfsreader] do prepare work .
2022-08-07 16:20:11.576 [job-0] INFO HdfsReader$Job - prepare(), start to getAllFiles...
2022-08-07 16:20:11.576 [job-0] INFO HdfsReader$Job - get HDFS all files in path = [/user/hive/warehouse/user_info/user_info_data.txt]
2022-08-07 16:20:11.994 [job-0] INFO HdfsReader$Job - [hdfs://hadoop03:8020/user/hive/warehouse/user_info/user_info_data.txt] is a [text] file; adding it to the source files list
2022-08-07 16:20:11.995 [job-0] INFO HdfsReader$Job - Number of files about to be read: [1], list: [hdfs://hadoop03:8020/user/hive/warehouse/user_info/user_info_data.txt]
2022-08-07 16:20:11.996 [job-0] INFO JobContainer - DataX Writer.Job [hdfswriter] do prepare work .
2022-08-07 16:20:12.035 [job-0] INFO HdfsWriter$Job - Because writeMode is append, no cleanup is performed before writing; files prefixed with [user_info_data_2.txt] will be written under the [/user/hive/warehouse/user_info/] directory
2022-08-07 16:20:12.036 [job-0] INFO JobContainer - jobContainer starts to do split ...
2022-08-07 16:20:12.036 [job-0] INFO JobContainer - Job set Channel-Number to 2 channels.
2022-08-07 16:20:12.036 [job-0] INFO HdfsReader$Job - split() begin...
2022-08-07 16:20:12.036 [job-0] INFO JobContainer - DataX Reader.Job [hdfsreader] splits to [1] tasks.
2022-08-07 16:20:12.037 [job-0] INFO HdfsWriter$Job - begin do split...
2022-08-07 16:20:12.041 [job-0] INFO HdfsWriter$Job - splited write file name:[hdfs://hadoop03:8020//user/hive/warehouse/user_info__e6d84b6f_0e53_4fc0_b8b4_b063711e54b6/user_info_data_2.txt__031bc8a3_7c7c_484f_af81_f539ac8c46a2]
2022-08-07 16:20:12.041 [job-0] INFO HdfsWriter$Job - end do split.
2022-08-07 16:20:12.041 [job-0] INFO JobContainer - DataX Writer.Job [hdfswriter] splits to [1] tasks.
2022-08-07 16:20:12.046 [job-0] INFO JobContainer - jobContainer starts to do schedule ...
2022-08-07 16:20:12.049 [job-0] INFO JobContainer - Scheduler starts [1] taskGroups.
2022-08-07 16:20:12.050 [job-0] INFO JobContainer - Running by standalone Mode.
2022-08-07 16:20:12.053 [taskGroup-0] INFO TaskGroupContainer - taskGroupId=[0] start [1] channels for [1] tasks.
2022-08-07 16:20:12.057 [taskGroup-0] INFO Channel - Channel set byte_speed_limit to -1, No bps activated.
2022-08-07 16:20:12.057 [taskGroup-0] INFO Channel - Channel set record_speed_limit to -1, No tps activated.
2022-08-07 16:20:12.062 [taskGroup-0] INFO TaskGroupContainer - taskGroup[0] taskId[0] attemptCount[1] is started
2022-08-07 16:20:12.084 [0-0-0-reader] INFO HdfsReader$Job - hadoopConfig details:{"finalParameters":["mapreduce.job.end-notification.max.retry.interval","mapreduce.job.end-notification.max.attempts"]}
2022-08-07 16:20:12.084 [0-0-0-reader] INFO Reader$Task - read start
2022-08-07 16:20:12.084 [0-0-0-reader] INFO Reader$Task - reading file : [hdfs://hadoop03:8020/user/hive/warehouse/user_info/user_info_data.txt]
2022-08-07 16:20:12.089 [0-0-0-writer] INFO HdfsWriter$Task - begin do write...
2022-08-07 16:20:12.089 [0-0-0-writer] INFO HdfsWriter$Task - write to file : [hdfs://hadoop03:8020//user/hive/warehouse/user_info__e6d84b6f_0e53_4fc0_b8b4_b063711e54b6/user_info_data_2.txt__031bc8a3_7c7c_484f_af81_f539ac8c46a2]
2022-08-07 16:20:12.096 [0-0-0-reader] INFO UnstructuredStorageReaderUtil - CsvReader is using default values [{"captureRawRecord":true,"columnCount":0,"comment":"#","currentRecord":-1,"delimiter":"\t","escapeMode":1,"headerCount":0,"rawRecord":"","recordDelimiter":"\u0000","safetySwitch":false,"skipEmptyRecords":true,"textQualifier":"\"","trimWhitespace":true,"useComments":false,"useTextQualifier":true,"values":[]}], csvReaderConfig value is [null]
2022-08-07 16:20:12.100 [0-0-0-reader] INFO Reader$Task - end read source files...
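
The CsvReader defaults above apply because csvReaderConfig was left unset ([null]) in the reader parameter. As a hedged illustration only (the keys below are copied from the defaults dump; verify the exact shape against the hdfsreader documentation), an override could be expressed like this and merged into the reader's parameter block:

# Sketch of overriding the CsvReader defaults shown in the log via "csvReaderConfig".
# The keys mirror the defaults dump above; treat the exact config shape as an assumption.
import json

reader_parameter = {
    "path": "/user/hive/warehouse/user_info/user_info_data.txt",
    "defaultFS": "hdfs://hadoop03:8020/",
    "fileType": "text",
    "encoding": "UTF-8",
    "fieldDelimiter": "\t",
    "column": ["*"],
    "csvReaderConfig": {
        "safetySwitch": False,     # do not truncate unusually long fields
        "skipEmptyRecords": True,  # same as the logged default
        "useTextQualifier": True,  # same as the logged default
    },
}

print(json.dumps(reader_parameter, ensure_ascii=False, indent=4))  # paste into the job file
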
2022-08-07 16:20:12.202 [0-0-0-writer] INFO HdfsWriter$Task - end do write
2022-08-07 16:20:12.263 [taskGroup-0] INFO TaskGroupContainer - taskGroup[0] taskId[0] is successed, used[202]ms
2022-08-07 16:20:12.263 [taskGroup-0] INFO TaskGroupContainer - taskGroup[0] completed it's tasks.
2022-08-07 16:20:22.060 [job-0] INFO StandAloneJobContainerCommunicator - Total 4 records, 79 bytes | Speed 7B/s, 0 records/s | Error 0 records, 0 bytes | All Task WaitWriterTime 0.000s | All Task WaitReaderTime 0.000s | Percentage 100.00%
2022-08-07 16:20:22.060 [job-0] INFO AbstractScheduler - Scheduler accomplished all tasks.
2022-08-07 16:20:22.060 [job-0] INFO JobContainer - DataX Writer.Job [hdfswriter] do post work.
2022-08-07 16:20:22.060 [job-0] INFO HdfsWriter$Job - start rename file [hdfs://hadoop03:8020//user/hive/warehouse/user_info__e6d84b6f_0e53_4fc0_b8b4_b063711e54b6/user_info_data_2.txt__031bc8a3_7c7c_484f_af81_f539ac8c46a2] to file [hdfs://hadoop03:8020//user/hive/warehouse/user_info/user_info_data_2.txt__031bc8a3_7c7c_484f_af81_f539ac8c46a2].
2022-08-07 16:20:22.072 [job-0] INFO HdfsWriter$Job - finish rename file [hdfs://hadoop03:8020//user/hive/warehouse/user_info__e6d84b6f_0e53_4fc0_b8b4_b063711e54b6/user_info_data_2.txt__031bc8a3_7c7c_484f_af81_f539ac8c46a2] to file [hdfs://hadoop03:8020//user/hive/warehouse/user_info/user_info_data_2.txt__031bc8a3_7c7c_484f_af81_f539ac8c46a2].
2022-08-07 16:20:22.072 [job-0] INFO HdfsWriter$Job - start delete tmp dir [hdfs://hadoop03:8020/user/hive/warehouse/user_info__e6d84b6f_0e53_4fc0_b8b4_b063711e54b6] .
2022-08-07 16:20:22.079 [job-0] INFO HdfsWriter$Job - finish delete tmp dir [hdfs://hadoop03:8020/user/hive/warehouse/user_info__e6d84b6f_0e53_4fc0_b8b4_b063711e54b6] .
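
The post-work sequence above (write into a UUID-suffixed temp directory, rename the finished file into the target path, then delete the temp directory) is a common commit pattern for HDFS writers: partially written files never appear in the target directory unless the job succeeds. Below is a minimal sketch of the same sequence driven through the standard hdfs dfs CLI; the paths are illustrative placeholders, not the exact UUID-suffixed ones from this run.

# Sketch of the write-temp-then-rename commit pattern seen in the log.
# Paths are placeholders; hdfs dfs -mv / -rm -r are standard Hadoop shell commands.
import subprocess

TMP_DIR = "hdfs://hadoop03:8020/user/hive/warehouse/user_info__tmp"
TARGET  = "hdfs://hadoop03:8020/user/hive/warehouse/user_info/user_info_data_2.txt"

def hdfs(*args: str) -> None:
    subprocess.run(["hdfs", "dfs", *args], check=True)

# 1. the writer task streams its output into a file under TMP_DIR (done by the job)
# 2. on success, move the finished file into the target directory
hdfs("-mv", f"{TMP_DIR}/user_info_data_2.txt", TARGET)
# 3. remove the now-empty temp directory
hdfs("-rm", "-r", TMP_DIR)
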
2022-08-07 16:20:22.079 [job-0] INFO JobContainer - DataX Reader.Job [hdfsreader] do post work.
2022-08-07 16:20:22.079 [job-0] INFO JobContainer - DataX jobId [0] completed successfully.
2022-08-07 16:20:22.080 [job-0] INFO HookInvoker - No hook invoked, because base dir not exists or is a file: /home/zhouze/PycharmProjects/yili/yili-portal/app/datax/hook
2022-08-07 16:20:22.182 [job-0] INFO JobContainer -
[total cpu info] =>
averageCpu | maxDeltaCpu | minDeltaCpu
-1.00%     | -1.00%      | -1.00%

[total gc info] =>
NAME                | totalGCCount | maxDeltaGCCount | minDeltaGCCount | totalGCTime | maxDeltaGCTime | minDeltaGCTime
G1 Young Generation | 5            | 5               | 5               | 0.046s      | 0.046s         | 0.046s
G1 Old Generation   | 0            | 0               | 0               | 0.000s      | 0.000s         | 0.000s

2022-08-07 16:20:22.182 [job-0] INFO JobContainer - PerfTrace not enable!
2022-08-07 16:20:22.182 [job-0] INFO StandAloneJobContainerCommunicator - Total 4 records, 79 bytes | Speed 7B/s, 0 records/s | Error 0 records, 0 bytes | All Task WaitWriterTime 0.000s | All Task WaitReaderTime 0.000s | Percentage 100.00%
2022-08-07 16:20:22.183 [job-0] INFO JobContainer -
Job start time            : 2022-08-07 16:20:10
Job end time              : 2022-08-07 16:20:22
Total elapsed time        : 11s
Average throughput        : 7B/s
Record write speed        : 0rec/s
Total records read        : 4
Total read/write failures : 0