- 2022-07-17 22:44:22.379 [main] INFO VMInfo - VMInfo# operatingSystem class => sun.management.OperatingSystemImpl
- 2022-07-17 22:44:22.389 [main] INFO Engine - the machine info =>
- osInfo: Oracle Corporation 1.8 25.262-b10
- jvmInfo: Linux amd64 3.10.0-957.el7.x86_64
- cpu num: 128
- totalPhysicalMemory: -0.00G
- freePhysicalMemory: -0.00G
- maxFileDescriptorCount: -1
- currentOpenFileDescriptorCount: -1
- GC Names [PS MarkSweep, PS Scavenge]
- MEMORY_NAME | allocation_size | init_size
- PS Eden Space | 256.00MB | 256.00MB
- Code Cache | 240.00MB | 2.44MB
- Compressed Class Space | 1,024.00MB | 0.00MB
- PS Survivor Space | 42.50MB | 42.50MB
- PS Old Gen | 683.00MB | 683.00MB
- Metaspace | -0.00MB | 0.00MB
- 2022-07-17 22:44:22.408 [main] INFO Engine -
- {
- "content":[
- {
- "reader":{
- "name":"hdfsreader",
- "parameter":{
- "column":[
- "*"
- ],
- "defaultFS":"hdfs://hadoop01:8020/",
- "encoding":"UTF-8",
- "fieldDelimiter":"\t",
- "fileType":"text",
- "path":"/user/hive/warehouse/user_info/user_info_data.txt"
- }
- },
- "writer":{
- "name":"hdfswriter",
- "parameter":{
- "column":[
- {
- "name":"user_id",
- "type":"string"
- },
- {
- "name":"age",
- "type":"int"
- }
- ],
- "compress":"",
- "defaultFS":"hdfs://hadoop01:8020/",
- "fieldDelimiter":"\t",
- "fileName":"user_info_data_1.txt",
- "fileType":"text",
- "path":"/user/hive/warehouse/user_info/",
- "writeMode":"append"
- }
- }
- }
- ],
- "setting":{
- "speed":{
- "channel":"1"
- }
- }
- }
- 2022-07-17 22:44:22.428 [main] WARN Engine - prioriy set to 0, because NumberFormatException, the value is: null
- 2022-07-17 22:44:22.430 [main] INFO PerfTrace - PerfTrace traceId=job_-1, isEnable=false, priority=0
- 2022-07-17 22:44:22.430 [main] INFO JobContainer - DataX jobContainer starts job.
- 2022-07-17 22:44:22.433 [main] INFO JobContainer - Set jobId = 0
- 2022-07-17 22:44:22.457 [job-0] INFO HdfsReader$Job - init() begin...
- 2022-07-17 22:44:22.869 [job-0] INFO HdfsReader$Job - hadoopConfig details:{"finalParameters":[]}
- 2022-07-17 22:44:22.869 [job-0] INFO HdfsReader$Job - init() ok and end...
- 2022-07-17 22:44:24.044 [job-0] INFO JobContainer - jobContainer starts to do prepare ...
- 2022-07-17 22:44:24.044 [job-0] INFO JobContainer - DataX Reader.Job [hdfsreader] do prepare work .
- 2022-07-17 22:44:24.044 [job-0] INFO HdfsReader$Job - prepare(), start to getAllFiles...
- 2022-07-17 22:44:24.045 [job-0] INFO HdfsReader$Job - get HDFS all files in path = [/user/hive/warehouse/user_info/user_info_data.txt]
- 2022-07-17 22:44:24.709 [job-0] ERROR HdfsReader$Job - 无法读取路径[/user/hive/warehouse/user_info/user_info_data.txt]下的所有文件,请确认您的配置项fs.defaultFS, path的值是否正确,是否有读写权限,网络是否已断开!
- 2022-07-17 22:44:24.715 [job-0] ERROR JobContainer - Exception when job run
- com.alibaba.datax.common.exception.DataXException: Code:[HdfsReader-09], Description:[您配置的path格式有误]. - org.apache.hadoop.ipc.RemoteException(org.apache.hadoop.ipc.StandbyException): Operation category READ is not supported in state standby. Visit https://s.apache.org/sbnn-error
- at org.apache.hadoop.hdfs.server.namenode.ha.StandbyState.checkOperation(StandbyState.java:88)
- at org.apache.hadoop.hdfs.server.namenode.NameNode$NameNodeHAContext.checkOperation(NameNode.java:1962)
- at org.apache.hadoop.hdfs.server.namenode.FSNamesystem.checkOperation(FSNamesystem.java:1421)
- at org.apache.hadoop.hdfs.server.namenode.FSNamesystem.getListing(FSNamesystem.java:3729)
- at org.apache.hadoop.hdfs.server.namenode.NameNodeRpcServer.getListing(NameNodeRpcServer.java:1138)
- at org.apache.hadoop.hdfs.protocolPB.ClientNamenodeProtocolServerSideTranslatorPB.getListing(ClientNamenodeProtocolServerSideTranslatorPB.java:708)
- at org.apache.hadoop.hdfs.protocol.proto.ClientNamenodeProtocolProtos$ClientNamenodeProtocol$2.callBlockingMethod(ClientNamenodeProtocolProtos.java)
- at org.apache.hadoop.ipc.ProtobufRpcEngine$Server$ProtoBufRpcInvoker.call(ProtobufRpcEngine.java:523)
- at org.apache.hadoop.ipc.RPC$Server.call(RPC.java:991)
- at org.apache.hadoop.ipc.Server$RpcCall.run(Server.java:870)
- at org.apache.hadoop.ipc.Server$RpcCall.run(Server.java:816)
- at java.security.AccessController.doPrivileged(Native Method)
- at javax.security.auth.Subject.doAs(Subject.java:422)
- at org.apache.hadoop.security.UserGroupInformation.doAs(UserGroupInformation.java:1875)
- at org.apache.hadoop.ipc.Server$Handler.run(Server.java:2680)
- at org.apache.hadoop.ipc.Client.call(Client.java:1476)
- at org.apache.hadoop.ipc.Client.call(Client.java:1407)
- at org.apache.hadoop.ipc.ProtobufRpcEngine$Invoker.invoke(ProtobufRpcEngine.java:229)
- at com.sun.proxy.$Proxy20.getListing(Unknown Source)
- at org.apache.hadoop.hdfs.protocolPB.ClientNamenodeProtocolTranslatorPB.getListing(ClientNamenodeProtocolTranslatorPB.java:573)
- at sun.reflect.NativeMethodAccessorImpl.invoke0(Native Method)
- at sun.reflect.NativeMethodAccessorImpl.invoke(NativeMethodAccessorImpl.java:62)
- at sun.reflect.DelegatingMethodAccessorImpl.invoke(DelegatingMethodAccessorImpl.java:43)
- at java.lang.reflect.Method.invoke(Method.java:498)
- at org.apache.hadoop.io.retry.RetryInvocationHandler.invokeMethod(RetryInvocationHandler.java:187)
- at org.apache.hadoop.io.retry.RetryInvocationHandler.invoke(RetryInvocationHandler.java:102)
- at com.sun.proxy.$Proxy21.getListing(Unknown Source)
- at org.apache.hadoop.hdfs.DFSClient.listPaths(DFSClient.java:2094)
- at org.apache.hadoop.hdfs.DFSClient.listPaths(DFSClient.java:2077)
- at org.apache.hadoop.hdfs.DistributedFileSystem.listStatusInternal(DistributedFileSystem.java:791)
- at org.apache.hadoop.hdfs.DistributedFileSystem.access$700(DistributedFileSystem.java:106)
- at org.apache.hadoop.hdfs.DistributedFileSystem$18.doCall(DistributedFileSystem.java:853)
- at org.apache.hadoop.hdfs.DistributedFileSystem$18.doCall(DistributedFileSystem.java:849)
- at org.apache.hadoop.fs.FileSystemLinkResolver.resolve(FileSystemLinkResolver.java:81)
- at org.apache.hadoop.hdfs.DistributedFileSystem.listStatus(DistributedFileSystem.java:860)
- at com.alibaba.datax.plugin.reader.hdfsreader.DFSUtil.getHDFSAllFilesNORegex(DFSUtil.java:162)
- at com.alibaba.datax.plugin.reader.hdfsreader.DFSUtil.getHDFSAllFiles(DFSUtil.java:141)
- at com.alibaba.datax.plugin.reader.hdfsreader.DFSUtil.getAllFiles(DFSUtil.java:112)
- at com.alibaba.datax.plugin.reader.hdfsreader.HdfsReader$Job.prepare(HdfsReader.java:169)
- at com.alibaba.datax.core.job.JobContainer.prepareJobReader(JobContainer.java:715)
- at com.alibaba.datax.core.job.JobContainer.prepare(JobContainer.java:308)
- at com.alibaba.datax.core.job.JobContainer.start(JobContainer.java:115)
- at com.alibaba.datax.core.Engine.start(Engine.java:92)
- at com.alibaba.datax.core.Engine.entry(Engine.java:171)
- at com.alibaba.datax.core.Engine.main(Engine.java:204)
- - org.apache.hadoop.ipc.RemoteException(org.apache.hadoop.ipc.StandbyException): Operation category READ is not supported in state standby. Visit https://s.apache.org/sbnn-error
- at org.apache.hadoop.hdfs.server.namenode.ha.StandbyState.checkOperation(StandbyState.java:88)
- at org.apache.hadoop.hdfs.server.namenode.NameNode$NameNodeHAContext.checkOperation(NameNode.java:1962)
- at org.apache.hadoop.hdfs.server.namenode.FSNamesystem.checkOperation(FSNamesystem.java:1421)
- at org.apache.hadoop.hdfs.server.namenode.FSNamesystem.getListing(FSNamesystem.java:3729)
- at org.apache.hadoop.hdfs.server.namenode.NameNodeRpcServer.getListing(NameNodeRpcServer.java:1138)
- at org.apache.hadoop.hdfs.protocolPB.ClientNamenodeProtocolServerSideTranslatorPB.getListing(ClientNamenodeProtocolServerSideTranslatorPB.java:708)
- at org.apache.hadoop.hdfs.protocol.proto.ClientNamenodeProtocolProtos$ClientNamenodeProtocol$2.callBlockingMethod(ClientNamenodeProtocolProtos.java)
- at org.apache.hadoop.ipc.ProtobufRpcEngine$Server$ProtoBufRpcInvoker.call(ProtobufRpcEngine.java:523)
- at org.apache.hadoop.ipc.RPC$Server.call(RPC.java:991)
- at org.apache.hadoop.ipc.Server$RpcCall.run(Server.java:870)
- at org.apache.hadoop.ipc.Server$RpcCall.run(Server.java:816)
- at java.security.AccessController.doPrivileged(Native Method)
- at javax.security.auth.Subject.doAs(Subject.java:422)
- at org.apache.hadoop.security.UserGroupInformation.doAs(UserGroupInformation.java:1875)
- at org.apache.hadoop.ipc.Server$Handler.run(Server.java:2680)
- at org.apache.hadoop.ipc.Client.call(Client.java:1476)
- at org.apache.hadoop.ipc.Client.call(Client.java:1407)
- at org.apache.hadoop.ipc.ProtobufRpcEngine$Invoker.invoke(ProtobufRpcEngine.java:229)
- at com.sun.proxy.$Proxy20.getListing(Unknown Source)
- at org.apache.hadoop.hdfs.protocolPB.ClientNamenodeProtocolTranslatorPB.getListing(ClientNamenodeProtocolTranslatorPB.java:573)
- at sun.reflect.NativeMethodAccessorImpl.invoke0(Native Method)
- at sun.reflect.NativeMethodAccessorImpl.invoke(NativeMethodAccessorImpl.java:62)
- at sun.reflect.DelegatingMethodAccessorImpl.invoke(DelegatingMethodAccessorImpl.java:43)
- at java.lang.reflect.Method.invoke(Method.java:498)
- at org.apache.hadoop.io.retry.RetryInvocationHandler.invokeMethod(RetryInvocationHandler.java:187)
- at org.apache.hadoop.io.retry.RetryInvocationHandler.invoke(RetryInvocationHandler.java:102)
- at com.sun.proxy.$Proxy21.getListing(Unknown Source)
- at org.apache.hadoop.hdfs.DFSClient.listPaths(DFSClient.java:2094)
- at org.apache.hadoop.hdfs.DFSClient.listPaths(DFSClient.java:2077)
- at org.apache.hadoop.hdfs.DistributedFileSystem.listStatusInternal(DistributedFileSystem.java:791)
- at org.apache.hadoop.hdfs.DistributedFileSystem.access$700(DistributedFileSystem.java:106)
- at org.apache.hadoop.hdfs.DistributedFileSystem$18.doCall(DistributedFileSystem.java:853)
- at org.apache.hadoop.hdfs.DistributedFileSystem$18.doCall(DistributedFileSystem.java:849)
- at org.apache.hadoop.fs.FileSystemLinkResolver.resolve(FileSystemLinkResolver.java:81)
- at org.apache.hadoop.hdfs.DistributedFileSystem.listStatus(DistributedFileSystem.java:860)
- at com.alibaba.datax.plugin.reader.hdfsreader.DFSUtil.getHDFSAllFilesNORegex(DFSUtil.java:162)
- at com.alibaba.datax.plugin.reader.hdfsreader.DFSUtil.getHDFSAllFiles(DFSUtil.java:141)
- at com.alibaba.datax.plugin.reader.hdfsreader.DFSUtil.getAllFiles(DFSUtil.java:112)
- at com.alibaba.datax.plugin.reader.hdfsreader.HdfsReader$Job.prepare(HdfsReader.java:169)
- at com.alibaba.datax.core.job.JobContainer.prepareJobReader(JobContainer.java:715)
- at com.alibaba.datax.core.job.JobContainer.prepare(JobContainer.java:308)
- at com.alibaba.datax.core.job.JobContainer.start(JobContainer.java:115)
- at com.alibaba.datax.core.Engine.start(Engine.java:92)
- at com.alibaba.datax.core.Engine.entry(Engine.java:171)
- at com.alibaba.datax.core.Engine.main(Engine.java:204)
- at com.alibaba.datax.common.exception.DataXException.asDataXException(DataXException.java:40) ~[datax-common-0.0.1-SNAPSHOT.jar:na]
- at com.alibaba.datax.plugin.reader.hdfsreader.DFSUtil.getHDFSAllFiles(DFSUtil.java:150) ~[hdfsreader-0.0.1-SNAPSHOT.jar:na]
- at com.alibaba.datax.plugin.reader.hdfsreader.DFSUtil.getAllFiles(DFSUtil.java:112) ~[hdfsreader-0.0.1-SNAPSHOT.jar:na]
- at com.alibaba.datax.plugin.reader.hdfsreader.HdfsReader$Job.prepare(HdfsReader.java:169) ~[hdfsreader-0.0.1-SNAPSHOT.jar:na]
- at com.alibaba.datax.core.job.JobContainer.prepareJobReader(JobContainer.java:715) ~[datax-core-0.0.1-SNAPSHOT.jar:na]
- at com.alibaba.datax.core.job.JobContainer.prepare(JobContainer.java:308) ~[datax-core-0.0.1-SNAPSHOT.jar:na]
- at com.alibaba.datax.core.job.JobContainer.start(JobContainer.java:115) ~[datax-core-0.0.1-SNAPSHOT.jar:na]
- at com.alibaba.datax.core.Engine.start(Engine.java:92) [datax-core-0.0.1-SNAPSHOT.jar:na]
- at com.alibaba.datax.core.Engine.entry(Engine.java:171) [datax-core-0.0.1-SNAPSHOT.jar:na]
- at com.alibaba.datax.core.Engine.main(Engine.java:204) [datax-core-0.0.1-SNAPSHOT.jar:na]
- Caused by: org.apache.hadoop.ipc.RemoteException: Operation category READ is not supported in state standby. Visit https://s.apache.org/sbnn-error
- at org.apache.hadoop.hdfs.server.namenode.ha.StandbyState.checkOperation(StandbyState.java:88)
- at org.apache.hadoop.hdfs.server.namenode.NameNode$NameNodeHAContext.checkOperation(NameNode.java:1962)
- at org.apache.hadoop.hdfs.server.namenode.FSNamesystem.checkOperation(FSNamesystem.java:1421)
- at org.apache.hadoop.hdfs.server.namenode.FSNamesystem.getListing(FSNamesystem.java:3729)
- at org.apache.hadoop.hdfs.server.namenode.NameNodeRpcServer.getListing(NameNodeRpcServer.java:1138)
- at org.apache.hadoop.hdfs.protocolPB.ClientNamenodeProtocolServerSideTranslatorPB.getListing(ClientNamenodeProtocolServerSideTranslatorPB.java:708)
- at org.apache.hadoop.hdfs.protocol.proto.ClientNamenodeProtocolProtos$ClientNamenodeProtocol$2.callBlockingMethod(ClientNamenodeProtocolProtos.java)
- at org.apache.hadoop.ipc.ProtobufRpcEngine$Server$ProtoBufRpcInvoker.call(ProtobufRpcEngine.java:523)
- at org.apache.hadoop.ipc.RPC$Server.call(RPC.java:991)
- at org.apache.hadoop.ipc.Server$RpcCall.run(Server.java:870)
- at org.apache.hadoop.ipc.Server$RpcCall.run(Server.java:816)
- at java.security.AccessController.doPrivileged(Native Method)
- at javax.security.auth.Subject.doAs(Subject.java:422)
- at org.apache.hadoop.security.UserGroupInformation.doAs(UserGroupInformation.java:1875)
- at org.apache.hadoop.ipc.Server$Handler.run(Server.java:2680)
- at org.apache.hadoop.ipc.Client.call(Client.java:1476) ~[hadoop-common-2.7.1.jar:na]
- at org.apache.hadoop.ipc.Client.call(Client.java:1407) ~[hadoop-common-2.7.1.jar:na]
- at org.apache.hadoop.ipc.ProtobufRpcEngine$Invoker.invoke(ProtobufRpcEngine.java:229) ~[hadoop-common-2.7.1.jar:na]
- at com.sun.proxy.$Proxy20.getListing(Unknown Source) ~[na:na]
- at org.apache.hadoop.hdfs.protocolPB.ClientNamenodeProtocolTranslatorPB.getListing(ClientNamenodeProtocolTranslatorPB.java:573) ~[hadoop-hdfs-2.7.1.jar:na]
- at sun.reflect.NativeMethodAccessorImpl.invoke0(Native Method) ~[na:1.8.0_262]
- at sun.reflect.NativeMethodAccessorImpl.invoke(NativeMethodAccessorImpl.java:62) ~[na:1.8.0_262]
- at sun.reflect.DelegatingMethodAccessorImpl.invoke(DelegatingMethodAccessorImpl.java:43) ~[na:1.8.0_262]
- at java.lang.reflect.Method.invoke(Method.java:498) ~[na:1.8.0_262]
- at org.apache.hadoop.io.retry.RetryInvocationHandler.invokeMethod(RetryInvocationHandler.java:187) ~[hadoop-common-2.7.1.jar:na]
- at org.apache.hadoop.io.retry.RetryInvocationHandler.invoke(RetryInvocationHandler.java:102) ~[hadoop-common-2.7.1.jar:na]
- at com.sun.proxy.$Proxy21.getListing(Unknown Source) ~[na:na]
- at org.apache.hadoop.hdfs.DFSClient.listPaths(DFSClient.java:2094) ~[hadoop-hdfs-2.7.1.jar:na]
- at org.apache.hadoop.hdfs.DFSClient.listPaths(DFSClient.java:2077) ~[hadoop-hdfs-2.7.1.jar:na]
- at org.apache.hadoop.hdfs.DistributedFileSystem.listStatusInternal(DistributedFileSystem.java:791) ~[hadoop-hdfs-2.7.1.jar:na]
- at org.apache.hadoop.hdfs.DistributedFileSystem.access$700(DistributedFileSystem.java:106) ~[hadoop-hdfs-2.7.1.jar:na]
- at org.apache.hadoop.hdfs.DistributedFileSystem$18.doCall(DistributedFileSystem.java:853) ~[hadoop-hdfs-2.7.1.jar:na]
- at org.apache.hadoop.hdfs.DistributedFileSystem$18.doCall(DistributedFileSystem.java:849) ~[hadoop-hdfs-2.7.1.jar:na]
- at org.apache.hadoop.fs.FileSystemLinkResolver.resolve(FileSystemLinkResolver.java:81) ~[hadoop-common-2.7.1.jar:na]
- at org.apache.hadoop.hdfs.DistributedFileSystem.listStatus(DistributedFileSystem.java:860) ~[hadoop-hdfs-2.7.1.jar:na]
- at com.alibaba.datax.plugin.reader.hdfsreader.DFSUtil.getHDFSAllFilesNORegex(DFSUtil.java:162) ~[hdfsreader-0.0.1-SNAPSHOT.jar:na]
- at com.alibaba.datax.plugin.reader.hdfsreader.DFSUtil.getHDFSAllFiles(DFSUtil.java:141) ~[hdfsreader-0.0.1-SNAPSHOT.jar:na]
- ... 8 common frames omitted
- 2022-07-17 22:44:24.726 [job-0] INFO StandAloneJobContainerCommunicator - Total 0 records, 0 bytes | Speed 0B/s, 0 records/s | Error 0 records, 0 bytes | All Task WaitWriterTime 0.000s | All Task WaitReaderTime 0.000s | Percentage 0.00%
- 2022-07-17 22:44:24.731 [job-0] ERROR Engine -
- 经DataX智能分析,该任务最可能的错误原因是:
- com.alibaba.datax.common.exception.DataXException: Code:[HdfsReader-09], Description:[您配置的path格式有误]. - org.apache.hadoop.ipc.RemoteException(org.apache.hadoop.ipc.StandbyException): Operation category READ is not supported in state standby. Visit https://s.apache.org/sbnn-error
- at org.apache.hadoop.hdfs.server.namenode.ha.StandbyState.checkOperation(StandbyState.java:88)
- at org.apache.hadoop.hdfs.server.namenode.NameNode$NameNodeHAContext.checkOperation(NameNode.java:1962)
- at org.apache.hadoop.hdfs.server.namenode.FSNamesystem.checkOperation(FSNamesystem.java:1421)
- at org.apache.hadoop.hdfs.server.namenode.FSNamesystem.getListing(FSNamesystem.java:3729)
- at org.apache.hadoop.hdfs.server.namenode.NameNodeRpcServer.getListing(NameNodeRpcServer.java:1138)
- at org.apache.hadoop.hdfs.protocolPB.ClientNamenodeProtocolServerSideTranslatorPB.getListing(ClientNamenodeProtocolServerSideTranslatorPB.java:708)
- at org.apache.hadoop.hdfs.protocol.proto.ClientNamenodeProtocolProtos$ClientNamenodeProtocol$2.callBlockingMethod(ClientNamenodeProtocolProtos.java)
- at org.apache.hadoop.ipc.ProtobufRpcEngine$Server$ProtoBufRpcInvoker.call(ProtobufRpcEngine.java:523)
- at org.apache.hadoop.ipc.RPC$Server.call(RPC.java:991)
- at org.apache.hadoop.ipc.Server$RpcCall.run(Server.java:870)
- at org.apache.hadoop.ipc.Server$RpcCall.run(Server.java:816)
- at java.security.AccessController.doPrivileged(Native Method)
- at javax.security.auth.Subject.doAs(Subject.java:422)
- at org.apache.hadoop.security.UserGroupInformation.doAs(UserGroupInformation.java:1875)
- at org.apache.hadoop.ipc.Server$Handler.run(Server.java:2680)
- at org.apache.hadoop.ipc.Client.call(Client.java:1476)
- at org.apache.hadoop.ipc.Client.call(Client.java:1407)
- at org.apache.hadoop.ipc.ProtobufRpcEngine$Invoker.invoke(ProtobufRpcEngine.java:229)
- at com.sun.proxy.$Proxy20.getListing(Unknown Source)
- at org.apache.hadoop.hdfs.protocolPB.ClientNamenodeProtocolTranslatorPB.getListing(ClientNamenodeProtocolTranslatorPB.java:573)
- at sun.reflect.NativeMethodAccessorImpl.invoke0(Native Method)
- at sun.reflect.NativeMethodAccessorImpl.invoke(NativeMethodAccessorImpl.java:62)
- at sun.reflect.DelegatingMethodAccessorImpl.invoke(DelegatingMethodAccessorImpl.java:43)
- at java.lang.reflect.Method.invoke(Method.java:498)
- at org.apache.hadoop.io.retry.RetryInvocationHandler.invokeMethod(RetryInvocationHandler.java:187)
- at org.apache.hadoop.io.retry.RetryInvocationHandler.invoke(RetryInvocationHandler.java:102)
- at com.sun.proxy.$Proxy21.getListing(Unknown Source)
- at org.apache.hadoop.hdfs.DFSClient.listPaths(DFSClient.java:2094)
- at org.apache.hadoop.hdfs.DFSClient.listPaths(DFSClient.java:2077)
- at org.apache.hadoop.hdfs.DistributedFileSystem.listStatusInternal(DistributedFileSystem.java:791)
- at org.apache.hadoop.hdfs.DistributedFileSystem.access$700(DistributedFileSystem.java:106)
- at org.apache.hadoop.hdfs.DistributedFileSystem$18.doCall(DistributedFileSystem.java:853)
- at org.apache.hadoop.hdfs.DistributedFileSystem$18.doCall(DistributedFileSystem.java:849)
- at org.apache.hadoop.fs.FileSystemLinkResolver.resolve(FileSystemLinkResolver.java:81)
- at org.apache.hadoop.hdfs.DistributedFileSystem.listStatus(DistributedFileSystem.java:860)
- at com.alibaba.datax.plugin.reader.hdfsreader.DFSUtil.getHDFSAllFilesNORegex(DFSUtil.java:162)
- at com.alibaba.datax.plugin.reader.hdfsreader.DFSUtil.getHDFSAllFiles(DFSUtil.java:141)
- at com.alibaba.datax.plugin.reader.hdfsreader.DFSUtil.getAllFiles(DFSUtil.java:112)
- at com.alibaba.datax.plugin.reader.hdfsreader.HdfsReader$Job.prepare(HdfsReader.java:169)
- at com.alibaba.datax.core.job.JobContainer.prepareJobReader(JobContainer.java:715)
- at com.alibaba.datax.core.job.JobContainer.prepare(JobContainer.java:308)
- at com.alibaba.datax.core.job.JobContainer.start(JobContainer.java:115)
- at com.alibaba.datax.core.Engine.start(Engine.java:92)
- at com.alibaba.datax.core.Engine.entry(Engine.java:171)
- at com.alibaba.datax.core.Engine.main(Engine.java:204)
- - org.apache.hadoop.ipc.RemoteException(org.apache.hadoop.ipc.StandbyException): Operation category READ is not supported in state standby. Visit https://s.apache.org/sbnn-error
- at org.apache.hadoop.hdfs.server.namenode.ha.StandbyState.checkOperation(StandbyState.java:88)
- at org.apache.hadoop.hdfs.server.namenode.NameNode$NameNodeHAContext.checkOperation(NameNode.java:1962)
- at org.apache.hadoop.hdfs.server.namenode.FSNamesystem.checkOperation(FSNamesystem.java:1421)
- at org.apache.hadoop.hdfs.server.namenode.FSNamesystem.getListing(FSNamesystem.java:3729)
- at org.apache.hadoop.hdfs.server.namenode.NameNodeRpcServer.getListing(NameNodeRpcServer.java:1138)
- at org.apache.hadoop.hdfs.protocolPB.ClientNamenodeProtocolServerSideTranslatorPB.getListing(ClientNamenodeProtocolServerSideTranslatorPB.java:708)
- at org.apache.hadoop.hdfs.protocol.proto.ClientNamenodeProtocolProtos$ClientNamenodeProtocol$2.callBlockingMethod(ClientNamenodeProtocolProtos.java)
- at org.apache.hadoop.ipc.ProtobufRpcEngine$Server$ProtoBufRpcInvoker.call(ProtobufRpcEngine.java:523)
- at org.apache.hadoop.ipc.RPC$Server.call(RPC.java:991)
- at org.apache.hadoop.ipc.Server$RpcCall.run(Server.java:870)
- at org.apache.hadoop.ipc.Server$RpcCall.run(Server.java:816)
- at java.security.AccessController.doPrivileged(Native Method)
- at javax.security.auth.Subject.doAs(Subject.java:422)
- at org.apache.hadoop.security.UserGroupInformation.doAs(UserGroupInformation.java:1875)
- at org.apache.hadoop.ipc.Server$Handler.run(Server.java:2680)
- at org.apache.hadoop.ipc.Client.call(Client.java:1476)
- at org.apache.hadoop.ipc.Client.call(Client.java:1407)
- at org.apache.hadoop.ipc.ProtobufRpcEngine$Invoker.invoke(ProtobufRpcEngine.java:229)
- at com.sun.proxy.$Proxy20.getListing(Unknown Source)
- at org.apache.hadoop.hdfs.protocolPB.ClientNamenodeProtocolTranslatorPB.getListing(ClientNamenodeProtocolTranslatorPB.java:573)
- at sun.reflect.NativeMethodAccessorImpl.invoke0(Native Method)
- at sun.reflect.NativeMethodAccessorImpl.invoke(NativeMethodAccessorImpl.java:62)
- at sun.reflect.DelegatingMethodAccessorImpl.invoke(DelegatingMethodAccessorImpl.java:43)
- at java.lang.reflect.Method.invoke(Method.java:498)
- at org.apache.hadoop.io.retry.RetryInvocationHandler.invokeMethod(RetryInvocationHandler.java:187)
- at org.apache.hadoop.io.retry.RetryInvocationHandler.invoke(RetryInvocationHandler.java:102)
- at com.sun.proxy.$Proxy21.getListing(Unknown Source)
- at org.apache.hadoop.hdfs.DFSClient.listPaths(DFSClient.java:2094)
- at org.apache.hadoop.hdfs.DFSClient.listPaths(DFSClient.java:2077)
- at org.apache.hadoop.hdfs.DistributedFileSystem.listStatusInternal(DistributedFileSystem.java:791)
- at org.apache.hadoop.hdfs.DistributedFileSystem.access$700(DistributedFileSystem.java:106)
- at org.apache.hadoop.hdfs.DistributedFileSystem$18.doCall(DistributedFileSystem.java:853)
- at org.apache.hadoop.hdfs.DistributedFileSystem$18.doCall(DistributedFileSystem.java:849)
- at org.apache.hadoop.fs.FileSystemLinkResolver.resolve(FileSystemLinkResolver.java:81)
- at org.apache.hadoop.hdfs.DistributedFileSystem.listStatus(DistributedFileSystem.java:860)
- at com.alibaba.datax.plugin.reader.hdfsreader.DFSUtil.getHDFSAllFilesNORegex(DFSUtil.java:162)
- at com.alibaba.datax.plugin.reader.hdfsreader.DFSUtil.getHDFSAllFiles(DFSUtil.java:141)
- at com.alibaba.datax.plugin.reader.hdfsreader.DFSUtil.getAllFiles(DFSUtil.java:112)
- at com.alibaba.datax.plugin.reader.hdfsreader.HdfsReader$Job.prepare(HdfsReader.java:169)
- at com.alibaba.datax.core.job.JobContainer.prepareJobReader(JobContainer.java:715)
- at com.alibaba.datax.core.job.JobContainer.prepare(JobContainer.java:308)
- at com.alibaba.datax.core.job.JobContainer.start(JobContainer.java:115)
- at com.alibaba.datax.core.Engine.start(Engine.java:92)
- at com.alibaba.datax.core.Engine.entry(Engine.java:171)
- at com.alibaba.datax.core.Engine.main(Engine.java:204)
- at com.alibaba.datax.common.exception.DataXException.asDataXException(DataXException.java:40)
- at com.alibaba.datax.plugin.reader.hdfsreader.DFSUtil.getHDFSAllFiles(DFSUtil.java:150)
- at com.alibaba.datax.plugin.reader.hdfsreader.DFSUtil.getAllFiles(DFSUtil.java:112)
- at com.alibaba.datax.plugin.reader.hdfsreader.HdfsReader$Job.prepare(HdfsReader.java:169)
- at com.alibaba.datax.core.job.JobContainer.prepareJobReader(JobContainer.java:715)
- at com.alibaba.datax.core.job.JobContainer.prepare(JobContainer.java:308)
- at com.alibaba.datax.core.job.JobContainer.start(JobContainer.java:115)
- at com.alibaba.datax.core.Engine.start(Engine.java:92)
- at com.alibaba.datax.core.Engine.entry(Engine.java:171)
- at com.alibaba.datax.core.Engine.main(Engine.java:204)
- Caused by: org.apache.hadoop.ipc.RemoteException(org.apache.hadoop.ipc.StandbyException): Operation category READ is not supported in state standby. Visit https://s.apache.org/sbnn-error
- at org.apache.hadoop.hdfs.server.namenode.ha.StandbyState.checkOperation(StandbyState.java:88)
- at org.apache.hadoop.hdfs.server.namenode.NameNode$NameNodeHAContext.checkOperation(NameNode.java:1962)
- at org.apache.hadoop.hdfs.server.namenode.FSNamesystem.checkOperation(FSNamesystem.java:1421)
- at org.apache.hadoop.hdfs.server.namenode.FSNamesystem.getListing(FSNamesystem.java:3729)
- at org.apache.hadoop.hdfs.server.namenode.NameNodeRpcServer.getListing(NameNodeRpcServer.java:1138)
- at org.apache.hadoop.hdfs.protocolPB.ClientNamenodeProtocolServerSideTranslatorPB.getListing(ClientNamenodeProtocolServerSideTranslatorPB.java:708)
- at org.apache.hadoop.hdfs.protocol.proto.ClientNamenodeProtocolProtos$ClientNamenodeProtocol$2.callBlockingMethod(ClientNamenodeProtocolProtos.java)
- at org.apache.hadoop.ipc.ProtobufRpcEngine$Server$ProtoBufRpcInvoker.call(ProtobufRpcEngine.java:523)
- at org.apache.hadoop.ipc.RPC$Server.call(RPC.java:991)
- at org.apache.hadoop.ipc.Server$RpcCall.run(Server.java:870)
- at org.apache.hadoop.ipc.Server$RpcCall.run(Server.java:816)
- at java.security.AccessController.doPrivileged(Native Method)
- at javax.security.auth.Subject.doAs(Subject.java:422)
- at org.apache.hadoop.security.UserGroupInformation.doAs(UserGroupInformation.java:1875)
- at org.apache.hadoop.ipc.Server$Handler.run(Server.java:2680)
- at org.apache.hadoop.ipc.Client.call(Client.java:1476)
- at org.apache.hadoop.ipc.Client.call(Client.java:1407)
- at org.apache.hadoop.ipc.ProtobufRpcEngine$Invoker.invoke(ProtobufRpcEngine.java:229)
- at com.sun.proxy.$Proxy20.getListing(Unknown Source)
- at org.apache.hadoop.hdfs.protocolPB.ClientNamenodeProtocolTranslatorPB.getListing(ClientNamenodeProtocolTranslatorPB.java:573)
- at sun.reflect.NativeMethodAccessorImpl.invoke0(Native Method)
- at sun.reflect.NativeMethodAccessorImpl.invoke(NativeMethodAccessorImpl.java:62)
- at sun.reflect.DelegatingMethodAccessorImpl.invoke(DelegatingMethodAccessorImpl.java:43)
- at java.lang.reflect.Method.invoke(Method.java:498)
- at org.apache.hadoop.io.retry.RetryInvocationHandler.invokeMethod(RetryInvocationHandler.java:187)
- at org.apache.hadoop.io.retry.RetryInvocationHandler.invoke(RetryInvocationHandler.java:102)
- at com.sun.proxy.$Proxy21.getListing(Unknown Source)
- at org.apache.hadoop.hdfs.DFSClient.listPaths(DFSClient.java:2094)
- at org.apache.hadoop.hdfs.DFSClient.listPaths(DFSClient.java:2077)
- at org.apache.hadoop.hdfs.DistributedFileSystem.listStatusInternal(DistributedFileSystem.java:791)
- at org.apache.hadoop.hdfs.DistributedFileSystem.access$700(DistributedFileSystem.java:106)
- at org.apache.hadoop.hdfs.DistributedFileSystem$18.doCall(DistributedFileSystem.java:853)
- at org.apache.hadoop.hdfs.DistributedFileSystem$18.doCall(DistributedFileSystem.java:849)
- at org.apache.hadoop.fs.FileSystemLinkResolver.resolve(FileSystemLinkResolver.java:81)
- at org.apache.hadoop.hdfs.DistributedFileSystem.listStatus(DistributedFileSystem.java:860)
- at com.alibaba.datax.plugin.reader.hdfsreader.DFSUtil.getHDFSAllFilesNORegex(DFSUtil.java:162)
- at com.alibaba.datax.plugin.reader.hdfsreader.DFSUtil.getHDFSAllFiles(DFSUtil.java:141)
- ... 8 more
|