Is your HDFS healthy? Have you run fsck on it? If you grep the namenode logs for the file /hbase/oldWALs/xxxxxxxxx, can you trace its history? Ditto in the HBase master logs?
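Something like the below might help with the tracing (a rough sketch; the log paths and the xxxxxxxxx file name are placeholders, adjust for your layout):

  # Check overall HDFS health; fsck flags missing or corrupt blocks.
  hdfs fsck /

  # Trace the file's lifecycle (create/rename/delete) in the namenode logs.
  grep 'oldWALs/xxxxxxxxx' /var/log/hadoop-hdfs/*namenode*.log*

  # Ditto on the master side: who archived or cleaned the WAL?
  grep 'xxxxxxxxx' /var/log/hbase/*master*.log*

St.Ack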
On Thu, Jul 28, 2016 at 10:32 PM, ankit beohar <[email protected]> wrote:

> Hi All,
>
> I am facing a recurring region server down issue; below is the error. Can
> anybody help me out with this?
>
> java.io.IOException: File from recovered queue is nowhere to be found
>         at org.apache.hadoop.hbase.replication.regionserver.ReplicationSource$ReplicationSourceWorkerThread.openReader(ReplicationSource.java:804)
>         at org.apache.hadoop.hbase.replication.regionserver.ReplicationSource$ReplicationSourceWorkerThread.run(ReplicationSource.java:542)
> Caused by: java.io.FileNotFoundException: File does not exist: /hbase/oldWALs/xxxxxxxxxx
>         at org.apache.hadoop.hdfs.server.namenode.INodeFile.valueOf(INodeFile.java:66)
>         at org.apache.hadoop.hdfs.server.namenode.INodeFile.valueOf(INodeFile.java:56)
>         at org.apache.hadoop.hdfs.server.namenode.FSNamesystem.getBlockLocationsUpdateTimes(FSNamesystem.java:1932)
>         at org.apache.hadoop.hdfs.server.namenode.FSNamesystem.getBlockLocationsInt(FSNamesystem.java:1873)
>         at org.apache.hadoop.hdfs.server.namenode.FSNamesystem.getBlockLocations(FSNamesystem.java:1853)
>         at org.apache.hadoop.hdfs.server.namenode.FSNamesystem.getBlockLocations(FSNamesystem.java:1825)
>         at org.apache.hadoop.hdfs.server.namenode.NameNodeRpcServer.getBlockLocations(NameNodeRpcServer.java:559)
>         at org.apache.hadoop.hdfs.server.namenode.AuthorizationProviderProxyClientProtocol.getBlockLocations(AuthorizationProviderProxyClientProtocol.java:87)
>         at org.apache.hadoop.hdfs.protocolPB.ClientNamenodeProtocolServerSideTranslatorPB.getBlockLocations(ClientNamenodeProtocolServerSideTranslatorPB.java:363)
>         at org.apache.hadoop.hdfs.protocol.proto.ClientNamenodeProtocolProtos$ClientNamenodeProtocol$2.callBlockingMethod(ClientNamenodeProtocolProtos.java)
>         at org.apache.hadoop.ipc.ProtobufRpcEngine$Server$ProtoBufRpcInvoker.call(ProtobufRpcEngine.java:619)
>         at org.apache.hadoop.ipc.RPC$Server.call(RPC.java:1060)
>         at org.apache.hadoop.ipc.Server$Handler$1.run(Server.java:2044)
>         at org.apache.hadoop.ipc.Server$Handler$1.run(Server.java:2040)
>         at java.security.AccessController.doPrivileged(Native Method)
>         at javax.security.auth.Subject.doAs(Subject.java:415)
>         at org.apache.hadoop.security.UserGroupInformation.doAs(UserGroupInformation.java:1707)
>         at org.apache.hadoop.ipc.Server$Handler.run(Server.java:2038)
>
>         at sun.reflect.NativeConstructorAccessorImpl.newInstance0(Native Method)
>         at sun.reflect.NativeConstructorAccessorImpl.newInstance(NativeConstructorAccessorImpl.java:57)
>         at sun.reflect.DelegatingConstructorAccessorImpl.newInstance(DelegatingConstructorAccessorImpl.java:45)
>         at java.lang.reflect.Constructor.newInstance(Constructor.java:526)
>         at org.apache.hadoop.ipc.RemoteException.instantiateException(RemoteException.java:106)
>         at org.apache.hadoop.ipc.RemoteException.unwrapRemoteException(RemoteException.java:73)
>         at org.apache.hadoop.hdfs.DFSClient.callGetBlockLocations(DFSClient.java:1215)
>         at org.apache.hadoop.hdfs.DFSClient.getLocatedBlocks(DFSClient.java:1203)
>         at org.apache.hadoop.hdfs.DFSClient.getLocatedBlocks(DFSClient.java:1193)
>         at org.apache.hadoop.hdfs.DFSInputStream.fetchLocatedBlocksAndGetLastBlockLength(DFSInputStream.java:299)
>         at org.apache.hadoop.hdfs.DFSInputStream.openInfo(DFSInputStream.java:265)
>         at org.apache.hadoop.hdfs.DFSInputStream.<init>(DFSInputStream.java:257)
>         at org.apache.hadoop.hdfs.DFSClient.open(DFSClient.java:1492)
>         at org.apache.hadoop.hdfs.DistributedFileSystem$3.doCall(DistributedFileSystem.java:302)
>         at org.apache.hadoop.hdfs.DistributedFileSystem$3.doCall(DistributedFileSystem.java:298)
>         at org.apache.hadoop.fs.FileSystemLinkResolver.resolve(FileSystemLinkResolver.java:81)
>         at org.apache.hadoop.hdfs.DistributedFileSystem.open(DistributedFileSystem.java:298)
>         at org.apache.hadoop.fs.FilterFileSystem.open(FilterFileSystem.java:161)
>         at org.apache.hadoop.fs.FileSystem.open(FileSystem.java:766)
>         at org.apache.hadoop.hbase.wal.WALFactory.createReader(WALFactory.java:291)
>         at org.apache.hadoop.hbase.wal.WALFactory.createReader(WALFactory.java:267)
>         at org.apache.hadoop.hbase.wal.WALFactory.createReader(WALFactory.java:255)
>         at org.apache.hadoop.hbase.wal.WALFactory.createReader(WALFactory.java:397)
>         at org.apache.hadoop.hbase.replication.regionserver.ReplicationWALReaderManager.openReader(ReplicationWALReaderManager.java:69)
>         at org.apache.hadoop.hbase.replication.regionserver.ReplicationSource$ReplicationSourceWorkerThread.openReader(ReplicationSource.java:746)
>         ... 1 more
> Caused by: org.apache.hadoop.ipc.RemoteException(java.io.FileNotFoundException): File does not exist: /hbase/oldWALs/xxxxxxxxx
>         at org.apache.hadoop.hdfs.server.namenode.INodeFile.valueOf(INodeFile.java:66)
>         at org.apache.hadoop.hdfs.server.namenode.INodeFile.valueOf(INodeFile.java:56)
>         at org.apache.hadoop.hdfs.server.namenode.FSNamesystem.getBlockLocationsUpdateTimes(FSNamesystem.java:1932)
>
> Best Regards,
> ANKIT BEOHAR
