[ 
https://issues.apache.org/jira/browse/HDFS-15998?page=com.atlassian.jira.plugin.system.issuetabpanels:all-tabpanel
 ]

Haiyang Hu updated HDFS-15998:
------------------------------
    Description: 
Using the Hadoop 3.2.0 client to execute the following command occasionally 
produces an NPE (NullPointerException).

hdfs dfsadmin -Dfs.defaultFS=hdfs://xxx -listOpenFiles -blockingDecommission 
-path /xxx

 
{code:java}
// code placeholder
21/03/17 14:17:38 DEBUG retry.RetryInvocationHandler: Exception while invoking 
call #0 ClientNamenodeProtocolTranslatorPB.listOpenFiles over xxx:8020. Not 
retrying because try once and fail.21/03/17 14:17:38 DEBUG 
retry.RetryInvocationHandler: Exception while invoking call #0 
ClientNamenodeProtocolTranslatorPB.listOpenFiles over xxx:8020. Not retrying 
because try once and fail. 
org.apache.hadoop.ipc.RemoteException(java.lang.NullPointerException): 
java.lang.NullPointerException  at 
org.apache.hadoop.hdfs.server.namenode.FSNamesystem.getFilesBlockingDecom(FSNamesystem.java:1917)
  at 
org.apache.hadoop.hdfs.server.namenode.FSNamesystem.listOpenFiles(FSNamesystem.java:1876)
  at 
org.apache.hadoop.hdfs.server.namenode.NameNodeRpcServer.listOpenFiles(NameNodeRpcServer.java:1453)
  at 
org.apache.hadoop.hdfs.protocolPB.ClientNamenodeProtocolServerSideTranslatorPB.listOpenFiles(ClientNamenodeProtocolServerSideTranslatorPB.java:1894)
  at 
org.apache.hadoop.hdfs.protocol.proto.ClientNamenodeProtocolProtos$ClientNamenodeProtocol$2.callBlockingMethod(ClientNamenodeProtocolProtos.java)
  at 
org.apache.hadoop.ipc.ProtobufRpcEngine$Server$ProtoBufRpcInvoker.call(ProtobufRpcEngine.java:524)
  at org.apache.hadoop.ipc.RPC$Server.call(RPC.java:1025)  at 
org.apache.hadoop.ipc.Server$RpcCall.run(Server.java:886)  at 
org.apache.hadoop.ipc.Server$RpcCall.run(Server.java:828)  at 
java.security.AccessController.doPrivileged(Native Method)  at 
javax.security.auth.Subject.doAs(Subject.java:422)  at 
org.apache.hadoop.security.UserGroupInformation.doAs(UserGroupInformation.java:1903)
  at org.apache.hadoop.ipc.Server$Handler.run(Server.java:2717)
  at org.apache.hadoop.ipc.Client.getRpcResponse(Client.java:1511)  at 
org.apache.hadoop.ipc.Client.call(Client.java:1457)  at 
org.apache.hadoop.ipc.Client.call(Client.java:1367)  at 
org.apache.hadoop.ipc.ProtobufRpcEngine$Invoker.invoke(ProtobufRpcEngine.java:228)
  at 
org.apache.hadoop.ipc.ProtobufRpcEngine$Invoker.invoke(ProtobufRpcEngine.java:116)
  at com.sun.proxy.$Proxy9.listOpenFiles(Unknown Source)  at 
org.apache.hadoop.hdfs.protocolPB.ClientNamenodeProtocolTranslatorPB.listOpenFiles(ClientNamenodeProtocolTranslatorPB.java:1952)
  at sun.reflect.NativeMethodAccessorImpl.invoke0(Native Method)  at 
sun.reflect.NativeMethodAccessorImpl.invoke(NativeMethodAccessorImpl.java:62)  
at 
sun.reflect.DelegatingMethodAccessorImpl.invoke(DelegatingMethodAccessorImpl.java:43)
  at java.lang.reflect.Method.invoke(Method.java:498)  at 
org.apache.hadoop.io.retry.RetryInvocationHandler.invokeMethod(RetryInvocationHandler.java:422)
  at 
org.apache.hadoop.io.retry.RetryInvocationHandler$Call.invokeMethod(RetryInvocationHandler.java:165)
  at 
org.apache.hadoop.io.retry.RetryInvocationHandler$Call.invoke(RetryInvocationHandler.java:157)
  at 
org.apache.hadoop.io.retry.RetryInvocationHandler$Call.invokeOnce(RetryInvocationHandler.java:95)
  at 
org.apache.hadoop.io.retry.RetryInvocationHandler.invoke(RetryInvocationHandler.java:359)
  at com.sun.proxy.$Proxy10.listOpenFiles(Unknown Source)  at 
org.apache.hadoop.hdfs.protocol.OpenFilesIterator.makeRequest(OpenFilesIterator.java:89)
  at 
org.apache.hadoop.hdfs.protocol.OpenFilesIterator.makeRequest(OpenFilesIterator.java:35)
  at 
org.apache.hadoop.fs.BatchedRemoteIterator.makeRequest(BatchedRemoteIterator.java:77)
  at 
org.apache.hadoop.fs.BatchedRemoteIterator.makeRequestIfNeeded(BatchedRemoteIterator.java:85)
  at 
org.apache.hadoop.fs.BatchedRemoteIterator.hasNext(BatchedRemoteIterator.java:99)
  at org.apache.hadoop.hdfs.tools.DFSAdmin.printOpenFiles(DFSAdmin.java:1006)  
at org.apache.hadoop.hdfs.tools.DFSAdmin.listOpenFiles(DFSAdmin.java:994)  at 
org.apache.hadoop.hdfs.tools.DFSAdmin.run(DFSAdmin.java:2431)  at 
org.apache.hadoop.util.ToolRunner.run(ToolRunner.java:76)  at 
org.apache.hadoop.util.ToolRunner.run(ToolRunner.java:90)  
at org.apache.hadoop.hdfs.tools.DFSAdmin.main(DFSAdmin.java:2590) 
List open files failed. 
listOpenFiles: java.lang.NullPointerException
{code}
 

 

 

  was:
Use the Hadoop 3.2.0 client execute the following command: occasionally appear 
NPE.

hdfs dfsadmin -Dfs.defaultFS=hdfs://xxx -listOpenFiles -blockingDecommission 
-path /xxx

 

{{{quote}}}

 21/03/17 14:17:38 DEBUG retry.RetryInvocationHandler: Exception while invoking 
call #0 ClientNamenodeProtocolTranslatorPB.listOpenFiles over xxx:8020. Not 
retrying because try once and fail. 21/03/17 14:17:38 DEBUG 
retry.RetryInvocationHandler: Exception while invoking call #0 
ClientNamenodeProtocolTranslatorPB.listOpenFiles over xxx:8020. Not retrying 
because try once and fail. 
org.apache.hadoop.ipc.RemoteException(java.lang.NullPointerException): 
java.lang.NullPointerException  at 
org.apache.hadoop.hdfs.server.namenode.FSNamesystem.getFilesBlockingDecom(FSNamesystem.java:1917)
  at 
org.apache.hadoop.hdfs.server.namenode.FSNamesystem.listOpenFiles(FSNamesystem.java:1876)
  at 
org.apache.hadoop.hdfs.server.namenode.NameNodeRpcServer.listOpenFiles(NameNodeRpcServer.java:1453)
  at 
org.apache.hadoop.hdfs.protocolPB.ClientNamenodeProtocolServerSideTranslatorPB.listOpenFiles(ClientNamenodeProtocolServerSideTranslatorPB.java:1894)
  at 
org.apache.hadoop.hdfs.protocol.proto.ClientNamenodeProtocolProtos$ClientNamenodeProtocol$2.callBlockingMethod(ClientNamenodeProtocolProtos.java)
  at 
org.apache.hadoop.ipc.ProtobufRpcEngine$Server$ProtoBufRpcInvoker.call(ProtobufRpcEngine.java:524)
  at org.apache.hadoop.ipc.RPC$Server.call(RPC.java:1025)  at 
org.apache.hadoop.ipc.Server$RpcCall.run(Server.java:886)  at 
org.apache.hadoop.ipc.Server$RpcCall.run(Server.java:828)  at 
java.security.AccessController.doPrivileged(Native Method)  at 
javax.security.auth.Subject.doAs(Subject.java:422)  at 
org.apache.hadoop.security.UserGroupInformation.doAs(UserGroupInformation.java:1903)
  at org.apache.hadoop.ipc.Server$Handler.run(Server.java:2717)
  at org.apache.hadoop.ipc.Client.getRpcResponse(Client.java:1511)  at 
org.apache.hadoop.ipc.Client.call(Client.java:1457)  at 
org.apache.hadoop.ipc.Client.call(Client.java:1367)  at 
org.apache.hadoop.ipc.ProtobufRpcEngine$Invoker.invoke(ProtobufRpcEngine.java:228)
  at 
org.apache.hadoop.ipc.ProtobufRpcEngine$Invoker.invoke(ProtobufRpcEngine.java:116)
  at com.sun.proxy.$Proxy9.listOpenFiles(Unknown Source)  at 
org.apache.hadoop.hdfs.protocolPB.ClientNamenodeProtocolTranslatorPB.listOpenFiles(ClientNamenodeProtocolTranslatorPB.java:1952)
  at sun.reflect.NativeMethodAccessorImpl.invoke0(Native Method)  at 
sun.reflect.NativeMethodAccessorImpl.invoke(NativeMethodAccessorImpl.java:62)  
at 
sun.reflect.DelegatingMethodAccessorImpl.invoke(DelegatingMethodAccessorImpl.java:43)
  at java.lang.reflect.Method.invoke(Method.java:498)  at 
org.apache.hadoop.io.retry.RetryInvocationHandler.invokeMethod(RetryInvocationHandler.java:422)
  at 
org.apache.hadoop.io.retry.RetryInvocationHandler$Call.invokeMethod(RetryInvocationHandler.java:165)
  at 
org.apache.hadoop.io.retry.RetryInvocationHandler$Call.invoke(RetryInvocationHandler.java:157)
  at 
org.apache.hadoop.io.retry.RetryInvocationHandler$Call.invokeOnce(RetryInvocationHandler.java:95)
  at 
org.apache.hadoop.io.retry.RetryInvocationHandler.invoke(RetryInvocationHandler.java:359)
  at com.sun.proxy.$Proxy10.listOpenFiles(Unknown Source)  at 
org.apache.hadoop.hdfs.protocol.OpenFilesIterator.makeRequest(OpenFilesIterator.java:89)
  at 
org.apache.hadoop.hdfs.protocol.OpenFilesIterator.makeRequest(OpenFilesIterator.java:35)
  at 
org.apache.hadoop.fs.BatchedRemoteIterator.makeRequest(BatchedRemoteIterator.java:77)
  at 
org.apache.hadoop.fs.BatchedRemoteIterator.makeRequestIfNeeded(BatchedRemoteIterator.java:85)
  at 
org.apache.hadoop.fs.BatchedRemoteIterator.hasNext(BatchedRemoteIterator.java:99)
  at org.apache.hadoop.hdfs.tools.DFSAdmin.printOpenFiles(DFSAdmin.java:1006)  
at org.apache.hadoop.hdfs.tools.DFSAdmin.listOpenFiles(DFSAdmin.java:994)  at 
org.apache.hadoop.hdfs.tools.DFSAdmin.run(DFSAdmin.java:2431)  at 
org.apache.hadoop.util.ToolRunner.run(ToolRunner.java:76)  at 
org.apache.hadoop.util.ToolRunner.run(ToolRunner.java:90)  at 
org.apache.hadoop.hdfs.tools.DFSAdmin.main(DFSAdmin.java:2590) List open files 
failed. listOpenFiles: java.lang.NullPointerException{{}}

 

{{{quote}}}

 


> Fix NullPointerException In listOpenFiles
> ---------------------------------------
>
>                 Key: HDFS-15998
>                 URL: https://issues.apache.org/jira/browse/HDFS-15998
>             Project: Hadoop HDFS
>          Issue Type: Bug
>    Affects Versions: 3.2.0
>            Reporter: Haiyang Hu
>            Priority: Major
>
> Using the Hadoop 3.2.0 client to execute the following command occasionally 
> produces an NPE (NullPointerException).
> hdfs dfsadmin -Dfs.defaultFS=hdfs://xxx -listOpenFiles -blockingDecommission 
> -path /xxx
>  
> {code:java}
> // code placeholder
> 21/03/17 14:17:38 DEBUG retry.RetryInvocationHandler: Exception while 
> invoking call #0 ClientNamenodeProtocolTranslatorPB.listOpenFiles over 
> xxx:8020. Not retrying because try once and fail.21/03/17 14:17:38 DEBUG 
> retry.RetryInvocationHandler: Exception while invoking call #0 
> ClientNamenodeProtocolTranslatorPB.listOpenFiles over xxx:8020. Not retrying 
> because try once and fail. 
> org.apache.hadoop.ipc.RemoteException(java.lang.NullPointerException): 
> java.lang.NullPointerException  at 
> org.apache.hadoop.hdfs.server.namenode.FSNamesystem.getFilesBlockingDecom(FSNamesystem.java:1917)
>   at 
> org.apache.hadoop.hdfs.server.namenode.FSNamesystem.listOpenFiles(FSNamesystem.java:1876)
>   at 
> org.apache.hadoop.hdfs.server.namenode.NameNodeRpcServer.listOpenFiles(NameNodeRpcServer.java:1453)
>   at 
> org.apache.hadoop.hdfs.protocolPB.ClientNamenodeProtocolServerSideTranslatorPB.listOpenFiles(ClientNamenodeProtocolServerSideTranslatorPB.java:1894)
>   at 
> org.apache.hadoop.hdfs.protocol.proto.ClientNamenodeProtocolProtos$ClientNamenodeProtocol$2.callBlockingMethod(ClientNamenodeProtocolProtos.java)
>   at 
> org.apache.hadoop.ipc.ProtobufRpcEngine$Server$ProtoBufRpcInvoker.call(ProtobufRpcEngine.java:524)
>   at org.apache.hadoop.ipc.RPC$Server.call(RPC.java:1025)  at 
> org.apache.hadoop.ipc.Server$RpcCall.run(Server.java:886)  at 
> org.apache.hadoop.ipc.Server$RpcCall.run(Server.java:828)  at 
> java.security.AccessController.doPrivileged(Native Method)  at 
> javax.security.auth.Subject.doAs(Subject.java:422)  at 
> org.apache.hadoop.security.UserGroupInformation.doAs(UserGroupInformation.java:1903)
>   at org.apache.hadoop.ipc.Server$Handler.run(Server.java:2717)
>   at org.apache.hadoop.ipc.Client.getRpcResponse(Client.java:1511)  at 
> org.apache.hadoop.ipc.Client.call(Client.java:1457)  at 
> org.apache.hadoop.ipc.Client.call(Client.java:1367)  at 
> org.apache.hadoop.ipc.ProtobufRpcEngine$Invoker.invoke(ProtobufRpcEngine.java:228)
>   at 
> org.apache.hadoop.ipc.ProtobufRpcEngine$Invoker.invoke(ProtobufRpcEngine.java:116)
>   at com.sun.proxy.$Proxy9.listOpenFiles(Unknown Source)  at 
> org.apache.hadoop.hdfs.protocolPB.ClientNamenodeProtocolTranslatorPB.listOpenFiles(ClientNamenodeProtocolTranslatorPB.java:1952)
>   at sun.reflect.NativeMethodAccessorImpl.invoke0(Native Method)  at 
> sun.reflect.NativeMethodAccessorImpl.invoke(NativeMethodAccessorImpl.java:62) 
>  at 
> sun.reflect.DelegatingMethodAccessorImpl.invoke(DelegatingMethodAccessorImpl.java:43)
>   at java.lang.reflect.Method.invoke(Method.java:498)  at 
> org.apache.hadoop.io.retry.RetryInvocationHandler.invokeMethod(RetryInvocationHandler.java:422)
>   at 
> org.apache.hadoop.io.retry.RetryInvocationHandler$Call.invokeMethod(RetryInvocationHandler.java:165)
>   at 
> org.apache.hadoop.io.retry.RetryInvocationHandler$Call.invoke(RetryInvocationHandler.java:157)
>   at 
> org.apache.hadoop.io.retry.RetryInvocationHandler$Call.invokeOnce(RetryInvocationHandler.java:95)
>   at 
> org.apache.hadoop.io.retry.RetryInvocationHandler.invoke(RetryInvocationHandler.java:359)
>   at com.sun.proxy.$Proxy10.listOpenFiles(Unknown Source)  at 
> org.apache.hadoop.hdfs.protocol.OpenFilesIterator.makeRequest(OpenFilesIterator.java:89)
>   at 
> org.apache.hadoop.hdfs.protocol.OpenFilesIterator.makeRequest(OpenFilesIterator.java:35)
>   at 
> org.apache.hadoop.fs.BatchedRemoteIterator.makeRequest(BatchedRemoteIterator.java:77)
>   at 
> org.apache.hadoop.fs.BatchedRemoteIterator.makeRequestIfNeeded(BatchedRemoteIterator.java:85)
>   at 
> org.apache.hadoop.fs.BatchedRemoteIterator.hasNext(BatchedRemoteIterator.java:99)
>   at org.apache.hadoop.hdfs.tools.DFSAdmin.printOpenFiles(DFSAdmin.java:1006) 
>  at org.apache.hadoop.hdfs.tools.DFSAdmin.listOpenFiles(DFSAdmin.java:994)  
> at org.apache.hadoop.hdfs.tools.DFSAdmin.run(DFSAdmin.java:2431)  at 
> org.apache.hadoop.util.ToolRunner.run(ToolRunner.java:76)  at 
> org.apache.hadoop.util.ToolRunner.run(ToolRunner.java:90)  
> at org.apache.hadoop.hdfs.tools.DFSAdmin.main(DFSAdmin.java:2590) 
> List open files failed. 
> listOpenFiles: java.lang.NullPointerException
> {code}
>  
>  
>  



--
This message was sent by Atlassian Jira
(v8.3.4#803005)

---------------------------------------------------------------------
To unsubscribe, e-mail: [email protected]
For additional commands, e-mail: [email protected]

Reply via email to