[ https://issues.apache.org/jira/browse/HDFS-14708?page=com.atlassian.jira.plugin.system.issuetabpanels:comment-tabpanel&focusedCommentId=16904634#comment-16904634 ]
Lisheng Sun commented on HDFS-14708:
------------------------------------
Attached and uploaded the v1 patch.
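For context, the failure below is protobuf's message size check rejecting the oversized block report while the NameNode decodes it in BlockListAsLongs$BufferDecoder. As a purely illustrative sketch (the helper class and method names here are invented, and this is not the content of the attached patch), raising the limit on a CodedInputStream before decoding a large message would look roughly like this:

{code:java}
import com.google.protobuf.CodedInputStream;

// Illustrative helper only; not part of HDFS or of the attached patch.
public class LargeMessageDecodeSketch {

  /**
   * Wraps serialized bytes in a CodedInputStream whose per-message size
   * limit has been raised, so that very large messages do not trigger
   * "Protocol message was too large. May be malicious." during parsing.
   */
  public static CodedInputStream newLargeLimitStream(byte[] serializedBytes) {
    CodedInputStream cis = CodedInputStream.newInstance(serializedBytes);
    // Raise the default size limit, as the exception message itself suggests.
    cis.setSizeLimit(Integer.MAX_VALUE);
    return cis;
  }
}
{code}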
> TestLargeBlockReport#testBlockReportSucceedsWithLargerLengthLimit fails in trunk
> --------------------------------------------------------------------------------
>
> Key: HDFS-14708
> URL: https://issues.apache.org/jira/browse/HDFS-14708
> Project: Hadoop HDFS
> Issue Type: Bug
> Reporter: Lisheng Sun
> Assignee: Lisheng Sun
> Priority: Minor
> Attachments: HDFS-14708-001.patch
>
>
> {code:java}
> [ERROR] testBlockReportSucceedsWithLargerLengthLimit(org.apache.hadoop.hdfs.server.datanode.TestLargeBlockReport)  Time elapsed: 47.956 s  <<< ERROR!
> org.apache.hadoop.ipc.RemoteException(java.io.IOException): java.lang.IllegalStateException: com.google.protobuf.InvalidProtocolBufferException: Protocol message was too large. May be malicious. Use CodedInputStream.setSizeLimit() to increase the size limit.
> at org.apache.hadoop.hdfs.server.blockmanagement.BlockManager.runBlockOp(BlockManager.java:5011)
> at org.apache.hadoop.hdfs.server.namenode.NameNodeRpcServer.blockReport(NameNodeRpcServer.java:1581)
> at org.apache.hadoop.hdfs.protocolPB.DatanodeProtocolServerSideTranslatorPB.blockReport(DatanodeProtocolServerSideTranslatorPB.java:181)
> at org.apache.hadoop.hdfs.protocol.proto.DatanodeProtocolProtos$DatanodeProtocolService$2.callBlockingMethod(DatanodeProtocolProtos.java:31664)
> at org.apache.hadoop.ipc.ProtobufRpcEngine$Server$ProtoBufRpcInvoker.call(ProtobufRpcEngine.java:529)
> at org.apache.hadoop.ipc.RPC$Server.call(RPC.java:1070)
> at org.apache.hadoop.ipc.Server$RpcCall.run(Server.java:1001)
> at org.apache.hadoop.ipc.Server$RpcCall.run(Server.java:929)
> at java.security.AccessController.doPrivileged(Native Method)
> at javax.security.auth.Subject.doAs(Subject.java:422)
> at org.apache.hadoop.security.UserGroupInformation.doAs(UserGroupInformation.java:1891)
> at org.apache.hadoop.ipc.Server$Handler.run(Server.java:2921)
> Caused by: java.lang.IllegalStateException: com.google.protobuf.InvalidProtocolBufferException: Protocol message was too large. May be malicious. Use CodedInputStream.setSizeLimit() to increase the size limit.
> at org.apache.hadoop.hdfs.protocol.BlockListAsLongs$BufferDecoder$1.next(BlockListAsLongs.java:424)
> at org.apache.hadoop.hdfs.protocol.BlockListAsLongs$BufferDecoder$1.next(BlockListAsLongs.java:396)
> at org.apache.hadoop.hdfs.server.blockmanagement.BlockManager.reportDiffSorted(BlockManager.java:2952)
> at org.apache.hadoop.hdfs.server.blockmanagement.BlockManager.processReport(BlockManager.java:2787)
> at org.apache.hadoop.hdfs.server.blockmanagement.BlockManager.processReport(BlockManager.java:2655)
> at org.apache.hadoop.hdfs.server.namenode.NameNodeRpcServer.lambda$blockReport$0(NameNodeRpcServer.java:1582)
> at java.util.concurrent.FutureTask.run(FutureTask.java:266)
> at org.apache.hadoop.hdfs.server.blockmanagement.BlockManager$BlockReportProcessingThread.processQueue(BlockManager.java:5089)
> at org.apache.hadoop.hdfs.server.blockmanagement.BlockManager$BlockReportProcessingThread.run(BlockManager.java:5068)
> Caused by: com.google.protobuf.InvalidProtocolBufferException: Protocol message was too large. May be malicious. Use CodedInputStream.setSizeLimit() to increase the size limit.
> at com.google.protobuf.InvalidProtocolBufferException.sizeLimitExceeded(InvalidProtocolBufferException.java:110)
> at com.google.protobuf.CodedInputStream.refillBuffer(CodedInputStream.java:755)
> at com.google.protobuf.CodedInputStream.readRawByte(CodedInputStream.java:769)
> at com.google.protobuf.CodedInputStream.readRawVarint64(CodedInputStream.java:462)
> at org.apache.hadoop.hdfs.protocol.BlockListAsLongs$BufferDecoder$1.next(BlockListAsLongs.java:420)
> ... 8 more
> at org.apache.hadoop.ipc.Client.getRpcResponse(Client.java:1553)
> at org.apache.hadoop.ipc.Client.call(Client.java:1499)
> at org.apache.hadoop.ipc.Client.call(Client.java:1396)
> at org.apache.hadoop.ipc.ProtobufRpcEngine$Invoker.invoke(ProtobufRpcEngine.java:233)
> at org.apache.hadoop.ipc.ProtobufRpcEngine$Invoker.invoke(ProtobufRpcEngine.java:118)
> at com.sun.proxy.$Proxy25.blockReport(Unknown Source)
> at org.apache.hadoop.hdfs.protocolPB.DatanodeProtocolClientSideTranslatorPB.blockReport(DatanodeProtocolClientSideTranslatorPB.java:218)
> at org.apache.hadoop.hdfs.server.datanode.TestLargeBlockReport.testBlockReportSucceedsWithLargerLengthLimit(TestLargeBlockReport.java:97)
> at sun.reflect.NativeMethodAccessorImpl.invoke0(Native Method)
> at sun.reflect.NativeMethodAccessorImpl.invoke(NativeMethodAccessorImpl.java:62)
> at sun.reflect.DelegatingMethodAccessorImpl.invoke(DelegatingMethodAccessorImpl.java:43)
> at java.lang.reflect.Method.invoke(Method.java:498)
> at org.junit.runners.model.FrameworkMethod$1.runReflectiveCall(FrameworkMethod.java:50)
> at org.junit.internal.runners.model.ReflectiveCallable.run(ReflectiveCallable.java:12)
> at org.junit.runners.model.FrameworkMethod.invokeExplosively(FrameworkMethod.java:47)
> at org.junit.internal.runners.statements.InvokeMethod.evaluate(InvokeMethod.java:17)
> at org.junit.internal.runners.statements.RunAfters.evaluate(RunAfters.java:27)
> at org.junit.runners.ParentRunner.runLeaf(ParentRunner.java:325)
> at org.junit.runners.BlockJUnit4ClassRunner.runChild(BlockJUnit4ClassRunner.java:78)
> at org.junit.runners.BlockJUnit4ClassRunner.runChild(BlockJUnit4ClassRunner.java:57)
> at org.junit.runners.ParentRunner$3.run(ParentRunner.java:290)
> at org.junit.runners.ParentRunner$1.schedule(ParentRunner.java:71)
> at org.junit.runners.ParentRunner.runChildren(ParentRunner.java:288)
> at org.junit.runners.ParentRunner.access$000(ParentRunner.java:58)
> at org.junit.runners.ParentRunner$2.evaluate(ParentRunner.java:268)
> at org.junit.internal.runners.statements.RunBefores.evaluate(RunBefores.java:26)
> at org.junit.runners.ParentRunner.run(ParentRunner.java:363)
> at org.apache.maven.surefire.junit4.JUnit4Provider.execute(JUnit4Provider.java:365)
> at org.apache.maven.surefire.junit4.JUnit4Provider.executeWithRerun(JUnit4Provider.java:273)
> at org.apache.maven.surefire.junit4.JUnit4Provider.executeTestSet(JUnit4Provider.java:238)
> at org.apache.maven.surefire.junit4.JUnit4Provider.invoke(JUnit4Provider.java:159)
> at org.apache.maven.surefire.booter.ForkedBooter.invokeProviderInSameClassLoader(ForkedBooter.java:384)
> at org.apache.maven.surefire.booter.ForkedBooter.runSuitesInProcess(ForkedBooter.java:345)
> at org.apache.maven.surefire.booter.ForkedBooter.execute(ForkedBooter.java:126)
> at org.apache.maven.surefire.booter.ForkedBooter.main(ForkedBooter.java:418)
> {code}
> Ref: [https://builds.apache.org/job/PreCommit-HDFS-Build/27416/artifact/out/patch-unit-hadoop-hdfs-project_hadoop-hdfs.txt]