This is an automated email from the ASF dual-hosted git repository.

binlijin pushed a commit to branch branch-2.4
in repository https://gitbox.apache.org/repos/asf/hbase.git


The following commit(s) were added to refs/heads/branch-2.4 by this push:
     new 814b196  HBASE-25997 NettyRpcFrameDecoder decode request header wrong when han… (#3380)
814b196 is described below

commit 814b196f038817b2dd683c89a289b5780113b010
Author: binlijin <[email protected]>
AuthorDate: Tue Jun 15 14:25:18 2021 +0800

    HBASE-25997 NettyRpcFrameDecoder decode request header wrong when han… (#3380)
    
    Signed-off-by: Duo Zhang <[email protected]>
---
 .../hadoop/hbase/ipc/NettyRpcFrameDecoder.java     |  2 +
 .../hbase/client/TestRequestTooBigException.java   | 85 ++++++++++++++++++++++
 2 files changed, 87 insertions(+)

diff --git a/hbase-server/src/main/java/org/apache/hadoop/hbase/ipc/NettyRpcFrameDecoder.java b/hbase-server/src/main/java/org/apache/hadoop/hbase/ipc/NettyRpcFrameDecoder.java
index 9444cd0..c46fe78 100644
--- a/hbase-server/src/main/java/org/apache/hadoop/hbase/ipc/NettyRpcFrameDecoder.java
+++ b/hbase-server/src/main/java/org/apache/hadoop/hbase/ipc/NettyRpcFrameDecoder.java
@@ -87,6 +87,7 @@ public class NettyRpcFrameDecoder extends ByteToMessageDecoder {
       NettyRpcServer.LOG.warn(requestTooBigMessage);
 
       if (connection.connectionHeaderRead) {
+        in.skipBytes(FRAME_LENGTH_FIELD_LENGTH);
         handleTooBigRequest(in);
         return;
       }
@@ -122,6 +123,7 @@ public class NettyRpcFrameDecoder extends ByteToMessageDecoder {
     }
 
     RPCProtos.RequestHeader header = getHeader(in, headerSize);
+    NettyRpcServer.LOG.info("BigRequest header is = " + header);
 
     // Notify the client about the offending request
     NettyServerCall reqTooBig = connection.createCall(header.getCallId(), connection.service, null,
diff --git a/hbase-server/src/test/java/org/apache/hadoop/hbase/client/TestRequestTooBigException.java b/hbase-server/src/test/java/org/apache/hadoop/hbase/client/TestRequestTooBigException.java
new file mode 100644
index 0000000..5f7f154
--- /dev/null
+++ b/hbase-server/src/test/java/org/apache/hadoop/hbase/client/TestRequestTooBigException.java
@@ -0,0 +1,85 @@
+/**
+ * Licensed to the Apache Software Foundation (ASF) under one
+ * or more contributor license agreements.  See the NOTICE file
+ * distributed with this work for additional information
+ * regarding copyright ownership.  The ASF licenses this file
+ * to you under the Apache License, Version 2.0 (the
+ * "License"); you may not use this file except in compliance
+ * with the License.  You may obtain a copy of the License at
+ *
+ *     http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+package org.apache.hadoop.hbase.client;
+
+import static org.junit.Assert.assertTrue;
+
+import org.apache.hadoop.hbase.HBaseClassTestRule;
+import org.apache.hadoop.hbase.HBaseTestingUtility;
+import org.apache.hadoop.hbase.TableName;
+import org.apache.hadoop.hbase.exceptions.RequestTooBigException;
+import org.apache.hadoop.hbase.testclassification.ClientTests;
+import org.apache.hadoop.hbase.testclassification.MediumTests;
+import org.apache.hadoop.hbase.util.Bytes;
+import org.junit.AfterClass;
+import org.junit.BeforeClass;
+import org.junit.ClassRule;
+import org.junit.Rule;
+import org.junit.Test;
+import org.junit.experimental.categories.Category;
+import org.junit.rules.TestName;
+
+@Category({MediumTests.class, ClientTests.class})
+public class TestRequestTooBigException {
+
+  @ClassRule
+  public static final HBaseClassTestRule CLASS_RULE =
+      HBaseClassTestRule.forClass(TestRequestTooBigException.class);
+
+  private static final HBaseTestingUtility TEST_UTIL = new HBaseTestingUtility();
+
+  @Rule
+  public TestName name = new TestName();
+
+  @BeforeClass
+  public static void setUpBeforeClass() throws Exception {
+    TEST_UTIL.startMiniCluster();
+  }
+
+  @AfterClass
+  public static void tearDownAfterClass() throws Exception {
+    TEST_UTIL.shutdownMiniCluster();
+  }
+
+  @Test
+  public void testHbasePutDeleteCell() throws Exception {
+    final TableName tableName = TableName.valueOf(name.getMethodName());
+    final byte[] family = Bytes.toBytes("cf");
+    Table table = TEST_UTIL.createTable(tableName, family);
+    TEST_UTIL.waitTableAvailable(tableName.getName(), 5000);
+    try {
+      byte[] value = new byte[2 * 2014 * 1024];
+
+      Put p = new Put(Bytes.toBytes("bigrow"));
+      // big request = 400*2 M
+      for (int i = 0; i < 400; i++) {
+        p.addColumn(family, Bytes.toBytes("someQualifier" + i), value);
+      }
+      try {
+        table.put(p);
+        assertTrue("expected RequestTooBigException", false);
+      } catch (RequestTooBigException e) {
+        assertTrue("expected RequestTooBigException", true);
+      }
+    } finally {
+      table.close();
+    }
+  }
+}
+
+

Reply via email to