This is an automated email from the ASF dual-hosted git repository.

zhangduo pushed a commit to branch branch-2
in repository https://gitbox.apache.org/repos/asf/hbase.git


The following commit(s) were added to refs/heads/branch-2 by this push:
     new 20aea54f7a9 HBASE-28124 Missing fields in Scan.toJSON (#5678)
20aea54f7a9 is described below

commit 20aea54f7a9079860e1fa3207c03ebc55cca8edc
Author: chandrasekhar-188k <154109917+chandrasekhar-1...@users.noreply.github.com>
AuthorDate: Tue Mar 19 18:56:28 2024 +0530

    HBASE-28124 Missing fields in Scan.toJSON (#5678)
    
    Signed-off-by: Pankaj Kumar <pankajku...@apache.org>
    Signed-off-by: Rajeshbabu Chintaguntla <rajeshb...@apache.org>
    Signed-off-by: Duo Zhang <zhang...@apache.org>
    (cherry picked from commit 0763a740960f7cbb177abd596d9cb203aaf5f025)
---
 .../java/org/apache/hadoop/hbase/client/Scan.java  | 31 ++++++-
 .../hadoop/hbase/client/TestOnlineLogRecord.java   | 26 +++---
 .../apache/hadoop/hbase/client/TestOperation.java  | 98 ++++++++++++++++++++++
 3 files changed, 144 insertions(+), 11 deletions(-)

diff --git a/hbase-client/src/main/java/org/apache/hadoop/hbase/client/Scan.java b/hbase-client/src/main/java/org/apache/hadoop/hbase/client/Scan.java
index cdc467d9ca9..74138d9e29f 100644
--- a/hbase-client/src/main/java/org/apache/hadoop/hbase/client/Scan.java
+++ b/hbase-client/src/main/java/org/apache/hadoop/hbase/client/Scan.java
@@ -25,6 +25,7 @@ import java.util.Map;
 import java.util.NavigableSet;
 import java.util.TreeMap;
 import java.util.TreeSet;
+import java.util.stream.Collectors;
 import org.apache.hadoop.hbase.HConstants;
 import org.apache.hadoop.hbase.client.metrics.ScanMetrics;
 import org.apache.hadoop.hbase.filter.Filter;
@@ -904,7 +905,7 @@ public class Scan extends Query {
    */
   @Override
   public Map<String, Object> toMap(int maxCols) {
-    // start with the fingerpring map and build on top of it
+    // start with the fingerprint map and build on top of it
     Map<String, Object> map = getFingerprint();
     // map from families to column list replaces fingerprint's list of families
     Map<String, List<String>> familyColumns = new HashMap<>();
@@ -952,6 +953,34 @@ public class Scan extends Query {
     if (getId() != null) {
       map.put("id", getId());
     }
+    map.put("includeStartRow", includeStartRow);
+    map.put("includeStopRow", includeStopRow);
+    map.put("allowPartialResults", allowPartialResults);
+    map.put("storeLimit", storeLimit);
+    map.put("storeOffset", storeOffset);
+    map.put("reversed", reversed);
+    if (null != asyncPrefetch) {
+      map.put("asyncPrefetch", asyncPrefetch);
+    }
+    map.put("mvccReadPoint", mvccReadPoint);
+    map.put("limit", limit);
+    map.put("readType", readType);
+    map.put("needCursorResult", needCursorResult);
+    map.put("targetReplicaId", targetReplicaId);
+    map.put("consistency", consistency);
+    if (!colFamTimeRangeMap.isEmpty()) {
+      Map<String, List<Long>> colFamTimeRangeMapStr = colFamTimeRangeMap.entrySet().stream()
+        .collect(Collectors.toMap((e) -> Bytes.toStringBinary(e.getKey()), e -> {
+          TimeRange value = e.getValue();
+          List<Long> rangeList = new ArrayList<>();
+          rangeList.add(value.getMin());
+          rangeList.add(value.getMax());
+          return rangeList;
+        }));
+
+      map.put("colFamTimeRangeMap", colFamTimeRangeMapStr);
+    }
+    map.put("priority", getPriority());
     return map;
   }
 
diff --git a/hbase-client/src/test/java/org/apache/hadoop/hbase/client/TestOnlineLogRecord.java b/hbase-client/src/test/java/org/apache/hadoop/hbase/client/TestOnlineLogRecord.java
index a16993d5659..72013b6f294 100644
--- a/hbase-client/src/test/java/org/apache/hadoop/hbase/client/TestOnlineLogRecord.java
+++ b/hbase-client/src/test/java/org/apache/hadoop/hbase/client/TestOnlineLogRecord.java
@@ -44,20 +44,26 @@ public class TestOnlineLogRecord {
     Scan scan = new Scan();
     scan.withStartRow(Bytes.toBytes(123));
     scan.withStopRow(Bytes.toBytes(456));
-    String expectedOutput = "{\n" + "  \"startTime\": 1,\n" + "  \"processingTime\": 2,\n"
-      + "  \"queueTime\": 3,\n" + "  \"responseSize\": 4,\n" + "  \"blockBytesScanned\": 5,\n"
-      + "  \"fsReadTime\": 6,\n" + "  \"multiGetsCount\": 6,\n" + "  \"multiMutationsCount\": 7,\n"
-      + "  \"scan\": {\n" + "    \"startRow\": \"\\\\x00\\\\x00\\\\x00{\",\n"
-      + "    \"stopRow\": \"\\\\x00\\\\x00\\\\x01\\\\xC8\",\n" + "    \"batch\": -1,\n"
-      + "    \"cacheBlocks\": true,\n" + "    \"totalColumns\": 0,\n"
-      + "    \"maxResultSize\": -1,\n" + "    \"families\": {},\n" + "    \"caching\": -1,\n"
-      + "    \"maxVersions\": 1,\n" + "    \"timeRange\": [\n" + "      0,\n"
-      + "      9223372036854775807\n" + "    ]\n" + "  }\n" + "}";
+    String expectedOutput =
+      "{\n" + "  \"startTime\": 1,\n" + "  \"processingTime\": 2,\n" + "  
\"queueTime\": 3,\n"
+        + "  \"responseSize\": 4,\n" + "  \"blockBytesScanned\": 5,\n" + "  
\"fsReadTime\": 6,\n"
+        + "  \"multiGetsCount\": 6,\n" + "  \"multiMutationsCount\": 7,\n" + " 
 \"scan\": {\n"
+        + "    \"startRow\": \"\\\\x00\\\\x00\\\\x00{\",\n" + "    
\"targetReplicaId\": -1,\n"
+        + "    \"batch\": -1,\n" + "    \"totalColumns\": 0,\n" + "    
\"maxResultSize\": -1,\n"
+        + "    \"families\": {},\n" + "    \"priority\": -1,\n" + "    
\"caching\": -1,\n"
+        + "    \"includeStopRow\": false,\n" + "    \"consistency\": 
\"STRONG\",\n"
+        + "    \"maxVersions\": 1,\n" + "    \"storeOffset\": 0,\n" + "    
\"mvccReadPoint\": -1,\n"
+        + "    \"includeStartRow\": true,\n" + "    \"needCursorResult\": 
false,\n"
+        + "    \"stopRow\": \"\\\\x00\\\\x00\\\\x01\\\\xC8\",\n" + "    
\"storeLimit\": -1,\n"
+        + "    \"limit\": -1,\n" + "    \"cacheBlocks\": true,\n"
+        + "    \"readType\": \"DEFAULT\",\n" + "    \"allowPartialResults\": 
false,\n"
+        + "    \"reversed\": false,\n" + "    \"timeRange\": [\n" + "      
0,\n"
+        + "      9223372036854775807\n" + "    ]\n" + "  }\n" + "}";
     OnlineLogRecord o = new OnlineLogRecord(1, 2, 3, 4, 5, 6, null, null, 
null, null, null, null,
       null, 6, 7, 0, scan, Collections.emptyMap(), Collections.emptyMap());
     String actualOutput = o.toJsonPrettyPrint();
     System.out.println(actualOutput);
-    Assert.assertEquals(actualOutput, expectedOutput);
+    Assert.assertEquals(expectedOutput, actualOutput);
   }
 
   @Test
diff --git a/hbase-client/src/test/java/org/apache/hadoop/hbase/client/TestOperation.java b/hbase-client/src/test/java/org/apache/hadoop/hbase/client/TestOperation.java
index a4552f1a407..6725f161f20 100644
--- a/hbase-client/src/test/java/org/apache/hadoop/hbase/client/TestOperation.java
+++ b/hbase-client/src/test/java/org/apache/hadoop/hbase/client/TestOperation.java
@@ -69,6 +69,9 @@ import org.junit.experimental.categories.Category;
 
 import org.apache.hbase.thirdparty.com.google.common.reflect.TypeToken;
 import org.apache.hbase.thirdparty.com.google.gson.Gson;
+import org.apache.hbase.thirdparty.com.google.gson.GsonBuilder;
+import org.apache.hbase.thirdparty.com.google.gson.LongSerializationPolicy;
+import org.apache.hbase.thirdparty.com.google.gson.ToNumberPolicy;
 
 /**
  * Run tests that use the functionality of the Operation superclass for Puts, Gets, Deletes, Scans,
@@ -345,6 +348,101 @@ public class TestOperation {
       kvMap.get("qualifier"));
   }
 
+  /**
+   * Test the client Scan Operations' JSON encoding to ensure that produced JSON is parseable and
+   * that the details are present and not corrupted.
+   * @throws IOException if the JSON conversion fails
+   */
+  @Test
+  public void testScanOperationToJSON() throws IOException {
+    // produce a Scan Operation
+    Scan scan = new Scan().withStartRow(ROW, true);
+    scan.addColumn(FAMILY, QUALIFIER);
+    scan.withStopRow(ROW, true);
+    scan.readVersions(5);
+    scan.setBatch(10);
+    scan.setAllowPartialResults(true);
+    scan.setMaxResultsPerColumnFamily(3);
+    scan.setRowOffsetPerColumnFamily(8);
+    scan.setCaching(20);
+    scan.setMaxResultSize(50);
+    scan.setCacheBlocks(true);
+    scan.setReversed(true);
+    scan.setTimeRange(1000, 2000);
+    scan.setAsyncPrefetch(true);
+    scan.setMvccReadPoint(123);
+    scan.setLimit(5);
+    scan.setReadType(Scan.ReadType.PREAD);
+    scan.setNeedCursorResult(true);
+    scan.setFilter(SCV_FILTER);
+    scan.setReplicaId(1);
+    scan.setConsistency(Consistency.STRONG);
+    scan.setLoadColumnFamiliesOnDemand(true);
+    scan.setColumnFamilyTimeRange(FAMILY, 2000, 3000);
+    scan.setPriority(10);
+
+    // get its JSON representation, and parse it
+    String json = scan.toJSON();
+    Type typeOfHashMap = new TypeToken<Map<String, Object>>() {
+    }.getType();
+    Gson gson = new GsonBuilder().setLongSerializationPolicy(LongSerializationPolicy.STRING)
+      .setObjectToNumberStrategy(ToNumberPolicy.LONG_OR_DOUBLE).create();
+    Map<String, Object> parsedJSON = gson.fromJson(json, typeOfHashMap);
+    // check for the row
+    assertEquals("startRow incorrect in Scan.toJSON()", 
Bytes.toStringBinary(ROW),
+      parsedJSON.get("startRow"));
+    // check for the family and the qualifier.
+    List familyInfo = (List) ((Map) parsedJSON.get("families")).get(Bytes.toStringBinary(FAMILY));
+    assertNotNull("Family absent in Scan.toJSON()", familyInfo);
+    assertEquals("Qualifier absent in Scan.toJSON()", 1, familyInfo.size());
+    assertEquals("Qualifier incorrect in Scan.toJSON()", 
Bytes.toStringBinary(QUALIFIER),
+      familyInfo.get(0));
+    assertEquals("stopRow incorrect in Scan.toJSON()", 
Bytes.toStringBinary(ROW),
+      parsedJSON.get("stopRow"));
+    assertEquals("includeStartRow incorrect in Scan.toJSON()", true,
+      parsedJSON.get("includeStartRow"));
+    assertEquals("includeStopRow incorrect in Scan.toJSON()", true,
+      parsedJSON.get("includeStopRow"));
+    assertEquals("maxVersions incorrect in Scan.toJSON()", 5L, 
parsedJSON.get("maxVersions"));
+    assertEquals("batch incorrect in Scan.toJSON()", 10L, 
parsedJSON.get("batch"));
+    assertEquals("allowPartialResults incorrect in Scan.toJSON()", true,
+      parsedJSON.get("allowPartialResults"));
+    assertEquals("storeLimit incorrect in Scan.toJSON()", 3L, 
parsedJSON.get("storeLimit"));
+    assertEquals("storeOffset incorrect in Scan.toJSON()", 8L, 
parsedJSON.get("storeOffset"));
+    assertEquals("caching incorrect in Scan.toJSON()", 20L, 
parsedJSON.get("caching"));
+    assertEquals("maxResultSize incorrect in Scan.toJSON()", "50", 
parsedJSON.get("maxResultSize"));
+    assertEquals("cacheBlocks incorrect in Scan.toJSON()", true, 
parsedJSON.get("cacheBlocks"));
+    assertEquals("reversed incorrect in Scan.toJSON()", true, 
parsedJSON.get("reversed"));
+    List trList = (List) parsedJSON.get("timeRange");
+    assertEquals("timeRange incorrect in Scan.toJSON()", 2, trList.size());
+    assertEquals("timeRange incorrect in Scan.toJSON()", "1000", 
trList.get(0));
+    assertEquals("timeRange incorrect in Scan.toJSON()", "2000", 
trList.get(1));
+
+    assertEquals("asyncPrefetch incorrect in Scan.toJSON()", true, 
parsedJSON.get("asyncPrefetch"));
+    assertEquals("mvccReadPoint incorrect in Scan.toJSON()", "123",
+      parsedJSON.get("mvccReadPoint"));
+    assertEquals("limit incorrect in Scan.toJSON()", 5L, 
parsedJSON.get("limit"));
+    assertEquals("readType incorrect in Scan.toJSON()", "PREAD", 
parsedJSON.get("readType"));
+    assertEquals("needCursorResult incorrect in Scan.toJSON()", true,
+      parsedJSON.get("needCursorResult"));
+
+    Map colFamTimeRange = (Map) parsedJSON.get("colFamTimeRangeMap");
+    assertEquals("colFamTimeRangeMap incorrect in Scan.toJSON()", 1L, 
colFamTimeRange.size());
+    List testFamily = (List) colFamTimeRange.get("testFamily");
+    assertEquals("colFamTimeRangeMap incorrect in Scan.toJSON()", 2L, 
testFamily.size());
+    assertEquals("colFamTimeRangeMap incorrect in Scan.toJSON()", "2000", 
testFamily.get(0));
+    assertEquals("colFamTimeRangeMap incorrect in Scan.toJSON()", "3000", 
testFamily.get(1));
+
+    assertEquals("targetReplicaId incorrect in Scan.toJSON()", 1L,
+      parsedJSON.get("targetReplicaId"));
+    assertEquals("consistency incorrect in Scan.toJSON()", "STRONG", 
parsedJSON.get("consistency"));
+    assertEquals("loadColumnFamiliesOnDemand incorrect in Scan.toJSON()", true,
+      parsedJSON.get("loadColumnFamiliesOnDemand"));
+
+    assertEquals("priority incorrect in Scan.toJSON()", 10L, 
parsedJSON.get("priority"));
+
+  }
+
   @Test
   public void testPutCreationWithByteBuffer() {
     Put p = new Put(ROW);
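
For anyone who wants to see the expanded output outside the unit tests, here is a minimal, illustrative sketch (not part of this patch; the class name and row keys are made up, and it assumes hbase-client and the shaded thirdparty Gson are on the classpath) that builds a Scan, serializes it with the now-richer toJSON(), and parses the result back the same way the new TestOperation test does:

  import java.util.Map;
  import org.apache.hadoop.hbase.client.Scan;
  import org.apache.hadoop.hbase.util.Bytes;
  import org.apache.hbase.thirdparty.com.google.gson.Gson;

  public class ScanJsonDemo { // hypothetical demo class, not in the HBase tree
    public static void main(String[] args) throws Exception {
      // build a Scan that exercises several of the newly serialized fields
      Scan scan = new Scan().withStartRow(Bytes.toBytes("row1"), true)
        .withStopRow(Bytes.toBytes("row9"), true).setLimit(5).setReadType(Scan.ReadType.PREAD);
      // toJSON() now carries includeStartRow, includeStopRow, limit, readType, priority, etc.
      String json = scan.toJSON();
      System.out.println(json);
      // parse the JSON back into a generic map, mirroring what the new test asserts against
      Map<?, ?> parsed = new Gson().fromJson(json, Map.class);
      System.out.println(parsed.get("readType")); // expected to print PREAD
    }
  }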
