HBASE-17371 Enhance 'HBaseContextSuite @ distributedScan to test HBase client
with filter'


Project: http://git-wip-us.apache.org/repos/asf/hbase/repo
Commit: http://git-wip-us.apache.org/repos/asf/hbase/commit/ccb8d671
Tree: http://git-wip-us.apache.org/repos/asf/hbase/tree/ccb8d671
Diff: http://git-wip-us.apache.org/repos/asf/hbase/diff/ccb8d671

Branch: refs/heads/hbase-12439
Commit: ccb8d671d590f4ea347fb85049f84620564ce1cb
Parents: 5ffbd4a
Author: tedyu <[email protected]>
Authored: Tue Dec 27 15:44:18 2016 -0800
Committer: tedyu <[email protected]>
Committed: Tue Dec 27 15:44:18 2016 -0800

----------------------------------------------------------------------
 .../apache/hadoop/hbase/spark/HBaseContextSuite.scala   | 12 ++++++++++++
 1 file changed, 12 insertions(+)
----------------------------------------------------------------------


http://git-wip-us.apache.org/repos/asf/hbase/blob/ccb8d671/hbase-spark/src/test/scala/org/apache/hadoop/hbase/spark/HBaseContextSuite.scala
----------------------------------------------------------------------
diff --git 
a/hbase-spark/src/test/scala/org/apache/hadoop/hbase/spark/HBaseContextSuite.scala
 
b/hbase-spark/src/test/scala/org/apache/hadoop/hbase/spark/HBaseContextSuite.scala
index b27cfc7..1e1e52d 100644
--- 
a/hbase-spark/src/test/scala/org/apache/hadoop/hbase/spark/HBaseContextSuite.scala
+++ 
b/hbase-spark/src/test/scala/org/apache/hadoop/hbase/spark/HBaseContextSuite.scala
@@ -17,6 +17,7 @@
 package org.apache.hadoop.hbase.spark
 
 import org.apache.hadoop.hbase.client._
+import org.apache.hadoop.hbase.filter.FirstKeyOnlyFilter
 import org.apache.hadoop.hbase.util.Bytes
 import org.apache.hadoop.hbase.{ CellUtil, TableName, HBaseTestingUtility}
 import org.apache.spark.{SparkException, Logging, SparkContext}
@@ -311,6 +312,9 @@ BeforeAndAfterEach with BeforeAndAfterAll  with Logging {
       put = new Put(Bytes.toBytes("scan2"))
       put.addColumn(Bytes.toBytes(columnFamily), Bytes.toBytes("a"), 
Bytes.toBytes("foo2"))
       table.put(put)
+      put = new Put(Bytes.toBytes("scan2"))
+      put.addColumn(Bytes.toBytes(columnFamily), Bytes.toBytes("b"), 
Bytes.toBytes("foo-2"))
+      table.put(put)
       put = new Put(Bytes.toBytes("scan3"))
       put.addColumn(Bytes.toBytes(columnFamily), Bytes.toBytes("a"), 
Bytes.toBytes("foo3"))
       table.put(put)
@@ -328,15 +332,23 @@ BeforeAndAfterEach with BeforeAndAfterAll  with Logging {
     val hbaseContext = new HBaseContext(sc, config)
 
     val scan = new Scan()
+    val filter = new FirstKeyOnlyFilter()
     scan.setCaching(100)
     scan.setStartRow(Bytes.toBytes("scan2"))
     scan.setStopRow(Bytes.toBytes("scan4_"))
+    scan.setFilter(filter)
 
     val scanRdd = hbaseContext.hbaseRDD(TableName.valueOf(tableName), scan)
 
     try {
       val scanList = scanRdd.map(r => r._1.copyBytes()).collect()
       assert(scanList.length == 3)
+      var cnt = 0
+      scanRdd.map(r => r._2.listCells().size()).collect().foreach(l => {
+        cnt += l
+      })
+      // the number of cells returned would be 4 without the Filter
+      assert(cnt == 3);
     } catch {
       case ex: Exception => ex.printStackTrace()
     }

Reply via email to the mailing list.