This is an automated email from the ASF dual-hosted git repository.
chenglei pushed a commit to branch 4.14-HBase-1.3
in repository https://gitbox.apache.org/repos/asf/phoenix.git
The following commit(s) were added to refs/heads/4.14-HBase-1.3 by this push:
new e602407 PHOENIX-5217 Incorrect result for COUNT DISTINCT limit
e602407 is described below
commit e602407f804752c02e0c2caaadc9ea63363d3bbd
Author: chenglei <[email protected]>
AuthorDate: Fri Apr 19 15:28:40 2019 +0800
PHOENIX-5217 Incorrect result for COUNT DISTINCT limit
---
.../apache/phoenix/end2end/DistinctCountIT.java | 28 ++++++++++++++++++++++
.../phoenix/iterate/BaseResultIterators.java | 23 ++++++++++--------
.../apache/phoenix/compile/QueryCompilerTest.java | 25 +++++++++++++++++++
.../java/org/apache/phoenix/util/TestUtil.java | 13 ++++++++++
4 files changed, 79 insertions(+), 10 deletions(-)
diff --git a/phoenix-core/src/it/java/org/apache/phoenix/end2end/DistinctCountIT.java b/phoenix-core/src/it/java/org/apache/phoenix/end2end/DistinctCountIT.java
index e586ebc..ae86c36 100644
--- a/phoenix-core/src/it/java/org/apache/phoenix/end2end/DistinctCountIT.java
+++ b/phoenix-core/src/it/java/org/apache/phoenix/end2end/DistinctCountIT.java
@@ -32,6 +32,7 @@ import static org.apache.phoenix.util.TestUtil.ROW7;
 import static org.apache.phoenix.util.TestUtil.ROW8;
 import static org.apache.phoenix.util.TestUtil.ROW9;
 import static org.apache.phoenix.util.TestUtil.TEST_PROPERTIES;
+import static org.apache.phoenix.util.TestUtil.assertResultSet;
 import static org.junit.Assert.assertEquals;
 import static org.junit.Assert.assertFalse;
 import static org.junit.Assert.assertTrue;
@@ -468,4 +469,31 @@ public class DistinctCountIT extends ParallelStatsDisabledIT {
         assertEquals(2, rs.getInt(1));
         conn.close();
     }
+
+    @Test
+    public void testDistinctCountLimitBug5217() throws Exception {
+        Connection conn = null;
+        try {
+            Properties props = PropertiesUtil.deepCopy(TEST_PROPERTIES);
+            conn = DriverManager.getConnection(getUrl(), props);
+            String tableName = generateUniqueName();
+            String sql = "create table " + tableName + "( "+
+                    " pk1 integer not null , " +
+                    " pk2 integer not null, " +
+                    " v integer, " +
+                    " CONSTRAINT TEST_PK PRIMARY KEY (pk1,pk2))";
+            conn.createStatement().execute(sql);
+            conn.createStatement().execute("UPSERT INTO "+tableName+"(pk1,pk2,v) VALUES (1,1,1)");
+            conn.createStatement().execute("UPSERT INTO "+tableName+"(pk1,pk2,v) VALUES (2,2,2)");
+            conn.commit();
+
+            sql = "select count(distinct pk1) from " + tableName + " limit 1";
+            ResultSet rs = conn.prepareStatement(sql).executeQuery();
+            assertResultSet(rs, new Object[][]{{Long.valueOf(2L)}});
+        } finally {
+            if(conn!=null) {
+                conn.close();
+            }
+        }
+    }
 }
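Before this fix, the LIMIT 1 on the ungrouped COUNT(DISTINCT pk1) query above was pushed to the server as a PageFilter alongside the DistinctPrefixFilter, so the scan stopped after the first distinct prefix and the count came back as 1 instead of the expected 2. The BaseResultIterators change below removes that push-down for ungrouped aggregates.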
diff --git a/phoenix-core/src/main/java/org/apache/phoenix/iterate/BaseResultIterators.java b/phoenix-core/src/main/java/org/apache/phoenix/iterate/BaseResultIterators.java
index 6bc9a15..d2c2641 100644
--- a/phoenix-core/src/main/java/org/apache/phoenix/iterate/BaseResultIterators.java
+++ b/phoenix-core/src/main/java/org/apache/phoenix/iterate/BaseResultIterators.java
@@ -67,6 +67,7 @@ import org.apache.hadoop.hbase.io.ImmutableBytesWritable;
 import org.apache.hadoop.hbase.util.Bytes;
 import org.apache.hadoop.hbase.util.Pair;
 import org.apache.phoenix.cache.ServerCacheClient.ServerCache;
+import org.apache.phoenix.compile.GroupByCompiler.GroupBy;
 import org.apache.phoenix.compile.QueryPlan;
 import org.apache.phoenix.compile.RowProjector;
 import org.apache.phoenix.compile.ScanRanges;
@@ -256,19 +257,21 @@ public abstract class BaseResultIterators extends ExplainTable implements Result
         if(offset!=null){
             ScanUtil.addOffsetAttribute(scan, offset);
         }
-        int cols = plan.getGroupBy().getOrderPreservingColumnCount();
+        GroupBy groupBy = plan.getGroupBy();
+        int cols = groupBy.getOrderPreservingColumnCount();
         if (cols > 0 && keyOnlyFilter &&
             !plan.getStatement().getHint().hasHint(HintNode.Hint.RANGE_SCAN) &&
             cols < plan.getTableRef().getTable().getRowKeySchema().getFieldCount() &&
-            plan.getGroupBy().isOrderPreserving() &&
-            (context.getAggregationManager().isEmpty() || plan.getGroupBy().isUngroupedAggregate())) {
-
-            ScanUtil.andFilterAtEnd(scan,
-                    new DistinctPrefixFilter(plan.getTableRef().getTable().getRowKeySchema(),
-                            cols));
-            if (plan.getLimit() != null) { // We can push the limit to the server
-                ScanUtil.andFilterAtEnd(scan, new PageFilter(plan.getLimit()));
-            }
+            groupBy.isOrderPreserving() &&
+            (context.getAggregationManager().isEmpty() || groupBy.isUngroupedAggregate())) {
+
+            ScanUtil.andFilterAtEnd(scan,
+                    new DistinctPrefixFilter(plan.getTableRef().getTable().getRowKeySchema(),cols));
+            if (!groupBy.isUngroupedAggregate() && plan.getLimit() != null) {
+                // We can push the limit to the server, but for an
+                // UngroupedAggregate we cannot push the limit.
+                ScanUtil.andFilterAtEnd(scan, new PageFilter(plan.getLimit()));
+            }
         }
         scan.setAttribute(BaseScannerRegionObserver.QUALIFIER_ENCODING_SCHEME, new byte[]{table.getEncodingScheme().getSerializedMetadataValue()});
         scan.setAttribute(BaseScannerRegionObserver.IMMUTABLE_STORAGE_ENCODING_SCHEME, new byte[]{table.getImmutableStorageScheme().getSerializedMetadataValue()});
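The crux of the change above: a pushed-down PageFilter stops the region scan once it has accepted "limit" rows, which is only safe when each scanned row maps to one query result row. An ungrouped aggregate such as COUNT(DISTINCT pk1) returns a single row computed from every distinct prefix, so stopping the scan early truncates its input. A minimal, hedged sketch of PageFilter's stop behavior against the HBase 1.x filter API (the class name and printed labels are illustrative, not part of the patch):

import org.apache.hadoop.hbase.filter.Filter;
import org.apache.hadoop.hbase.filter.PageFilter;

// Hedged illustration: PageFilter(1) accepts one row, then reports the scan
// finished. Pushed next to a DistinctPrefixFilter, this would let an
// ungrouped COUNT(DISTINCT ...) see only the first distinct prefix.
public class PageFilterStopSketch {
    public static void main(String[] args) throws Exception {
        Filter pageFilter = new PageFilter(1L);
        // First row: filterRow() returns false, so the row is included.
        System.out.println("exclude first row? " + pageFilter.filterRow());
        // After one accepted row, filterAllRemaining() returns true and the
        // region scanner stops; later distinct prefixes are never visited.
        System.out.println("stop the scan now? " + pageFilter.filterAllRemaining());
    }
}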
diff --git a/phoenix-core/src/test/java/org/apache/phoenix/compile/QueryCompilerTest.java b/phoenix-core/src/test/java/org/apache/phoenix/compile/QueryCompilerTest.java
index 0ff17d3..581d507 100644
--- a/phoenix-core/src/test/java/org/apache/phoenix/compile/QueryCompilerTest.java
+++ b/phoenix-core/src/test/java/org/apache/phoenix/compile/QueryCompilerTest.java
@@ -48,6 +48,7 @@ import org.apache.hadoop.hbase.HRegionLocation;
 import org.apache.hadoop.hbase.client.Scan;
 import org.apache.hadoop.hbase.filter.Filter;
 import org.apache.hadoop.hbase.filter.FirstKeyOnlyFilter;
+import org.apache.hadoop.hbase.filter.PageFilter;
 import org.apache.hadoop.hbase.util.Bytes;
 import org.apache.phoenix.compile.OrderByCompiler.OrderBy;
 import org.apache.phoenix.coprocessor.BaseScannerRegionObserver;
@@ -4960,4 +4961,28 @@ public class QueryCompilerTest extends BaseConnectionlessQueryTest {
             }
         }
     }
+
+    @Test
+    public void testDistinctCountLimitBug5217() throws Exception {
+        Connection conn = null;
+        try {
+            conn = DriverManager.getConnection(getUrl());
+            String tableName = generateUniqueName();
+            String sql = "create table " + tableName + "( "+
+                    " pk1 integer not null , " +
+                    " pk2 integer not null, " +
+                    " v integer, " +
+                    " CONSTRAINT TEST_PK PRIMARY KEY (pk1,pk2))";
+            conn.createStatement().execute(sql);
+
+            sql = "select count(distinct pk1) from " + tableName + " limit 1";
+            QueryPlan plan = TestUtil.getOptimizeQueryPlan(conn, sql);
+            Scan scan = plan.getContext().getScan();
+            assertFalse(TestUtil.hasFilter(scan, PageFilter.class));
+        } finally {
+            if(conn!=null) {
+                conn.close();
+            }
+        }
+    }
 }
diff --git a/phoenix-core/src/test/java/org/apache/phoenix/util/TestUtil.java b/phoenix-core/src/test/java/org/apache/phoenix/util/TestUtil.java
index 6920772..a4b1d0a 100644
--- a/phoenix-core/src/test/java/org/apache/phoenix/util/TestUtil.java
+++ b/phoenix-core/src/test/java/org/apache/phoenix/util/TestUtil.java
@@ -47,6 +47,7 @@ import java.util.ArrayList;
 import java.util.Arrays;
 import java.util.Collection;
 import java.util.Collections;
+import java.util.Iterator;
 import java.util.List;
 import java.util.Properties;
@@ -65,6 +66,7 @@ import org.apache.hadoop.hbase.client.ResultScanner;
 import org.apache.hadoop.hbase.client.Scan;
 import org.apache.hadoop.hbase.client.coprocessor.Batch;
 import org.apache.hadoop.hbase.filter.CompareFilter.CompareOp;
+import org.apache.hadoop.hbase.filter.Filter;
 import org.apache.hadoop.hbase.io.ImmutableBytesWritable;
 import org.apache.hadoop.hbase.ipc.BlockingRpcCallback;
 import org.apache.hadoop.hbase.ipc.ServerRpcController;
@@ -1067,4 +1069,15 @@ public class TestUtil {
         }
         assertTrue(!rs.next());
     }
+
+    public static boolean hasFilter(Scan scan, Class<? extends Filter> filterClass) {
+        Iterator<Filter> filterIter = ScanUtil.getFilterIterator(scan);
+        while(filterIter.hasNext()) {
+            Filter filter = filterIter.next();
+            if(filterClass.isInstance(filter)) {
+                return true;
+            }
+        }
+        return false;
+    }
 }
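For intuition, the new TestUtil.hasFilter walks every filter attached to the scan (ScanUtil.getFilterIterator flattens a FilterList) and reports whether one matches the given class. A hedged standalone sketch of the same check against a hand-built Scan; simplified in that it flattens only one level of FilterList, unlike the real helper:

import java.util.Collections;
import java.util.List;

import org.apache.hadoop.hbase.client.Scan;
import org.apache.hadoop.hbase.filter.Filter;
import org.apache.hadoop.hbase.filter.FilterList;
import org.apache.hadoop.hbase.filter.FirstKeyOnlyFilter;
import org.apache.hadoop.hbase.filter.PageFilter;

// Hedged standalone sketch of what TestUtil.hasFilter checks: whether a
// filter of a given class is attached to a Scan, directly or in a FilterList.
public class HasFilterSketch {
    static boolean hasFilter(Scan scan, Class<? extends Filter> filterClass) {
        Filter f = scan.getFilter();
        List<Filter> filters = (f instanceof FilterList)
                ? ((FilterList) f).getFilters()
                : Collections.singletonList(f);
        for (Filter filter : filters) {
            if (filterClass.isInstance(filter)) {
                return true;
            }
        }
        return false;
    }

    public static void main(String[] args) {
        Scan scan = new Scan();
        scan.setFilter(new FilterList(new FirstKeyOnlyFilter(), new PageFilter(1L)));
        System.out.println(hasFilter(scan, PageFilter.class)); // true
        scan.setFilter(new FirstKeyOnlyFilter());
        System.out.println(hasFilter(scan, PageFilter.class)); // false
    }
}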