This is an automated email from the ASF dual-hosted git repository.
chenglei pushed a commit to branch master
in repository https://gitbox.apache.org/repos/asf/phoenix.git
The following commit(s) were added to refs/heads/master by this push:
new e453a89 Revert "PHOENIX-5217 Incorrect result for COUNT DISTINCT limit"
e453a89 is described below
commit e453a89092c1ba8ff376d77b115afaac75d7492f
Author: chenglei <[email protected]>
AuthorDate: Fri Apr 19 14:50:06 2019 +0800
Revert "PHOENIX-5217 Incorrect result for COUNT DISTINCT limit"
This reverts commit 76541c5862d27fedc407fba6744f1cc793130127.
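For reference, the reverted fix targeted queries that combine an ungrouped COUNT(DISTINCT ...) with LIMIT. Below is a minimal standalone sketch of that scenario; the JDBC URL and table name are placeholders, while the DDL, upserts, and query are copied from the reverted testDistinctCountLimitBug5217 test in the diff below:

    import java.sql.Connection;
    import java.sql.DriverManager;
    import java.sql.ResultSet;

    public class Phoenix5217Repro {
        public static void main(String[] args) throws Exception {
            // Placeholder connection URL; adjust to your cluster.
            try (Connection conn = DriverManager.getConnection("jdbc:phoenix:localhost")) {
                conn.createStatement().execute(
                    "create table T5217 ( " +
                    " pk1 integer not null , " +
                    " pk2 integer not null, " +
                    " v integer, " +
                    " CONSTRAINT TEST_PK PRIMARY KEY (pk1,pk2))");
                conn.createStatement().execute("UPSERT INTO T5217(pk1,pk2,v) VALUES (1,1,1)");
                conn.createStatement().execute("UPSERT INTO T5217(pk1,pk2,v) VALUES (2,2,2)");
                conn.commit();
                // PHOENIX-5217: with the LIMIT pushed to the server as a PageFilter,
                // the scan can stop after one distinct row key prefix and report
                // 1 instead of 2.
                ResultSet rs = conn.prepareStatement(
                    "select count(distinct pk1) from T5217 limit 1").executeQuery();
                rs.next();
                System.out.println(rs.getLong(1)); // expected: 2
            }
        }
    }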
---
.../apache/phoenix/end2end/DistinctCountIT.java | 28 ----------------------
.../phoenix/iterate/BaseResultIterators.java | 23 ++++++++----------
.../apache/phoenix/compile/QueryCompilerTest.java | 25 -------------------
.../java/org/apache/phoenix/util/TestUtil.java | 12 ----------
4 files changed, 10 insertions(+), 78 deletions(-)
diff --git a/phoenix-core/src/it/java/org/apache/phoenix/end2end/DistinctCountIT.java b/phoenix-core/src/it/java/org/apache/phoenix/end2end/DistinctCountIT.java
index ae86c36..e586ebc 100644
--- a/phoenix-core/src/it/java/org/apache/phoenix/end2end/DistinctCountIT.java
+++ b/phoenix-core/src/it/java/org/apache/phoenix/end2end/DistinctCountIT.java
@@ -32,7 +32,6 @@ import static org.apache.phoenix.util.TestUtil.ROW7;
import static org.apache.phoenix.util.TestUtil.ROW8;
import static org.apache.phoenix.util.TestUtil.ROW9;
import static org.apache.phoenix.util.TestUtil.TEST_PROPERTIES;
-import static org.apache.phoenix.util.TestUtil.assertResultSet;
import static org.junit.Assert.assertEquals;
import static org.junit.Assert.assertFalse;
import static org.junit.Assert.assertTrue;
@@ -469,31 +468,4 @@ public class DistinctCountIT extends ParallelStatsDisabledIT {
assertEquals(2, rs.getInt(1));
conn.close();
}
-
- @Test
- public void testDistinctCountLimitBug5217() throws Exception {
- Connection conn = null;
- try {
- Properties props = PropertiesUtil.deepCopy(TEST_PROPERTIES);
- conn = DriverManager.getConnection(getUrl(), props);
- String tableName = generateUniqueName();
- String sql = "create table " + tableName + "( "+
- " pk1 integer not null , " +
- " pk2 integer not null, " +
- " v integer, " +
- " CONSTRAINT TEST_PK PRIMARY KEY (pk1,pk2))";
- conn.createStatement().execute(sql);
- conn.createStatement().execute("UPSERT INTO "+tableName+"(pk1,pk2,v) VALUES (1,1,1)");
- conn.createStatement().execute("UPSERT INTO "+tableName+"(pk1,pk2,v) VALUES (2,2,2)");
- conn.commit();
-
- sql = "select count(distinct pk1) from " + tableName + " limit 1";
- ResultSet rs = conn.prepareStatement(sql).executeQuery();
- assertResultSet(rs, new Object[][]{{Long.valueOf(2L)}});
- } finally {
- if(conn!=null) {
- conn.close();
- }
- }
- }
}
diff --git a/phoenix-core/src/main/java/org/apache/phoenix/iterate/BaseResultIterators.java b/phoenix-core/src/main/java/org/apache/phoenix/iterate/BaseResultIterators.java
index a562b8d..7fbb636 100644
--- a/phoenix-core/src/main/java/org/apache/phoenix/iterate/BaseResultIterators.java
+++ b/phoenix-core/src/main/java/org/apache/phoenix/iterate/BaseResultIterators.java
@@ -69,7 +69,6 @@ import org.apache.hadoop.hbase.io.ImmutableBytesWritable;
import org.apache.hadoop.hbase.util.Bytes;
import org.apache.hadoop.hbase.util.Pair;
import org.apache.phoenix.cache.ServerCacheClient.ServerCache;
-import org.apache.phoenix.compile.GroupByCompiler.GroupBy;
import org.apache.phoenix.compile.QueryPlan;
import org.apache.phoenix.compile.RowProjector;
import org.apache.phoenix.compile.ScanRanges;
@@ -263,21 +262,19 @@ public abstract class BaseResultIterators extends ExplainTable implements Result
if(offset!=null){
ScanUtil.addOffsetAttribute(scan, offset);
}
- GroupBy groupBy = plan.getGroupBy();
- int cols = groupBy.getOrderPreservingColumnCount();
+ int cols = plan.getGroupBy().getOrderPreservingColumnCount();
if (cols > 0 && keyOnlyFilter &&
!plan.getStatement().getHint().hasHint(HintNode.Hint.RANGE_SCAN) &&
cols < plan.getTableRef().getTable().getRowKeySchema().getFieldCount() &&
- groupBy.isOrderPreserving() &&
- (context.getAggregationManager().isEmpty() || groupBy.isUngroupedAggregate())) {
-
- ScanUtil.andFilterAtEnd(scan,
- new DistinctPrefixFilter(plan.getTableRef().getTable().getRowKeySchema(),cols));
- if (!groupBy.isUngroupedAggregate() && plan.getLimit() != null) {
- // We can push the limit to the server,but for UngroupedAggregate
- // we can not push the limit.
- ScanUtil.andFilterAtEnd(scan, new PageFilter(plan.getLimit()));
- }
+ plan.getGroupBy().isOrderPreserving() &&
+ (context.getAggregationManager().isEmpty() || plan.getGroupBy().isUngroupedAggregate())) {
+
+ ScanUtil.andFilterAtEnd(scan,
+ new DistinctPrefixFilter(plan.getTableRef().getTable().getRowKeySchema(),
+ cols));
+ if (plan.getLimit() != null) { // We can push the limit to the server
+ ScanUtil.andFilterAtEnd(scan, new PageFilter(plan.getLimit()));
+ }
}
scan.setAttribute(BaseScannerRegionObserver.QUALIFIER_ENCODING_SCHEME, new byte[]{table.getEncodingScheme().getSerializedMetadataValue()});
scan.setAttribute(BaseScannerRegionObserver.IMMUTABLE_STORAGE_ENCODING_SCHEME, new byte[]{table.getImmutableStorageScheme().getSerializedMetadataValue()});
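The hunk above restores the pre-fix behavior: once a DistinctPrefixFilter is attached, any LIMIT is pushed to the server as a PageFilter, with no special case for ungrouped aggregates. The sketch below illustrates the filter composition this relies on, using plain HBase client types; the helper name and call site are hypothetical, and it only approximates what ScanUtil.andFilterAtEnd does rather than quoting it:

    import org.apache.hadoop.hbase.client.Scan;
    import org.apache.hadoop.hbase.filter.Filter;
    import org.apache.hadoop.hbase.filter.FilterList;
    import org.apache.hadoop.hbase.filter.PageFilter;

    public class LimitPushdownSketch {
        // Hypothetical helper: AND a PageFilter onto whatever filters the
        // scan already carries, mirroring the shape of the restored code path.
        public static Scan pushLimit(Scan scan, long limit) {
            FilterList filters = new FilterList(FilterList.Operator.MUST_PASS_ALL);
            Filter existing = scan.getFilter();
            if (existing != null) {
                filters.addFilter(existing);
            }
            // PageFilter caps the number of rows returned per region server,
            // so it is only safe when truncating the scan cannot change the
            // query result; for an ungrouped COUNT(DISTINCT ...) it can,
            // which is what PHOENIX-5217 reported and what the now-reverted
            // change special-cased.
            filters.addFilter(new PageFilter(limit));
            scan.setFilter(filters);
            return scan;
        }
    }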
diff --git a/phoenix-core/src/test/java/org/apache/phoenix/compile/QueryCompilerTest.java b/phoenix-core/src/test/java/org/apache/phoenix/compile/QueryCompilerTest.java
index d645995..bb18c29 100644
--- a/phoenix-core/src/test/java/org/apache/phoenix/compile/QueryCompilerTest.java
+++ b/phoenix-core/src/test/java/org/apache/phoenix/compile/QueryCompilerTest.java
@@ -48,7 +48,6 @@ import org.apache.hadoop.hbase.HRegionLocation;
import org.apache.hadoop.hbase.client.Scan;
import org.apache.hadoop.hbase.filter.Filter;
import org.apache.hadoop.hbase.filter.FirstKeyOnlyFilter;
-import org.apache.hadoop.hbase.filter.PageFilter;
import org.apache.hadoop.hbase.util.Bytes;
import org.apache.phoenix.compile.OrderByCompiler.OrderBy;
import org.apache.phoenix.coprocessor.BaseScannerRegionObserver;
@@ -5961,28 +5960,4 @@ public class QueryCompilerTest extends BaseConnectionlessQueryTest {
plan.contains("SERVER SORTED BY"));
}
}
-
- @Test
- public void testDistinctCountLimitBug5217() throws Exception {
- Connection conn = null;
- try {
- conn = DriverManager.getConnection(getUrl());
- String tableName = generateUniqueName();
- String sql = "create table " + tableName + "( "+
- " pk1 integer not null , " +
- " pk2 integer not null, " +
- " v integer, " +
- " CONSTRAINT TEST_PK PRIMARY KEY (pk1,pk2))";
- conn.createStatement().execute(sql);
-
- sql = "select count(distinct pk1) from " + tableName + " limit 1";
- QueryPlan plan = TestUtil.getOptimizeQueryPlan(conn, sql);
- Scan scan = plan.getContext().getScan();
- assertFalse(TestUtil.hasFilter(scan, PageFilter.class));
- } finally {
- if(conn!=null) {
- conn.close();
- }
- }
- }
}
diff --git a/phoenix-core/src/test/java/org/apache/phoenix/util/TestUtil.java b/phoenix-core/src/test/java/org/apache/phoenix/util/TestUtil.java
index 40563d5..40b9cfb 100644
--- a/phoenix-core/src/test/java/org/apache/phoenix/util/TestUtil.java
+++ b/phoenix-core/src/test/java/org/apache/phoenix/util/TestUtil.java
@@ -68,7 +68,6 @@ import org.apache.hadoop.hbase.client.TableDescriptor;
import org.apache.hadoop.hbase.client.TableDescriptorBuilder;
import org.apache.hadoop.hbase.client.coprocessor.Batch;
import org.apache.hadoop.hbase.filter.CompareFilter.CompareOp;
-import org.apache.hadoop.hbase.filter.Filter;
import org.apache.hadoop.hbase.io.ImmutableBytesWritable;
import org.apache.hadoop.hbase.ipc.CoprocessorRpcUtils.BlockingRpcCallback;
import org.apache.hadoop.hbase.ipc.ServerRpcController;
@@ -1127,15 +1126,4 @@ public class TestUtil {
return -1;
}
}
-
- public static boolean hasFilter(Scan scan, Class<? extends Filter> filterClass) {
- Iterator<Filter> filterIter = ScanUtil.getFilterIterator(scan);
- while(filterIter.hasNext()) {
- Filter filter = filterIter.next();
- if(filterClass.isInstance(filter)) {
- return true;
- }
- }
- return false;
- }
}