This is an automated email from the ASF dual-hosted git repository.

dataroaring pushed a commit to branch master
in repository https://gitbox.apache.org/repos/asf/doris.git


The following commit(s) were added to refs/heads/master by this push:
     new 6503aaf7db9 [feature](planner) allow `HLL` and `QUANTILE_STATE` types 
on duplicate and unique table (#28546)
6503aaf7db9 is described below

commit 6503aaf7db9d33d4dc23a9cb117987f6ff9a6b86
Author: bobhan1 <[email protected]>
AuthorDate: Tue Dec 19 09:54:24 2023 +0800

    [feature](planner) allow `HLL` and `QUANTILE_STATE` types on duplicate and 
unique table (#28546)
---
 be/src/olap/rowset/segment_v2/segment_writer.cpp   |   9 +-
 .../rowset/segment_v2/vertical_segment_writer.cpp  |   8 +-
 be/src/vec/functions/function.h                    |   3 +-
 be/src/vec/functions/function_case.h               |   1 +
 .../sql-manual/sql-reference/Data-Types/HLL.md     |   2 +-
 .../sql-reference/Data-Types/QUANTILE_STATE.md     |   9 +-
 .../sql-manual/sql-reference/Data-Types/HLL.md     |  14 +-
 .../sql-reference/Data-Types/QUANTILE_STATE.md     |  38 ++---
 .../org/apache/doris/analysis/CreateTableStmt.java |   8 -
 .../org/apache/doris/catalog/AggregateType.java    |   5 +-
 .../plans/commands/info/ColumnDefinition.java      |  16 +-
 .../apache/doris/analysis/CreateTableStmtTest.java |  13 --
 .../duplicate/storage/test_duplicate_hll.out       | 113 +++++++++++++
 .../storage/test_duplicate_quantile_state.out      |  85 ++++++++++
 .../data_model_p0/unique/test_unique_bitmap.out    |  20 +++
 .../data/data_model_p0/unique/test_unique_hll.out  | 185 +++++++++++++++++++++
 .../unique/test_unique_quantile_state.out          | 121 ++++++++++++++
 .../duplicate/storage/test_duplicate_bitmap.groovy | 103 +++++-------
 .../duplicate/storage/test_duplicate_hll.groovy    |  81 +++++++++
 .../storage/test_duplicate_quantile_state.groovy   |  77 +++++++++
 .../data_model_p0/unique/test_unique_bitmap.groovy |  73 ++++----
 .../data_model_p0/unique/test_unique_hll.groovy    |  83 +++++++++
 .../unique/test_unique_quantile_state.groovy       |  79 +++++++++
 23 files changed, 970 insertions(+), 176 deletions(-)
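
This patch removes the planner restriction that HLL and QUANTILE_STATE value columns may only appear in AGG_KEYS tables. A minimal sketch of a schema that the change permits, assuming illustrative table and column names (adapted from the regression tests added in this patch):

    CREATE TABLE IF NOT EXISTS dup_hll_qs_example (
        k INT,
        h HLL,
        q QUANTILE_STATE
    ) DUPLICATE KEY(k)
    DISTRIBUTED BY HASH(k) BUCKETS 1
    PROPERTIES ("replication_num" = "1");

Before this change, analysis of such a statement failed with "column:... must be used in AGG_KEYS."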

diff --git a/be/src/olap/rowset/segment_v2/segment_writer.cpp 
b/be/src/olap/rowset/segment_v2/segment_writer.cpp
index cf208f08dc4..eece72cf5af 100644
--- a/be/src/olap/rowset/segment_v2/segment_writer.cpp
+++ b/be/src/olap/rowset/segment_v2/segment_writer.cpp
@@ -178,10 +178,8 @@ Status SegmentWriter::init(const std::vector<uint32_t>& 
col_ids, bool has_key) {
         init_column_meta(opts.meta, cid, column, _tablet_schema);
 
         // now we create zone map for key columns in AGG_KEYS or all column in 
UNIQUE_KEYS or DUP_KEYS
-        // and not support zone map for array type and jsonb type.
-        opts.need_zone_map =
-                (column.is_key() || _tablet_schema->keys_type() != 
KeysType::AGG_KEYS) &&
-                column.type() != FieldType::OLAP_FIELD_TYPE_OBJECT;
+        // except for columns whose type doesn't support zone map.
+        opts.need_zone_map = column.is_key() || _tablet_schema->keys_type() != 
KeysType::AGG_KEYS;
         opts.need_bloom_filter = column.is_bf_column();
         auto* tablet_index = 
_tablet_schema->get_ngram_bf_index(column.unique_id());
         if (tablet_index) {
@@ -234,6 +232,9 @@ Status SegmentWriter::init(const std::vector<uint32_t>& 
col_ids, bool has_key) {
         CHECK_FIELD_TYPE(AGG_STATE, "agg_state")
         CHECK_FIELD_TYPE(MAP, "map")
         CHECK_FIELD_TYPE(VARIANT, "variant")
+        CHECK_FIELD_TYPE(OBJECT, "object")
+        CHECK_FIELD_TYPE(HLL, "hll")
+        CHECK_FIELD_TYPE(QUANTILE_STATE, "quantile_state")
 
 #undef CHECK_FIELD_TYPE
 
diff --git a/be/src/olap/rowset/segment_v2/vertical_segment_writer.cpp 
b/be/src/olap/rowset/segment_v2/vertical_segment_writer.cpp
index 766c3e47a78..54a0e9ad2dd 100644
--- a/be/src/olap/rowset/segment_v2/vertical_segment_writer.cpp
+++ b/be/src/olap/rowset/segment_v2/vertical_segment_writer.cpp
@@ -131,9 +131,8 @@ Status 
VerticalSegmentWriter::_create_column_writer(uint32_t cid, const TabletCo
     _init_column_meta(opts.meta, cid, column);
 
     // now we create zone map for key columns in AGG_KEYS or all column in 
UNIQUE_KEYS or DUP_KEYS
-    // and not support zone map for array type and jsonb type.
-    opts.need_zone_map = (column.is_key() || _tablet_schema->keys_type() != 
KeysType::AGG_KEYS) &&
-                         column.type() != FieldType::OLAP_FIELD_TYPE_OBJECT;
+    // except for columns whose type doesn't support zone map.
+    opts.need_zone_map = column.is_key() || _tablet_schema->keys_type() != 
KeysType::AGG_KEYS;
     opts.need_bloom_filter = column.is_bf_column();
     auto* tablet_index = 
_tablet_schema->get_ngram_bf_index(column.unique_id());
     if (tablet_index) {
@@ -186,6 +185,9 @@ Status 
VerticalSegmentWriter::_create_column_writer(uint32_t cid, const TabletCo
     CHECK_FIELD_TYPE(AGG_STATE, "agg_state")
     CHECK_FIELD_TYPE(MAP, "map")
     CHECK_FIELD_TYPE(VARIANT, "variant")
+    CHECK_FIELD_TYPE(OBJECT, "object")
+    CHECK_FIELD_TYPE(HLL, "hll")
+    CHECK_FIELD_TYPE(QUANTILE_STATE, "quantile_state")
 
 #undef CHECK_FIELD_TYPE
 
diff --git a/be/src/vec/functions/function.h b/be/src/vec/functions/function.h
index cb8ff34cdbb..44a629bf248 100644
--- a/be/src/vec/functions/function.h
+++ b/be/src/vec/functions/function.h
@@ -716,7 +716,8 @@ ColumnPtr wrap_in_nullable(const ColumnPtr& src, const 
Block& block, const Colum
     M(Struct, ColumnStruct)            \
     M(VARIANT, ColumnObject)           \
     M(BitMap, ColumnBitmap)            \
-    M(HLL, ColumnHLL)
+    M(HLL, ColumnHLL)                  \
+    M(QuantileState, ColumnQuantileState)
 
 #define TYPE_TO_BASIC_COLUMN_TYPE(M) \
     NUMERIC_TYPE_TO_COLUMN_TYPE(M)   \
diff --git a/be/src/vec/functions/function_case.h 
b/be/src/vec/functions/function_case.h
index 1360e47aa6a..2ecc6bd186d 100644
--- a/be/src/vec/functions/function_case.h
+++ b/be/src/vec/functions/function_case.h
@@ -261,6 +261,7 @@ public:
                       std::is_same_v<ColumnType, ColumnStruct> ||
                       std::is_same_v<ColumnType, ColumnObject> ||
                       std::is_same_v<ColumnType, ColumnHLL> ||
+                      std::is_same_v<ColumnType, ColumnQuantileState> ||
                       std::is_same_v<ColumnType, ColumnIPv4> ||
                       std::is_same_v<ColumnType, ColumnIPv6>) {
             // result_column and all then_column is not nullable.
diff --git a/docs/en/docs/sql-manual/sql-reference/Data-Types/HLL.md 
b/docs/en/docs/sql-manual/sql-reference/Data-Types/HLL.md
index 65e452f5464..9cfe4d5e0e8 100644
--- a/docs/en/docs/sql-manual/sql-reference/Data-Types/HLL.md
+++ b/docs/en/docs/sql-manual/sql-reference/Data-Types/HLL.md
@@ -28,7 +28,7 @@ under the License.
 ### Description
 HLL
 
-HLL cannot be used as a key column, and the aggregation type is HLL_UNION when 
create table.
+HLL cannot be used as a key column. Columns of the HLL type can be used in Aggregate tables, Duplicate tables and Unique tables. When used in an Aggregate table, the aggregation type must be HLL_UNION when creating the table.
 The user does not need to specify the length and default value. 
 The length is controlled within the system according to the degree of data 
aggregation.
 And HLL columns can only be queried or used through the matching 
hll_union_agg, hll_raw_agg, hll_cardinality, and hll_hash.
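
As a usage sketch of the documented behavior (table and column names are illustrative, adapted from the new regression tests), an HLL column on a Unique table is loaded with hll_hash() and read back through the matching HLL functions:

    CREATE TABLE IF NOT EXISTS uniq_hll_example (
        k INT,
        v HLL
    ) UNIQUE KEY(k)
    DISTRIBUTED BY HASH(k) BUCKETS 1
    PROPERTIES ("replication_num" = "1");

    INSERT INTO uniq_hll_example VALUES (1, hll_hash(100)), (2, hll_empty());
    INSERT INTO uniq_hll_example VALUES (2, hll_hash(200));   -- replaces the previous value for k = 2
    SELECT k, hll_cardinality(v) FROM uniq_hll_example ORDER BY k;
    SELECT hll_union_agg(v) FROM uniq_hll_example;
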
diff --git a/docs/en/docs/sql-manual/sql-reference/Data-Types/QUANTILE_STATE.md 
b/docs/en/docs/sql-manual/sql-reference/Data-Types/QUANTILE_STATE.md
index 8dfe5463173..e53c63ce5f2 100644
--- a/docs/en/docs/sql-manual/sql-reference/Data-Types/QUANTILE_STATE.md
+++ b/docs/en/docs/sql-manual/sql-reference/Data-Types/QUANTILE_STATE.md
@@ -31,10 +31,11 @@ QUANTILE_STATE
 
 **In 2.0, we support the [agg_state](AGG_STATE.md) function, and it is 
recommended to use agg_state quantile_union(quantile_state not null) instead of 
this type.**
 
-    QUANTILE_STATE cannot be used as a key column, and the aggregation type is 
QUANTILE_UNION when building the table.
-    The user does not need to specify the length and default value. The length 
is controlled within the system according to the degree of data aggregation.
-    And the QUANTILE_STATE column can only be queried or used through the 
supporting QUANTILE_PERCENT, QUANTILE_UNION and TO_QUANTILE_STATE functions.    
-    QUANTILE_STATE is a type for calculating the approximate value of 
quantiles. Different values with the same key are pre-aggregated during loading 
process. When the number of aggregated values does not exceed 2048, all data 
are recorded in detail. When the number of aggregated values is greater than 
2048, [TDigest] is used. 
(https://github.com/tdunning/t-digest/blob/main/docs/t-digest-paper/histo.pdf) 
algorithm to aggregate (cluster) the data and save the centroid points after 
clustering.
+QUANTILE_STATE cannot be used as a key column. Columns of the QUANTILE_STATE type can be used in Aggregate tables, Duplicate tables and Unique tables. When used in an Aggregate table, the aggregation type must be QUANTILE_UNION when creating the table.
+
+The user does not need to specify the length and default value. The length is 
controlled within the system according to the degree of data aggregation.
+And the QUANTILE_STATE column can only be queried or used through the 
supporting QUANTILE_PERCENT, QUANTILE_UNION and TO_QUANTILE_STATE functions.    
+QUANTILE_STATE is a type for calculating the approximate value of quantiles. Different values with the same key are pre-aggregated during the loading process. When the number of aggregated values does not exceed 2048, all data are recorded in detail. When the number of aggregated values is greater than 2048, the [TDigest](https://github.com/tdunning/t-digest/blob/main/docs/t-digest-paper/histo.pdf) algorithm is used to aggregate (cluster) the data and save the centroid points after clustering.
 
 related functions:
     
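A comparable sketch for QUANTILE_STATE on a Duplicate table, using the QUANTILE_PERCENT, QUANTILE_UNION and TO_QUANTILE_STATE functions mentioned above (table and column names are illustrative, adapted from the new regression tests):

    CREATE TABLE IF NOT EXISTS dup_qs_example (
        k INT,
        v QUANTILE_STATE
    ) DUPLICATE KEY(k)
    DISTRIBUTED BY HASH(k) BUCKETS 1
    PROPERTIES ("replication_num" = "1");

    INSERT INTO dup_qs_example VALUES
        (1, to_quantile_state(-1, 2048)),
        (2, to_quantile_state(0, 2048)),
        (2, to_quantile_state(1, 2048));

    -- merge the per-row states for each key and read the median
    SELECT k, quantile_percent(quantile_union(v), 0.5) FROM dup_qs_example GROUP BY k ORDER BY k;
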
diff --git a/docs/zh-CN/docs/sql-manual/sql-reference/Data-Types/HLL.md 
b/docs/zh-CN/docs/sql-manual/sql-reference/Data-Types/HLL.md
index 0290eb82b8b..dfc44723448 100644
--- a/docs/zh-CN/docs/sql-manual/sql-reference/Data-Types/HLL.md
+++ b/docs/zh-CN/docs/sql-manual/sql-reference/Data-Types/HLL.md
@@ -26,13 +26,13 @@ under the License.
 
 ## HLL(HyperLogLog)
 ### description
-    HLL
-    HLL不能作为key列使用,建表时配合聚合类型为HLL_UNION。
-    用户不需要指定长度和默认值。长度根据数据的聚合程度系统内控制。
-    并且HLL列只能通过配套的hll_union_agg、hll_raw_agg、hll_cardinality、hll_hash进行查询或使用。
-    
-    HLL是模糊去重,在数据量大的情况性能优于Count Distinct。
-    HLL的误差通常在1%左右,有时会达到2%。
+HLL
+HLL不能作为key列使用,支持在Aggregate模型、Duplicate模型和Unique模型的表中使用。在Aggregate模型表中使用时,建表时配合的聚合类型为HLL_UNION。
+用户不需要指定长度和默认值。长度根据数据的聚合程度系统内控制。
+并且HLL列只能通过配套的hll_union_agg、hll_raw_agg、hll_cardinality、hll_hash进行查询或使用。
+
+HLL是模糊去重,在数据量大的情况性能优于Count Distinct。
+HLL的误差通常在1%左右,有时会达到2%。
 
 ### example
 
diff --git 
a/docs/zh-CN/docs/sql-manual/sql-reference/Data-Types/QUANTILE_STATE.md 
b/docs/zh-CN/docs/sql-manual/sql-reference/Data-Types/QUANTILE_STATE.md
index b249dbe5e41..c91befee895 100644
--- a/docs/zh-CN/docs/sql-manual/sql-reference/Data-Types/QUANTILE_STATE.md
+++ b/docs/zh-CN/docs/sql-manual/sql-reference/Data-Types/QUANTILE_STATE.md
@@ -28,27 +28,27 @@ under the License.
 ### description
     QUANTILE_STATE
 
-  **在2.0中我们支持了[agg_state](AGG_STATE.md)功能,推荐使用agg_state 
quantile_union(quantile_state not null)来代替本类型。**
+**在2.0中我们支持了[agg_state](AGG_STATE.md)功能,推荐使用agg_state 
quantile_union(quantile_state not null)来代替本类型。**
 
-    QUANTILE_STATE不能作为key列使用,建表时配合聚合类型为QUANTILE_UNION。
-    用户不需要指定长度和默认值。长度根据数据的聚合程度系统内控制。
-    
并且QUANTILE_STATE列只能通过配套的QUANTILE_PERCENT、QUANTILE_UNION、TO_QUANTILE_STATE等函数进行查询或使用。
-    
-    QUANTILE_STATE 是一种计算分位数近似值的类型,在导入时会对相同的key,不同 value 
进行预聚合,当value数量不超过2048时采用明细记录所有数据,当 value 数量大于2048时采用 
[TDigest](https://github.com/tdunning/t-digest/blob/main/docs/t-digest-paper/histo.pdf)
 算法,对数据进行聚合(聚类)保存聚类后的质心点。
+QUANTILE_STATE不能作为key列使用,支持在Aggregate模型、Duplicate模型和Unique模型的表中使用。在Aggregate模型表中使用时,建表时配合的聚合类型为QUANTILE_UNION。
+用户不需要指定长度和默认值。长度根据数据的聚合程度系统内控制。
+并且QUANTILE_STATE列只能通过配套的QUANTILE_PERCENT、QUANTILE_UNION、TO_QUANTILE_STATE等函数进行查询或使用。
 
-    相关函数:
-    
-      QUANTILE_UNION(QUANTILE_STATE):
-      此函数为聚合函数,用于将不同的分位数计算中间结果进行聚合操作。此函数返回的结果仍是QUANTILE_STATE
-    
-      
-      TO_QUANTILE_STATE(DOUBLE raw_data [,FLOAT compression]):
-      此函数将数值类型转化成QUANTILE_STATE类型
-      compression参数是可选项,可设置范围是[2048, 10000],值越大,后续分位数近似计算的精度越高,内存消耗越大,计算耗时越长。 
-      compression参数未指定或设置的值在[2048, 10000]范围外,以2048的默认值运行
-
-      QUANTILE_PERCENT(QUANTILE_STATE, percent):
-      此函数将分位数计算的中间结果变量(QUANTILE_STATE)转化为具体的分位数数值
+QUANTILE_STATE 是一种计算分位数近似值的类型,在导入时会对相同的key,不同 value 
进行预聚合,当value数量不超过2048时采用明细记录所有数据,当 value 数量大于2048时采用 
[TDigest](https://github.com/tdunning/t-digest/blob/main/docs/t-digest-paper/histo.pdf)
 算法,对数据进行聚合(聚类)保存聚类后的质心点。
+
+相关函数:
+
+  QUANTILE_UNION(QUANTILE_STATE):
+  此函数为聚合函数,用于将不同的分位数计算中间结果进行聚合操作。此函数返回的结果仍是QUANTILE_STATE
+
+  
+  TO_QUANTILE_STATE(DOUBLE raw_data [,FLOAT compression]):
+  此函数将数值类型转化成QUANTILE_STATE类型
+  compression参数是可选项,可设置范围是[2048, 10000],值越大,后续分位数近似计算的精度越高,内存消耗越大,计算耗时越长。 
+  compression参数未指定或设置的值在[2048, 10000]范围外,以2048的默认值运行
+
+  QUANTILE_PERCENT(QUANTILE_STATE, percent):
+  此函数将分位数计算的中间结果变量(QUANTILE_STATE)转化为具体的分位数数值
 
     
 
diff --git 
a/fe/fe-core/src/main/java/org/apache/doris/analysis/CreateTableStmt.java 
b/fe/fe-core/src/main/java/org/apache/doris/analysis/CreateTableStmt.java
index 8ecf6868053..3bd12b64eef 100644
--- a/fe/fe-core/src/main/java/org/apache/doris/analysis/CreateTableStmt.java
+++ b/fe/fe-core/src/main/java/org/apache/doris/analysis/CreateTableStmt.java
@@ -521,14 +521,6 @@ public class CreateTableStmt extends DdlStmt {
                 throw new AnalysisException("Time type is not supported for 
olap table");
             }
 
-            if (columnDef.getType().isObjectStored()) {
-                if (!columnDef.getType().isBitmapType()) {
-                    if (keysDesc.getKeysType() != KeysType.AGG_KEYS) {
-                        throw new AnalysisException("column:" + 
columnDef.getName() + " must be used in AGG_KEYS.");
-                    }
-                }
-            }
-
             if (!columnSet.add(columnDef.getName())) {
                 
ErrorReport.reportAnalysisException(ErrorCode.ERR_DUP_FIELDNAME, 
columnDef.getName());
             }
diff --git 
a/fe/fe-core/src/main/java/org/apache/doris/catalog/AggregateType.java 
b/fe/fe-core/src/main/java/org/apache/doris/catalog/AggregateType.java
index f86ea7855eb..47ee91c0b5c 100644
--- a/fe/fe-core/src/main/java/org/apache/doris/catalog/AggregateType.java
+++ b/fe/fe-core/src/main/java/org/apache/doris/catalog/AggregateType.java
@@ -115,11 +115,8 @@ public enum AggregateType {
         compatibilityMap.put(MAX, EnumSet.copyOf(primitiveTypeList));
 
         primitiveTypeList.clear();
-        // all types except object stored column type, such as bitmap hll
-        // quantile_state.
+        // all types except agg_state.
         EnumSet<PrimitiveType> excObjectStored = 
EnumSet.allOf(PrimitiveType.class);
-        excObjectStored.remove(PrimitiveType.HLL);
-        excObjectStored.remove(PrimitiveType.QUANTILE_STATE);
         excObjectStored.remove(PrimitiveType.AGG_STATE);
         compatibilityMap.put(REPLACE, EnumSet.copyOf(excObjectStored));
 
diff --git 
a/fe/fe-core/src/main/java/org/apache/doris/nereids/trees/plans/commands/info/ColumnDefinition.java
 
b/fe/fe-core/src/main/java/org/apache/doris/nereids/trees/plans/commands/info/ColumnDefinition.java
index 86d48556f87..2736d71a50c 100644
--- 
a/fe/fe-core/src/main/java/org/apache/doris/nereids/trees/plans/commands/info/ColumnDefinition.java
+++ 
b/fe/fe-core/src/main/java/org/apache/doris/nereids/trees/plans/commands/info/ColumnDefinition.java
@@ -191,13 +191,7 @@ public class ColumnDefinition {
                 throw new AnalysisException("Type exceeds the maximum nesting 
depth of 9");
             }
         }
-        if (type.isHllType() || type.isQuantileStateType()) {
-            if (aggType == null) {
-                throw new AnalysisException("column: " + name + " must be used 
in AGG_KEYS.");
-            }
-            isNullable = false;
-        }
-        if (type.isBitmapType()) {
+        if (type.isHllType() || type.isQuantileStateType() || 
type.isBitmapType()) {
             if (aggType != null) {
                 isNullable = false;
             }
@@ -349,14 +343,6 @@ public class ColumnDefinition {
         if (type.isTimeLikeType()) {
             throw new AnalysisException("Time type is not supported for olap 
table");
         }
-
-        if (type.isObjectType()) {
-            if (!type.isBitmapType()) {
-                if (keysType != KeysType.AGG_KEYS) {
-                    throw new AnalysisException("column:" + name + " must be 
used in AGG_KEYS.");
-                }
-            }
-        }
     }
 
     /**
diff --git 
a/fe/fe-core/src/test/java/org/apache/doris/analysis/CreateTableStmtTest.java 
b/fe/fe-core/src/test/java/org/apache/doris/analysis/CreateTableStmtTest.java
index 539c42dbd7b..30da4033976 100644
--- 
a/fe/fe-core/src/test/java/org/apache/doris/analysis/CreateTableStmtTest.java
+++ 
b/fe/fe-core/src/test/java/org/apache/doris/analysis/CreateTableStmtTest.java
@@ -355,19 +355,6 @@ public class CreateTableStmtTest {
         stmt.analyze(analyzer);
     }
 
-    @Test
-    public void testHllNoAggTab() throws Exception {
-        ColumnDef hll = new ColumnDef("col3", new 
TypeDef(ScalarType.createType(PrimitiveType.HLL)));
-        cols.add(hll);
-        CreateTableStmt stmt = new CreateTableStmt(false, false, tblNameNoDb, 
cols, "olap",
-                        new KeysDesc(KeysType.DUP_KEYS, colsName), null, new 
RandomDistributionDesc(10),
-                                        null, null, "");
-        expectedEx.expect(AnalysisException.class);
-        expectedEx.expectMessage(
-                "Aggregate type `col3` HLL NONE NOT NULL COMMENT \"\" is not 
compatible with primitive type HLL");
-        stmt.analyze(analyzer);
-    }
-
     @Test
     public void testBmpHllIncAgg() throws Exception {
         ColumnDef bitmap = new ColumnDef("col3", new 
TypeDef(ScalarType.createType(PrimitiveType.BITMAP)));
diff --git 
a/regression-test/data/data_model_p0/duplicate/storage/test_duplicate_hll.out 
b/regression-test/data/data_model_p0/duplicate/storage/test_duplicate_hll.out
new file mode 100644
index 00000000000..deeceba2986
--- /dev/null
+++ 
b/regression-test/data/data_model_p0/duplicate/storage/test_duplicate_hll.out
@@ -0,0 +1,113 @@
+-- This file is automatically generated. You should know what you did if you 
want to edit this
+-- !sql --
+1      0
+2      6
+3      2
+
+-- !sql --
+7
+
+-- !from_agg --
+1      0
+2      6
+3      2
+
+-- !from_agg --
+7
+
+-- !from_values --
+1      0
+1      1
+2      1
+2      6
+3      2
+4      1
+
+-- !from_values --
+1      1
+2      6
+3      2
+4      1
+
+-- !from_values --
+8
+
+-- !from_dup --
+1      0
+1      0
+1      1
+1      1
+2      1
+2      1
+2      6
+2      6
+3      2
+3      2
+4      1
+4      1
+
+-- !from_dup --
+1      1
+2      6
+3      2
+4      1
+
+-- !from_dup --
+8
+
+-- !sql --
+1      0
+2      6
+3      2
+
+-- !sql --
+7
+
+-- !from_agg --
+1      0
+2      6
+3      2
+
+-- !from_agg --
+7
+
+-- !from_values --
+1      0
+1      1
+2      1
+2      6
+3      2
+4      1
+
+-- !from_values --
+1      1
+2      6
+3      2
+4      1
+
+-- !from_values --
+8
+
+-- !from_dup --
+1      0
+1      0
+1      1
+1      1
+2      1
+2      1
+2      6
+2      6
+3      2
+3      2
+4      1
+4      1
+
+-- !from_dup --
+1      1
+2      6
+3      2
+4      1
+
+-- !from_dup --
+8
+
diff --git 
a/regression-test/data/data_model_p0/duplicate/storage/test_duplicate_quantile_state.out
 
b/regression-test/data/data_model_p0/duplicate/storage/test_duplicate_quantile_state.out
new file mode 100644
index 00000000000..d75ad2aa66c
--- /dev/null
+++ 
b/regression-test/data/data_model_p0/duplicate/storage/test_duplicate_quantile_state.out
@@ -0,0 +1,85 @@
+-- This file is automatically generated. You should know what you did if you 
want to edit this
+-- !sql --
+1      -1.0    -1.0    -1.0
+2      0.0     0.5     1.0
+3      0.0     1.0     2.0
+
+-- !from_agg --
+1      -1.0    -1.0    -1.0
+2      0.0     0.5     1.0
+3      0.0     1.0     2.0
+
+-- !from_values --
+1      -2.0    -2.0    -2.0
+1      -1.0    -1.0    -1.0
+1      0.0     0.0     0.0
+2      -100.0  -100.0  -100.0
+2      0.0     0.5     1.0
+3      0.0     1.0     2.0
+
+-- !from_values --
+1      -2.0    -1.0    0.0
+2      -100.0  0.0     1.0
+3      0.0     1.0     2.0
+
+-- !from_dup --
+1      -2.0    -2.0    -2.0
+1      -2.0    -2.0    -2.0
+1      -1.0    -1.0    -1.0
+1      -1.0    -1.0    -1.0
+1      0.0     0.0     0.0
+1      0.0     0.0     0.0
+2      -100.0  -100.0  -100.0
+2      -100.0  -100.0  -100.0
+2      0.0     0.5     1.0
+2      0.0     0.5     1.0
+3      0.0     1.0     2.0
+3      0.0     1.0     2.0
+
+-- !from_dup --
+1      -2.0    -1.0    0.0
+2      -100.0  0.0     1.0
+3      0.0     1.0     2.0
+
+-- !sql --
+1      -1.0    -1.0    -1.0
+2      0.0     0.5     1.0
+3      0.0     1.0     2.0
+
+-- !from_agg --
+1      -1.0    -1.0    -1.0
+2      0.0     0.5     1.0
+3      0.0     1.0     2.0
+
+-- !from_values --
+1      -2.0    -2.0    -2.0
+1      -1.0    -1.0    -1.0
+1      0.0     0.0     0.0
+2      -100.0  -100.0  -100.0
+2      0.0     0.5     1.0
+3      0.0     1.0     2.0
+
+-- !from_values --
+1      -2.0    -1.0    0.0
+2      -100.0  0.0     1.0
+3      0.0     1.0     2.0
+
+-- !from_dup --
+1      -2.0    -2.0    -2.0
+1      -2.0    -2.0    -2.0
+1      -1.0    -1.0    -1.0
+1      -1.0    -1.0    -1.0
+1      0.0     0.0     0.0
+1      0.0     0.0     0.0
+2      -100.0  -100.0  -100.0
+2      -100.0  -100.0  -100.0
+2      0.0     0.5     1.0
+2      0.0     0.5     1.0
+3      0.0     1.0     2.0
+3      0.0     1.0     2.0
+
+-- !from_dup --
+1      -2.0    -1.0    0.0
+2      -100.0  0.0     1.0
+3      0.0     1.0     2.0
+
diff --git a/regression-test/data/data_model_p0/unique/test_unique_bitmap.out 
b/regression-test/data/data_model_p0/unique/test_unique_bitmap.out
index 85f559e2387..2240e4201ad 100644
--- a/regression-test/data/data_model_p0/unique/test_unique_bitmap.out
+++ b/regression-test/data/data_model_p0/unique/test_unique_bitmap.out
@@ -19,3 +19,23 @@
 2      2       3,1000
 3      8       0,1,2,3,5,99,876,2445
 
+-- !sql --
+1      1       1
+2      2       3,1000
+3      3       999,1000,888888
+
+-- !sql --
+1      4       5,90,876,1000
+2      2       3,1000
+3      8       0,1,2,3,5,99,876,2445
+
+-- !sql --
+1      1       1
+2      2       3,1000
+3      3       999,1000,888888
+
+-- !sql --
+1      4       5,90,876,1000
+2      2       3,1000
+3      8       0,1,2,3,5,99,876,2445
+
diff --git a/regression-test/data/data_model_p0/unique/test_unique_hll.out 
b/regression-test/data/data_model_p0/unique/test_unique_hll.out
new file mode 100644
index 00000000000..c6bc798fa3d
--- /dev/null
+++ b/regression-test/data/data_model_p0/unique/test_unique_hll.out
@@ -0,0 +1,185 @@
+-- This file is automatically generated. You should know what you did if you 
want to edit this
+-- !sql --
+1      0
+2      6
+3      2
+
+-- !sql --
+7
+
+-- !from_agg --
+1      0
+2      6
+3      2
+
+-- !from_agg --
+7
+
+-- !from_values --
+1      1
+2      1
+3      2
+4      1
+
+-- !from_values --
+1      1
+2      1
+3      2
+4      1
+
+-- !from_values --
+4
+
+-- !from_uniq --
+1      1
+2      1
+3      2
+4      1
+
+-- !from_uniq --
+1      1
+2      1
+3      2
+4      1
+
+-- !from_uniq --
+4
+
+-- !sql --
+1      0
+2      6
+3      2
+
+-- !sql --
+7
+
+-- !from_agg --
+1      0
+2      6
+3      2
+
+-- !from_agg --
+7
+
+-- !from_values --
+1      1
+2      1
+3      2
+4      1
+
+-- !from_values --
+1      1
+2      1
+3      2
+4      1
+
+-- !from_values --
+4
+
+-- !from_uniq --
+1      1
+2      1
+3      2
+4      1
+
+-- !from_uniq --
+1      1
+2      1
+3      2
+4      1
+
+-- !from_uniq --
+4
+
+-- !sql --
+1      0
+2      6
+3      2
+
+-- !sql --
+7
+
+-- !from_agg --
+1      0
+2      6
+3      2
+
+-- !from_agg --
+7
+
+-- !from_values --
+1      1
+2      1
+3      2
+4      1
+
+-- !from_values --
+1      1
+2      1
+3      2
+4      1
+
+-- !from_values --
+4
+
+-- !from_uniq --
+1      1
+2      1
+3      2
+4      1
+
+-- !from_uniq --
+1      1
+2      1
+3      2
+4      1
+
+-- !from_uniq --
+4
+
+-- !sql --
+1      0
+2      6
+3      2
+
+-- !sql --
+7
+
+-- !from_agg --
+1      0
+2      6
+3      2
+
+-- !from_agg --
+7
+
+-- !from_values --
+1      1
+2      1
+3      2
+4      1
+
+-- !from_values --
+1      1
+2      1
+3      2
+4      1
+
+-- !from_values --
+4
+
+-- !from_uniq --
+1      1
+2      1
+3      2
+4      1
+
+-- !from_uniq --
+1      1
+2      1
+3      2
+4      1
+
+-- !from_uniq --
+4
+
diff --git 
a/regression-test/data/data_model_p0/unique/test_unique_quantile_state.out 
b/regression-test/data/data_model_p0/unique/test_unique_quantile_state.out
new file mode 100644
index 00000000000..280ea0a2cc1
--- /dev/null
+++ b/regression-test/data/data_model_p0/unique/test_unique_quantile_state.out
@@ -0,0 +1,121 @@
+-- This file is automatically generated. You should know what you did if you 
want to edit this
+-- !sql --
+1      -1.0    -1.0    -1.0
+2      0.0     0.5     1.0
+3      0.0     1.0     2.0
+
+-- !from_agg --
+1      -1.0    -1.0    -1.0
+2      0.0     0.5     1.0
+3      0.0     1.0     2.0
+
+-- !from_values --
+1      0.0     0.0     0.0
+2      -100.0  -100.0  -100.0
+3      0.0     1.0     2.0
+
+-- !from_values --
+1      0.0     0.0     0.0
+2      -100.0  -100.0  -100.0
+3      0.0     1.0     2.0
+
+-- !from_uniq --
+1      0.0     0.0     0.0
+2      -100.0  -100.0  -100.0
+3      0.0     1.0     2.0
+
+-- !from_uniq --
+1      0.0     0.0     0.0
+2      -100.0  -100.0  -100.0
+3      0.0     1.0     2.0
+
+-- !sql --
+1      -1.0    -1.0    -1.0
+2      0.0     0.5     1.0
+3      0.0     1.0     2.0
+
+-- !from_agg --
+1      -1.0    -1.0    -1.0
+2      0.0     0.5     1.0
+3      0.0     1.0     2.0
+
+-- !from_values --
+1      0.0     0.0     0.0
+2      -100.0  -100.0  -100.0
+3      0.0     1.0     2.0
+
+-- !from_values --
+1      0.0     0.0     0.0
+2      -100.0  -100.0  -100.0
+3      0.0     1.0     2.0
+
+-- !from_uniq --
+1      0.0     0.0     0.0
+2      -100.0  -100.0  -100.0
+3      0.0     1.0     2.0
+
+-- !from_uniq --
+1      0.0     0.0     0.0
+2      -100.0  -100.0  -100.0
+3      0.0     1.0     2.0
+
+-- !sql --
+1      -1.0    -1.0    -1.0
+2      0.0     0.5     1.0
+3      0.0     1.0     2.0
+
+-- !from_agg --
+1      -1.0    -1.0    -1.0
+2      0.0     0.5     1.0
+3      0.0     1.0     2.0
+
+-- !from_values --
+1      0.0     0.0     0.0
+2      -100.0  -100.0  -100.0
+3      0.0     1.0     2.0
+
+-- !from_values --
+1      0.0     0.0     0.0
+2      -100.0  -100.0  -100.0
+3      0.0     1.0     2.0
+
+-- !from_uniq --
+1      0.0     0.0     0.0
+2      -100.0  -100.0  -100.0
+3      0.0     1.0     2.0
+
+-- !from_uniq --
+1      0.0     0.0     0.0
+2      -100.0  -100.0  -100.0
+3      0.0     1.0     2.0
+
+-- !sql --
+1      -1.0    -1.0    -1.0
+2      0.0     0.5     1.0
+3      0.0     1.0     2.0
+
+-- !from_agg --
+1      -1.0    -1.0    -1.0
+2      0.0     0.5     1.0
+3      0.0     1.0     2.0
+
+-- !from_values --
+1      0.0     0.0     0.0
+2      -100.0  -100.0  -100.0
+3      0.0     1.0     2.0
+
+-- !from_values --
+1      0.0     0.0     0.0
+2      -100.0  -100.0  -100.0
+3      0.0     1.0     2.0
+
+-- !from_uniq --
+1      0.0     0.0     0.0
+2      -100.0  -100.0  -100.0
+3      0.0     1.0     2.0
+
+-- !from_uniq --
+1      0.0     0.0     0.0
+2      -100.0  -100.0  -100.0
+3      0.0     1.0     2.0
+
diff --git 
a/regression-test/suites/data_model_p0/duplicate/storage/test_duplicate_bitmap.groovy
 
b/regression-test/suites/data_model_p0/duplicate/storage/test_duplicate_bitmap.groovy
index 9dc9052a481..36586b7dc30 100644
--- 
a/regression-test/suites/data_model_p0/duplicate/storage/test_duplicate_bitmap.groovy
+++ 
b/regression-test/suites/data_model_p0/duplicate/storage/test_duplicate_bitmap.groovy
@@ -17,73 +17,52 @@
 
 suite("test_duplicate_table_bitmap") {
 
-    // 1. nerieds planner
-    sql "set enable_nereids_planner=true"
-       sql "set enable_fallback_to_original_planner=false"
-    sql "set enable_nereids_dml=true;"
-    sql "sync;"
-    def tbName = "test_duplicate_bitmap1"
-    sql "DROP TABLE IF EXISTS ${tbName}"
-    sql """ CREATE TABLE IF NOT EXISTS ${tbName} (
-                k int,
-                id_bitmap bitmap
-            ) DUPLICATE KEY(k)
-            DISTRIBUTED BY HASH(k) BUCKETS 1 properties("replication_num" = 
"1"); """
-    sql "insert into ${tbName} values(1,to_bitmap(1));"
-    sql "insert into ${tbName} 
values(2,bitmap_or(to_bitmap(3),to_bitmap(1000)));"
-    sql "insert into ${tbName} 
values(3,bitmap_or(to_bitmap(999),to_bitmap(1000),to_bitmap(888888)));"
-    qt_sql "select k,bitmap_count(id_bitmap),bitmap_to_string(id_bitmap) from 
${tbName} order by k, bitmap_count(id_bitmap);"
-    sql "insert into ${tbName} 
values(3,bitmap_from_string('1,0,1,2,3,1,5,99,876,2445'));"
-    sql "insert into ${tbName} 
values(1,bitmap_or(bitmap_from_string('90,5,876'),to_bitmap(1000)));"
-    qt_sql "select k,bitmap_count(id_bitmap),bitmap_to_string(id_bitmap) from 
${tbName} order by k, bitmap_count(id_bitmap);"
-    sql "insert into ${tbName} select * from ${tbName};"
-    qt_sql "select k,bitmap_count(id_bitmap),bitmap_to_string(id_bitmap) from 
${tbName} order by k, bitmap_count(id_bitmap);"
-    sql "DROP TABLE IF EXISTS ${tbName};"
+    for (def use_nereids : [true, false]) {
+        if (use_nereids) {
+            sql "set enable_nereids_planner=true"
+            sql "set enable_fallback_to_original_planner=false"
+            sql "set enable_nereids_dml=true;"
+        } else {
+            sql "set enable_nereids_planner=false"
+            sql "set enable_nereids_dml=false;"
+        }
+        sql "sync;"
 
-    tbName = "test_duplicate_bitmap2"
-    sql "DROP TABLE IF EXISTS ${tbName}"
-    test {
+        def tbName = "test_duplicate_bitmap1"
+        sql "DROP TABLE IF EXISTS ${tbName}"
         sql """ CREATE TABLE IF NOT EXISTS ${tbName} (
-                k bitmap,
-                v int
-            ) DUPLICATE KEY(k)
-            DISTRIBUTED BY HASH(k) BUCKETS 1 properties("replication_num" = 
"1"); """
-        exception "Key column can not set complex type:k"
-    }
+                    k int,
+                    id_bitmap bitmap
+                ) DUPLICATE KEY(k)
+                DISTRIBUTED BY HASH(k) BUCKETS 1 properties("replication_num" 
= "1"); """
 
+        def result = sql "show create table ${tbName}"
+        logger.info("${result}")
+        assertTrue(result.toString().containsIgnoreCase('`id_bitmap` BITMAP 
NOT NULL'))
 
-    // 2. origin planner
-    sql "set enable_nereids_planner=false"
-    sql "set enable_nereids_dml=false;"
-    sql "sync;"
-    tbName = "test_duplicate_bitmap3"
-    sql "DROP TABLE IF EXISTS ${tbName}"
-    sql """ CREATE TABLE IF NOT EXISTS ${tbName} (
-                k int,
-                id_bitmap bitmap
-            ) DUPLICATE KEY(k)
-            DISTRIBUTED BY HASH(k) BUCKETS 1 properties("replication_num" = 
"1"); """
-    sql "insert into ${tbName} values(1,to_bitmap(1));"
-    sql "insert into ${tbName} 
values(2,bitmap_or(to_bitmap(3),to_bitmap(1000)));"
-    sql "insert into ${tbName} 
values(3,bitmap_or(to_bitmap(999),to_bitmap(1000),to_bitmap(888888)));"
-    qt_sql "select k,bitmap_count(id_bitmap),bitmap_to_string(id_bitmap) from 
${tbName} order by k, bitmap_count(id_bitmap);"
-    sql "insert into ${tbName} 
values(3,bitmap_from_string('1,0,1,2,3,1,5,99,876,2445'));"
-    sql "insert into ${tbName} 
values(1,bitmap_or(bitmap_from_string('90,5,876'),to_bitmap(1000)));"
-    qt_sql "select k,bitmap_count(id_bitmap),bitmap_to_string(id_bitmap) from 
${tbName} order by k, bitmap_count(id_bitmap);"
-    sql "insert into ${tbName} select * from ${tbName};"
-    qt_sql "select k,bitmap_count(id_bitmap),bitmap_to_string(id_bitmap) from 
${tbName} order by k, bitmap_count(id_bitmap);"
-    sql "DROP TABLE IF EXISTS ${tbName};"
+        sql "insert into ${tbName} values(1,to_bitmap(1));"
+        sql "insert into ${tbName} 
values(2,bitmap_or(to_bitmap(3),to_bitmap(1000)));"
+        sql "insert into ${tbName} 
values(3,bitmap_or(to_bitmap(999),to_bitmap(1000),to_bitmap(888888)));"
+        qt_sql "select k,bitmap_count(id_bitmap),bitmap_to_string(id_bitmap) 
from ${tbName} order by k, bitmap_count(id_bitmap);"
 
+        sql "insert into ${tbName} 
values(3,bitmap_from_string('1,0,1,2,3,1,5,99,876,2445'));"
+        sql "insert into ${tbName} 
values(1,bitmap_or(bitmap_from_string('90,5,876'),to_bitmap(1000)));"
+        qt_sql "select k,bitmap_count(id_bitmap),bitmap_to_string(id_bitmap) 
from ${tbName} order by k, bitmap_count(id_bitmap);"
 
-    tbName = "test_duplicate_bitmap4"
-    sql "DROP TABLE IF EXISTS ${tbName}"
-    test {
-        sql """ CREATE TABLE IF NOT EXISTS ${tbName} (
-                k bitmap,
-                v int
-            ) DUPLICATE KEY(k)
-            DISTRIBUTED BY HASH(k) BUCKETS 1 properties("replication_num" = 
"1"); """
-        exception "Key column can not set complex type:k"
-    }
+        sql "insert into ${tbName} select * from ${tbName};"
+        qt_sql "select k,bitmap_count(id_bitmap),bitmap_to_string(id_bitmap) 
from ${tbName} order by k, bitmap_count(id_bitmap);"
 
+        sql "DROP TABLE IF EXISTS ${tbName};"
+
+        tbName = "test_duplicate_bitmap2"
+        sql "DROP TABLE IF EXISTS ${tbName}"
+        test {
+            sql """ CREATE TABLE IF NOT EXISTS ${tbName} (
+                    k bitmap,
+                    v int
+                ) DUPLICATE KEY(k)
+                DISTRIBUTED BY HASH(k) BUCKETS 1 properties("replication_num" 
= "1"); """
+            exception "Key column can not set complex type:k"
+        }
+    }
 }
diff --git 
a/regression-test/suites/data_model_p0/duplicate/storage/test_duplicate_hll.groovy
 
b/regression-test/suites/data_model_p0/duplicate/storage/test_duplicate_hll.groovy
new file mode 100644
index 00000000000..5185d5c93ca
--- /dev/null
+++ 
b/regression-test/suites/data_model_p0/duplicate/storage/test_duplicate_hll.groovy
@@ -0,0 +1,81 @@
+// Licensed to the Apache Software Foundation (ASF) under one
+// or more contributor license agreements.  See the NOTICE file
+// distributed with this work for additional information
+// regarding copyright ownership.  The ASF licenses this file
+// to you under the Apache License, Version 2.0 (the
+// "License"); you may not use this file except in compliance
+// with the License.  You may obtain a copy of the License at
+//
+//   http://www.apache.org/licenses/LICENSE-2.0
+//
+// Unless required by applicable law or agreed to in writing,
+// software distributed under the License is distributed on an
+// "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
+// KIND, either express or implied.  See the License for the
+// specific language governing permissions and limitations
+// under the License.
+
+suite("test_duplicate_table_hll") {
+
+    for (def use_nereids : [true, false]) {
+        if (use_nereids) {
+            sql "set enable_nereids_planner=true"
+            sql "set enable_fallback_to_original_planner=false"
+            sql "set enable_nereids_dml=true;"
+        } else {
+            sql "set enable_nereids_planner=false"
+            sql "set enable_nereids_dml=false;"
+        }
+        sql "sync;"
+
+        def tbName = "test_duplicate_hll1"
+        sql "DROP TABLE IF EXISTS ${tbName}"
+        sql """ CREATE TABLE IF NOT EXISTS ${tbName} ( k int, v hll ) 
DUPLICATE KEY(k)
+                DISTRIBUTED BY HASH(k) BUCKETS 1 properties("replication_num" 
= "1"); """
+
+        def result = sql "show create table ${tbName}"
+        logger.info("${result}")
+        assertTrue(result.toString().containsIgnoreCase('`v` HLL NOT NULL'))
+
+        def tbNameAgg = "test_duplicate_hll_agg1"
+        sql "DROP TABLE IF EXISTS ${tbNameAgg}"
+        sql """ CREATE TABLE IF NOT EXISTS ${tbNameAgg} ( k int, v hll 
hll_union ) AGGREGATE KEY(k)
+                DISTRIBUTED BY HASH(k) BUCKETS 1 properties("replication_num" 
= "1"); """
+
+        sql """ insert into ${tbNameAgg} values
+                (1,hll_empty()),(2, hll_hash(100)),
+                (2,hll_hash(0)),(2, hll_hash(4875)),
+                (2,hll_hash(9234)),(2, hll_hash(45)),
+                (2,hll_hash(0)),(2,hll_hash(100000)),
+                (3,hll_hash(0)),(3,hll_hash(1)); """
+        qt_sql "select k, hll_cardinality(v) from ${tbNameAgg} order by k;"
+        qt_sql "select HLL_UNION_AGG(v) from ${tbNameAgg};"
+
+        // 1. insert from aggregate table
+        sql "insert into ${tbName} select * from ${tbNameAgg};"
+        qt_from_agg "select k, hll_cardinality(v) from ${tbName} order by k, 
hll_cardinality(v);"
+        qt_from_agg "select HLL_UNION_AGG(v) from ${tbName};"
+        // 2. insert into values
+        sql """ insert into ${tbName} values (4, hll_hash(100)), (1, 
hll_hash(999)), (2, hll_hash(0));"""
+        qt_from_values "select k, hll_cardinality(v) from ${tbName} order by 
k, hll_cardinality(v);"
+        qt_from_values "select k, hll_cardinality(hll_union(v)) from ${tbName} 
group by k order by k, hll_cardinality(hll_union(v));"
+        qt_from_values "select HLL_UNION_AGG(v) from ${tbName};"
+        // 3. insert from duplicate table
+        sql "insert into ${tbName} select * from ${tbName};"
+        qt_from_dup "select k, hll_cardinality(v) from ${tbName} order by k, 
hll_cardinality(v);"
+        qt_from_dup "select k, hll_cardinality(hll_union(v)) from ${tbName} 
group by k order by k, hll_cardinality(hll_union(v));"
+        qt_from_dup "select HLL_UNION_AGG(v) from ${tbName};"
+
+        sql "DROP TABLE IF EXISTS ${tbName};"
+        sql "DROP TABLE IF EXISTS ${tbNameAgg};"
+
+
+        tbName = "test_duplicate_hll3"
+        sql "DROP TABLE IF EXISTS ${tbName}"
+        test {
+            sql """ CREATE TABLE IF NOT EXISTS ${tbName} (k hll, v int) 
DUPLICATE KEY(k)
+                DISTRIBUTED BY HASH(k) BUCKETS 1 properties("replication_num" 
= "1"); """
+            exception "Key column can not set complex type:k"
+        }
+    }
+}
diff --git 
a/regression-test/suites/data_model_p0/duplicate/storage/test_duplicate_quantile_state.groovy
 
b/regression-test/suites/data_model_p0/duplicate/storage/test_duplicate_quantile_state.groovy
new file mode 100644
index 00000000000..b83c37c1817
--- /dev/null
+++ 
b/regression-test/suites/data_model_p0/duplicate/storage/test_duplicate_quantile_state.groovy
@@ -0,0 +1,77 @@
+// Licensed to the Apache Software Foundation (ASF) under one
+// or more contributor license agreements.  See the NOTICE file
+// distributed with this work for additional information
+// regarding copyright ownership.  The ASF licenses this file
+// to you under the Apache License, Version 2.0 (the
+// "License"); you may not use this file except in compliance
+// with the License.  You may obtain a copy of the License at
+//
+//   http://www.apache.org/licenses/LICENSE-2.0
+//
+// Unless required by applicable law or agreed to in writing,
+// software distributed under the License is distributed on an
+// "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
+// KIND, either express or implied.  See the License for the
+// specific language governing permissions and limitations
+// under the License.
+
+suite("test_duplicate_table_quantile_state") {
+
+    for (def use_nereids : [true, false]) {
+        if (use_nereids) {
+            sql "set enable_nereids_planner=true"
+            sql "set enable_fallback_to_original_planner=false"
+            sql "set enable_nereids_dml=true;"
+        } else {
+            sql "set enable_nereids_planner=false"
+            sql "set enable_nereids_dml=false;"
+        }
+        sql "sync;"
+
+        def tbName = "test_duplicate_quantile_state1"
+        sql "DROP TABLE IF EXISTS ${tbName}"
+        sql """ CREATE TABLE IF NOT EXISTS ${tbName} ( k int, v QUANTILE_STATE 
) DUPLICATE KEY(k)
+                DISTRIBUTED BY HASH(k) BUCKETS 1 properties("replication_num" 
= "1"); """
+
+        def result = sql "show create table ${tbName}"
+        logger.info("${result}")
+        assertTrue(result.toString().containsIgnoreCase('`v` QUANTILE_STATE 
NOT NULL'))
+
+        def tbNameAgg = "test_duplicate_quantile_state_agg1"
+        sql "DROP TABLE IF EXISTS ${tbNameAgg}"
+        sql """ CREATE TABLE IF NOT EXISTS ${tbNameAgg} ( k int, v 
QUANTILE_STATE QUANTILE_UNION NOT NULL ) AGGREGATE KEY(k)
+                DISTRIBUTED BY HASH(k) BUCKETS 1 properties("replication_num" 
= "1"); """
+
+        sql """ insert into ${tbNameAgg} values
+                (1,to_quantile_state(-1, 2048)),(2,to_quantile_state(0, 2048)),
+                (2,to_quantile_state(1, 2048)),(3,to_quantile_state(0, 2048)),
+                (3,to_quantile_state(1, 2048)),(3,to_quantile_state(2, 
2048));"""
+        qt_sql "select k, quantile_percent(v, 0), quantile_percent(v, 0.5), 
quantile_percent(v, 1) from ${tbNameAgg} order by k;"
+
+        // 1. insert from aggregate table
+        sql "insert into ${tbName} select * from ${tbNameAgg};"
+        qt_from_agg "select k, quantile_percent(v, 0) c1, quantile_percent(v, 
0.5) c2, quantile_percent(v, 1) c3 from ${tbName} order by k, c1, c2, c3;"
+        // 2. insert into values
+        sql """ insert into ${tbName} values (1, to_quantile_state(-2, 2048)), 
(1, to_quantile_state(0, 2048)), (2, to_quantile_state(-100, 2048));"""
+        qt_from_values "select k, quantile_percent(v, 0) c1, 
quantile_percent(v, 0.5) c2, quantile_percent(v, 1) c3 from ${tbName} order by 
k, c1, c2, c3;"
+        qt_from_values """ select k, quantile_percent(QUANTILE_UNION(v), 0) 
c1, quantile_percent(QUANTILE_UNION(v), 0.5) c2, 
quantile_percent(QUANTILE_UNION(v), 1) c3
+                from ${tbName} group by k order by k, c1, c2, c3; """
+        // 3. insert from duplicate table
+        sql "insert into ${tbName} select * from ${tbName};"
+        qt_from_dup "select k, quantile_percent(v, 0) c1, quantile_percent(v, 
0.5) c2, quantile_percent(v, 1) c3 from ${tbName} order by k, c1, c2, c3;"
+        qt_from_dup """ select k, quantile_percent(QUANTILE_UNION(v), 0) c1, 
quantile_percent(QUANTILE_UNION(v), 0.5) c2, 
quantile_percent(QUANTILE_UNION(v), 1) c3
+                from ${tbName} group by k order by k, c1, c2, c3; """
+
+        sql "DROP TABLE IF EXISTS ${tbName};"
+        sql "DROP TABLE IF EXISTS ${tbNameAgg};"
+
+
+        tbName = "test_duplicate_quantile_state3"
+        sql "DROP TABLE IF EXISTS ${tbName}"
+        test {
+            sql """ CREATE TABLE IF NOT EXISTS ${tbName} (k QUANTILE_STATE, v 
int) DUPLICATE KEY(k)
+                DISTRIBUTED BY HASH(k) BUCKETS 1 properties("replication_num" 
= "1"); """
+            exception "Key column can not set complex type:k"
+        }
+    }
+}
diff --git 
a/regression-test/suites/data_model_p0/unique/test_unique_bitmap.groovy 
b/regression-test/suites/data_model_p0/unique/test_unique_bitmap.groovy
index 64cf809eeaa..a0fec954cb7 100644
--- a/regression-test/suites/data_model_p0/unique/test_unique_bitmap.groovy
+++ b/regression-test/suites/data_model_p0/unique/test_unique_bitmap.groovy
@@ -16,40 +16,43 @@
 // under the License.
 
 suite("test_unique_table_bitmap") {
-    def tbName = "test_uniq_table_bitmap1"
-    sql "DROP TABLE IF EXISTS ${tbName}"
-    sql """
-            CREATE TABLE IF NOT EXISTS ${tbName} (
-                k int,
-                id_bitmap bitmap
-            ) UNIQUE KEY(k)
-            DISTRIBUTED BY HASH(k) BUCKETS 1 properties("replication_num" = 
"1");
-        """
-    sql "insert into ${tbName} values(1,to_bitmap(1));"
-    sql "insert into ${tbName} 
values(2,bitmap_or(to_bitmap(3),to_bitmap(1000)));"
-    sql "insert into ${tbName} 
values(3,bitmap_or(to_bitmap(999),to_bitmap(1000),to_bitmap(888888)));"
-    qt_sql "select k,bitmap_count(id_bitmap),bitmap_to_string(id_bitmap) from 
${tbName} order by k;"
-    sql "insert into ${tbName} 
values(3,bitmap_from_string('1,0,1,2,3,1,5,99,876,2445'));"
-    sql "insert into ${tbName} 
values(1,bitmap_or(bitmap_from_string('90,5,876'),to_bitmap(1000)));"
-    qt_sql "select k,bitmap_count(id_bitmap),bitmap_to_string(id_bitmap) from 
${tbName} order by k;"
-    sql "DROP TABLE ${tbName};"
 
-    def tbName2 = "test_uniq_table_bitmap2"
-    sql "DROP TABLE IF EXISTS ${tbName2}"
-    sql """
-            CREATE TABLE IF NOT EXISTS ${tbName2} (
-                k int,
-                id_bitmap bitmap
-            ) UNIQUE KEY(k)
-            DISTRIBUTED BY HASH(k) BUCKETS 1 
-            properties("replication_num" = "1", 
"enable_unique_key_merge_on_write" = "true");
-        """
-    sql "insert into ${tbName2} values(1,to_bitmap(1));"
-    sql "insert into ${tbName2} 
values(2,bitmap_or(to_bitmap(3),to_bitmap(1000)));"
-    sql "insert into ${tbName2} 
values(3,bitmap_or(to_bitmap(999),to_bitmap(1000),to_bitmap(888888)));"
-    qt_sql "select k,bitmap_count(id_bitmap),bitmap_to_string(id_bitmap) from 
${tbName2} order by k;"
-    sql "insert into ${tbName2} 
values(3,bitmap_from_string('1,0,1,2,3,1,5,99,876,2445'));"
-    sql "insert into ${tbName2} 
values(1,bitmap_or(bitmap_from_string('90,5,876'),to_bitmap(1000)));"
-    qt_sql "select k,bitmap_count(id_bitmap),bitmap_to_string(id_bitmap) from 
${tbName2} order by k;"
-    sql "DROP TABLE ${tbName2};"
+    for (def enable_mow : [true, false]) {
+        for (def use_nereids : [true, false]) {
+            if (use_nereids) {
+                sql "set enable_nereids_planner=true"
+                sql "set enable_fallback_to_original_planner=false"
+                sql "set enable_nereids_dml=true;"
+            } else {
+                sql "set enable_nereids_planner=false"
+                sql "set enable_nereids_dml=false;"
+            }
+            sql "sync;"
+
+            def tbName = "test_uniq_table_bitmap"
+            sql "DROP TABLE IF EXISTS ${tbName}"
+            sql """
+                    CREATE TABLE IF NOT EXISTS ${tbName} (
+                        k int,
+                        id_bitmap bitmap
+                    ) UNIQUE KEY(k)
+                    DISTRIBUTED BY HASH(k) BUCKETS 1 
properties("replication_num" = "1", "enable_unique_key_merge_on_write" = 
"${enable_mow}");
+                """
+
+            def result = sql "show create table ${tbName}"
+            logger.info("${result}")
+            assertTrue(result.toString().containsIgnoreCase('`id_bitmap` 
BITMAP NOT NULL'))
+
+            sql "insert into ${tbName} values(1,to_bitmap(1));"
+            sql "insert into ${tbName} 
values(2,bitmap_or(to_bitmap(3),to_bitmap(1000)));"
+            sql "insert into ${tbName} 
values(3,bitmap_or(to_bitmap(999),to_bitmap(1000),to_bitmap(888888)));"
+            qt_sql "select 
k,bitmap_count(id_bitmap),bitmap_to_string(id_bitmap) from ${tbName} order by 
k;"
+
+            sql "insert into ${tbName} 
values(3,bitmap_from_string('1,0,1,2,3,1,5,99,876,2445'));"
+            sql "insert into ${tbName} 
values(1,bitmap_or(bitmap_from_string('90,5,876'),to_bitmap(1000)));"
+            qt_sql "select 
k,bitmap_count(id_bitmap),bitmap_to_string(id_bitmap) from ${tbName} order by 
k;"
+
+            sql "DROP TABLE ${tbName};"
+        }
+    }
 }
diff --git a/regression-test/suites/data_model_p0/unique/test_unique_hll.groovy 
b/regression-test/suites/data_model_p0/unique/test_unique_hll.groovy
new file mode 100644
index 00000000000..a4551e0ccff
--- /dev/null
+++ b/regression-test/suites/data_model_p0/unique/test_unique_hll.groovy
@@ -0,0 +1,83 @@
+// Licensed to the Apache Software Foundation (ASF) under one
+// or more contributor license agreements.  See the NOTICE file
+// distributed with this work for additional information
+// regarding copyright ownership.  The ASF licenses this file
+// to you under the Apache License, Version 2.0 (the
+// "License"); you may not use this file except in compliance
+// with the License.  You may obtain a copy of the License at
+//
+//   http://www.apache.org/licenses/LICENSE-2.0
+//
+// Unless required by applicable law or agreed to in writing,
+// software distributed under the License is distributed on an
+// "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
+// KIND, either express or implied.  See the License for the
+// specific language governing permissions and limitations
+// under the License.
+
+suite("test_unique_table_hll") {
+
+    for (def enable_mow : [true, false]) {
+        for (def use_nereids : [true, false]) {
+            if (use_nereids) {
+                sql "set enable_nereids_planner=true"
+                sql "set enable_fallback_to_original_planner=false"
+                sql "set enable_nereids_dml=true;"
+            } else {
+                sql "set enable_nereids_planner=false"
+                sql "set enable_nereids_dml=false;"
+            }
+            sql "sync;"
+
+            def tbName = "test_unique_hll1"
+            sql "DROP TABLE IF EXISTS ${tbName}"
+            sql """ CREATE TABLE IF NOT EXISTS ${tbName} ( k int, v hll ) 
UNIQUE KEY(k)
+                    DISTRIBUTED BY HASH(k) BUCKETS 1 
properties("replication_num" = "1", "enable_unique_key_merge_on_write" = 
"${enable_mow}"); """
+
+            def result = sql "show create table ${tbName}"
+            logger.info("${result}")
+            assertTrue(result.toString().containsIgnoreCase('`v` HLL NOT 
NULL'))
+
+            def tbNameAgg = "test_unique_hll_agg1"
+            sql "DROP TABLE IF EXISTS ${tbNameAgg}"
+            sql """ CREATE TABLE IF NOT EXISTS ${tbNameAgg} ( k int, v hll 
hll_union ) AGGREGATE KEY(k)
+                    DISTRIBUTED BY HASH(k) BUCKETS 1 
properties("replication_num" = "1"); """
+
+            sql """ insert into ${tbNameAgg} values
+                    (1,hll_empty()),(2, hll_hash(100)),
+                    (2,hll_hash(0)),(2, hll_hash(4875)),
+                    (2,hll_hash(9234)),(2, hll_hash(45)),
+                    (2,hll_hash(0)),(2,hll_hash(100000)),
+                    (3,hll_hash(0)),(3,hll_hash(1)); """
+            qt_sql "select k, hll_cardinality(v) from ${tbNameAgg} order by k;"
+            qt_sql "select HLL_UNION_AGG(v) from ${tbNameAgg};"
+
+            // 1. insert from aggregate table
+            sql "insert into ${tbName} select * from ${tbNameAgg};"
+            qt_from_agg "select k, hll_cardinality(v) from ${tbName} order by 
k, hll_cardinality(v);"
+            qt_from_agg "select HLL_UNION_AGG(v) from ${tbName};"
+            // 2. insert into values
+            sql """ insert into ${tbName} values (4, hll_hash(100)), (1, 
hll_hash(999)), (2, hll_hash(0));"""
+            qt_from_values "select k, hll_cardinality(v) from ${tbName} order 
by k, hll_cardinality(v);"
+            qt_from_values "select k, hll_cardinality(hll_union(v)) from 
${tbName} group by k order by k, hll_cardinality(hll_union(v));"
+            qt_from_values "select HLL_UNION_AGG(v) from ${tbName};"
+            // 3. insert from UNIQUE table
+            sql "insert into ${tbName} select * from ${tbName};"
+            qt_from_uniq "select k, hll_cardinality(v) from ${tbName} order by 
k, hll_cardinality(v);"
+            qt_from_uniq "select k, hll_cardinality(hll_union(v)) from 
${tbName} group by k order by k, hll_cardinality(hll_union(v));"
+            qt_from_uniq "select HLL_UNION_AGG(v) from ${tbName};"
+
+            sql "DROP TABLE IF EXISTS ${tbName};"
+            sql "DROP TABLE IF EXISTS ${tbNameAgg};"
+
+
+            tbName = "test_unique_hll3"
+            sql "DROP TABLE IF EXISTS ${tbName}"
+            test {
+                sql """ CREATE TABLE IF NOT EXISTS ${tbName} (k hll, v int) 
UNIQUE KEY(k)
+                    DISTRIBUTED BY HASH(k) BUCKETS 1 
properties("replication_num" = "1"); """
+                exception "Key column can not set complex type:k"
+            }
+        }
+    }
+}
diff --git 
a/regression-test/suites/data_model_p0/unique/test_unique_quantile_state.groovy 
b/regression-test/suites/data_model_p0/unique/test_unique_quantile_state.groovy
new file mode 100644
index 00000000000..f7eeed8d82d
--- /dev/null
+++ 
b/regression-test/suites/data_model_p0/unique/test_unique_quantile_state.groovy
@@ -0,0 +1,79 @@
+// Licensed to the Apache Software Foundation (ASF) under one
+// or more contributor license agreements.  See the NOTICE file
+// distributed with this work for additional information
+// regarding copyright ownership.  The ASF licenses this file
+// to you under the Apache License, Version 2.0 (the
+// "License"); you may not use this file except in compliance
+// with the License.  You may obtain a copy of the License at
+//
+//   http://www.apache.org/licenses/LICENSE-2.0
+//
+// Unless required by applicable law or agreed to in writing,
+// software distributed under the License is distributed on an
+// "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
+// KIND, either express or implied.  See the License for the
+// specific language governing permissions and limitations
+// under the License.
+
+suite("test_unique_table_quantile_state") {
+
+    for (def enable_mow : [true, false]) {
+        for (def use_nereids : [true, false]) {
+            if (use_nereids) {
+                sql "set enable_nereids_planner=true"
+                sql "set enable_fallback_to_original_planner=false"
+                sql "set enable_nereids_dml=true;"
+            } else {
+                sql "set enable_nereids_planner=false"
+                sql "set enable_nereids_dml=false;"
+            }
+            sql "sync;"
+
+            def tbName = "test_unique_quantile_state1"
+            sql "DROP TABLE IF EXISTS ${tbName}"
+            sql """ CREATE TABLE IF NOT EXISTS ${tbName} ( k int, v 
QUANTILE_STATE ) UNIQUE KEY(k)
+                    DISTRIBUTED BY HASH(k) BUCKETS 1 
properties("replication_num" = "1", "enable_unique_key_merge_on_write" = 
"${enable_mow}"); """
+
+            def result = sql "show create table ${tbName}"
+            logger.info("${result}")
+            assertTrue(result.toString().containsIgnoreCase('`v` 
QUANTILE_STATE NOT NULL'))
+
+            def tbNameAgg = "test_unique_quantile_state_agg1"
+            sql "DROP TABLE IF EXISTS ${tbNameAgg}"
+            sql """ CREATE TABLE IF NOT EXISTS ${tbNameAgg} ( k int, v 
QUANTILE_STATE QUANTILE_UNION NOT NULL ) AGGREGATE KEY(k)
+                    DISTRIBUTED BY HASH(k) BUCKETS 1 
properties("replication_num" = "1"); """
+
+            sql """ insert into ${tbNameAgg} values
+                    (1,to_quantile_state(-1, 2048)),(2,to_quantile_state(0, 
2048)),
+                    (2,to_quantile_state(1, 2048)),(3,to_quantile_state(0, 
2048)),
+                    (3,to_quantile_state(1, 2048)),(3,to_quantile_state(2, 
2048));"""
+            qt_sql "select k, quantile_percent(v, 0), quantile_percent(v, 
0.5), quantile_percent(v, 1) from ${tbNameAgg} order by k;"
+
+            // 1. insert from aggregate table
+            sql "insert into ${tbName} select * from ${tbNameAgg};"
+            qt_from_agg "select k, quantile_percent(v, 0) c1, 
quantile_percent(v, 0.5) c2, quantile_percent(v, 1) c3 from ${tbName} order by 
k, c1, c2, c3;"
+            // 2. insert into values
+            sql """ insert into ${tbName} values (1, to_quantile_state(-2, 
2048)), (1, to_quantile_state(0, 2048)), (2, to_quantile_state(-100, 2048));"""
+            qt_from_values "select k, quantile_percent(v, 0) c1, 
quantile_percent(v, 0.5) c2, quantile_percent(v, 1) c3 from ${tbName} order by 
k, c1, c2, c3;"
+            qt_from_values """ select k, quantile_percent(QUANTILE_UNION(v), 
0) c1, quantile_percent(QUANTILE_UNION(v), 0.5) c2, 
quantile_percent(QUANTILE_UNION(v), 1) c3
+                    from ${tbName} group by k order by k, c1, c2, c3; """
+            // 3. insert from UNIQUE table
+            sql "insert into ${tbName} select * from ${tbName};"
+            qt_from_uniq "select k, quantile_percent(v, 0) c1, 
quantile_percent(v, 0.5) c2, quantile_percent(v, 1) c3 from ${tbName} order by 
k, c1, c2, c3;"
+            qt_from_uniq """ select k, quantile_percent(QUANTILE_UNION(v), 0) 
c1, quantile_percent(QUANTILE_UNION(v), 0.5) c2, 
quantile_percent(QUANTILE_UNION(v), 1) c3
+                    from ${tbName} group by k order by k, c1, c2, c3; """
+
+            sql "DROP TABLE IF EXISTS ${tbName};"
+            sql "DROP TABLE IF EXISTS ${tbNameAgg};"
+
+
+            tbName = "test_unique_quantile_state3"
+            sql "DROP TABLE IF EXISTS ${tbName}"
+            test {
+                sql """ CREATE TABLE IF NOT EXISTS ${tbName} (k 
QUANTILE_STATE, v int) UNIQUE KEY(k)
+                    DISTRIBUTED BY HASH(k) BUCKETS 1 
properties("replication_num" = "1"); """
+                exception "Key column can not set complex type:k"
+            }
+        }
+    }
+}


---------------------------------------------------------------------
To unsubscribe, e-mail: [email protected]
For additional commands, e-mail: [email protected]
