This is an automated email from the ASF dual-hosted git repository.

morningman pushed a commit to branch branch-1.2-lts
in repository https://gitbox.apache.org/repos/asf/doris.git

commit cd62fb73c1e031e8defe922527b8e01dc1a662ff
Author: Xin Liao <[email protected]>
AuthorDate: Fri Feb 10 17:51:15 2023 +0800

    [fix](sequence-column) MergeIterator does not use the correct seq column for comparison (#16494)
---
 be/src/olap/rowset/beta_rowset_reader.cpp          | 11 +++-
 .../unique/test_unique_table_new_sequence.out      | 66 +++++++++++-----------
 .../unique/test_unique_table_sequence.out          | 41 +++++++++++---
 .../data/data_model_p0/unique/unique_key_data1.csv |  8 +--
 .../data/data_model_p0/unique/unique_key_data2.csv |  8 +--
 .../unique/test_unique_table_new_sequence.groovy   | 34 +++++------
 .../unique/test_unique_table_sequence.groovy       | 24 ++++++--
 7 files changed, 121 insertions(+), 71 deletions(-)

diff --git a/be/src/olap/rowset/beta_rowset_reader.cpp b/be/src/olap/rowset/beta_rowset_reader.cpp
index 8fb0fca4a9..d14cbed21d 100644
--- a/be/src/olap/rowset/beta_rowset_reader.cpp
+++ b/be/src/olap/rowset/beta_rowset_reader.cpp
@@ -202,8 +202,17 @@ Status BetaRowsetReader::init(RowsetReaderContext* read_context) {
     if (config::enable_storage_vectorization && read_context->is_vec) {
         if (read_context->need_ordered_result &&
             _rowset->rowset_meta()->is_segments_overlapping()) {
+            auto sequence_loc = -1;
+            if (read_context->sequence_id_idx != -1) {
+                for (size_t loc = 0; loc < read_context->return_columns->size(); loc++) {
+                    if (read_context->return_columns->at(loc) == read_context->sequence_id_idx) {
+                        sequence_loc = loc;
+                        break;
+                    }
+                }
+            }
             final_iterator = vectorized::new_merge_iterator(
-                    iterators, read_context->sequence_id_idx, read_context->is_unique,
+                    iterators, sequence_loc, read_context->is_unique,
                     read_context->read_orderby_key_reverse, read_context->merged_rows);
         } else {
             if (read_context->read_orderby_key_reverse) {
diff --git a/regression-test/data/data_model_p0/unique/test_unique_table_new_sequence.out b/regression-test/data/data_model_p0/unique/test_unique_table_new_sequence.out
index 825b7039c1..10a08446c6 100644
--- a/regression-test/data/data_model_p0/unique/test_unique_table_new_sequence.out
+++ b/regression-test/data/data_model_p0/unique/test_unique_table_new_sequence.out
@@ -1,45 +1,47 @@
 -- This file is automatically generated. You should know what you did if you want to edit this
 -- !all --
-1      4       11
-2      5       12
-3      6       13
+1      4       11      12      13
+2      5       12      13      14
+3      6       13      14      15
 
--- !all --
+-- !count --
+3
+
+-- !part --
 1      2       15
 2      5       12
 3      6       13
 
 -- !all --
-1      2       15
-15     8       19
-2      5       12
-3      6       13
+1      2       15      16      17
+2      5       12      13      14
+3      6       13      14      15
+
+-- !count --
+3
+
+-- !part --
+1      10      15
+2      5       14
+3      6       11
 
 -- !all --
-1      10      15      0       15
-15     8       19      0       19
-2      5       14      0       12
-3      6       11      0       13
-
--- !desc --
-k1     INT     Yes     true    \N      
-v1     TINYINT Yes     false   \N      REPLACE
-v2     INT     Yes     false   \N      REPLACE
-__DORIS_DELETE_SIGN__  TINYINT No      false   0       REPLACE
-__DORIS_SEQUENCE_COL__ INT     Yes     false   \N      REPLACE
-
--- !desc --
-k1     INT     Yes     true    \N      
-v1     TINYINT Yes     false   \N      REPLACE
-vv2    INT     Yes     false   \N      REPLACE
-__DORIS_DELETE_SIGN__  TINYINT No      false   0       REPLACE
-__DORIS_SEQUENCE_COL__ INT     Yes     false   \N      REPLACE
+1      10      15      16      17
+2      5       14      13      14
+3      6       11      14      15
+
+-- !count --
+4
+
+-- !part --
+1      10      15
+15     8       19
+2      5       14
+3      6       11
 
 -- !all --
-1      10      15      0       15
-15     8       19      0       19
-2      5       14      0       12
-21     8       22      0       22
-23     9       24      0       24
-3      6       11      0       13
+1      10      15      16      17      0       15
+15     8       19      20      21      0       19
+2      5       14      13      14      0       12
+3      6       11      14      15      0       13
 
diff --git a/regression-test/data/data_model_p0/unique/test_unique_table_sequence.out b/regression-test/data/data_model_p0/unique/test_unique_table_sequence.out
index da9bce4389..603e5b87aa 100644
--- a/regression-test/data/data_model_p0/unique/test_unique_table_sequence.out
+++ b/regression-test/data/data_model_p0/unique/test_unique_table_sequence.out
@@ -1,22 +1,47 @@
 -- This file is automatically generated. You should know what you did if you want to edit this
 -- !all --
-1      4       11
-2      5       12
-3      6       13
+1      4       11      12      13
+2      5       12      13      14
+3      6       13      14      15
 
--- !all --
+-- !count --
+3
+
+-- !part --
 1      2       15
 2      5       12
 3      6       13
 
 -- !all --
+1      2       15      16      17
+2      5       12      13      14
+3      6       13      14      15
+
+-- !count --
+3
+
+-- !part --
+1      10      15
+2      5       14
+3      6       11
+
+-- !all --
+1      10      15      16      17
+2      5       14      13      14
+3      6       11      14      15
+
+-- !count --
+4
+
+-- !part --
 1      10      15
+15     9       18
 2      5       14
 3      6       11
 
 -- !all --
-1      10      15      0       15
-15     9       18      0       \N
-2      5       14      0       12
-3      6       11      0       13
+1      10      15      16      17      0       15
+15     9       18      21      22      0       \N
+2      5       14      13      14      0       12
+3      6       11      14      15      0       13
 
diff --git a/regression-test/data/data_model_p0/unique/unique_key_data1.csv b/regression-test/data/data_model_p0/unique/unique_key_data1.csv
index 1a65108428..f6ff3a45c9 100644
--- a/regression-test/data/data_model_p0/unique/unique_key_data1.csv
+++ b/regression-test/data/data_model_p0/unique/unique_key_data1.csv
@@ -1,4 +1,4 @@
-1,4,11
-2,5,12
-3,6,13
-2,7,9
+1,4,11,12,13
+2,5,12,13,14
+3,6,13,14,15
+2,7,9,10,11
diff --git a/regression-test/data/data_model_p0/unique/unique_key_data2.csv b/regression-test/data/data_model_p0/unique/unique_key_data2.csv
index b6d15e945a..bd8a79e43b 100644
--- a/regression-test/data/data_model_p0/unique/unique_key_data2.csv
+++ b/regression-test/data/data_model_p0/unique/unique_key_data2.csv
@@ -1,4 +1,4 @@
-1,2,15
-2,3,2
-3,4,3
-1,9,14
+1,2,15,16,17
+2,3,2,3,4
+3,4,3,4,5
+1,9,14,15,16
diff --git a/regression-test/suites/data_model_p0/unique/test_unique_table_new_sequence.groovy b/regression-test/suites/data_model_p0/unique/test_unique_table_new_sequence.groovy
index a1808b4d6f..7dc6d2e3e7 100644
--- a/regression-test/suites/data_model_p0/unique/test_unique_table_new_sequence.groovy
+++ b/regression-test/suites/data_model_p0/unique/test_unique_table_new_sequence.groovy
@@ -22,7 +22,9 @@ suite("test_unique_table_new_sequence") {
     CREATE TABLE IF NOT EXISTS ${tableName} (
       `k1` int NULL,
       `v1` tinyint NULL,
-      `v2` int
+      `v2` int,
+      `v3` int,
+      `v4` int
     ) ENGINE=OLAP
     UNIQUE KEY(k1)
     DISTRIBUTED BY HASH(`k1`) BUCKETS 3
@@ -37,7 +39,7 @@ suite("test_unique_table_new_sequence") {
         table "${tableName}"
 
         set 'column_separator', ','
-        set 'columns', 'k1,v1,v2'
+        set 'columns', 'k1,v1,v2,v3,v4'
 
         file 'unique_key_data1.csv'
         time 10000 // limit inflight 10s
@@ -63,7 +65,7 @@ suite("test_unique_table_new_sequence") {
         table "${tableName}"
 
         set 'column_separator', ','
-        set 'columns', 'k1,v1,v2'
+        set 'columns', 'k1,v1,v2,v3,v4'
 
         file 'unique_key_data2.csv'
         time 10000 // limit inflight 10s
@@ -83,13 +85,9 @@ suite("test_unique_table_new_sequence") {
     }
     sql "sync"
 
-    order_qt_all "SELECT * from ${tableName}"
-
-    sql "INSERT INTO ${tableName} values(15, 8, 19)"
-
-    sql "INSERT INTO ${tableName} values(15, 9, 18)"
+    qt_count "SELECT COUNT(*) from ${tableName}"
 
-    sql "sync"
+    order_qt_part "SELECT k1, v1, v2 from ${tableName}"
 
     order_qt_all "SELECT * from ${tableName}"
 
@@ -99,24 +97,26 @@ suite("test_unique_table_new_sequence") {
 
     sql "UPDATE ${tableName} SET v2 = 11 WHERE k1 = 3"
 
-    sql "SET show_hidden_columns=true"
-
     sql "sync"
 
-    order_qt_all "SELECT * from ${tableName}"
+    qt_count "SELECT COUNT(*) from ${tableName}"
 
-    qt_desc "desc ${tableName}"
+    order_qt_part "SELECT k1, v1, v2 from ${tableName}"
 
-    sql "ALTER TABLE ${tableName} RENAME COLUMN v2 vv2"
+    order_qt_all "SELECT * from ${tableName}"
 
-    qt_desc "desc ${tableName}"
+    sql "INSERT INTO ${tableName} values(15, 8, 19, 20, 21)"
 
-    sql "INSERT INTO ${tableName} values(21, 8, 22)"
+    sql "INSERT INTO ${tableName} values(15, 9, 18, 21, 22)"
 
-    sql "INSERT INTO ${tableName} values(23, 9, 24)"
+    sql "SET show_hidden_columns=true"
 
     sql "sync"
 
+    qt_count "SELECT COUNT(*) from ${tableName}"
+
+    order_qt_part "SELECT k1, v1, v2 from ${tableName}"
+
     order_qt_all "SELECT * from ${tableName}"
 
     sql "DROP TABLE ${tableName}"
diff --git a/regression-test/suites/data_model_p0/unique/test_unique_table_sequence.groovy b/regression-test/suites/data_model_p0/unique/test_unique_table_sequence.groovy
index 91645b8118..3753391f94 100644
--- a/regression-test/suites/data_model_p0/unique/test_unique_table_sequence.groovy
+++ b/regression-test/suites/data_model_p0/unique/test_unique_table_sequence.groovy
@@ -22,7 +22,9 @@ suite("test_unique_table_sequence") {
         CREATE TABLE IF NOT EXISTS ${tableName} (
           `k1` int NULL,
           `v1` tinyint NULL,
-          `v2` int
+          `v2` int,
+          `v3` int,
+          `v4` int
         ) ENGINE=OLAP
         UNIQUE KEY(k1)
         DISTRIBUTED BY HASH(`k1`) BUCKETS 3
@@ -36,7 +38,7 @@ suite("test_unique_table_sequence") {
         table "${tableName}"
 
         set 'column_separator', ','
-        set 'columns', 'k1,v1,v2'
+        set 'columns', 'k1,v1,v2,v3,v4'
         set 'function_column.sequence_col', 'v2'
 
         file 'unique_key_data1.csv'
@@ -63,7 +65,7 @@ suite("test_unique_table_sequence") {
         table "${tableName}"
 
         set 'column_separator', ','
-        set 'columns', 'k1,v1,v2'
+        set 'columns', 'k1,v1,v2,v3,v4'
         set 'function_column.sequence_col', 'v2'
 
         file 'unique_key_data2.csv'
@@ -84,6 +86,10 @@ suite("test_unique_table_sequence") {
     }
     sql "sync"
 
+    qt_count "SELECT COUNT(*) from ${tableName}"
+
+    order_qt_part "SELECT k1, v1, v2 from ${tableName}"
+
     order_qt_all "SELECT * from ${tableName}"
 
     sql "UPDATE ${tableName} SET v1 = 10 WHERE k1 = 1"
@@ -94,16 +100,24 @@ suite("test_unique_table_sequence") {
 
     sql "sync"
 
+    qt_count "SELECT COUNT(*) from ${tableName}"
+
+    order_qt_part "SELECT k1, v1, v2 from ${tableName}"
+
     order_qt_all "SELECT * from ${tableName}"
 
-    sql "INSERT INTO ${tableName} values(15, 8, 19)"
+    sql "INSERT INTO ${tableName} values(15, 8, 19, 20, 21)"
 
-    sql "INSERT INTO ${tableName} values(15, 9, 18)"
+    sql "INSERT INTO ${tableName} values(15, 9, 18, 21, 22)"
 
     sql "SET show_hidden_columns=true"
 
     sql "sync"
 
+    qt_count "SELECT COUNT(*) from ${tableName}"
+
+    order_qt_part "SELECT k1, v1, v2 from ${tableName}"
+
     order_qt_all "SELECT * from ${tableName}"
 
     sql "DROP TABLE ${tableName}"


---------------------------------------------------------------------
To unsubscribe, e-mail: [email protected]
For additional commands, e-mail: [email protected]

Reply via email to