This is an automated email from the ASF dual-hosted git repository.

kxiao pushed a commit to branch branch-2.0
in repository https://gitbox.apache.org/repos/asf/doris.git

commit 76488d18ee0cdba40f4e8e95a579503069783697
Author: bobhan1 <[email protected]>
AuthorDate: Wed Oct 11 10:17:01 2023 +0800

    [regression-test](merge-on-write) Fix partial update concurrency conflict case (#25212)
---
 .../partial_update/partial_update_parallel1.csv    |  5 +++
 .../partial_update/partial_update_parallel2.csv    |  5 +++
 .../partial_update/partial_update_parallel3.csv    |  5 +++
 .../test_partial_update_parallel.out               |  8 +++++
 .../test_partial_update_parallel.groovy            | 41 ++++++++++++----------
 5 files changed, 45 insertions(+), 19 deletions(-)
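A minimal sketch (not part of the patch) of the synchronization pattern the updated suite relies on: each concurrent partial-update stream load runs in its own daemon thread, and every thread is joined before the verification query, so the final table state is deterministic. The doLoad closure below is a hypothetical stand-in for the streamLoad blocks shown in the diff.

    // Hypothetical sketch; doLoad stands in for the real streamLoad blocks.
    def doLoad = { String csvFile, String columns ->
        // each load updates a disjoint set of value columns for the same keys
        println "loading ${csvFile} with columns ${columns}"
    }

    def t1 = Thread.startDaemon { doLoad('partial_update_parallel1.csv', 'id,name') }
    def t2 = Thread.startDaemon { doLoad('partial_update_parallel2.csv', 'id,score,test') }
    def t3 = Thread.startDaemon { doLoad('partial_update_parallel3.csv', 'id,dft') }

    // wait for all three loads to finish before querying the table
    t1.join()
    t2.join()
    t3.join()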

diff --git a/regression-test/data/unique_with_mow_p0/partial_update/partial_update_parallel1.csv b/regression-test/data/unique_with_mow_p0/partial_update/partial_update_parallel1.csv
new file mode 100644
index 00000000000..4ba84bb7785
--- /dev/null
+++ b/regression-test/data/unique_with_mow_p0/partial_update/partial_update_parallel1.csv
@@ -0,0 +1,5 @@
+1,"ddddddddddd"
+2,"eeeeee"
+3,"aaaaa"
+4,"bbbbbbbb"
+5,"cccccccccccc"
diff --git a/regression-test/data/unique_with_mow_p0/partial_update/partial_update_parallel2.csv b/regression-test/data/unique_with_mow_p0/partial_update/partial_update_parallel2.csv
new file mode 100644
index 00000000000..1560d6d3261
--- /dev/null
+++ b/regression-test/data/unique_with_mow_p0/partial_update/partial_update_parallel2.csv
@@ -0,0 +1,5 @@
+1,1111,199
+2,2222,299
+3,3333,399
+4,4444,499
+5,5555,599
diff --git a/regression-test/data/unique_with_mow_p0/partial_update/partial_update_parallel3.csv b/regression-test/data/unique_with_mow_p0/partial_update/partial_update_parallel3.csv
new file mode 100644
index 00000000000..c2366eb0a90
--- /dev/null
+++ b/regression-test/data/unique_with_mow_p0/partial_update/partial_update_parallel3.csv
@@ -0,0 +1,5 @@
+1,10
+2,20
+3,30
+4,40
+5,50
diff --git a/regression-test/data/unique_with_mow_p0/partial_update/test_partial_update_parallel.out b/regression-test/data/unique_with_mow_p0/partial_update/test_partial_update_parallel.out
new file mode 100644
index 00000000000..bcd7e86c53c
--- /dev/null
+++ b/regression-test/data/unique_with_mow_p0/partial_update/test_partial_update_parallel.out
@@ -0,0 +1,8 @@
+-- This file is automatically generated. You should know what you did if you want to edit this
+-- !sql --
+1      "ddddddddddd"   1111    199     10
+2      "eeeeee"        2222    299     20
+3      "aaaaa" 3333    399     30
+4      "bbbbbbbb"      4444    499     40
+5      "cccccccccccc"  5555    599     50
+
diff --git a/regression-test/suites/unique_with_mow_p0/partial_update/test_partial_update_parallel.groovy b/regression-test/suites/unique_with_mow_p0/partial_update/test_partial_update_parallel.groovy
index 8875a5d6d47..19522e8064e 100644
--- a/regression-test/suites/unique_with_mow_p0/partial_update/test_partial_update_parallel.groovy
+++ b/regression-test/suites/unique_with_mow_p0/partial_update/test_partial_update_parallel.groovy
@@ -31,61 +31,64 @@ suite("test_primary_key_partial_update_parallel", "p0") {
                 UNIQUE KEY(`id`) DISTRIBUTED BY HASH(`id`) BUCKETS 1
                 PROPERTIES("replication_num" = "1", 
"enable_unique_key_merge_on_write" = "true")
     """
-    // insert 2 lines
-    sql """
-        insert into ${tableName} values(2, "doris2", 2000, 223, 1)
-    """
 
-    sql """
-        insert into ${tableName} values(1, "doris", 1000, 123, 1)
-    """
+    sql """insert into ${tableName} values
+        (2, "doris2", 2000, 223, 2),
+        (1, "doris", 1000, 123, 1),
+        (5, "doris5", 5000, 523, 5),
+        (4, "doris4", 4000, 423, 4),
+        (3, "doris3", 3000, 323, 3);"""
 
-    Thread.startDaemon {
+    t1 = Thread.startDaemon {
         streamLoad {
             table "${tableName}"
 
             set 'column_separator', ','
             set 'format', 'csv'
             set 'partial_columns', 'true'
-            set 'columns', 'id,score'
+            set 'columns', 'id,name'
 
-            file 'basic.csv'
+            file 'partial_update_parallel1.csv'
             time 10000 // limit inflight 10s
         }
     }
 
-    Thread.startDaemon {
+    t2 = Thread.startDaemon {
         streamLoad {
             table "${tableName}"
 
             set 'column_separator', ','
             set 'format', 'csv'
             set 'partial_columns', 'true'
-            set 'columns', 'id,score'
+            set 'columns', 'id,score,test'
 
-            file 'basic_with_duplicate.csv'
+            file 'partial_update_parallel2.csv'
             time 10000 // limit inflight 10s
         }
     }
 
-    Thread.startDaemon {
+    t3 = Thread.startDaemon {
         streamLoad {
             table "${tableName}"
 
             set 'column_separator', ','
             set 'format', 'csv'
             set 'partial_columns', 'true'
-            set 'columns', 'id,score'
+            set 'columns', 'id,dft'
 
-            file 'basic_with_duplicate2.csv'
+            file 'partial_update_parallel3.csv'
             time 10000 // limit inflight 10s
         }
     }
 
+    t1.join()
+    t2.join()
+    t3.join()
+
     sql "sync"
 
-    sql """
-        select * from ${tableName} order by id;
-    """
+    qt_sql """ select * from ${tableName} order by id;"""
+
+    sql """ DROP TABLE IF EXISTS ${tableName}; """
 }
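For reference, the expected rows in test_partial_update_parallel.out follow from merging the three disjoint column loads onto the initially inserted rows. A hypothetical sketch (not part of the patch) of how the final row for id = 1 is composed; the column names are inferred from the 'columns' headers above, and the values are taken from the insert statement and the three CSV files.

    // Hypothetical illustration; values come from the insert and CSVs above.
    def row = [id: 1, name: '"doris"', score: 1000, test: 123, dft: 1]  // initial insert
    row.name  = '"ddddddddddd"'  // partial_update_parallel1.csv, columns 'id,name'
    row.score = 1111             // partial_update_parallel2.csv, columns 'id,score,test'
    row.test  = 199
    row.dft   = 10               // partial_update_parallel3.csv, columns 'id,dft'
    // matches the first row of test_partial_update_parallel.out
    assert row == [id: 1, name: '"ddddddddddd"', score: 1111, test: 199, dft: 10]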
 

