This is an automated email from the ASF dual-hosted git repository.

morningman pushed a commit to branch master
in repository https://gitbox.apache.org/repos/asf/doris.git


The following commit(s) were added to refs/heads/master by this push:
     new ef880166bb9 [regression-test](stream load)Invalid EXEC_MEM_LIMIT check (#26717)
ef880166bb9 is described below

commit ef880166bb94dc3848cbd5bd78ce3264f263f6cd
Author: Guangdong Liu <[email protected]>
AuthorDate: Sun Nov 12 11:55:44 2023 +0800

    [regression-test](stream load)Invalid EXEC_MEM_LIMIT check (#26717)
---
 .../stream_load/test_stream_load_properties.groovy | 50 +++++++++++++++++++---
 1 file changed, 44 insertions(+), 6 deletions(-)

diff --git a/regression-test/suites/load_p0/stream_load/test_stream_load_properties.groovy b/regression-test/suites/load_p0/stream_load/test_stream_load_properties.groovy
index 757a6f9f19e..13cd50064de 100644
--- a/regression-test/suites/load_p0/stream_load/test_stream_load_properties.groovy
+++ b/regression-test/suites/load_p0/stream_load/test_stream_load_properties.groovy
@@ -42,7 +42,7 @@ suite("test_stream_load_properties", "p0") {
                   "mow_tbl_array",
                  ]
 
-    def columns = [ 
+    def columns = [
                     "k00,k01,k02,k03,k04,k05,k06,k07,k08,k09,k10,k11,k12,k13,k14,k15,k16,k17,k18",
                     "k00,k01,k02,k03,k04,k05,k06,k07,k08,k09,k10,k11,k12,k13,k14,k15,k16,k17,k18",
                     "k00,k01,k02,k03,k04,k05,k06,k07,k08,k09,k10,k11,k12,k13,k14,k15,k16,k17,k18",
@@ -52,8 +52,8 @@ suite("test_stream_load_properties", "p0") {
                     "k00,k01,k02,k03,k04,k05,k06,k07,k08,k09,k10,k11,k12,k13,k14,k15,k16,k17",
                   ]
 
-    def timezoneColumns = 
-                  [ 
+    def timezoneColumns =
+                  [
                     "k00=unix_timestamp('2007-11-30 
10:30:19'),k01,k02,k03,k04,k05,k06,k07,k08,k09,k10,k11,k12,k13,k14,k15,k16,k17,k18",
                     "k00=unix_timestamp('2007-11-30 
10:30:19'),k01,k02,k03,k04,k05,k06,k07,k08,k09,k10,k11,k12,k13,k14,k15,k16,k17,k18",
                     "k00=unix_timestamp('2007-11-30 
10:30:19'),k01,k02,k03,k04,k05,k06,k07,k08,k09,k10,k11,k12,k13,k14,k15,k16,k17,k18",
@@ -486,7 +486,7 @@ suite("test_stream_load_properties", "p0") {
             }
         }
         def tableName1 = "stream_load_" + tableName
-        qt_sql_merge_type "select * from ${tableName1} order by k00,k01"                 
+        qt_sql_merge_type "select * from ${tableName1} order by k00,k01"
     } finally {
         sql new File("""${context.file.parent}/ddl/mow_tbl_basic_drop.sql""").text
     }
@@ -623,7 +623,7 @@ suite("test_stream_load_properties", "p0") {
                 sleep(1000)
                 count++
             }
-            
+
             if (i <= 3) {
                 qt_sql_2pc_commit "select * from ${tableName1} order by k00,k01"
             } else {
@@ -639,7 +639,7 @@ suite("test_stream_load_properties", "p0") {
         }
     }
 
-    // compress_type 
+    // compress_type
     // gz/bz2/lz4
     // todo lzo/deflate
     // i = 0
@@ -939,4 +939,42 @@ suite("test_stream_load_properties", "p0") {
             sql new File("""${context.file.parent}/ddl/${tableName}_drop.sql""").text
         }
     }
+
+    // EXEC_MEM_LIMIT illegal number
+    i = 0
+    try {
+        for (String tableName in tables) {
+            sql new File("""${context.file.parent}/ddl/${tableName}_drop.sql""").text
+            sql new File("""${context.file.parent}/ddl/${tableName}_create.sql""").text
+
+            streamLoad {
+                table "stream_load_" + tableName
+                set 'format', 'json'
+                set 'exec_mem_limit', 'a'
+                set 'columns', columns[i]
+                set 'read_json_by_line', 'true'
+                if (i <= 3) {
+                    file json_by_line_files[0]
+                } else {
+                    file json_by_line_files[1]
+                }
+                time 10000 // limit inflight 10s
+
+                check { result, exception, startTime, endTime ->
+                    if (exception != null) {
+                        throw exception
+                    }
+                    log.info("Stream load result: ${result}".toString())
+                    def json = parseJson(result)
+                    assertEquals("fail", json.Status.toLowerCase())
+                    assertEquals("[INVALID_ARGUMENT]Invalid mem limit format, stoll", json.Message)
+                }
+            }
+            i++
+        }
+    } finally {
+        for (String tableName in tables) {
+            sql new File("""${context.file.parent}/ddl/${tableName}_drop.sql""").text
+        }
+    }
 }
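
For readers following the new case: the `exec_mem_limit` stream load property is expected to be an integer (a byte limit), and the error message asserted above ("Invalid mem limit format, stoll") indicates the backend parses it with stoll, so a non-numeric value such as 'a' is rejected before the load runs. Below is a minimal sketch of the same negative check in the suite's own streamLoad Groovy DSL, reduced to a single table; the table name `stream_load_demo` and data file `demo.json` are hypothetical placeholders, not part of this commit:

    // Sketch only, assuming the streamLoad DSL shown in the patch above.
    streamLoad {
        table "stream_load_demo"        // hypothetical target table
        set 'format', 'json'
        set 'read_json_by_line', 'true'
        set 'exec_mem_limit', 'a'       // non-numeric value: must be rejected
        file 'demo.json'                // hypothetical data file
        time 10000                      // limit inflight 10s

        check { result, exception, startTime, endTime ->
            if (exception != null) {
                throw exception
            }
            log.info("Stream load result: ${result}".toString())
            def json = parseJson(result)
            // The load must fail with the backend's stoll parse error.
            assertEquals("fail", json.Status.toLowerCase())
            assertEquals("[INVALID_ARGUMENT]Invalid mem limit format, stoll", json.Message)
        }
    }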


---------------------------------------------------------------------
To unsubscribe, e-mail: [email protected]
For additional commands, e-mail: [email protected]
