This is an automated email from the ASF dual-hosted git repository.

morningman pushed a commit to branch master
in repository https://gitbox.apache.org/repos/asf/doris.git


The following commit(s) were added to refs/heads/master by this push:
     new 13e05c4a5d [Enhancement](stream load) add some regression tests for json format stream load (#17520)
13e05c4a5d is described below

commit 13e05c4a5d8bbc2fe4759c3a9f23f6c76a753850
Author: Tiewei Fang <[email protected]>
AuthorDate: Sun Mar 12 20:13:07 2023 +0800

    [Enhancement](stream load) add some regression tests for json format stream load (#17520)
---
 .../data/correctness_p0/test_current_timestamp.out | 35 ++++++++---
 .../test_current_timestamp_streamload.json         |  2 +
 .../load_p0/stream_load/case_sensitive_json.json   |  2 +
 .../data/load_p0/stream_load/test_json_load.out    |  5 ++
 .../correctness_p0/test_current_timestamp.groovy   | 69 +++++++++++++++++++---
 .../load_p0/stream_load/test_json_load.groovy      | 16 ++++-
 6 files changed, 111 insertions(+), 18 deletions(-)
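
For readers skimming the diff: the new JSON cases drive stream load through the regression framework's streamLoad DSL. The block below is a condensed sketch of the options they exercise, not code from the commit; the table name and the shortened column list are placeholders for illustration.

    streamLoad {
        table "test_current_timestamp"        // placeholder table name
        set 'format', 'json'                  // load JSON instead of the default CSV
        set 'read_json_by_line', 'true'       // one JSON object per input line
        set 'strip_outer_array', 'false'      // input is not wrapped in a JSON array
        set 'columns', 'id, name, dt_0 = current_timestamp()'   // column mapping (abbreviated)
        file 'test_current_timestamp_streamload.json'
        time 10000                            // limit inflight 10s
    }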

diff --git a/regression-test/data/correctness_p0/test_current_timestamp.out b/regression-test/data/correctness_p0/test_current_timestamp.out
index bd22eb6828..3a0a4e0a8b 100644
--- a/regression-test/data/correctness_p0/test_current_timestamp.out
+++ b/regression-test/data/correctness_p0/test_current_timestamp.out
@@ -1,25 +1,44 @@
 -- This file is automatically generated. You should know what you did if you want to edit this
--- !insert_into --
+-- !insert_into1 --
 4
 
--- !insert_into --
+-- !insert_into2 --
 4
 
--- !insert_into --
+-- !insert_into3 --
 4
 
--- !insert_into --
+-- !insert_into4 --
 4
 
--- !stream_load --
+-- !stream_load_csv1 --
 4
 
--- !stream_load --
+-- !stream_load_csv2 --
 4
 
--- !stream_load --
+-- !stream_load_csv3 --
 4
 
--- !stream_load --
+-- !stream_load_csv4 --
 4
 
+-- !stream_load_json1 --
+2
+
+-- !stream_load_json2 --
+2
+
+-- !stream_load_json3 --
+2
+
+-- !stream_load_json4 --
+2
+
+-- !stream_load_json5 --
+9      apache  \N
+10     doris   \N
+
+-- !stream_load_json6 --
+2
+
diff --git a/regression-test/data/correctness_p0/test_current_timestamp_streamload.json b/regression-test/data/correctness_p0/test_current_timestamp_streamload.json
new file mode 100644
index 0000000000..9d27294e8f
--- /dev/null
+++ b/regression-test/data/correctness_p0/test_current_timestamp_streamload.json
@@ -0,0 +1,2 @@
+{"id":9,"name":"apache"}
+{"id":10,"name":"doris"}
diff --git a/regression-test/data/load_p0/stream_load/case_sensitive_json.json b/regression-test/data/load_p0/stream_load/case_sensitive_json.json
new file mode 100644
index 0000000000..d6b00c7d07
--- /dev/null
+++ b/regression-test/data/load_p0/stream_load/case_sensitive_json.json
@@ -0,0 +1,2 @@
+{"Id":1,"cIty":"beijing","CodE":2345671}
+{"Id":2,"cIty":"shanghai","CodE":2345672}
diff --git a/regression-test/data/load_p0/stream_load/test_json_load.out b/regression-test/data/load_p0/stream_load/test_json_load.out
index b297fa2236..6abf0cd771 100644
--- a/regression-test/data/load_p0/stream_load/test_json_load.out
+++ b/regression-test/data/load_p0/stream_load/test_json_load.out
@@ -181,3 +181,8 @@
 -- !select18 --
 200    changsha        3456789
 
+-- !select19 --
+1      beijing 2345671
+2      shanghai        2345672
+200    changsha        3456789
+
diff --git a/regression-test/suites/correctness_p0/test_current_timestamp.groovy b/regression-test/suites/correctness_p0/test_current_timestamp.groovy
index 53ac6d6838..8aca63fc06 100644
--- a/regression-test/suites/correctness_p0/test_current_timestamp.groovy
+++ b/regression-test/suites/correctness_p0/test_current_timestamp.groovy
@@ -37,6 +37,22 @@ suite("test_current_timestamp") {
         DISTRIBUTED BY HASH(id)
         PROPERTIES("replication_num" = "1");
     """
+
+    def tableName2 = "test_current_timestamp2"
+
+    sql """ DROP TABLE IF EXISTS ${tableName2} """
+    sql """
+        CREATE TABLE IF NOT EXISTS ${tableName2}
+        (
+            id TINYINT,
+            name CHAR(10) NOT NULL DEFAULT "zs",
+            dt_1 DATETIME DEFAULT CURRENT_TIMESTAMP,
+            dt_2 DATETIMEV2 DEFAULT CURRENT_TIMESTAMP
+        )
+        COMMENT "test current_timestamp table2"
+        DISTRIBUTED BY HASH(id)
+        PROPERTIES("replication_num" = "1");
+    """
     
     // test insert into.
     sql " insert into ${tableName} (id,name,dt_0,dt_2,dt_4,dt_6) values 
(1,'aa',current_timestamp(),current_timestamp(),current_timestamp(),current_timestamp());
 "
@@ -44,14 +60,14 @@ suite("test_current_timestamp") {
     sql " insert into ${tableName} (id,name,dt_0,dt_2,dt_4,dt_6) values 
(3,'cc',current_timestamp(),current_timestamp(),current_timestamp(),current_timestamp());
 "
     sql " insert into ${tableName} (id,name,dt_0,dt_2,dt_4,dt_6) values 
(4,'dd',current_timestamp(),current_timestamp(),current_timestamp(),current_timestamp());
 "
 
-    qt_insert_into """ select count(*) from ${tableName} where to_date(dt_0) = to_date(dt_1); """
-    qt_insert_into """ select count(*) from ${tableName} where to_date(dt_2) = to_date(dt_3); """
-    qt_insert_into """ select count(*) from ${tableName} where to_date(dt_4) = to_date(dt_5); """
-    qt_insert_into """ select count(*) from ${tableName} where to_date(dt_6) = to_date(dt_7); """
+    qt_insert_into1 """ select count(*) from ${tableName} where to_date(dt_0) = to_date(dt_1); """
+    qt_insert_into2 """ select count(*) from ${tableName} where to_date(dt_2) = to_date(dt_3); """
+    qt_insert_into3 """ select count(*) from ${tableName} where to_date(dt_4) = to_date(dt_5); """
+    qt_insert_into4 """ select count(*) from ${tableName} where to_date(dt_6) = to_date(dt_7); """
 
     sql """select now()"""
 
-    // test stream load.
+    // test csv stream load.
     streamLoad {
         table "${tableName}"
 
@@ -62,10 +78,45 @@ suite("test_current_timestamp") {
 
         time 10000 // limit inflight 10s
     }
-    qt_stream_load """ select count(*) from ${tableName} where id > 4 and to_date(dt_0) = to_date(dt_1); """
-    qt_stream_load """ select count(*) from ${tableName} where id > 4 and to_date(dt_2) = to_date(dt_3); """
-    qt_stream_load """ select count(*) from ${tableName} where id > 4 and to_date(dt_4) = to_date(dt_5); """
-    qt_stream_load """ select count(*) from ${tableName} where id > 4 and to_date(dt_6) = to_date(dt_7); """
+    qt_stream_load_csv1 """ select count(*) from ${tableName} where id > 4 and to_date(dt_0) = to_date(dt_1); """
+    qt_stream_load_csv2 """ select count(*) from ${tableName} where id > 4 and to_date(dt_2) = to_date(dt_3); """
+    qt_stream_load_csv3 """ select count(*) from ${tableName} where id > 4 and to_date(dt_4) = to_date(dt_5); """
+    qt_stream_load_csv4 """ select count(*) from ${tableName} where id > 4 and to_date(dt_6) = to_date(dt_7); """
 
     sql """select now()"""
+
+    // test json stream load
+    streamLoad {
+        table "${tableName}"
+
+        set 'columns', 'id, name, dt_0 = current_timestamp(), dt_2 = current_timestamp(), dt_4 = current_timestamp(), dt_6 = current_timestamp()'
+        set 'format', 'json'
+        set 'read_json_by_line', 'true'
+        set 'strip_outer_array', 'false'
+
+        file 'test_current_timestamp_streamload.json'
+
+        time 10000 // limit inflight 10s
+    }
+    qt_stream_load_json1 """ select count(*) from ${tableName} where id > 8 and to_date(dt_0) = to_date(dt_1); """
+    qt_stream_load_json2 """ select count(*) from ${tableName} where id > 8 and to_date(dt_2) = to_date(dt_3); """
+    qt_stream_load_json3 """ select count(*) from ${tableName} where id > 8 and to_date(dt_4) = to_date(dt_5); """
+    qt_stream_load_json4 """ select count(*) from ${tableName} where id > 8 and to_date(dt_6) = to_date(dt_7); """
+
+    // test json stream load 2
+    // if the columns param lists a column that does not exist in the json file,
+    // stream load sets that column to NULL, as dt_1 shows here
+    streamLoad {
+        table "${tableName2}"
+
+        set 'columns', 'id, name, dt_1'
+        set 'format', 'json'
+        set 'read_json_by_line', 'true'
+        set 'strip_outer_array', 'false'
+
+        file 'test_current_timestamp_streamload.json'
+        time 10000 // limit inflight 10s
+    }
+    qt_stream_load_json5 """ select id, name, dt_1  from ${tableName2} order by id; """
+    qt_stream_load_json6 """ select count(*) from ${tableName2} where dt_2 is not NULL; """
  }
diff --git a/regression-test/suites/load_p0/stream_load/test_json_load.groovy b/regression-test/suites/load_p0/stream_load/test_json_load.groovy
index f6326129dc..35bb9ffa5d 100644
--- a/regression-test/suites/load_p0/stream_load/test_json_load.groovy
+++ b/regression-test/suites/load_p0/stream_load/test_json_load.groovy
@@ -531,7 +531,21 @@ suite("test_json_load", "p0") {
     } finally {
         try_sql("DROP TABLE IF EXISTS ${testTable}")
     }
-    
+
+    // case19: test case sensitive json load
+    try {
+        sql "DROP TABLE IF EXISTS ${testTable}"
+
+        create_test_table1.call(testTable)
+        load_json_data.call('test_json_load_case19', 'false', 'true', 'json', 'Id, cIty, CodE', '',
+                '', '', '', 'case_sensitive_json.json', false, 2)
+        sql "sync"
+        qt_select19 "select * from ${testTable} order by id"
+
+    } finally {
+        try_sql("DROP TABLE IF EXISTS ${testTable}")
+    }
+
     // if 'enableHdfs' in regression-conf.groovy has been set to true,
     // the test will run these case as below.
     if (enableHdfs()) {

