This is an automated email from the ASF dual-hosted git repository.
liaoxin pushed a commit to branch master
in repository https://gitbox.apache.org/repos/asf/doris.git
The following commit(s) were added to refs/heads/master by this push:
new d178a65f5d8 [test](cast) add test for stream load cast (#33189)
d178a65f5d8 is described below
commit d178a65f5d8834777ad895280851b6299c664a68
Author: HHoflittlefish777 <[email protected]>
AuthorDate: Wed Apr 3 21:04:44 2024 +0800
[test](cast) add test for stream load cast (#33189)
---
.../data/load_p0/stream_load/test_cast1.csv | 1 +
.../data/load_p0/stream_load/test_cast1.json | 1 +
.../data/load_p0/stream_load/test_cast2.csv | 1 +
.../data/load_p0/stream_load/test_cast2.json | 1 +
.../data/load_p0/stream_load/test_cast3.csv | 1 +
.../data/load_p0/stream_load/test_cast3.json | 1 +
.../load_p0/stream_load/test_stream_load_cast.out | 19 ++
.../stream_load/test_stream_load_cast.groovy | 336 +++++++++++++++++++++
8 files changed, 361 insertions(+)
diff --git a/regression-test/data/load_p0/stream_load/test_cast1.csv
b/regression-test/data/load_p0/stream_load/test_cast1.csv
new file mode 100644
index 00000000000..e7fe19bce20
--- /dev/null
+++ b/regression-test/data/load_p0/stream_load/test_cast1.csv
@@ -0,0 +1 @@
+3.12,3.12,3.12,3.12,3.12
\ No newline at end of file
diff --git a/regression-test/data/load_p0/stream_load/test_cast1.json
b/regression-test/data/load_p0/stream_load/test_cast1.json
new file mode 100644
index 00000000000..3c88572d34e
--- /dev/null
+++ b/regression-test/data/load_p0/stream_load/test_cast1.json
@@ -0,0 +1 @@
+{"k0": 3.12, "k1": 3.12, "k2": 3.12, "k3": 3.12, "k4": 3.12}
\ No newline at end of file
diff --git a/regression-test/data/load_p0/stream_load/test_cast2.csv
b/regression-test/data/load_p0/stream_load/test_cast2.csv
new file mode 100644
index 00000000000..dab93718a8a
--- /dev/null
+++ b/regression-test/data/load_p0/stream_load/test_cast2.csv
@@ -0,0 +1 @@
+"3.12","3.12","3.12","3.12","3.12","3.12","3.12","3.12","2024-4-2","2024-4-2","2024-4-2
17:00:00","2024-4-2 17:00:00"
\ No newline at end of file
diff --git a/regression-test/data/load_p0/stream_load/test_cast2.json
b/regression-test/data/load_p0/stream_load/test_cast2.json
new file mode 100644
index 00000000000..6ba01a409e6
--- /dev/null
+++ b/regression-test/data/load_p0/stream_load/test_cast2.json
@@ -0,0 +1 @@
+{"k0": "3.12", "k1": "3.12", "k2": "3.12", "k3": "3.12", "k4": "3.12","k5":
"3.12","k6": "3.12","k7": "3.12","k8": "2024-4-2","k9": "2024-4-2","k10":
"2024-4-2 17:00:00","k11": "2024-4-2 17:00:00"}
\ No newline at end of file
diff --git a/regression-test/data/load_p0/stream_load/test_cast3.csv
b/regression-test/data/load_p0/stream_load/test_cast3.csv
new file mode 100644
index 00000000000..f4e1ab1e6cb
--- /dev/null
+++ b/regression-test/data/load_p0/stream_load/test_cast3.csv
@@ -0,0 +1 @@
+2147483648,129,32768,9223372036854775808,2e+128,3.40282e+39,1.79769e+309,100000000.11111111111,2024-14-2,2024-4-222,2024-14-2
17:00:00,2024-4-2 27:00:00
\ No newline at end of file
diff --git a/regression-test/data/load_p0/stream_load/test_cast3.json
b/regression-test/data/load_p0/stream_load/test_cast3.json
new file mode 100644
index 00000000000..6f41ac81e3b
--- /dev/null
+++ b/regression-test/data/load_p0/stream_load/test_cast3.json
@@ -0,0 +1 @@
+{"k0": 2147483648, "k1": 128, "k2": 32768, "k3": 9223372036854775808, "k4":
2e+128,"k5": 3.40282e+39,"k6": 1.79769e+309,"k7": 100000000.11111111111,"k8":
"2024-14-2","k9": "2024-4-222","k10": "2024-14-2 17:00:00","k11": "2024-4-2
27:00:00"}
\ No newline at end of file
diff --git a/regression-test/data/load_p0/stream_load/test_stream_load_cast.out
b/regression-test/data/load_p0/stream_load/test_stream_load_cast.out
new file mode 100644
index 00000000000..23256d7aeb0
--- /dev/null
+++ b/regression-test/data/load_p0/stream_load/test_stream_load_cast.out
@@ -0,0 +1,19 @@
+-- This file is automatically generated. You should know what you did if you
want to edit this
+-- !sql1 --
+\N \N \N \N \N \N \N \N \N \N
\N \N
+
+-- !sql2 --
+\N \N \N \N \N \N \N \N \N \N
\N \N
+
+-- !sql3 --
+\N \N \N \N \N \N \N \N \N \N
\N \N
+
+-- !sql4 --
+\N \N \N \N \N 3.12 3.12 3.1 2024-04-02
2024-04-02 2024-04-02T17:00 2024-04-02T17:00
+
+-- !sql5 --
+\N \N \N \N \N \N \N 99999999.9 \N
\N \N \N
+
+-- !sql6 --
+\N \N \N \N \N \N \N 99999999.9 \N
\N \N \N
+
diff --git
a/regression-test/suites/load_p0/stream_load/test_stream_load_cast.groovy
b/regression-test/suites/load_p0/stream_load/test_stream_load_cast.groovy
new file mode 100644
index 00000000000..1f3c8e24f78
--- /dev/null
+++ b/regression-test/suites/load_p0/stream_load/test_stream_load_cast.groovy
@@ -0,0 +1,336 @@
+// Licensed to the Apache Software Foundation (ASF) under one
+// or more contributor license agreements. See the NOTICE file
+// distributed with this work for additional information
+// regarding copyright ownership. The ASF licenses this file
+// to you under the Apache License, Version 2.0 (the
+// "License"); you may not use this file except in compliance
+// with the License. You may obtain a copy of the License at
+//
+// http://www.apache.org/licenses/LICENSE-2.0
+//
+// Unless required by applicable law or agreed to in writing,
+// software distributed under the License is distributed on an
+// "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
+// KIND, either express or implied. See the License for the
+// specific language governing permissions and limitations
+// under the License.
+
+suite("test_stream_load_cast", "p0") {
+ def tableName = "test_stream_load_cast"
+ sql """ DROP TABLE IF EXISTS ${tableName} """
+ sql """
+ CREATE TABLE IF NOT EXISTS ${tableName} (
+ k0 INT NULL,
+ k1 TINYINT NULL,
+ k2 SMALLINT NULL,
+ k3 BIGINT NULL,
+ k4 LARGEINT NULL,
+ k5 FLOAT NULL,
+ k6 DOUBLE NULL,
+ k7 DECIMAL(9,1) NULL,
+ k8 DATE NULL,
+ k9 DATEV2 NULL,
+ k10 DATETIME NULL,
+ k11 DATETIMEV2 NULL,
+ )
+ DISTRIBUTED BY HASH(k1) BUCKETS 32
+ PROPERTIES (
+ "replication_num" = "1"
+ );
+ """
+
+ // test decimal cast to integer
+ streamLoad {
+ table "${tableName}"
+
+ set 'column_separator', ','
+ set 'columns', 'k0,k1,k2,k3,k4'
+ set 'strict_mode', 'false'
+
+ file 'test_cast1.csv'
+ time 10000 // limit inflight 10s
+
+ check { result, exception, startTime, endTime ->
+ if (exception != null) {
+ throw exception
+ }
+ log.info("Stream load result: ${result}".toString())
+ def json = parseJson(result)
+ assertEquals("success", json.Status.toLowerCase())
+ assertEquals(1, json.NumberTotalRows)
+ assertEquals(0, json.NumberFilteredRows)
+ }
+ }
+ sql "sync"
+ qt_sql1 "select * from ${tableName}"
+ sql "sync"
+ sql "truncate table ${tableName}"
+ sql "sync"
+
+ streamLoad {
+ table "${tableName}"
+
+ set 'column_separator', ','
+ set 'columns', 'k0,k1,k2,k3,k4'
+ set 'strict_mode', 'true'
+
+ file 'test_cast1.csv'
+ time 10000 // limit inflight 10s
+
+ check { result, exception, startTime, endTime ->
+ if (exception != null) {
+ throw exception
+ }
+ log.info("Stream load result: ${result}".toString())
+ def json = parseJson(result)
+ assertEquals("fail", json.Status.toLowerCase())
+ assertEquals(1, json.NumberTotalRows)
+ assertEquals(1, json.NumberFilteredRows)
+ }
+ }
+
+ streamLoad {
+ table "${tableName}"
+
+ set 'column_separator', ','
+ set 'columns', 'k0,k1,k2,k3,k4'
+ set 'strict_mode', 'false'
+ set 'format', 'json'
+
+ file 'test_cast1.json'
+ time 10000 // limit inflight 10s
+
+ check { result, exception, startTime, endTime ->
+ if (exception != null) {
+ throw exception
+ }
+ log.info("Stream load result: ${result}".toString())
+ def json = parseJson(result)
+ assertEquals("success", json.Status.toLowerCase())
+ assertEquals(1, json.NumberTotalRows)
+ assertEquals(0, json.NumberFilteredRows)
+ }
+ }
+ sql "sync"
+ qt_sql2 "select * from ${tableName}"
+ sql "sync"
+ sql "truncate table ${tableName}"
+ sql "sync"
+
+ streamLoad {
+ table "${tableName}"
+
+ set 'column_separator', ','
+ set 'columns', 'k0,k1,k2,k3,k4'
+ set 'strict_mode', 'true'
+ set 'format', 'json'
+
+ file 'test_cast1.json'
+ time 10000 // limit inflight 10s
+
+ check { result, exception, startTime, endTime ->
+ if (exception != null) {
+ throw exception
+ }
+ log.info("Stream load result: ${result}".toString())
+ def json = parseJson(result)
+ assertEquals("fail", json.Status.toLowerCase())
+ assertEquals(1, json.NumberTotalRows)
+ assertEquals(1, json.NumberFilteredRows)
+ }
+ }
+
+ // test invalid values
+ streamLoad {
+ table "${tableName}"
+
+ set 'column_separator', ','
+ set 'strict_mode', 'false'
+
+ file 'test_cast2.csv'
+ time 10000 // limit inflight 10s
+
+ check { result, exception, startTime, endTime ->
+ if (exception != null) {
+ throw exception
+ }
+ log.info("Stream load result: ${result}".toString())
+ def json = parseJson(result)
+ assertEquals("success", json.Status.toLowerCase())
+ assertEquals(1, json.NumberTotalRows)
+ assertEquals(0, json.NumberFilteredRows)
+ }
+ }
+ sql "sync"
+ qt_sql3 "select * from ${tableName}"
+ sql "sync"
+ sql "truncate table ${tableName}"
+ sql "sync"
+
+ streamLoad {
+ table "${tableName}"
+
+ set 'column_separator', ','
+ set 'strict_mode', 'true'
+
+ file 'test_cast2.csv'
+ time 10000 // limit inflight 10s
+
+ check { result, exception, startTime, endTime ->
+ if (exception != null) {
+ throw exception
+ }
+ log.info("Stream load result: ${result}".toString())
+ def json = parseJson(result)
+ assertEquals("fail", json.Status.toLowerCase())
+ assertEquals(1, json.NumberTotalRows)
+ assertEquals(1, json.NumberFilteredRows)
+ }
+ }
+
+ streamLoad {
+ table "${tableName}"
+
+ set 'column_separator', ','
+ set 'strict_mode', 'false'
+ set 'format', 'json'
+
+ file 'test_cast2.json'
+ time 10000 // limit inflight 10s
+
+ check { result, exception, startTime, endTime ->
+ if (exception != null) {
+ throw exception
+ }
+ log.info("Stream load result: ${result}".toString())
+ def json = parseJson(result)
+ assertEquals("success", json.Status.toLowerCase())
+ assertEquals(1, json.NumberTotalRows)
+ assertEquals(0, json.NumberFilteredRows)
+ }
+ }
+ sql "sync"
+ qt_sql4 "select * from ${tableName}"
+ sql "sync"
+ sql "truncate table ${tableName}"
+ sql "sync"
+
+ streamLoad {
+ table "${tableName}"
+
+ set 'column_separator', ','
+ set 'strict_mode', 'true'
+ set 'format', 'json'
+
+ file 'test_cast2.json'
+ time 10000 // limit inflight 10s
+
+ check { result, exception, startTime, endTime ->
+ if (exception != null) {
+ throw exception
+ }
+ log.info("Stream load result: ${result}".toString())
+ def json = parseJson(result)
+ assertEquals("fail", json.Status.toLowerCase())
+ assertEquals(1, json.NumberTotalRows)
+ assertEquals(1, json.NumberFilteredRows)
+ }
+ }
+
+ // test over limit
+ streamLoad {
+ table "${tableName}"
+
+ set 'column_separator', ','
+ set 'strict_mode', 'false'
+
+ file 'test_cast3.csv'
+ time 10000 // limit inflight 10s
+
+ check { result, exception, startTime, endTime ->
+ if (exception != null) {
+ throw exception
+ }
+ log.info("Stream load result: ${result}".toString())
+ def json = parseJson(result)
+ assertEquals("success", json.Status.toLowerCase())
+ assertEquals(1, json.NumberTotalRows)
+ assertEquals(0, json.NumberFilteredRows)
+ }
+ }
+ sql "sync"
+ qt_sql5 "select * from ${tableName}"
+ sql "sync"
+ sql "truncate table ${tableName}"
+ sql "sync"
+
+ streamLoad {
+ table "${tableName}"
+
+ set 'column_separator', ','
+ set 'strict_mode', 'true'
+
+ file 'test_cast3.csv'
+ time 10000 // limit inflight 10s
+
+ check { result, exception, startTime, endTime ->
+ if (exception != null) {
+ throw exception
+ }
+ log.info("Stream load result: ${result}".toString())
+ def json = parseJson(result)
+ assertEquals("fail", json.Status.toLowerCase())
+ assertEquals(1, json.NumberTotalRows)
+ assertEquals(1, json.NumberFilteredRows)
+ }
+ }
+
+ streamLoad {
+ table "${tableName}"
+
+ set 'column_separator', ','
+ set 'strict_mode', 'false'
+ set 'format', 'json'
+
+ file 'test_cast3.json'
+ time 10000 // limit inflight 10s
+
+ check { result, exception, startTime, endTime ->
+ if (exception != null) {
+ throw exception
+ }
+ log.info("Stream load result: ${result}".toString())
+ def json = parseJson(result)
+ assertEquals("success", json.Status.toLowerCase())
+ assertEquals(1, json.NumberTotalRows)
+ assertEquals(0, json.NumberFilteredRows)
+ }
+ }
+ sql "sync"
+ qt_sql6 "select * from ${tableName}"
+ sql "sync"
+ sql "truncate table ${tableName}"
+ sql "sync"
+
+ streamLoad {
+ table "${tableName}"
+
+ set 'column_separator', ','
+ set 'strict_mode', 'true'
+ set 'format', 'json'
+
+ file 'test_cast3.json'
+ time 10000 // limit inflight 10s
+
+ check { result, exception, startTime, endTime ->
+ if (exception != null) {
+ throw exception
+ }
+ log.info("Stream load result: ${result}".toString())
+ def json = parseJson(result)
+ assertEquals("fail", json.Status.toLowerCase())
+ assertEquals(1, json.NumberTotalRows)
+ assertEquals(1, json.NumberFilteredRows)
+ }
+ }
+}
\ No newline at end of file
---------------------------------------------------------------------
To unsubscribe, e-mail: [email protected]
For additional commands, e-mail: [email protected]