This is an automated email from the ASF dual-hosted git repository.
dataroaring pushed a commit to branch master
in repository https://gitbox.apache.org/repos/asf/doris.git
The following commit(s) were added to refs/heads/master by this push:
new 4ea69ed3902 [regression test](broker load) add case for num_as_string (#27588)
4ea69ed3902 is described below
commit 4ea69ed3902fda1b8d433f136fa283bbcf4c71d0
Author: Guangdong Liu <[email protected]>
AuthorDate: Mon Nov 27 21:25:59 2023 +0800
[regression test](broker load) add case for num_as_string (#27588)
---
.../broker_load/broker_load_with_properties.json | 6 ++
.../broker_load/broker_load_without_seq.csv | 6 ++
.../test_broker_load_with_properties.out | 10 ++
.../test_broker_load_with_properties.groovy | 118 +++++++++++++++++++++
.../test_broker_load_without_seq.groovy | 101 ++++++++++++++++++
5 files changed, 241 insertions(+)
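For context, the new suites exercise the broker load "num_as_string" format property: when a JSON file is loaded with "num_as_string"="true", numeric JSON values are read as strings, which is intended to avoid precision loss for very large numbers. The snippet below is only an illustrative sketch of that usage and is not part of this commit; the database, table, path, and broker names are placeholders.

    // Hedged sketch (not in this commit): a standalone broker load that
    // reads a JSON file and parses its numbers as strings.
    sql """
        LOAD LABEL demo_db.demo_num_as_string (
            DATA INFILE("hdfs://namenode:8020/path/demo.json")
            INTO TABLE demo_table
            FORMAT AS "json"
            PROPERTIES ("num_as_string" = "true")
        )
        WITH BROKER "demo_broker" (
            "username" = "hdfs_user",
            "password" = "hdfs_passwd")
        PROPERTIES ("timeout" = "1200");
    """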
diff --git a/regression-test/data/load_p0/broker_load/broker_load_with_properties.json b/regression-test/data/load_p0/broker_load/broker_load_with_properties.json
new file mode 100644
index 00000000000..6d22ec54c5b
--- /dev/null
+++ b/regression-test/data/load_p0/broker_load/broker_load_with_properties.json
@@ -0,0 +1,6 @@
+{"user_id": 1, "date": "2020-08-22", "group_id": 1, "modify_date":
"2020-02-22", "keyword": "a"}
+{"user_id": 1, "date": "2020-03-22", "group_id": 1, "modify_date":
"2020-03-05", "keyword": "b"}
+{"user_id": 1, "date": "2020-04-22", "group_id": 1, "modify_date":
"2020-02-26", "keyword": "c"}
+{"user_id": 1, "date": "2020-05-22", "group_id": 1, "modify_date":
"2020-02-23", "keyword": "d"}
+{"user_id": 1, "date": "2020-06-22", "group_id": 1, "modify_date":
"2020-02-24", "keyword": "e"}
+{"user_id": 1, "date": "2020-07-22", "group_id": 1, "modify_date":
"2020-02-25", "keyword": "b"}
diff --git a/regression-test/data/load_p0/broker_load/broker_load_without_seq.csv b/regression-test/data/load_p0/broker_load/broker_load_without_seq.csv
new file mode 100644
index 00000000000..6717da56c7f
--- /dev/null
+++ b/regression-test/data/load_p0/broker_load/broker_load_without_seq.csv
@@ -0,0 +1,6 @@
+11001,2023-09-01,1,1,10
+11001,2023-09-01,2,1,10
+11001,2023-09-01,1,2,10
+11001,2023-09-01,2,2,10
+11001,2023-09-01,1,3,10
+11001,2023-09-01,2,3,10
\ No newline at end of file
diff --git a/regression-test/data/load_p0/broker_load/test_broker_load_with_properties.out b/regression-test/data/load_p0/broker_load/test_broker_load_with_properties.out
new file mode 100644
index 00000000000..2ca74837d98
--- /dev/null
+++ b/regression-test/data/load_p0/broker_load/test_broker_load_with_properties.out
@@ -0,0 +1,10 @@
+-- This file is automatically generated. You should know what you did if you want to edit this
+-- !select --
+1 2020-02-22 1 2020-02-21 a
+1 2020-03-22 1 2020-03-05 b
+1 2020-04-22 1 2020-02-26 c
+1 2020-05-22 1 2020-02-23 d
+1 2020-06-22 1 2020-02-24 e
+1 2020-07-22 1 2020-02-25 b
+1 2020-08-22 1 2020-02-22 a
+
diff --git a/regression-test/suites/load_p0/broker_load/test_broker_load_with_properties.groovy b/regression-test/suites/load_p0/broker_load/test_broker_load_with_properties.groovy
new file mode 100644
index 00000000000..852a3ef0ae7
--- /dev/null
+++ b/regression-test/suites/load_p0/broker_load/test_broker_load_with_properties.groovy
@@ -0,0 +1,118 @@
+// Licensed to the Apache Software Foundation (ASF) under one
+// or more contributor license agreements. See the NOTICE file
+// distributed with this work for additional information
+// regarding copyright ownership. The ASF licenses this file
+// to you under the Apache License, Version 2.0 (the
+// "License"); you may not use this file except in compliance
+// with the License. You may obtain a copy of the License at
+//
+// http://www.apache.org/licenses/LICENSE-2.0
+//
+// Unless required by applicable law or agreed to in writing,
+// software distributed under the License is distributed on an
+// "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
+// KIND, either express or implied. See the License for the
+// specific language governing permissions and limitations
+// under the License.
+
+suite("test_broker_load_with_properties", "load_p0") {
+ // define a sql table
+ def testTable = "tbl_test_load_with_properties"
+
+ def create_test_table = {testTablex ->
+ def result1 = sql """
+ CREATE TABLE IF NOT EXISTS ${testTable} (
+ user_id bigint,
+ date date,
+ group_id bigint,
+ modify_date date,
+ keyword VARCHAR(128)
+ ) ENGINE=OLAP
+ UNIQUE KEY(user_id, date, group_id)
+ COMMENT 'OLAP'
+ DISTRIBUTED BY HASH (user_id) BUCKETS 32
+ PROPERTIES (
+ "replication_num" = "1",
+ "in_memory" = "false"
+ );
+ """
+
+ // DDL/DML return 1 row and 1 column; the only value is the updated row count
+ log.info("result1: ${result1}")
+ assertTrue(result1.size() == 1)
+ assertTrue(result1[0].size() == 1)
+ assertTrue(result1[0][0] == 0, "Create table should update 0 rows")
+
+ // insert 1 row to check whether the table is ok
+ def result2 = sql """ INSERT INTO ${testTable} VALUES
+ (1,'2020-02-22',1,'2020-02-21','a')
+ """
+ assertTrue(result2.size() == 1)
+ assertTrue(result2[0].size() == 1)
+ assertTrue(result2[0][0] == 1, "Insert should update 1 rows")
+ }
+
+ def load_from_hdfs_norm = {testTablex, label, hdfsFilePath, format, brokerName, hdfsUser, hdfsPasswd ->
+ def result1= sql """
+ LOAD LABEL ${label} (
+ DATA INFILE("${hdfsFilePath}")
+ INTO TABLE ${testTablex}
+ FORMAT as "${format}"
+ PROPERTIES ("num_as_string"="true")
+ )
+ with BROKER "${brokerName}" (
+ "username"="${hdfsUser}",
+ "password"="${hdfsPasswd}")
+ PROPERTIES (
+ "timeout"="1200",
+ "max_filter_ratio"="0.1");
+ """
+ log.info("result1: ${result1}")
+ assertTrue(result1.size() == 1)
+ assertTrue(result1[0].size() == 1)
+ assertTrue(result1[0][0] == 0, "Query OK, 0 rows affected")
+ }
+
+ def check_load_result = {checklabel, testTablex ->
+ max_try_milli_secs = 10000
+ while(max_try_milli_secs) {
+ result = sql "show load where label = '${checklabel}'"
+ log.info("result: ${result}")
+ if(result[0][2] == "FINISHED") {
+ //sql "sync"
+ qt_select "select * from ${testTablex} order by user_id, date, group_id desc"
+ break
+ } else {
+ sleep(1000) // wait 1 second every time
+ max_try_milli_secs -= 1000
+ if(max_try_milli_secs <= 0) {
+ assertEquals(1, 2)
+ }
+ }
+ }
+ }
+
+ // if 'enableHdfs' in regression-conf.groovy has been set to true,
+ // the test will run the cases below.
+ if (enableHdfs()) {
+ brokerName = getBrokerName()
+ hdfsUser = getHdfsUser()
+ hdfsPasswd = getHdfsPasswd()
+ def hdfs_csv_file_path = uploadToHdfs "load_p0/broker_load/broker_load_with_properties.json"
+ //def hdfs_csv_file_path = "hdfs://ip:port/testfile"
+
+ // case1: import json data from hdfs without where
+ try {
+ sql "DROP TABLE IF EXISTS ${testTable}"
+ create_test_table.call(testTable)
+
+ def test_load_label = UUID.randomUUID().toString().replaceAll("-", "")
+ load_from_hdfs_norm.call(testTable, test_load_label, hdfs_csv_file_path, "json",
+ brokerName, hdfsUser, hdfsPasswd)
+
+ check_load_result.call(test_load_label, testTable)
+ } finally {
+ try_sql("DROP TABLE IF EXISTS ${testTable}")
+ }
+ }
+}
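The check_load_result closure above reflects that broker load is asynchronous: the LOAD statement only submits a job, so the suite polls "show load" until the job state (column index 2 of the result) reaches FINISHED before running the qt_select verification. The helper below is a hedged, equivalent sketch of that polling with an explicit deadline; the name awaitLoadFinished and the failure message are illustrative and not part of this commit.

    // Hedged sketch (not in this commit): poll SHOW LOAD until the job
    // finishes or the deadline passes, then fail the test explicitly.
    def awaitLoadFinished = { checkLabel, maxWaitMillis = 10000 ->
        def deadline = System.currentTimeMillis() + maxWaitMillis
        while (System.currentTimeMillis() < deadline) {
            def rows = sql "show load where label = '${checkLabel}'"
            if (rows && rows[0][2] == "FINISHED") {
                return
            }
            sleep(1000) // broker load is asynchronous; poll once per second
        }
        assertTrue(false, "load ${checkLabel} did not finish within ${maxWaitMillis} ms")
    }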
diff --git a/regression-test/suites/load_p0/broker_load/test_broker_load_without_seq.groovy b/regression-test/suites/load_p0/broker_load/test_broker_load_without_seq.groovy
new file mode 100644
index 00000000000..9d34c0c0978
--- /dev/null
+++ b/regression-test/suites/load_p0/broker_load/test_broker_load_without_seq.groovy
@@ -0,0 +1,101 @@
+// Licensed to the Apache Software Foundation (ASF) under one
+// or more contributor license agreements. See the NOTICE file
+// distributed with this work for additional information
+// regarding copyright ownership. The ASF licenses this file
+// to you under the Apache License, Version 2.0 (the
+// "License"); you may not use this file except in compliance
+// with the License. You may obtain a copy of the License at
+//
+// http://www.apache.org/licenses/LICENSE-2.0
+//
+// Unless required by applicable law or agreed to in writing,
+// software distributed under the License is distributed on an
+// "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
+// KIND, either express or implied. See the License for the
+// specific language governing permissions and limitations
+// under the License.
+
+suite("test_broker_load_without_seq", "load_p0") {
+ // define a sql table
+ def testTable = "tbl_test_broker_load_without_seq"
+
+ def create_test_table = {testTablex ->
+ def result1 = sql """
+ CREATE TABLE IF NOT EXISTS ${testTable} (
+ user_id bigint,
+ date date,
+ group_id bigint,
+ modify_date date,
+ keyword VARCHAR(128)
+ ) ENGINE=OLAP
+ UNIQUE KEY(user_id, date, group_id)
+ COMMENT 'OLAP'
+ DISTRIBUTED BY HASH (user_id) BUCKETS 32
+ PROPERTIES (
+ "replication_num" = "1",
+ "in_memory" = "false"
+ );
+ """
+
+ // DDL/DML return 1 row and 1 column; the only value is the updated row count
+ log.info("result1: ${result1}")
+ assertTrue(result1.size() == 1)
+ assertTrue(result1[0].size() == 1)
+ assertTrue(result1[0][0] == 0, "Create table should update 0 rows")
+
+ // insert 1 row to check whether the table is ok
+ def result2 = sql """ INSERT INTO ${testTable} VALUES
+ (1,'2020-02-22',1,'2020-02-21','a')
+ """
+ assertTrue(result2.size() == 1)
+ assertTrue(result2[0].size() == 1)
+ assertTrue(result2[0][0] == 1, "Insert should update 1 rows")
+ }
+
+ def load_from_hdfs_norm = {testTablex, label, hdfsFilePath, format, brokerName, hdfsUser, hdfsPasswd ->
+ try {
+ sql """
+ LOAD LABEL ${label} (
+ DATA INFILE("${hdfsFilePath}")
+ INTO TABLE ${testTablex}
+ COLUMNS TERMINATED BY ","
+ FORMAT as "${format}"
+ ORDER BY modify_date
+ )
+ with BROKER "${brokerName}" (
+ "username"="${hdfsUser}",
+ "password"="${hdfsPasswd}")
+ PROPERTIES (
+ "timeout"="1200",
+ "max_filter_ratio"="0.1");
+ """
+ } catch (Exception e) {
+ assertEquals(e.toString().contains("There is no sequence column in the table tbl_test_broker_load_without_seq"), true)
+ }
+
+
+ }
+
+ // if 'enableHdfs' in regression-conf.groovy has been set to true,
+ // the test will run the cases below.
+ if (enableHdfs()) {
+ brokerName = getBrokerName()
+ hdfsUser = getHdfsUser()
+ hdfsPasswd = getHdfsPasswd()
+ def hdfs_csv_file_path = uploadToHdfs "load_p0/broker_load/broker_load_without_seq.csv"
+ //def hdfs_csv_file_path = "hdfs://ip:port/testfile"
+
+ // case1: import csv data from hdfs without where
+ try {
+ sql "DROP TABLE IF EXISTS ${testTable}"
+ create_test_table.call(testTable)
+
+ def test_load_label = UUID.randomUUID().toString().replaceAll("-", "")
+ load_from_hdfs_norm.call(testTable, test_load_label, hdfs_csv_file_path, "csv",
+ brokerName, hdfsUser, hdfsPasswd)
+
+ } finally {
+ try_sql("DROP TABLE IF EXISTS ${testTable}")
+ }
+ }
+}
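For context on the expected error in test_broker_load_without_seq: ORDER BY in a broker load is only accepted against a UNIQUE KEY table that was created with a sequence column, and the table in this suite defines none, so the frontend is expected to reject the job with "There is no sequence column in the table ...". The DDL below is a hedged sketch of how such a table could declare a sequence column; the table name is illustrative, and the "function_column.sequence_col" property name is taken from Doris's sequence-column documentation and should be treated as an assumption here, not as part of this commit.

    // Hedged sketch (not in this commit): a UNIQUE KEY table whose sequence
    // column maps to modify_date, so ORDER BY modify_date would be accepted.
    sql """
        CREATE TABLE IF NOT EXISTS demo_with_seq (
            user_id bigint,
            date date,
            group_id bigint,
            modify_date date,
            keyword VARCHAR(128)
        ) ENGINE=OLAP
        UNIQUE KEY(user_id, date, group_id)
        DISTRIBUTED BY HASH (user_id) BUCKETS 32
        PROPERTIES (
            "replication_num" = "1",
            "function_column.sequence_col" = "modify_date"
        );
    """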
---------------------------------------------------------------------
To unsubscribe, e-mail: [email protected]
For additional commands, e-mail: [email protected]