This is an automated email from the ASF dual-hosted git repository.

kxiao pushed a commit to branch branch-2.0
in repository https://gitbox.apache.org/repos/asf/doris.git

commit d09a9a35682d275dd52780953f292bd73d007c02
Author: XuJianxu <[email protected]>
AuthorDate: Tue Sep 12 18:00:01 2023 +0800

    [test](regression) add routine load cases (#24194)
    
    add routine load cases
---
 regression-test/conf/regression-conf.groovy        |  6 +++
 .../load_p0/routine_load/test_routine_load.groovy  | 44 +++++++++++++++++++++-
 2 files changed, 48 insertions(+), 2 deletions(-)

diff --git a/regression-test/conf/regression-conf.groovy b/regression-test/conf/regression-conf.groovy
index 89ddbf3065..1501b72bcf 100644
--- a/regression-test/conf/regression-conf.groovy
+++ b/regression-test/conf/regression-conf.groovy
@@ -94,6 +94,12 @@ enableHiveTest=false
 hms_port=9183
 hdfs_port=8120
 
+// Kafka test config
+// to enable the Kafka test, first start the Kafka container
+// See `docker/thirdparties/start-thirdparties-docker.sh`
+enableKafkaTest=false
+kafka_port=19193
+
 // elasticsearch catalog test config
 // See `docker/thirdparties/start-thirdparties-docker.sh`
 enableEsTest=false
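
To exercise the new case locally, the comment above points at `docker/thirdparties/start-thirdparties-docker.sh`; once that container is up, a minimal override in this same config file is enough (a sketch, using the default port from this commit):

    // local regression-conf.groovy override (sketch); the port must match the started container
    enableKafkaTest=true
    kafka_port=19193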
diff --git a/regression-test/suites/load_p0/routine_load/test_routine_load.groovy b/regression-test/suites/load_p0/routine_load/test_routine_load.groovy
index 8362854ab3..751a8d0280 100644
--- a/regression-test/suites/load_p0/routine_load/test_routine_load.groovy
+++ b/regression-test/suites/load_p0/routine_load/test_routine_load.groovy
@@ -15,7 +15,47 @@
 // specific language governing permissions and limitations
 // under the License.
 
-suite("test_routine_load") {
+suite("test_routine_load","p0,external,external_docker,external_docker_routine_load") {
     // todo: test routine load, need kafka
-    sql "show routine load"
+    // sql "show routine load"
+    // define a sql table
+    def testTable = "tbl_test_routine_load_basic"
+
+    def result1 = sql """
+        CREATE TABLE IF NOT EXISTS ${testTable} (
+        `k1` int(20) NULL COMMENT "",
+        `k2` int(20) NULL COMMENT "",
+        `k3` int(20) NULL COMMENT ""
+        ) ENGINE=OLAP
+        DUPLICATE KEY(`k1`)
+        DISTRIBUTED BY HASH(`k1`) BUCKETS 1
+        PROPERTIES (
+        "replication_allocation" = "tag.location.default: 1",
+        "storage_format" = "V2"
+        )
+    """
+
+    String enabled = context.config.otherConfigs.get("enableKafkaTest")
+    if (enabled != null && enabled.equalsIgnoreCase("true")) {
+        String kafka_port = context.config.otherConfigs.get("kafka_port")
+        String externalEnvIp = context.config.otherConfigs.get("externalEnvIp")
+
+        def topic1 = "test"
+        sql """
+            CREATE ROUTINE LOAD ${topic1} ON ${testTable}
+            COLUMNS TERMINATED BY ","
+            PROPERTIES
+            (
+            "max_batch_interval" = "5",
+            "max_batch_rows" = "200000",
+            "max_batch_size" = "209715200"
+            )
+            FROM KAFKA
+            (
+            "kafka_broker_list" = "${externalEnvIp}:${kafka_port}",
+            "kafka_topic" = "${topic1}",
+            "property.kafka_default_offsets" = "OFFSET_BEGINNING"
+            );
+        """
+    }
 }
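
The new branch only creates the routine load job; below is a minimal sketch of a follow-up check that could sit inside the same `if` block, assuming the regression framework has the kafka-clients library on its classpath and that a fixed sleep is acceptable (every name not already in the diff is illustrative):

    // push a few CSV rows into the topic; fully qualified class names avoid extra imports
    def props = new Properties()
    props.put("bootstrap.servers", "${externalEnvIp}:${kafka_port}".toString())
    props.put("key.serializer", "org.apache.kafka.common.serialization.StringSerializer")
    props.put("value.serializer", "org.apache.kafka.common.serialization.StringSerializer")
    def producer = new org.apache.kafka.clients.producer.KafkaProducer<String, String>(props)
    [[1, 2, 3], [4, 5, 6]].each { row ->
        producer.send(new org.apache.kafka.clients.producer.ProducerRecord<String, String>(topic1, row.join(",")))
    }
    producer.flush()
    producer.close()

    // max_batch_interval is 5s, so a short wait should let the task consume the rows
    sleep(30000)
    def count = sql "SELECT COUNT(*) FROM ${testTable}"
    assert count[0][0] >= 2

A more robust version would poll SHOW ROUTINE LOAD FOR ${topic1} until the job state is RUNNING instead of sleeping, and stop the job with STOP ROUTINE LOAD once the check is done.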

