This is an automated email from the ASF dual-hosted git repository.

dataroaring pushed a commit to branch master
in repository https://gitbox.apache.org/repos/asf/doris.git


The following commit(s) were added to refs/heads/master by this push:
     new bb8fe3d0237 [fix](s3-load) refactor s3 parallelism case and move it 
to p2 (#35629)
bb8fe3d0237 is described below

commit bb8fe3d02379b566b8b6295eab20e5f7c07b2c3f
Author: HHoflittlefish777 <[email protected]>
AuthorDate: Thu May 30 13:48:12 2024 +0800

    [fix](s3-load) refactor s3 parallelism case and move it to p2 (#35629)
---
 .../broker_load/test_s3_load_parallelism.out       |  7 --
 .../test_s3_load_with_load_parallelism.out         |  6 ++
 .../broker_load/test_s3_load_parallelism.groovy    | 99 ----------------------
 .../test_s3_load_with_load_parallelism.groovy      | 37 +++++---
 4 files changed, 33 insertions(+), 116 deletions(-)

diff --git 
a/regression-test/data/load_p1/broker_load/test_s3_load_parallelism.out 
b/regression-test/data/load_p1/broker_load/test_s3_load_parallelism.out
deleted file mode 100644
index 8b3c5ef38eb..00000000000
--- a/regression-test/data/load_p1/broker_load/test_s3_load_parallelism.out
+++ /dev/null
@@ -1,7 +0,0 @@
--- This file is automatically generated. You should know what you did if you 
want to edit this
--- !paral_load_csv --
-6000000
-
--- !paral_load_json --
-3000000
-
diff --git 
a/regression-test/data/load_p0/broker_load/test_s3_load_with_load_parallelism.out
 
b/regression-test/data/load_p2/broker_load/test_s3_load_with_load_parallelism.out
similarity index 74%
rename from 
regression-test/data/load_p0/broker_load/test_s3_load_with_load_parallelism.out
rename to 
regression-test/data/load_p2/broker_load/test_s3_load_with_load_parallelism.out
index 7f63e400c3d..d501e5eedf2 100644
--- 
a/regression-test/data/load_p0/broker_load/test_s3_load_with_load_parallelism.out
+++ 
b/regression-test/data/load_p2/broker_load/test_s3_load_with_load_parallelism.out
@@ -2,3 +2,9 @@
 -- !sql --
 5000000
 
+-- !sql --
+6000000
+
+-- !sql --
+3000000
+
diff --git 
a/regression-test/suites/load_p1/broker_load/test_s3_load_parallelism.groovy 
b/regression-test/suites/load_p1/broker_load/test_s3_load_parallelism.groovy
deleted file mode 100644
index 024ba497bc9..00000000000
--- a/regression-test/suites/load_p1/broker_load/test_s3_load_parallelism.groovy
+++ /dev/null
@@ -1,99 +0,0 @@
-// Licensed to the Apache Software Foundation (ASF) under one
-// or more contributor license agreements.  See the NOTICE file
-// distributed with this work for additional information
-// regarding copyright ownership.  The ASF licenses this file
-// to you under the Apache License, Version 2.0 (the
-// "License"); you may not use this file except in compliance
-// with the License.  You may obtain a copy of the License at
-//
-//   http://www.apache.org/licenses/LICENSE-2.0
-//
-// Unless required by applicable law or agreed to in writing,
-// software distributed under the License is distributed on an
-// "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
-// KIND, either express or implied.  See the License for the
-// specific language governing permissions and limitations
-// under the License.
-
-suite("test_s3_load_parallelism", "p1") {
-
-    // Load file from S3 with load_parallelism specified
-    def s3load_paral_wait = {tbl, fmt, path, paral ->
-        String ak = getS3AK()
-        String sk = getS3SK()
-        String s3BucketName = getS3BucketName()
-        String s3Endpoint = getS3Endpoint()
-        String s3Region = getS3Region()
-        def load_label = "part_" + UUID.randomUUID().toString().replace("-", 
"0")
-        sql """
-            LOAD LABEL ${load_label} (
-                DATA INFILE("s3://${s3BucketName}/${path}")
-                INTO TABLE ${tbl}
-                COLUMNS TERMINATED BY ","
-                FORMAT AS "${fmt}"
-            )
-            WITH S3 (
-                "AWS_ACCESS_KEY" = "$ak",
-                "AWS_SECRET_KEY" = "$sk",
-                "AWS_ENDPOINT" = "${s3Endpoint}",
-                "AWS_REGION" = "${s3Region}"
-            )
-            PROPERTIES(
-                "load_parallelism" = "${paral}"
-            );
-        """
-        // Waiting for job finished or cancelled
-        def max_try_milli_secs = 600000
-        while (max_try_milli_secs > 0) {
-            String[][] result = sql """ show load where label="$load_label" 
order by createtime desc limit 1; """
-            if (result[0][2].equals("FINISHED")) {
-                logger.info("Load FINISHED " + load_label)
-                break;
-            }
-            if (result[0][2].equals("CANCELLED")) {
-                assertTrue(false, "load failed: $result")
-                break;
-            }
-            Thread.sleep(6000)
-            max_try_milli_secs -= 6000
-            if(max_try_milli_secs <= 0) {
-                assertTrue(1 == 2, "load Timeout: $load_label")
-            }
-        }
-    }
-
-    String enabled = context.config.otherConfigs.get("enableBrokerLoad")
-    if (enabled != null && enabled.equalsIgnoreCase("true")) {
-        def tableName = "paral_load"
-        try {
-            sql """drop table if exists ${tableName} force;"""
-            sql """
-                CREATE TABLE ${tableName} (
-                    id BIGINT NOT NULL,
-                    clientip VARCHAR(32),
-                    request VARCHAR(256),
-                    status INT,
-                    size INT
-                )
-                ENGINE=OLAP
-                DUPLICATE KEY(id)
-                DISTRIBUTED BY HASH(id) BUCKETS 3
-                PROPERTIES
-                (
-                    "replication_num" = "1"
-                );
-            """
-            // Parallelly load csv from S3
-            s3load_paral_wait.call(tableName, "CSV", 
"regression/load/data/test_load_parallelism.csv", 3)
-            qt_paral_load_csv """ select count(1) from ${tableName}; """
-
-            //Parallelly load json from S3
-            sql """truncate table ${tableName};"""
-            s3load_paral_wait.call(tableName, "JSON", 
"regression/load/data/test_load_parallelism.json", 3)
-            qt_paral_load_json """ select count(1) from ${tableName}; """
-        } finally {
-            sql """drop table if exists ${tableName} force;"""
-        }
-    }
-}
-
diff --git 
a/regression-test/suites/load_p0/broker_load/test_s3_load_with_load_parallelism.groovy
 
b/regression-test/suites/load_p2/broker_load/test_s3_load_with_load_parallelism.groovy
similarity index 87%
rename from 
regression-test/suites/load_p0/broker_load/test_s3_load_with_load_parallelism.groovy
rename to 
regression-test/suites/load_p2/broker_load/test_s3_load_with_load_parallelism.groovy
index b1d1782b501..c96a8948b1e 100644
--- 
a/regression-test/suites/load_p0/broker_load/test_s3_load_with_load_parallelism.groovy
+++ 
b/regression-test/suites/load_p2/broker_load/test_s3_load_with_load_parallelism.groovy
@@ -16,9 +16,10 @@
 // specific language governing permissions and limitations
 // under the License.
 
-suite("test_s3_load_with_load_parallelism", "load_p0") {
+suite("test_s3_load_with_load_parallelism", "p2") {
 
     def tableName = "test_load_parallelism"
+    def tableName1 = "paral_load"
 
     sql """ DROP TABLE IF EXISTS ${tableName} """
     sql """
@@ -38,19 +39,39 @@ suite("test_s3_load_with_load_parallelism", "load_p0") {
             PROPERTIES ( "replication_num" = "1" );
     """
 
+    sql """drop table if exists ${tableName1} force;"""
+    sql """
+        CREATE TABLE ${tableName1} (
+            id BIGINT NOT NULL,
+            clientip VARCHAR(32),
+            request VARCHAR(256),
+            status INT,
+            size INT
+        )
+        ENGINE=OLAP
+        DUPLICATE KEY(id)
+        DISTRIBUTED BY HASH(id) BUCKETS 3
+        PROPERTIES
+        (
+            "replication_num" = "1"
+        );
+    """
+
     def attributesList = [
 
     ]
 
-    // attributesList.add(new 
LoadAttributes("s3://doris-build-1308700295/regression/load/data/enclose_not_trim_quotes.csv",
-    //     "${tableName}", "", "COLUMNS TERMINATED BY \",\"", "FORMAT AS 
\"CSV\"", "(k1,k2,v1,v2,v3,v4)", 
-    //     "PROPERTIES (\"enclose\" = \"\\\"\", \"escape\" = 
\"\\\\\")").addProperties("trim_double_quotes", "false"))
-
     def bucket = getS3BucketName()
     
     attributesList.add(new 
LoadAttributes("s3://${bucket}/regression/segcompaction/segcompaction.orc",
         "${tableName}", "", "", "FORMAT AS \"ORC\"", "(col_0, col_1, col_2, 
col_3, col_4, col_5, col_6, col_7, col_8, col_9, col_10, col_11, col_12, 
col_13, col_14, col_15, col_16, col_17, col_18, col_19, col_20, col_21, col_22, 
col_23, col_24, col_25, col_26, col_27, col_28, col_29, col_30, col_31, col_32, 
col_33, col_34, col_35, col_36, col_37, col_38, col_39, col_40, col_41, col_42, 
col_43, col_44, col_45, col_46, col_47, col_48, col_49)", 
"").addProperties("load_parallelism", "3"))
 
+    attributesList.add(new 
LoadAttributes("s3://${bucket}/regression/load/data/test_load_parallelism.csv",
+        "${tableName1}", "", "COLUMNS TERMINATED BY \",\"", "FORMAT AS 
\"CSV\"", "", "").addProperties("load_parallelism", "3"))
+
+    attributesList.add(new 
LoadAttributes("s3://${bucket}/regression/load/data/test_load_parallelism.json",
+        "${tableName1}", "", "COLUMNS TERMINATED BY \",\"", "FORMAT AS 
\"JSON\"", "", "").addProperties("load_parallelism", "3"))
+
     def ak = getS3AK()
     def sk = getS3SK()
 
@@ -79,12 +100,8 @@ suite("test_s3_load_with_load_parallelism", "load_p0") {
            )
             ${prop}
             """
-     //   "AWS_ENDPOINT" = "cos.ap-beijing.myqcloud.com",
-    //   "AWS_ACCESS_KEY" = "AKIDd9RVMzIOI0V7Wlnbr9JG0WrhJk28zc2H",
-    //   "AWS_SECRET_KEY"="4uWxMhqnW3Plz97sPjqlSUXO1RhokRuO",
-    //   "AWS_REGION" = "ap-beijing"
  
-        def max_try_milli_secs = 600000
+        def max_try_milli_secs = 1200000
         while (max_try_milli_secs > 0) {
             String[][] result = sql """ show load where 
label="$attributes.label" order by createtime desc limit 1; """
 


---------------------------------------------------------------------
To unsubscribe, e-mail: [email protected]
For additional commands, e-mail: [email protected]

Reply via email to