This is an automated email from the ASF dual-hosted git repository.

dataroaring pushed a commit to branch branch-3.0
in repository https://gitbox.apache.org/repos/asf/doris.git


The following commit(s) were added to refs/heads/branch-3.0 by this push:
     new 9971c93605e [ci](branch-3.0) support run cloud_p* and performance (#38696)
9971c93605e is described below

commit 9971c93605eec2972116fa12a2cd9dcbf01d35c4
Author: Dongyang Li <[email protected]>
AuthorDate: Sun Aug 4 10:55:28 2024 +0800

    [ci](branch-3.0) support run cloud_p* and performance (#38696)
    
    ## Proposed changes
    
    Issue Number: close #xxx
    
    <!--Describe your changes.-->
    
    ---------
    
    Co-authored-by: stephen <[email protected]>
---
 regression-test/conf/regression-conf.groovy        | 16 ++--
 .../org/apache/doris/regression/Config.groovy      | 92 +++++++++++++++++++++-
 .../org/apache/doris/regression/suite/Suite.groovy |  2 +-
 .../cloud_p0/conf/regression-conf-custom.groovy    |  2 +-
 regression-test/pipeline/cloud_p0/deploy.sh        |  4 +
 regression-test/pipeline/cloud_p0/prepare.sh       | 26 +++---
 regression-test/pipeline/cloud_p0/run.sh           |  6 +-
 .../cloud_p1/conf/regression-conf-custom.groovy    |  2 +-
 regression-test/pipeline/common/doris-utils.sh     | 81 ++++++++++++++++++-
 .../pipeline/external/conf/regression-conf.groovy  |  5 +-
 .../pipeline/p0/conf/regression-conf.groovy        |  8 +-
 .../pipeline/p1/conf/regression-conf.groovy        |  5 +-
 regression-test/pipeline/performance/compile.sh    |  4 +-
 regression-test/pipeline/performance/deploy.sh     |  4 +
 regression-test/pipeline/performance/prepare.sh    |  8 +-
 .../spark_connector/spark_connector.groovy         |  3 +-
 ...test_export_table_with_materialized_view.groovy |  2 +-
 .../hive/test_trino_different_parquet_types.groovy | 20 ++---
 .../hive/test_trino_hive_orc.groovy                | 20 ++---
 .../hive/test_trino_hive_other.groovy              | 20 ++---
 .../hive/test_trino_hive_parquet.groovy            | 22 +++---
 .../hive/test_trino_hive_schema_evolution.groovy   | 23 +++---
 .../hive/test_trino_hive_serde_prop.groovy         | 22 +++---
 .../hive/test_trino_hive_tablesample_p0.groovy     | 20 ++---
 .../hive/test_trino_hive_tpch_sf1_orc.groovy       | 20 ++---
 .../hive/test_trino_hive_tpch_sf1_parquet.groovy   | 20 ++---
 .../test_trino_prepare_hive_data_in_case.groovy    | 22 +++---
 .../kafka/test_trino_kafka_base.groovy             | 23 +++---
 .../trino_connector/test_plugins_download.groovy   | 21 ++---
 .../suites/github_events_p2/load.groovy            |  2 +-
 .../suites/variant_log_data_p2/load.groovy         |  6 +-
 regression-test/suites/variant_p0/load.groovy      |  4 +-
 32 files changed, 357 insertions(+), 178 deletions(-)

diff --git a/regression-test/conf/regression-conf.groovy b/regression-test/conf/regression-conf.groovy
index 527b0231394..08d03632c37 100644
--- a/regression-test/conf/regression-conf.groovy
+++ b/regression-test/conf/regression-conf.groovy
@@ -104,8 +104,16 @@ brokerName = "broker_name"
 
 // broker load test config
 enableBrokerLoad=true
-ak=""
-sk=""
+
+// for s3 related cases, "aliyun" or "aliyun-internal" or "tencent" or "huawei" or "azure" or "gcp"
+// if s3Source is set, s3Endpoint s3BucketName s3Region s3Provider will be filled with default values if not set
+s3Source="aliyun"
+// s3Endpoint = ""
+// s3BucketName = ""
+// s3Region = ""
+// s3Provider = ""
+ak="***********"
+sk="***********"
 
 // jdbc connector test config
 // To enable jdbc test, you need first start mysql/pg container.
@@ -194,10 +202,6 @@ aliYunSk="***********"
 hwYunAk="***********"
 hwYunSk="***********"
 
-s3Endpoint = "cos.ap-hongkong.myqcloud.com"
-s3BucketName = "doris-build-hk-1308700295"
-s3Region = "ap-hongkong"
-
 //arrow flight sql test config
 extArrowFlightSqlHost = "127.0.0.1"
 extArrowFlightSqlPort = 8080
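
For context, a regression-conf.groovy can now pick one preset source and still override any individual key; a minimal usage sketch (values hypothetical, not from the patch):

    // pick a preset s3Source: aliyun, aliyun-internal, tencent, huawei, azure, gcp
    s3Source = "tencent"
    // an explicitly set key always wins over the preset default
    s3Region = "ap-hongkong"      // hypothetical override; unset keys fall back to the preset
    ak = "your-access-key"        // placeholder credentials
    sk = "your-secret-key"
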
diff --git a/regression-test/framework/src/main/groovy/org/apache/doris/regression/Config.groovy b/regression-test/framework/src/main/groovy/org/apache/doris/regression/Config.groovy
index b901095eac1..b7c3090e0b8 100644
--- a/regression-test/framework/src/main/groovy/org/apache/doris/regression/Config.groovy
+++ b/regression-test/framework/src/main/groovy/org/apache/doris/regression/Config.groovy
@@ -144,9 +144,12 @@ class Config {
     public String kafkaBrokerList
     public String cloudVersion
 
+    public String s3Source
+
     Config() {}
 
     Config(
+            String s3Source,
             String caseNamePrefix,
             String defaultDb, 
             String jdbcUrl, 
@@ -199,6 +202,7 @@ class Config {
             String clusterDir, 
             String kafkaBrokerList, 
             String cloudVersion) {
+        this.s3Source = s3Source
         this.caseNamePrefix = caseNamePrefix
         this.defaultDb = defaultDb
         this.jdbcUrl = jdbcUrl
@@ -449,7 +453,6 @@ class Config {
         }
         log.info("recycleAddr : $config.recycleServiceHttpAddress, socketAddr 
: $config.recycleServiceHttpInetSocketAddress")
 
-
         config.defaultDb = cmd.getOptionValue(defaultDbOpt, config.defaultDb)
         config.jdbcUrl = cmd.getOptionValue(jdbcOpt, config.jdbcUrl)
         config.jdbcUser = cmd.getOptionValue(userOpt, config.jdbcUser)
@@ -478,6 +481,16 @@ class Config {
         log.info("withOutLoadData is ${config.withOutLoadData}".toString())
         log.info("caseNamePrefix is ${config.caseNamePrefix}".toString())
         log.info("dryRun is ${config.dryRun}".toString())
+        def s3SourceList = ["aliyun", "aliyun-internal", "tencent", "huawei", "azure", "gcp"]
+        if (s3SourceList.contains(config.s3Source)) {
+            log.info("s3Source is ${config.s3Source}".toString())
+            log.info("s3Provider is ${config.otherConfigs.get("s3Provider")}".toString())
+            log.info("s3BucketName is ${config.otherConfigs.get("s3BucketName")}".toString())
+            log.info("s3Region is ${config.otherConfigs.get("s3Region")}".toString())
+            log.info("s3Endpoint is ${config.otherConfigs.get("s3Endpoint")}".toString())
+        } else {
+            throw new Exception("The s3Source '${config.s3Source}' is invalid, optional values ${s3SourceList}")
+        }
 
         Properties props = cmd.getOptionProperties("conf")
         config.otherConfigs.putAll(props)
@@ -490,6 +503,7 @@ class Config {
 
     static Config fromConfigObject(ConfigObject obj) {
         def config = new Config(
+            configToString(obj.s3Source),
             configToString(obj.caseNamePrefix),
             configToString(obj.defaultDb),
             configToString(obj.jdbcUrl),
@@ -602,6 +616,82 @@ class Config {
     }
 
     static void fillDefaultConfig(Config config) {
+        if (config.s3Source == null) {
+            config.s3Source = "aliyun"
+            log.info("Set s3Source to 'aliyun' because not 
specify.".toString())
+        }
+
+        if (config.otherConfigs.get("s3Provider") == null) {
+            def s3Provider = "OSS"
+            if (config.s3Source == "aliyun" || config.s3Source == "aliyun-internal") {
+                s3Provider = "OSS"
+            } else if (config.s3Source == "tencent") {
+                s3Provider = "COS"
+            } else if (config.s3Source == "huawei") {
+                s3Provider = "OBS"
+            } else if (config.s3Source == "azure") {
+                s3Provider = "AZURE"
+            } else if (config.s3Source == "gcp") {
+                s3Provider = "GCP"
+            }
+            config.otherConfigs.put("s3Provider", "${s3Provider}")
+            log.info("Set s3Provider to '${s3Provider}' because not 
specify.".toString())
+        }
+        if (config.otherConfigs.get("s3BucketName") == null) {
+            def s3BucketName = "doris-regression-hk"
+            if (config.s3Source == "aliyun") {
+                s3BucketName = "doris-regression-hk"
+            } else if (config.s3Source == "aliyun-internal") {
+                s3BucketName = "doris-regression"
+            } else if (config.s3Source == "tencent") {
+                s3BucketName = "doris-build-1308700295"
+            } else if (config.s3Source == "huawei") {
+                s3BucketName = "doris-build"
+            } else if (config.s3Source == "azure") {
+                s3BucketName = "qa-build"
+            } else if (config.s3Source == "gcp") {
+                s3BucketName = "doris-regression"
+            }
+            config.otherConfigs.put("s3BucketName", "${s3BucketName}")
+            log.info("Set s3BucketName to '${s3BucketName}' because not 
specify.".toString())
+        }
+        if (config.otherConfigs.get("s3Region") == null) {
+            def s3Region = "oss-cn-hongkong"
+            if (config.s3Source == "aliyun") {
+                s3Region = "oss-cn-hongkong"
+            } else if (config.s3Source == "aliyun-internal") {
+                s3Region = "oss-cn-beijing"
+            } else if (config.s3Source == "tencent") {
+                s3Region = "ap-beijing"
+            } else if (config.s3Source == "huawei") {
+                s3Region = "cn-north-4"
+            } else if (config.s3Source == "azure") {
+                s3Region = "azure-region"
+            } else if (config.s3Source == "gcp") {
+                s3Region = "us-central1"
+            }
+            config.otherConfigs.put("s3Region", "${s3Region}")
+            log.info("Set s3Region to '${s3Region}' because not 
specify.".toString())
+        }
+        if (config.otherConfigs.get("s3Endpoint") == null) {
+            def s3Endpoint = "oss-cn-hongkong.aliyuncs.com"
+            if (config.s3Source == "aliyun") {
+                s3Endpoint = "oss-cn-hongkong.aliyuncs.com"
+            } else if (config.s3Source == "aliyun-internal") {
+                s3Endpoint = "oss-cn-beijing-internal.aliyuncs.com"
+            } else if (config.s3Source == "tencent") {
+                s3Endpoint = "cos.ap-beijing.myqcloud.com"
+            } else if (config.s3Source == "huawei") {
+                s3Endpoint = "obs.cn-north-4.myhuaweicloud.com"
+            } else if (config.s3Source == "azure") {
+                s3Endpoint = "azure-endpoint"
+            } else if (config.s3Source == "gcp") {
+                s3Endpoint = "storage.googleapis.com"
+            }
+            config.otherConfigs.put("s3Endpoint", "${s3Endpoint}")
+            log.info("Set s3Endpoint to '${s3Endpoint}' because not 
specify.".toString())
+        }
+
         if (config.caseNamePrefix == null) {
             config.caseNamePrefix = ""
             log.info("set caseNamePrefix to '' because not 
specify.".toString())
diff --git a/regression-test/framework/src/main/groovy/org/apache/doris/regression/suite/Suite.groovy b/regression-test/framework/src/main/groovy/org/apache/doris/regression/suite/Suite.groovy
index 4d5127b8644..2cd27b0968d 100644
--- a/regression-test/framework/src/main/groovy/org/apache/doris/regression/suite/Suite.groovy
+++ b/regression-test/framework/src/main/groovy/org/apache/doris/regression/suite/Suite.groovy
@@ -877,7 +877,7 @@ class Suite implements GroovyInterceptable {
                 if (exitcode != 0) {
                     staticLogger.info("exit code: ${exitcode}, output\n: 
${proc.text}")
                     if (mustSuc == true) {
-                       Assert.assertEquals(0, exitCode)
+                       Assert.assertEquals(0, exitcode)
                     }
                 }
             } catch (IOException e) {
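
The one-character fix above matters: exitCode is undefined in that scope, so Groovy would throw a MissingPropertyException instead of running the assertion. A self-contained sketch of the corrected pattern (command and flag hypothetical):

    import org.junit.Assert

    def mustSuc = true                              // hypothetical flag
    def proc = ["bash", "-c", "exit 0"].execute()   // hypothetical command
    proc.waitFor()
    int exitcode = proc.exitValue()
    if (exitcode != 0) {
        println("exit code: ${exitcode}, output:\n${proc.text}")
        if (mustSuc) {
            Assert.assertEquals(0, exitcode)        // was mistyped as exitCode
        }
    }
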
diff --git a/regression-test/pipeline/cloud_p0/conf/regression-conf-custom.groovy b/regression-test/pipeline/cloud_p0/conf/regression-conf-custom.groovy
index 9127189e28c..a19abc3367b 100644
--- a/regression-test/pipeline/cloud_p0/conf/regression-conf-custom.groovy
+++ b/regression-test/pipeline/cloud_p0/conf/regression-conf-custom.groovy
@@ -76,4 +76,4 @@ enableKafkaTest=true
 // trino-connector catalog test config
 enableTrinoConnectorTest = false
 
-s3Provider = "COS"
+s3Source = "aliyun"
diff --git a/regression-test/pipeline/cloud_p0/deploy.sh b/regression-test/pipeline/cloud_p0/deploy.sh
index c65de2b39c6..5a5186b16ee 100644
--- a/regression-test/pipeline/cloud_p0/deploy.sh
+++ b/regression-test/pipeline/cloud_p0/deploy.sh
@@ -76,8 +76,12 @@ exit_flag=0
     if ! create_warehouse; then exit 1; fi
     if ! warehouse_add_fe; then exit 1; fi
     if ! warehouse_add_be; then exit 1; fi
+    if ! prepare_java_udf; then exit 1; fi
     if ! start_doris_fe; then exit 1; fi
     if ! start_doris_be; then exit 1; fi
+    if ! deploy_doris_sql_converter; then exit 1; else
+        set_session_variable sql_converter_service_url "http://127.0.0.1:${doris_sql_converter_port:-5001}/api/v1/convert"
+    fi
     if ! check_doris_ready; then exit 1; fi
 
     echo "#### 5. set session variables"
diff --git a/regression-test/pipeline/cloud_p0/prepare.sh b/regression-test/pipeline/cloud_p0/prepare.sh
index 89bfa3150bc..ada520a95ed 100644
--- a/regression-test/pipeline/cloud_p0/prepare.sh
+++ b/regression-test/pipeline/cloud_p0/prepare.sh
@@ -47,7 +47,7 @@ if [[ -z "${pr_num_from_trigger}" ]]; then echo "ERROR: env pr_num_from_trigger
 if [[ -z "${commit_id_from_trigger}" ]]; then echo "ERROR: env commit_id_from_trigger not set" && exit 1; fi
 if [[ -z "${commit_id_from_checkout}" ]]; then echo "ERROR: env commit_id_from_checkout not set" && exit 1; fi
 if [[ -z "${target_branch}" ]]; then echo "ERROR: env target_branch not set" && exit 1; fi
-if [[ -z "${cos_ak}" || -z "${cos_sk}" ]]; then echo "ERROR: env cos_ak or cos_sk not set" && exit 1; fi
+if [[ -z "${s3SourceAk}" || -z "${s3SourceSk}" ]]; then echo "ERROR: env s3SourceAk or s3SourceSk not set" && exit 1; fi
 if [[ -z "${oss_ak}" || -z "${oss_sk}" ]]; then echo "ERROR: env oss_ak or oss_sk not set." && exit 1; fi
 
 echo "#### 1. check if need run"
@@ -67,11 +67,11 @@ fi
 # shellcheck source=/dev/null
 source "$(bash 
"${teamcity_build_checkoutDir}"/regression-test/pipeline/common/get-or-set-tmp-env.sh
 'get')"
 if ${skip_pipeline:=false}; then echo "INFO: skip build pipline" && exit 0; 
else echo "INFO: no skip"; fi
-if [[ "${target_branch}" == "master" ]]; then
+if [[ "${target_branch}" == "master" || "${target_branch}" == "branch-3.0" ]]; 
then
     echo "INFO: PR target branch ${target_branch}"
     install_java
 else
-    echo "WARNING: PR target branch ${target_branch} is NOT in (master), skip 
pipeline."
+    echo "WARNING: PR target branch ${target_branch} is NOT in (master, 
branch-3.0), skip pipeline."
     bash 
"${teamcity_build_checkoutDir}"/regression-test/pipeline/common/get-or-set-tmp-env.sh
 'set' "export skip_pipeline=true"
     exit 0
 fi
@@ -106,11 +106,11 @@ install_fdb
 clean_fdb "cloud_instance_0"
 
 echo "#### 5. check if binary package ready"
-merge_pr_to_master_commit() {
+merge_pr_to_target_branch_compiled_commit() {
     local pr_num_from_trigger="$1"
     local target_branch="$2"
-    local master_commit="$3"
-    echo "INFO: merge pull request into ${target_branch} ${master_commit}"
+    local target_branch_compiled_commit="$3"
+    echo "INFO: merge pull request into ${target_branch} 
${target_branch_compiled_commit}"
     if [[ -z "${teamcity_build_checkoutDir}" ]]; then
         echo "ERROR: env teamcity_build_checkoutDir not set" && return 1
     fi
@@ -119,10 +119,10 @@ merge_pr_to_master_commit() {
     git fetch origin "${target_branch}"
     git checkout "${target_branch}"
     git reset --hard origin/"${target_branch}"
-    git checkout "${master_commit}"
+    git checkout "${target_branch_compiled_commit}"
     returnValue=$?
     if [[ ${returnValue} -ne 0 ]]; then
-        echo "ERROR: checkout ${target_branch} ${master_commit} failed. please 
rebase to the newest version."
+        echo "ERROR: checkout ${target_branch} 
${target_branch_compiled_commit} failed. please rebase to the newest version."
         return 1
     fi
     git rev-parse HEAD
@@ -131,7 +131,7 @@ merge_pr_to_master_commit() {
     echo "git fetch origin refs/pull/${pr_num_from_trigger}/head"
     git fetch origin "refs/pull/${pr_num_from_trigger}/head"
     git merge --no-edit --allow-unrelated-histories FETCH_HEAD
-    echo "INFO: merge refs/pull/${pr_num_from_trigger}/head into 
${target_branch} ${master_commit}"
+    echo "INFO: merge refs/pull/${pr_num_from_trigger}/head into 
${target_branch} ${target_branch_compiled_commit}"
     # CONFLICTS=$(git ls-files -u | wc -l)
     if [[ $(git ls-files -u | wc -l) -gt 0 ]]; then
         echo "ERROR: merge refs/pull/${pr_num_from_trigger}/head into  failed. 
Aborting"
@@ -144,11 +144,11 @@ if ! check_oss_file_exist "${pr_num_from_trigger}_${commit_id_from_trigger}.tar.
 if download_oss_file "${pr_num_from_trigger}_${commit_id_from_trigger}.tar.gz"; then
     rm -rf "${teamcity_build_checkoutDir}"/output
     tar -I pigz -xf "${pr_num_from_trigger}_${commit_id_from_trigger}.tar.gz"
-    master_commit_file="master.commit"
-    if [[ -e output/${master_commit_file} ]]; then
+    target_branch_compiled_commit_file="master.commit"
+    if [[ -e output/${target_branch_compiled_commit_file} ]]; then
         # checkout to master commit and merge this pr, to ensure binary and case are same version
-        master_commit=$(cat output/"${master_commit_file}")
-        if merge_pr_to_master_commit "${pr_num_from_trigger}" "${target_branch}" "${master_commit}"; then
+        target_branch_compiled_commit=$(cat output/"${target_branch_compiled_commit_file}")
+        if merge_pr_to_target_branch_compiled_commit "${pr_num_from_trigger}" "${target_branch}" "${target_branch_compiled_commit}"; then
             echo "INFO: merged done"
             if [[ "${teamcity_buildType_id:-}" == 
"Doris_DorisCloudRegression_CloudP1" ]]; then
                 echo "INFO: 用cloud_p1/conf覆盖cloud_p0/conf"
diff --git a/regression-test/pipeline/cloud_p0/run.sh b/regression-test/pipeline/cloud_p0/run.sh
index 3c126460b56..2079d182ef3 100644
--- a/regression-test/pipeline/cloud_p0/run.sh
+++ b/regression-test/pipeline/cloud_p0/run.sh
@@ -33,7 +33,7 @@ echo "#### Check env"
 if [[ -z "${teamcity_build_checkoutDir}" ]]; then echo "ERROR: env 
teamcity_build_checkoutDir not set" && exit 1; fi
 if [[ -z "${pr_num_from_trigger}" ]]; then echo "ERROR: env 
pr_num_from_trigger not set" && exit 1; fi
 if [[ -z "${commit_id_from_trigger}" ]]; then echo "ERROR: env 
commit_id_from_trigger not set" && exit 1; fi
-if [[ -z "${cos_ak}" || -z "${cos_sk}" ]]; then echo "ERROR: env cos_ak or 
cos_sk not set" && exit 1; fi
+if [[ -z "${s3SourceAk}" || -z "${s3SourceSk}" ]]; then echo "ERROR: env 
s3SourceAk or s3SourceSk not set" && exit 1; fi
 
 # shellcheck source=/dev/null
 source "$(bash 
"${teamcity_build_checkoutDir}"/regression-test/pipeline/common/get-or-set-tmp-env.sh
 'get')"
@@ -53,8 +53,8 @@ run() {
     cd "${teamcity_build_checkoutDir}" || return 1
     {
         echo # add a new line to prevent two config items from being combined, which will cause the error "No signature of method"
-        echo "ak='${cos_ak}'"
-        echo "sk='${cos_sk}'"
+        echo "ak='${s3SourceAk}'"
+        echo "sk='${s3SourceSk}'"
     } >>"${teamcity_build_checkoutDir}"/regression-test/pipeline/cloud_p0/conf/regression-conf-custom.groovy
     cp -f "${teamcity_build_checkoutDir}"/regression-test/pipeline/cloud_p0/conf/regression-conf-custom.groovy \
         "${teamcity_build_checkoutDir}"/regression-test/conf/
diff --git a/regression-test/pipeline/cloud_p1/conf/regression-conf-custom.groovy b/regression-test/pipeline/cloud_p1/conf/regression-conf-custom.groovy
index 42a18b7f22e..2662090b401 100644
--- a/regression-test/pipeline/cloud_p1/conf/regression-conf-custom.groovy
+++ b/regression-test/pipeline/cloud_p1/conf/regression-conf-custom.groovy
@@ -21,4 +21,4 @@ excludeDirectories = "000_the_start_sentinel_do_not_touch," + // keep this line
 
 max_failure_num = 50
 
-s3Provider = "COS"
+s3Source = "aliyun"
diff --git a/regression-test/pipeline/common/doris-utils.sh b/regression-test/pipeline/common/doris-utils.sh
index 2f436e95846..5ece3d872d6 100644
--- a/regression-test/pipeline/common/doris-utils.sh
+++ b/regression-test/pipeline/common/doris-utils.sh
@@ -94,13 +94,30 @@ function install_java() {
         [[ -z "$(find /usr/lib/jvm -maxdepth 1 -type d -name 'java-8-*')" ]]; 
then
         sudo apt update && sudo apt install openjdk-8-jdk -y >/dev/null
     fi
-    # doris master branch use java-17
+    # doris master and branch-3.0 use java-17
     if ! java -version >/dev/null ||
         [[ -z "$(find /usr/lib/jvm -maxdepth 1 -type d -name 'java-17-*')" ]]; 
then
         sudo apt update && sudo apt install openjdk-17-jdk -y >/dev/null
     fi
 }
 
+install_maven() {
+    if ! mvn -v >/dev/null; then
+        sudo apt update && sudo apt install maven -y >/dev/null
+        PATH="/usr/share/maven/bin:${PATH}"
+        export PATH
+    fi
+    if ! mvn -v >/dev/null; then
+        wget -c -t3 -q "${MAVEN_DOWNLOAD_URL:-https://dlcdn.apache.org/maven/maven-3/3.9.8/binaries/apache-maven-3.9.8-bin.tar.gz}"
+        tar -xf apache-maven-3.9.8-bin.tar.gz -C /usr/share/
+        PATH="/usr/share/apache-maven-3.9.8/bin:${PATH}"
+        export PATH
+    fi
+    if ! mvn -v >/dev/null; then
+        echo "ERROR: install maven failed" && return 1
+    fi
+}
+
 function start_doris_fe() {
     if [[ ! -d "${DORIS_HOME:-}" ]]; then return 1; fi
     if install_java && [[ -z "${JAVA_HOME}" ]]; then
@@ -231,6 +248,40 @@ function install_fdb() {
     fi
 }
 
+deploy_doris_sql_converter() {
+    # https://doris.apache.org/zh-CN/docs/dev/lakehouse/sql-dialect/
+    if ${DEBUG:-false}; then
+        download_url="https://selectdb-doris.oss-cn-beijing.aliyuncs.com/doris-sql-convertor/doris-sql-convertor-1.0.6-bin-x86.tar.gz"
+    else
+        download_url="${doris_sql_converter_download_url}"
+    fi
+    if [[ -z "${doris_sql_converter_download_url}" ]]; then
+        echo "INFO: doris_sql_converter_download_url not set, skip download 
doris-sql-converter." && return 0
+    fi
+    if wget -c -t3 -q "${download_url}"; then
+        download_file_name="$(basename "${download_url}")"
+        extract_dir_name="doris_sql_converter"
+        mkdir -p "${extract_dir_name}"
+        tar -xf "${download_file_name}" --strip-components 1 -C 
"${extract_dir_name}"
+        if [[ ! -f "${extract_dir_name}"/conf/config.conf ]]; then
+            echo "ERROR: miss file ${extract_dir_name}/conf/config.conf" && 
return 1
+        fi
+        doris_sql_converter_port="${doris_sql_converter_port:-5001}"
+        sed -i "/port=.*/d" "${extract_dir_name}"/conf/config.conf
+        echo "port=${doris_sql_converter_port}" 
>>"${extract_dir_name}"/conf/config.conf
+        echo "INFO: changed doris-sql-converter port to 
${doris_sql_converter_port}"
+        if bash "${extract_dir_name}"/bin/stop.sh && fuser -k 5002/tcp; then 
echo; fi
+        if bash "${extract_dir_name}"/bin/start.sh &&
+            sleep 2s && lsof -i:"${doris_sql_converter_port}"; then
+            echo "INFO: doris-sql-converter start success."
+        else
+            echo "ERROR: doris-sql-converter start failed." && return 1
+        fi
+    else
+        echo "ERROR: download doris-sql-converter ${download_url} failed." && 
return 1
+    fi
+}
+
 function restart_doris() {
     if stop_doris; then echo; fi
     if ! start_doris_fe; then return 1; fi
@@ -415,7 +466,7 @@ set_session_variable() {
     if [[ -z "${v}" ]]; then return 1; fi
     query_port=$(get_doris_conf_value "${DORIS_HOME}"/fe/conf/fe.conf query_port)
     cl="mysql -h127.0.0.1 -P${query_port} -uroot "
-    if ${cl} -e"set global ${k}=${v};"; then
+    if ${cl} -e"set global ${k}='${v}';"; then
         if [[ "$(get_session_variable "${k}" | tr '[:upper:]' '[:lower:]')" == 
"${v}" ]]; then
             echo "INFO:      set global ${k}=${v};"
         else
@@ -729,6 +780,32 @@ function check_if_need_gcore() {
     fi
 }
 
+prepare_java_udf() {
+    if [[ ! -d "${DORIS_HOME:-}" ]]; then return 1; fi
+    # custom_lib cases need the compiled jars copied into $DORIS_HOME/fe/custom_lib/ before FE starts
+    install_java
+    install_maven
+    OLD_JAVA_HOME=${JAVA_HOME}
+    JAVA_HOME="$(find /usr/lib/jvm -maxdepth 1 -type d -name 'java-8-*' | sed 
-n '1p')"
+    export JAVA_HOME
+    if bash "${DORIS_HOME}"/../run-regression-test.sh --clean &&
+        bash "${DORIS_HOME}"/../run-regression-test.sh --compile; then
+        echo
+    else
+        echo "ERROR: failed to compile java udf"
+    fi
+    JAVA_HOME=${OLD_JAVA_HOME}
+    export JAVA_HOME
+
+    if ls "${DORIS_HOME}"/fe/custom_lib/*.jar &&
+        ls "${DORIS_HOME}"/be/custom_lib/*.jar; then
+        echo "INFO: java udf prepared."
+    else
+        echo "ERROR: failed to prepare java udf"
+        return 1
+    fi
+}
+
 function print_running_pipeline_tasks() {
     webserver_port=$(get_doris_conf_value "${DORIS_HOME}"/be/conf/be.conf webserver_port)
     mkdir -p "${DORIS_HOME}"/be/log/
diff --git a/regression-test/pipeline/external/conf/regression-conf.groovy b/regression-test/pipeline/external/conf/regression-conf.groovy
index 28956568e58..93965b84219 100644
--- a/regression-test/pipeline/external/conf/regression-conf.groovy
+++ b/regression-test/pipeline/external/conf/regression-conf.groovy
@@ -138,10 +138,7 @@ es_8_port=39200
 
 cacheDataPath = "/data/regression/"
 
-s3Endpoint = "cos.ap-hongkong.myqcloud.com"
-s3BucketName = "doris-build-hk-1308700295"
-s3Region = "ap-hongkong"
-s3Provider = "COS"
+s3Source="aliyun"
 
 max_failure_num=50
 
diff --git a/regression-test/pipeline/p0/conf/regression-conf.groovy b/regression-test/pipeline/p0/conf/regression-conf.groovy
index c30dd20481c..9472ed50cbf 100644
--- a/regression-test/pipeline/p0/conf/regression-conf.groovy
+++ b/regression-test/pipeline/p0/conf/regression-conf.groovy
@@ -80,8 +80,7 @@ excludeDirectories = "000_the_start_sentinel_do_not_touch," + // keep this line
     "cloud_p0," +
     "nereids_rules_p0/subquery," +
     "workload_manager_p1," +
-    "zzz_the_end_sentinel_do_not_touch," +
-    "dialect_compatible"// keep this line as the last line
+    "zzz_the_end_sentinel_do_not_touch"// keep this line as the last line
 
 customConf1 = "test_custom_conf_value"
 
@@ -137,10 +136,7 @@ es_8_port=39200
 
 cacheDataPath = "/data/regression/"
 
-s3Endpoint = "cos.ap-hongkong.myqcloud.com"
-s3BucketName = "doris-build-hk-1308700295"
-s3Region = "ap-hongkong"
-s3Provider = "COS"
+s3Source="aliyun"
 
 //arrow flight sql test config
 extArrowFlightSqlHost = "127.0.0.1"
diff --git a/regression-test/pipeline/p1/conf/regression-conf.groovy b/regression-test/pipeline/p1/conf/regression-conf.groovy
index d4ecd55d38f..49f52c1fa32 100644
--- a/regression-test/pipeline/p1/conf/regression-conf.groovy
+++ b/regression-test/pipeline/p1/conf/regression-conf.groovy
@@ -70,10 +70,7 @@ excludeDirectories = "000_the_start_sentinel_do_not_touch," + // keep this line
 
 cacheDataPath="/data/regression/"
 
-s3Endpoint = "cos.ap-hongkong.myqcloud.com"
-s3BucketName = "doris-build-hk-1308700295"
-s3Region = "ap-hongkong"
-s3Provider = "COS"
+s3Source="aliyun"
 
 max_failure_num=0
 
diff --git a/regression-test/pipeline/performance/compile.sh b/regression-test/pipeline/performance/compile.sh
index 3a668cd3fdf..34f49db0c10 100644
--- a/regression-test/pipeline/performance/compile.sh
+++ b/regression-test/pipeline/performance/compile.sh
@@ -76,7 +76,7 @@ merge_pr_to_target_branch_latest() {
     fi
 }
 
-if [[ "${target_branch}" == "master" ]]; then
+if [[ "${target_branch}" == "master" || "${target_branch}" == "branch-3.0" ]]; 
then
     REMOTE_CCACHE='/mnt/remote_ccache_master'
     docker_image="apache/doris:build-env-ldb-toolchain-0.19-latest"
 elif [[ "${target_branch}" == "branch-2.0" ]]; then
@@ -109,7 +109,7 @@ if sudo docker ps -a --no-trunc | grep "${docker_name}"; then
 fi
 rm -f custom_env.sh
 cp "${teamcity_build_checkoutDir}"/regression-test/pipeline/performance/conf/custom_env.sh .
-if [[ "${target_branch}" == "master" ]]; then
+if [[ "${target_branch}" == "master" || "${target_branch}" == "branch-3.0" ]]; 
then
     echo "export JAVA_HOME=/usr/lib/jvm/jdk-17.0.2" >>custom_env.sh
 fi
 rm -rf "${teamcity_build_checkoutDir}"/output
diff --git a/regression-test/pipeline/performance/deploy.sh b/regression-test/pipeline/performance/deploy.sh
index 9f29fd8d256..36870058641 100644
--- a/regression-test/pipeline/performance/deploy.sh
+++ b/regression-test/pipeline/performance/deploy.sh
@@ -68,6 +68,10 @@ exit_flag=0
     cp -f "${teamcity_build_checkoutDir}"/regression-test/pipeline/performance/conf/fe_custom.conf "${DORIS_HOME}"/fe/conf/
     cp -f "${teamcity_build_checkoutDir}"/regression-test/pipeline/performance/conf/be_custom.conf "${DORIS_HOME}"/be/conf/
     target_branch="$(echo "${target_branch}" | sed 's| ||g;s|\.||g;s|-||g')" # remove space, dot, hyphen from branch name
+    if [[ "${target_branch}" == "branch30" ]]; then
+        # branch-3.0 also use master data
+        target_branch="master"
+    fi
     sed -i "s|^meta_dir=/data/doris-meta-\${branch_name}|meta_dir=/data/doris-meta-${target_branch}${meta_changed_suffix:-}|g" "${DORIS_HOME}"/fe/conf/fe_custom.conf
     sed -i "s|^storage_root_path=/data/doris-storage-\${branch_name}|storage_root_path=/data/doris-storage-${target_branch}${meta_changed_suffix:-}|g" "${DORIS_HOME}"/be/conf/be_custom.conf
 
diff --git a/regression-test/pipeline/performance/prepare.sh b/regression-test/pipeline/performance/prepare.sh
index 19a3ae58ae9..635b4510eba 100644
--- a/regression-test/pipeline/performance/prepare.sh
+++ b/regression-test/pipeline/performance/prepare.sh
@@ -74,7 +74,7 @@ source "$(bash "${teamcity_build_checkoutDir}"/regression-test/pipeline/common/g
 source "${teamcity_build_checkoutDir}"/regression-test/pipeline/common/doris-utils.sh
 
 if ${skip_pipeline:=false}; then echo "INFO: skip build pipeline" && exit 0; else echo "INFO: no skip"; fi
-if [[ "${target_branch}" == "master" ]]; then
+if [[ "${target_branch}" == "master" || "${target_branch}" == "branch-3.0" ]]; 
then
     echo "INFO: PR target branch ${target_branch}"
     install_java
     JAVA_HOME="${JAVA_HOME:-$(find /usr/lib/jvm -maxdepth 1 -type d -name 
'java-17-*' | sed -n '1p')}"
@@ -82,7 +82,7 @@ if [[ "${target_branch}" == "master" ]]; then
 elif [[ "${target_branch}" == "branch-2.0" ]]; then
     echo "INFO: PR target branch ${target_branch}"
 else
-    echo "WARNING: PR target branch ${target_branch} is NOT in (master, 
branch-2.0), skip pipeline."
+    echo "WARNING: PR target branch ${target_branch} is NOT in (master, 
branch-3.0, branch-2.0), skip pipeline."
     bash "${teamcity_build_checkoutDir}"/regression-test/pipeline/common/get-or-set-tmp-env.sh 'set' "export skip_pipeline=true"
     exit 0
 fi
@@ -98,6 +98,10 @@ if _get_pr_changed_files "${pr_num_from_trigger}"; then
         # if PR changed the doris meta file, the next PR deployment on the same machine which built this PR will fail.
         # make a copy of the meta file for the meta changed PR.
         target_branch="$(echo "${target_branch}" | sed 's| 
||g;s|\.||g;s|-||g')" # remove space、dot、hyphen from branch name
+        if [[ "${target_branch}" == "branch30" ]]; then
+            # branch-3.0 also use master data
+            target_branch="master"
+        fi
         meta_changed_suffix="_2"
         rsync -a --delete "/data/doris-meta-${target_branch}/" 
"/data/doris-meta-${target_branch}${meta_changed_suffix}"
         rsync -a --delete "/data/doris-storage-${target_branch}/" 
"/data/doris-storage-${target_branch}${meta_changed_suffix}"
diff --git a/regression-test/suites/connector_p0/spark_connector/spark_connector.groovy b/regression-test/suites/connector_p0/spark_connector/spark_connector.groovy
index 2bd618fcc3c..06699d7c8ff 100644
--- a/regression-test/suites/connector_p0/spark_connector/spark_connector.groovy
+++ b/regression-test/suites/connector_p0/spark_connector/spark_connector.groovy
@@ -25,7 +25,8 @@ suite("spark_connector", "connector") {
     logger.info("start download spark doris demo ...")
     logger.info("getS3Url ==== ${getS3Url()}")
     def download_spark_jar = "/usr/bin/curl ${getS3Url()}/regression/spark-doris-connector-demo-jar-with-dependencies.jar --output spark-doris-demo.jar".execute().getText()
-    logger.info("finish download spark doris demo ...")
+    def out = "/usr/bin/ls -al spark-doris-demo.jar".execute().getText()
+    logger.info("finish download spark doris demo, out: ${out}")
     def run_cmd = "java -jar spark-doris-demo.jar $context.config.feHttpAddress $context.config.feHttpUser regression_test_connector_p0_spark_connector.$tableName"
     logger.info("run_cmd : $run_cmd")
     def proc = run_cmd.execute()
diff --git a/regression-test/suites/export_p0/test_export_table_with_materialized_view.groovy b/regression-test/suites/export_p0/test_export_table_with_materialized_view.groovy
index bc51b99a991..adba998b16f 100644
--- a/regression-test/suites/export_p0/test_export_table_with_materialized_view.groovy
+++ b/regression-test/suites/export_p0/test_export_table_with_materialized_view.groovy
@@ -114,7 +114,7 @@ suite("test_export_table_with_materialized_view", "p0") {
         def outfile_url = waiting_export.call(label)
 
         qt_select_load1 """ select * from s3(
-                "uri" = "http://${s3_endpoint}${outfile_url.substring(4, 
outfile_url.length() - 1)}0.parquet",
+                "uri" = 
"http://${bucket}.${s3_endpoint}${outfile_url.substring(5+bucket.length(), 
outfile_url.length() - 1)}0.parquet",
                 "s3.access_key"= "${ak}",
                 "s3.secret_key" = "${sk}",
                 "format" = "parquet",
diff --git a/regression-test/suites/external_table_p0/trino_connector/hive/test_trino_different_parquet_types.groovy b/regression-test/suites/external_table_p0/trino_connector/hive/test_trino_different_parquet_types.groovy
index 4a450b99901..fbf60ed2497 100644
--- a/regression-test/suites/external_table_p0/trino_connector/hive/test_trino_different_parquet_types.groovy
+++ b/regression-test/suites/external_table_p0/trino_connector/hive/test_trino_different_parquet_types.groovy
@@ -16,16 +16,6 @@
 // under the License.
 
 suite("test_trino_different_parquet_types", 
"p0,external,hive,external_docker,external_docker_hive") {
-    def host_ips = new ArrayList()
-    String[][] backends = sql """ show backends """
-    for (def b in backends) {
-        host_ips.add(b[1])
-    }
-    String [][] frontends = sql """ show frontends """
-    for (def f in frontends) {
-        host_ips.add(f[1])
-    }
-    dispatchTrinoConnectors(host_ips.unique())
 
     String hms_port = context.config.otherConfigs.get("hive2HmsPort")
     String hdfs_port = context.config.otherConfigs.get("hive2HdfsPort")
@@ -185,6 +175,16 @@ suite("test_trino_different_parquet_types", "p0,external,hive,external_docker,ex
 
     String enabled = context.config.otherConfigs.get("enableHiveTest")
     if (enabled != null && enabled.equalsIgnoreCase("true")) {
+        def host_ips = new ArrayList()
+        String[][] backends = sql """ show backends """
+        for (def b in backends) {
+            host_ips.add(b[1])
+        }
+        String [][] frontends = sql """ show frontends """
+        for (def f in frontends) {
+            host_ips.add(f[1])
+        }
+        dispatchTrinoConnectors(host_ips.unique())
         try {
             String catalog_name = "test_trino_different_parquet_types"
             sql """drop catalog if exists ${catalog_name}"""
diff --git a/regression-test/suites/external_table_p0/trino_connector/hive/test_trino_hive_orc.groovy b/regression-test/suites/external_table_p0/trino_connector/hive/test_trino_hive_orc.groovy
index 89255cfbc56..d4cbcbe409b 100644
--- a/regression-test/suites/external_table_p0/trino_connector/hive/test_trino_hive_orc.groovy
+++ b/regression-test/suites/external_table_p0/trino_connector/hive/test_trino_hive_orc.groovy
@@ -16,16 +16,6 @@
 // under the License.
 
 suite("test_trino_hive_orc", 
"all_types,external,hive,external_docker,external_docker_hive") {
-    def host_ips = new ArrayList()
-    String[][] backends = sql """ show backends """
-    for (def b in backends) {
-        host_ips.add(b[1])
-    }
-    String [][] frontends = sql """ show frontends """
-    for (def f in frontends) {
-        host_ips.add(f[1])
-    }
-    dispatchTrinoConnectors(host_ips.unique())
 
     // Ensure that all types are parsed correctly
     def select_top50 = {
@@ -88,6 +78,16 @@ suite("test_trino_hive_orc", "all_types,external,hive,external_docker,external_d
 
     String enabled = context.config.otherConfigs.get("enableHiveTest")
     if (enabled != null && enabled.equalsIgnoreCase("true")) {
+        def host_ips = new ArrayList()
+        String[][] backends = sql """ show backends """
+        for (def b in backends) {
+            host_ips.add(b[1])
+        }
+        String [][] frontends = sql """ show frontends """
+        for (def f in frontends) {
+            host_ips.add(f[1])
+        }
+        dispatchTrinoConnectors(host_ips.unique())
         try {
             String hms_port = context.config.otherConfigs.get("hive2HmsPort")
             String catalog_name = "test_trino_hive_orc"
diff --git a/regression-test/suites/external_table_p0/trino_connector/hive/test_trino_hive_other.groovy b/regression-test/suites/external_table_p0/trino_connector/hive/test_trino_hive_other.groovy
index 9d3430d1ad0..6d410b2cb9a 100644
--- a/regression-test/suites/external_table_p0/trino_connector/hive/test_trino_hive_other.groovy
+++ b/regression-test/suites/external_table_p0/trino_connector/hive/test_trino_hive_other.groovy
@@ -16,16 +16,6 @@
 // under the License.
 
 suite("test_trino_hive_other", 
"external,hive,external_docker,external_docker_hive") {
-    def host_ips = new ArrayList()
-    String[][] backends = sql """ show backends """
-    for (def b in backends) {
-        host_ips.add(b[1])
-    }
-    String [][] frontends = sql """ show frontends """
-    for (def f in frontends) {
-        host_ips.add(f[1])
-    }
-    dispatchTrinoConnectors(host_ips.unique())
 
     def q01 = {
         qt_q24 """ select name, count(1) as c from student group by name order 
by name desc;"""
@@ -62,6 +52,16 @@ suite("test_trino_hive_other", "external,hive,external_docker,external_docker_hi
 
     String enabled = context.config.otherConfigs.get("enableHiveTest")
     if (enabled != null && enabled.equalsIgnoreCase("true")) {
+        def host_ips = new ArrayList()
+        String[][] backends = sql """ show backends """
+        for (def b in backends) {
+            host_ips.add(b[1])
+        }
+        String [][] frontends = sql """ show frontends """
+        for (def f in frontends) {
+            host_ips.add(f[1])
+        }
+        dispatchTrinoConnectors(host_ips.unique())
         String hms_port = context.config.otherConfigs.get("hive2HmsPort")
         String hdfs_port = context.config.otherConfigs.get("hive2HdfsPort")
         String externalEnvIp = context.config.otherConfigs.get("externalEnvIp")
diff --git a/regression-test/suites/external_table_p0/trino_connector/hive/test_trino_hive_parquet.groovy b/regression-test/suites/external_table_p0/trino_connector/hive/test_trino_hive_parquet.groovy
index 658b342dff0..748ac02b651 100644
--- a/regression-test/suites/external_table_p0/trino_connector/hive/test_trino_hive_parquet.groovy
+++ b/regression-test/suites/external_table_p0/trino_connector/hive/test_trino_hive_parquet.groovy
@@ -16,17 +16,7 @@
 // under the License.
 
 suite("test_trino_hive_parquet", 
"p0,external,hive,external_docker,external_docker_hive") {
-    def host_ips = new ArrayList()
-    String[][] backends = sql """ show backends """
-    for (def b in backends) {
-        host_ips.add(b[1])
-    }
-    String [][] frontends = sql """ show frontends """
-    for (def f in frontends) {
-        host_ips.add(f[1])
-    }
-    dispatchTrinoConnectors(host_ips.unique())
-    
+
     def q01 = {
         qt_q01 """
         select * from partition_table order by l_orderkey, l_partkey, l_suppkey;
@@ -182,6 +172,16 @@ suite("test_trino_hive_parquet", "p0,external,hive,external_docker,external_dock
 
     String enabled = context.config.otherConfigs.get("enableHiveTest")
     if (enabled != null && enabled.equalsIgnoreCase("true")) {
+        def host_ips = new ArrayList()
+        String[][] backends = sql """ show backends """
+        for (def b in backends) {
+            host_ips.add(b[1])
+        }
+        String [][] frontends = sql """ show frontends """
+        for (def f in frontends) {
+            host_ips.add(f[1])
+        }
+        dispatchTrinoConnectors(host_ips.unique())
         try {
             String hms_port = context.config.otherConfigs.get("hive2HmsPort")
             String catalog_name = "test_trino_hive_parquet"
diff --git a/regression-test/suites/external_table_p0/trino_connector/hive/test_trino_hive_schema_evolution.groovy b/regression-test/suites/external_table_p0/trino_connector/hive/test_trino_hive_schema_evolution.groovy
index 1eb4a0f1b9e..c803a988574 100644
--- a/regression-test/suites/external_table_p0/trino_connector/hive/test_trino_hive_schema_evolution.groovy
+++ b/regression-test/suites/external_table_p0/trino_connector/hive/test_trino_hive_schema_evolution.groovy
@@ -16,17 +16,7 @@
 // under the License.
 
 suite("test_trino_hive_schema_evolution", 
"p0,external,hive,external_docker,external_docker_hive") {
-    def host_ips = new ArrayList()
-    String[][] backends = sql """ show backends """
-    for (def b in backends) {
-        host_ips.add(b[1])
-    }
-    String [][] frontends = sql """ show frontends """
-    for (def f in frontends) {
-        host_ips.add(f[1])
-    }
-    dispatchTrinoConnectors(host_ips.unique())
-    
+
     def q_text = {
         qt_q01 """
         select * from schema_evo_test_text order by id;
@@ -67,6 +57,17 @@ suite("test_trino_hive_schema_evolution", "p0,external,hive,external_docker,exte
     String externalEnvIp = context.config.otherConfigs.get("externalEnvIp")
 
     if (enabled != null && enabled.equalsIgnoreCase("true")) {
+        def host_ips = new ArrayList()
+        String[][] backends = sql """ show backends """
+        for (def b in backends) {
+            host_ips.add(b[1])
+        }
+        String [][] frontends = sql """ show frontends """
+        for (def f in frontends) {
+            host_ips.add(f[1])
+        }
+        dispatchTrinoConnectors(host_ips.unique())
+            
         try {
             String hms_port = context.config.otherConfigs.get("hive2HmsPort")
             String catalog_name = "test_trino_hive_schema_evolution"
diff --git a/regression-test/suites/external_table_p0/trino_connector/hive/test_trino_hive_serde_prop.groovy b/regression-test/suites/external_table_p0/trino_connector/hive/test_trino_hive_serde_prop.groovy
index 8479c14fcbc..b996d94f95d 100644
--- a/regression-test/suites/external_table_p0/trino_connector/hive/test_trino_hive_serde_prop.groovy
+++ b/regression-test/suites/external_table_p0/trino_connector/hive/test_trino_hive_serde_prop.groovy
@@ -16,19 +16,19 @@
 // under the License.
 
 suite("test_trino_hive_serde_prop", 
"external_docker,hive,external_docker_hive,p0,external") {
-    def host_ips = new ArrayList()
-    String[][] backends = sql """ show backends """
-    for (def b in backends) {
-        host_ips.add(b[1])
-    }
-    String [][] frontends = sql """ show frontends """
-    for (def f in frontends) {
-        host_ips.add(f[1])
-    }
-    dispatchTrinoConnectors(host_ips.unique())
-    
+
     String enabled = context.config.otherConfigs.get("enableHiveTest")
     if (enabled != null && enabled.equalsIgnoreCase("true")) {
+        def host_ips = new ArrayList()
+        String[][] backends = sql """ show backends """
+        for (def b in backends) {
+            host_ips.add(b[1])
+        }
+        String [][] frontends = sql """ show frontends """
+        for (def f in frontends) {
+            host_ips.add(f[1])
+        }
+        dispatchTrinoConnectors(host_ips.unique())
         String catalog_name = "test_trino_hive_serde_prop"
         String ex_db_name = "`stats_test`"
         String externalEnvIp = context.config.otherConfigs.get("externalEnvIp")
diff --git a/regression-test/suites/external_table_p0/trino_connector/hive/test_trino_hive_tablesample_p0.groovy b/regression-test/suites/external_table_p0/trino_connector/hive/test_trino_hive_tablesample_p0.groovy
index be760d381df..8752f2f4b41 100644
--- a/regression-test/suites/external_table_p0/trino_connector/hive/test_trino_hive_tablesample_p0.groovy
+++ b/regression-test/suites/external_table_p0/trino_connector/hive/test_trino_hive_tablesample_p0.groovy
@@ -16,19 +16,19 @@
 // under the License.
 
 suite("test_trino_hive_tablesample_p0", 
"all_types,p0,external,hive,external_docker,external_docker_hive") {
-    def host_ips = new ArrayList()
-    String[][] backends = sql """ show backends """
-    for (def b in backends) {
-        host_ips.add(b[1])
-    }
-    String [][] frontends = sql """ show frontends """
-    for (def f in frontends) {
-        host_ips.add(f[1])
-    }
-    dispatchTrinoConnectors(host_ips.unique())
 
     String enabled = context.config.otherConfigs.get("enableHiveTest")
     if (enabled != null && enabled.equalsIgnoreCase("true")) {
+        def host_ips = new ArrayList()
+        String[][] backends = sql """ show backends """
+        for (def b in backends) {
+            host_ips.add(b[1])
+        }
+        String [][] frontends = sql """ show frontends """
+        for (def f in frontends) {
+            host_ips.add(f[1])
+        }
+        dispatchTrinoConnectors(host_ips.unique())
         try {
             String hms_port = context.config.otherConfigs.get("hive2HmsPort")
             String catalog_name = "test_trino_hive_tablesample_p0"
diff --git a/regression-test/suites/external_table_p0/trino_connector/hive/test_trino_hive_tpch_sf1_orc.groovy b/regression-test/suites/external_table_p0/trino_connector/hive/test_trino_hive_tpch_sf1_orc.groovy
index cafd9301753..6ee38e0021d 100644
--- a/regression-test/suites/external_table_p0/trino_connector/hive/test_trino_hive_tpch_sf1_orc.groovy
+++ b/regression-test/suites/external_table_p0/trino_connector/hive/test_trino_hive_tpch_sf1_orc.groovy
@@ -16,16 +16,6 @@
 // under the License.
 
 suite("test_trino_hive_tpch_sf1_orc", 
"p0,external,hive,external_docker,external_docker_hive") {
-    def host_ips = new ArrayList()
-    String[][] backends = sql """ show backends """
-    for (def b in backends) {
-        host_ips.add(b[1])
-    }
-    String [][] frontends = sql """ show frontends """
-    for (def f in frontends) {
-        host_ips.add(f[1])
-    }
-    dispatchTrinoConnectors(host_ips.unique())
 
     String enable_file_cache = "false"
     def q01 = { 
@@ -857,6 +847,16 @@ order by
 
     String enabled = context.config.otherConfigs.get("enableHiveTest")
     if (enabled != null && enabled.equalsIgnoreCase("true")) {
+        def host_ips = new ArrayList()
+        String[][] backends = sql """ show backends """
+        for (def b in backends) {
+            host_ips.add(b[1])
+        }
+        String [][] frontends = sql """ show frontends """
+        for (def f in frontends) {
+            host_ips.add(f[1])
+        }
+        dispatchTrinoConnectors(host_ips.unique())
         String hms_port = context.config.otherConfigs.get("hive2HmsPort")
         String catalog_name = "test_trino_hive_tpch_sf1_orc"
         String externalEnvIp = context.config.otherConfigs.get("externalEnvIp")
diff --git a/regression-test/suites/external_table_p0/trino_connector/hive/test_trino_hive_tpch_sf1_parquet.groovy b/regression-test/suites/external_table_p0/trino_connector/hive/test_trino_hive_tpch_sf1_parquet.groovy
index 4034ac805ef..8bd86c7def1 100644
--- a/regression-test/suites/external_table_p0/trino_connector/hive/test_trino_hive_tpch_sf1_parquet.groovy
+++ b/regression-test/suites/external_table_p0/trino_connector/hive/test_trino_hive_tpch_sf1_parquet.groovy
@@ -16,16 +16,6 @@
 // under the License.
 
 suite("test_trino_hive_tpch_sf1_parquet", 
"p0,external,hive,external_docker,external_docker_hive") {
-    def host_ips = new ArrayList()
-    String[][] backends = sql """ show backends """
-    for (def b in backends) {
-        host_ips.add(b[1])
-    }
-    String [][] frontends = sql """ show frontends """
-    for (def f in frontends) {
-        host_ips.add(f[1])
-    }
-    dispatchTrinoConnectors(host_ips.unique())
 
     String enable_file_cache = "false"
     def q01 = { 
@@ -857,6 +847,16 @@ order by
 
     String enabled = context.config.otherConfigs.get("enableHiveTest")
     if (enabled != null && enabled.equalsIgnoreCase("true")) {
+        def host_ips = new ArrayList()
+        String[][] backends = sql """ show backends """
+        for (def b in backends) {
+            host_ips.add(b[1])
+        }
+        String [][] frontends = sql """ show frontends """
+        for (def f in frontends) {
+            host_ips.add(f[1])
+        }
+        dispatchTrinoConnectors(host_ips.unique())
         String hms_port = context.config.otherConfigs.get("hive2HmsPort")
         String catalog_name = "test_trino_hive_tpch_sf1_parquet"
         String externalEnvIp = context.config.otherConfigs.get("externalEnvIp")
diff --git a/regression-test/suites/external_table_p0/trino_connector/hive/test_trino_prepare_hive_data_in_case.groovy b/regression-test/suites/external_table_p0/trino_connector/hive/test_trino_prepare_hive_data_in_case.groovy
index 84049ac1db6..a8b86014aa9 100644
--- a/regression-test/suites/external_table_p0/trino_connector/hive/test_trino_prepare_hive_data_in_case.groovy
+++ b/regression-test/suites/external_table_p0/trino_connector/hive/test_trino_prepare_hive_data_in_case.groovy
@@ -16,20 +16,20 @@
 // under the License.
 
 suite("test_trino_prepare_hive_data_in_case", 
"p0,external,hive,external_docker,external_docker_hive") {
-    def host_ips = new ArrayList()
-    String[][] backends = sql """ show backends """
-    for (def b in backends) {
-        host_ips.add(b[1])
-    }
-    String [][] frontends = sql """ show frontends """
-    for (def f in frontends) {
-        host_ips.add(f[1])
-    }
-    dispatchTrinoConnectors(host_ips.unique())
-    
+
     String enabled = context.config.otherConfigs.get("enableHiveTest")
     def catalog_name = "test_trino_prepare_hive_data_in_case"
     if (enabled != null && enabled.equalsIgnoreCase("true")) {
+        def host_ips = new ArrayList()
+        String[][] backends = sql """ show backends """
+        for (def b in backends) {
+            host_ips.add(b[1])
+        }
+        String [][] frontends = sql """ show frontends """
+        for (def f in frontends) {
+            host_ips.add(f[1])
+        }
+        dispatchTrinoConnectors(host_ips.unique())
         try {
             String externalEnvIp = context.config.otherConfigs.get("externalEnvIp")
             String hms_port = context.config.otherConfigs.get("hive2HmsPort")
diff --git a/regression-test/suites/external_table_p0/trino_connector/kafka/test_trino_kafka_base.groovy b/regression-test/suites/external_table_p0/trino_connector/kafka/test_trino_kafka_base.groovy
index b1da4641866..2f61764ec12 100644
--- a/regression-test/suites/external_table_p0/trino_connector/kafka/test_trino_kafka_base.groovy
+++ b/regression-test/suites/external_table_p0/trino_connector/kafka/test_trino_kafka_base.groovy
@@ -21,17 +21,6 @@ import org.apache.kafka.clients.producer.ProducerRecord
 import org.apache.kafka.clients.producer.ProducerConfig
 
 suite("test_trino_kafka_base", 
"external,kafka,external_docker,external_docker_kafka") {
-    // set up trino-connector plugins
-    def host_ips = new ArrayList()
-    String[][] backends = sql """ show backends """
-    for (def b in backends) {
-        host_ips.add(b[1])
-    }
-    String [][] frontends = sql """ show frontends """
-    for (def f in frontends) {
-        host_ips.add(f[1])
-    }
-    dispatchTrinoConnectors(host_ips.unique())
 
     // Ensure that all types are parsed correctly
     def select_top50 = {
@@ -43,6 +32,18 @@ suite("test_trino_kafka_base", "external,kafka,external_docker,external_docker_k
     String enabled_trino_connector = context.config.otherConfigs.get("enableTrinoConnectorTest")
     if (enabled != null && enabled.equalsIgnoreCase("true")
         && enabled_trino_connector!= null && 
enabled_trino_connector.equalsIgnoreCase("true")) {
+        // set up trino-connector plugins
+        def host_ips = new ArrayList()
+        String[][] backends = sql """ show backends """
+        for (def b in backends) {
+            host_ips.add(b[1])
+        }
+        String [][] frontends = sql """ show frontends """
+        for (def f in frontends) {
+            host_ips.add(f[1])
+        }
+        dispatchTrinoConnectors(host_ips.unique())
+
         def kafkaCsvTpoics = [
                 "trino_kafka_basic_data"
             ]
diff --git a/regression-test/suites/external_table_p0/trino_connector/test_plugins_download.groovy b/regression-test/suites/external_table_p0/trino_connector/test_plugins_download.groovy
index 3d28612cf62..028383b6609 100644
--- a/regression-test/suites/external_table_p0/trino_connector/test_plugins_download.groovy
+++ b/regression-test/suites/external_table_p0/trino_connector/test_plugins_download.groovy
@@ -16,14 +16,17 @@
 // under the License.
 
 suite("test_plugins_download", 
"external,hive,external_docker,external_docker_hive") {
-    def host_ips = new ArrayList()
-    String[][] backends = sql """ show backends """
-    for (def b in backends) {
-        host_ips.add(b[1])
+    String enabled = context.config.otherConfigs.get("enableTrinoConnectorTest")
+    if (enabled != null && enabled.equalsIgnoreCase("true")) {
+        def host_ips = new ArrayList()
+        String[][] backends = sql """ show backends """
+        for (def b in backends) {
+            host_ips.add(b[1])
+        }
+        String [][] frontends = sql """ show frontends """
+        for (def f in frontends) {
+            host_ips.add(f[1])
+        }
+        dispatchTrinoConnectors(host_ips.unique())
     }
-    String [][] frontends = sql """ show frontends """
-    for (def f in frontends) {
-        host_ips.add(f[1])
-    }
-    dispatchTrinoConnectors(host_ips.unique())
 }
\ No newline at end of file
diff --git a/regression-test/suites/github_events_p2/load.groovy b/regression-test/suites/github_events_p2/load.groovy
index 92a588a2214..251d8f76fd0 100644
--- a/regression-test/suites/github_events_p2/load.groovy
+++ b/regression-test/suites/github_events_p2/load.groovy
@@ -27,7 +27,7 @@
  */
 suite("load") {
     restore {
-        location "s3://${getS3BucketName()}/regression_backup/clickhouse/github_events"
+        location "s3://${getS3BucketName()}/regression/clickhouse/github_events"
         ak "${getS3AK()}"
         sk "${getS3SK()}"
         endpoint "http://${getS3Endpoint()}"
diff --git a/regression-test/suites/variant_log_data_p2/load.groovy b/regression-test/suites/variant_log_data_p2/load.groovy
index cbb45c46e21..b277c7ef4a9 100644
--- a/regression-test/suites/variant_log_data_p2/load.groovy
+++ b/regression-test/suites/variant_log_data_p2/load.groovy
@@ -73,21 +73,21 @@ suite("regression_test_variant_logdata", "nonConcurrent,p2"){
     // sql "set enable_two_phase_read_opt = false;"
     // no sparse columns
     set_be_config.call("variant_ratio_of_defaults_as_sparse_column", "1.0")
-    load_json_data.call(table_name, """${getS3Url() + '/load/logdata.json'}""")
+    load_json_data.call(table_name, """${getS3Url() + '/regression/load/logdata.json'}""")
     qt_sql_32 """ select json_extract(v, "\$.json.parseFailed") from logdata where  json_extract(v, "\$.json.parseFailed") != 'null' order by k limit 1;"""
     qt_sql_32_1 """select cast(v['json']['parseFailed'] as string) from  logdata where cast(v['json']['parseFailed'] as string) is not null and k = 162 limit 1;"""
     sql "truncate table ${table_name}"
 
     // 0.95 default ratio    
     set_be_config.call("variant_ratio_of_defaults_as_sparse_column", "0.95")
-    load_json_data.call(table_name, """${getS3Url() + '/load/logdata.json'}""")
+    load_json_data.call(table_name, """${getS3Url() + '/regression/load/logdata.json'}""")
     qt_sql_33 """ select json_extract(v,"\$.json.parseFailed") from logdata where  json_extract(v,"\$.json.parseFailed") != 'null' order by k limit 1;"""
     qt_sql_33_1 """select cast(v['json']['parseFailed'] as string) from  logdata where cast(v['json']['parseFailed'] as string) is not null and k = 162 limit 1;"""
     sql "truncate table ${table_name}"
 
     // always sparse column
     set_be_config.call("variant_ratio_of_defaults_as_sparse_column", "0.95")
-    load_json_data.call(table_name, """${getS3Url() + '/load/logdata.json'}""")
+    load_json_data.call(table_name, """${getS3Url() + '/regression/load/logdata.json'}""")
     qt_sql_34 """ select json_extract(v, "\$.json.parseFailed") from logdata where  json_extract(v,"\$.json.parseFailed") != 'null' order by k limit 1;"""
     sql "truncate table ${table_name}"
     qt_sql_35 """select json_extract(v,"\$.json.parseFailed")  from logdata 
where k = 162 and  json_extract(v,"\$.json.parseFailed") != 'null';"""
diff --git a/regression-test/suites/variant_p0/load.groovy b/regression-test/suites/variant_p0/load.groovy
index 2b9ec514031..d74155b182d 100644
--- a/regression-test/suites/variant_p0/load.groovy
+++ b/regression-test/suites/variant_p0/load.groovy
@@ -203,7 +203,7 @@ suite("regression_test_variant", "p0"){
         // 7. gh data
         table_name = "ghdata"
         create_table table_name
-        load_json_data.call(table_name, """${getS3Url() + '/load/ghdata_sample.json'}""")
+        load_json_data.call(table_name, """${getS3Url() + '/regression/load/ghdata_sample.json'}""")
         qt_sql_26 "select count() from ${table_name}"
 
         // 8. json empty string
@@ -218,7 +218,7 @@ suite("regression_test_variant", "p0"){
         // // // 9. btc data
         // // table_name = "btcdata"
         // // create_table table_name
-        // // load_json_data.call(table_name, """${getS3Url() + '/load/btc_transactions.json'}""")
+        // // load_json_data.call(table_name, """${getS3Url() + '/regression/load/btc_transactions.json'}""")
         // // qt_sql_28 "select count() from ${table_name}"
 
         // 10. alter add variant


---------------------------------------------------------------------
To unsubscribe, e-mail: [email protected]
For additional commands, e-mail: [email protected]
