This is an automated email from the ASF dual-hosted git repository.

morrysnow pushed a commit to branch branch-3.1
in repository https://gitbox.apache.org/repos/asf/doris.git


The following commit(s) were added to refs/heads/branch-3.1 by this push:
     new c93ff4e222f [chore](ci) fix some pipeline script (#51914)
c93ff4e222f is described below

commit c93ff4e222fcee894dae38bdfa1069ce9f17063e
Author: Dongyang Li <[email protected]>
AuthorDate: Thu Jun 19 19:29:44 2025 +0800

    [chore](ci) fix some pipeline script (#51914)
---
 regression-test/pipeline/cloud_p0/prepare.sh       |   4 +-
 .../pipeline/nonConcurrent/conf/be.conf            |  90 +++++++++++++
 .../pipeline/nonConcurrent/conf/fe.conf            |  94 ++++++++++++++
 .../nonConcurrent/conf/regression-conf.groovy      | 143 +++++++++++++++++++++
 regression-test/pipeline/performance/compile.sh    |   4 +-
 regression-test/pipeline/performance/deploy.sh     |   6 +-
 regression-test/pipeline/performance/prepare.sh    |  10 +-
 .../pipeline/performance/run-clickbench.sh         |   6 +-
 regression-test/pipeline/performance/run-load.sh   |   6 +-
 regression-test/pipeline/vault_p0/prepare.sh       |   4 +-
 10 files changed, 347 insertions(+), 20 deletions(-)

diff --git a/regression-test/pipeline/cloud_p0/prepare.sh b/regression-test/pipeline/cloud_p0/prepare.sh
index 44154c2ee26..bfd3ebb2246 100644
--- a/regression-test/pipeline/cloud_p0/prepare.sh
+++ b/regression-test/pipeline/cloud_p0/prepare.sh
@@ -67,11 +67,11 @@ fi
 # shellcheck source=/dev/null
 source "$(bash 
"${teamcity_build_checkoutDir}"/regression-test/pipeline/common/get-or-set-tmp-env.sh
 'get')"
 if ${skip_pipeline:=false}; then echo "INFO: skip build pipline" && exit 0; 
else echo "INFO: no skip"; fi
-if [[ "${target_branch}" == "master" || "${target_branch}" == "branch-3.0" ]]; 
then
+if [[ "${target_branch}" == "master" || "${target_branch}" == "branch-3.1" ]]; 
then
     echo "INFO: PR target branch ${target_branch}"
     install_java
 else
-    echo "WARNING: PR target branch ${target_branch} is NOT in (master, 
branch-3.0), skip pipeline."
+    echo "WARNING: PR target branch ${target_branch} is NOT in (master, 
branch-3.1), skip pipeline."
     bash 
"${teamcity_build_checkoutDir}"/regression-test/pipeline/common/get-or-set-tmp-env.sh
 'set' "export skip_pipeline=true"
     exit 0
 fi
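For context, all of the prepare.sh changes in this commit adjust the same branch gate: the script checks the PR's target branch against an allow-list and, when the branch is not listed, records a skip flag that later pipeline steps read back. A minimal, self-contained sketch of that pattern (the allow-list and the plain export are illustrative; the real scripts persist the flag through get-or-set-tmp-env.sh):

    #!/usr/bin/env bash
    # Simplified branch gate; not the pipeline's exact helper.
    target_branch="${target_branch:-master}"
    case "${target_branch}" in
        master | branch-3.1)
            echo "INFO: PR target branch ${target_branch}"
            ;;
        *)
            echo "WARNING: PR target branch ${target_branch} is not supported, skip pipeline."
            export skip_pipeline=true
            exit 0
            ;;
    esac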
diff --git a/regression-test/pipeline/nonConcurrent/conf/be.conf b/regression-test/pipeline/nonConcurrent/conf/be.conf
new file mode 100644
index 00000000000..961756b2e71
--- /dev/null
+++ b/regression-test/pipeline/nonConcurrent/conf/be.conf
@@ -0,0 +1,90 @@
+# Licensed to the Apache Software Foundation (ASF) under one
+# or more contributor license agreements.  See the NOTICE file
+# distributed with this work for additional information
+# regarding copyright ownership.  The ASF licenses this file
+# to you under the Apache License, Version 2.0 (the
+# "License"); you may not use this file except in compliance
+# with the License.  You may obtain a copy of the License at
+#
+#   http://www.apache.org/licenses/LICENSE-2.0
+#
+# Unless required by applicable law or agreed to in writing,
+# software distributed under the License is distributed on an
+# "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
+# KIND, either express or implied.  See the License for the
+# specific language governing permissions and limitations
+# under the License.
+
+CUR_DATE=`date +%Y%m%d-%H%M%S`
+
+PPROF_TMPDIR="$DORIS_HOME/log/"
+
+# For jdk 8
+JAVA_OPTS="-Xmx1024m -DlogPath=$DORIS_HOME/log/jni.log 
-Xloggc:$DORIS_HOME/log/be.gc.log.$CUR_DATE -XX:+UseGCLogFileRotation 
-XX:NumberOfGCLogFiles=10 -XX:GCLogFileSize=50M 
-Djavax.security.auth.useSubjectCredsOnly=false -Dsun.security.krb5.debug=true 
-Dsun.java.command=DorisBE -XX:-CriticalJNINatives 
-Dcom.mysql.cj.disableAbandonedConnectionCleanup=true"
+
+# For jdk 17, this JAVA_OPTS will be used as default JVM options
+JAVA_OPTS_FOR_JDK_17="-Xmx1024m -DlogPath=$DORIS_HOME/log/jni.log 
-Xlog:gc*:$DORIS_HOME/log/be.gc.log.$CUR_DATE:time,uptime:filecount=10,filesize=50M
 -Djavax.security.auth.useSubjectCredsOnly=false -Dsun.security.krb5.debug=true 
-Dsun.java.command=DorisBE -XX:-CriticalJNINatives 
-XX:+IgnoreUnrecognizedVMOptions --add-opens=java.base/java.lang=ALL-UNNAMED 
--add-opens=java.base/java.lang.invoke=ALL-UNNAMED 
--add-opens=java.base/java.lang.reflect=ALL-UNNAMED 
--add-opens=java.base/java.io=AL [...]
+
+# Set your own JAVA_HOME
+# JAVA_HOME=/path/to/jdk/
+
+# https://github.com/apache/doris/blob/master/docs/zh-CN/community/developer-guide/debug-tool.md#jemalloc-heap-profile
+# https://jemalloc.net/jemalloc.3.html
+JEMALLOC_CONF="percpu_arena:percpu,background_thread:true,metadata_thp:auto,muzzy_decay_ms:5000,dirty_decay_ms:5000,oversize_threshold:0,prof:true,prof_active:false,lg_prof_interval:-1"
+JEMALLOC_PROF_PRFIX="jemalloc_heap_profile_"
+
+# INFO, WARNING, ERROR, FATAL
+sys_log_level = INFO
+sys_log_verbose_modules=query_context,runtime_query_statistics_mgr
+be_port = 9161
+webserver_port = 8141
+heartbeat_service_port = 9151
+brpc_port = 8161
+arrow_flight_sql_port = 8181
+
+path_gc_check_interval_second=1
+max_garbage_sweep_interval=180
+
+log_buffer_level = -1
+
+enable_stream_load_record = true
+stream_load_record_batch_size = 500
+storage_root_path=/mnt/ssd01/cluster_storage/doris.SSD/P0/cluster1;/mnt/ssd01/cluster_storage/doris.SSD
+
+priority_networks=172.19.0.0/24
+enable_fuzzy_mode=true
+max_depth_of_expr_tree=200
+enable_feature_binlog=true
+max_sys_mem_available_low_water_mark_bytes=69206016
+user_files_secure_path=/
+enable_debug_points=true
+# debug scanner context dead loop
+enable_debug_log_timeout_secs=0
+enable_missing_rows_correctness_check=true
+
+flush_thread_num_per_store = 24
+high_priority_flush_thread_num_per_store = 24
+
+trino_connector_plugin_dir=/tmp/trino_connector/connectors
+
+enable_jvm_monitor = true
+
+enable_be_proc_monitor = true
+be_proc_monitor_interval_ms = 30000
+webserver_num_workers = 128
+pipeline_task_leakage_detect_period_sec=1
+crash_in_memory_tracker_inaccurate = true
+#enable_table_size_correctness_check=true
+enable_brpc_connection_check=true
+enable_write_index_searcher_cache=true
+
+# enable download small files in batch, see apache/doris#45061 for details
+enable_batch_download = true
+
+remove_unused_remote_files_interval_sec=60
+cold_data_compaction_interval_sec=60
+large_cumu_compaction_task_min_thread_num=3
+
+# This feature has a bug, so it is disabled by default; enable it only in this pipeline to observe it.
+enable_parquet_page_index=true
+
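To confirm that a BE picked up one of the settings above once the cluster is running, the BE webserver (port 8141 in this config) exposes its effective configuration; a hedged example, assuming the /api/show_config endpoint is available in this build:

    # Ask the running BE for its effective config (endpoint availability assumed)
    # and check that debug points were enabled as configured above.
    curl -s "http://127.0.0.1:8141/api/show_config" | grep -o '"enable_debug_points[^]]*'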
diff --git a/regression-test/pipeline/nonConcurrent/conf/fe.conf b/regression-test/pipeline/nonConcurrent/conf/fe.conf
new file mode 100644
index 00000000000..85d7a878155
--- /dev/null
+++ b/regression-test/pipeline/nonConcurrent/conf/fe.conf
@@ -0,0 +1,94 @@
+# Licensed to the Apache Software Foundation (ASF) under one
+# or more contributor license agreements.  See the NOTICE file
+# distributed with this work for additional information
+# regarding copyright ownership.  The ASF licenses this file
+# to you under the Apache License, Version 2.0 (the
+# "License"); you may not use this file except in compliance
+# with the License.  You may obtain a copy of the License at
+#
+#   http://www.apache.org/licenses/LICENSE-2.0
+#
+# Unless required by applicable law or agreed to in writing,
+# software distributed under the License is distributed on an
+# "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
+# KIND, either express or implied.  See the License for the
+# specific language governing permissions and limitations
+# under the License.
+
+#####################################################################
+## The uppercase properties are read and exported by bin/start_fe.sh.
+## To see all Frontend configurations,
+## see fe/src/org/apache/doris/common/Config.java
+#####################################################################
+
+CUR_DATE=`date +%Y%m%d-%H%M%S`
+
+# the output dir of stderr and stdout 
+LOG_DIR = ${DORIS_HOME}/log
+
+# For jdk 8
+JAVA_OPTS="-Djavax.security.auth.useSubjectCredsOnly=false -Xss4m -Xmx4096m 
-XX:+HeapDumpOnOutOfMemoryError -XX:+UnlockExperimentalVMOptions -XX:+UseG1GC 
-XX:MaxGCPauseMillis=200 -XX:+PrintGCDateStamps -XX:+PrintGCDetails 
-XX:+PrintClassHistogramAfterFullGC -Xloggc:$DORIS_HOME/log/fe.gc.log.$CUR_DATE 
-XX:+UseGCLogFileRotation -XX:NumberOfGCLogFiles=10 -XX:GCLogFileSize=50M 
-Dlog4j2.formatMsgNoLookups=true 
-Dcom.mysql.cj.disableAbandonedConnectionCleanup=true"
+
+# For jdk 17, this JAVA_OPTS will be used as default JVM options
+JAVA_OPTS_FOR_JDK_17="-Djavax.security.auth.useSubjectCredsOnly=false 
-Xmx8192m -Xms8192m -XX:+HeapDumpOnOutOfMemoryError 
-XX:HeapDumpPath=$DORIS_HOME/log/ 
-Xlog:gc*,classhisto*=trace:$DORIS_HOME/log/fe.gc.log.$CUR_DATE:time,uptime:filecount=10,filesize=50M
 -Dcom.mysql.cj.disableAbandonedConnectionCleanup=true 
--add-opens=java.base/java.nio=ALL-UNNAMED --add-opens 
java.base/jdk.internal.ref=ALL-UNNAMED"
+
+sys_log_level = INFO
+sys_log_mode = NORMAL
+sys_log_verbose_modules = org.apache.doris.master.MasterImpl,org.apache.doris.common.profile,org.apache.doris.qe.QeProcessorImpl,org.apache.doris.load.ExportTaskExecutor,org.apache.doris.planner.OlapScanNode
+arrow_flight_sql_port = 8081
+catalog_trash_expire_second=1
+#enable ssl for test
+enable_ssl = true
+
+enable_outfile_to_local = true
+tablet_create_timeout_second=100
+remote_fragment_exec_timeout_ms=120000
+fuzzy_test_type=p0
+use_fuzzy_session_variable=true
+
+enable_feature_binlog=true
+
+enable_debug_points=true
+
+# enable mtmv
+enable_mtmv = true
+
+dynamic_partition_check_interval_seconds=3
+
+desired_max_waiting_jobs=200
+
+# make checkpoint more frequent
+edit_log_roll_num = 1000
+
+# make job/label clean more frequent
+history_job_keep_max_second = 300
+streaming_label_keep_max_second = 300
+label_keep_max_second = 300
+
+# job test configurations
+#allows the creation of jobs with an interval of second
+enable_job_schedule_second_for_test = true
+mtmv_task_queue_size = 4096
+
+enable_workload_group = true
+publish_topic_info_interval_ms = 1000
+workload_sched_policy_interval_ms = 1000
+
+disable_decimalv2 = false
+disable_datev1 = false
+
+master_sync_policy = WRITE_NO_SYNC
+replica_sync_policy = WRITE_NO_SYNC
+
+enable_advance_next_id = true
+# enable deadlock detection
+enable_deadlock_detection = true
+max_lock_hold_threshold_seconds = 10
+
+force_olap_table_replication_allocation=tag.location.default:1
+
+# profile related
+max_query_profile_num = 2000
+max_spilled_profile_num = 2000
+
+check_table_lock_leaky=true
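Similarly, FE settings from this file can be checked after startup over the MySQL protocol. A sketch, using the query port and credentials that the regression config below assumes (9131, root, empty password):

    # Verify that the FE applied a config item from fe.conf (connection details assumed).
    mysql -h 172.19.0.2 -P 9131 -u root -e "ADMIN SHOW FRONTEND CONFIG LIKE 'enable_debug_points';"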
diff --git a/regression-test/pipeline/nonConcurrent/conf/regression-conf.groovy b/regression-test/pipeline/nonConcurrent/conf/regression-conf.groovy
new file mode 100644
index 00000000000..c1c1ad91865
--- /dev/null
+++ b/regression-test/pipeline/nonConcurrent/conf/regression-conf.groovy
@@ -0,0 +1,143 @@
+// Licensed to the Apache Software Foundation (ASF) under one
+// or more contributor license agreements.  See the NOTICE file
+// distributed with this work for additional information
+// regarding copyright ownership.  The ASF licenses this file
+// to you under the Apache License, Version 2.0 (the
+// "License"); you may not use this file except in compliance
+// with the License.  You may obtain a copy of the License at
+//
+//   http://www.apache.org/licenses/LICENSE-2.0
+//
+// Unless required by applicable law or agreed to in writing,
+// software distributed under the License is distributed on an
+// "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
+// KIND, either express or implied.  See the License for the
+// specific language governing permissions and limitations
+// under the License.
+
+/* ******* Do not commit this file unless you know what you are doing ******* */
+
+// **Note**: the default db will be created if it does not exist
+defaultDb = "regression_test"
+
+jdbcUrl = "jdbc:mysql://172.19.0.2:9131/?useLocalSessionState=true&allowLoadLocalInfile=true&zeroDateTimeBehavior=round"
+targetJdbcUrl = "jdbc:mysql://172.19.0.2:9131/?useLocalSessionState=true&allowLoadLocalInfile=true&zeroDateTimeBehavior=round"
+jdbcUser = "root"
+jdbcPassword = ""
+
+ccrDownstreamUrl = "jdbc:mysql://172.19.0.2:9131/?useLocalSessionState=true&allowLoadLocalInfile=true"
+ccrDownstreamUser = "root"
+ccrDownstreamPassword = ""
+ccrDownstreamFeThriftAddress = "127.0.0.1:9020"
+
+feSourceThriftAddress = "127.0.0.1:9020"
+feTargetThriftAddress = "127.0.0.1:9020"
+feSyncerUser = "root"
+feSyncerPassword = ""
+
+feHttpAddress = "172.19.0.2:8131"
+feHttpUser = "root"
+feHttpPassword = ""
+
+// set DORIS_HOME by system properties
+// e.g. java -DDORIS_HOME=./
+suitePath = "${DORIS_HOME}/regression-test/suites"
+dataPath = "${DORIS_HOME}/regression-test/data"
+pluginPath = "${DORIS_HOME}/regression-test/plugins"
+realDataPath = "${DORIS_HOME}/regression-test/realdata"
+trinoPluginsPath = "/tmp/trino_connector"
+// sf1DataPath can be url like "https://doris-community-test-1308700295.cos.ap-hongkong.myqcloud.com" or local path like "/data"
+//sf1DataPath = "https://doris-community-test-1308700295.cos.ap-hongkong.myqcloud.com"
+
+// will test <group>/<suite>.groovy
+// empty group will test all group
+testGroups = "nonConcurrent"
+// empty suite will test all suite
+testSuites = ""
+// empty directories will test all directories
+testDirectories = ""
+
+// these groups will not be executed
+excludeGroups = "p1,p2"
+
+// these suites will not be executed
+excludeSuites = "000_the_start_sentinel_do_not_touch," + // keep this line as the first line
+    "test_write_inverted_index_exception_fault_injection," + // cause core dump
+    "zzz_the_end_sentinel_do_not_touch" // keep this line as the last line
+
+// these directories will not be executed
+excludeDirectories = "000_the_start_sentinel_do_not_touch," + // keep this line as the first line
+    "variant_github_events_nonConcurrent_p2," +
+    "variant_github_events_new_p2," +
+    "hdfs_vault_p2," +
+    "nereids_p0/hbo," +
+    "zzz_the_end_sentinel_do_not_touch"// keep this line as the last line
+
+// for test csv with header
+enableHdfs=false // set to true if hdfs is ready
+hdfsFs = "hdfs://127.0.0.1:9000"
+hdfsUser = "doris-test"
+hdfsPasswd = ""
+brokerName = "broker_name"
+
+// broker load test config
+enableBrokerLoad=true
+
+// jdbc connector test config
+// To enable the jdbc test, you need to first start the mysql/pg containers.
+// See `docker/thirdparties/start-thirdparties-docker.sh`
+enableJdbcTest=false
+mysql_57_port=7111
+pg_14_port=7121
+mariadb_10_port=3326
+// hive catalog test config
+// To enable the hive test, you need to first start the hive container.
+// See `docker/thirdparties/start-thirdparties-docker.sh`
+enableHiveTest=false
+enablePaimonTest=false
+
+// port of hive2 docker
+hive2HmsPort=9083
+hive2HdfsPort=8020
+hive2ServerPort=10000
+hive2PgPort=5432
+
+// port of hive3 docker
+hive3HmsPort=9383
+hive3HdfsPort=8320
+hive3ServerPort=13000
+hive3PgPort=5732
+
+// kafka test config
+// To enable the kafka test, you need to first start the kafka container.
+// See `docker/thirdparties/start-thirdparties-docker.sh`
+enableKafkaTest=true
+kafka_port=19193
+
+// iceberg test config
+iceberg_rest_uri_port=18181
+iceberg_minio_port=19001
+
+enableEsTest=false
+es_6_port=19200
+es_7_port=29200
+es_8_port=39200
+
+cacheDataPath = "/data/regression/"
+
+s3Source = "aliyun"
+s3Endpoint = "oss-cn-hongkong-internal.aliyuncs.com"
+
+//arrow flight sql test config
+extArrowFlightSqlHost = "127.0.0.1"
+extArrowFlightSqlPort = 8081
+extArrowFlightSqlUser = "root"
+extArrowFlightSqlPassword= ""
+
+max_failure_num=50
+
+externalEnvIp="127.0.0.1"
+
+// trino-connector catalog test config
+enableTrinoConnectorTest = false
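For reference, a regression-conf.groovy like the one above is consumed by the repository's run-regression-test.sh entry point; with testGroups set to nonConcurrent, a local run limited to that group would look roughly like this (it assumes FE/BE are already deployed with the ports configured above):

    # Sketch of a local run restricted to the nonConcurrent group.
    cd "${DORIS_HOME}"
    ./run-regression-test.sh --run -g nonConcurrent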
diff --git a/regression-test/pipeline/performance/compile.sh b/regression-test/pipeline/performance/compile.sh
index 34f49db0c10..05c6544ca2f 100644
--- a/regression-test/pipeline/performance/compile.sh
+++ b/regression-test/pipeline/performance/compile.sh
@@ -76,7 +76,7 @@ merge_pr_to_target_branch_latest() {
     fi
 }
 
-if [[ "${target_branch}" == "master" || "${target_branch}" == "branch-3.0" ]]; 
then
+if [[ "${target_branch}" == "master" || "${target_branch}" == "branch-3.1" ]]; 
then
     REMOTE_CCACHE='/mnt/remote_ccache_master'
     docker_image="apache/doris:build-env-ldb-toolchain-0.19-latest"
 elif [[ "${target_branch}" == "branch-2.0" ]]; then
@@ -109,7 +109,7 @@ if sudo docker ps -a --no-trunc | grep "${docker_name}"; then
 fi
 rm -f custom_env.sh
 cp "${teamcity_build_checkoutDir}"/regression-test/pipeline/performance/conf/custom_env.sh .
-if [[ "${target_branch}" == "master" || "${target_branch}" == "branch-3.0" ]]; 
then
+if [[ "${target_branch}" == "master" || "${target_branch}" == "branch-3.1" ]]; 
then
     echo "export JAVA_HOME=/usr/lib/jvm/jdk-17.0.2" >>custom_env.sh
 fi
 rm -rf "${teamcity_build_checkoutDir}"/output
diff --git a/regression-test/pipeline/performance/deploy.sh b/regression-test/pipeline/performance/deploy.sh
index 36870058641..be872df09f5 100644
--- a/regression-test/pipeline/performance/deploy.sh
+++ b/regression-test/pipeline/performance/deploy.sh
@@ -68,9 +68,9 @@ exit_flag=0
     cp -f "${teamcity_build_checkoutDir}"/regression-test/pipeline/performance/conf/fe_custom.conf "${DORIS_HOME}"/fe/conf/
     cp -f "${teamcity_build_checkoutDir}"/regression-test/pipeline/performance/conf/be_custom.conf "${DORIS_HOME}"/be/conf/
     target_branch="$(echo "${target_branch}" | sed 's| ||g;s|\.||g;s|-||g')" # remove space、dot、hyphen from branch name
-    if [[ "${target_branch}" == "branch30" ]]; then
-        # branch-3.0 also use master data
-        target_branch="master"
+    if [[ "${target_branch}" == "branch31" ]]; then
+        # branch-3.1 also use branch30 data
+        target_branch="branch30"
     fi
     sed -i "s|^meta_dir=/data/doris-meta-\${branch_name}|meta_dir=/data/doris-meta-${target_branch}${meta_changed_suffix:-}|g" "${DORIS_HOME}"/fe/conf/fe_custom.conf
     sed -i "s|^storage_root_path=/data/doris-storage-\${branch_name}|storage_root_path=/data/doris-storage-${target_branch}${meta_changed_suffix:-}|g" "${DORIS_HOME}"/be/conf/be_custom.conf
diff --git a/regression-test/pipeline/performance/prepare.sh b/regression-test/pipeline/performance/prepare.sh
index 635b4510eba..b621a8d61e5 100644
--- a/regression-test/pipeline/performance/prepare.sh
+++ b/regression-test/pipeline/performance/prepare.sh
@@ -74,7 +74,7 @@ source "$(bash 
"${teamcity_build_checkoutDir}"/regression-test/pipeline/common/g
 source 
"${teamcity_build_checkoutDir}"/regression-test/pipeline/common/doris-utils.sh
 
 if ${skip_pipeline:=false}; then echo "INFO: skip build pipline" && exit 0; 
else echo "INFO: no skip"; fi
-if [[ "${target_branch}" == "master" || "${target_branch}" == "branch-3.0" ]]; 
then
+if [[ "${target_branch}" == "master" || "${target_branch}" == "branch-3.1" ]]; 
then
     echo "INFO: PR target branch ${target_branch}"
     install_java
     JAVA_HOME="${JAVA_HOME:-$(find /usr/lib/jvm -maxdepth 1 -type d -name 
'java-17-*' | sed -n '1p')}"
@@ -82,7 +82,7 @@ if [[ "${target_branch}" == "master" || "${target_branch}" == 
"branch-3.0" ]]; t
 elif [[ "${target_branch}" == "branch-2.0" ]]; then
     echo "INFO: PR target branch ${target_branch}"
 else
-    echo "WARNING: PR target branch ${target_branch} is NOT in (master, 
branch-3.0, branch-2.0), skip pipeline."
+    echo "WARNING: PR target branch ${target_branch} is NOT in (master, 
branch-3.1, branch-3.0), skip pipeline."
     bash 
"${teamcity_build_checkoutDir}"/regression-test/pipeline/common/get-or-set-tmp-env.sh
 'set' "export skip_pipeline=true"
     exit 0
 fi
@@ -98,9 +98,9 @@ if _get_pr_changed_files "${pr_num_from_trigger}"; then
         # if PR changed the doris meta file, the next PR deployment on the same mechine which built this PR will fail.
         # make a copy of the meta file for the meta changed PR.
         target_branch="$(echo "${target_branch}" | sed 's| ||g;s|\.||g;s|-||g')" # remove space、dot、hyphen from branch name
-        if [[ "${target_branch}" == "branch30" ]]; then
-            # branch-3.0 also use master data
-            target_branch="master"
+        if [[ "${target_branch}" == "branch31" ]]; then
+            # branch-3.1 also use branch30 data
+            target_branch="branch30"
         fi
         meta_changed_suffix="_2"
         rsync -a --delete "/data/doris-meta-${target_branch}/" "/data/doris-meta-${target_branch}${meta_changed_suffix}"
diff --git a/regression-test/pipeline/performance/run-clickbench.sh b/regression-test/pipeline/performance/run-clickbench.sh
index 98f8e34ffd6..d2e9a704c2c 100644
--- a/regression-test/pipeline/performance/run-clickbench.sh
+++ b/regression-test/pipeline/performance/run-clickbench.sh
@@ -91,9 +91,9 @@ exit_flag=0
     cp -f "${teamcity_build_checkoutDir}"/regression-test/pipeline/performance/clickbench/conf/fe_custom.conf "${DORIS_HOME}"/fe/conf/
     cp -f "${teamcity_build_checkoutDir}"/regression-test/pipeline/performance/clickbench/conf/be_custom.conf "${DORIS_HOME}"/be/conf/
     target_branch="$(echo "${target_branch}" | sed 's| ||g;s|\.||g;s|-||g')" # remove space、dot、hyphen from branch name
-    if [[ "${target_branch}" == "branch30" ]]; then
-        # branch-3.0 also use master data
-        target_branch="master"
+    if [[ "${target_branch}" == "branch31" ]]; then
+        # branch-3.1 also use branch30 data
+        target_branch="branch30"
     fi
     sed -i "s|^meta_dir=/data/doris-meta-\${branch_name}|meta_dir=/data/doris-meta-${target_branch}|g" "${DORIS_HOME}"/fe/conf/fe_custom.conf
     sed -i "s|^storage_root_path=/data/doris-storage-\${branch_name}|storage_root_path=/data/doris-storage-${target_branch}|g" "${DORIS_HOME}"/be/conf/be_custom.conf
diff --git a/regression-test/pipeline/performance/run-load.sh b/regression-test/pipeline/performance/run-load.sh
index 46b89f09d76..1ae24d14a14 100644
--- a/regression-test/pipeline/performance/run-load.sh
+++ b/regression-test/pipeline/performance/run-load.sh
@@ -649,9 +649,9 @@ exit_flag=0
     cp -f "${teamcity_build_checkoutDir}"/regression-test/pipeline/performance/conf/fe_custom.conf "${DORIS_HOME}"/fe/conf/
     cp -f "${teamcity_build_checkoutDir}"/regression-test/pipeline/performance/conf/be_custom.conf "${DORIS_HOME}"/be/conf/
     target_branch="$(echo "${target_branch}" | sed 's| ||g;s|\.||g;s|-||g')" # remove space、dot、hyphen from branch name
-    if [[ "${target_branch}" == "branch30" ]]; then
-        # branch-3.0 also use master data
-        target_branch="master"
+    if [[ "${target_branch}" == "branch31" ]]; then
+        # branch-3.1 also use branch30 data
+        target_branch="branch30"
     fi
     sed -i "s|^meta_dir=/data/doris-meta-\${branch_name}|meta_dir=/data/doris-meta-${target_branch}|g" "${DORIS_HOME}"/fe/conf/fe_custom.conf
     sed -i "s|^storage_root_path=/data/doris-storage-\${branch_name}|storage_root_path=/data/doris-storage-${target_branch}|g" "${DORIS_HOME}"/be/conf/be_custom.conf
diff --git a/regression-test/pipeline/vault_p0/prepare.sh b/regression-test/pipeline/vault_p0/prepare.sh
index 00f6878ee59..a42cb50fc33 100644
--- a/regression-test/pipeline/vault_p0/prepare.sh
+++ b/regression-test/pipeline/vault_p0/prepare.sh
@@ -67,11 +67,11 @@ fi
 # shellcheck source=/dev/null
 source "$(bash 
"${teamcity_build_checkoutDir}"/regression-test/pipeline/common/get-or-set-tmp-env.sh
 'get')"
 if ${skip_pipeline:=false}; then echo "INFO: skip build pipline" && exit 0; 
else echo "INFO: no skip"; fi
-if [[ "${target_branch}" == "master" || "${target_branch}" == "branch-3.0" ]]; 
then
+if [[ "${target_branch}" == "master" || "${target_branch}" == "branch-3.1" ]]; 
then
     echo "INFO: PR target branch ${target_branch}"
     install_java
 else
-    echo "WARNING: PR target branch ${target_branch} is NOT in (master, 
branch-3.0), skip pipeline."
+    echo "WARNING: PR target branch ${target_branch} is NOT in (master, 
branch-3.1), skip pipeline."
     bash 
"${teamcity_build_checkoutDir}"/regression-test/pipeline/common/get-or-set-tmp-env.sh
 'set' "export skip_pipeline=true"
     exit 0
 fi


---------------------------------------------------------------------
To unsubscribe, e-mail: [email protected]
For additional commands, e-mail: [email protected]
