This is an automated email from the ASF dual-hosted git repository.
hellostephen pushed a commit to branch branch-3.0
in repository https://gitbox.apache.org/repos/asf/doris.git
The following commit(s) were added to refs/heads/branch-3.0 by this push:
new dd531548469 branch-3.0: [ci](vault) add vault_p0 pipeline #45964
(#46356)
dd531548469 is described below
commit dd5315484691d4290cc8f20d38e19b6e87006fbb
Author: github-actions[bot]
<41898282+github-actions[bot]@users.noreply.github.com>
AuthorDate: Fri Jan 3 16:12:14 2025 +0800
branch-3.0: [ci](vault) add vault_p0 pipeline #45964 (#46356)
Cherry-picked from #45964
Co-authored-by: Dongyang Li <[email protected]>
---
.github/workflows/comment-to-trigger-teamcity.yml | 34 +++-
regression-test/pipeline/cloud_p0/README.md | 3 +
regression-test/pipeline/common/doris-utils.sh | 38 +++++
regression-test/pipeline/common/teamcity-utils.sh | 3 +
regression-test/pipeline/vault_p0/README.md | 5 +
regression-test/pipeline/vault_p0/clean.sh | 33 ++++
.../pipeline/vault_p0/conf/be_custom.conf | 41 +++++
.../pipeline/vault_p0/conf/fe_custom.conf | 43 +++++
.../pipeline/vault_p0/conf/ms_custom.conf | 2 +
.../pipeline/vault_p0/conf/recycler_custom.conf | 2 +
.../vault_p0/conf/regression-conf-custom.groovy | 36 +++++
.../pipeline/vault_p0/conf/session_variables.sql | 1 +
regression-test/pipeline/vault_p0/deploy.sh | 110 +++++++++++++
regression-test/pipeline/vault_p0/prepare.sh | 176 +++++++++++++++++++++
regression-test/pipeline/vault_p0/run.sh | 167 +++++++++++++++++++
15 files changed, 693 insertions(+), 1 deletion(-)
diff --git a/.github/workflows/comment-to-trigger-teamcity.yml
b/.github/workflows/comment-to-trigger-teamcity.yml
index e99dd1b4d29..7872c216d7c 100644
--- a/.github/workflows/comment-to-trigger-teamcity.yml
+++ b/.github/workflows/comment-to-trigger-teamcity.yml
@@ -56,6 +56,7 @@ jobs:
"${COMMENT_BODY}" == *'run external'* ||
"${COMMENT_BODY}" == *'run cloud_p0'* ||
"${COMMENT_BODY}" == *'run cloud_p1'* ||
+ "${COMMENT_BODY}" == *'run vault_p0'* ||
"${COMMENT_BODY}" == *'run arm'* ||
"${COMMENT_BODY}" == *'run performance'* ]]; then
echo "comment_trigger=true" | tee -a "$GITHUB_OUTPUT"
@@ -86,7 +87,7 @@ jobs:
echo "TARGET_BRANCH='${TARGET_BRANCH}'" | tee -a "$GITHUB_OUTPUT"
echo "COMMENT_BODY='${COMMENT_BODY}'" | tee -a "$GITHUB_OUTPUT"
- reg="run
(buildall|compile|p0|p1|feut|beut|cloudut|external|clickbench|cloud_p0|cloud_p1|arm|performance)(
[1-9]*[0-9]+)*"
+ reg="run
(buildall|compile|p0|p1|feut|beut|cloudut|external|clickbench|cloud_p0|cloud_p1|vault_p0|arm|performance)(
[1-9]*[0-9]+)*"
COMMENT_TRIGGER_TYPE="$(echo -e "${COMMENT_BODY}" | xargs | grep -E
"${reg}" | awk -F' ' '{print $2}' | sed -n 1p | sed 's/\r//g')"
COMMENT_REPEAT_TIMES="$(echo -e "${COMMENT_BODY}" | xargs | grep -E
"${reg}" | awk -F' ' '{print $3}' | sed -n 1p | sed 's/\r//g')"
echo "COMMENT_TRIGGER_TYPE=${COMMENT_TRIGGER_TYPE}" | tee -a
"$GITHUB_OUTPUT"
@@ -139,8 +140,10 @@ jobs:
fi
if file_changed_cloud_p0; then
echo "changed_cloud_p0=true" | tee -a "$GITHUB_OUTPUT"
+ echo "changed_vault_p0=true" | tee -a "$GITHUB_OUTPUT"
else
echo "changed_cloud_p0=false" | tee -a "$GITHUB_OUTPUT"
+ echo "changed_vault_p0=false" | tee -a "$GITHUB_OUTPUT"
fi
if file_changed_cloud_p1; then
echo "changed_cloud_p1=true" | tee -a "$GITHUB_OUTPUT"
@@ -159,6 +162,7 @@ jobs:
echo "changed_performance=true" | tee -a "$GITHUB_OUTPUT"
echo "changed_cloud_p0=true" | tee -a "$GITHUB_OUTPUT"
echo "changed_cloud_p1=true" | tee -a "$GITHUB_OUTPUT"
+ echo "changed_vault_p0=true" | tee -a "$GITHUB_OUTPUT"
fi
# - name: "Setup tmate session"
@@ -323,6 +327,33 @@ jobs:
"${{ steps.parse.outputs.COMMENT_REPEAT_TIMES }}"
fi
+ - name: "Trigger or Skip vault_p0"
+ if: ${{ fromJSON(steps.parse.outputs.comment_trigger) &&
contains(fromJSON('["vault_p0", "buildall"]'),
steps.parse.outputs.COMMENT_TRIGGER_TYPE) }}
+ run: |
+ source ./regression-test/pipeline/common/teamcity-utils.sh
+ if [[ ${{ steps.parse.outputs.COMMENT_TRIGGER_TYPE }} == "buildall"
]]; then
+ echo "COMMENT_TRIGGER_TYPE is buildall, trigger compile is enough,
compile will trigger vault_p0" && exit
+ fi
+ set -x
+ if [[ "${{ steps.parse.outputs.TARGET_BRANCH }}" == "'master'" ||
+ "${{ steps.parse.outputs.TARGET_BRANCH }}" == "'branch-3.0'" ]];
then
+ echo "PR target branch is in (master, branch-3.0), need run vault_p0"
+ trigger_or_skip_build \
+ "${{ steps.changes.outputs.changed_vault_p0 }}" \
+ "${{ steps.parse.outputs.PULL_REQUEST_NUM }}" \
+ "${{ steps.parse.outputs.COMMIT_ID_FROM_TRIGGER }}" \
+ "vault_p0" \
+ "${{ steps.parse.outputs.COMMENT_REPEAT_TIMES }}"
+ else
+ echo "PR target branch is not in (master, branch-3.0), skip run
vault_p0"
+ trigger_or_skip_build \
+ "false" \
+ "${{ steps.parse.outputs.PULL_REQUEST_NUM }}" \
+ "${{ steps.parse.outputs.COMMIT_ID_FROM_TRIGGER }}" \
+ "vault_p0" \
+ "${{ steps.parse.outputs.COMMENT_REPEAT_TIMES }}"
+ fi
+
- name: "Trigger or Skip cloud_p1"
if: ${{ fromJSON(steps.parse.outputs.comment_trigger) &&
contains(fromJSON('["cloud_p1", "buildall"]'),
steps.parse.outputs.COMMENT_TRIGGER_TYPE) }}
run: |
@@ -402,3 +433,4 @@ jobs:
skip_build "${{ steps.parse.outputs.COMMIT_ID_FROM_TRIGGER }}" cloud_p0
skip_build "${{ steps.parse.outputs.COMMIT_ID_FROM_TRIGGER }}" cloud_p1
skip_build "${{ steps.parse.outputs.COMMIT_ID_FROM_TRIGGER }}" cloudut
+ skip_build "${{ steps.parse.outputs.COMMIT_ID_FROM_TRIGGER }}" vault_p0
diff --git a/regression-test/pipeline/cloud_p0/README.md
b/regression-test/pipeline/cloud_p0/README.md
new file mode 100644
index 00000000000..d5661794b3d
--- /dev/null
+++ b/regression-test/pipeline/cloud_p0/README.md
@@ -0,0 +1,3 @@
+## CLOUD P0 CI Pipeline
+
+This pipeline deploys Doris in cloud mode with S3 storage on a single machine
and runs P0 test cases.
\ No newline at end of file
diff --git a/regression-test/pipeline/common/doris-utils.sh
b/regression-test/pipeline/common/doris-utils.sh
index 5ece3d872d6..ed1ea086734 100644
--- a/regression-test/pipeline/common/doris-utils.sh
+++ b/regression-test/pipeline/common/doris-utils.sh
@@ -477,6 +477,16 @@ set_session_variable() {
fi
}
+set_default_storage_vault() {
+ query_port=$(get_doris_conf_value "${DORIS_HOME}"/fe/conf/fe.conf
query_port)
+ cl="mysql -h127.0.0.1 -P${query_port} -uroot "
+ if ${cl} -e"set built_in_storage_vault as default storage vault;"; then
+ echo "INFO: set built_in_storage_vault as default storage vault;"
+ else
+ echo "ERROR: set built_in_storage_vault as default storage vault;"
&& return 1
+ fi
+}
+
function reset_doris_session_variables() {
# reset all session variables to default
if [[ ! -d "${DORIS_HOME:-}" ]]; then return 1; fi
@@ -702,6 +712,34 @@ function create_warehouse() {
fi
}
+function create_warehouse_vault() {
+ if [[ -z ${oss_ak} || -z ${oss_sk} ]]; then
+ echo "ERROR: env oss_ak and oss_sk are required." && return 1
+ fi
+
+ if curl
"127.0.0.1:5000/MetaService/http/create_instance?token=greedisgood9999" -d "{
+ \"instance_id\": \"cloud_instance_0\",
+ \"name\":\"cloud_instance_0\",
+ \"user_id\":\"user-id\",
+ \"vault\": {
+ \"obj_info\": {
+ \"provider\": \"OSS\",
+ \"region\": \"oss-cn-hongkong\",
+ \"bucket\": \"doris-community-test\",
+ \"prefix\": \"cloud_regression_vault\",
+ \"endpoint\": \"oss-cn-hongkong-internal.aliyuncs.com\",
+ \"external_endpoint\":
\"oss-cn-hongkong-internal.aliyuncs.com\",
+ \"ak\": \"${oss_ak}\",
+ \"sk\": \"${oss_sk}\"
+ }
+ }
+ }"; then
+ echo
+ else
+ return 1
+ fi
+}
+
function warehouse_add_fe() {
local ret
if curl
"127.0.0.1:5000/MetaService/http/add_cluster?token=greedisgood9999" -d "{
diff --git a/regression-test/pipeline/common/teamcity-utils.sh
b/regression-test/pipeline/common/teamcity-utils.sh
index 0b9004a383b..94d04152ba5 100644
--- a/regression-test/pipeline/common/teamcity-utils.sh
+++ b/regression-test/pipeline/common/teamcity-utils.sh
@@ -36,6 +36,7 @@ comment_to_pipeline=(
['performance']='Doris_DorisPerformance_Performance'
['cloud_p0']='Doris_DorisRegression_CloudP0'
['cloud_p1']='Doris_DorisCloudRegression_CloudP1'
+ ['vault_p0']='Doris_DorisCloudRegression_VaultP0'
)
# github中评论的要触发的流水线名字
@@ -56,6 +57,7 @@ conment_to_context=(
['performance']='performance (Doris Performance)'
['cloud_p0']='cloud_p0 (Doris Cloud Regression)'
['cloud_p1']='cloud_p1 (Doris Cloud Regression)'
+ ['vault_p0']='vault_p0 (Doris Cloud Regression)'
)
get_commit_id_of_build() {
@@ -280,6 +282,7 @@ trigger_or_skip_build() {
skip_build "${COMMIT_ID_FROM_TRIGGER}" "external"
skip_build "${COMMIT_ID_FROM_TRIGGER}" "cloud_p0"
skip_build "${COMMIT_ID_FROM_TRIGGER}" "cloud_p1"
+ skip_build "${COMMIT_ID_FROM_TRIGGER}" "vault_p0"
fi
fi
}
diff --git a/regression-test/pipeline/vault_p0/README.md
b/regression-test/pipeline/vault_p0/README.md
new file mode 100644
index 00000000000..6c9754f4f2e
--- /dev/null
+++ b/regression-test/pipeline/vault_p0/README.md
@@ -0,0 +1,5 @@
+## Vault P0 CI Pipeline
+
+This pipeline deploys Doris in cloud mode with s3 storage vault on a single
machine and runs cases in `regression-test/suites/vault_p0/`.
+
+The test case relies on an HDFS Docker container, which is set up using Docker
Compose.
diff --git a/regression-test/pipeline/vault_p0/clean.sh
b/regression-test/pipeline/vault_p0/clean.sh
new file mode 100644
index 00000000000..309c9b8e533
--- /dev/null
+++ b/regression-test/pipeline/vault_p0/clean.sh
@@ -0,0 +1,33 @@
+#!/usr/bin/env bash
+
+########################### Teamcity Build Step: Command Line
#######################
+: <<EOF
+#!/bin/bash
+export PATH=/usr/local/software/apache-maven-3.6.3/bin:${PATH}
+if [[ -f
"${teamcity_build_checkoutDir:-}"/regression-test/pipeline/vault_p0/clean.sh
]]; then
+ cd "${teamcity_build_checkoutDir}"/regression-test/pipeline/vault_p0/
+ bash -x clean.sh
+else
+ echo "Build Step file missing: regression-test/pipeline/vault_p0/clean.sh"
&& exit 1
+fi
+EOF
+############################# clean.sh content
########################################
+# shellcheck source=/dev/null
+# stop_doris, clean_fdb
+source
"${teamcity_build_checkoutDir}"/regression-test/pipeline/common/doris-utils.sh
+
+echo "#### Check env"
+if [[ -z "${teamcity_build_checkoutDir}" ]]; then echo "ERROR: env
teamcity_build_checkoutDir not set" && exit 1; fi
+
+# shellcheck source=/dev/null
+source "$(bash
"${teamcity_build_checkoutDir}"/regression-test/pipeline/common/get-or-set-tmp-env.sh
'get')"
+if ${skip_pipeline:=false}; then echo "INFO: skip build pipeline" && exit 0;
else echo "INFO: no skip"; fi
+
+echo "#### stop doris and clean fdb ####"
+DORIS_HOME="${teamcity_build_checkoutDir}/output"
+export DORIS_HOME
+stop_doris
+clean_fdb "cloud_instance_0"
+
+echo "#### docker-compose down ####"
+docker-compose down
diff --git a/regression-test/pipeline/vault_p0/conf/be_custom.conf
b/regression-test/pipeline/vault_p0/conf/be_custom.conf
new file mode 100644
index 00000000000..d201cad3fac
--- /dev/null
+++ b/regression-test/pipeline/vault_p0/conf/be_custom.conf
@@ -0,0 +1,41 @@
+streaming_load_rpc_max_alive_time_sec = 72000
+quick_cooldown = true
+disable_stream_load_2pc=false
+enable_vectorized_alter_table = true
+enable_new_scan_node = true
+push_worker_count_high_priority = 2
+streaming_load_max_mb = 107374182400
+clear_file_cache=true
+enable_file_cache=true
+#disable_storage_page_cache = true
+enable_file_cache_query_limit=true
+file_cache_max_file_segment_size=1048576
+s3_write_buffer_whole_size=52428800
+enable_vertical_compaction=true
+fuzzy_vertical_compaction=true
+vacuum_stale_rowsets_interval_seconds=60
+tablet_rowset_stale_sweep_time_sec=300
+user_files_secure_path=/
+enable_file_cache_as_load_buffer=true
+enable_merge_on_write_correctness_check=true
+enable_debug_points=true
+prioritize_query_perf_in_compaction = true
+cumulative_compaction_min_deltas = 5
+#p0 parameter
+meta_service_endpoint = 127.0.0.1:5000
+cloud_unique_id = cloud_unique_id_compute_node0
+meta_service_use_load_balancer = false
+enable_file_cache = true
+file_cache_path =
[{"path":"/data/doris_cloud/file_cache","total_size":104857600,"query_limit":104857600}]
+tmp_file_dirs =
[{"path":"/data/doris_cloud/tmp","max_cache_bytes":104857600,"max_upload_bytes":104857600}]
+thrift_rpc_timeout_ms = 360000
+save_load_error_log_to_s3 = true
+enable_stream_load_record = true
+stream_load_record_batch_size = 500
+webserver_num_workers = 128
+enable_new_tablet_do_compaction = true
+arrow_flight_sql_port = 8181
+pipeline_task_leakage_detect_period_sec=1
+crash_in_memory_tracker_inaccurate = true
+enable_table_size_correctness_check=true
+enable_brpc_connection_check=true
diff --git a/regression-test/pipeline/vault_p0/conf/fe_custom.conf
b/regression-test/pipeline/vault_p0/conf/fe_custom.conf
new file mode 100644
index 00000000000..d35632eb748
--- /dev/null
+++ b/regression-test/pipeline/vault_p0/conf/fe_custom.conf
@@ -0,0 +1,43 @@
+stream_load_default_timeout_second = 72000
+replication_num_forced_in_cloud_mode = true
+ignore_unsupported_properties_in_cloud_mode = true
+enable_array_type = true
+tablet_stat_update_interval_second = 10
+catalog_trash_expire_second = 600
+cloud_delete_loaded_internal_stage_files = true
+merge_on_write_forced_to_false = true
+enable_ssl = true
+light_schema_change_force_to_true = true
+enable_mtmv = true
+remote_fragment_exec_timeout_ms=60000
+dynamic_partition_check_interval_seconds=10
+use_fuzzy_session_variable=true
+
+enable_cloud_snapshot_version = true
+enable_auto_collect_statistics = false
+
+forbid_function_stmt = false
+forbid_insecurity_stmt = false
+
+enable_debug_points = true
+
+disable_datev1=false
+
+disable_decimalv2=false
+max_query_profile_num=1000
+
+statistics_sql_mem_limit_in_bytes=21474836480
+cpu_resource_limit_per_analyze_task=-1
+
+arrow_flight_sql_port = 8081
+
+priority_networks=127.0.0.1/24
+cloud_http_port=18030
+meta_service_endpoint=127.0.0.1:5000
+cloud_unique_id=cloud_unique_id_sql_server00
+# for case test_build_mtmv.groovy
+enable_job_schedule_second_for_test=true
+enable_light_index_change=false
+
+workload_sched_policy_interval_ms = 1000
+enable_advance_next_id = true
diff --git a/regression-test/pipeline/vault_p0/conf/ms_custom.conf
b/regression-test/pipeline/vault_p0/conf/ms_custom.conf
new file mode 100644
index 00000000000..1c88dc87938
--- /dev/null
+++ b/regression-test/pipeline/vault_p0/conf/ms_custom.conf
@@ -0,0 +1,2 @@
+# below lines will be appended to the default doris_cloud.conf when deploying
meta service
+meta_schema_value_version = 1
diff --git a/regression-test/pipeline/vault_p0/conf/recycler_custom.conf
b/regression-test/pipeline/vault_p0/conf/recycler_custom.conf
new file mode 100644
index 00000000000..62deff2b870
--- /dev/null
+++ b/regression-test/pipeline/vault_p0/conf/recycler_custom.conf
@@ -0,0 +1,2 @@
+# below lines will be appended to the default doris_cloud.conf when deploying
recycler
+brpc_listen_port = 6000
diff --git
a/regression-test/pipeline/vault_p0/conf/regression-conf-custom.groovy
b/regression-test/pipeline/vault_p0/conf/regression-conf-custom.groovy
new file mode 100644
index 00000000000..3efd22d90c2
--- /dev/null
+++ b/regression-test/pipeline/vault_p0/conf/regression-conf-custom.groovy
@@ -0,0 +1,36 @@
+// Licensed to the Apache Software Foundation (ASF) under one
+// or more contributor license agreements. See the NOTICE file
+// distributed with this work for additional information
+// regarding copyright ownership. The ASF licenses this file
+// to you under the Apache License, Version 2.0 (the
+// "License"); you may not use this file except in compliance
+// with the License. You may obtain a copy of the License at
+//
+// http://www.apache.org/licenses/LICENSE-2.0
+//
+// Unless required by applicable law or agreed to in writing,
+// software distributed under the License is distributed on an
+// "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
+// KIND, either express or implied. See the License for the
+// specific language governing permissions and limitations
+// under the License.
+
+testDirectories = "vault_p0"
+max_failure_num = 10
+
+jdbcUrl =
"jdbc:mysql://127.0.0.1:9030/?useLocalSessionState=true&allowLoadLocalInfile=true&zeroDateTimeBehavior=round"
+targetJdbcUrl =
"jdbc:mysql://127.0.0.1:9030/?useLocalSessionState=true&allowLoadLocalInfile=true&zeroDateTimeBehavior=round"
+
+// for vault case, consistent with the configuration in the
create_warehouse_vault method in the doris-utils.sh file.
+instanceId="cloud_instance_0"
+multiClusterInstanceId="cloud_instance_0"
+
+hdfsFs = "hdfs://127.0.0.1:8020"
+hdfsUser = "root"
+hdfsPasswd = ""
+
+extHiveHmsHost = "127.0.0.1"
+extHiveHmsPort = 7004
+extHdfsPort = 8020
+extHiveServerPort= 7001
+extHiveHmsUser = "root"
diff --git a/regression-test/pipeline/vault_p0/conf/session_variables.sql
b/regression-test/pipeline/vault_p0/conf/session_variables.sql
new file mode 100644
index 00000000000..b2e30361957
--- /dev/null
+++ b/regression-test/pipeline/vault_p0/conf/session_variables.sql
@@ -0,0 +1 @@
+-- set these session variables before running vault p0 regression
diff --git a/regression-test/pipeline/vault_p0/deploy.sh
b/regression-test/pipeline/vault_p0/deploy.sh
new file mode 100644
index 00000000000..ec81bc7fb9d
--- /dev/null
+++ b/regression-test/pipeline/vault_p0/deploy.sh
@@ -0,0 +1,110 @@
+#!/usr/bin/env bash
+
+########################### Teamcity Build Step: Command Line
#######################
+: <<EOF
+#!/bin/bash
+
+if [[ -f
"${teamcity_build_checkoutDir:-}"/regression-test/pipeline/vault_p0/deploy.sh
]]; then
+ cd "${teamcity_build_checkoutDir}"/regression-test/pipeline/vault_p0
+ bash -x deploy.sh
+else
+ echo "Build Step file missing:
regression-test/pipeline/vault_p0/deploy.sh" && exit 1
+fi
+EOF
+#####################################################################################
+
+########################## deploy.sh content
########################################
+# shellcheck source=/dev/null
+source "$(bash
"${teamcity_build_checkoutDir}"/regression-test/pipeline/common/get-or-set-tmp-env.sh
'get')"
+if ${skip_pipeline:=false}; then echo "INFO: skip build pipeline" && exit 0;
else echo "INFO: no skip"; fi
+
+# shellcheck source=/dev/null
+# upload_doris_log_to_oss, download_oss_file
+source
"${teamcity_build_checkoutDir}"/regression-test/pipeline/common/oss-utils.sh
+# shellcheck source=/dev/null
+# stop_doris, install_fdb, clean_fdb, print_doris_conf,
+# start_doris_fe, get_doris_conf_value, start_doris_be,
+# print_doris_fe_log, print_doris_be_log, archive_doris_logs
+source
"${teamcity_build_checkoutDir}"/regression-test/pipeline/common/doris-utils.sh
+
+if ${DEBUG:-false}; then
+ pr_num_from_trigger=${pr_num_from_debug:-"30772"}
+
commit_id_from_trigger=${commit_id_from_debug:-"8a0077c2cfc492894d9ff68916e7e131f9a99b65"}
+fi
+echo "#### Check env"
+if [[ -z "${teamcity_build_checkoutDir}" ]]; then echo "ERROR: env
teamcity_build_checkoutDir not set" && exit 1; fi
+if [[ -z "${pr_num_from_trigger}" ]]; then echo "ERROR: env
pr_num_from_trigger not set" && exit 1; fi
+if [[ -z "${commit_id_from_trigger}" ]]; then echo "ERROR: env
commit_id_from_trigger not set" && exit 1; fi
+if [[ -z "${oss_ak}" || -z "${oss_sk}" ]]; then echo "ERROR: env oss_ak or
oss_sk not set." && exit 1; fi
+
+echo "#### Deploy Doris ####"
+DORIS_HOME="${teamcity_build_checkoutDir}/output"
+export DORIS_HOME
+exit_flag=0
+(
+ echo "#### 1. download doris binary"
+ cd "${teamcity_build_checkoutDir}"
+ export OSS_DIR="${OSS_DIR:-"oss://opensource-pipeline/compile_result"}"
+ if download_oss_file
"${pr_num_from_trigger}_${commit_id_from_trigger}.tar.gz"; then
+ rm -rf "${teamcity_build_checkoutDir}"/output
+ tar -I pigz -xf
"${pr_num_from_trigger}_${commit_id_from_trigger}.tar.gz"
+ else exit 1; fi
+
+ echo "#### 2. try to kill old doris process and clean foundationdb"
+ stop_doris
+ install_fdb && clean_fdb "cloud_instance_0"
+
+ set -e
+ echo "#### 3. copy conf from regression-test/pipeline/vault_p0/conf/ and
modify"
+ cp -rf "${DORIS_HOME}"/ms/ "${DORIS_HOME}"/recycler/
+ cp -f
"${teamcity_build_checkoutDir}"/regression-test/pipeline/vault_p0/conf/fe_custom.conf
"${DORIS_HOME}"/fe/conf/
+ cp -f
"${teamcity_build_checkoutDir}"/regression-test/pipeline/vault_p0/conf/be_custom.conf
"${DORIS_HOME}"/be/conf/
+ fdb_cluster="$(cat /etc/foundationdb/fdb.cluster)"
+ sed -i "s/^fdb_cluster = .*/fdb_cluster = ${fdb_cluster}/"
"${DORIS_HOME}"/ms/conf/doris_cloud.conf
+ sed -i "s/^fdb_cluster = .*/fdb_cluster = ${fdb_cluster}/"
"${DORIS_HOME}"/recycler/conf/doris_cloud.conf
+ cat
"${teamcity_build_checkoutDir}"/regression-test/pipeline/vault_p0/conf/ms_custom.conf
>>"${DORIS_HOME}"/ms/conf/doris_cloud.conf
+ echo >>"${DORIS_HOME}"/ms/conf/doris_cloud.conf
+ cat
"${teamcity_build_checkoutDir}"/regression-test/pipeline/vault_p0/conf/recycler_custom.conf
>>"${DORIS_HOME}"/recycler/conf/doris_cloud.conf
+ echo >>"${DORIS_HOME}"/recycler/conf/doris_cloud.conf
+ print_doris_conf
+
+ echo "#### 4. start Doris"
+ JAVA_HOME="$(find /usr/lib/jvm -maxdepth 1 -type d -name 'java-17-*' | sed
-n '1p')"
+ export JAVA_HOME
+ if ! start_doris_ms; then exit 1; fi
+ if ! start_doris_recycler; then exit 1; fi
+ if ! create_warehouse_vault; then exit 1; fi
+ if ! warehouse_add_fe; then exit 1; fi
+ if ! warehouse_add_be; then exit 1; fi
+ # if ! prepare_java_udf; then exit 1; fi
+ if ! start_doris_fe; then exit 1; fi
+ if ! start_doris_be; then exit 1; fi
+ if ! set_default_storage_vault; then exit 1; fi
+ # if ! deploy_doris_sql_converter; then exit 1; else
+ # set_session_variable sql_converter_service_url
"http://127.0.0.1:${doris_sql_converter_port:-5001}/api/v1/convert"
+ # fi
+ if ! check_doris_ready; then exit 1; fi
+
+ echo "#### 5. set session variables"
+ if ! reset_doris_session_variables; then exit 1; fi
+
session_variables_file="${teamcity_build_checkoutDir}/regression-test/pipeline/vault_p0/conf/session_variables.sql"
+ echo -e "\n\ntuned session variables:\n$(cat
"${session_variables_file}")\n\n"
+ set_doris_session_variables_from_file "${session_variables_file}"
+ # record session variables
+ set +x
+ show_session_variables &>"${DORIS_HOME}"/session_variables
+)
+exit_flag="$?"
+
+echo "#### 6. check if need to backup doris logs"
+if [[ ${exit_flag} != "0" ]]; then
+ stop_doris
+ print_doris_fe_log
+ print_doris_be_log
+ if file_name=$(archive_doris_logs
"${pr_num_from_trigger}_${commit_id_from_trigger}_$(date
+%Y%m%d%H%M%S)_doris_logs.tar.gz"); then
+ upload_doris_log_to_oss "${file_name}"
+ fi
+fi
+
+exit "${exit_flag}"
+#####################################################################################
diff --git a/regression-test/pipeline/vault_p0/prepare.sh
b/regression-test/pipeline/vault_p0/prepare.sh
new file mode 100644
index 00000000000..eeb40e3014d
--- /dev/null
+++ b/regression-test/pipeline/vault_p0/prepare.sh
@@ -0,0 +1,176 @@
+#!/usr/bin/env bash
+
+########################### Teamcity Build Step: Command Line
#######################
+: <<EOF
+#!/bin/bash
+
+set -x
+pwd
+rm -rf ../.old/*
+
+export teamcity_build_checkoutDir="%teamcity.build.checkoutDir%"
+export commit_id_from_checkout="%build.vcs.number%"
+export target_branch='%teamcity.pullRequest.target.branch%'
+export teamcity_buildType_id='%system.teamcity.buildType.id%'
+export skip_pipeline='%skip_pipline%'
+export PATH=/usr/local/software/apache-maven-3.6.3/bin:${PATH}
+if [[ -f
"${teamcity_build_checkoutDir:-}"/regression-test/pipeline/vault_p0/prepare.sh
]]; then
+ cd "${teamcity_build_checkoutDir}"/regression-test/pipeline/vault_p0/
+ if [[ "${skip_pipeline}" == "true" ]]; then
+ bash
"${teamcity_build_checkoutDir}"/regression-test/pipeline/common/get-or-set-tmp-env.sh
'set' "export skip_pipeline=true"
+ fi
+ bash prepare.sh
+else
+ echo "Build Step file missing:
regression-test/pipeline/vault_p0/prepare.sh" && exit 1
+fi
+EOF
+#####################################################################################
+## prepare.sh content ##
+
+if ${DEBUG:-false}; then
+ pr_num_from_trigger=${pr_num_from_debug:-"30772"}
+
commit_id_from_trigger=${commit_id_from_debug:-"8a0077c2cfc492894d9ff68916e7e131f9a99b65"}
+
commit_id_from_checkout=${commit_id_from_debug:-"8a0077c2cfc492894d9ff68916e7e131f9a99b65"}
# teamcity checkout commit id
+ target_branch="master"
+fi
+
+# shellcheck source=/dev/null
+# stop_doris, clean_fdb, install_fdb, install_java, clear_coredump
+source
"${teamcity_build_checkoutDir}"/regression-test/pipeline/common/doris-utils.sh
+# shellcheck source=/dev/null
+# check_oss_file_exist, download_oss_file
+source
"${teamcity_build_checkoutDir}"/regression-test/pipeline/common/oss-utils.sh
+
+echo "#### Check env"
+if [[ -z "${teamcity_build_checkoutDir}" ]]; then echo "ERROR: env
teamcity_build_checkoutDir not set" && exit 1; fi
+if [[ -z "${pr_num_from_trigger}" ]]; then echo "ERROR: env
pr_num_from_trigger not set" && exit 1; fi
+if [[ -z "${commit_id_from_trigger}" ]]; then echo "ERROR: env
commit_id_from_trigger not set" && exit 1; fi
+if [[ -z "${commit_id_from_checkout}" ]]; then echo "ERROR: env
commit_id_from_checkout not set" && exit 1; fi
+if [[ -z "${target_branch}" ]]; then echo "ERROR: env target_branch not set"
&& exit 1; fi
+if [[ -z "${s3SourceAk}" || -z "${s3SourceSk}" ]]; then echo "ERROR: env
s3SourceAk or s3SourceSk not set" && exit 1; fi
+if [[ -z "${oss_ak}" || -z "${oss_sk}" ]]; then echo "ERROR: env oss_ak or
oss_sk not set." && exit 1; fi
+
+echo "#### 1. check if need run"
+if [[ "${commit_id_from_trigger}" != "${commit_id_from_checkout}" ]]; then
+ echo -e "从触发流水线 -> 流水线开始跑,这个时间段中如果有新commit,
+这时候流水线 checkout 出来的 commit 就不是触发时的传过来的 commit 了,
+这种情况不需要跑,预期 pr owner 会重新触发。"
+ echo -e "ERROR: PR(${pr_num_from_trigger}),
+ the commit_id_from_checkout
+ ${commit_id_from_checkout}
+    not equal to the commit_id_from_trigger
+ ${commit_id_from_trigger}
+    commit_id_from_trigger is outdated"
+ exit 1
+fi
+
+# shellcheck source=/dev/null
+source "$(bash
"${teamcity_build_checkoutDir}"/regression-test/pipeline/common/get-or-set-tmp-env.sh
'get')"
+if ${skip_pipeline:=false}; then echo "INFO: skip build pipeline" && exit 0;
else echo "INFO: no skip"; fi
+if [[ "${target_branch}" == "master" ]]; then
+ echo "INFO: PR target branch ${target_branch}"
+ install_java
+else
+ echo "WARNING: PR target branch ${target_branch} is NOT in (master), skip
pipeline."
+ bash
"${teamcity_build_checkoutDir}"/regression-test/pipeline/common/get-or-set-tmp-env.sh
'set' "export skip_pipeline=true"
+ exit 0
+fi
+
+# shellcheck source=/dev/null
+# _get_pr_changed_files file_changed_performance
+source
"${teamcity_build_checkoutDir}"/regression-test/pipeline/common/github-utils.sh
+if _get_pr_changed_files "${pr_num_from_trigger}"; then
+ if ! file_changed_cloud_p0; then
+ bash
"${teamcity_build_checkoutDir}"/regression-test/pipeline/common/get-or-set-tmp-env.sh
'set' "export skip_pipeline=true"
+ exit 0
+ fi
+fi
+
+echo "#### 2. check if required pipeline files exist"
+set -x
+if ! [[ -d "${teamcity_build_checkoutDir}"/regression-test/pipeline/vault_p0/
&&
+ -f
"${teamcity_build_checkoutDir}"/regression-test/pipeline/common/oss-utils.sh &&
+ -f
"${teamcity_build_checkoutDir}"/regression-test/pipeline/common/doris-utils.sh
&&
+ -f
"${teamcity_build_checkoutDir}"/regression-test/pipeline/common/github-utils.sh
&&
+ -f
"${teamcity_build_checkoutDir}"/regression-test/pipeline/common/get-or-set-tmp-env.sh
]]; then
+ echo "ERROR: depending files missing" && exit 1
+fi
+
+echo "#### 3. try to kill old doris process"
+DORIS_HOME="${teamcity_build_checkoutDir}/output"
+export DORIS_HOME
+stop_doris
+clear_coredump
+
+echo "#### 4. prepare fundationdb and docker-compose"
+install_fdb
+clean_fdb "cloud_instance_0"
+if ! command -v docker-compose; then
+ if apt update >/dev/null && apt install -y docker-compose; then
+ echo "INFO: docker-compose installed"
+ else
+ echo "ERROR: docker-compose install failed" && exit 1
+ fi
+fi
+
+echo "#### 5. check if binary package ready"
+merge_pr_to_master_commit() {
+ local pr_num_from_trigger="$1"
+ local target_branch="$2"
+ local master_commit="$3"
+ echo "INFO: merge pull request into ${target_branch} ${master_commit}"
+ if [[ -z "${teamcity_build_checkoutDir}" ]]; then
+ echo "ERROR: env teamcity_build_checkoutDir not set" && return 1
+ fi
+ cd "${teamcity_build_checkoutDir}" || return 1
+ git reset --hard
+ git fetch origin "${target_branch}"
+ git checkout "${target_branch}"
+ git reset --hard origin/"${target_branch}"
+ git checkout "${master_commit}"
+ returnValue=$?
+ if [[ ${returnValue} -ne 0 ]]; then
+ echo "ERROR: checkout ${target_branch} ${master_commit} failed. please
rebase to the newest version."
+ return 1
+ fi
+ git rev-parse HEAD
+ git config user.email "[email protected]"
+ git config user.name "ci"
+ echo "git fetch origin refs/pull/${pr_num_from_trigger}/head"
+ git fetch origin "refs/pull/${pr_num_from_trigger}/head"
+ git merge --no-edit --allow-unrelated-histories FETCH_HEAD
+ echo "INFO: merge refs/pull/${pr_num_from_trigger}/head into
${target_branch} ${master_commit}"
+ # CONFLICTS=$(git ls-files -u | wc -l)
+ if [[ $(git ls-files -u | wc -l) -gt 0 ]]; then
+ echo "ERROR: merge refs/pull/${pr_num_from_trigger}/head into failed.
Aborting"
+ git merge --abort
+ return 1
+ fi
+}
+export OSS_DIR="${OSS_DIR:-"oss://opensource-pipeline/compile_result"}"
+if ! check_oss_file_exist
"${pr_num_from_trigger}_${commit_id_from_trigger}.tar.gz"; then return 1; fi
+if download_oss_file
"${pr_num_from_trigger}_${commit_id_from_trigger}.tar.gz"; then
+ rm -rf "${teamcity_build_checkoutDir}"/output
+ tar -I pigz -xf "${pr_num_from_trigger}_${commit_id_from_trigger}.tar.gz"
+ master_commit_file="master.commit"
+ if [[ -e output/${master_commit_file} ]]; then
+ # checkout to master commit and merge this pr, to ensure binary and
case are same version
+ master_commit=$(cat output/"${master_commit_file}")
+ if merge_pr_to_master_commit "${pr_num_from_trigger}"
"${target_branch}" "${master_commit}"; then
+ echo "INFO: merged done"
+ if [[ "${teamcity_buildType_id:-}" =~
^Doris_DorisCloudRegression_CloudP1 ]]; then
+ echo "INFO: 用cloud_p1/conf覆盖cloud_p0/conf"
+ if [[ -d
"${teamcity_build_checkoutDir:-}"/regression-test/pipeline/cloud_p1/conf ]];
then
+ cp -rf
"${teamcity_build_checkoutDir}"/regression-test/pipeline/cloud_p1/conf/* \
+
"${teamcity_build_checkoutDir}"/regression-test/pipeline/vault_p0/conf/
+ else
+ echo "ERROR: regression-test/pipeline/cloud_p1/conf not
exist" && exit 1
+ fi
+ fi
+ else
+ exit 1
+ fi
+ fi
+else
+ exit 1
+fi
diff --git a/regression-test/pipeline/vault_p0/run.sh
b/regression-test/pipeline/vault_p0/run.sh
new file mode 100644
index 00000000000..d0d0e26e733
--- /dev/null
+++ b/regression-test/pipeline/vault_p0/run.sh
@@ -0,0 +1,167 @@
+#!/usr/bin/env bash
+
+########################### Teamcity Build Step: Command Line
#######################
+: <<EOF
+#!/bin/bash
+export
PATH=/usr/local/software/jdk1.8.0_131/bin:/usr/local/software/apache-maven-3.6.3/bin:${PATH}
+if [[ -f
"${teamcity_build_checkoutDir:-}"/regression-test/pipeline/vault_p0/run.sh ]];
then
+ cd "${teamcity_build_checkoutDir}"/regression-test/pipeline/vault_p0/
+ bash -x run.sh
+else
+ echo "Build Step file missing: regression-test/pipeline/vault_p0/run.sh"
&& exit 1
+fi
+EOF
+############################# run.sh content
########################################
+# shellcheck source=/dev/null
+# _monitor_regression_log, print_running_pipeline_tasks
+source
"${teamcity_build_checkoutDir}"/regression-test/pipeline/common/doris-utils.sh
+# shellcheck source=/dev/null
+# create_an_issue_comment
+source
"${teamcity_build_checkoutDir}"/regression-test/pipeline/common/github-utils.sh
+# shellcheck source=/dev/null
+# upload_doris_log_to_oss
+source
"${teamcity_build_checkoutDir}"/regression-test/pipeline/common/oss-utils.sh
+# shellcheck source=/dev/null
+# reporting_build_problem, reporting_messages_error
+source
"${teamcity_build_checkoutDir}"/regression-test/pipeline/common/teamcity-utils.sh
+
# When debugging locally, fall back to a known PR/commit instead of the values
# injected by the TeamCity trigger.
if ${DEBUG:-false}; then
    pr_num_from_trigger=${pr_num_from_debug:-"30772"}
    commit_id_from_trigger=${commit_id_from_debug:-"8a0077c2cfc492894d9ff68916e7e131f9a99b65"}
fi
echo "#### Check env"
# Hard requirements: abort without these. Missing cloud credentials for
# Huawei/Tencent only degrade coverage, so they are warnings.
if [[ -z "${teamcity_build_checkoutDir}" ]]; then echo "ERROR: env teamcity_build_checkoutDir not set" && exit 1; fi
if [[ -z "${pr_num_from_trigger}" ]]; then echo "ERROR: env pr_num_from_trigger not set" && exit 1; fi
if [[ -z "${commit_id_from_trigger}" ]]; then echo "ERROR: env commit_id_from_trigger not set" && exit 1; fi
if [[ -z "${s3SourceAk}" || -z "${s3SourceSk}" ]]; then echo "ERROR: env s3SourceAk or s3SourceSk not set" && exit 1; fi
if [[ -z "${hwYunAk}" || -z "${hwYunSk}" ]]; then echo "WARNING: env hwYunAk or hwYunSk not set"; fi
if [[ -z "${txYunAk}" || -z "${txYunSk}" ]]; then echo "WARNING: env txYunAk or txYunSk not set"; fi

# Load temporary pipeline state shared between build steps (e.g. skip_pipeline).
# shellcheck source=/dev/null
source "$(bash "${teamcity_build_checkoutDir}"/regression-test/pipeline/common/get-or-set-tmp-env.sh 'get')"
if ${skip_pipeline:=false}; then echo "INFO: skip build pipeline" && exit 0; else echo "INFO: no skip"; fi

echo "#### Run vault_p0 test on Doris ####"
DORIS_HOME="${teamcity_build_checkoutDir}/output"
export DORIS_HOME
exit_flag=0
need_collect_log=false

# monitoring the log files in "${DORIS_HOME}"/regression-test/log/ for keyword
# 'Reach limit of connections'
_monitor_regression_log &
+
#######################################
# Run the vault_p0 regression suite:
#   1. append cloud/S3 credentials to the custom regression config and copy it
#      into regression-test/conf/
#   2. start a single-node HDFS (namenode + datanode) via docker-compose
#   3. invoke run-regression-test.sh; on failure, tolerate up to
#      ${failed_suites_threshold} failed suites as long as no script was fatal
# Globals:  teamcity_build_checkoutDir, DORIS_HOME, s3SourceAk/s3SourceSk,
#           hwYun*/txYun*, repeat_times_from_trigger (read)
# Returns:  0 on success or an acceptable failure count, non-zero otherwise
# shellcheck disable=SC2317  # invoked indirectly via 'timeout bash -cx run'
#######################################
run() {
    set -e
    shopt -s inherit_errexit

    cd "${teamcity_build_checkoutDir}" || return 1
    {
        echo # add a new line to prevent two config items from being combined, which will cause the error "No signature of method"
        echo "ak='${s3SourceAk}'"
        echo "sk='${s3SourceSk}'"
        echo "hwYunAk='${hwYunAk:-}'"
        echo "hwYunSk='${hwYunSk:-}'"
        echo "txYunAk='${txYunAk:-}'"
        echo "txYunSk='${txYunSk:-}'"
    } >>"${teamcity_build_checkoutDir}"/regression-test/pipeline/vault_p0/conf/regression-conf-custom.groovy
    cp -f "${teamcity_build_checkoutDir}"/regression-test/pipeline/vault_p0/conf/regression-conf-custom.groovy \
        "${teamcity_build_checkoutDir}"/regression-test/conf/
    # # start kafka docker to run case test_rountine_load
    # sed -i "s/^CONTAINER_UID=\"doris--\"/CONTAINER_UID=\"doris-external--\"/" "${teamcity_build_checkoutDir}"/docker/thirdparties/custom_settings.env
    # if bash "${teamcity_build_checkoutDir}"/docker/thirdparties/run-thirdparties-docker.sh --stop; then echo; fi
    # if bash "${teamcity_build_checkoutDir}"/docker/thirdparties/run-thirdparties-docker.sh -c kafka; then echo; else echo "ERROR: start kafka docker failed"; fi
    # used to set up HDFS docker
    docker_compose_hdfs_yaml='
version: "3"

services:
  namenode:
    image: bde2020/hadoop-namenode:2.0.0-hadoop3.2.1-java8
    environment:
      - CLUSTER_NAME=test
    container_name: hadoop3-namenode
    ports:
      - "9870:9870"
    expose:
      - "9870"
    healthcheck:
      test: [ "CMD", "curl", "http://localhost:9870/" ]
      interval: 5s
      timeout: 120s
      retries: 120
    network_mode: "host"

  datanode:
    image: bde2020/hadoop-datanode:2.0.0-hadoop3.2.1-java8
    ports:
      - "9864:9864"
    container_name: hadoop3-datanode
    expose:
      - "9864"
    healthcheck:
      test: [ "CMD", "curl", "http://localhost:9864" ]
      interval: 5s
      timeout: 60s
      retries: 120
    network_mode: "host"
'
    # HDFS startup failure is deliberately non-fatal; only a message is logged.
    if echo "${docker_compose_hdfs_yaml}" >docker-compose.yaml && docker-compose up -d; then echo; else echo "ERROR: start hdfs docker failed"; fi
    # Pick the first JDK 8 installation found under /usr/lib/jvm.
    JAVA_HOME="$(find /usr/lib/jvm -maxdepth 1 -type d -name 'java-8-*' | sed -n '1p')"
    export JAVA_HOME
    if "${teamcity_build_checkoutDir}"/run-regression-test.sh \
        --teamcity \
        --run \
        --times "${repeat_times_from_trigger:-1}" \
        -parallel 10 \
        -suiteParallel 10 \
        -actionParallel 10 \
        -runNonConcurrent true; then
        echo
    else
        bash "${teamcity_build_checkoutDir}"/regression-test/pipeline/common/get-or-set-tmp-env.sh 'set' "export need_collect_log=true"
        # The regression run prints a summary line like:
        #   "Test 1961 suites, failed 1 suites, fatal 0 scripts, skipped 0 scripts"
        # If test_suites > 0 && failed_suites <= threshold && fatal_scripts == 0,
        # reset the exit status to 0 and let TeamCity judge success/failure from
        # the per-case results; this allows flaky cases to be muted quickly.
        summary=$(
            grep -aoE 'Test ([0-9]+) suites, failed ([0-9]+) suites, fatal ([0-9]+) scripts, skipped ([0-9]+) scripts' \
                "${DORIS_HOME}"/regression-test/log/doris-regression-test.*.log
        )
        set -x
        test_suites=$(echo "${summary}" | cut -d ' ' -f 2)
        failed_suites=$(echo "${summary}" | cut -d ' ' -f 5)
        fatal_scripts=$(echo "${summary}" | cut -d ' ' -f 8)
        if [[ ${test_suites} -gt 0 && ${failed_suites} -le ${failed_suites_threshold:=100} && ${fatal_scripts} -eq 0 ]]; then
            echo "INFO: regression test result meet (test_suites>0 && failed_suites<=${failed_suites_threshold} && fatal_scripts=0)"
        else
            return 1
        fi
    fi
}
export -f run
# Overall timeout in minutes, scaled by how many times the suite is repeated.
timeout_minutes=$((${repeat_times_from_trigger:-1} * ${BUILD_TIMEOUT_MINUTES:-180}))m
# Run the exported function in a traced child shell so 'timeout' can kill it.
timeout "${timeout_minutes}" bash -cx run
exit_flag="$?"
if print_running_pipeline_tasks; then :; fi
# Re-read the shared temp state: run() may have set need_collect_log=true.
# shellcheck source=/dev/null
source "$(cd "${teamcity_build_checkoutDir}" && bash "${teamcity_build_checkoutDir}"/regression-test/pipeline/common/get-or-set-tmp-env.sh 'get')"

echo "#### 5. check if need backup doris logs"
if [[ ${exit_flag} != "0" ]] || ${need_collect_log}; then
    check_if_need_gcore "${exit_flag}"
    if core_file_name=$(archive_doris_coredump "${pr_num_from_trigger}_${commit_id_from_trigger}_$(date +%Y%m%d%H%M%S)_doris_coredump.tar.gz"); then
        reporting_build_problem "coredump"
        print_doris_fe_log
        print_doris_be_log
    fi
    stop_doris
    if log_file_name=$(archive_doris_logs "${pr_num_from_trigger}_${commit_id_from_trigger}_$(date +%Y%m%d%H%M%S)_doris_logs.tar.gz"); then
        if log_info="$(upload_doris_log_to_oss "${log_file_name}")"; then
            reporting_messages_error "${log_info##*logs.tar.gz to }"
        fi
    fi
    # Only upload the coredump archive when one was actually produced:
    # core_file_name is assigned only if archive_doris_coredump succeeded, so
    # referencing it unconditionally would pass an empty/unset value.
    if [[ -n "${core_file_name:-}" ]]; then
        if core_info="$(upload_doris_log_to_oss "${core_file_name}")"; then reporting_messages_error "${core_info##*coredump.tar.gz to }"; fi
    fi
fi

exit "${exit_flag}"
---------------------------------------------------------------------
To unsubscribe, e-mail: [email protected]
For additional commands, e-mail: [email protected]