This is an automated email from the ASF dual-hosted git repository.
morningman pushed a commit to branch branch-2.1
in repository https://gitbox.apache.org/repos/asf/doris.git
The following commit(s) were added to refs/heads/branch-2.1 by this push:
new c744eb87c59 [fix](regression)fix some regression test (#40928) (#41046)
c744eb87c59 is described below
commit c744eb87c59cf2bc69f34fd6ae04205d9d0fe72c
Author: daidai <[email protected]>
AuthorDate: Fri Sep 20 18:17:44 2024 +0800
[fix](regression)fix some regression test (#40928) (#41046)
bp #40928
---
.../docker-compose/hive/hadoop-hive-2x.env.tpl | 17 +++++++
.../docker-compose/hive/hadoop-hive-3x.env.tpl | 22 ++++++++
.../docker-compose/hive/hadoop-hive.env.tpl | 3 --
.../docker-compose/hive/hive-2x.yaml.tpl | 8 +--
.../docker-compose/hive/hive-3x.yaml.tpl | 8 +--
docker/thirdparties/run-thirdparties-docker.sh | 14 ++---
.../data/manager/test_manager_interface_1.out | 22 ++++----
regression-test/pipeline/external/conf/fe.conf | 2 +-
.../hive/test_hms_event_notification.groovy | 2 +-
...est_hms_event_notification_multi_catalog.groovy | 2 +-
.../suites/manager/test_manager_interface_1.groovy | 59 +++++++++++-----------
11 files changed, 99 insertions(+), 60 deletions(-)
diff --git a/docker/thirdparties/docker-compose/hive/hadoop-hive-2x.env.tpl b/docker/thirdparties/docker-compose/hive/hadoop-hive-2x.env.tpl
new file mode 100644
index 00000000000..6222972176a
--- /dev/null
+++ b/docker/thirdparties/docker-compose/hive/hadoop-hive-2x.env.tpl
@@ -0,0 +1,17 @@
+#
+# Licensed to the Apache Software Foundation (ASF) under one or more
+# contributor license agreements. See the NOTICE file distributed with
+# this work for additional information regarding copyright ownership.
+# The ASF licenses this file to You under the Apache License, Version 2.0
+# (the "License"); you may not use this file except in compliance with
+# the License. You may obtain a copy of the License at
+#
+# http://www.apache.org/licenses/LICENSE-2.0
+#
+# Unless required by applicable law or agreed to in writing, software
+# distributed under the License is distributed on an "AS IS" BASIS,
+# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+# See the License for the specific language governing permissions and
+# limitations under the License.
+#
+
diff --git a/docker/thirdparties/docker-compose/hive/hadoop-hive-3x.env.tpl b/docker/thirdparties/docker-compose/hive/hadoop-hive-3x.env.tpl
new file mode 100644
index 00000000000..84bfce1754f
--- /dev/null
+++ b/docker/thirdparties/docker-compose/hive/hadoop-hive-3x.env.tpl
@@ -0,0 +1,22 @@
+#
+# Licensed to the Apache Software Foundation (ASF) under one or more
+# contributor license agreements. See the NOTICE file distributed with
+# this work for additional information regarding copyright ownership.
+# The ASF licenses this file to You under the Apache License, Version 2.0
+# (the "License"); you may not use this file except in compliance with
+# the License. You may obtain a copy of the License at
+#
+# http://www.apache.org/licenses/LICENSE-2.0
+#
+# Unless required by applicable law or agreed to in writing, software
+# distributed under the License is distributed on an "AS IS" BASIS,
+# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+# See the License for the specific language governing permissions and
+# limitations under the License.
+#
+
+
+HIVE_SITE_CONF_hive_metastore_event_db_notification_api_auth=false
+HIVE_SITE_CONF_hive_metastore_dml_events=true
+HIVE_SITE_CONF_hive_metastore_transactional_event_listeners=org.apache.hive.hcatalog.listener.DbNotificationListener
+
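(The three HIVE_SITE_CONF_* entries added above are picked up at container start and written into hive-site.xml; in the bde2020-style Hive images used by these compose files the property name is derived from the variable name by mapping underscores to dots, and triple underscores to dashes. A rough sketch of that translation, assuming the usual configure helper from those images; the loop below is illustrative, not the exact startup script:)

    # Illustrative: turn HIVE_SITE_CONF_* variables into hive-site.xml properties.
    for var in $(compgen -v | grep '^HIVE_SITE_CONF_'); do
        key="${var#HIVE_SITE_CONF_}"
        # ___ -> '-', __ -> '_', _ -> '.' (placeholder keeps literal underscores intact)
        name=$(echo "${key}" | perl -pe 's/___/-/g; s/__/@/g; s/_/./g; s/@/_/g;')
        printf '<property><name>%s</name><value>%s</value></property>\n' "${name}" "${!var}"
    done
    # e.g. HIVE_SITE_CONF_hive_metastore_dml_events=true
    #   -> <property><name>hive.metastore.dml.events</name><value>true</value></property>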
diff --git a/docker/thirdparties/docker-compose/hive/hadoop-hive.env.tpl b/docker/thirdparties/docker-compose/hive/hadoop-hive.env.tpl
index b7e662f5e52..0e074228410 100644
--- a/docker/thirdparties/docker-compose/hive/hadoop-hive.env.tpl
+++ b/docker/thirdparties/docker-compose/hive/hadoop-hive.env.tpl
@@ -28,9 +28,6 @@ HIVE_SITE_CONF_hive_server2_webui_port=0
HIVE_SITE_CONF_hive_compactor_initiator_on=true
HIVE_SITE_CONF_hive_compactor_worker_threads=2
HIVE_SITE_CONF_metastore_storage_schema_reader_impl=org.apache.hadoop.hive.metastore.SerDeStorageSchemaReader
-HIVE_SITE_CONF_hive_metastore_event_db_notification_api_auth=false
-HIVE_SITE_CONF_hive_metastore_dml_events=true
-HIVE_SITE_CONF_hive_metastore_transactional_event_listeners=org.apache.hive.hcatalog.listener.DbNotificationListener
CORE_CONF_fs_defaultFS=hdfs://${IP_HOST}:${FS_PORT}
CORE_CONF_hadoop_http_staticuser_user=root
diff --git a/docker/thirdparties/docker-compose/hive/hive-2x.yaml.tpl b/docker/thirdparties/docker-compose/hive/hive-2x.yaml.tpl
index cdb47e3788b..64601112212 100644
--- a/docker/thirdparties/docker-compose/hive/hive-2x.yaml.tpl
+++ b/docker/thirdparties/docker-compose/hive/hive-2x.yaml.tpl
@@ -24,7 +24,7 @@ services:
environment:
- CLUSTER_NAME=test
env_file:
- - ./hadoop-hive.env
+ - ./hadoop-hive-2x.env
container_name: ${CONTAINER_UID}hadoop2-namenode
expose:
- "50070"
@@ -39,7 +39,7 @@ services:
datanode:
image: bde2020/hadoop-datanode:2.0.0-hadoop2.7.4-java8
env_file:
- - ./hadoop-hive.env
+ - ./hadoop-hive-2x.env
environment:
SERVICE_PRECONDITION: "${IP_HOST}:50070"
container_name: ${CONTAINER_UID}hadoop2-datanode
@@ -55,7 +55,7 @@ services:
hive-server:
image: bde2020/hive:2.3.2-postgresql-metastore
env_file:
- - ./hadoop-hive.env
+ - ./hadoop-hive-2x.env
environment:
HIVE_CORE_CONF_javax_jdo_option_ConnectionURL: "jdbc:postgresql://${IP_HOST}:${PG_PORT}/metastore"
SERVICE_PRECONDITION: "${IP_HOST}:${HMS_PORT}"
@@ -76,7 +76,7 @@ services:
hive-metastore:
image: bde2020/hive:2.3.2-postgresql-metastore
env_file:
- - ./hadoop-hive.env
+ - ./hadoop-hive-2x.env
command: /bin/bash /mnt/scripts/hive-metastore.sh
environment:
SERVICE_PRECONDITION: "${IP_HOST}:50070 ${IP_HOST}:50075 ${IP_HOST}:${PG_PORT}"
diff --git a/docker/thirdparties/docker-compose/hive/hive-3x.yaml.tpl b/docker/thirdparties/docker-compose/hive/hive-3x.yaml.tpl
index bb615af536e..e05e07c3976 100644
--- a/docker/thirdparties/docker-compose/hive/hive-3x.yaml.tpl
+++ b/docker/thirdparties/docker-compose/hive/hive-3x.yaml.tpl
@@ -24,7 +24,7 @@ services:
environment:
- CLUSTER_NAME=test
env_file:
- - ./hadoop-hive.env
+ - ./hadoop-hive-3x.env
container_name: ${CONTAINER_UID}hadoop3-namenode
expose:
- "9870"
@@ -39,7 +39,7 @@ services:
datanode:
image: bde2020/hadoop-datanode:2.0.0-hadoop3.2.1-java8
env_file:
- - ./hadoop-hive.env
+ - ./hadoop-hive-3x.env
environment:
SERVICE_PRECONDITION: "${IP_HOST}:9870"
container_name: ${CONTAINER_UID}hadoop3-datanode
@@ -55,7 +55,7 @@ services:
hive-server:
image: doristhirdpartydocker/hive:3.1.2-postgresql-metastore
env_file:
- - ./hadoop-hive.env
+ - ./hadoop-hive-3x.env
environment:
HIVE_CORE_CONF_javax_jdo_option_ConnectionURL: "jdbc:postgresql://${IP_HOST}:${PG_PORT}/metastore"
SERVICE_PRECONDITION: "${IP_HOST}:${HMS_PORT}"
@@ -76,7 +76,7 @@ services:
hive-metastore:
image: doristhirdpartydocker/hive:3.1.2-postgresql-metastore
env_file:
- - ./hadoop-hive.env
+ - ./hadoop-hive-3x.env
command: /bin/bash /mnt/scripts/hive-metastore.sh
environment:
SERVICE_PRECONDITION: "${IP_HOST}:9870 ${IP_HOST}:9864 ${IP_HOST}:${PG_PORT}"
diff --git a/docker/thirdparties/run-thirdparties-docker.sh b/docker/thirdparties/run-thirdparties-docker.sh
index e056d8469b6..982f070f719 100755
--- a/docker/thirdparties/run-thirdparties-docker.sh
+++ b/docker/thirdparties/run-thirdparties-docker.sh
@@ -349,10 +349,11 @@ if [[ "${RUN_HIVE2}" -eq 1 ]]; then
export CONTAINER_UID=${CONTAINER_UID}
. "${ROOT}"/docker-compose/hive/hive-2x_settings.env
envsubst < "${ROOT}"/docker-compose/hive/hive-2x.yaml.tpl >
"${ROOT}"/docker-compose/hive/hive-2x.yaml
- envsubst < "${ROOT}"/docker-compose/hive/hadoop-hive.env.tpl >
"${ROOT}"/docker-compose/hive/hadoop-hive.env
- sudo docker compose -p ${CONTAINER_UID}hive2 -f
"${ROOT}"/docker-compose/hive/hive-2x.yaml --env-file
"${ROOT}"/docker-compose/hive/hadoop-hive.env down
+ envsubst < "${ROOT}"/docker-compose/hive/hadoop-hive.env.tpl >
"${ROOT}"/docker-compose/hive/hadoop-hive-2x.env
+ envsubst < "${ROOT}"/docker-compose/hive/hadoop-hive-2x.env.tpl >>
"${ROOT}"/docker-compose/hive/hadoop-hive-2x.env
+ sudo docker compose -p ${CONTAINER_UID}hive2 -f
"${ROOT}"/docker-compose/hive/hive-2x.yaml --env-file
"${ROOT}"/docker-compose/hive/hadoop-hive-2x.env down
if [[ "${STOP}" -ne 1 ]]; then
- sudo docker compose -p ${CONTAINER_UID}hive2 -f
"${ROOT}"/docker-compose/hive/hive-2x.yaml --env-file
"${ROOT}"/docker-compose/hive/hadoop-hive.env up --build --remove-orphans -d
+ sudo docker compose -p ${CONTAINER_UID}hive2 -f
"${ROOT}"/docker-compose/hive/hive-2x.yaml --env-file
"${ROOT}"/docker-compose/hive/hadoop-hive-2x.env up --build --remove-orphans -d
fi
fi
@@ -387,10 +388,11 @@ if [[ "${RUN_HIVE3}" -eq 1 ]]; then
export CONTAINER_UID=${CONTAINER_UID}
. "${ROOT}"/docker-compose/hive/hive-3x_settings.env
envsubst < "${ROOT}"/docker-compose/hive/hive-3x.yaml.tpl >
"${ROOT}"/docker-compose/hive/hive-3x.yaml
- envsubst < "${ROOT}"/docker-compose/hive/hadoop-hive.env.tpl >
"${ROOT}"/docker-compose/hive/hadoop-hive.env
- sudo docker compose -p ${CONTAINER_UID}hive3 -f
"${ROOT}"/docker-compose/hive/hive-3x.yaml --env-file
"${ROOT}"/docker-compose/hive/hadoop-hive.env down
+ envsubst < "${ROOT}"/docker-compose/hive/hadoop-hive.env.tpl >
"${ROOT}"/docker-compose/hive/hadoop-hive-3x.env
+ envsubst < "${ROOT}"/docker-compose/hive/hadoop-hive-3x.env.tpl >>
"${ROOT}"/docker-compose/hive/hadoop-hive-3x.env
+ sudo docker compose -p ${CONTAINER_UID}hive3 -f
"${ROOT}"/docker-compose/hive/hive-3x.yaml --env-file
"${ROOT}"/docker-compose/hive/hadoop-hive-3x.env down
if [[ "${STOP}" -ne 1 ]]; then
- sudo docker compose -p ${CONTAINER_UID}hive3 -f
"${ROOT}"/docker-compose/hive/hive-3x.yaml --env-file
"${ROOT}"/docker-compose/hive/hadoop-hive.env up --build --remove-orphans -d
+ sudo docker compose -p ${CONTAINER_UID}hive3 -f
"${ROOT}"/docker-compose/hive/hive-3x.yaml --env-file
"${ROOT}"/docker-compose/hive/hadoop-hive-3x.env up --build --remove-orphans -d
fi
fi
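(To summarize the script change above: each Hive version now gets its own rendered env file, produced by substituting the shared template first and then appending the version-specific one, and docker compose is pointed at that combined file via --env-file. A minimal sketch of the 3.x flow, assuming ROOT points at docker/thirdparties and that the sourced settings file exports the variables envsubst needs; variable names mirror the script, nothing here is new:)

    # Sketch only: how hadoop-hive-3x.env is assembled per the hunks above.
    ROOT="$(pwd)/docker/thirdparties"                     # assumption: run from the repo root
    . "${ROOT}"/docker-compose/hive/hive-3x_settings.env  # assumed to export IP_HOST, PG_PORT, etc.
    envsubst < "${ROOT}"/docker-compose/hive/hadoop-hive.env.tpl \
        > "${ROOT}"/docker-compose/hive/hadoop-hive-3x.env        # shared settings, rendered
    envsubst < "${ROOT}"/docker-compose/hive/hadoop-hive-3x.env.tpl \
        >> "${ROOT}"/docker-compose/hive/hadoop-hive-3x.env       # 3.x-only HMS event settings appended
    # docker compose then reads the combined file: --env-file .../hadoop-hive-3x.env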
diff --git a/regression-test/data/manager/test_manager_interface_1.out b/regression-test/data/manager/test_manager_interface_1.out
index 0f50524d648..0a432e959cc 100644
--- a/regression-test/data/manager/test_manager_interface_1.out
+++ b/regression-test/data/manager/test_manager_interface_1.out
@@ -5,19 +5,19 @@ internal test_manager_metadata_name_ids \N
-- !metadata_2 --
internal test_manager_metadata_name_ids test_metadata_name_ids
--- !metadata_2 --
+-- !metadata_3 --
-- !tables_1 --
-k1 TINYINT Yes true \N
-k2 DECIMAL(10, 2) Yes true 10.05
-k3 CHAR(10) Yes true \N BLOOM_FILTER
-k4 INT No false 1 NONE
-k5 TEXT Yes false \N NONE,BLOOM_FILTER
+k1 tinyint Yes true \N
+k2 decimal(10,2) Yes true 10.05
+k3 char(10) Yes true \N BLOOM_FILTER
+k4 int No false 1 NONE
+k5 text Yes false \N NONE,BLOOM_FILTER
-- !tables_2 --
-test_manager_tb_1 DUP_KEYS k1 TINYINT TINYINT Yes true \N true
- k2 DECIMAL(10, 2) DECIMALV3(10, 2) Yes true 10.05 true
- k3 CHAR(10) CHAR(10) Yes true \N BLOOM_FILTER true
- k4 INT INT No false 1 NONE true
- k5 TEXT TEXT Yes false \N NONE,BLOOM_FILTER true
+test_manager_tb_1 DUP_KEYS k1 tinyint tinyint Yes true \N true
+ k2 DECIMAL(10, 2) decimalv3(10,2) Yes true 10.05 true
+ k3 char(10) char(10) Yes true \N BLOOM_FILTER true
+ k4 int int No false 1 NONE true
+ k5 text text Yes false \N NONE,BLOOM_FILTER true
diff --git a/regression-test/pipeline/external/conf/fe.conf b/regression-test/pipeline/external/conf/fe.conf
index b876ba40e28..2aff2dcf1ba 100644
--- a/regression-test/pipeline/external/conf/fe.conf
+++ b/regression-test/pipeline/external/conf/fe.conf
@@ -97,6 +97,6 @@ enable_feature_binlog=true
auth_token = 5ff161c3-2c08-4079-b108-26c8850b6598
infodb_support_ext_catalog=true
-hms_events_polling_interval_ms=2000
+hms_events_polling_interval_ms=700
KRB5_CONFIG=/keytabs/krb5.conf
diff --git a/regression-test/suites/external_table_p0/hive/test_hms_event_notification.groovy b/regression-test/suites/external_table_p0/hive/test_hms_event_notification.groovy
index 52724b807d3..a3fa4750225 100644
--- a/regression-test/suites/external_table_p0/hive/test_hms_event_notification.groovy
+++ b/regression-test/suites/external_table_p0/hive/test_hms_event_notification.groovy
@@ -22,7 +22,7 @@ suite("test_hms_event_notification", "p0,external,hive,external_docker,external_
return;
}
for (String useMetaCache : ["true","false"] ) {
- for (String hivePrefix : [ "hive2","hive3"]) {
+ for (String hivePrefix : ["hive3"]) {
try {
setHivePrefix(hivePrefix)
hive_docker """ set hive.stats.autogather=false; """
diff --git a/regression-test/suites/external_table_p0/hive/test_hms_event_notification_multi_catalog.groovy b/regression-test/suites/external_table_p0/hive/test_hms_event_notification_multi_catalog.groovy
index 24c2ac3b7fb..e89475f043d 100644
--- a/regression-test/suites/external_table_p0/hive/test_hms_event_notification_multi_catalog.groovy
+++ b/regression-test/suites/external_table_p0/hive/test_hms_event_notification_multi_catalog.groovy
@@ -24,7 +24,7 @@ suite("test_hms_event_notification_multi_catalog", "p0,external,hive,external_do
for (String useMetaCache : ["true","false"] ) {
- for (String hivePrefix : [ "hive2","hive3"]) {
+ for (String hivePrefix : ["hive3"]) {
try {
setHivePrefix(hivePrefix)
hive_docker """ set hive.stats.autogather=false; """
diff --git a/regression-test/suites/manager/test_manager_interface_1.groovy b/regression-test/suites/manager/test_manager_interface_1.groovy
index fbfe3f54389..bbdb1a3fdd7 100644
--- a/regression-test/suites/manager/test_manager_interface_1.groovy
+++ b/regression-test/suites/manager/test_manager_interface_1.groovy
@@ -118,7 +118,7 @@ suite('test_manager_interface_1',"p0") {
sql """ drop table test_metadata_name_ids """
- qt_metadata_2 """ select CATALOG_NAME,DATABASE_NAME,TABLE_NAME from ${tableName}
+ qt_metadata_3 """ select CATALOG_NAME,DATABASE_NAME,TABLE_NAME from ${tableName}
where CATALOG_NAME="internal" and DATABASE_NAME ="test_manager_metadata_name_ids" and TABLE_NAME="test_metadata_name_ids";"""
}
test_metadata_name_ids()
@@ -273,18 +273,19 @@ suite('test_manager_interface_1',"p0") {
assertTrue(result[0][0].toLowerCase() == "test_manager_tb_1")
- result = sql """ show create table test_manager_tb_1"""
+ result = sql """ show create table test_manager_tb_1"""
+ logger.info ("result = ${result}")
assertTrue(result[0][0] == "test_manager_tb_1") // TABLE NAME
// assertTrue(result[0][1].substring() == "test_manager_tb_1") //DDL
def ddl_str = result[0][1]
def idx = ddl_str.indexOf("PROPERTIES")
assertTrue(idx != -1 );
assertTrue( ddl_str.startsWith("""CREATE TABLE `test_manager_tb_1` (
- `k1` TINYINT NULL,
- `k2` DECIMAL(10, 2) NULL DEFAULT "10.05",
- `k3` CHAR(10) NULL COMMENT 'string column',
- `k4` INT NOT NULL DEFAULT "1" COMMENT 'int column',
- `k5` TEXT NULL
+ `k1` tinyint NULL,
+ `k2` decimal(10,2) NULL DEFAULT "10.05",
+ `k3` char(10) NULL COMMENT 'string column',
+ `k4` int NOT NULL DEFAULT "1" COMMENT 'int column',
+ `k5` text NULL
) ENGINE=OLAP
DUPLICATE KEY(`k1`, `k2`, `k3`)
COMMENT 'manager_test_table'
@@ -680,28 +681,28 @@ DISTRIBUTED BY HASH(`k1`) BUCKETS 1"""))
assertTrue(result[0][0] == "audit_log")
assertTrue(result[0][1].contains("CREATE TABLE `audit_log`"))
- assertTrue(result[0][1].contains("`query_id` VARCHAR(48) NULL,"))
- assertTrue(result[0][1].contains("`time` DATETIME(3) NULL,"))
- assertTrue(result[0][1].contains("`client_ip` VARCHAR(128) NULL,"))
- assertTrue(result[0][1].contains("`user` VARCHAR(128) NULL,"))
- assertTrue(result[0][1].contains("`catalog` VARCHAR(128) NULL"))
- assertTrue(result[0][1].contains("`db` VARCHAR(128) NULL,"))
- assertTrue(result[0][1].contains("`state` VARCHAR(128) NULL"))
- assertTrue(result[0][1].contains("`error_code` INT NULL,"))
- assertTrue(result[0][1].contains("`error_message` TEXT NULL,"))
- assertTrue(result[0][1].contains("`query_time` BIGINT NULL,"))
- assertTrue(result[0][1].contains("`scan_bytes` BIGINT NULL,"))
- assertTrue(result[0][1].contains("`scan_rows` BIGINT NULL,"))
- assertTrue(result[0][1].contains("`return_rows` BIGINT NULL,"))
- assertTrue(result[0][1].contains("`stmt_id` BIGINT NULL,"))
- assertTrue(result[0][1].contains("`is_query` TINYINT NULL,"))
- assertTrue(result[0][1].contains("`frontend_ip` VARCHAR(128) NULL,"))
- assertTrue(result[0][1].contains("`cpu_time_ms` BIGINT NULL,"))
- assertTrue(result[0][1].contains("`sql_hash` VARCHAR(128) NULL,"))
- assertTrue(result[0][1].contains("`sql_digest` VARCHAR(128) NULL,"))
- assertTrue(result[0][1].contains("`peak_memory_bytes` BIGINT NULL,"))
- assertTrue(result[0][1].contains("`workload_group` TEXT NULL,"))
- assertTrue(result[0][1].contains("`stmt` TEXT NULL"))
+ assertTrue(result[0][1].contains("`query_id` varchar(48) NULL,"))
+ assertTrue(result[0][1].contains("`time` datetime(3) NULL,"))
+ assertTrue(result[0][1].contains("`client_ip` varchar(128) NULL,"))
+ assertTrue(result[0][1].contains("`user` varchar(128) NULL,"))
+ assertTrue(result[0][1].contains("`catalog` varchar(128) NULL"))
+ assertTrue(result[0][1].contains("`db` varchar(128) NULL,"))
+ assertTrue(result[0][1].contains("`state` varchar(128) NULL"))
+ assertTrue(result[0][1].contains("`error_code` int NULL,"))
+ assertTrue(result[0][1].contains("`error_message` text NULL,"))
+ assertTrue(result[0][1].contains("`query_time` bigint NULL,"))
+ assertTrue(result[0][1].contains("`scan_bytes` bigint NULL,"))
+ assertTrue(result[0][1].contains("`scan_rows` bigint NULL,"))
+ assertTrue(result[0][1].contains("`return_rows` bigint NULL,"))
+ assertTrue(result[0][1].contains("`stmt_id` bigint NULL,"))
+ assertTrue(result[0][1].contains("`is_query` tinyint NULL,"))
+ assertTrue(result[0][1].contains("`frontend_ip` varchar(128) NULL,"))
+ assertTrue(result[0][1].contains("`cpu_time_ms` bigint NULL,"))
+ assertTrue(result[0][1].contains("`sql_hash` varchar(128) NULL,"))
+ assertTrue(result[0][1].contains("`sql_digest` varchar(128) NULL,"))
+ assertTrue(result[0][1].contains("`peak_memory_bytes` bigint NULL,"))
+ assertTrue(result[0][1].contains("`workload_group` text NULL,"))
+ assertTrue(result[0][1].contains("`stmt` text NULL"))
assertTrue(result[0][1].contains("ENGINE=OLAP"))