This is an automated email from the ASF dual-hosted git repository.
morningman pushed a commit to branch branch-2.1
in repository https://gitbox.apache.org/repos/asf/doris.git
The following commit(s) were added to refs/heads/branch-2.1 by this push:
new bf3ea1839c6 [test] Move external p2 test cases to p0. (#37070) (#37140)
bf3ea1839c6 is described below
commit bf3ea1839c6df9b4bb6d40402aced1f8fe5301ca
Author: Jibing-Li <[email protected]>
AuthorDate: Thu Jul 4 11:19:31 2024 +0800
[test] Move external p2 test cases to p0. (#37070) (#37140)
backport: https://github.com/apache/doris/pull/37070
---
.../orc_partitioned_columns/create_table.hql | 20 ++++
.../orc_partitioned_columns/data.tar.gz | Bin 0 -> 722 bytes
.../multi_catalog/orc_partitioned_columns/run.sh | 12 +++
.../orc_partitioned_one_column/create_table.hql | 20 ++++
.../orc_partitioned_one_column/data.tar.gz | Bin 0 -> 1064 bytes
.../orc_partitioned_one_column/run.sh | 12 +++
.../parquet_partitioned_columns/create_table.hql | 20 ++++
.../parquet_partitioned_columns/data.tar.gz | Bin 0 -> 639 bytes
.../parquet_partitioned_columns/run.sh | 12 +++
.../create_table.hql | 20 ++++
.../parquet_partitioned_one_column/data.tar.gz | Bin 0 -> 729 bytes
.../parquet_partitioned_one_column/run.sh | 12 +++
.../test_mixed_par_locations_orc/create_table.hql | 22 ++++
.../test_mixed_par_locations_orc/data.tar.gz | Bin 0 -> 1325 bytes
.../test_mixed_par_locations_orc/run.sh | 12 +++
.../create_table.hql | 22 ++++
.../test_mixed_par_locations_parquet/data.tar.gz | Bin 0 -> 952 bytes
.../test_mixed_par_locations_parquet/run.sh | 12 +++
.../create_table.hql | 18 ++++
.../data.tar.gz | Bin 0 -> 781 bytes
.../run.sh | 12 +++
.../create_table.hql | 18 ++++
.../data.tar.gz | Bin 0 -> 611 bytes
.../run.sh | 12 +++
.../create_table.hql | 18 ++++
.../data.tar.gz | Bin 0 -> 316 bytes
.../run.sh | 12 +++
.../text_partitioned_columns/create_table.hql | 21 ++++
.../text_partitioned_columns/data.tar.gz | Bin 0 -> 410 bytes
.../multi_catalog/text_partitioned_columns/run.sh | 12 +++
.../text_partitioned_one_column/create_table.hql | 21 ++++
.../text_partitioned_one_column/data.tar.gz | Bin 0 -> 321 bytes
.../text_partitioned_one_column/run.sh | 12 +++
.../hive/test_external_catalog_hive_partition.out | 118 ++++++++++++---------
.../hive/test_mixed_par_locations.out | 36 +++++++
.../hive/test_truncate_char_or_varchar_columns.out | 84 +++++++++++++++
.../test_external_catalog_hive_partition.groovy | 20 ++--
.../hive/test_hive_statistic_auto.groovy | 16 +--
.../hive/test_hive_statistic_clean.groovy | 24 +++--
.../hive/test_mixed_par_locations.groovy | 57 ++++++++++
.../test_truncate_char_or_varchar_columns.groovy | 16 +--
.../hive/test_mixed_par_locations.groovy | 62 -----------
42 files changed, 646 insertions(+), 139 deletions(-)
diff --git a/docker/thirdparties/docker-compose/hive/scripts/data/multi_catalog/orc_partitioned_columns/create_table.hql b/docker/thirdparties/docker-compose/hive/scripts/data/multi_catalog/orc_partitioned_columns/create_table.hql
new file mode 100644
index 00000000000..3cc9ce67032
--- /dev/null
+++ b/docker/thirdparties/docker-compose/hive/scripts/data/multi_catalog/orc_partitioned_columns/create_table.hql
@@ -0,0 +1,20 @@
+CREATE DATABASE IF NOT EXISTS multi_catalog;
+USE multi_catalog;
+
+CREATE TABLE `orc_partitioned_columns`(
+ `t_timestamp` timestamp)
+PARTITIONED BY (
+ `t_int` int,
+ `t_float` float,
+ `t_string` string)
+ROW FORMAT SERDE
+ 'org.apache.hadoop.hive.ql.io.orc.OrcSerde'
+WITH SERDEPROPERTIES (
+ 'serialization.format' = '1')
+STORED AS INPUTFORMAT
+ 'org.apache.hadoop.hive.ql.io.orc.OrcInputFormat'
+OUTPUTFORMAT
+ 'org.apache.hadoop.hive.ql.io.orc.OrcOutputFormat'
+LOCATION '/user/doris/suites/multi_catalog/orc_partitioned_columns';
+
+msck repair table orc_partitioned_columns;
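All three partition keys (`t_int`, `t_float`, `t_string`) live in the directory layout rather than in the ORC files, so `msck repair table` is what turns the unpacked directories into registered partitions. A minimal sketch of the equivalent manual registration, with partition values borrowed from the expected outputs later in this commit:

    ALTER TABLE orc_partitioned_columns ADD IF NOT EXISTS
        PARTITION (t_int=100, t_float=0.3, t_string='test3');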
diff --git a/docker/thirdparties/docker-compose/hive/scripts/data/multi_catalog/orc_partitioned_columns/data.tar.gz b/docker/thirdparties/docker-compose/hive/scripts/data/multi_catalog/orc_partitioned_columns/data.tar.gz
new file mode 100644
index 00000000000..ea87f4489b0
Binary files /dev/null and b/docker/thirdparties/docker-compose/hive/scripts/data/multi_catalog/orc_partitioned_columns/data.tar.gz differ
diff --git a/docker/thirdparties/docker-compose/hive/scripts/data/multi_catalog/orc_partitioned_columns/run.sh b/docker/thirdparties/docker-compose/hive/scripts/data/multi_catalog/orc_partitioned_columns/run.sh
new file mode 100644
index 00000000000..f3136eaa200
--- /dev/null
+++ b/docker/thirdparties/docker-compose/hive/scripts/data/multi_catalog/orc_partitioned_columns/run.sh
@@ -0,0 +1,12 @@
+#!/bin/bash
+set -x
+
+CUR_DIR="$(cd "$(dirname "${BASH_SOURCE[0]}")" &>/dev/null && pwd)"
+
+## mkdir and put data to hdfs
+cd "${CUR_DIR}" && rm -rf data/ && tar xzf data.tar.gz
+hadoop fs -mkdir -p /user/doris/suites/multi_catalog/
+hadoop fs -put "${CUR_DIR}"/data/* /user/doris/suites/multi_catalog/
+
+# create table
+hive -f "${CUR_DIR}/create_table.hql"
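Each run.sh in this commit follows the same unpack/upload/create pipeline. Once it has run, a quick sanity check from the Hive CLI (a sketch; it assumes the data landed under multi_catalog as above):

    SHOW PARTITIONS multi_catalog.orc_partitioned_columns;
    SELECT * FROM multi_catalog.orc_partitioned_columns LIMIT 10;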
diff --git a/docker/thirdparties/docker-compose/hive/scripts/data/multi_catalog/orc_partitioned_one_column/create_table.hql b/docker/thirdparties/docker-compose/hive/scripts/data/multi_catalog/orc_partitioned_one_column/create_table.hql
new file mode 100644
index 00000000000..21e42866abd
--- /dev/null
+++ b/docker/thirdparties/docker-compose/hive/scripts/data/multi_catalog/orc_partitioned_one_column/create_table.hql
@@ -0,0 +1,20 @@
+CREATE DATABASE IF NOT EXISTS multi_catalog;
+USE multi_catalog;
+
+CREATE TABLE `orc_partitioned_one_column`(
+ `t_float` float,
+ `t_string` string,
+ `t_timestamp` timestamp)
+PARTITIONED BY (
+ `t_int` int)
+ROW FORMAT SERDE
+ 'org.apache.hadoop.hive.ql.io.orc.OrcSerde'
+WITH SERDEPROPERTIES (
+ 'serialization.format' = '1')
+STORED AS INPUTFORMAT
+ 'org.apache.hadoop.hive.ql.io.orc.OrcInputFormat'
+OUTPUTFORMAT
+ 'org.apache.hadoop.hive.ql.io.orc.OrcOutputFormat'
+LOCATION '/user/doris/suites/multi_catalog/orc_partitioned_one_column';
+
+msck repair table orc_partitioned_one_column;
diff --git a/docker/thirdparties/docker-compose/hive/scripts/data/multi_catalog/orc_partitioned_one_column/data.tar.gz b/docker/thirdparties/docker-compose/hive/scripts/data/multi_catalog/orc_partitioned_one_column/data.tar.gz
new file mode 100644
index 00000000000..91dd8646e79
Binary files /dev/null and b/docker/thirdparties/docker-compose/hive/scripts/data/multi_catalog/orc_partitioned_one_column/data.tar.gz differ
diff --git a/docker/thirdparties/docker-compose/hive/scripts/data/multi_catalog/orc_partitioned_one_column/run.sh b/docker/thirdparties/docker-compose/hive/scripts/data/multi_catalog/orc_partitioned_one_column/run.sh
new file mode 100644
index 00000000000..f3136eaa200
--- /dev/null
+++ b/docker/thirdparties/docker-compose/hive/scripts/data/multi_catalog/orc_partitioned_one_column/run.sh
@@ -0,0 +1,12 @@
+#!/bin/bash
+set -x
+
+CUR_DIR="$(cd "$(dirname "${BASH_SOURCE[0]}")" &>/dev/null && pwd)"
+
+## mkdir and put data to hdfs
+cd "${CUR_DIR}" && rm -rf data/ && tar xzf data.tar.gz
+hadoop fs -mkdir -p /user/doris/suites/multi_catalog/
+hadoop fs -put "${CUR_DIR}"/data/* /user/doris/suites/multi_catalog/
+
+# create table
+hive -f "${CUR_DIR}/create_table.hql"
diff --git a/docker/thirdparties/docker-compose/hive/scripts/data/multi_catalog/parquet_partitioned_columns/create_table.hql b/docker/thirdparties/docker-compose/hive/scripts/data/multi_catalog/parquet_partitioned_columns/create_table.hql
new file mode 100644
index 00000000000..8df497e249d
--- /dev/null
+++ b/docker/thirdparties/docker-compose/hive/scripts/data/multi_catalog/parquet_partitioned_columns/create_table.hql
@@ -0,0 +1,20 @@
+CREATE DATABASE IF NOT EXISTS multi_catalog;
+USE multi_catalog;
+
+CREATE TABLE `parquet_partitioned_columns`(
+ `t_timestamp` timestamp)
+PARTITIONED BY (
+ `t_int` int,
+ `t_float` float,
+ `t_string` string)
+ROW FORMAT SERDE
+ 'org.apache.hadoop.hive.ql.io.parquet.serde.ParquetHiveSerDe'
+WITH SERDEPROPERTIES (
+ 'serialization.format' = '1')
+STORED AS INPUTFORMAT
+ 'org.apache.hadoop.hive.ql.io.parquet.MapredParquetInputFormat'
+OUTPUTFORMAT
+ 'org.apache.hadoop.hive.ql.io.parquet.MapredParquetOutputFormat'
+LOCATION '/user/doris/suites/multi_catalog/parquet_partitioned_columns';
+
+msck repair table parquet_partitioned_columns;
diff --git a/docker/thirdparties/docker-compose/hive/scripts/data/multi_catalog/parquet_partitioned_columns/data.tar.gz b/docker/thirdparties/docker-compose/hive/scripts/data/multi_catalog/parquet_partitioned_columns/data.tar.gz
new file mode 100644
index 00000000000..c212294940e
Binary files /dev/null and b/docker/thirdparties/docker-compose/hive/scripts/data/multi_catalog/parquet_partitioned_columns/data.tar.gz differ
diff --git a/docker/thirdparties/docker-compose/hive/scripts/data/multi_catalog/parquet_partitioned_columns/run.sh b/docker/thirdparties/docker-compose/hive/scripts/data/multi_catalog/parquet_partitioned_columns/run.sh
new file mode 100644
index 00000000000..f3136eaa200
--- /dev/null
+++ b/docker/thirdparties/docker-compose/hive/scripts/data/multi_catalog/parquet_partitioned_columns/run.sh
@@ -0,0 +1,12 @@
+#!/bin/bash
+set -x
+
+CUR_DIR="$(cd "$(dirname "${BASH_SOURCE[0]}")" &>/dev/null && pwd)"
+
+## mkdir and put data to hdfs
+cd "${CUR_DIR}" && rm -rf data/ && tar xzf data.tar.gz
+hadoop fs -mkdir -p /user/doris/suites/multi_catalog/
+hadoop fs -put "${CUR_DIR}"/data/* /user/doris/suites/multi_catalog/
+
+# create table
+hive -f "${CUR_DIR}/create_table.hql"
diff --git a/docker/thirdparties/docker-compose/hive/scripts/data/multi_catalog/parquet_partitioned_one_column/create_table.hql b/docker/thirdparties/docker-compose/hive/scripts/data/multi_catalog/parquet_partitioned_one_column/create_table.hql
new file mode 100644
index 00000000000..ad839449a03
--- /dev/null
+++ b/docker/thirdparties/docker-compose/hive/scripts/data/multi_catalog/parquet_partitioned_one_column/create_table.hql
@@ -0,0 +1,20 @@
+CREATE DATABASE IF NOT EXISTS multi_catalog;
+USE multi_catalog;
+
+CREATE TABLE `parquet_partitioned_one_column`(
+ `t_float` float,
+ `t_string` string,
+ `t_timestamp` timestamp)
+PARTITIONED BY (
+ `t_int` int)
+ROW FORMAT SERDE
+ 'org.apache.hadoop.hive.ql.io.parquet.serde.ParquetHiveSerDe'
+WITH SERDEPROPERTIES (
+ 'serialization.format' = '1')
+STORED AS INPUTFORMAT
+ 'org.apache.hadoop.hive.ql.io.parquet.MapredParquetInputFormat'
+OUTPUTFORMAT
+ 'org.apache.hadoop.hive.ql.io.parquet.MapredParquetOutputFormat'
+LOCATION '/user/doris/suites/multi_catalog/parquet_partitioned_one_column';
+
+msck repair table parquet_partitioned_one_column;
diff --git a/docker/thirdparties/docker-compose/hive/scripts/data/multi_catalog/parquet_partitioned_one_column/data.tar.gz b/docker/thirdparties/docker-compose/hive/scripts/data/multi_catalog/parquet_partitioned_one_column/data.tar.gz
new file mode 100644
index 00000000000..193de4c0191
Binary files /dev/null and b/docker/thirdparties/docker-compose/hive/scripts/data/multi_catalog/parquet_partitioned_one_column/data.tar.gz differ
diff --git a/docker/thirdparties/docker-compose/hive/scripts/data/multi_catalog/parquet_partitioned_one_column/run.sh b/docker/thirdparties/docker-compose/hive/scripts/data/multi_catalog/parquet_partitioned_one_column/run.sh
new file mode 100644
index 00000000000..f3136eaa200
--- /dev/null
+++ b/docker/thirdparties/docker-compose/hive/scripts/data/multi_catalog/parquet_partitioned_one_column/run.sh
@@ -0,0 +1,12 @@
+#!/bin/bash
+set -x
+
+CUR_DIR="$(cd "$(dirname "${BASH_SOURCE[0]}")" &>/dev/null && pwd)"
+
+## mkdir and put data to hdfs
+cd "${CUR_DIR}" && rm -rf data/ && tar xzf data.tar.gz
+hadoop fs -mkdir -p /user/doris/suites/multi_catalog/
+hadoop fs -put "${CUR_DIR}"/data/* /user/doris/suites/multi_catalog/
+
+# create table
+hive -f "${CUR_DIR}/create_table.hql"
diff --git a/docker/thirdparties/docker-compose/hive/scripts/data/multi_catalog/test_mixed_par_locations_orc/create_table.hql b/docker/thirdparties/docker-compose/hive/scripts/data/multi_catalog/test_mixed_par_locations_orc/create_table.hql
new file mode 100644
index 00000000000..9521cd80fb1
--- /dev/null
+++ b/docker/thirdparties/docker-compose/hive/scripts/data/multi_catalog/test_mixed_par_locations_orc/create_table.hql
@@ -0,0 +1,22 @@
+CREATE DATABASE IF NOT EXISTS multi_catalog;
+USE multi_catalog;
+
+CREATE TABLE `test_mixed_par_locations_orc`(
+ `id` int,
+ `name` string,
+ `age` int,
+ `city` string,
+ `sex` string)
+PARTITIONED BY (
+ `par` string)
+ROW FORMAT SERDE
+ 'org.apache.hadoop.hive.ql.io.orc.OrcSerde'
+WITH SERDEPROPERTIES (
+ 'serialization.format' = '1')
+STORED AS INPUTFORMAT
+ 'org.apache.hadoop.hive.ql.io.orc.OrcInputFormat'
+OUTPUTFORMAT
+ 'org.apache.hadoop.hive.ql.io.orc.OrcOutputFormat'
+LOCATION '/user/doris/suites/multi_catalog/test_mixed_par_locations_orc';
+
+msck repair table test_mixed_par_locations_orc;
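The table name suggests the dataset exercises partitions whose directories do not all sit under the table's default LOCATION. A sketch of how one such partition could be attached by hand (the alternate path here is hypothetical, for illustration only):

    ALTER TABLE test_mixed_par_locations_orc ADD IF NOT EXISTS
        PARTITION (par='20230102')
        LOCATION '/user/doris/suites/multi_catalog/other_location/par=20230102';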
diff --git a/docker/thirdparties/docker-compose/hive/scripts/data/multi_catalog/test_mixed_par_locations_orc/data.tar.gz b/docker/thirdparties/docker-compose/hive/scripts/data/multi_catalog/test_mixed_par_locations_orc/data.tar.gz
new file mode 100644
index 00000000000..047a4e85bcd
Binary files /dev/null and b/docker/thirdparties/docker-compose/hive/scripts/data/multi_catalog/test_mixed_par_locations_orc/data.tar.gz differ
diff --git a/docker/thirdparties/docker-compose/hive/scripts/data/multi_catalog/test_mixed_par_locations_orc/run.sh b/docker/thirdparties/docker-compose/hive/scripts/data/multi_catalog/test_mixed_par_locations_orc/run.sh
new file mode 100644
index 00000000000..f3136eaa200
--- /dev/null
+++ b/docker/thirdparties/docker-compose/hive/scripts/data/multi_catalog/test_mixed_par_locations_orc/run.sh
@@ -0,0 +1,12 @@
+#!/bin/bash
+set -x
+
+CUR_DIR="$(cd "$(dirname "${BASH_SOURCE[0]}")" &>/dev/null && pwd)"
+
+## mkdir and put data to hdfs
+cd "${CUR_DIR}" && rm -rf data/ && tar xzf data.tar.gz
+hadoop fs -mkdir -p /user/doris/suites/multi_catalog/
+hadoop fs -put "${CUR_DIR}"/data/* /user/doris/suites/multi_catalog/
+
+# create table
+hive -f "${CUR_DIR}/create_table.hql"
diff --git a/docker/thirdparties/docker-compose/hive/scripts/data/multi_catalog/test_mixed_par_locations_parquet/create_table.hql b/docker/thirdparties/docker-compose/hive/scripts/data/multi_catalog/test_mixed_par_locations_parquet/create_table.hql
new file mode 100644
index 00000000000..951b2f724a5
--- /dev/null
+++ b/docker/thirdparties/docker-compose/hive/scripts/data/multi_catalog/test_mixed_par_locations_parquet/create_table.hql
@@ -0,0 +1,22 @@
+CREATE DATABASE IF NOT EXISTS multi_catalog;
+USE multi_catalog;
+
+CREATE TABLE `test_mixed_par_locations_parquet`(
+ `id` int,
+ `name` string,
+ `age` int,
+ `city` string,
+ `sex` string)
+PARTITIONED BY (
+ `par` string)
+ROW FORMAT SERDE
+ 'org.apache.hadoop.hive.ql.io.parquet.serde.ParquetHiveSerDe'
+WITH SERDEPROPERTIES (
+ 'serialization.format' = '1')
+STORED AS INPUTFORMAT
+ 'org.apache.hadoop.hive.ql.io.parquet.MapredParquetInputFormat'
+OUTPUTFORMAT
+ 'org.apache.hadoop.hive.ql.io.parquet.MapredParquetOutputFormat'
+LOCATION '/user/doris/suites/multi_catalog/test_mixed_par_locations_parquet';
+
+msck repair table test_mixed_par_locations_parquet;
diff --git a/docker/thirdparties/docker-compose/hive/scripts/data/multi_catalog/test_mixed_par_locations_parquet/data.tar.gz b/docker/thirdparties/docker-compose/hive/scripts/data/multi_catalog/test_mixed_par_locations_parquet/data.tar.gz
new file mode 100644
index 00000000000..9994d85e4eb
Binary files /dev/null and b/docker/thirdparties/docker-compose/hive/scripts/data/multi_catalog/test_mixed_par_locations_parquet/data.tar.gz differ
diff --git a/docker/thirdparties/docker-compose/hive/scripts/data/multi_catalog/test_mixed_par_locations_parquet/run.sh b/docker/thirdparties/docker-compose/hive/scripts/data/multi_catalog/test_mixed_par_locations_parquet/run.sh
new file mode 100644
index 00000000000..f3136eaa200
--- /dev/null
+++ b/docker/thirdparties/docker-compose/hive/scripts/data/multi_catalog/test_mixed_par_locations_parquet/run.sh
@@ -0,0 +1,12 @@
+#!/bin/bash
+set -x
+
+CUR_DIR="$(cd "$(dirname "${BASH_SOURCE[0]}")" &>/dev/null && pwd)"
+
+## mkdir and put data to hdfs
+cd "${CUR_DIR}" && rm -rf data/ && tar xzf data.tar.gz
+hadoop fs -mkdir -p /user/doris/suites/multi_catalog/
+hadoop fs -put "${CUR_DIR}"/data/* /user/doris/suites/multi_catalog/
+
+# create table
+hive -f "${CUR_DIR}/create_table.hql"
diff --git a/docker/thirdparties/docker-compose/hive/scripts/data/multi_catalog/test_truncate_char_or_varchar_columns_orc/create_table.hql b/docker/thirdparties/docker-compose/hive/scripts/data/multi_catalog/test_truncate_char_or_varchar_columns_orc/create_table.hql
new file mode 100644
index 00000000000..19cb03245a3
--- /dev/null
+++ b/docker/thirdparties/docker-compose/hive/scripts/data/multi_catalog/test_truncate_char_or_varchar_columns_orc/create_table.hql
@@ -0,0 +1,18 @@
+CREATE DATABASE IF NOT EXISTS multi_catalog;
+USE multi_catalog;
+
+CREATE TABLE `test_truncate_char_or_varchar_columns_orc`(
+ `id` int,
+ `city` varchar(3),
+ `country` char(3))
+ROW FORMAT SERDE
+ 'org.apache.hadoop.hive.ql.io.orc.OrcSerde'
+WITH SERDEPROPERTIES (
+ 'serialization.format' = '1')
+STORED AS INPUTFORMAT
+ 'org.apache.hadoop.hive.ql.io.orc.OrcInputFormat'
+OUTPUTFORMAT
+ 'org.apache.hadoop.hive.ql.io.orc.OrcOutputFormat'
+LOCATION '/user/doris/suites/multi_catalog/test_truncate_char_or_varchar_columns_orc';
+
+msck repair table test_truncate_char_or_varchar_columns_orc;
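With `city` declared as varchar(3) and `country` as char(3), the expected outputs later in this commit show both truncated reads ('hangzhou' comes back as 'han') and full-length reads, so the suite presumably toggles truncation through a session setting. Its first check boils down to:

    SELECT id, city, country
    FROM test_truncate_char_or_varchar_columns_orc
    ORDER BY id;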
diff --git a/docker/thirdparties/docker-compose/hive/scripts/data/multi_catalog/test_truncate_char_or_varchar_columns_orc/data.tar.gz b/docker/thirdparties/docker-compose/hive/scripts/data/multi_catalog/test_truncate_char_or_varchar_columns_orc/data.tar.gz
new file mode 100644
index 00000000000..78316632106
Binary files /dev/null and b/docker/thirdparties/docker-compose/hive/scripts/data/multi_catalog/test_truncate_char_or_varchar_columns_orc/data.tar.gz differ
diff --git a/docker/thirdparties/docker-compose/hive/scripts/data/multi_catalog/test_truncate_char_or_varchar_columns_orc/run.sh b/docker/thirdparties/docker-compose/hive/scripts/data/multi_catalog/test_truncate_char_or_varchar_columns_orc/run.sh
new file mode 100644
index 00000000000..f3136eaa200
--- /dev/null
+++ b/docker/thirdparties/docker-compose/hive/scripts/data/multi_catalog/test_truncate_char_or_varchar_columns_orc/run.sh
@@ -0,0 +1,12 @@
+#!/bin/bash
+set -x
+
+CUR_DIR="$(cd "$(dirname "${BASH_SOURCE[0]}")" &>/dev/null && pwd)"
+
+## mkdir and put data to hdfs
+cd "${CUR_DIR}" && rm -rf data/ && tar xzf data.tar.gz
+hadoop fs -mkdir -p /user/doris/suites/multi_catalog/
+hadoop fs -put "${CUR_DIR}"/data/* /user/doris/suites/multi_catalog/
+
+# create table
+hive -f "${CUR_DIR}/create_table.hql"
diff --git a/docker/thirdparties/docker-compose/hive/scripts/data/multi_catalog/test_truncate_char_or_varchar_columns_parquet/create_table.hql b/docker/thirdparties/docker-compose/hive/scripts/data/multi_catalog/test_truncate_char_or_varchar_columns_parquet/create_table.hql
new file mode 100644
index 00000000000..d038dbe4f56
--- /dev/null
+++ b/docker/thirdparties/docker-compose/hive/scripts/data/multi_catalog/test_truncate_char_or_varchar_columns_parquet/create_table.hql
@@ -0,0 +1,18 @@
+CREATE DATABASE IF NOT EXISTS multi_catalog;
+USE multi_catalog;
+
+CREATE TABLE `test_truncate_char_or_varchar_columns_parquet`(
+ `id` int,
+ `city` varchar(3),
+ `country` char(3))
+ROW FORMAT SERDE
+ 'org.apache.hadoop.hive.ql.io.parquet.serde.ParquetHiveSerDe'
+WITH SERDEPROPERTIES (
+ 'serialization.format' = '1')
+STORED AS INPUTFORMAT
+ 'org.apache.hadoop.hive.ql.io.parquet.MapredParquetInputFormat'
+OUTPUTFORMAT
+ 'org.apache.hadoop.hive.ql.io.parquet.MapredParquetOutputFormat'
+LOCATION '/user/doris/suites/multi_catalog/test_truncate_char_or_varchar_columns_parquet';
+
+msck repair table test_truncate_char_or_varchar_columns_parquet;
diff --git a/docker/thirdparties/docker-compose/hive/scripts/data/multi_catalog/test_truncate_char_or_varchar_columns_parquet/data.tar.gz b/docker/thirdparties/docker-compose/hive/scripts/data/multi_catalog/test_truncate_char_or_varchar_columns_parquet/data.tar.gz
new file mode 100644
index 00000000000..1e65dca1bb3
Binary files /dev/null and b/docker/thirdparties/docker-compose/hive/scripts/data/multi_catalog/test_truncate_char_or_varchar_columns_parquet/data.tar.gz differ
diff --git a/docker/thirdparties/docker-compose/hive/scripts/data/multi_catalog/test_truncate_char_or_varchar_columns_parquet/run.sh b/docker/thirdparties/docker-compose/hive/scripts/data/multi_catalog/test_truncate_char_or_varchar_columns_parquet/run.sh
new file mode 100644
index 00000000000..f3136eaa200
--- /dev/null
+++ b/docker/thirdparties/docker-compose/hive/scripts/data/multi_catalog/test_truncate_char_or_varchar_columns_parquet/run.sh
@@ -0,0 +1,12 @@
+#!/bin/bash
+set -x
+
+CUR_DIR="$(cd "$(dirname "${BASH_SOURCE[0]}")" &>/dev/null && pwd)"
+
+## mkdir and put data to hdfs
+cd "${CUR_DIR}" && rm -rf data/ && tar xzf data.tar.gz
+hadoop fs -mkdir -p /user/doris/suites/multi_catalog/
+hadoop fs -put "${CUR_DIR}"/data/* /user/doris/suites/multi_catalog/
+
+# create table
+hive -f "${CUR_DIR}/create_table.hql"
diff --git a/docker/thirdparties/docker-compose/hive/scripts/data/multi_catalog/test_truncate_char_or_varchar_columns_text/create_table.hql b/docker/thirdparties/docker-compose/hive/scripts/data/multi_catalog/test_truncate_char_or_varchar_columns_text/create_table.hql
new file mode 100644
index 00000000000..c52bbf4a2d2
--- /dev/null
+++ b/docker/thirdparties/docker-compose/hive/scripts/data/multi_catalog/test_truncate_char_or_varchar_columns_text/create_table.hql
@@ -0,0 +1,18 @@
+CREATE DATABASE IF NOT EXISTS multi_catalog;
+USE multi_catalog;
+
+CREATE TABLE `test_truncate_char_or_varchar_columns_text`(
+ `id` int,
+ `city` varchar(3),
+ `country` char(3))
+ROW FORMAT SERDE
+ 'org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe'
+WITH SERDEPROPERTIES (
+ 'serialization.format' = '1')
+STORED AS INPUTFORMAT
+ 'org.apache.hadoop.mapred.TextInputFormat'
+OUTPUTFORMAT
+ 'org.apache.hadoop.hive.ql.io.HiveIgnoreKeyTextOutputFormat'
+LOCATION '/user/doris/suites/multi_catalog/test_truncate_char_or_varchar_columns_text';
+
+msck repair table test_truncate_char_or_varchar_columns_text;
diff --git a/docker/thirdparties/docker-compose/hive/scripts/data/multi_catalog/test_truncate_char_or_varchar_columns_text/data.tar.gz b/docker/thirdparties/docker-compose/hive/scripts/data/multi_catalog/test_truncate_char_or_varchar_columns_text/data.tar.gz
new file mode 100644
index 00000000000..ce107f7438e
Binary files /dev/null and b/docker/thirdparties/docker-compose/hive/scripts/data/multi_catalog/test_truncate_char_or_varchar_columns_text/data.tar.gz differ
diff --git a/docker/thirdparties/docker-compose/hive/scripts/data/multi_catalog/test_truncate_char_or_varchar_columns_text/run.sh b/docker/thirdparties/docker-compose/hive/scripts/data/multi_catalog/test_truncate_char_or_varchar_columns_text/run.sh
new file mode 100644
index 00000000000..f3136eaa200
--- /dev/null
+++ b/docker/thirdparties/docker-compose/hive/scripts/data/multi_catalog/test_truncate_char_or_varchar_columns_text/run.sh
@@ -0,0 +1,12 @@
+#!/bin/bash
+set -x
+
+CUR_DIR="$(cd "$(dirname "${BASH_SOURCE[0]}")" &>/dev/null && pwd)"
+
+## mkdir and put data to hdfs
+cd "${CUR_DIR}" && rm -rf data/ && tar xzf data.tar.gz
+hadoop fs -mkdir -p /user/doris/suites/multi_catalog/
+hadoop fs -put "${CUR_DIR}"/data/* /user/doris/suites/multi_catalog/
+
+# create table
+hive -f "${CUR_DIR}/create_table.hql"
diff --git a/docker/thirdparties/docker-compose/hive/scripts/data/multi_catalog/text_partitioned_columns/create_table.hql b/docker/thirdparties/docker-compose/hive/scripts/data/multi_catalog/text_partitioned_columns/create_table.hql
new file mode 100644
index 00000000000..863155230f3
--- /dev/null
+++ b/docker/thirdparties/docker-compose/hive/scripts/data/multi_catalog/text_partitioned_columns/create_table.hql
@@ -0,0 +1,21 @@
+CREATE DATABASE IF NOT EXISTS multi_catalog;
+USE multi_catalog;
+
+CREATE TABLE `text_partitioned_columns`(
+ `t_timestamp` timestamp)
+PARTITIONED BY (
+ `t_int` int,
+ `t_float` float,
+ `t_string` string)
+ROW FORMAT SERDE
+ 'org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe'
+WITH SERDEPROPERTIES (
+ 'serialization.format' = ',',
+ 'field.delim' = ',')
+STORED AS INPUTFORMAT
+ 'org.apache.hadoop.mapred.TextInputFormat'
+OUTPUTFORMAT
+ 'org.apache.hadoop.hive.ql.io.HiveIgnoreKeyTextOutputFormat'
+LOCATION '/user/doris/suites/multi_catalog/text_partitioned_columns';
+
+msck repair table text_partitioned_columns;
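Since this is a comma-delimited text table, each data file carries only the single `t_timestamp` column; the three partition values come from the directory path, not the file contents. A sketch of loading one such file by hand (the input path is hypothetical):

    LOAD DATA INPATH '/tmp/ts.csv' INTO TABLE text_partitioned_columns
        PARTITION (t_int=100, t_float=0.3, t_string='test3');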
diff --git a/docker/thirdparties/docker-compose/hive/scripts/data/multi_catalog/text_partitioned_columns/data.tar.gz b/docker/thirdparties/docker-compose/hive/scripts/data/multi_catalog/text_partitioned_columns/data.tar.gz
new file mode 100644
index 00000000000..f70f44b00a4
Binary files /dev/null and b/docker/thirdparties/docker-compose/hive/scripts/data/multi_catalog/text_partitioned_columns/data.tar.gz differ
diff --git a/docker/thirdparties/docker-compose/hive/scripts/data/multi_catalog/text_partitioned_columns/run.sh b/docker/thirdparties/docker-compose/hive/scripts/data/multi_catalog/text_partitioned_columns/run.sh
new file mode 100644
index 00000000000..f3136eaa200
--- /dev/null
+++ b/docker/thirdparties/docker-compose/hive/scripts/data/multi_catalog/text_partitioned_columns/run.sh
@@ -0,0 +1,12 @@
+#!/bin/bash
+set -x
+
+CUR_DIR="$(cd "$(dirname "${BASH_SOURCE[0]}")" &>/dev/null && pwd)"
+
+## mkdir and put data to hdfs
+cd "${CUR_DIR}" && rm -rf data/ && tar xzf data.tar.gz
+hadoop fs -mkdir -p /user/doris/suites/multi_catalog/
+hadoop fs -put "${CUR_DIR}"/data/* /user/doris/suites/multi_catalog/
+
+# create table
+hive -f "${CUR_DIR}/create_table.hql"
diff --git a/docker/thirdparties/docker-compose/hive/scripts/data/multi_catalog/text_partitioned_one_column/create_table.hql b/docker/thirdparties/docker-compose/hive/scripts/data/multi_catalog/text_partitioned_one_column/create_table.hql
new file mode 100644
index 00000000000..1eff2e09090
--- /dev/null
+++ b/docker/thirdparties/docker-compose/hive/scripts/data/multi_catalog/text_partitioned_one_column/create_table.hql
@@ -0,0 +1,21 @@
+CREATE DATABASE IF NOT EXISTS multi_catalog;
+USE multi_catalog;
+
+CREATE TABLE `text_partitioned_one_column`(
+ `t_float` float,
+ `t_string` string,
+ `t_timestamp` timestamp)
+PARTITIONED BY (
+ `t_int` int)
+ROW FORMAT SERDE
+ 'org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe'
+WITH SERDEPROPERTIES (
+ 'serialization.format' = ',',
+ 'field.delim' = ',')
+STORED AS INPUTFORMAT
+ 'org.apache.hadoop.mapred.TextInputFormat'
+OUTPUTFORMAT
+ 'org.apache.hadoop.hive.ql.io.HiveIgnoreKeyTextOutputFormat'
+LOCATION '/user/doris/suites/multi_catalog/text_partitioned_one_column';
+
+msck repair table text_partitioned_one_column;
diff --git a/docker/thirdparties/docker-compose/hive/scripts/data/multi_catalog/text_partitioned_one_column/data.tar.gz b/docker/thirdparties/docker-compose/hive/scripts/data/multi_catalog/text_partitioned_one_column/data.tar.gz
new file mode 100644
index 00000000000..443e52b561f
Binary files /dev/null and b/docker/thirdparties/docker-compose/hive/scripts/data/multi_catalog/text_partitioned_one_column/data.tar.gz differ
diff --git a/docker/thirdparties/docker-compose/hive/scripts/data/multi_catalog/text_partitioned_one_column/run.sh b/docker/thirdparties/docker-compose/hive/scripts/data/multi_catalog/text_partitioned_one_column/run.sh
new file mode 100644
index 00000000000..f3136eaa200
--- /dev/null
+++ b/docker/thirdparties/docker-compose/hive/scripts/data/multi_catalog/text_partitioned_one_column/run.sh
@@ -0,0 +1,12 @@
+#!/bin/bash
+set -x
+
+CUR_DIR="$(cd "$(dirname "${BASH_SOURCE[0]}")" &>/dev/null && pwd)"
+
+## mkdir and put data to hdfs
+cd "${CUR_DIR}" && rm -rf data/ && tar xzf data.tar.gz
+hadoop fs -mkdir -p /user/doris/suites/multi_catalog/
+hadoop fs -put "${CUR_DIR}"/data/* /user/doris/suites/multi_catalog/
+
+# create table
+hive -f "${CUR_DIR}/create_table.hql"
diff --git a/regression-test/data/external_table_p2/hive/test_external_catalog_hive_partition.out b/regression-test/data/external_table_p0/hive/test_external_catalog_hive_partition.out
similarity index 51%
rename from regression-test/data/external_table_p2/hive/test_external_catalog_hive_partition.out
rename to regression-test/data/external_table_p0/hive/test_external_catalog_hive_partition.out
index c823189e688..aa1e48a439d 100644
--- a/regression-test/data/external_table_p2/hive/test_external_catalog_hive_partition.out
+++ b/regression-test/data/external_table_p0/hive/test_external_catalog_hive_partition.out
@@ -23,30 +23,53 @@
-- !q06 --
2023-01-03T00:00 100 0.3 test3
--- !q07 --
-1994 50063846 1820677
-1995 58220229 1820677
-1995 66859335 1820677
-1997 77350500 1820677
-1995 98899109 1820677
-1996 122310373 1820677
-1996 138664326 1820677
-1995 145803300 1820677
-1998 187514084 1820677
-1994 197627203 1820677
-1993 216217095 1820677
-1997 260737890 1820677
-1998 279581856 1820677
-1992 296560224 1820677
-1993 306190854 1820677
-1997 329189126 1820677
-1992 389043491 1820677
-1997 435247522 1820677
-1998 449388167 1820677
-1994 526241665 1820677
-1998 533034534 1820677
-1996 576018657 1820677
-1997 582732039 1820677
+-- !q01 --
+0.1 test1 2023-01-01T00:00 \N
+0.2 test2 2023-01-02T00:00 \N
+0.3 test3 2023-01-03T00:00 100
+
+-- !q02 --
+0.1 test1 2023-01-01T00:00 \N
+0.2 test2 2023-01-02T00:00 \N
+
+-- !q03 --
+0.3 test3 2023-01-03T00:00 100
+
+-- !q04 --
+2023-01-01T00:00 \N 0.1 test1
+2023-01-02T00:00 \N 0.2 test2
+2023-01-03T00:00 100 0.3 test3
+
+-- !q05 --
+2023-01-01T00:00 \N 0.1 test1
+2023-01-02T00:00 \N 0.2 test2
+
+-- !q06 --
+2023-01-03T00:00 100 0.3 test3
+
+-- !q01 --
+0.1 test1 2023-01-01T00:00 \N
+0.2 test2 2023-01-02T00:00 \N
+0.3 test3 2023-01-03T00:00 100
+
+-- !q02 --
+0.1 test1 2023-01-01T00:00 \N
+0.2 test2 2023-01-02T00:00 \N
+
+-- !q03 --
+0.3 test3 2023-01-03T00:00 100
+
+-- !q04 --
+2023-01-01T00:00 \N 0.1 test1
+2023-01-02T00:00 \N 0.2 test2
+2023-01-03T00:00 100 0.3 test3
+
+-- !q05 --
+2023-01-01T00:00 \N 0.1 test1
+2023-01-02T00:00 \N 0.2 test2
+
+-- !q06 --
+2023-01-03T00:00 100 0.3 test3
-- !q01 --
0.1 test1 2023-01-01T00:00 \N
@@ -72,30 +95,29 @@
-- !q06 --
2023-01-03T00:00 100 0.3 test3
--- !q07 --
-1994 50063846 1820677
-1995 58220229 1820677
-1995 66859335 1820677
-1997 77350500 1820677
-1995 98899109 1820677
-1996 122310373 1820677
-1996 138664326 1820677
-1995 145803300 1820677
-1998 187514084 1820677
-1994 197627203 1820677
-1993 216217095 1820677
-1997 260737890 1820677
-1998 279581856 1820677
-1992 296560224 1820677
-1993 306190854 1820677
-1997 329189126 1820677
-1992 389043491 1820677
-1997 435247522 1820677
-1998 449388167 1820677
-1994 526241665 1820677
-1998 533034534 1820677
-1996 576018657 1820677
-1997 582732039 1820677
+-- !q01 --
+0.1 test1 2023-01-01T00:00 \N
+0.2 test2 2023-01-02T00:00 \N
+0.3 test3 2023-01-03T00:00 100
+
+-- !q02 --
+0.1 test1 2023-01-01T00:00 \N
+0.2 test2 2023-01-02T00:00 \N
+
+-- !q03 --
+0.3 test3 2023-01-03T00:00 100
+
+-- !q04 --
+2023-01-01T00:00 \N 0.1 test1
+2023-01-02T00:00 \N 0.2 test2
+2023-01-03T00:00 100 0.3 test3
+
+-- !q05 --
+2023-01-01T00:00 \N 0.1 test1
+2023-01-02T00:00 \N 0.2 test2
+
+-- !q06 --
+2023-01-03T00:00 100 0.3 test3
-- !q01 --
0.1 test1 2023-01-01T00:00 \N
diff --git a/regression-test/data/external_table_p2/hive/test_mixed_par_locations.out b/regression-test/data/external_table_p0/hive/test_mixed_par_locations.out
similarity index 53%
rename from regression-test/data/external_table_p2/hive/test_mixed_par_locations.out
rename to regression-test/data/external_table_p0/hive/test_mixed_par_locations.out
index e4344d897fd..7e57e2d8475 100644
--- a/regression-test/data/external_table_p2/hive/test_mixed_par_locations.out
+++ b/regression-test/data/external_table_p0/hive/test_mixed_par_locations.out
@@ -35,3 +35,39 @@ guangzhou 2
hangzhou 2
shanghai 2
+-- !01 --
+1 Tom 48 shanghai male 20230101
+2 Jerry 35 guangzhou male 20230101
+3 Frank 25 hangzhou male 20230101
+4 Ada 22 beijing female 20230101
+5 Jason 46 shanghai male 20230102
+6 Andy 38 guangzhou male 20230102
+7 Sam 29 hangzhou male 20230102
+8 Chloea 18 beijing female 20230102
+
+-- !02 --
+8
+
+-- !03 --
+guangzhou 2
+hangzhou 2
+shanghai 2
+
+-- !01 --
+1 Tom 48 shanghai male 20230101
+2 Jerry 35 guangzhou male 20230101
+3 Frank 25 hangzhou male 20230101
+4 Ada 22 beijing female 20230101
+5 Jason 46 shanghai male 20230102
+6 Andy 38 guangzhou male 20230102
+7 Sam 29 hangzhou male 20230102
+8 Chloea 18 beijing female 20230102
+
+-- !02 --
+8
+
+-- !03 --
+guangzhou 2
+hangzhou 2
+shanghai 2
+
diff --git a/regression-test/data/external_table_p2/hive/test_truncate_char_or_varchar_columns.out b/regression-test/data/external_table_p0/hive/test_truncate_char_or_varchar_columns.out
similarity index 51%
rename from regression-test/data/external_table_p2/hive/test_truncate_char_or_varchar_columns.out
rename to regression-test/data/external_table_p0/hive/test_truncate_char_or_varchar_columns.out
index 5dab20925f2..5c562000408 100644
--- a/regression-test/data/external_table_p2/hive/test_truncate_char_or_varchar_columns.out
+++ b/regression-test/data/external_table_p0/hive/test_truncate_char_or_varchar_columns.out
@@ -83,3 +83,87 @@ beijing at beijing in china
Boston at Boston in 美利坚合众国
哈尔滨 at 哈尔滨 in 中华人民共和国
+-- !q01 --
+1 han chi
+2 bei chi
+3 杭州 中华人
+4 Bos 美利坚
+5 哈尔滨 中华人
+
+-- !q02 --
+han at han in chi
+bei at bei in chi
+杭州 at 杭州 in 中华人
+Bos at Bos in 美利坚
+哈尔滨 at 哈尔滨 in 中华人
+
+-- !q01 --
+1 han chi
+2 bei chi
+3 杭州 中华人
+4 Bos 美利坚
+5 哈尔滨 中华人
+
+-- !q02 --
+han at han in chi
+bei at bei in chi
+杭州 at 杭州 in 中华人
+Bos at Bos in 美利坚
+哈尔滨 at 哈尔滨 in 中华人
+
+-- !q01 --
+1 han chi
+2 bei chi
+3 杭州 中华人
+4 Bos 美利坚
+5 哈尔滨 中华人
+
+-- !q02 --
+han at han in chi
+bei at bei in chi
+杭州 at 杭州 in 中华人
+Bos at Bos in 美利坚
+哈尔滨 at 哈尔滨 in 中华人
+
+-- !q01 --
+1 hangzhou china
+2 beijing china
+3 杭州 中华人民共和国
+4 Boston 美利坚合众国
+5 哈尔滨 中华人民共和国
+
+-- !q02 --
+hangzhou at hangzhou in china
+beijing at beijing in china
+杭州 at 杭州 in 中华人民共和国
+Boston at Boston in 美利坚合众国
+哈尔滨 at 哈尔滨 in 中华人民共和国
+
+-- !q01 --
+1 hangzhou china
+2 beijing china
+3 杭州 中华人民共和国
+4 Boston 美利坚合众国
+5 哈尔滨 中华人民共和国
+
+-- !q02 --
+hangzhou at hangzhou in china
+beijing at beijing in china
+杭州 at 杭州 in 中华人民共和国
+Boston at Boston in 美利坚合众国
+哈尔滨 at 哈尔滨 in 中华人民共和国
+
+-- !q01 --
+1 hangzhou china
+2 beijing china
+3 杭州 中华人民共和国
+4 Boston 美利坚合众国
+5 哈尔滨 中华人民共和国
+
+-- !q02 --
+hangzhou at hangzhou in china
+beijing at beijing in china
+杭州 at 杭州 in 中华人民共和国
+Boston at Boston in 美利坚合众国
+哈尔滨 at 哈尔滨 in 中华人民共和国
+
diff --git a/regression-test/suites/external_table_p2/hive/test_external_catalog_hive_partition.groovy b/regression-test/suites/external_table_p0/hive/test_external_catalog_hive_partition.groovy
similarity index 82%
rename from regression-test/suites/external_table_p2/hive/test_external_catalog_hive_partition.groovy
rename to regression-test/suites/external_table_p0/hive/test_external_catalog_hive_partition.groovy
index 196625b3b49..32b80f5650d 100644
--- a/regression-test/suites/external_table_p2/hive/test_external_catalog_hive_partition.groovy
+++ b/regression-test/suites/external_table_p0/hive/test_external_catalog_hive_partition.groovy
@@ -15,12 +15,16 @@
// specific language governing permissions and limitations
// under the License.
-suite("test_external_catalog_hive_partition", "p2,external,hive,external_remote,external_remote_hive") {
- String enabled = context.config.otherConfigs.get("enableExternalHiveTest")
- if (enabled != null && enabled.equalsIgnoreCase("true")) {
-        String extHiveHmsHost = context.config.otherConfigs.get("extHiveHmsHost")
-        String extHiveHmsPort = context.config.otherConfigs.get("extHiveHmsPort")
- String catalog_name = "test_external_catalog_hive_partition"
+suite("test_external_catalog_hive_partition", "p0,external,hive,external_docker,external_docker_hive") {
+ String enabled = context.config.otherConfigs.get("enableHiveTest")
+ if (enabled == null || !enabled.equalsIgnoreCase("true")) {
+ logger.info("disable Hive test.")
+ return;
+ }
+ for (String hivePrefix : ["hive2", "hive3"]) {
+        String extHiveHmsHost = context.config.otherConfigs.get("externalEnvIp")
+        String extHiveHmsPort = context.config.otherConfigs.get(hivePrefix + "HmsPort")
+        String catalog_name = "${hivePrefix}_test_external_catalog_hive_partition"
sql """drop catalog if exists ${catalog_name};"""
sql """
@@ -39,7 +43,7 @@ suite("test_external_catalog_hive_partition", "p2,external,hive,external_remote,
            qt_q04 """ select * from multi_catalog.parquet_partitioned_columns order by t_float """
            qt_q05 """ select * from multi_catalog.parquet_partitioned_columns where t_int is null order by t_float """
            qt_q06 """ select * from multi_catalog.parquet_partitioned_columns where t_int is not null order by t_float """
-            qt_q07 """ select o_orderyear, o_orderkey, o_custkey from multi_catalog.orders_par_parquet where o_custkey=1820677 order by o_orderkey """
+            //qt_q07 """ select o_orderyear, o_orderkey, o_custkey from multi_catalog.orders_par_parquet where o_custkey=1820677 order by o_orderkey """
}
// test orc format
def q01_orc = {
@@ -49,7 +53,7 @@ suite("test_external_catalog_hive_partition", "p2,external,hive,external_remote,
            qt_q04 """ select * from multi_catalog.orc_partitioned_columns order by t_float """
            qt_q05 """ select * from multi_catalog.orc_partitioned_columns where t_int is null order by t_float """
            qt_q06 """ select * from multi_catalog.orc_partitioned_columns where t_int is not null order by t_float """
-            qt_q07 """ select o_orderyear, o_orderkey, o_custkey from multi_catalog.orders_par_orc where o_custkey=1820677 order by o_orderkey """
+            //qt_q07 """ select o_orderyear, o_orderkey, o_custkey from multi_catalog.orders_par_orc where o_custkey=1820677 order by o_orderkey """
}
// test text format
def q01_text = {
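The q01..q06 pattern exercises partition pruning from the Doris side; spelled out as plain SQL, qt_q05 against the parquet table is:

    SELECT * FROM multi_catalog.parquet_partitioned_columns
    WHERE t_int IS NULL ORDER BY t_float;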
diff --git a/regression-test/suites/external_table_p2/hive/test_hive_statistic_auto.groovy b/regression-test/suites/external_table_p0/hive/test_hive_statistic_auto.groovy
similarity index 88%
rename from regression-test/suites/external_table_p2/hive/test_hive_statistic_auto.groovy
rename to regression-test/suites/external_table_p0/hive/test_hive_statistic_auto.groovy
index 8a7591daeb1..8a34bf9204f 100644
--- a/regression-test/suites/external_table_p2/hive/test_hive_statistic_auto.groovy
+++ b/regression-test/suites/external_table_p0/hive/test_hive_statistic_auto.groovy
@@ -15,12 +15,16 @@
// specific language governing permissions and limitations
// under the License.
-suite("test_hive_statistic_auto", "p2,external,hive,external_remote,external_remote_hive") {
- String enabled = context.config.otherConfigs.get("enableExternalHiveTest")
- if (enabled != null && enabled.equalsIgnoreCase("true")) {
-        String extHiveHmsHost = context.config.otherConfigs.get("extHiveHmsHost")
-        String extHiveHmsPort = context.config.otherConfigs.get("extHiveHmsPort")
- String catalog_name = "test_hive_statistic_auto"
+suite("test_hive_statistic_auto", "p0,external,hive,external_docker,external_docker_hive") {
+ String enabled = context.config.otherConfigs.get("enableHiveTest")
+ if (enabled == null || !enabled.equalsIgnoreCase("true")) {
+ logger.info("disable Hive test.")
+ return;
+ }
+ for (String hivePrefix : ["hive2", "hive3"]) {
+        String extHiveHmsHost = context.config.otherConfigs.get("externalEnvIp")
+        String extHiveHmsPort = context.config.otherConfigs.get(hivePrefix + "HmsPort")
+ String catalog_name = "${hivePrefix}_test_hive_statistic_auto"
sql """drop catalog if exists ${catalog_name};"""
sql """
create catalog if not exists ${catalog_name} properties (
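test_hive_statistic_auto keeps the same shape: one catalog per hivePrefix, then assertions over collected statistics. The kind of statement such a suite inspects, taken from the sibling clean suite (the `statistics` table name is an assumption here):

    SHOW COLUMN STATS `statistics`;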
diff --git a/regression-test/suites/external_table_p2/hive/test_hive_statistic_clean.groovy b/regression-test/suites/external_table_p0/hive/test_hive_statistic_clean.groovy
similarity index 88%
rename from regression-test/suites/external_table_p2/hive/test_hive_statistic_clean.groovy
rename to regression-test/suites/external_table_p0/hive/test_hive_statistic_clean.groovy
index e04f9fbe884..2813f1ffc90 100644
--- a/regression-test/suites/external_table_p2/hive/test_hive_statistic_clean.groovy
+++ b/regression-test/suites/external_table_p0/hive/test_hive_statistic_clean.groovy
@@ -15,17 +15,20 @@
// specific language governing permissions and limitations
// under the License.
-suite("test_hive_statistic_clean", "p2,external,hive,external_remote,external_remote_hive") {
- String enabled = context.config.otherConfigs.get("enableExternalHiveTest")
- if (enabled != null && enabled.equalsIgnoreCase("true")) {
-        String extHiveHmsHost = context.config.otherConfigs.get("extHiveHmsHost")
-        String extHiveHmsPort = context.config.otherConfigs.get("extHiveHmsPort")
- String catalog_name = "test_hive_statistic_clean"
+suite("test_hive_statistic_clean", "p0,external,hive,external_docker,external_docker_hive") {
+ String enabled = context.config.otherConfigs.get("enableHiveTest")
+ if (enabled == null || !enabled.equalsIgnoreCase("true")) {
+ logger.info("disable Hive test.")
+ return;
+ }
+ for (String hivePrefix : ["hive2", "hive3"]) {
+        String extHiveHmsHost = context.config.otherConfigs.get("externalEnvIp")
+        String extHiveHmsPort = context.config.otherConfigs.get(hivePrefix + "HmsPort")
+ String catalog_name = "${hivePrefix}_test_hive_statistic_clean"
sql """drop catalog if exists ${catalog_name};"""
sql """
create catalog if not exists ${catalog_name} properties (
'type'='hms',
- 'hadoop.username' = 'hadoop',
            'hive.metastore.uris' = 'thrift://${extHiveHmsHost}:${extHiveHmsPort}'
);
"""
@@ -68,6 +71,7 @@ suite("test_hive_statistic_clean", "p2,external,hive,external_remote,external_re
assertEquals(result[0][7], "1")
assertEquals(result[0][8], "7")
+ /*
sql """drop expired stats"""
result = sql """show column stats `statistics` (lo_quantity)"""
assertEquals(result.size(), 1)
@@ -101,6 +105,7 @@ suite("test_hive_statistic_clean", "p2,external,hive,external_remote,external_re
assertEquals(result[0][6], "4.0")
assertEquals(result[0][7], "1")
assertEquals(result[0][8], "7")
+ */
def ctlId
result = sql """show catalogs"""
@@ -111,8 +116,9 @@ suite("test_hive_statistic_clean", "p2,external,hive,external_remote,external_re
}
}
- sql """drop catalog ${catalog_name}"""
- sql """drop expired stats"""
+ // sql """drop catalog ${catalog_name}"""
+ // sql """drop expired stats"""
+ sql """drop stats `statistics`"""
        result = sql """select * from internal.__internal_schema.column_statistics where catalog_id=${ctlId}"""
assertEquals(result.size(), 0)
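Written out, the verification the suite now relies on reads the internal stats table directly (ctlId is resolved from `show catalogs` at runtime, so the literal below is a placeholder):

    SELECT * FROM internal.__internal_schema.column_statistics
    WHERE catalog_id = ${ctlId};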
diff --git a/regression-test/suites/external_table_p0/hive/test_mixed_par_locations.groovy b/regression-test/suites/external_table_p0/hive/test_mixed_par_locations.groovy
new file mode 100644
index 00000000000..39367877107
--- /dev/null
+++ b/regression-test/suites/external_table_p0/hive/test_mixed_par_locations.groovy
@@ -0,0 +1,57 @@
+// Licensed to the Apache Software Foundation (ASF) under one
+// or more contributor license agreements. See the NOTICE file
+// distributed with this work for additional information
+// regarding copyright ownership. The ASF licenses this file
+// to you under the Apache License, Version 2.0 (the
+// "License"); you may not use this file except in compliance
+// with the License. You may obtain a copy of the License at
+//
+// http://www.apache.org/licenses/LICENSE-2.0
+//
+// Unless required by applicable law or agreed to in writing,
+// software distributed under the License is distributed on an
+// "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
+// KIND, either express or implied. See the License for the
+// specific language governing permissions and limitations
+// under the License.
+
+suite("test_mixed_par_locations", "p0,external,hive,external_docker,external_docker_hive") {
+
+ def formats = ["_parquet", "_orc"]
+ def q1 = """select * from test_mixed_par_locationsSUFFIX order by id;"""
+ def q2 = """select count(id) from test_mixed_par_locationsSUFFIX;"""
+    def q3 = """select city, count(*) from test_mixed_par_locations_parquet where sex = 'male' group by city order by city;"""
+
+ String enabled = context.config.otherConfigs.get("enableHiveTest")
+ if (enabled == null || !enabled.equalsIgnoreCase("true")) {
+ logger.info("disable Hive test.")
+ return;
+ }
+ for (String hivePrefix : ["hive2", "hive3"]) {
+        String extHiveHmsHost = context.config.otherConfigs.get("externalEnvIp")
+        String extHiveHmsPort = context.config.otherConfigs.get(hivePrefix + "HmsPort")
+ String catalog_name = "${hivePrefix}_test_mixed_par_locations"
+
+ sql """drop catalog if exists ${catalog_name};"""
+ sql """
+ create catalog if not exists ${catalog_name} properties (
+ 'type'='hms',
+            'hive.metastore.uris' = 'thrift://${extHiveHmsHost}:${extHiveHmsPort}'
+ );
+ """
+ logger.info("catalog " + catalog_name + " created")
+ sql """switch ${catalog_name};"""
+ logger.info("switched to catalog " + catalog_name)
+ sql """use multi_catalog;"""
+ logger.info("use multi_catalog")
+
+ for (String format in formats) {
+ logger.info("Process format " + format)
+ qt_01 q1.replace("SUFFIX", format)
+ qt_02 q2.replace("SUFFIX", format)
+ qt_03 q3.replace("SUFFIX", format)
+ }
+ sql """drop catalog if exists ${catalog_name}"""
+ }
+}
+
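The catalog bootstrap this new suite performs, written as the plain SQL it sends (host and port are placeholders resolved from the docker environment at runtime):

    CREATE CATALOG IF NOT EXISTS hive2_test_mixed_par_locations PROPERTIES (
        'type' = 'hms',
        'hive.metastore.uris' = 'thrift://<externalEnvIp>:<hive2HmsPort>'
    );
    SWITCH hive2_test_mixed_par_locations;
    USE multi_catalog;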
diff --git a/regression-test/suites/external_table_p2/hive/test_truncate_char_or_varchar_columns.groovy b/regression-test/suites/external_table_p0/hive/test_truncate_char_or_varchar_columns.groovy
similarity index 88%
rename from regression-test/suites/external_table_p2/hive/test_truncate_char_or_varchar_columns.groovy
rename to regression-test/suites/external_table_p0/hive/test_truncate_char_or_varchar_columns.groovy
index b597e3d4573..88ba9afca62 100644
--- a/regression-test/suites/external_table_p2/hive/test_truncate_char_or_varchar_columns.groovy
+++ b/regression-test/suites/external_table_p0/hive/test_truncate_char_or_varchar_columns.groovy
@@ -15,12 +15,16 @@
// specific language governing permissions and limitations
// under the License.
-suite("test_truncate_char_or_varchar_columns", "p2,external,hive,external_remote,external_remote_hive") {
- String enabled = context.config.otherConfigs.get("enableExternalHiveTest")
- if (enabled != null && enabled.equalsIgnoreCase("true")) {
-        String extHiveHmsHost = context.config.otherConfigs.get("extHiveHmsHost")
-        String extHiveHmsPort = context.config.otherConfigs.get("extHiveHmsPort")
- String catalog_name = "test_truncate_char_or_varchar_columns"
+suite("test_truncate_char_or_varchar_columns", "p0,external,hive,external_docker,external_docker_hive") {
+ String enabled = context.config.otherConfigs.get("enableHiveTest")
+ if (enabled == null || !enabled.equalsIgnoreCase("true")) {
+ logger.info("disable Hive test.")
+ return;
+ }
+ for (String hivePrefix : ["hive2", "hive3"]) {
+        String extHiveHmsHost = context.config.otherConfigs.get("externalEnvIp")
+        String extHiveHmsPort = context.config.otherConfigs.get(hivePrefix + "HmsPort")
+        String catalog_name = "${hivePrefix}_test_truncate_char_or_varchar_columns"
sql """drop catalog if exists ${catalog_name};"""
sql """
diff --git a/regression-test/suites/external_table_p2/hive/test_mixed_par_locations.groovy b/regression-test/suites/external_table_p2/hive/test_mixed_par_locations.groovy
deleted file mode 100644
index c6ac330db48..00000000000
--- a/regression-test/suites/external_table_p2/hive/test_mixed_par_locations.groovy
+++ /dev/null
@@ -1,62 +0,0 @@
-// Licensed to the Apache Software Foundation (ASF) under one
-// or more contributor license agreements. See the NOTICE file
-// distributed with this work for additional information
-// regarding copyright ownership. The ASF licenses this file
-// to you under the Apache License, Version 2.0 (the
-// "License"); you may not use this file except in compliance
-// with the License. You may obtain a copy of the License at
-//
-// http://www.apache.org/licenses/LICENSE-2.0
-//
-// Unless required by applicable law or agreed to in writing,
-// software distributed under the License is distributed on an
-// "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
-// KIND, either express or implied. See the License for the
-// specific language governing permissions and limitations
-// under the License.
-
-suite("test_mixed_par_locations", "p2,external,hive,external_remote,external_remote_hive") {
-
- def formats = ["_parquet", "_orc"]
- def q1 = """select * from test_mixed_par_locationsSUFFIX order by id;"""
- def q2 = """select count(id) from test_mixed_par_locationsSUFFIX;"""
-    def q3 = """select city, count(*) from test_mixed_par_locations_parquet where sex = 'male' group by city order by city;"""
-
- String enabled = context.config.otherConfigs.get("enableExternalHiveTest")
- if (enabled != null && enabled.equalsIgnoreCase("true")) {
- try {
-            String extHiveHmsHost = context.config.otherConfigs.get("extHiveHmsHost")
-            String extHiveHmsPort = context.config.otherConfigs.get("extHiveHmsPort")
- String extAk = context.config.otherConfigs.get("extAk");
- String extSk = context.config.otherConfigs.get("extSk");
-            String extS3Endpoint = context.config.otherConfigs.get("extS3Endpoint");
-            String extS3Region = context.config.otherConfigs.get("extS3Region");
- String catalog_name = "test_mixed_par_locations"
-
- sql """drop catalog if exists ${catalog_name};"""
- sql """
- create catalog if not exists ${catalog_name} properties (
- 'type'='hms',
-                'hive.metastore.uris' = 'thrift://${extHiveHmsHost}:${extHiveHmsPort}',
- 'cos.access_key' = '${extAk}',
- 'cos.secret_key' = '${extSk}',
- 'cos.endpoint' = '${extS3Endpoint}'
- );
- """
- logger.info("catalog " + catalog_name + " created")
- sql """switch ${catalog_name};"""
- logger.info("switched to catalog " + catalog_name)
- sql """use multi_catalog;"""
- logger.info("use multi_catalog")
-
- for (String format in formats) {
- logger.info("Process format " + format)
- qt_01 q1.replace("SUFFIX", format)
- qt_02 q2.replace("SUFFIX", format)
- qt_03 q3.replace("SUFFIX", format)
- }
- sql """drop catalog if exists ${catalog_name}"""
- } finally {
- }
- }
-}