This is an automated email from the ASF dual-hosted git repository.
dataroaring pushed a commit to branch branch-3.0
in repository https://gitbox.apache.org/repos/asf/doris.git
The following commit(s) were added to refs/heads/branch-3.0 by this push:
new 39d7f9e93c3 branch-3.0: [fix](test) change s3 tables name to avoid conflict(#53433) (#53499)
39d7f9e93c3 is described below
commit 39d7f9e93c3950014be66e38b0e02153e620b79a
Author: Mingyu Chen (Rayner) <[email protected]>
AuthorDate: Thu Jul 17 19:08:19 2025 -0700
branch-3.0: [fix](test) change s3 tables name to avoid conflict(#53433) (#53499)
bp #53433
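The fix is the same as on master (#53433): the S3 Tables regression suites now bind their Iceberg table names to local variables carrying a branch-specific "_branch30" suffix, so concurrent regression runs from different branches no longer collide on the shared S3 Tables namespace. The write-insert suite also gains a file-size distribution query over the table's $data_files metadata, and the assertions on pre-existing tables are removed or commented out. A minimal sketch of the renaming pattern, assuming the suites' format_compression loop variable and a single illustrative column instead of the full all-types schema used in the diff below:

    // branch-suffixed table name, unique to branch-3.0 runs
    def all_types_table = "iceberg_all_types_${format_compression}_branch30"
    sql """ DROP TABLE IF EXISTS `${all_types_table}`; """
    sql """ CREATE TABLE `${all_types_table}` (`int_col` int) """   // real suites create the full schema
    sql """ INSERT INTO ${all_types_table} VALUES (1); """
    order_qt_q01 """ select * from ${all_types_table}; """
    sql """ DROP TABLE ${all_types_table}; """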
---
.../iceberg/test_s3tables_write_insert.out | Bin 41743 -> 41551 bytes
.../iceberg/test_s3tables_insert_overwrite.groovy | 42 +++++++-------
.../iceberg/test_s3tables_write_insert.groovy | 63 +++++++++++++--------
.../iceberg/test_s3tables_write_partitions.groovy | 26 +++++----
4 files changed, 75 insertions(+), 56 deletions(-)
diff --git a/regression-test/data/external_table_p2/iceberg/test_s3tables_write_insert.out b/regression-test/data/external_table_p2/iceberg/test_s3tables_write_insert.out
index a0fcbd6816d..710fc368216 100644
Binary files a/regression-test/data/external_table_p2/iceberg/test_s3tables_write_insert.out and b/regression-test/data/external_table_p2/iceberg/test_s3tables_write_insert.out differ
diff --git a/regression-test/suites/external_table_p2/iceberg/test_s3tables_insert_overwrite.groovy b/regression-test/suites/external_table_p2/iceberg/test_s3tables_insert_overwrite.groovy
index 89c667719d8..dde476decfb 100644
--- a/regression-test/suites/external_table_p2/iceberg/test_s3tables_insert_overwrite.groovy
+++ b/regression-test/suites/external_table_p2/iceberg/test_s3tables_insert_overwrite.groovy
@@ -22,9 +22,11 @@ suite("test_s3tables_insert_overwrite", "p0,external,iceberg,external_docker,ext
def parts = format_compression.split("_")
def format = parts[0]
def compression = parts[1]
- sql """ DROP TABLE IF EXISTS
`iceberg_overwrite_all_types_${format_compression}`; """
+ def all_types_table =
"iceberg_overwrite_all_types_${format_compression}_branch30"
+ def all_types_partition_table =
"iceberg_overwrite_types_par_${format_compression}_branch30"
+ sql """ DROP TABLE IF EXISTS `${all_types_table}`; """
sql """
- CREATE TABLE `iceberg_overwrite_all_types_${format_compression}`(
+ CREATE TABLE `${all_types_table}`(
`boolean_col` boolean,
`int_col` int,
`bigint_col` bigint,
@@ -82,7 +84,7 @@ suite("test_s3tables_insert_overwrite", "p0,external,iceberg,external_docker,ext
"""
sql """
- INSERT OVERWRITE table iceberg_overwrite_all_types_${format_compression}
+ INSERT OVERWRITE table ${all_types_table}
VALUES (
1, -- boolean_col
2147483647, -- int_col
@@ -136,11 +138,11 @@ suite("test_s3tables_insert_overwrite", "p0,external,iceberg,external_docker,ext
20240320 -- dt
);
"""
- order_qt_q01 """ select * from
iceberg_overwrite_all_types_${format_compression};
+ order_qt_q01 """ select * from ${all_types_table};
"""
sql """
- INSERT OVERWRITE table iceberg_overwrite_all_types_${format_compression}
+ INSERT OVERWRITE table ${all_types_table}
VALUES (
1, -- boolean_col
2147483647, -- int_col
@@ -298,11 +300,11 @@ suite("test_s3tables_insert_overwrite", "p0,external,iceberg,external_docker,ext
20240322 -- dt
);
"""
- order_qt_q02 """ select * from
iceberg_overwrite_all_types_${format_compression};
+ order_qt_q02 """ select * from ${all_types_table};
"""
sql """
- INSERT OVERWRITE table iceberg_overwrite_all_types_${format_compression}(float_col, t_map_int, t_ARRAY_decimal_precision_8, t_ARRAY_string_starting_with_nulls)
+ INSERT OVERWRITE table ${all_types_table}(float_col, t_map_int, t_ARRAY_decimal_precision_8, t_ARRAY_string_starting_with_nulls)
VALUES (
CAST(123.45 AS FLOAT), -- float_col
MAP(1, 10), -- t_map_int
@@ -310,19 +312,21 @@ suite("test_s3tables_insert_overwrite", "p0,external,iceberg,external_docker,ext
ARRAY(null, 'value1', 'value2') -- t_ARRAY_string_starting_with_nulls
);
"""
- order_qt_q03 """ select * from
iceberg_overwrite_all_types_${format_compression};
+ order_qt_q03 """ select * from ${all_types_table};
"""
- sql """ DROP TABLE iceberg_overwrite_all_types_${format_compression};
"""
+ sql """ DROP TABLE ${all_types_table}; """
}
def q03 = { String format_compression, String catalog_name ->
def parts = format_compression.split("_")
def format = parts[0]
def compression = parts[1]
- sql """ DROP TABLE IF EXISTS
`iceberg_overwrite_types_par_${format_compression}`; """
+ def all_types_table =
"iceberg_overwrite_all_types_${format_compression}_branch30"
+ def all_types_partition_table =
"iceberg_overwrite_types_par_${format_compression}_branch30"
+ sql """ DROP TABLE IF EXISTS `${all_types_partition_table}`; """
sql """
- CREATE TABLE `iceberg_overwrite_types_par_${format_compression}`(
+ CREATE TABLE `${all_types_partition_table}`(
`boolean_col` boolean,
`int_col` int,
`bigint_col` bigint,
@@ -381,7 +385,7 @@ suite("test_s3tables_insert_overwrite", "p0,external,iceberg,external_docker,ext
"""
sql """
- INSERT OVERWRITE TABLE iceberg_overwrite_types_par_${format_compression}
+ INSERT OVERWRITE TABLE ${all_types_partition_table}
VALUES (
1, -- boolean_col
2147483647, -- int_col
@@ -435,11 +439,11 @@ suite("test_s3tables_insert_overwrite", "p0,external,iceberg,external_docker,ext
20240320 -- dt
);
"""
- order_qt_q01 """ select * from
iceberg_overwrite_types_par_${format_compression};
+ order_qt_q01 """ select * from ${all_types_partition_table};
"""
sql """
- INSERT OVERWRITE TABLE iceberg_overwrite_types_par_${format_compression}
+ INSERT OVERWRITE TABLE ${all_types_partition_table}
VALUES (
1, -- boolean_col
2147483647, -- int_col
@@ -597,11 +601,11 @@ suite("test_s3tables_insert_overwrite", "p0,external,iceberg,external_docker,ext
20240322 -- dt
);
"""
- order_qt_q02 """ select * from
iceberg_overwrite_types_par_${format_compression};
+ order_qt_q02 """ select * from ${all_types_partition_table};
"""
sql """
- INSERT OVERWRITE TABLE iceberg_overwrite_types_par_${format_compression}(float_col, t_map_int, t_ARRAY_decimal_precision_8, t_ARRAY_string_starting_with_nulls, dt)
+ INSERT OVERWRITE TABLE ${all_types_partition_table}(float_col, t_map_int, t_ARRAY_decimal_precision_8, t_ARRAY_string_starting_with_nulls, dt)
VALUES (
123.45, -- float_col
MAP(1, 10), -- t_map_int
@@ -610,10 +614,10 @@ suite("test_s3tables_insert_overwrite", "p0,external,iceberg,external_docker,ext
20240321 -- dt
);
"""
- order_qt_q03 """ select * from
iceberg_overwrite_types_par_${format_compression};
+ order_qt_q03 """ select * from ${all_types_partition_table};
"""
- sql """ DROP TABLE iceberg_overwrite_types_par_${format_compression};
"""
+ sql """ DROP TABLE ${all_types_partition_table}; """
}
String enabled = context.config.otherConfigs.get("enableExternalIcebergTest")
@@ -634,8 +638,6 @@ suite("test_s3tables_insert_overwrite", "p0,external,iceberg,external_docker,ext
sql """ switch ${catalog_name};"""
sql """ use my_namespace;"""
sql """ set enable_fallback_to_original_planner=false """
- def tables = sql """ show tables; """
- assertTrue(tables.size() > 0)
try {
for (String format_compression in format_compressions) {
diff --git a/regression-test/suites/external_table_p2/iceberg/test_s3tables_write_insert.groovy b/regression-test/suites/external_table_p2/iceberg/test_s3tables_write_insert.groovy
index 9db977fdfa2..670b3e11206 100644
--- a/regression-test/suites/external_table_p2/iceberg/test_s3tables_write_insert.groovy
+++ b/regression-test/suites/external_table_p2/iceberg/test_s3tables_write_insert.groovy
@@ -22,9 +22,11 @@ suite("test_s3tables_write_insert", "p2,external,iceberg,external_remote,externa
def parts = format_compression.split("_")
def format = parts[0]
def compression = parts[1]
- sql """ DROP TABLE IF EXISTS
`iceberg_all_types_${format_compression}`; """
+ def all_types_table =
"iceberg_all_types_${format_compression}_branch30"
+ def all_types_partition_table =
"iceberg_all_types_par_${format_compression}_branch30"
+ sql """ DROP TABLE IF EXISTS `${all_types_table}`; """
sql """
- CREATE TABLE `iceberg_all_types_${format_compression}`(
+ CREATE TABLE `${all_types_table}`(
`boolean_col` boolean,
`int_col` int,
`bigint_col` bigint,
@@ -82,7 +84,7 @@ suite("test_s3tables_write_insert", "p2,external,iceberg,external_remote,externa
"""
sql """
- INSERT INTO iceberg_all_types_${format_compression}
+ INSERT INTO ${all_types_table}
VALUES (
1, -- boolean_col
2147483647, -- int_col
@@ -136,11 +138,11 @@ suite("test_s3tables_write_insert", "p2,external,iceberg,external_remote,externa
20240320 -- dt
);
"""
- order_qt_q01 """ select * from iceberg_all_types_${format_compression};
+ order_qt_q01 """ select * from ${all_types_table};
"""
sql """
- INSERT INTO iceberg_all_types_${format_compression}
+ INSERT INTO ${all_types_table}
VALUES (
1, -- boolean_col
2147483647, -- int_col
@@ -298,11 +300,11 @@ suite("test_s3tables_write_insert", "p2,external,iceberg,external_remote,externa
20240322 -- dt
);
"""
- order_qt_q02 """ select * from iceberg_all_types_${format_compression};
+ order_qt_q02 """ select * from ${all_types_table};
"""
sql """
- INSERT INTO iceberg_all_types_${format_compression}(float_col, t_map_int, t_ARRAY_decimal_precision_8, t_ARRAY_string_starting_with_nulls)
+ INSERT INTO ${all_types_table}(float_col, t_map_int, t_ARRAY_decimal_precision_8, t_ARRAY_string_starting_with_nulls)
VALUES (
CAST(123.45 AS FLOAT), -- float_col
MAP(1, 10), -- t_map_int
@@ -310,19 +312,21 @@ suite("test_s3tables_write_insert", "p2,external,iceberg,external_remote,externa
ARRAY(null, 'value1', 'value2') -- t_ARRAY_string_starting_with_nulls
);
"""
- order_qt_q03 """ select * from iceberg_all_types_${format_compression};
+ order_qt_q03 """ select * from ${all_types_table};
"""
- sql """ DROP TABLE iceberg_all_types_${format_compression}; """
+ sql """ DROP TABLE ${all_types_table}; """
}
def q03 = { String format_compression, String catalog_name ->
def parts = format_compression.split("_")
def format = parts[0]
def compression = parts[1]
- sql """ DROP TABLE IF EXISTS
`iceberg_all_types_par_${format_compression}`; """
+ def all_types_table =
"iceberg_all_types_${format_compression}_branch30"
+ def all_types_partition_table =
"iceberg_all_types_par_${format_compression}_branch30"
+ sql """ DROP TABLE IF EXISTS `${all_types_partition_table}`; """
sql """
- CREATE TABLE `iceberg_all_types_par_${format_compression}`(
+ CREATE TABLE `${all_types_partition_table}`(
`boolean_col` boolean,
`int_col` int,
`bigint_col` bigint,
@@ -381,7 +385,7 @@ suite("test_s3tables_write_insert", "p2,external,iceberg,external_remote,externa
"""
sql """
- INSERT INTO iceberg_all_types_par_${format_compression}
+ INSERT INTO ${all_types_partition_table}
VALUES (
1, -- boolean_col
2147483647, -- int_col
@@ -435,11 +439,11 @@ suite("test_s3tables_write_insert", "p2,external,iceberg,external_remote,externa
20240320 -- dt
);
"""
- order_qt_q01 """ select * from
iceberg_all_types_par_${format_compression};
+ order_qt_q01 """ select * from ${all_types_partition_table};
"""
sql """
- INSERT INTO iceberg_all_types_par_${format_compression}
+ INSERT INTO ${all_types_partition_table}
VALUES (
1, -- boolean_col
2147483647, -- int_col
@@ -597,11 +601,11 @@ suite("test_s3tables_write_insert", "p2,external,iceberg,external_remote,externa
20240322 -- dt
);
"""
- order_qt_q02 """ select * from
iceberg_all_types_par_${format_compression};
+ order_qt_q02 """ select * from ${all_types_partition_table};
"""
sql """
- INSERT INTO iceberg_all_types_par_${format_compression}(float_col, t_map_int, t_ARRAY_decimal_precision_8, t_ARRAY_string_starting_with_nulls, dt)
+ INSERT INTO ${all_types_partition_table}(float_col, t_map_int, t_ARRAY_decimal_precision_8, t_ARRAY_string_starting_with_nulls, dt)
VALUES (
123.45, -- float_col
MAP(1, 10), -- t_map_int
@@ -610,10 +614,27 @@ suite("test_s3tables_write_insert", "p2,external,iceberg,external_remote,externa
20240321 -- dt
);
"""
- order_qt_q03 """ select * from
iceberg_all_types_par_${format_compression};
+ order_qt_q03 """ select * from ${all_types_partition_table};
"""
- sql """ DROP TABLE iceberg_all_types_par_${format_compression}; """
+ // just test
+ sql """
+ SELECT
+ CASE
+ WHEN file_size_in_bytes BETWEEN 0 AND 8 * 1024 * 1024 THEN '0-8M'
+ WHEN file_size_in_bytes BETWEEN 8 * 1024 * 1024 + 1 AND 32 * 1024 * 1024 THEN '8-32M'
+ WHEN file_size_in_bytes BETWEEN 2 * 1024 * 1024 + 1 AND 128 * 1024 * 1024 THEN '32-128M'
+ WHEN file_size_in_bytes BETWEEN 128 * 1024 * 1024 + 1 AND 512 * 1024 * 1024 THEN '128-512M'
+ WHEN file_size_in_bytes > 512 * 1024 * 1024 THEN '> 512M'
+ ELSE 'Unknown'
+ END AS SizeRange,
+ COUNT(*) AS FileNum
+ FROM ${all_types_partition_table}\$data_files
+ GROUP BY
+ SizeRange;
+ """
+
+ sql """ DROP TABLE ${all_types_partition_table}; """
}
String enabled = context.config.otherConfigs.get("enableExternalIcebergTest")
@@ -634,12 +655,6 @@ suite("test_s3tables_write_insert", "p2,external,iceberg,external_remote,externa
sql """ switch ${catalog_name};"""
sql """ use my_namespace;"""
sql """ set enable_fallback_to_original_planner=false """
- def tables = sql """ show tables; """
- assertTrue(tables.size() > 0)
-
- // 1. test querying existing tables
- qt_sql01 """select * from my_table order by id;"""
- qt_sql01 """select * from partitioned_table order by ts;"""
try {
for (String format_compression in format_compressions) {
diff --git a/regression-test/suites/external_table_p2/iceberg/test_s3tables_write_partitions.groovy b/regression-test/suites/external_table_p2/iceberg/test_s3tables_write_partitions.groovy
index 143eba25a16..1e25eba10a3 100644
--- a/regression-test/suites/external_table_p2/iceberg/test_s3tables_write_partitions.groovy
+++ b/regression-test/suites/external_table_p2/iceberg/test_s3tables_write_partitions.groovy
@@ -22,9 +22,11 @@ suite("test_s3tables_write_partitions", "p0,external,iceberg,external_docker,ext
def parts = format_compression.split("_")
def format = parts[0]
def compression = parts[1]
- sql """ drop table if exists
s3_columns_out_of_order_source_tbl_${format_compression} """
+ def source_tbl =
"s3_columns_out_of_order_source_tbl_${format_compression}_branch30"
+ def target_tbl =
"s3_columns_out_of_order_target_tbl_${format_compression}_branch30"
+ sql """ drop table if exists ${source_tbl} """
sql """
- CREATE TABLE s3_columns_out_of_order_source_tbl_${format_compression} (
+ CREATE TABLE ${source_tbl} (
`col3` bigint,
`col6` int,
`col1` bigint,
@@ -37,9 +39,9 @@ suite("test_s3tables_write_partitions", "p0,external,iceberg,external_docker,ext
"write-format"=${format}
)
""";
- sql """ drop table if exists
s3_columns_out_of_order_target_tbl_${format_compression} """
+ sql """ drop table if exists ${target_tbl}"""
sql """
- CREATE TABLE s3_columns_out_of_order_target_tbl_${format_compression} (
+ CREATE TABLE ${target_tbl} (
`col1` bigint,
`col2` bigint,
`col3` bigint,
@@ -57,22 +59,22 @@ suite("test_s3tables_write_partitions", "p0,external,iceberg,external_docker,ext
""";
sql """
- INSERT INTO s3_columns_out_of_order_source_tbl_${format_compression} (
+ INSERT INTO ${source_tbl} (
col1, col2, col3, col4, col5, col6
) VALUES (1, 2, 3, 4, 5, 6);
"""
- order_qt_columns_out_of_order01 """ SELECT * FROM s3_columns_out_of_order_source_tbl_${format_compression} """
+ order_qt_columns_out_of_order01 """ SELECT * FROM ${source_tbl} """
sql """
- INSERT INTO s3_columns_out_of_order_target_tbl_${format_compression} (
+ INSERT INTO ${target_tbl} (
col1, col2, col3, col4, col5, col6
) VALUES (1, 2, 3, 4, 5, 6);
"""
- order_qt_columns_out_of_order02 """ SELECT * FROM s3_columns_out_of_order_target_tbl_${format_compression} """
+ order_qt_columns_out_of_order02 """ SELECT * FROM ${target_tbl} """
- sql """ drop table
s3_columns_out_of_order_source_tbl_${format_compression} """
- sql """ drop table
s3_columns_out_of_order_target_tbl_${format_compression} """
+ sql """ drop table ${source_tbl} """
+ sql """ drop table ${target_tbl} """
sql """ drop database if exists `test_s3_columns_out_of_order` """;
}
@@ -94,8 +96,8 @@ suite("test_s3tables_write_partitions", "p0,external,iceberg,external_docker,ext
sql """ switch ${catalog_name};"""
sql """ use my_namespace;"""
sql """ set enable_fallback_to_original_planner=false """
- def tables = sql """ show tables; """
- assertTrue(tables.size() > 0)
+ // def tables = sql """ show tables; """
+ // assertTrue(tables.size() > 0)
try {
for (String format_compression in format_compressions) {
---------------------------------------------------------------------
To unsubscribe, e-mail: [email protected]
For additional commands, e-mail: [email protected]