This is an automated email from the ASF dual-hosted git repository.

morningman pushed a commit to branch master
in repository https://gitbox.apache.org/repos/asf/doris.git


The following commit(s) were added to refs/heads/master by this push:
     new dac5efec7d0 [fix](test) fix unstable hive test (#56018)
dac5efec7d0 is described below

commit dac5efec7d02f14846fa457437ddabd393f5943b
Author: Mingyu Chen (Rayner) <[email protected]>
AuthorDate: Sun Sep 14 16:57:06 2025 -0700

    [fix](test) fix unstable hive test (#56018)
---
 .../hive/test_hive_basic_type.out                  | Bin 4557927 -> 4557909 bytes
 .../hive/test_hive_basic_type.groovy               |  22 ++++++++++-----------
 2 files changed, 11 insertions(+), 11 deletions(-)

diff --git a/regression-test/data/external_table_p0/hive/test_hive_basic_type.out b/regression-test/data/external_table_p0/hive/test_hive_basic_type.out
index 67f4c6a12a4..55271160387 100644
Binary files a/regression-test/data/external_table_p0/hive/test_hive_basic_type.out and b/regression-test/data/external_table_p0/hive/test_hive_basic_type.out differ
diff --git a/regression-test/suites/external_table_p0/hive/test_hive_basic_type.groovy b/regression-test/suites/external_table_p0/hive/test_hive_basic_type.groovy
index 4e258a30318..fda250fbce8 100644
--- a/regression-test/suites/external_table_p0/hive/test_hive_basic_type.groovy
+++ b/regression-test/suites/external_table_p0/hive/test_hive_basic_type.groovy
@@ -45,12 +45,12 @@ suite("test_hive_basic_type", "external_docker,hive,external_docker_hive,p0,exte
             sql """switch ${catalog_name}"""
 
             def res_dbs_log = sql "show databases;"
-            for (int i = 0; i < res_dbs_log.size(); i++) {
-                def tbs = sql "show tables from  `${res_dbs_log[i][0]}`"
-                log.info("database = ${res_dbs_log[i][0]} => tables = " + tbs.toString())
-            }
+            // for (int i = 0; i < res_dbs_log.size(); i++) {
+            //     def tbs = sql "show tables from  `${res_dbs_log[i][0]}`"
+            //     log.info("database = ${res_dbs_log[i][0]} => tables = " + tbs.toString())
+            // }
             try {
-                order_qt_2 """select * from ${catalog_name}.${ex_db_name}.parquet_partition_table order by l_orderkey limit 1;"""
+                order_qt_2 """select * from ${catalog_name}.${ex_db_name}.parquet_partition_table order by l_orderkey,l_partkey limit 1;"""
                 order_qt_3 """select * from ${catalog_name}.${ex_db_name}.parquet_delta_binary_packed order by int_value limit 1;"""
                 order_qt_4 """select * from ${catalog_name}.${ex_db_name}.parquet_alltypes_tiny_pages  order by id desc  limit 5;"""
                 order_qt_5 """select * from ${catalog_name}.${ex_db_name}.orc_all_types_partition order by bigint_col desc limit 3;"""
@@ -148,12 +148,12 @@ suite("test_hive_basic_type", "external_docker,hive,external_docker_hive,p0,exte
                 order_qt_parquet11 """ select decimals from ${catalog_name}.${ex_db_name}.parquet_decimal_bool 
                                        where decimals is not null and decimals > 1  order by decimals limit 7 """
             } finally {
-                res_dbs_log = sql "show databases;"
-                for (int i = 0; i < res_dbs_log.size(); i++) {
-                    def tbs = sql "show tables from  `${res_dbs_log[i][0]}`"
-                    log
-                            .info("database = ${res_dbs_log[i][0]} => tables  =  " + tbs.toString())
-                }
+                // res_dbs_log = sql "show databases;"
+                // for (int i = 0; i < res_dbs_log.size(); i++) {
+                //     def tbs = sql "show tables from  `${res_dbs_log[i][0]}`"
+                //     log
+                //             .info("database = ${res_dbs_log[i][0]} => tables  =  " + tbs.toString())
+                // }
             }
             //sql """drop catalog if exists ${catalog_name} """
         }


---------------------------------------------------------------------
To unsubscribe, e-mail: [email protected]
For additional commands, e-mail: [email protected]

Reply via email to