This is an automated email from the ASF dual-hosted git repository.

dataroaring pushed a commit to branch branch-3.0
in repository https://gitbox.apache.org/repos/asf/doris.git


The following commit(s) were added to refs/heads/branch-3.0 by this push:
     new e1fe8369175 [regression-test](framework) fix global var not report a case failure in teamcity (#51250)
e1fe8369175 is described below

commit e1fe8369175363827c1923207e967456bcf0befb
Author: shuke <[email protected]>
AuthorDate: Fri May 30 17:33:24 2025 +0800

    [regression-test](framework) fix global var not report a case failure in teamcity (#51250)
    
    …amcity
    
    pick #50482
---
 .../org/apache/doris/regression/util/LoggerUtils.groovy    | 14 ++++++++++----
 .../suites/account_p0/test_property_session.groovy         |  4 ++--
 regression-test/suites/auth_p0/test_catalogs_auth.groovy   |  4 ++--
 regression-test/suites/auth_p0/test_mtmv_auth.groovy       |  4 ++--
 regression-test/suites/auth_p0/test_partitions_auth.groovy |  4 ++--
 regression-test/suites/auth_p0/test_query_tvf_auth.groovy  |  4 ++--
 .../suites/auth_p0/test_select_count_auth.groovy           |  6 +++---
 .../test_insert_overwrite_recover_no_partition.groovy      |  2 +-
 .../test_array_contains_with_inverted_index.groovy         |  4 ++--
 .../suites/metrics_p0/test_delete_bitmap_metrics.groovy    | 12 ++++++------
 .../suites/query_profile/test_execute_by_frontend.groovy   |  2 +-
 .../suites/rollup/test_materialized_view_bitmap.groovy     |  2 +-
 .../suites/rollup/test_materialized_view_hll.groovy        |  2 +-
 .../rollup/test_materialized_view_hll_with_light_sc.groovy |  2 +-
 .../suites/workload_manager_p0/test_curd_wlg.groovy        |  8 ++++----
 15 files changed, 40 insertions(+), 34 deletions(-)

diff --git a/regression-test/framework/src/main/groovy/org/apache/doris/regression/util/LoggerUtils.groovy b/regression-test/framework/src/main/groovy/org/apache/doris/regression/util/LoggerUtils.groovy
index bcb04e8e9ea..266d6628631 100644
--- a/regression-test/framework/src/main/groovy/org/apache/doris/regression/util/LoggerUtils.groovy
+++ b/regression-test/framework/src/main/groovy/org/apache/doris/regression/util/LoggerUtils.groovy
@@ -22,7 +22,11 @@ import com.google.common.collect.Sets
 class LoggerUtils {
     static Tuple2<Integer, String> getErrorInfo(Throwable t, File file) {
         if (file.name.endsWith(".groovy")) {
-            def st = findRootErrorStackTrace(t, Sets.newLinkedHashSet(), file)
+            // to disable global variables, we've added some content to the bottom of the groovy file
+            // so if st.getLineNumber > fileLineCt, continue to seek the original line.
+            def fileLineCt = file.readLines().size()
+
+            def st = findRootErrorStackTrace(t, Sets.newLinkedHashSet(), file, fileLineCt)
             int lineNumber = -1
             if (!st.is(null)) {
                 lineNumber = st.getLineNumber()
@@ -41,12 +45,12 @@ class LoggerUtils {
         }
     }
 
-    static StackTraceElement findRootErrorStackTrace(Throwable t, Set<Throwable> throwables, File file) {
+    static StackTraceElement findRootErrorStackTrace(Throwable t, Set<Throwable> throwables, File file, int fileLineCt) {
         throwables.add(t)
 
         def cause = t.getCause()
         if (!cause.is(null) && !throwables.contains(cause)) {
-            def foundStackTrace = findRootErrorStackTrace(cause, throwables, file)
+            def foundStackTrace = findRootErrorStackTrace(cause, throwables, file, fileLineCt)
             if (!foundStackTrace.is(null)) {
                 return foundStackTrace
             }
@@ -54,7 +58,9 @@ class LoggerUtils {
 
         for (def st : t.getStackTrace()) {
             if (Objects.equals(st.fileName, file.name)) {
-                return st
+                if (st.getLineNumber() < fileLineCt) {
+                    return st
+                }
             }
         }
         return null
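
Why the extra fileLineCt parameter: the framework appends extra statements to
the bottom of each suite file (the global-variable check) before compiling it,
so stack frames thrown from that appended code carry line numbers larger than
the on-disk file and should not be reported as the suite's failing line. A
minimal standalone sketch of the idea, with hypothetical names (not the
framework's exact code):

    // return the first frame whose line number lies inside the original file
    static StackTraceElement frameInOriginalFile(Throwable t, String fileName, int fileLineCt) {
        for (StackTraceElement st : t.getStackTrace()) {
            if (st.fileName == fileName && st.lineNumber < fileLineCt) {
                return st
            }
        }
        return null
    }
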
diff --git a/regression-test/suites/account_p0/test_property_session.groovy b/regression-test/suites/account_p0/test_property_session.groovy
index 57b2dad747b..9715fe1d89b 100644
--- a/regression-test/suites/account_p0/test_property_session.groovy
+++ b/regression-test/suites/account_p0/test_property_session.groovy
@@ -32,7 +32,7 @@ suite("test_property_session") {
         sql """GRANT USAGE_PRIV ON CLUSTER `${validCluster}` TO ${userName}""";
     }
     sql """GRANT select_PRIV ON *.*.* TO ${userName}""";
-    connect(user=userName, password="${pwd}", url=context.config.jdbcUrl) {
+    connect(userName, "${pwd}", context.config.jdbcUrl) {
         sql """
               set query_timeout=1;
           """
@@ -46,7 +46,7 @@ suite("test_property_session") {
 
     // the priority of property should be higher than session
     sql """set property for '${userName}' 'query_timeout' = '10';"""
-    connect(user=userName, password="${pwd}", url=context.config.jdbcUrl) {
+    connect(userName, "${pwd}", context.config.jdbcUrl) {
         sql """
             select sleep(3);
         """
diff --git a/regression-test/suites/auth_p0/test_catalogs_auth.groovy b/regression-test/suites/auth_p0/test_catalogs_auth.groovy
index 1b67282d8fe..ec9b64f0be4 100644
--- a/regression-test/suites/auth_p0/test_catalogs_auth.groovy
+++ b/regression-test/suites/auth_p0/test_catalogs_auth.groovy
@@ -41,7 +41,7 @@ suite("test_catalogs_auth","p0,auth") {
 
     sql """grant select_priv on regression_test to ${user}"""
 
-    connect(user=user, password="${pwd}", url=context.config.jdbcUrl) {
+    connect(user, "${pwd}", context.config.jdbcUrl) {
         def showRes = sql """show catalogs;"""
         logger.info("showRes: " + showRes.toString())
         assertFalse(showRes.toString().contains("${catalogName}"))
@@ -53,7 +53,7 @@ suite("test_catalogs_auth","p0,auth") {
 
     sql """grant select_priv on ${catalogName}.*.* to ${user}"""
 
-    connect(user=user, password="${pwd}", url=context.config.jdbcUrl) {
+    connect(user, "${pwd}", context.config.jdbcUrl) {
         def showRes = sql """show catalogs;"""
         logger.info("showRes: " + showRes.toString())
         assertTrue(showRes.toString().contains("${catalogName}"))
diff --git a/regression-test/suites/auth_p0/test_mtmv_auth.groovy b/regression-test/suites/auth_p0/test_mtmv_auth.groovy
index a190edaa022..11934aed7c3 100644
--- a/regression-test/suites/auth_p0/test_mtmv_auth.groovy
+++ b/regression-test/suites/auth_p0/test_mtmv_auth.groovy
@@ -63,7 +63,7 @@ suite("test_mtmv_auth","p0,auth") {
 
     sql """grant select_priv on regression_test to ${user}"""
 
-    connect(user=user, password="${pwd}", url=context.config.jdbcUrl) {
+    connect(user, "${pwd}", context.config.jdbcUrl) {
         def mvsRes = sql """select * from mv_infos("database"="${dbName}");"""
         logger.info("mvsRes: " + mvsRes.toString())
         assertFalse(mvsRes.toString().contains("${mvName}"))
@@ -80,7 +80,7 @@ suite("test_mtmv_auth","p0,auth") {
 
     sql """grant select_priv on ${dbName}.${mvName} to ${user}"""
 
-    connect(user=user, password="${pwd}", url=context.config.jdbcUrl) {
+    connect(user, "${pwd}", context.config.jdbcUrl) {
        def mvsRes = sql """select * from mv_infos("database"="${dbName}");"""
        logger.info("mvsRes: " + mvsRes.toString())
        assertTrue(mvsRes.toString().contains("${mvName}"))
diff --git a/regression-test/suites/auth_p0/test_partitions_auth.groovy b/regression-test/suites/auth_p0/test_partitions_auth.groovy
index 1a398b84b4e..2406930671f 100644
--- a/regression-test/suites/auth_p0/test_partitions_auth.groovy
+++ b/regression-test/suites/auth_p0/test_partitions_auth.groovy
@@ -53,7 +53,7 @@ suite("test_partitions_auth","p0,auth") {
 
     sql """grant select_priv on regression_test to ${user}"""
 
-    connect(user=user, password="${pwd}", url=context.config.jdbcUrl) {
+    connect(user, "${pwd}", context.config.jdbcUrl) {
         test {
               sql """
                  show partitions from ${dbName}.${tableName};
@@ -70,7 +70,7 @@ suite("test_partitions_auth","p0,auth") {
 
     sql """grant select_priv on ${dbName}.${tableName} to ${user}"""
 
-    connect(user=user, password="${pwd}", url=context.config.jdbcUrl) {
+    connect(user, "${pwd}", context.config.jdbcUrl) {
         sql """
              show partitions from ${dbName}.${tableName};
           """
diff --git a/regression-test/suites/auth_p0/test_query_tvf_auth.groovy b/regression-test/suites/auth_p0/test_query_tvf_auth.groovy
index 6353ca142a9..d46c9f10b6a 100644
--- a/regression-test/suites/auth_p0/test_query_tvf_auth.groovy
+++ b/regression-test/suites/auth_p0/test_query_tvf_auth.groovy
@@ -53,7 +53,7 @@ suite("test_query_tvf_auth", "p0,auth,external,external_docker") {
 
         sql """grant select_priv on regression_test to ${dorisuser}"""
 
-        connect(user=dorisuser, password="${dorispwd}", url=context.config.jdbcUrl) {
+        connect(dorisuser, "${dorispwd}", context.config.jdbcUrl) {
             test {
                   sql """
                      select * from query('catalog' = '${catalog_name}', 'query' = 'select * from doris_test.all_types');
@@ -62,7 +62,7 @@ suite("test_query_tvf_auth", "p0,auth,external,external_docker") {
             }
         }
         sql """grant select_priv on ${catalog_name}.*.* to ${dorisuser}"""
-        connect(user=dorisuser, password="${dorispwd}", url=context.config.jdbcUrl) {
+        connect(dorisuser, "${dorispwd}", context.config.jdbcUrl) {
           sql """
             select * from query('catalog' = '${catalog_name}', 'query' = 'select * from doris_test.all_types');
           """
diff --git a/regression-test/suites/auth_p0/test_select_count_auth.groovy b/regression-test/suites/auth_p0/test_select_count_auth.groovy
index 47a199aaca2..859aa5b1372 100644
--- a/regression-test/suites/auth_p0/test_select_count_auth.groovy
+++ b/regression-test/suites/auth_p0/test_select_count_auth.groovy
@@ -34,7 +34,7 @@ suite("test_select_count_auth","p0,auth") {
 
     sql """grant select_priv on regression_test to ${user}"""
 
-    connect(user=user, password="${pwd}", url=context.config.jdbcUrl) {
+    connect(user, "${pwd}", context.config.jdbcUrl) {
         test {
               sql """
                   select count(*) from __internal_schema.audit_log;
@@ -57,7 +57,7 @@ suite("test_select_count_auth","p0,auth") {
 
     sql """grant select_priv(query_id) on __internal_schema.audit_log to 
${user}"""
 
-    connect(user=user, password="${pwd}", url=context.config.jdbcUrl) {
+    connect(user, "${pwd}", context.config.jdbcUrl) {
         test {
               sql """
                   select count(*) from __internal_schema.audit_log;
@@ -77,7 +77,7 @@ suite("test_select_count_auth","p0,auth") {
 
     sql """grant select_priv on __internal_schema.audit_log to ${user}"""
 
-    connect(user=user, password="${pwd}", url=context.config.jdbcUrl) {
+    connect(user, "${pwd}", context.config.jdbcUrl) {
         sql """
               select count(*) from __internal_schema.audit_log;
           """
diff --git a/regression-test/suites/catalog_recycle_bin_p0/test_insert_overwrite_recover_no_partition.groovy b/regression-test/suites/catalog_recycle_bin_p0/test_insert_overwrite_recover_no_partition.groovy
index f3123d71b8d..d3398e7af59 100644
--- a/regression-test/suites/catalog_recycle_bin_p0/test_insert_overwrite_recover_no_partition.groovy
+++ b/regression-test/suites/catalog_recycle_bin_p0/test_insert_overwrite_recover_no_partition.groovy
@@ -52,7 +52,7 @@ suite("test_insert_overwrite_recover_no_partition") {
     sql """ recover partition ${table} as p2  from ${table}; """
 
     // create a table to copy the data only for partition p2.
-    table_bk = "test_insert_overwrite_recover_no_partition_backup"
+    def table_bk = "test_insert_overwrite_recover_no_partition_backup"
     sql """ drop table if exists ${table_bk} force"""
     sql """
     create table ${table_bk} (
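
Most of the remaining suite changes follow the same pattern: in a Groovy
script, a bare assignment such as `table_bk = "..."` does not declare a local
variable, it writes into the script binding and so becomes a global shared with
the rest of the run, which is the condition the framework now treats as a case
failure; adding `def` keeps the variable local. A tiny standalone illustration
(not framework code):

    table_bk = "backup_t"        // no def: goes into the script binding (a global)
    def table_ok = "backup_t"    // def: local to the script, not in the binding

    assert binding.hasVariable('table_bk')
    assert !binding.hasVariable('table_ok')
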
diff --git a/regression-test/suites/inverted_index_p0/test_array_contains_with_inverted_index.groovy b/regression-test/suites/inverted_index_p0/test_array_contains_with_inverted_index.groovy
index 0b1d92e441c..b8b9097cde1 100644
--- a/regression-test/suites/inverted_index_p0/test_array_contains_with_inverted_index.groovy
+++ b/regression-test/suites/inverted_index_p0/test_array_contains_with_inverted_index.groovy
@@ -85,7 +85,7 @@ suite("test_array_contains_with_inverted_index"){
     def param = [["\'s\'", "\'t\'"], [], null, ["\'s\'", "\'\'", "\'t\'"], ["\'s\'", null, "\'t\'"], [null, "\'\'"], ["\'s\'", null, "\'t\'", "\'\'"]] // null for arrays_overlap will return null which in predicate will lead to return empty set
 
     sql "set enable_common_expr_pushdown = true"
-    for (i = 0 ; i < param.size(); ++i) {
+    for (def i = 0 ; i < param.size(); ++i) {
         def p = param[i]
         log.info("param: ${p}")
         order_qt_sql """ select * from tai where arrays_overlap(inventors, 
${p}) order by id; """
@@ -99,7 +99,7 @@ suite("test_array_contains_with_inverted_index"){
         order_qt_sql """ select * from tai where (arrays_overlap(inventors, 
${p}) and apply_date = '2017-01-01') or apply_date = '2019-01-01' order by id; 
"""
     }
    sql "set enable_common_expr_pushdown = false"
-       for (i = 0 ; i < param.size(); ++i) {
+       for (def i = 0 ; i < param.size(); ++i) {
         def p = param[i]
         log.info("param: ${p}")
         order_qt_sql """ select * from tai where arrays_overlap(inventors, 
${p}) order by id; """
diff --git a/regression-test/suites/metrics_p0/test_delete_bitmap_metrics.groovy b/regression-test/suites/metrics_p0/test_delete_bitmap_metrics.groovy
index 9d0e9b2956b..3861586057a 100644
--- a/regression-test/suites/metrics_p0/test_delete_bitmap_metrics.groovy
+++ b/regression-test/suites/metrics_p0/test_delete_bitmap_metrics.groovy
@@ -72,9 +72,9 @@ suite("test_delete_bitmap_metrics", "p0") {
 
         String command = sb.toString()
         logger.info(command)
-        process = command.execute()
-        code = process.waitFor()
-        out = process.getText()
+        def process = command.execute()
+        def code = process.waitFor()
+        def out = process.getText()
         logger.info("Get local delete bitmap count status:  =" + code + ", 
out=" + out)
         assertEquals(code, 0)
         def deleteBitmapStatus = parseJson(out.trim())
@@ -90,9 +90,9 @@ suite("test_delete_bitmap_metrics", "p0") {
 
         String command = sb.toString()
         logger.info(command)
-        process = command.execute()
-        code = process.waitFor()
-        out = process.getText()
+        def process = command.execute()
+        def code = process.waitFor()
+        def out = process.getText()
         logger.info("Get ms delete bitmap count status:  =" + code + ", out=" 
+ out)
         assertEquals(code, 0)
         def deleteBitmapStatus = parseJson(out.trim())
diff --git a/regression-test/suites/query_profile/test_execute_by_frontend.groovy b/regression-test/suites/query_profile/test_execute_by_frontend.groovy
index 2b2d867f73b..c166dfdcee9 100644
--- a/regression-test/suites/query_profile/test_execute_by_frontend.groovy
+++ b/regression-test/suites/query_profile/test_execute_by_frontend.groovy
@@ -57,7 +57,7 @@ suite('test_execute_by_frontend') {
     sql "set enable_profile=true"
     def simpleSql1 = "select * from test_execute_by_frontend"
     sql "${simpleSql1}"
-    simpleSql2 = """select  cast("1"  as  Int)"""
+    def simpleSql2 = """select  cast("1"  as  Int)"""
     sql "${simpleSql2}"
     def isRecorded = false
     def wholeString = getProfileList()
diff --git a/regression-test/suites/rollup/test_materialized_view_bitmap.groovy b/regression-test/suites/rollup/test_materialized_view_bitmap.groovy
index 4e1ba749c50..2feaeb25837 100644
--- a/regression-test/suites/rollup/test_materialized_view_bitmap.groovy
+++ b/regression-test/suites/rollup/test_materialized_view_bitmap.groovy
@@ -33,7 +33,7 @@ suite("test_materialized_view_bitmap", "rollup") {
         """
 
     sql "CREATE MATERIALIZED VIEW test_neg as select 
k1,bitmap_union(to_bitmap(k2)), bitmap_union(to_bitmap(k3)) FROM ${tbName1} 
GROUP BY k1;"
-    max_try_secs = 60
+    def max_try_secs = 60
     while (max_try_secs--) {
         String res = getJobState(tbName1)
         if (res == "FINISHED" || res == "CANCELLED") {
diff --git a/regression-test/suites/rollup/test_materialized_view_hll.groovy b/regression-test/suites/rollup/test_materialized_view_hll.groovy
index a6b8b77444d..5409c0f147a 100644
--- a/regression-test/suites/rollup/test_materialized_view_hll.groovy
+++ b/regression-test/suites/rollup/test_materialized_view_hll.groovy
@@ -35,7 +35,7 @@ suite("test_materialized_view_hll", "rollup") {
         """
 
     sql "CREATE materialized VIEW amt_count AS SELECT store_id, 
hll_union(hll_hash(sale_amt)) FROM ${tbName1} GROUP BY store_id;"
-    max_try_secs = 60
+    def max_try_secs = 60
     while (max_try_secs--) {
         String res = getJobState(tbName1)
         if (res == "FINISHED" || res == "CANCELLED") {
diff --git a/regression-test/suites/rollup/test_materialized_view_hll_with_light_sc.groovy b/regression-test/suites/rollup/test_materialized_view_hll_with_light_sc.groovy
index 310b97bbdee..3a39dd30fe2 100644
--- a/regression-test/suites/rollup/test_materialized_view_hll_with_light_sc.groovy
+++ b/regression-test/suites/rollup/test_materialized_view_hll_with_light_sc.groovy
@@ -35,7 +35,7 @@ suite("test_materialized_view_hll_with_light_sc", "rollup") {
         """
 
     sql "CREATE materialized VIEW amt_count1 AS SELECT store_id, 
hll_union(hll_hash(sale_amt)) FROM ${tbName1} GROUP BY store_id;"
-    max_try_secs = 60
+    def max_try_secs = 60
     while (max_try_secs--) {
         String res = getJobState(tbName1)
         if (res == "FINISHED" || res == "CANCELLED") {
diff --git a/regression-test/suites/workload_manager_p0/test_curd_wlg.groovy b/regression-test/suites/workload_manager_p0/test_curd_wlg.groovy
index ea81ad5df33..ea972796de4 100644
--- a/regression-test/suites/workload_manager_p0/test_curd_wlg.groovy
+++ b/regression-test/suites/workload_manager_p0/test_curd_wlg.groovy
@@ -786,7 +786,7 @@ suite("test_crud_wlg") {
                 "    'enable_memory_overcommit'='true' " +
                 ");"
         sql "set workload_group=test_wg_metrics;"
-        wg = sql("select 
name,cpu_share,memory_limit,enable_memory_overcommit,max_concurrency,max_queue_size,queue_timeout,cpu_hard_limit,scan_thread_num,tag,read_bytes_per_second,remote_read_bytes_per_second
 from information_schema.workload_groups where name = 'test_wg_metrics' order 
by name;");
+        def wg = sql("select 
name,cpu_share,memory_limit,enable_memory_overcommit,max_concurrency,max_queue_size,queue_timeout,cpu_hard_limit,scan_thread_num,tag,read_bytes_per_second,remote_read_bytes_per_second
 from information_schema.workload_groups where name = 'test_wg_metrics' order 
by name;");
         logger.info("wg: " + wg);
 
         // 3. EXECUTE A QUERY SO THAT THE WORKLOAD GROUP IS USED
@@ -794,9 +794,9 @@ suite("test_crud_wlg") {
         
         // curl backend http port to get metrics
         // get first backendId
-        backendId = backendId_to_backendIP.keySet().iterator().next();
-        backendIP = backendId_to_backendIP.get(backendId);
-        backendHttpPort = backendId_to_backendHttpPort.get(backendId);
+        def backendId = backendId_to_backendIP.keySet().iterator().next();
+        def backendIP = backendId_to_backendIP.get(backendId);
+        def backendHttpPort = backendId_to_backendHttpPort.get(backendId);
         logger.info("backendId: " + backendId + ", backendIP: " + backendIP + 
", backendHttpPort: " + backendHttpPort);
 
         // Create a for loop to get metrics 5 times


---------------------------------------------------------------------
To unsubscribe, e-mail: [email protected]
For additional commands, e-mail: [email protected]
