This is an automated email from the ASF dual-hosted git repository.

morrysnow pushed a commit to branch branch-3.1
in repository https://gitbox.apache.org/repos/asf/doris.git


The following commit(s) were added to refs/heads/branch-3.1 by this push:
     new 4c53147d2de branch-3.1: [fix](sql-cache) make test_query_cache_hit work #58349 (#58388)
4c53147d2de is described below

commit 4c53147d2de36422dd67472dc5deaae41770d457
Author: Mingyu Chen (Rayner) <[email protected]>
AuthorDate: Thu Nov 27 10:25:35 2025 +0800

    branch-3.1: [fix](sql-cache) make test_query_cache_hit work #58349 (#58388)
    
    bp #58349
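    
    In short, the backport drops the test_query_cache_hit session variable and
    lets the regression test assert cache hits directly from the physical plan.
    A minimal Groovy sketch of that pattern as it appears in
    test_hive_query_cache.groovy below (the SQL text here is illustrative only,
    and the closures rely on the suite's existing explain/contains DSL):
    
        // Hit: the explain output must show the plan coming from the SQL cache.
        def assertHasCache = { String sqlStr ->
            explain {
                sql ("physical plan ${sqlStr}")
                contains("PhysicalSqlCache")
            }
        }
        // Miss: the plan must be freshly planned, with no PhysicalSqlCache node.
        def assertNoCache = { String sqlStr ->
            explain {
                sql ("physical plan ${sqlStr}")
                notContains("PhysicalSqlCache")
            }
        }
        // Typical use: run the query twice to warm the cache, then assert the hit.
        assertHasCache """select dt, k2 from table_with_x01 where dt in ('2022-11-10') order by k2 desc limit 10;"""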
---
 .../java/org/apache/doris/qe/ConnectProcessor.java |  6 ++--
 .../java/org/apache/doris/qe/SessionVariable.java  |  9 ------
 .../java/org/apache/doris/qe/StmtExecutor.java     |  8 -----
 .../hive/test_hive_query_cache.groovy              | 34 +++++++++++++---------
 .../hive/test_hive_statistic_auto.groovy           |  6 ++--
 .../external_table_p0/tvf/test_backends_tvf.groovy | 10 +++----
 .../tvf/test_frontends_disks_tvf.groovy            |  8 ++---
 .../tvf/test_frontends_tvf.groovy                  |  9 +++---
 8 files changed, 41 insertions(+), 49 deletions(-)

diff --git a/fe/fe-core/src/main/java/org/apache/doris/qe/ConnectProcessor.java b/fe/fe-core/src/main/java/org/apache/doris/qe/ConnectProcessor.java
index ff0b3777b58..b1daf0610e4 100644
--- a/fe/fe-core/src/main/java/org/apache/doris/qe/ConnectProcessor.java
+++ b/fe/fe-core/src/main/java/org/apache/doris/qe/ConnectProcessor.java
@@ -40,6 +40,7 @@ import org.apache.doris.common.Pair;
 import org.apache.doris.common.UserException;
 import org.apache.doris.common.util.DebugUtil;
 import org.apache.doris.common.util.SqlUtils;
+import org.apache.doris.common.util.Util;
 import org.apache.doris.datasource.CatalogIf;
 import org.apache.doris.datasource.InternalCatalog;
 import org.apache.doris.metric.MetricRepo;
@@ -252,7 +253,7 @@ public abstract class ConnectProcessor {
 
         if (stmts == null) {
             stmts = parseWithFallback(originStmt, convertedStmt, sessionVariable);
-            if (stmts == null) {
+            if (stmts == null || stmts.isEmpty()) {
                 return;
             }
         }
@@ -400,7 +401,7 @@ public abstract class ConnectProcessor {
                 return ImmutableList.of(logicalPlanAdapter);
             }
         } catch (Throwable t) {
-            LOG.warn("Parse from sql cache failed: " + t.getMessage(), t);
+            LOG.warn("Parse from sql cache failed with unexpected exception: 
{}", Util.getRootCauseMessage(t), t);
         } finally {
             statementContext.releasePlannerResources();
         }
@@ -804,3 +805,4 @@ public abstract class ConnectProcessor {
         throw new NotSupportedException("Just MysqlConnectProcessor support execute");
     }
 }
+
diff --git a/fe/fe-core/src/main/java/org/apache/doris/qe/SessionVariable.java b/fe/fe-core/src/main/java/org/apache/doris/qe/SessionVariable.java
index 9cea17dcb9f..4fe8c326f72 100644
--- a/fe/fe-core/src/main/java/org/apache/doris/qe/SessionVariable.java
+++ b/fe/fe-core/src/main/java/org/apache/doris/qe/SessionVariable.java
@@ -561,8 +561,6 @@ public class SessionVariable implements Serializable, Writable {
 
     public static final String EXPAND_RUNTIME_FILTER_BY_INNER_JION = "expand_runtime_filter_by_inner_join";
 
-    public static final String TEST_QUERY_CACHE_HIT = "test_query_cache_hit";
-
     public static final String ENABLE_AUTO_ANALYZE = "enable_auto_analyze";
 
     public static final String FORCE_SAMPLE_ANALYZE = "force_sample_analyze";
@@ -2053,13 +2051,6 @@ public class SessionVariable implements Serializable, Writable {
             }, checker = "checkPartialUpdateNewKeyBehavior", options = {"APPEND", "ERROR"})
     public String partialUpdateNewKeyPolicy = "APPEND";
 
-    @VariableMgr.VarAttr(name = TEST_QUERY_CACHE_HIT, description = {
-            "用于测试查询缓存是否命中,如果未命中指定类型的缓存,则会报错",
-            "Used to test whether the query cache is hit. "
-                    + "If the specified type of cache is not hit, an error will be reported."},
-            options = {"none", "sql_cache", "partition_cache"})
-    public String testQueryCacheHit = "none";
-
     @VariableMgr.VarAttr(name = ENABLE_AUTO_ANALYZE,
             description = {"该参数控制是否开启自动收集", "Set false to disable auto analyze"},
             flag = VariableMgr.GLOBAL)
diff --git a/fe/fe-core/src/main/java/org/apache/doris/qe/StmtExecutor.java b/fe/fe-core/src/main/java/org/apache/doris/qe/StmtExecutor.java
index 1c3bb84c574..fb90cddb242 100644
--- a/fe/fe-core/src/main/java/org/apache/doris/qe/StmtExecutor.java
+++ b/fe/fe-core/src/main/java/org/apache/doris/qe/StmtExecutor.java
@@ -1776,14 +1776,6 @@ public class StmtExecutor {
                 && CacheAnalyzer.canUseSqlCache(context.getSessionVariable());
         try {
             cacheResult = cacheAnalyzer.getCacheData();
-            if (cacheResult == null) {
-                if (ConnectContext.get() != null
-                        && !ConnectContext.get().getSessionVariable().testQueryCacheHit.equals("none")) {
-                    throw new UserException("The variable test_query_cache_hit is set to "
-                            + ConnectContext.get().getSessionVariable().testQueryCacheHit
-                            + ", but the query cache is not hit.");
-                }
-            }
         } finally {
             if (wantToParseSqlForSqlCache) {
                 String originStmt = parsedStmt.getOrigStmt().originStmt;
diff --git a/regression-test/suites/external_table_p0/hive/test_hive_query_cache.groovy b/regression-test/suites/external_table_p0/hive/test_hive_query_cache.groovy
index efcf01d3d27..c7e0dde6ff4 100644
--- a/regression-test/suites/external_table_p0/hive/test_hive_query_cache.groovy
+++ b/regression-test/suites/external_table_p0/hive/test_hive_query_cache.groovy
@@ -17,6 +17,20 @@
 
 suite("test_hive_query_cache", "p0,external,hive,external_docker,external_docker_hive") {
 
+    def assertHasCache = { String sqlStr ->
+        explain {
+            sql ("physical plan ${sqlStr}")
+            contains("PhysicalSqlCache")
+        }
+    }
+
+    def assertNoCache = { String sqlStr ->
+        explain {
+            sql ("physical plan ${sqlStr}")
+            notContains("PhysicalSqlCache")
+        }
+    }
+
     def q01 = {
         qt_q24 """ select name, count(1) as c from student group by name order by c desc;"""
         qt_q25 """ select lo_orderkey, count(1) as c from lineorder group by lo_orderkey order by c desc;"""
@@ -113,7 +127,6 @@ suite("test_hive_query_cache", "p0,external,hive,external_docker,external_docker
         // test sql cache with empty result
         try {
             sql """set enable_sql_cache=true;"""
-            sql """set test_query_cache_hit="none";"""
             sql """select * from lineitem where l_suppkey="abc";""" // non exist l_suppkey;
             sql """select * from lineitem where l_suppkey="abc";"""
         } catch (java.sql.SQLException t) {
@@ -124,7 +137,6 @@ suite("test_hive_query_cache", "p0,external,hive,external_docker,external_docker
         // test more sql cache
         sql """use `default`"""
         sql """set enable_sql_cache=true;"""
-        sql """set test_query_cache_hit="none";"""
         // 1. first query, because we need to init the schema of table_with_x01 to update the table's update time
         // then sleep 2 seconds to wait longer than Config.cache_last_version_interval_second,
         // so that when doing the second query, we can fill the cache on BE
@@ -133,20 +145,14 @@ suite("test_hive_query_cache", "p0,external,hive,external_docker,external_docker
         // 2. second query is for filling the cache on BE
         qt_sql2 """select dt, dt, k2, k5, dt from table_with_x01 where dt in ('2022-11-10') or dt in ('2022-11-10') order by k2 desc limit 10;"""
         // 3. third query, to test cache hit.
-        sql """set test_query_cache_hit="sql";"""
+        assertHasCache """select dt, dt, k2, k5, dt from table_with_x01 where dt in ('2022-11-10') or dt in ('2022-11-10') order by k2 desc limit 10;"""
         qt_sql3 """select dt, dt, k2, k5, dt from table_with_x01 where dt in ('2022-11-10') or dt in ('2022-11-10') order by k2 desc limit 10;"""
 
         // test not hit
-        try {
-            sql """set enable_sql_cache=true;"""
-            sql """set test_query_cache_hit="sql";"""
-            def r = UUID.randomUUID().toString();
-            // using a random sql
-            sql """select dt, "${r}" from table_with_x01 where dt in ('2022-11-10') or dt in ('2022-11-10') order by k2 desc limit 10;"""
-            assertTrue(1 == 2)
-        } catch (Exception t) {
-            print t.getMessage()
-            assertTrue(t.getMessage().contains("but the query cache is not hit"));
-        }
+        sql """set enable_sql_cache=true;"""
+        sql """set enable_hive_sql_cache=true"""
+        def r = UUID.randomUUID().toString();
+        // using a random sql
+        assertNoCache """select dt, "${r}" from table_with_x01 where dt in ('2022-11-10') or dt in ('2022-11-10') order by k2 desc limit 10;"""
     }
 }
diff --git a/regression-test/suites/external_table_p0/hive/test_hive_statistic_auto.groovy b/regression-test/suites/external_table_p0/hive/test_hive_statistic_auto.groovy
index f79ccada9ed..5a97470b2e9 100644
--- a/regression-test/suites/external_table_p0/hive/test_hive_statistic_auto.groovy
+++ b/regression-test/suites/external_table_p0/hive/test_hive_statistic_auto.groovy
@@ -42,7 +42,7 @@ suite("test_hive_statistic_auto", "p0,external,hive,external_docker,external_doc
         for (int i = 0; i < 10; i++) {
             Thread.sleep(1000)
             def result = sql """show column stats `statistics` (lo_quantity)"""
-            if (result.size <= 0) {
+            if (result.size() <= 0) {
                 continue;
             }
             assertEquals(result.size(), 1)
@@ -56,7 +56,7 @@ suite("test_hive_statistic_auto", "p0,external,hive,external_docker,external_doc
             assertEquals(result[0][8], "N/A")
 
             result = sql """show column stats `statistics` (lo_orderkey)"""
-            if (result.size <= 0) {
+            if (result.size() <= 0) {
                 continue;
             }
             assertEquals(result.size(), 1)
@@ -70,7 +70,7 @@ suite("test_hive_statistic_auto", "p0,external,hive,external_docker,external_doc
             assertEquals(result[0][8], "N/A")
 
             result = sql """show column stats `statistics` (lo_linenumber)"""
-            if (result.size <= 0) {
+            if (result.size() <= 0) {
                 continue;
             }
             assertEquals(result.size(), 1)
diff --git a/regression-test/suites/external_table_p0/tvf/test_backends_tvf.groovy b/regression-test/suites/external_table_p0/tvf/test_backends_tvf.groovy
index b06d0868eae..241e15d6f6a 100644
--- a/regression-test/suites/external_table_p0/tvf/test_backends_tvf.groovy
+++ b/regression-test/suites/external_table_p0/tvf/test_backends_tvf.groovy
@@ -19,30 +19,30 @@
 suite("test_backends_tvf","p0,external,tvf,external_docker") {
     List<List<Object>> table =  sql """ select * from backends(); """
     assertTrue(table.size() > 0)
-    assertEquals(25, table[0].size)
+    assertEquals(25, table[0].size())
 
     // filter columns
     table = sql """ select BackendId, Host, Alive, TotalCapacity, Version, NodeRole from backends();"""
     assertTrue(table.size() > 0)
-    assertTrue(table[0].size == 6)
+    assertTrue(table[0].size() == 6)
     assertEquals(true, table[0][2])
 
     // case insensitive
     table = sql """ select backendid, Host, alive, Totalcapacity, version, nodeRole from backends();"""
     assertTrue(table.size() > 0)
-    assertTrue(table[0].size == 6)
+    assertTrue(table[0].size() == 6)
     assertEquals(true, table[0][2])
 
     // test aliase columns
     table = sql """ select backendid as id, Host as name, alive, NodeRole as r from backends();"""
     assertTrue(table.size() > 0)
-    assertTrue(table[0].size == 4)
+    assertTrue(table[0].size() == 4)
     assertEquals(true, table[0][2])
 
     // test changing position of columns
     table = sql """ select Host as name, NodeRole as r, alive from backends();"""
     assertTrue(table.size() > 0)
-    assertTrue(table[0].size == 3)
+    assertTrue(table[0].size() == 3)
     assertEquals(true, table[0][2])
 
     def res = sql """ select count(*) from backends() where alive = 1; """
diff --git a/regression-test/suites/external_table_p0/tvf/test_frontends_disks_tvf.groovy b/regression-test/suites/external_table_p0/tvf/test_frontends_disks_tvf.groovy
index b4355d8dbaf..509fa5a75dc 100644
--- a/regression-test/suites/external_table_p0/tvf/test_frontends_disks_tvf.groovy
+++ b/regression-test/suites/external_table_p0/tvf/test_frontends_disks_tvf.groovy
@@ -19,23 +19,23 @@
 suite("test_frontends_disks_tvf", "p0,external,external_docker") {
     List<List<Object>> table =  sql """ select * from `frontends_disks`(); """
     assertTrue(table.size() > 0)
-    assertTrue(table[0].size == 10)
+    assertTrue(table[0].size() == 10)
 
     // filter columns
     table = sql """ select Name from `frontends_disks`();"""
     assertTrue(table.size() > 0)
-    assertTrue(table[0].size == 1)
+    assertTrue(table[0].size() == 1)
 
     // case insensitive
     table = sql """ select name, host, dirtype, dir from frontends_disks() order by dirtype;"""
     assertTrue(table.size() > 0)
-    assertTrue(table[0].size == 4)
+    assertTrue(table[0].size() == 4)
     assertEquals("audit-log", table[0][2])
 
     // test aliase columns
     table = sql """ select name as n, host as h, dirtype as a from frontends_disks() order by dirtype; """
     assertTrue(table.size() > 0)
-    assertTrue(table[0].size == 3)
+    assertTrue(table[0].size() == 3)
     assertEquals("audit-log", table[0][2])
 
     // test changing position of columns
diff --git a/regression-test/suites/external_table_p0/tvf/test_frontends_tvf.groovy b/regression-test/suites/external_table_p0/tvf/test_frontends_tvf.groovy
index 0f0e4450cef..0fa743d03cd 100644
--- a/regression-test/suites/external_table_p0/tvf/test_frontends_tvf.groovy
+++ b/regression-test/suites/external_table_p0/tvf/test_frontends_tvf.groovy
@@ -18,24 +18,25 @@
 // This suit test the `frontends` tvf
 suite("test_frontends_tvf","p0,external,tvf,external_docker") {
     List<List<Object>> table =  sql """ select * from `frontends`(); """
+    logger.info("${table}")
     assertTrue(table.size() > 0)
-    assertTrue(table[0].size == 19)
+    assertTrue(table[0].size() == 19)
 
     // filter columns
     table = sql """ select Name from `frontends`();"""
     assertTrue(table.size() > 0)
-    assertTrue(table[0].size == 1)
+    assertTrue(table[0].size() == 1)
 
     // case insensitive
     table = sql """ select name, host, editlogport, httpport, alive from frontends();"""
     assertTrue(table.size() > 0)
-    assertTrue(table[0].size == 5)
+    assertTrue(table[0].size() == 5)
     assertEquals("true", table[0][4])
 
     // test aliase columns
     table = sql """ select name as n, host as h, alive as a, editlogport as e from frontends(); """
     assertTrue(table.size() > 0)
-    assertTrue(table[0].size == 4)
+    assertTrue(table[0].size() == 4)
     assertEquals("true", table[0][2])
 
     // test changing position of columns


---------------------------------------------------------------------
To unsubscribe, e-mail: [email protected]
For additional commands, e-mail: [email protected]
