This is an automated email from the ASF dual-hosted git repository.

gavinchou pushed a commit to branch master
in repository https://gitbox.apache.org/repos/asf/doris.git


The following commit(s) were added to refs/heads/master by this push:
     new 2d3b1b67c68 [fix](filecache) fix regression failure due to data compression (#58246)
2d3b1b67c68 is described below

commit 2d3b1b67c68d1ff7dccd0411e0afdc50da2f224e
Author: zhengyu <[email protected]>
AuthorDate: Tue Nov 25 00:21:15 2025 +0800

    [fix](filecache) fix regression failure due to data compression (#58246)
    
    In newer versions of Doris, compression is enabled by default, so the cached
    data is smaller than the asserted thresholds, causing data size assertion
    failures in the regression cases.
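    
    Concretely, the suites parse a cache metric line of the form "name value"
    and assert the cached size against a floor of 838860800 bytes (800 MiB)
    instead of 1073741824 bytes (1 GiB), leaving headroom for the compressed
    footprint. A minimal Groovy sketch of the relaxed check (the metric line
    and its value below are hypothetical, not taken from a real BE):
    
        // hypothetical metric line in the "name value" format the suites parse
        def line = "ttl_cache_size 900000000"
        def i = line.indexOf(' ')
        def cachedBytes = line.substring(i).toLong()   // toLong() trims the leading space
        // was: cachedBytes > 1073741824 (1 GiB); relaxed to 800 MiB for compressed data
        assert cachedBytes > 838860800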
---
 regression-test/data/cloud_p0/cache/ttl/test_show_ttl.out            | 4 ++--
 regression-test/suites/cloud_p0/cache/ttl/alter_ttl_max_int64.groovy | 2 +-
 regression-test/suites/cloud_p0/cache/ttl/create_table_like.groovy   | 2 +-
 regression-test/suites/cloud_p0/cache/ttl/test_ttl.groovy            | 2 +-
 regression-test/suites/cloud_p0/cache/ttl/test_ttl_lru_evict.groovy  | 2 +-
 regression-test/suites/cloud_p0/cache/ttl/test_ttl_preempt.groovy    | 2 +-
 6 files changed, 7 insertions(+), 7 deletions(-)

diff --git a/regression-test/data/cloud_p0/cache/ttl/test_show_ttl.out b/regression-test/data/cloud_p0/cache/ttl/test_show_ttl.out
index 4b6c4024a92..5beec6cfd12 100644
--- a/regression-test/data/cloud_p0/cache/ttl/test_show_ttl.out
+++ b/regression-test/data/cloud_p0/cache/ttl/test_show_ttl.out
@@ -6,11 +6,11 @@
 0
 
 -- !test_show_ttl_3 --
-customer_ttl   CREATE TABLE `customer_ttl` (\n  `C_CUSTKEY` int NOT NULL,\n  `C_NAME` varchar(25) NOT NULL,\n  `C_ADDRESS` varchar(40) NOT NULL,\n  `C_NATIONKEY` int NOT NULL,\n  `C_PHONE` char(15) NOT NULL,\n  `C_ACCTBAL` decimal(15,2) NOT NULL,\n  `C_MKTSEGMENT` char(10) NOT NULL,\n  `C_COMMENT` varchar(117) NOT NULL\n) ENGINE=OLAP\nDUPLICATE KEY(`C_CUSTKEY`, `C_NAME`)\nDISTRIBUTED BY HASH(`C_CUSTKEY`) BUCKETS 32\nPROPERTIES (\n"file_cache_ttl_seconds" = "300",\n"is_being_synced" = "fals [...]
+customer_ttl   CREATE TABLE `customer_ttl` (\n  `C_CUSTKEY` int NOT NULL,\n  `C_NAME` varchar(25) NOT NULL,\n  `C_ADDRESS` varchar(40) NOT NULL,\n  `C_NATIONKEY` int NOT NULL,\n  `C_PHONE` char(15) NOT NULL,\n  `C_ACCTBAL` decimal(15,2) NOT NULL,\n  `C_MKTSEGMENT` char(10) NOT NULL,\n  `C_COMMENT` varchar(117) NOT NULL\n) ENGINE=OLAP\nDUPLICATE KEY(`C_CUSTKEY`, `C_NAME`)\nDISTRIBUTED BY HASH(`C_CUSTKEY`) BUCKETS 32\nPROPERTIES (\n"file_cache_ttl_seconds" = "300",\n"is_being_synced" = "fals [...]
 
 -- !test_show_ttl_4 --
 0
 
 -- !test_show_ttl_5 --
-customer_ttl   CREATE TABLE `customer_ttl` (\n  `C_CUSTKEY` int NOT NULL,\n  `C_NAME` varchar(25) NOT NULL,\n  `C_ADDRESS` varchar(40) NOT NULL,\n  `C_NATIONKEY` int NOT NULL,\n  `C_PHONE` char(15) NOT NULL,\n  `C_ACCTBAL` decimal(15,2) NOT NULL,\n  `C_MKTSEGMENT` char(10) NOT NULL,\n  `C_COMMENT` varchar(117) NOT NULL\n) ENGINE=OLAP\nDUPLICATE KEY(`C_CUSTKEY`, `C_NAME`)\nDISTRIBUTED BY HASH(`C_CUSTKEY`) BUCKETS 32\nPROPERTIES (\n"file_cache_ttl_seconds" = "0",\n"is_being_synced" = "false" [...]
+customer_ttl   CREATE TABLE `customer_ttl` (\n  `C_CUSTKEY` int NOT NULL,\n  `C_NAME` varchar(25) NOT NULL,\n  `C_ADDRESS` varchar(40) NOT NULL,\n  `C_NATIONKEY` int NOT NULL,\n  `C_PHONE` char(15) NOT NULL,\n  `C_ACCTBAL` decimal(15,2) NOT NULL,\n  `C_MKTSEGMENT` char(10) NOT NULL,\n  `C_COMMENT` varchar(117) NOT NULL\n) ENGINE=OLAP\nDUPLICATE KEY(`C_CUSTKEY`, `C_NAME`)\nDISTRIBUTED BY HASH(`C_CUSTKEY`) BUCKETS 32\nPROPERTIES (\n"file_cache_ttl_seconds" = "0",\n"is_being_synced" = "false" [...]
 
diff --git a/regression-test/suites/cloud_p0/cache/ttl/alter_ttl_max_int64.groovy b/regression-test/suites/cloud_p0/cache/ttl/alter_ttl_max_int64.groovy
index ec1b02bfc87..9ffde534860 100644
--- a/regression-test/suites/cloud_p0/cache/ttl/alter_ttl_max_int64.groovy
+++ b/regression-test/suites/cloud_p0/cache/ttl/alter_ttl_max_int64.groovy
@@ -113,7 +113,7 @@ suite("test_ttl_max_int64") {
                         continue
                     }
                     def i = line.indexOf(' ')
-                    assertTrue(line.substring(i).toLong() > 1073741824)
+                    assertTrue(line.substring(i).toLong() > 838860800)
                     flag1 = true
                 }
             }
diff --git a/regression-test/suites/cloud_p0/cache/ttl/create_table_like.groovy b/regression-test/suites/cloud_p0/cache/ttl/create_table_like.groovy
index 9a1ea6e6c76..b6d8f505c90 100644
--- a/regression-test/suites/cloud_p0/cache/ttl/create_table_like.groovy
+++ b/regression-test/suites/cloud_p0/cache/ttl/create_table_like.groovy
@@ -135,7 +135,7 @@ def clearFileCache = { check_func ->
                         continue
                     }
                     def i = line.indexOf(' ')
-                    assertTrue(line.substring(i).toLong() > 1073741824)
+                    assertTrue(line.substring(i).toLong() > 838860800)
                     flag1 = true
                 }
             }
diff --git a/regression-test/suites/cloud_p0/cache/ttl/test_ttl.groovy b/regression-test/suites/cloud_p0/cache/ttl/test_ttl.groovy
index f217492b900..b4a668ef3f8 100644
--- a/regression-test/suites/cloud_p0/cache/ttl/test_ttl.groovy
+++ b/regression-test/suites/cloud_p0/cache/ttl/test_ttl.groovy
@@ -146,7 +146,7 @@ suite("test_ttl") {
                 }
             }
             assertTrue(flag1)
-            assertTrue(ttl_cache_size > 1073741824)
+            assertTrue(ttl_cache_size > 838860800)
     }
     sleep(180000)
     getMetricsMethod.call() {
diff --git a/regression-test/suites/cloud_p0/cache/ttl/test_ttl_lru_evict.groovy b/regression-test/suites/cloud_p0/cache/ttl/test_ttl_lru_evict.groovy
index f0397923abf..45d435e814d 100644
--- a/regression-test/suites/cloud_p0/cache/ttl/test_ttl_lru_evict.groovy
+++ b/regression-test/suites/cloud_p0/cache/ttl/test_ttl_lru_evict.groovy
@@ -253,7 +253,7 @@ suite("test_ttl_lru_evict") {
     // sequentially, coz we don't know what other cases are
     // doing with TTL cache
    logger.info("ttl evict diff:" + (ttl_cache_evict_size_end - ttl_cache_evict_size_begin).toString())
-    assertTrue((ttl_cache_evict_size_end - ttl_cache_evict_size_begin) > 1073741824)
+    assertTrue((ttl_cache_evict_size_end - ttl_cache_evict_size_begin) > 838860800)
 
     // then we test skip_cache count when doing query when ttl cache is full
     // we expect it to be rare
diff --git a/regression-test/suites/cloud_p0/cache/ttl/test_ttl_preempt.groovy b/regression-test/suites/cloud_p0/cache/ttl/test_ttl_preempt.groovy
index a31cf3df038..3120a0144bd 100644
--- a/regression-test/suites/cloud_p0/cache/ttl/test_ttl_preempt.groovy
+++ b/regression-test/suites/cloud_p0/cache/ttl/test_ttl_preempt.groovy
@@ -191,7 +191,7 @@ suite("test_ttl_preempt") {
                         continue
                     }
                     def i = line.indexOf(' ')
-                    assertTrue(line.substring(i).toLong() > 1073741824)
+                    assertTrue(line.substring(i).toLong() > 838860800)
                     flag1 = true
                 }
             }


---------------------------------------------------------------------
To unsubscribe, e-mail: [email protected]
For additional commands, e-mail: [email protected]
