This is an automated email from the ASF dual-hosted git repository.

dataroaring pushed a commit to branch master
in repository https://gitbox.apache.org/repos/asf/doris.git


The following commit(s) were added to refs/heads/master by this push:
     new 64bfb86f3e4 [fix](filecache) flaky test_lru_persist regression (#56650)
64bfb86f3e4 is described below

commit 64bfb86f3e40f2beb940bb3ff3e9929f37140bf2
Author: zhengyu <[email protected]>
AuthorDate: Tue Sep 30 19:23:49 2025 +0800

    [fix](filecache) flaky test_lru_persist regression (#56650)
---
 be/src/io/cache/file_cache_lru_tool.cpp                | 1 -
 regression-test/suites/demo_p0/test_lru_persist.groovy | 6 +++++-
 2 files changed, 5 insertions(+), 2 deletions(-)

diff --git a/be/src/io/cache/file_cache_lru_tool.cpp b/be/src/io/cache/file_cache_lru_tool.cpp
index a6e133c7a56..ce843c563b5 100644
--- a/be/src/io/cache/file_cache_lru_tool.cpp
+++ b/be/src/io/cache/file_cache_lru_tool.cpp
@@ -196,7 +196,6 @@ Status parse_one_lru_entry(std::ifstream& in, std::string& filename, io::UInt128
     }
 
     // Get next entry from current group
-    std::cout << "After deserialization: " << current_parse_group.DebugString() << std::endl;
     auto entry = current_parse_group.entries(0);
     hash = io::UInt128Wrapper((static_cast<uint128_t>(entry.hash().high()) << 64) |
                               entry.hash().low());
diff --git a/regression-test/suites/demo_p0/test_lru_persist.groovy b/regression-test/suites/demo_p0/test_lru_persist.groovy
index d039c74c5c5..a8347254d88 100644
--- a/regression-test/suites/demo_p0/test_lru_persist.groovy
+++ b/regression-test/suites/demo_p0/test_lru_persist.groovy
@@ -53,7 +53,7 @@ suite('test_lru_persist', 'docker') {
     options.feConfigs += ['example_conf_k1=v1', 'example_conf_k2=v2']
    options.beConfigs += ['enable_file_cache=true', 'enable_java_support=false', 'file_cache_enter_disk_resource_limit_mode_percent=99',
                          'file_cache_background_lru_dump_interval_ms=2000', 'file_cache_background_lru_log_replay_interval_ms=500',
-                          'disable_auto_compation=true', 'file_cache_enter_need_evict_cache_in_advance_percent=99',
+                          'disable_auto_compaction=true', 'file_cache_enter_need_evict_cache_in_advance_percent=99',
                          'file_cache_background_lru_dump_update_cnt_threshold=0'
                         ]
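[Editor's note] The `-`/`+` pair above is the substantive config fix: the misspelled key 'disable_auto_compation' was presumably ignored by the BE, so auto compaction stayed enabled and could rewrite data between the test's two checksum checks. As a hedged illustration only (the suite has no such validator; 'knownBeKeys' and the key list are hypothetical), a fail-fast check over the requested keys would surface this class of typo:

    // Hypothetical fail-fast key validation; 'knownBeKeys' is an illustrative
    // subset, not a real suite API. The assert trips on the misspelled key.
    def knownBeKeys = ['enable_file_cache', 'disable_auto_compaction',
                       'file_cache_background_lru_dump_interval_ms'] as Set
    ['disable_auto_compation=true'].each { kv ->
        def key = kv.tokenize('=')[0]
        assert knownBeKeys.contains(key) : "unknown BE config key (typo?): ${key}"
    }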
 
@@ -62,6 +62,9 @@ suite('test_lru_persist', 'docker') {
         cluster.checkFeIsAlive(1, true)
         cluster.checkBeIsAlive(1, true)
         sql '''set global enable_auto_analyze=false'''
+        sql '''set global enable_audit_plugin=false''' // not working currently, so use below two to work around
+        sql '''set global audit_plugin_max_batch_bytes=5000000000'''
+        sql '''set global audit_plugin_max_batch_interval_sec=10000000'''
 
        sql '''create table tb1 (k int) DISTRIBUTED BY HASH(k) BUCKETS 10 properties ("replication_num"="1")'''
         sql '''insert into tb1 values (1),(2),(3)'''
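[Editor's note] The three added statements above work around the audit plugin: as the inline comment notes, `enable_audit_plugin=false` does not take effect here, so the flush thresholds are instead pushed far beyond the test's lifetime (a ~5 GB batch and a ~116-day interval), keeping audit-log writes from disturbing the file cache mid-test. A minimal sketch of that intent, assuming the suite's `sql` helper is in scope (the helper name is hypothetical; the test inlines the statements):

    // Starve the audit plugin rather than disable it: with thresholds this
    // large, no audit batch can flush during a regression run.
    def quiesceAuditLog = {
        sql '''set global audit_plugin_max_batch_bytes=5000000000'''      // ~5 GB
        sql '''set global audit_plugin_max_batch_interval_sec=10000000''' // ~116 days
    }
    quiesceAuditLog()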
@@ -88,6 +91,7 @@ suite('test_lru_persist', 'docker') {
         def normalAfter = "md5sum 
${cachePath}/lru_dump_normal.tail".execute().text.trim().split()[0]
         logger.info("normalAfter: ${normalAfter}")
 
+        sql '''show data'''
         assert normalBefore == normalAfter
 
         // remove dump file
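[Editor's note] For reference, the checksum comparison above shells out to md5sum inside the container. A minimal in-process alternative, assuming only the JDK plus the test's existing `cachePath` variable (this is a sketch, not what the suite does):

    import java.security.MessageDigest

    // Compute the MD5 hex digest of the dump file without spawning md5sum,
    // removing the dependency on the binary being present in the image.
    def md5Hex = { String path ->
        MessageDigest.getInstance('MD5')
                     .digest(new File(path).bytes)
                     .encodeHex()
                     .toString()
    }
    def digestAfter = md5Hex("${cachePath}/lru_dump_normal.tail")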


---------------------------------------------------------------------
To unsubscribe, e-mail: [email protected]
For additional commands, e-mail: [email protected]
