This is an automated email from the ASF dual-hosted git repository.

ashrigondekar pushed a commit to branch master
in repository https://gitbox.apache.org/repos/asf/spark.git


The following commit(s) were added to refs/heads/master by this push:
     new b7513740d1b4 [SPARK-55151] Fix RocksDBSuite testWithStateStoreCheckpointIds
b7513740d1b4 is described below

commit b7513740d1b47b9da02125353eec83afd5d0aebf
Author: Zifei Feng <[email protected]>
AuthorDate: Mon Jan 26 16:06:55 2026 -0800

    [SPARK-55151] Fix RocksDBSuite testWithStateStoreCheckpointIds
    
    ### What changes were proposed in this pull request?
    
    In ScalaTest, test expects a parameterless test body (a by-name => Any). However, the helper testWithStateStoreCheckpointIds incorrectly passed a lambda with a Boolean parameter to test.
    
    As a result, the test body was never executed: the lambda was accepted as a literal function value, so the tests silently did not run.
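    
    The pitfall can be illustrated with a minimal, self-contained sketch (the suite and test names are hypothetical, not the actual RocksDBSuite code, and an explicit parameter type is used here for clarity): because the by-name Any body accepts any expression, a function literal compiles, but ScalaTest never invokes it.
    
    ```scala
    import org.scalatest.funsuite.AnyFunSuite
    
    class ByNamePitfallSuite extends AnyFunSuite {
      // test(name)(body: => Any) takes the body by name, and a function value
      // is itself a valid Any. The lambda below is therefore accepted as the
      // "body" but never invoked: the assertion does not run and the test
      // passes vacuously.
      test("silently skipped") { (flag: Boolean) =>
        assert(flag) // never executed
      }
    }
    ```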
    
    This PR fixes the helper function so that the body passed to test takes no parameters, while still correctly threading the enableStateStoreCheckpointIds flag into the test logic (see the sketch after the list below). It also updates the affected tests that started failing once the helper was fixed, so they reflect the corrected behavior. Affected tests include:
    - loadEmpty tests
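    
    For reference, here is a minimal sketch of the corrected helper pattern (testWithFlag and the suite name are illustrative, not the exact Spark code): the flag is captured from the enclosing foreach and the body handed to test takes no parameters, so it actually runs.
    
    ```scala
    import org.scalatest.Tag
    import org.scalatest.funsuite.AnyFunSuite
    
    class FlaggedHelperSuite extends AnyFunSuite {
      // Hypothetical helper mirroring the fixed testWithStateStoreCheckpointIds:
      // register one test per flag value, close over the flag, and pass a
      // parameterless body to test so the body executes when the test runs.
      def testWithFlag(testName: String, testTags: Tag*)(testBody: Boolean => Any): Unit = {
        Seq(true, false).foreach { flag =>
          test(s"$testName - with flag = $flag", testTags: _*) {
            testBody(flag) // runs when the registered test executes
          }
        }
      }
    
      testWithFlag("example") { flag =>
        assert(flag || !flag)
      }
    }
    ```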
    
    ### Why are the changes needed?
    
    So that future engineers who use this util function will actually have their tests run.
    
    ### Does this PR introduce _any_ user-facing change?
    
    no
    
    ### How was this patch tested?
    
    This [run](https://github.com/zifeif2/spark/actions/runs/21275903248/job/61235478518?pr=7) shows the failures when only the helper function is fixed; with the test updates in this PR, the affected unit tests pass.
    
    ### Was this patch authored or co-authored using generative AI tooling?
    
    No
    
    Closes #53936 from zifeif2/zifeif2-fix-rocksdb-suite.
    
    Lead-authored-by: Zifei Feng <[email protected]>
    Co-authored-by: zifeif2 <[email protected]>
    Signed-off-by: Anish Shrigondekar <[email protected]>
---
 .../sql/execution/streaming/state/RocksDBSuite.scala   | 18 ++++++++++++++----
 1 file changed, 14 insertions(+), 4 deletions(-)

diff --git a/sql/core/src/test/scala/org/apache/spark/sql/execution/streaming/state/RocksDBSuite.scala b/sql/core/src/test/scala/org/apache/spark/sql/execution/streaming/state/RocksDBSuite.scala
index 5998f2c04ea7..7aced88d3089 100644
--- a/sql/core/src/test/scala/org/apache/spark/sql/execution/streaming/state/RocksDBSuite.scala
+++ b/sql/core/src/test/scala/org/apache/spark/sql/execution/streaming/state/RocksDBSuite.scala
@@ -276,7 +276,7 @@ trait AlsoTestWithRocksDBFeatures
     Seq(true, false).foreach { enableStateStoreCheckpointIds =>
       val newTestName = s"$testName - with enableStateStoreCheckpointIds = " +
         s"$enableStateStoreCheckpointIds"
-      test(newTestName, testTags: _*) { enableStateStoreCheckpointIds =>
+      test(newTestName, testTags: _*) {
         testBody(enableStateStoreCheckpointIds)
       }
     }
@@ -3286,7 +3286,10 @@ class RocksDBSuite extends AlsoTestWithRocksDBFeatures with SharedSparkSession
         // upload snapshot 4.zip
         db.doMaintenance()
       }
-      withDB(remoteDir, version = 4, conf = conf) { db =>
+      withDB(remoteDir, version = 4, conf = conf,
+          enableStateStoreCheckpointIds = enableStateStoreCheckpointIds,
+          versionToUniqueId = versionToUniqueId) { db =>
+        db.close()
       }
     })
   }
@@ -3315,7 +3318,10 @@ class RocksDBSuite extends AlsoTestWithRocksDBFeatures with SharedSparkSession
       db.doMaintenance()
     }
 
-    withDB(remoteDir, version = 4, conf = conf) { db =>
+    withDB(remoteDir, version = 4, conf = conf,
+        enableStateStoreCheckpointIds = enableStateStoreCheckpointIds,
+        versionToUniqueId = versionToUniqueId) { db =>
+      db.close()
     }
   }
 
@@ -4177,7 +4183,11 @@ class RocksDBSuite extends AlsoTestWithRocksDBFeatures with SharedSparkSession
       // So still do a versionToUniqueId.get
       ckptId match {
         case Some(_) => super.load(version, ckptId, readOnly, loadEmpty)
-        case None => super.load(version, versionToUniqueId.get(version), readOnly, loadEmpty)
+        case None => super.load(
+          version,
+          if (!loadEmpty) versionToUniqueId.get(version) else None,
+          readOnly,
+          loadEmpty)
       }
     }
 


---------------------------------------------------------------------
To unsubscribe, e-mail: [email protected]
For additional commands, e-mail: [email protected]
