xuanyuanking commented on a change in pull request #32582:
URL: https://github.com/apache/spark/pull/32582#discussion_r645661110
##########
File path: sql/core/src/test/scala/org/apache/spark/sql/execution/streaming/state/RocksDBSuite.scala
##########
@@ -54,4 +130,30 @@ class RocksDBSuite extends SparkFunSuite {
"""{"sstFiles":[{"localFileName":"00001.sst","dfsSstFileName":"00001-uuid.sst","sizeBytes":12345678901234}],"logFiles":[{"localFileName":"00001.log","dfsLogFileName":"00001-uuid.log","sizeBytes":12345678901234}],"numKeys":12345678901234}""")
// scalastyle:on line.size.limit
}
+
+ def generateFiles(dir: String, fileToLengths: Seq[(String, Int)]): Unit = {
+ fileToLengths.foreach { case (fileName, length) =>
+ val file = new File(dir, fileName)
+ FileUtils.write(file, "a" * length)
+ }
+ }
+
+ def saveCheckpointFiles(
+ fileManager: RocksDBFileManager,
+ fileToLengths: Seq[(String, Int)],
+ version: Int,
+ numKeys: Int): Unit = {
+ val checkpointDir = Utils.createTempDir().getAbsolutePath // local dir to create checkpoints
Review comment:
The `checkpointDir` here is a temp dir for a specific version of checkpointing, and it corresponds to a RocksDB checkpoint. The method `saveCheckpointToDfs` is called from the `commit` logic of the RocksDBStateStoreProvider. The commit follows these steps (a hedged sketch of this flow is included after the list):
```
* - Write all the updates to the native RocksDB
* - Flush all changes to disk
* - Create a RocksDB checkpoint in a new local dir [Done in this method]
* - Sync the checkpoint dir files to DFS [Done in this method]
```
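To make the flow concrete, here is a minimal Scala sketch of what such a commit could look like. It is not the actual Spark implementation: the `commitSketch` helper and its parameters are hypothetical, the `saveCheckpointToDfs(checkpointDir, version, numKeys)` signature is assumed for illustration, and the `deleteRecursively` step reflects that RocksDB's `createCheckpoint` expects the target path to not exist yet.

```scala
import java.io.File

import org.rocksdb.{Checkpoint, FlushOptions, RocksDB, WriteBatch, WriteOptions}

import org.apache.spark.sql.execution.streaming.state.RocksDBFileManager
import org.apache.spark.util.Utils

object CommitFlowSketch {
  // Hypothetical helper mirroring the four commit steps listed above.
  // `db`, `updates`, `fileManager`, `version`, and `numKeys` are assumed inputs.
  def commitSketch(
      db: RocksDB,
      updates: WriteBatch,
      fileManager: RocksDBFileManager,
      version: Long,
      numKeys: Long): Unit = {
    // 1. Write all the updates to the native RocksDB.
    db.write(new WriteOptions(), updates)

    // 2. Flush all changes to disk so the checkpoint captures them.
    db.flush(new FlushOptions().setWaitForFlush(true))

    // 3. Create a RocksDB checkpoint in a new local dir,
    //    like the `checkpointDir` used by `saveCheckpointFiles` in the test.
    val checkpointDir = Utils.createTempDir()
    // RocksDB requires the checkpoint path to not exist yet, so remove the
    // freshly created temp dir and let createCheckpoint() recreate it.
    Utils.deleteRecursively(checkpointDir)
    val checkpoint = Checkpoint.create(db)
    checkpoint.createCheckpoint(checkpointDir.getAbsolutePath)

    // 4. Sync the checkpoint dir files to DFS (signature assumed).
    fileManager.saveCheckpointToDfs(checkpointDir, version, numKeys)
  }
}
```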