satishd commented on a change in pull request #11060:
URL: https://github.com/apache/kafka/pull/11060#discussion_r671627863



##########
File path: core/src/main/scala/kafka/server/checkpoints/CheckpointFile.scala
##########
@@ -16,127 +16,41 @@
   */
 package kafka.server.checkpoints
 
-import java.io._
-import java.nio.charset.StandardCharsets
-import java.nio.file.{FileAlreadyExistsException, Files, Paths}
-
 import kafka.server.LogDirFailureChannel
-import kafka.utils.Logging
 import org.apache.kafka.common.errors.KafkaStorageException
-import org.apache.kafka.common.utils.Utils
-
-import scala.collection.{Seq, mutable}
+import org.apache.kafka.server.common.SnapshotFile
+import org.apache.kafka.server.common.SnapshotFile.EntryFormatter
 
-trait CheckpointFileFormatter[T]{
-  def toLine(entry: T): String
-
-  def fromLine(line: String): Option[T]
-}
-
-class CheckpointReadBuffer[T](location: String,
-                              reader: BufferedReader,
-                              version: Int,
-                              formatter: CheckpointFileFormatter[T]) extends Logging {
-  def read(): Seq[T] = {
-    def malformedLineException(line: String) =
-      new IOException(s"Malformed line in checkpoint file ($location): 
'$line'")
-
-    var line: String = null
-    try {
-      line = reader.readLine()
-      if (line == null)
-        return Seq.empty
-      line.toInt match {
-        case fileVersion if fileVersion == version =>
-          line = reader.readLine()
-          if (line == null)
-            return Seq.empty
-          val expectedSize = line.toInt
-          val entries = mutable.Buffer[T]()
-          line = reader.readLine()
-          while (line != null) {
-            val entry = formatter.fromLine(line)
-            entry match {
-              case Some(e) =>
-                entries += e
-                line = reader.readLine()
-              case _ => throw malformedLineException(line)
-            }
-          }
-          if (entries.size != expectedSize)
-            throw new IOException(s"Expected $expectedSize entries in 
checkpoint file ($location), but found only ${entries.size}")
-          entries
-        case _ =>
-          throw new IOException(s"Unrecognized version of the checkpoint file 
($location): " + version)
-      }
-    } catch {
-      case _: NumberFormatException => throw malformedLineException(line)
-    }
-  }
-}
+import java.io._
+import scala.collection.Seq
+import scala.jdk.CollectionConverters._
 
 class CheckpointFile[T](val file: File,
                         version: Int,
-                        formatter: CheckpointFileFormatter[T],
+                        formatter: EntryFormatter[T],
                         logDirFailureChannel: LogDirFailureChannel,
-                        logDir: String) extends Logging {
-  private val path = file.toPath.toAbsolutePath
-  private val tempPath = Paths.get(path.toString + ".tmp")
-  private val lock = new Object()
-
-  try Files.createFile(file.toPath) // create the file if it doesn't exist
-  catch { case _: FileAlreadyExistsException => }
+                        logDir: String) {

Review comment:
       I started with that approach while refactoring the existing class: I implemented an `IOExceptionHandler` class that takes `logDir` and `logDirFailureChannel` and reused it in both `LeaderEpochCheckpointFile` and `OffsetsCheckpointFile`. In the end I preferred to keep `SnapshotFile` simple and let implementors supply their own logic by extending the write and read methods. That way subclasses can customize read/write behaviour in general, not only the `IOException` handling, as sketched below.
   In both cases I had to create a new class, and I preferred to extend the `SnapshotFile` class.
   
   Having said that, I do not have strong opinions on this; I am fine with either way.
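
   For illustration only, here is a minimal sketch of what the "extend write/read" shape could look like on the Scala side. The trait name `CheckpointIOErrorHandling` and the `doWrite`/`doRead` hooks are hypothetical, not the API introduced by this PR; the error-handling pattern (`LogDirFailureChannel.maybeAddOfflineLogDir` followed by a `KafkaStorageException`) is the one the existing `CheckpointFile` already uses.

   ```scala
   import java.io.{File, IOException}

   import kafka.server.LogDirFailureChannel
   import org.apache.kafka.common.errors.KafkaStorageException

   import scala.collection.Seq

   // Hypothetical mixin: the concrete checkpoint class supplies the raw
   // read/write logic (e.g. by delegating to SnapshotFile), and this trait
   // converts any IOException into a KafkaStorageException after marking
   // the log directory offline via the failure channel.
   trait CheckpointIOErrorHandling[T] {
     def file: File
     def logDir: String
     def logDirFailureChannel: LogDirFailureChannel

     // Raw operations to be implemented by the concrete checkpoint class.
     protected def doWrite(entries: Seq[T]): Unit
     protected def doRead(): Seq[T]

     def write(entries: Seq[T]): Unit = {
       try doWrite(entries)
       catch {
         case e: IOException =>
           val msg = s"Error while writing checkpoint file ${file.getAbsolutePath}"
           logDirFailureChannel.maybeAddOfflineLogDir(logDir, msg, e)
           throw new KafkaStorageException(msg, e)
       }
     }

     def read(): Seq[T] = {
       try doRead()
       catch {
         case e: IOException =>
           val msg = s"Error while reading checkpoint file ${file.getAbsolutePath}"
           logDirFailureChannel.maybeAddOfflineLogDir(logDir, msg, e)
           throw new KafkaStorageException(msg, e)
       }
     }
   }
   ```

   Either variant (a shared handler object or a subclass/mixin like the above) keeps the `IOException` -> `KafkaStorageException` mapping in one place for both `LeaderEpochCheckpointFile` and `OffsetsCheckpointFile`.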



