kunal642 commented on a change in pull request #3965:
URL: https://github.com/apache/carbondata/pull/3965#discussion_r499551395



##########
File path: integration/spark/src/main/scala/org/apache/carbondata/api/CarbonStore.scala
##########
@@ -96,20 +100,38 @@ object CarbonStore {
    * Read stage files and return input files
    */
   def readStageInput(
+      tableStagePath: String,
       stageFiles: Seq[CarbonFile],
       status: StageInput.StageStatus): Seq[StageInput] = {
     val gson = new Gson()
     val output = Collections.synchronizedList(new util.ArrayList[StageInput]())
-    stageFiles.map { stage =>
-      val filePath = stage.getAbsolutePath
-      val stream = FileFactory.getDataInputStream(filePath)
+    stageFiles.foreach { stage =>
+      val filePath = tableStagePath + CarbonCommonConstants.FILE_SEPARATOR + stage.getName
+      var stream: DataInputStream = null
       try {
-        val stageInput = gson.fromJson(new InputStreamReader(stream), classOf[StageInput])
-        stageInput.setCreateTime(stage.getLastModifiedTime)
-        stageInput.setStatus(status)
-        output.add(stageInput)
+        stream = FileFactory.getDataInputStream(filePath)
+        var retry = READ_FILE_RETRY_TIMES
+        breakable {
+          while (retry > 0) {
+            try {
+              val stageInput = gson.fromJson(new InputStreamReader(stream), classOf[StageInput])
+              stageInput.setCreateTime(stage.getLastModifiedTime)
+              stageInput.setStatus(status)
+              output.add(stageInput)
+              break()
+            } catch {
+              case _ : FileNotFoundException =>
+                LOGGER.warn("The stage file: " + filePath + " does not exist")
+                break()
+              case _ => retry -= 1
+                Thread.sleep(READ_FILE_RETRY_INTERVAL)

Review comment:
       Why are you not throwing the original exception when the retry count is 1?
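
       For illustration only (not part of the patch): one way to do what this comment suggests is to keep retrying transient failures but let the exception propagate once the last attempt fails. The sketch below reuses the READ_FILE_RETRY_TIMES and READ_FILE_RETRY_INTERVAL constants from the diff; readWithRetry itself is a hypothetical helper, not actual CarbonStore code.

           import java.io.FileNotFoundException

           // Hypothetical helper: evaluate `read`, retrying transient failures,
           // and let the original exception propagate when no retries remain
           // instead of silently swallowing it.
           def readWithRetry[T](retriesLeft: Int, retryIntervalMs: Long)(read: => T): T = {
             try {
               read
             } catch {
               case e: FileNotFoundException =>
                 // A missing stage file is not transient, so do not retry; the
                 // caller can still catch this, log a warning, and skip the file.
                 throw e
               case _: Exception if retriesLeft > 1 =>
                 Thread.sleep(retryIntervalMs)
                 readWithRetry(retriesLeft - 1, retryIntervalMs)(read)
               // On the last attempt the guard above fails to match, so the
               // original exception propagates to the caller.
             }
           }

       The call site would wrap the readWithRetry call in the existing FileNotFoundException handler so a missing stage file is still just logged and skipped. Note also that retrying gson.fromJson on the same, already-consumed stream would likely fail on every attempt; each retry would probably need to reopen the stream to be useful.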

----------------------------------------------------------------
This is an automated message from the Apache Git Service.
To respond to the message, please log on to GitHub and use the
URL above to go to the specific comment.

For queries about this service, please contact Infrastructure at:
us...@infra.apache.org

