zsxwing commented on a change in pull request #24154: [SPARK-27210][SS] Cleanup incomplete output files in ManifestFileCommitProtocol if task is aborted
URL: https://github.com/apache/spark/pull/24154#discussion_r267826458
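
(For context on the change under review: the title describes deleting a task's partially written output files when that task is aborted. Below is a minimal sketch of that idea against Spark's `FileCommitProtocol` hooks (`setupTask`, `newTaskTempFile`, `abortTask`); the class name `TrackingCommitProtocol`, the `addedFiles` buffer, and the method bodies are illustrative assumptions, not the PR's actual diff.)

```scala
import java.util.UUID

import scala.collection.mutable.ArrayBuffer

import org.apache.hadoop.fs.Path
import org.apache.hadoop.mapreduce.TaskAttemptContext

import org.apache.spark.internal.io.FileCommitProtocol

// Sketch only: the remaining FileCommitProtocol methods are omitted, so the class stays abstract.
abstract class TrackingCommitProtocol(jobId: String, path: String) extends FileCommitProtocol {

  // Files created by the current task, remembered so they can be removed if the task aborts.
  @transient private var addedFiles: ArrayBuffer[String] = _

  override def setupTask(taskContext: TaskAttemptContext): Unit = {
    addedFiles = new ArrayBuffer[String]
  }

  override def newTaskTempFile(
      taskContext: TaskAttemptContext, dir: Option[String], ext: String): String = {
    val file = new Path(path, s"part-${UUID.randomUUID()}$ext").toString
    addedFiles += file
    file
  }

  override def abortTask(taskContext: TaskAttemptContext): Unit = {
    // Best-effort cleanup: delete whatever the failed task managed to write.
    if (addedFiles != null && addedFiles.nonEmpty) {
      val fs = new Path(addedFiles.head).getFileSystem(taskContext.getConfiguration)
      addedFiles.foreach(f => fs.delete(new Path(f), false))
    }
  }
}
```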
##########
File path:
sql/core/src/test/scala/org/apache/spark/sql/streaming/FileStreamSinkSuite.scala
##########
@@ -478,4 +481,31 @@ class FileStreamSinkSuite extends StreamTest {
checkDatasetUnorderly(outputDf, 1, 2, 3)
}
}
+
+ testQuietly("cleanup incomplete output for aborted task") {
+ withTempDir { tempDir =>
+ val checkpointDir = new File(tempDir, "chk")
+ val outputDir = new File(tempDir, "output")
+ val inputData = MemoryStream[Int]
+ inputData.addData(1, 2, 3)
+ val q = inputData.toDS().map(_ / 0)
+ .writeStream
+ .option("checkpointLocation", checkpointDir.getCanonicalPath)
+ .format("parquet")
+ .start(outputDir.getCanonicalPath)
+
+ intercept[StreamingQueryException] {
+ try {
+ q.processAllAvailable()
+ } finally {
+ q.stop()
+ }
+ }
+
+ import scala.collection.JavaConverters._
Review comment:
nit: since you will need to fix the imports, could you move this to the top
of the file? This is a pretty common import and it will likely be used by
future tests.
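
(Illustration of the suggestion: hoist the import into the file header, roughly following Spark's usual import grouping of java, scala, third-party, then org.apache.spark packages. The neighbouring imports and the commented `asScala` usage below are assumptions for illustration, not the file's actual contents.)

```scala
// Near the top of FileStreamSinkSuite.scala; surrounding imports are illustrative.
import java.io.File
import java.nio.file.Files

import scala.collection.JavaConverters._   // file-scoped, reusable by future tests

import org.apache.spark.sql.execution.streaming.MemoryStream

// Hypothetical use later in the test body: walk the output directory with the Java NIO API
// and consume the resulting java.util.Iterator as a Scala iterator via asScala, e.g.
// val leftoverFiles = Files.walk(outputDir.toPath).iterator().asScala
//   .map(_.toString)
//   .filter(_.endsWith(".parquet"))
```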