HeartSaVioR commented on a change in pull request #33763:
URL: https://github.com/apache/spark/pull/33763#discussion_r699726063
##########
File path:
sql/core/src/test/scala/org/apache/spark/sql/streaming/FileStreamSourceSuite.scala
##########
@@ -1304,6 +1304,140 @@ class FileStreamSourceSuite extends
FileStreamSourceTest {
}
}
+ test("SPARK-36533: Trigger.AvailableNow") {
+ withTempDirs { (src, target) =>
+ val checkpoint = new File(target, "chk").getCanonicalPath
+ val targetDir = new File(target, "data").getCanonicalPath
+ var lastFileModTime: Option[Long] = None
+
+ /** Create a text file with a single data item */
+ def createFile(data: Int): File = {
+ val file = stringToFile(new File(src, s"$data.txt"), data.toString)
+ if (lastFileModTime.nonEmpty) file.setLastModified(lastFileModTime.get + 1000)
+ lastFileModTime = Some(file.lastModified)
+ file
+ }
+
+ createFile(1)
+ createFile(2)
+ createFile(3)
+
+ // Set up a query to read text files one at a time
+ val df = spark
+ .readStream
+ .option("maxFilesPerTrigger", 1)
+ .text(src.getCanonicalPath)
+
+ def startQuery(): StreamingQuery = {
+ df.writeStream
+ .format("parquet")
+ .trigger(Trigger.AvailableNow)
+ .option("checkpointLocation", checkpoint)
+ .start(targetDir)
+ }
+ val q = startQuery()
+
+ try {
Review comment:
`assert(q.recentProgress.count(_.numInputRows != 0) == 3)`
This verifies that the query runs three micro-batches instead of one.
Btw, the test `SPARK-36533: Trigger.AvailableNow - checkpointing` covers
everything in here now. The change on `index` determines the number of
micro-batches being executed, and we also checked the output DataFrame. I think
we can simply remove this test.
--
This is an automated message from the Apache Git Service.
To respond to the message, please log on to GitHub and use the
URL above to go to the specific comment.
To unsubscribe, e-mail: [email protected]
For queries about this service, please contact Infrastructure at:
[email protected]
---------------------------------------------------------------------
To unsubscribe, e-mail: [email protected]
For additional commands, e-mail: [email protected]