MaxNevermind commented on code in PR #44636:
URL: https://github.com/apache/spark/pull/44636#discussion_r1446772037
##########
sql/core/src/main/scala/org/apache/spark/sql/execution/streaming/FileStreamOptions.scala:
##########
@@ -50,11 +50,25 @@ class FileStreamOptions(parameters:
CaseInsensitiveMap[String]) extends Logging
}
}
+ val maxBytesPerTrigger: Option[Int] =
parameters.get("maxBytesPerTrigger").map { str =>
+ Try(str.toInt).toOption.filter(_ > 0).map(op =>
+ if (maxFilesPerTrigger.nonEmpty) {
+ throw new IllegalArgumentException(
+ s"Options 'maxFilesPerTrigger' and 'maxBytesPerTrigger' " +
+ s"can't be both set at the same time")
Review Comment:
fixed
##########
docs/structured-streaming-programming-guide.md:
##########
@@ -561,6 +561,8 @@ Here are the details of all the sources in Spark.
<br/>
<code>maxFilesPerTrigger</code>: maximum number of new files to be
considered in every trigger (default: no max)
<br/>
+ <code>maxBytesPerTrigger</code>: maximum total size of new files to be
considered in every trigger (default: no max). maxBytesPerTrigger and
maxFilesPerTrigger can't both be set at the same time; only one of the two can be
chosen.
Review Comment:
fixed
--
This is an automated message from the Apache Git Service.
To respond to the message, please log on to GitHub and use the
URL above to go to the specific comment.
To unsubscribe, e-mail: [email protected]
For queries about this service, please contact Infrastructure at:
[email protected]
---------------------------------------------------------------------
To unsubscribe, e-mail: [email protected]
For additional commands, e-mail: [email protected]