This is an automated email from the ASF dual-hosted git repository.

dongjoon pushed a commit to branch master
in repository https://gitbox.apache.org/repos/asf/spark.git


The following commit(s) were added to refs/heads/master by this push:
     new 68136fd50a5b [SPARK-52456][CORE] Lower the minimum limit of `spark.eventLog.rolling.maxFileSize`
68136fd50a5b is described below

commit 68136fd50a5b635ed5f1a17984b79d86bdc357eb
Author: Dongjoon Hyun <dongj...@apache.org>
AuthorDate: Wed Jun 11 19:23:52 2025 -0700

    [SPARK-52456][CORE] Lower the minimum limit of `spark.eventLog.rolling.maxFileSize`
    
    ### What changes were proposed in this pull request?
    
    This PR aims to lower the minimum limit of `spark.eventLog.rolling.maxFileSize` from `10MiB` to `2MiB` in Apache Spark 4.1.0 while keeping the default (`128MiB`).
    
    ### Why are the changes needed?
    
    `spark.eventLog.rolling.maxFileSize` has had `10MiB` as its lower bound since Apache Spark 3.0.0.
    
    - #25670
    
    By reducing the lower bound to `2MiB`, Spark jobs can write small event log files more frequently, without waiting for `10MiB` of events to accumulate. This is helpful for slow (large micro-batch period) or low-traffic streaming jobs. Users can set a value appropriate for their jobs.
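    
    As a rough illustration (not part of this commit), a low-traffic streaming job could opt into the smaller rolling size once it runs on a Spark build that includes this change; the configuration keys below are the existing public ones, and `2m` is only accepted from Spark 4.1.0 on:
    
    ```scala
    import org.apache.spark.sql.SparkSession
    
    // Minimal sketch: roll event log files at roughly 2 MiB instead of
    // waiting for the previous 10 MiB minimum. The default stays at 128m.
    val spark = SparkSession.builder()
      .appName("low-traffic-streaming-job")
      .config("spark.eventLog.enabled", "true")            // write event logs
      .config("spark.eventLog.rolling.enabled", "true")    // split logs into rolled files
      .config("spark.eventLog.rolling.maxFileSize", "2m")  // rejected before this change
      .getOrCreate()
    ```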
    
    ### Does this PR introduce _any_ user-facing change?
    
    There is no behavior change for existing jobs. This only extends the range of allowed configuration values for users who want a lower limit.
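    
    Purely as a sketch (assuming test-style code inside the `org.apache.spark` namespace, since both the config entry and `SparkConf.get(entry)` are `private[spark]`), values below the new floor are still rejected when the entry is read, which is what the updated suite below exercises with `1m`:
    
    ```scala
    import org.apache.spark.SparkConf
    import org.apache.spark.internal.config.EVENT_LOG_ROLLING_MAX_FILE_SIZE
    
    val conf = new SparkConf()
      .set(EVENT_LOG_ROLLING_MAX_FILE_SIZE.key, "1m")
    
    // Reading the entry applies its checkValue and throws IllegalArgumentException:
    // "Max file size of event log should be configured to be at least 2 MiB."
    try {
      conf.get(EVENT_LOG_ROLLING_MAX_FILE_SIZE)
    } catch {
      case e: IllegalArgumentException => println(e.getMessage)
    }
    ```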
    
    ### How was this patch tested?
    
    Pass the CIs.
    
    ### Was this patch authored or co-authored using generative AI tooling?
    
    No.
    
    Closes #51162 from dongjoon-hyun/SPARK-52456.
    
    Authored-by: Dongjoon Hyun <dongj...@apache.org>
    Signed-off-by: Dongjoon Hyun <dongj...@apache.org>
---
 core/src/main/scala/org/apache/spark/internal/config/package.scala    | 4 ++--
 .../org/apache/spark/deploy/history/EventLogFileWritersSuite.scala    | 2 +-
 2 files changed, 3 insertions(+), 3 deletions(-)

diff --git a/core/src/main/scala/org/apache/spark/internal/config/package.scala b/core/src/main/scala/org/apache/spark/internal/config/package.scala
index 7cb3d068b676..000de4d6c26d 100644
--- a/core/src/main/scala/org/apache/spark/internal/config/package.scala
+++ b/core/src/main/scala/org/apache/spark/internal/config/package.scala
@@ -317,8 +317,8 @@ package object config {
         " to be rolled over.")
       .version("3.0.0")
       .bytesConf(ByteUnit.BYTE)
-      .checkValue(_ >= ByteUnit.MiB.toBytes(10), "Max file size of event log should be " +
-        "configured to be at least 10 MiB.")
+      .checkValue(_ >= ByteUnit.MiB.toBytes(2), "Max file size of event log should be " +
+        "configured to be at least 2 MiB.")
       .createWithDefaultString("128m")
 
   private[spark] val EXECUTOR_ID =
diff --git a/core/src/test/scala/org/apache/spark/deploy/history/EventLogFileWritersSuite.scala b/core/src/test/scala/org/apache/spark/deploy/history/EventLogFileWritersSuite.scala
index 7c05613d8b16..da3614bf81a5 100644
--- a/core/src/test/scala/org/apache/spark/deploy/history/EventLogFileWritersSuite.scala
+++ b/core/src/test/scala/org/apache/spark/deploy/history/EventLogFileWritersSuite.scala
@@ -326,7 +326,7 @@ class RollingEventLogFilesWriterSuite extends EventLogFileWritersSuite {
 
     val conf = getLoggingConf(testDirPath, None)
     conf.set(EVENT_LOG_ENABLE_ROLLING, true)
-    conf.set(EVENT_LOG_ROLLING_MAX_FILE_SIZE.key, "9m")
+    conf.set(EVENT_LOG_ROLLING_MAX_FILE_SIZE.key, "1m")
 
     val e = intercept[IllegalArgumentException] {
       createWriter(appId, attemptId, testDirPath.toUri, conf,


---------------------------------------------------------------------
To unsubscribe, e-mail: commits-unsubscr...@spark.apache.org
For additional commands, e-mail: commits-h...@spark.apache.org
