This is an automated email from the ASF dual-hosted git repository.

dongjoon pushed a commit to branch master
in repository https://gitbox.apache.org/repos/asf/spark.git


The following commit(s) were added to refs/heads/master by this push:
     new 4efb630db3b8 [SPARK-52587][SHELL] spark-shell 2.13 support `-i` `-I` parameter
4efb630db3b8 is described below

commit 4efb630db3b8ceb76eefe80f45f06cd0c4c1ba2f
Author: sychen <syc...@ctrip.com>
AuthorDate: Thu Jun 26 18:04:50 2025 -0700

    [SPARK-52587][SHELL] spark-shell 2.13 support `-i` `-I` parameter
    
    ### What changes were proposed in this pull request?
    
    ### Why are the changes needed?
    
    In spark-shell with Scala 2.12 the `-i` and `-I` parameters were supported, but the Scala 2.13 build does not support them.
    
    
https://github.com/apache/spark/blob/185380c414d2e9822b90a9c0a9e83052a4aa83c1/repl/src/main/scala-2.12/org/apache/spark/repl/SparkILoop.scala#L170-L180
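
    For context, a hypothetical use of these flags looks like this (a sketch only: the file name and its contents are made up, and the exact load-vs-paste behavior of each flag is whatever Scala's `interpretPreamble`, quoted below, does):

    ```scala
    // init.scala -- a hypothetical preamble file passed as `spark-shell -i init.scala`
    // (or `-I init.scala`); its definitions are available once the REPL prompt appears.
    def square(x: Int): Int = x * x
    println(s"preamble loaded: square(4) = ${square(4)}")
    ```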
    
    ---
    
    If we construct the ShellConfig correctly, we can reuse Scala's logic for processing `-i` and `-I`.
    
    From scala.tools.nsc.interpreter.shell.ILoop#interpretPreamble:
    ```scala
        for (f <- filesToLoad) {
          loadCommand(f)
          addReplay(s":load $f")
        }
        for (f <- filesToPaste) {
          pasteCommand(f)
          addReplay(s":paste $f")
        }
    ```
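
    So the fix amounts to building the ShellConfig from settings that have actually parsed the command line. A minimal sketch of that wiring, assuming the GenericRunnerSettings / ShellConfig APIs used in the diff below (the argument list is made up):

    ```scala
    import scala.tools.nsc.GenericRunnerSettings
    import scala.tools.nsc.interpreter.shell.ShellConfig

    // Parse the shell arguments (here a made-up `-i init.scala`) into settings,
    // then derive the ShellConfig that ILoop#interpretPreamble consults.
    val settings = new GenericRunnerSettings(msg => Console.err.println(msg))
    settings.processArguments(List("-i", "init.scala"), processAll = true)
    val config = ShellConfig(settings) // the -i / -I files are now visible to the preamble
    ```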
    
    ### Does this PR introduce _any_ user-facing change?
    Yes
    
    ### How was this patch tested?
    local test
    
    ### Was this patch authored or co-authored using generative AI tooling?
    No
    
    Closes #51294 from cxzl25/SPARK-52587.
    
    Authored-by: sychen <syc...@ctrip.com>
    Signed-off-by: Dongjoon Hyun <dongj...@apache.org>
---
 repl/src/main/scala/org/apache/spark/repl/Main.scala       |  4 +++-
 repl/src/main/scala/org/apache/spark/repl/SparkILoop.scala | 12 +++++++++---
 2 files changed, 12 insertions(+), 4 deletions(-)

diff --git a/repl/src/main/scala/org/apache/spark/repl/Main.scala b/repl/src/main/scala/org/apache/spark/repl/Main.scala
index 8548801266b2..4ed3bd51d752 100644
--- a/repl/src/main/scala/org/apache/spark/repl/Main.scala
+++ b/repl/src/main/scala/org/apache/spark/repl/Main.scala
@@ -57,7 +57,9 @@ object Main extends Logging {
 
   def main(args: Array[String]): Unit = {
     isShellSession = true
-    doMain(args, new SparkILoop)
+    val settings = new GenericRunnerSettings(scalaOptionError)
+    settings.processArguments(args.toList, true)
+    doMain(args, new SparkILoop(settings))
   }
 
   // Visible for testing
diff --git a/repl/src/main/scala/org/apache/spark/repl/SparkILoop.scala b/repl/src/main/scala/org/apache/spark/repl/SparkILoop.scala
index f49e8adcc74a..01a09344343a 100644
--- a/repl/src/main/scala/org/apache/spark/repl/SparkILoop.scala
+++ b/repl/src/main/scala/org/apache/spark/repl/SparkILoop.scala
@@ -31,9 +31,15 @@ import scala.util.Properties.{javaVersion, javaVmName, versionString}
 /**
  *  A Spark-specific interactive shell.
  */
-class SparkILoop(in0: BufferedReader, out: PrintWriter)
-  extends ILoop(ShellConfig(new GenericRunnerSettings(_ => ())), in0, out) {
-  def this() = this(null, new PrintWriter(Console.out, true))
+class SparkILoop(config: ShellConfig, in0: BufferedReader, out: PrintWriter)
+  extends ILoop(config, in0, out) {
+  def this(in0: BufferedReader, out: PrintWriter) = this(
+    ShellConfig(new GenericRunnerSettings(_ => ())), in0, out)
+
+  def this(settings: Settings) = this(ShellConfig(settings), null,
+    new PrintWriter(Console.out, true))
+
+  def this() = this(new GenericRunnerSettings(_ => ()))
 
   val initializationCommands: Seq[String] = Seq(
     """


---------------------------------------------------------------------
To unsubscribe, e-mail: commits-unsubscr...@spark.apache.org
For additional commands, e-mail: commits-h...@spark.apache.org
