This is an automated email from the ASF dual-hosted git repository.

bowenliang pushed a commit to branch master
in repository https://gitbox.apache.org/repos/asf/kyuubi.git


The following commit(s) were added to refs/heads/master by this push:
     new b9fc85c85 [KYUUBI #5180] Replace deprecated classes by equivalent in 
scala.tools.nsc.interpreter package
b9fc85c85 is described below

commit b9fc85c85946a6b39764db95281f47c830a8827c
Author: liangbowen <[email protected]>
AuthorDate: Mon Aug 21 13:13:55 2023 +0800

    [KYUUBI #5180] Replace deprecated classes by equivalent in 
scala.tools.nsc.interpreter package
    
    ### _Why are the changes needed?_
    
    - Replace the deprecated class aliases `scala.tools.nsc.interpreter.IR` and 
`scala.tools.nsc.interpreter.JPrintWriter` in Scala 2.13 with the equivalent 
classes `scala.tools.nsc.interpreter.Results` and `java.io.PrintWriter`
    
    ### _How was this patch tested?_
    - [ ] Add some test cases that check the changes thoroughly including 
negative and positive cases if possible
    
    - [ ] Add screenshots for manual tests if appropriate
    
    - [x] [Run 
test](https://kyuubi.readthedocs.io/en/master/contributing/code/testing.html#running-tests)
 locally before making a pull request
    
    ### _Was this patch authored or co-authored using generative AI tooling?_
    
    Closes #5180 from bowenliang123/interpreter-213.
    
    Closes #5180
    
    e76f1f05a [liangbowen] prevent use of deprecated classes in the package 
scala.tools.nsc.interpreter of Scala 2.13
    
    Authored-by: liangbowen <[email protected]>
    Signed-off-by: liangbowen <[email protected]>
---
 .../org/apache/kyuubi/engine/spark/repl/KyuubiSparkILoop.scala   | 9 ++++-----
 1 file changed, 4 insertions(+), 5 deletions(-)

diff --git 
a/externals/kyuubi-spark-sql-engine/src/main/scala/org/apache/kyuubi/engine/spark/repl/KyuubiSparkILoop.scala
 
b/externals/kyuubi-spark-sql-engine/src/main/scala/org/apache/kyuubi/engine/spark/repl/KyuubiSparkILoop.scala
index a5437df92..fbbda89ed 100644
--- 
a/externals/kyuubi-spark-sql-engine/src/main/scala/org/apache/kyuubi/engine/spark/repl/KyuubiSparkILoop.scala
+++ 
b/externals/kyuubi-spark-sql-engine/src/main/scala/org/apache/kyuubi/engine/spark/repl/KyuubiSparkILoop.scala
@@ -17,12 +17,11 @@
 
 package org.apache.kyuubi.engine.spark.repl
 
-import java.io.{ByteArrayOutputStream, File}
+import java.io.{ByteArrayOutputStream, File, PrintWriter}
 import java.util.concurrent.locks.ReentrantLock
 
 import scala.tools.nsc.Settings
-import scala.tools.nsc.interpreter.IR
-import scala.tools.nsc.interpreter.JPrintWriter
+import scala.tools.nsc.interpreter.Results
 
 import org.apache.spark.SparkContext
 import org.apache.spark.repl.SparkILoop
@@ -34,7 +33,7 @@ import org.apache.kyuubi.Utils
 private[spark] case class KyuubiSparkILoop private (
     spark: SparkSession,
     output: ByteArrayOutputStream)
-  extends SparkILoop(None, new JPrintWriter(output)) {
+  extends SparkILoop(None, new PrintWriter(output)) {
   import KyuubiSparkILoop._
 
   val result = new DataFrameHolder(spark)
@@ -102,7 +101,7 @@ private[spark] case class KyuubiSparkILoop private (
 
   def clearResult(statementId: String): Unit = result.unset(statementId)
 
-  def interpretWithRedirectOutError(statement: String): IR.Result = 
withLockRequired {
+  def interpretWithRedirectOutError(statement: String): Results.Result = 
withLockRequired {
     Console.withOut(output) {
       Console.withErr(output) {
         this.interpret(statement)

Reply via email to