This is an automated email from the ASF dual-hosted git repository.

chengpan pushed a commit to branch master
in repository https://gitbox.apache.org/repos/asf/kyuubi.git


The following commit(s) were added to refs/heads/master by this push:
     new b07162a3b [KYUUBI #5075] Refine tests to adapt Spark 4.0
b07162a3b is described below

commit b07162a3bbb1235291ba4891711b00f3678730f4
Author: Fu Chen <[email protected]>
AuthorDate: Thu Jul 20 17:49:42 2023 +0800

    [KYUUBI #5075] Refine tests to adapt Spark 4.0
    
    ### _Why are the changes needed?_
    
    To fix the following failure when running the Spark engine test suites against Spark 4.0:
    
    ```
    SparkDeltaOperationSuite:
    org.apache.kyuubi.engine.spark.operation.SparkDeltaOperationSuite *** ABORTED ***
      java.lang.RuntimeException: Unable to load a Suite class org.apache.kyuubi.engine.spark.operation.SparkDeltaOperationSuite that was discovered in the runpath: Not Support spark version (4,0)
      at org.scalatest.tools.DiscoverySuite$.getSuiteInstance(DiscoverySuite.scala:80)
      at org.scalatest.tools.DiscoverySuite.$anonfun$nestedSuites$1(DiscoverySuite.scala:38)
      at scala.collection.TraversableLike.$anonfun$map$1(TraversableLike.scala:286)
      at scala.collection.Iterator.foreach(Iterator.scala:943)
      at scala.collection.Iterator.foreach$(Iterator.scala:943)
      at scala.collection.AbstractIterator.foreach(Iterator.scala:1431)
      at scala.collection.IterableLike.foreach(IterableLike.scala:74)
      at scala.collection.IterableLike.foreach$(IterableLike.scala:73)
      at scala.collection.AbstractIterable.foreach(Iterable.scala:56)
      at scala.collection.TraversableLike.map(TraversableLike.scala:286)
      ...
      Cause: java.lang.IllegalArgumentException: Not Support spark version (4,0)
      at org.apache.kyuubi.engine.spark.WithSparkSQLEngine.$init$(WithSparkSQLEngine.scala:42)
      at org.apache.kyuubi.engine.spark.operation.SparkDeltaOperationSuite.<init>(SparkDeltaOperationSuite.scala:25)
      at sun.reflect.NativeConstructorAccessorImpl.newInstance0(Native Method)
      at sun.reflect.NativeConstructorAccessorImpl.newInstance(NativeConstructorAccessorImpl.java:62)
      at sun.reflect.DelegatingConstructorAccessorImpl.newInstance(DelegatingConstructorAccessorImpl.java:45)
      at java.lang.reflect.Constructor.newInstance(Constructor.java:423)
      at java.lang.Class.newInstance(Class.java:442)
      at org.scalatest.tools.DiscoverySuite$.getSuiteInstance(DiscoverySuite.scala:66)
      at org.scalatest.tools.DiscoverySuite.$anonfun$nestedSuites$1(DiscoverySuite.scala:38)
      at scala.collection.TraversableLike.$anonfun$map$1(TraversableLike.scala:286)
      ...
    ```
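
    For context, the exception above comes from the old version gate in `WithSparkSQLEngine` (the removed lines in the diff below), which only matches Spark major version 3, so Spark 4.0 falls through to the catch-all case. A minimal reproduction of why `(4, 0)` fails:

    ```scala
    // Sketch of the removed logic: only Spark 3.x is matched, so a
    // (major, minor) pair of (4, 0) hits the catch-all and throws.
    (4, 0) match {
      case (3, minor) if minor >= 2 => 1 // SPARK-35378
      case (3, _) => 0
      case v => throw new IllegalArgumentException(s"Not Support spark version $v")
    }
    ```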
    
    ### _How was this patch tested?_
    - [ ] Add some test cases that check the changes thoroughly including negative and positive cases if possible
    
    - [ ] Add screenshots for manual tests if appropriate
    
    - [x] [Run test](https://kyuubi.readthedocs.io/en/master/contributing/code/testing.html#running-tests) locally before making a pull request
    
    Closes #5075 from cfmcgrady/spark-4.0.
    
    Closes #5075
    
    ad38c0d98 [Fu Chen] refine test to adapt Spark 4.0
    
    Authored-by: Fu Chen <[email protected]>
    Signed-off-by: Cheng Pan <[email protected]>
---
 .../apache/kyuubi/engine/spark/KyuubiSparkUtil.scala    |  8 +++-----
 .../apache/kyuubi/engine/spark/WithSparkSQLEngine.scala | 12 +++---------
 .../spark/kyuubi/SparkSQLEngineDeregisterSuite.scala    | 17 +++++++++--------
 3 files changed, 15 insertions(+), 22 deletions(-)

diff --git a/externals/kyuubi-spark-sql-engine/src/main/scala/org/apache/kyuubi/engine/spark/KyuubiSparkUtil.scala b/externals/kyuubi-spark-sql-engine/src/main/scala/org/apache/kyuubi/engine/spark/KyuubiSparkUtil.scala
index 56bb2d69f..fcfdf55dd 100644
--- a/externals/kyuubi-spark-sql-engine/src/main/scala/org/apache/kyuubi/engine/spark/KyuubiSparkUtil.scala
+++ b/externals/kyuubi-spark-sql-engine/src/main/scala/org/apache/kyuubi/engine/spark/KyuubiSparkUtil.scala
@@ -95,9 +95,7 @@ object KyuubiSparkUtil extends Logging {
     }
   }
 
-  lazy val sparkMajorMinorVersion: (Int, Int) = {
-    val runtimeSparkVer = org.apache.spark.SPARK_VERSION
-    val runtimeVersion = SemanticVersion(runtimeSparkVer)
-    (runtimeVersion.majorVersion, runtimeVersion.minorVersion)
-  }
+  // Given that we are on the Spark SQL engine side, the [[org.apache.spark.SPARK_VERSION]] can be
+  // represented as the runtime version of the Spark SQL engine.
+  lazy val SPARK_ENGINE_RUNTIME_VERSION = SemanticVersion(org.apache.spark.SPARK_VERSION)
 }
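
The new `SPARK_ENGINE_RUNTIME_VERSION` relies on `SemanticVersion` supporting ordering comparisons against plain version strings (used as `>= "3.2"` and `> "3.2"` in the test changes below). A minimal self-contained sketch of that idea, with hypothetical names rather than Kyuubi's actual implementation:

```scala
// Hypothetical stand-in for Kyuubi's SemanticVersion, shown only to
// illustrate the comparison style this patch relies on.
case class SimpleSemanticVersion(major: Int, minor: Int) {
  // Compare against a version string such as "3.2".
  def >=(other: String): Boolean = compare(SimpleSemanticVersion(other)) >= 0
  def >(other: String): Boolean = compare(SimpleSemanticVersion(other)) > 0
  private def compare(that: SimpleSemanticVersion): Int =
    if (major != that.major) major.compare(that.major)
    else minor.compare(that.minor)
}

object SimpleSemanticVersion {
  // Parse "major.minor[.patch][-suffix]" (e.g. "4.0.0-preview1"), keeping
  // only the leading numeric parts; assumes a well-formed version string.
  def apply(version: String): SimpleSemanticVersion = {
    val parts = version.split('.')
    SimpleSemanticVersion(parts(0).toInt, parts(1).takeWhile(_.isDigit).toInt)
  }
}

// SimpleSemanticVersion("4.0.0") >= "3.2"  // true, so initJobId below is 1
```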
diff --git a/externals/kyuubi-spark-sql-engine/src/test/scala/org/apache/kyuubi/engine/spark/WithSparkSQLEngine.scala b/externals/kyuubi-spark-sql-engine/src/test/scala/org/apache/kyuubi/engine/spark/WithSparkSQLEngine.scala
index 629a8374b..3b98c2efb 100644
--- a/externals/kyuubi-spark-sql-engine/src/test/scala/org/apache/kyuubi/engine/spark/WithSparkSQLEngine.scala
+++ b/externals/kyuubi-spark-sql-engine/src/test/scala/org/apache/kyuubi/engine/spark/WithSparkSQLEngine.scala
@@ -21,7 +21,7 @@ import org.apache.spark.sql.SparkSession
 
 import org.apache.kyuubi.{KyuubiFunSuite, Utils}
 import org.apache.kyuubi.config.KyuubiConf
-import org.apache.kyuubi.engine.spark.KyuubiSparkUtil.sparkMajorMinorVersion
+import org.apache.kyuubi.engine.spark.KyuubiSparkUtil.SPARK_ENGINE_RUNTIME_VERSION
 
 trait WithSparkSQLEngine extends KyuubiFunSuite {
   protected var spark: SparkSession = _
@@ -34,14 +34,8 @@ trait WithSparkSQLEngine extends KyuubiFunSuite {
 
   // Affected by such configuration' default value
   //    engine.initialize.sql='SHOW DATABASES'
-  protected var initJobId: Int = {
-    sparkMajorMinorVersion match {
-      case (3, minor) if minor >= 2 => 1 // SPARK-35378
-      case (3, _) => 0
-      case _ =>
-        throw new IllegalArgumentException(s"Not Support spark version $sparkMajorMinorVersion")
-    }
-  }
+  // SPARK-35378
+  protected lazy val initJobId: Int = if (SPARK_ENGINE_RUNTIME_VERSION >= "3.2") 1 else 0
 
   override def beforeAll(): Unit = {
     startSparkEngine()
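
A note on the replacement above: the removed `var` initializer ran inside the trait constructor, which is why ScalaTest could not even instantiate the suite on Spark 4.0 (the `WithSparkSQLEngine.$init$` frame in the stack trace). The new `if`/`else` covers every version, and `lazy` additionally moves evaluation out of construction. A small illustration of that difference, with throwaway class names:

```scala
// Eager initializer: the body runs inside the constructor, so an
// exception aborts instantiation -- the failure mode in the stack trace.
class Eager { val id: Int = throw new IllegalArgumentException("unsupported") }
// new Eager()  // would throw immediately

// Lazy initializer: construction succeeds; the body only runs (and can
// only throw) when `id` is first read.
class Lazy { lazy val id: Int = throw new IllegalArgumentException("unsupported") }
// new Lazy()  // constructs fine; throws only on first access to id
```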
diff --git a/externals/kyuubi-spark-sql-engine/src/test/scala/org/apache/spark/kyuubi/SparkSQLEngineDeregisterSuite.scala b/externals/kyuubi-spark-sql-engine/src/test/scala/org/apache/spark/kyuubi/SparkSQLEngineDeregisterSuite.scala
index 8dc93759b..1e3d6163a 100644
--- a/externals/kyuubi-spark-sql-engine/src/test/scala/org/apache/spark/kyuubi/SparkSQLEngineDeregisterSuite.scala
+++ b/externals/kyuubi-spark-sql-engine/src/test/scala/org/apache/spark/kyuubi/SparkSQLEngineDeregisterSuite.scala
@@ -24,7 +24,7 @@ import org.apache.spark.sql.internal.SQLConf.ANSI_ENABLED
 import org.scalatest.time.SpanSugar.convertIntToGrainOfTime
 
 import org.apache.kyuubi.config.KyuubiConf._
-import org.apache.kyuubi.engine.spark.KyuubiSparkUtil.sparkMajorMinorVersion
+import org.apache.kyuubi.engine.spark.KyuubiSparkUtil.SPARK_ENGINE_RUNTIME_VERSION
 import org.apache.kyuubi.engine.spark.WithDiscoverySparkSQLEngine
 import org.apache.kyuubi.engine.spark.WithEmbeddedZookeeper
 import org.apache.kyuubi.service.ServiceState
@@ -61,13 +61,13 @@ abstract class SparkSQLEngineDeregisterSuite
 class SparkSQLEngineDeregisterExceptionSuite extends SparkSQLEngineDeregisterSuite {
   override def withKyuubiConf: Map[String, String] = {
     super.withKyuubiConf ++ Map(ENGINE_DEREGISTER_EXCEPTION_CLASSES.key -> {
-      sparkMajorMinorVersion match {
+      if (SPARK_ENGINE_RUNTIME_VERSION > "3.2") {
         // see https://issues.apache.org/jira/browse/SPARK-35958
-        case (3, minor) if minor > 2 => "org.apache.spark.SparkArithmeticException"
-        case _ => classOf[ArithmeticException].getCanonicalName
+        "org.apache.spark.SparkArithmeticException"
+      } else {
+        classOf[ArithmeticException].getCanonicalName
       }
     })
-
   }
 }
 
@@ -94,10 +94,11 @@ class SparkSQLEngineDeregisterExceptionTTLSuite
       zookeeperConf ++ Map(
         ANSI_ENABLED.key -> "true",
         ENGINE_DEREGISTER_EXCEPTION_CLASSES.key -> {
-          sparkMajorMinorVersion match {
+          if (SPARK_ENGINE_RUNTIME_VERSION > "3.2") {
             // see https://issues.apache.org/jira/browse/SPARK-35958
-            case (3, minor) if minor > 2 => "org.apache.spark.SparkArithmeticException"
-            case _ => classOf[ArithmeticException].getCanonicalName
+            "org.apache.spark.SparkArithmeticException"
+          } else {
+            classOf[ArithmeticException].getCanonicalName
           }
         },
         ENGINE_DEREGISTER_JOB_MAX_FAILURES.key -> maxJobFailures.toString,
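
The version-gated choice of exception class above now appears verbatim in both suites. A possible follow-up, sketched here with a hypothetical helper name that is not part of this patch, would factor it out (it assumes the `SPARK_ENGINE_RUNTIME_VERSION` import already present in these suites):

```scala
// Hypothetical helper, not in this patch: a single place to decide which
// exception class signals an ANSI arithmetic failure on this Spark version.
// SPARK-35958: since Spark 3.3, ANSI arithmetic errors surface as
// org.apache.spark.SparkArithmeticException instead of the JDK class.
def deregisterExceptionClass: String =
  if (SPARK_ENGINE_RUNTIME_VERSION > "3.2") {
    "org.apache.spark.SparkArithmeticException"
  } else {
    classOf[ArithmeticException].getCanonicalName
  }
```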
