This is an automated email from the ASF dual-hosted git repository.

chengpan pushed a commit to branch master
in repository https://gitbox.apache.org/repos/asf/kyuubi.git


The following commit(s) were added to refs/heads/master by this push:
     new 1e0806412 [KYUUBI #6425] Fix tests in Spark engine and Kyuubi server modules with Spark 4.0
1e0806412 is described below

commit 1e08064123e5ae0200a733b1d490769ed6da4281
Author: Cheng Pan <[email protected]>
AuthorDate: Mon May 27 16:57:21 2024 +0800

    [KYUUBI #6425] Fix tests in Spark engine and Kyuubi server modules with Spark 4.0
    
    # :mag: Description
    
    This PR fixes tests in the Spark engine and Kyuubi server modules with Spark 4.0.
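
    For context, the key behavioral change these fixes accommodate: under ANSI mode, Spark 4.0 may throw a `SparkArithmeticException` directly, where Spark 3.x throws a `SparkException` wrapping an `ArithmeticException`, so the deregister tests now accept either shape. A minimal sketch of the assertion pattern used below (assuming a ScalaTest suite where `spark` is the engine's SparkSession and `query` fails with an ANSI arithmetic error):

    ```scala
    import org.apache.spark.{SparkArithmeticException, SparkException}

    intercept[Exception](spark.sql(query).collect()) match {
      // Spark 3.x wraps the ANSI failure in a SparkException
      case se: SparkException => assert(se.getCause.isInstanceOf[ArithmeticException])
      // Spark 4.0 may throw SparkArithmeticException directly
      case e => assert(e.isInstanceOf[SparkArithmeticException])
    }
    ```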
    
    ## Types of changes :bookmark:
    
    - [ ] Bugfix (non-breaking change which fixes an issue)
    - [x] New feature (non-breaking change which adds functionality)
    - [ ] Breaking change (fix or feature that would cause existing functionality to change)
    
    ## Test Plan 🧪
    
    Since Spark 4.0.0-preview1 is still in the voting phase, this PR does not add CI; the change was tested in https://github.com/apache/kyuubi/pull/6407 with Spark 4.0.0-preview1 RC1.
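
    Where expectations differ across Spark versions, the fixes gate on the engine's runtime version. A sketch of that pattern as applied in `WithSparkSQLEngine` below (`SPARK_ENGINE_RUNTIME_VERSION` is Kyuubi's semantic-version handle for the embedded Spark; its `>=` accepts a version string):

    ```scala
    import org.apache.kyuubi.engine.spark.KyuubiSparkUtil.SPARK_ENGINE_RUNTIME_VERSION

    // SPARK-35378 (3.2.0) made the 'SHOW DATABASES' init SQL trigger a job;
    // SPARK-43124 (4.0.0) removed that job, shifting the expected first job id.
    val initJobId: Int = if (SPARK_ENGINE_RUNTIME_VERSION >= "4.0") 0 else 1
    ```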
    
    ---
    
    # Checklist 📝
    
    - [x] This patch was not authored or co-authored using [Generative Tooling](https://www.apache.org/legal/generative-tooling.html)
    
    **Be nice. Be informative.**
    
    Closes #6425 from pan3793/spark-4.
    
    Closes #6425
    
    101986416 [Cheng Pan] Fix tests in Spark engine and Kyuubi server modules with Spark 4.0
    
    Authored-by: Cheng Pan <[email protected]>
    Signed-off-by: Cheng Pan <[email protected]>
---
 .../kyuubi/engine/spark/WithSparkSQLEngine.scala     |  9 +++++----
 .../spark/kyuubi/SparkSQLEngineDeregisterSuite.scala | 20 ++++++++++++++------
 .../apache/kyuubi/operation/SparkQueryTests.scala    |  6 +++---
 .../kyuubi/operation/PlanOnlyOperationSuite.scala    |  8 ++++++--
 .../kyuubi/server/api/v1/BatchesResourceSuite.scala  |  4 ++--
 .../kyuubi/server/mysql/MySQLSparkQuerySuite.scala   |  4 ++--
 .../kyuubi/server/rest/client/BatchCliSuite.scala    | 12 ++++++------
 7 files changed, 38 insertions(+), 25 deletions(-)

diff --git a/externals/kyuubi-spark-sql-engine/src/test/scala/org/apache/kyuubi/engine/spark/WithSparkSQLEngine.scala b/externals/kyuubi-spark-sql-engine/src/test/scala/org/apache/kyuubi/engine/spark/WithSparkSQLEngine.scala
index e6b140704..101352209 100644
--- a/externals/kyuubi-spark-sql-engine/src/test/scala/org/apache/kyuubi/engine/spark/WithSparkSQLEngine.scala
+++ b/externals/kyuubi-spark-sql-engine/src/test/scala/org/apache/kyuubi/engine/spark/WithSparkSQLEngine.scala
@@ -21,6 +21,7 @@ import org.apache.spark.sql.SparkSession
 
 import org.apache.kyuubi.{KyuubiFunSuite, Utils}
 import org.apache.kyuubi.config.KyuubiConf
+import org.apache.kyuubi.engine.spark.KyuubiSparkUtil.SPARK_ENGINE_RUNTIME_VERSION
 
 trait WithSparkSQLEngine extends KyuubiFunSuite {
   protected var spark: SparkSession = _
@@ -31,10 +32,10 @@ trait WithSparkSQLEngine extends KyuubiFunSuite {
 
   protected var connectionUrl: String = _
 
-  // Affected by such configuration' default value
-  //    engine.initialize.sql='SHOW DATABASES'
-  // SPARK-35378
-  protected val initJobId: Int = 1
+  // Behavior is affected by the initialization SQL: 'SHOW DATABASES'
+  // SPARK-35378 (3.2.0) makes it trigger a job
+  // SPARK-43124 (4.0.0) makes it no longer trigger a job
+  protected val initJobId: Int = if (SPARK_ENGINE_RUNTIME_VERSION >= "4.0") 0 else 1
 
   override def beforeAll(): Unit = {
     startSparkEngine()
diff --git a/externals/kyuubi-spark-sql-engine/src/test/scala/org/apache/spark/kyuubi/SparkSQLEngineDeregisterSuite.scala b/externals/kyuubi-spark-sql-engine/src/test/scala/org/apache/spark/kyuubi/SparkSQLEngineDeregisterSuite.scala
index 4dddcd4ee..ddb612ad4 100644
--- a/externals/kyuubi-spark-sql-engine/src/test/scala/org/apache/spark/kyuubi/SparkSQLEngineDeregisterSuite.scala
+++ b/externals/kyuubi-spark-sql-engine/src/test/scala/org/apache/spark/kyuubi/SparkSQLEngineDeregisterSuite.scala
@@ -19,7 +19,7 @@ package org.apache.spark.kyuubi
 
 import java.util.UUID
 
-import org.apache.spark.SparkException
+import org.apache.spark.{SparkArithmeticException, SparkException}
 import org.apache.spark.sql.internal.SQLConf.ANSI_ENABLED
 import org.scalatest.time.SpanSugar.convertIntToGrainOfTime
 
@@ -47,8 +47,10 @@ abstract class SparkSQLEngineDeregisterSuite
     assert(engine.frontendServices.head.discoveryService.get.getServiceState ===
       ServiceState.STARTED)
     (0 until maxJobFailures).foreach { _ =>
-      val e = intercept[SparkException](spark.sql(query).collect())
-      assert(e.getCause.isInstanceOf[ArithmeticException])
+      intercept[Exception](spark.sql(query).collect()) match {
+        case se: SparkException => assert(se.getCause.isInstanceOf[ArithmeticException])
+        case e => assert(e.isInstanceOf[SparkArithmeticException])
+      }
     }
     eventually(timeout(5.seconds), interval(1.second)) {
       assert(engine.frontendServices.head.discoveryService.get.getServiceState ===
@@ -113,12 +115,18 @@ class SparkSQLEngineDeregisterExceptionTTLSuite
     assert(engine.frontendServices.head.discoveryService.get.getServiceState ===
       ServiceState.STARTED)
 
-    intercept[SparkException](spark.sql(query).collect())
+    intercept[Exception](spark.sql(query).collect()) match {
+      case se: SparkException => assert(se.getCause.isInstanceOf[ArithmeticException])
+      case e => assert(e.isInstanceOf[SparkArithmeticException])
+    }
+
     Thread.sleep(deregisterExceptionTTL + 1000)
 
     (0 until maxJobFailures).foreach { _ =>
-      val e = intercept[SparkException](spark.sql(query).collect())
-      assert(e.getCause.isInstanceOf[ArithmeticException])
+      intercept[Exception](spark.sql(query).collect()) match {
+        case se: SparkException => assert(se.getCause.isInstanceOf[ArithmeticException])
+        case e => assert(e.isInstanceOf[SparkArithmeticException])
+      }
     }
     eventually(timeout(5.seconds), interval(1.second)) {
       assert(engine.frontendServices.head.discoveryService.get.getServiceState ===
diff --git a/kyuubi-common/src/test/scala/org/apache/kyuubi/operation/SparkQueryTests.scala b/kyuubi-common/src/test/scala/org/apache/kyuubi/operation/SparkQueryTests.scala
index b46b30402..f39e55f5b 100644
--- a/kyuubi-common/src/test/scala/org/apache/kyuubi/operation/SparkQueryTests.scala
+++ b/kyuubi-common/src/test/scala/org/apache/kyuubi/operation/SparkQueryTests.scala
@@ -37,8 +37,8 @@ trait SparkQueryTests extends SparkDataTypeTests with HiveJDBCTestHelper {
     val sql = "select date_sub(date'2011-11-11', '1.2')"
     val errors = Set(
       "The second argument of 'date_sub' function needs to be an integer.",
-      // unquoted since Spark-3.4, see https://github.com/apache/spark/pull/36693
-      "The second argument of date_sub function needs to be an integer.")
+      "SECOND_FUNCTION_ARGUMENT_NOT_INTEGER",
+      "CAST_INVALID_INPUT")
 
     withJdbcStatement() { statement =>
       val e = intercept[SQLException] {
@@ -254,7 +254,7 @@ trait SparkQueryTests extends SparkDataTypeTests with HiveJDBCTestHelper {
       rs.next()
       // scala repl will return resX = YYYYY, and here we only check YYYYY
       val sparkVer = rs.getString(1).split("=")(1).trim
-      assert("\\d\\.\\d\\.\\d(-SNAPSHOT)?".r.pattern.matcher(sparkVer).matches())
+      assert("\\d\\.\\d\\.\\d.*".r.pattern.matcher(sparkVer).matches())
       assert(rs.getMetaData.getColumnName(1) === "output")
     }
   }
diff --git a/kyuubi-server/src/test/scala/org/apache/kyuubi/operation/PlanOnlyOperationSuite.scala b/kyuubi-server/src/test/scala/org/apache/kyuubi/operation/PlanOnlyOperationSuite.scala
index 8773440a6..2c27b51db 100644
--- a/kyuubi-server/src/test/scala/org/apache/kyuubi/operation/PlanOnlyOperationSuite.scala
+++ b/kyuubi-server/src/test/scala/org/apache/kyuubi/operation/PlanOnlyOperationSuite.scala
@@ -187,7 +187,7 @@ class PlanOnlyOperationSuite extends WithKyuubiServer with HiveJDBCTestHelper {
     }
   }
 
-  test("kyuubi #3214: Plan only mode with an incorrect value") {
+  test("KYUUBI #3214: Plan only mode with an incorrect value") {
     withSessionConf()(Map(KyuubiConf.OPERATION_PLAN_ONLY_MODE.key -> "parse"))(Map.empty) {
       withJdbcStatement() { statement =>
         statement.executeQuery(s"set ${KyuubiConf.OPERATION_PLAN_ONLY_MODE.key}=parser")
@@ -196,7 +196,11 @@ class PlanOnlyOperationSuite extends WithKyuubiServer with HiveJDBCTestHelper {
         statement.executeQuery(s"set ${KyuubiConf.OPERATION_PLAN_ONLY_MODE.key}=parse")
         val result = statement.executeQuery("select 1")
         assert(result.next())
-        assert(result.getString(1).contains("Project [unresolvedalias(1, None)]"))
+        val plan = result.getString(1)
+        assert {
+          plan.contains("Project [unresolvedalias(1, None)]") ||
+          plan.contains("Project [unresolvedalias(1)]")
+        }
       }
     }
   }
diff --git a/kyuubi-server/src/test/scala/org/apache/kyuubi/server/api/v1/BatchesResourceSuite.scala b/kyuubi-server/src/test/scala/org/apache/kyuubi/server/api/v1/BatchesResourceSuite.scala
index f3287170a..ac287e906 100644
--- a/kyuubi-server/src/test/scala/org/apache/kyuubi/server/api/v1/BatchesResourceSuite.scala
+++ b/kyuubi-server/src/test/scala/org/apache/kyuubi/server/api/v1/BatchesResourceSuite.scala
@@ -202,8 +202,8 @@ abstract class BatchesResourceSuiteBase extends KyuubiFunSuite
 
       // check both kyuubi log and engine log
       assert(
-        logs.exists(_.contains("/bin/spark-submit")) &&
-          logs.exists(_.contains(s"SparkContext: Submitted application: $sparkBatchTestAppName")))
+        logs.exists(_.contains("bin/spark-submit")) &&
+          logs.exists(_.contains(s"Submitted application: $sparkBatchTestAppName")))
     }
 
     // invalid user name
diff --git a/kyuubi-server/src/test/scala/org/apache/kyuubi/server/mysql/MySQLSparkQuerySuite.scala b/kyuubi-server/src/test/scala/org/apache/kyuubi/server/mysql/MySQLSparkQuerySuite.scala
index e0cf2ecae..cf894216f 100644
--- a/kyuubi-server/src/test/scala/org/apache/kyuubi/server/mysql/MySQLSparkQuerySuite.scala
+++ b/kyuubi-server/src/test/scala/org/apache/kyuubi/server/mysql/MySQLSparkQuerySuite.scala
@@ -252,8 +252,8 @@ class MySQLSparkQuerySuite extends WithKyuubiServer with MySQLJDBCTestHelper {
     val sql = "select date_sub(date'2011-11-11', '1.2')"
     val errors = Set(
       "The second argument of 'date_sub' function needs to be an integer.",
-      // unquoted since Spark-3.4, see https://github.com/apache/spark/pull/36693
-      "The second argument of date_sub function needs to be an integer.")
+      "SECOND_FUNCTION_ARGUMENT_NOT_INTEGER",
+      "CAST_INVALID_INPUT")
 
     withJdbcStatement() { statement =>
       val e = intercept[SQLException] {
diff --git a/kyuubi-server/src/test/scala/org/apache/kyuubi/server/rest/client/BatchCliSuite.scala b/kyuubi-server/src/test/scala/org/apache/kyuubi/server/rest/client/BatchCliSuite.scala
index 29caf0468..ffaac5400 100644
--- a/kyuubi-server/src/test/scala/org/apache/kyuubi/server/rest/client/BatchCliSuite.scala
+++ b/kyuubi-server/src/test/scala/org/apache/kyuubi/server/rest/client/BatchCliSuite.scala
@@ -258,8 +258,8 @@ class BatchCliSuite extends RestClientTestHelper with TestPrematureExit with Bat
       ldapUserPasswd,
       "--forward")
     result = testPrematureExitForControlCli(logArgs, "")
-    assert(result.contains(s"Submitted application: ${sparkBatchTestAppName}"))
-    assert(result.contains("ShutdownHookManager: Shutdown hook called"))
+    assert(result.contains(s"Submitted application: $sparkBatchTestAppName"))
+    assert(result.contains("Shutdown hook called"))
   }
 
   test("submit batch test") {
@@ -271,8 +271,8 @@ class BatchCliSuite extends RestClientTestHelper with TestPrematureExit with Bat
       "--password",
       ldapUserPasswd)
     val result = testPrematureExitForControlCli(submitArgs, "")
-    assert(result.contains(s"Submitted application: ${sparkBatchTestAppName}"))
-    assert(result.contains("ShutdownHookManager: Shutdown hook called"))
+    assert(result.contains(s"Submitted application: $sparkBatchTestAppName"))
+    assert(result.contains("Shutdown hook called"))
   }
 
   test("submit batch test with waitCompletion=false") {
@@ -288,8 +288,8 @@ class BatchCliSuite extends RestClientTestHelper with TestPrematureExit with Bat
       "--conf",
       s"${CtlConf.CTL_BATCH_LOG_QUERY_INTERVAL.key}=100")
     val result = testPrematureExitForControlCli(submitArgs, "")
-    assert(result.contains(s"/bin/spark-submit"))
-    assert(!result.contains("ShutdownHookManager: Shutdown hook called"))
+    assert(result.contains("bin/spark-submit"))
+    assert(!result.contains("Shutdown hook called"))
   }
 
   test("list batch test") {
