This is an automated email from the ASF dual-hosted git repository.

chengpan pushed a commit to branch master
in repository https://gitbox.apache.org/repos/asf/kyuubi.git


The following commit(s) were added to refs/heads/master by this push:
     new 1bfc8c584 [KYUUBI #6699] Bump Spark 4.0.0-preview2
1bfc8c584 is described below

commit 1bfc8c584039b3ca28012bc292312d3521530b12
Author: Cheng Pan <[email protected]>
AuthorDate: Mon Sep 23 17:42:48 2024 +0800

    [KYUUBI #6699] Bump Spark 4.0.0-preview2
    
    # :mag: Description
    
    Spark 4.0.0-preview2 RC1 passed the vote:
    https://lists.apache.org/thread/4ctj2mlgs4q2yb4hdw2jy4z34p5yw2b1
    
    ## Types of changes :bookmark:
    
    - [ ] Bugfix (non-breaking change which fixes an issue)
    - [x] New feature (non-breaking change which adds functionality)
    - [ ] Breaking change (fix or feature that would cause existing functionality to change)
    
    ## Test Plan 🧪
    
    Pass GHA.
    
    ---
    
    # Checklist 📝
    
    - [x] This patch was not authored or co-authored using [Generative Tooling](https://www.apache.org/legal/generative-tooling.html)
    
    **Be nice. Be informative.**
    
    Closes #6699 from pan3793/spark-4.0.0-preview2.
    
    Closes #6699
    
    2db1f645d [Cheng Pan] 4.0.0-preview2
    42055bb1e [Cheng Pan] fix
    d29c0ef83 [Cheng Pan] disable delta test
    98d323b95 [Cheng Pan] fix
    2e782c00b [Cheng Pan] log4j-slf4j2-impl
    fde4bb6ba [Cheng Pan] spark-4.0.0-preview2
    
    Authored-by: Cheng Pan <[email protected]>
    Signed-off-by: Cheng Pan <[email protected]>
---
 .github/workflows/master.yml                       |  2 +-
 .../hive/command/DropNamespaceSuite.scala          |  3 +-
 .../kyuubi/engine/spark/SparkSQLEngine.scala       |  2 +-
 .../engine/spark/util/SparkCatalogUtils.scala      |  2 +-
 .../spark/operation/SparkOperationSuite.scala      |  1 -
 pom.xml                                            | 37 ++++++++++------------
 6 files changed, 22 insertions(+), 25 deletions(-)

diff --git a/.github/workflows/master.yml b/.github/workflows/master.yml
index b4794b8de..109c9b45b 100644
--- a/.github/workflows/master.yml
+++ b/.github/workflows/master.yml
@@ -77,7 +77,7 @@ jobs:
             comment: 'verify-on-spark-3.4-binary'
           - java: 17
             spark: '3.5'
-            spark-archive: '-Pscala-2.13 -Dspark.archive.mirror=https://archive.apache.org/dist/spark/spark-4.0.0-preview1 -Dspark.archive.name=spark-4.0.0-preview1-bin-hadoop3.tgz'
+            spark-archive: '-Pscala-2.13 -Dspark.archive.mirror=https://archive.apache.org/dist/spark/spark-4.0.0-preview2 -Dspark.archive.name=spark-4.0.0-preview2-bin-hadoop3.tgz'
             exclude-tags: '-Dmaven.plugin.scalatest.exclude.tags=org.scalatest.tags.Slow,org.apache.kyuubi.tags.DeltaTest,org.apache.kyuubi.tags.IcebergTest,org.apache.kyuubi.tags.PaimonTest,org.apache.kyuubi.tags.SparkLocalClusterTest'
             comment: 'verify-on-spark-4.0-binary'
     env:
diff --git a/extensions/spark/kyuubi-spark-connector-hive/src/test/scala/org/apache/kyuubi/spark/connector/hive/command/DropNamespaceSuite.scala b/extensions/spark/kyuubi-spark-connector-hive/src/test/scala/org/apache/kyuubi/spark/connector/hive/command/DropNamespaceSuite.scala
index eebfbe488..9f4a64efc 100644
--- a/extensions/spark/kyuubi-spark-connector-hive/src/test/scala/org/apache/kyuubi/spark/connector/hive/command/DropNamespaceSuite.scala
+++ b/extensions/spark/kyuubi-spark-connector-hive/src/test/scala/org/apache/kyuubi/spark/connector/hive/command/DropNamespaceSuite.scala
@@ -63,7 +63,8 @@ trait DropNamespaceSuiteBase extends DDLCommandTestUtils {
       sql(s"DROP NAMESPACE $catalogName.unknown")
     }.getMessage
     assert(message.contains(s"'unknown' not found") ||
-      message.contains(s"The schema `unknown` cannot be found"))
+      message.contains(s"The schema `unknown` cannot be found") ||
+      message.contains("SCHEMA_NOT_FOUND"))
   }
 
   test("drop non-empty namespace with a non-cascading mode") {
diff --git a/externals/kyuubi-spark-sql-engine/src/main/scala/org/apache/kyuubi/engine/spark/SparkSQLEngine.scala b/externals/kyuubi-spark-sql-engine/src/main/scala/org/apache/kyuubi/engine/spark/SparkSQLEngine.scala
index d2e205d31..02d2a7afb 100644
--- a/externals/kyuubi-spark-sql-engine/src/main/scala/org/apache/kyuubi/engine/spark/SparkSQLEngine.scala
+++ b/externals/kyuubi-spark-sql-engine/src/main/scala/org/apache/kyuubi/engine/spark/SparkSQLEngine.scala
@@ -263,7 +263,7 @@ object SparkSQLEngine extends Logging {
     // "Cannot mutate ReadOnlySQLConf" exception when task calling 
HiveResult.getBinaryFormatter.
     // Here we follow the HiveResult.getBinaryFormatter behavior to set it to 
UTF8 if configuration
     // is absent to reserve the legacy behavior for compatibility.
-    _sparkConf.setIfMissing("spark.sql.binaryOutputStyle", "UTF8")
+    _sparkConf.setIfMissing("spark.sql.binaryOutputStyle", "UTF-8")
     _sparkConf.setIfMissing("spark.master", "local")
     _sparkConf.set(
       "spark.redaction.regex",
diff --git a/externals/kyuubi-spark-sql-engine/src/main/scala/org/apache/kyuubi/engine/spark/util/SparkCatalogUtils.scala b/externals/kyuubi-spark-sql-engine/src/main/scala/org/apache/kyuubi/engine/spark/util/SparkCatalogUtils.scala
index ff4564e54..b9a5028ac 100644
--- a/externals/kyuubi-spark-sql-engine/src/main/scala/org/apache/kyuubi/engine/spark/util/SparkCatalogUtils.scala
+++ b/externals/kyuubi-spark-sql-engine/src/main/scala/org/apache/kyuubi/engine/spark/util/SparkCatalogUtils.scala
@@ -102,7 +102,7 @@ object SparkCatalogUtils extends Logging {
   private def getGlobalTempViewManager(
       spark: SparkSession,
       schemaPattern: String): Seq[String] = {
-    val database = spark.sharedState.globalTempViewManager.database
+    val database = spark.conf.get("spark.sql.globalTempDatabase")
     Option(database).filter(_.matches(schemaPattern)).toSeq
   }
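In 4.0.0-preview2 the `sharedState.globalTempViewManager` accessor is apparently no longer reachable from engine code, so the database name is read from the `spark.sql.globalTempDatabase` conf instead, which defaults to "global_temp". A self-contained sketch of the new lookup, assuming an active session:

```scala
import org.apache.spark.sql.SparkSession

// Sketch of the conf-based lookup used above; "global_temp" is Spark's
// default for spark.sql.globalTempDatabase.
def globalTempDatabases(spark: SparkSession, schemaPattern: String): Seq[String] = {
  val database = spark.conf.get("spark.sql.globalTempDatabase")
  Option(database).filter(_.matches(schemaPattern)).toSeq
}
```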
 
diff --git a/externals/kyuubi-spark-sql-engine/src/test/scala/org/apache/kyuubi/engine/spark/operation/SparkOperationSuite.scala b/externals/kyuubi-spark-sql-engine/src/test/scala/org/apache/kyuubi/engine/spark/operation/SparkOperationSuite.scala
index 0a2fd3473..a5c911ff3 100644
--- a/externals/kyuubi-spark-sql-engine/src/test/scala/org/apache/kyuubi/engine/spark/operation/SparkOperationSuite.scala
+++ b/externals/kyuubi-spark-sql-engine/src/test/scala/org/apache/kyuubi/engine/spark/operation/SparkOperationSuite.scala
@@ -514,7 +514,6 @@ class SparkOperationSuite extends WithSparkSQLEngine with HiveMetadataTests with
       assert(status.getStatusCode === TStatusCode.ERROR_STATUS)
       if (SPARK_ENGINE_RUNTIME_VERSION >= "3.4") {
         assert(errorMessage.contains("[SCHEMA_NOT_FOUND]"))
-        assert(errorMessage.contains(s"The schema `$dbName` cannot be found."))
       } else {
         assert(errorMessage.contains(s"Database '$dbName' not found"))
       }
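The exact-sentence assertion is dropped here, apparently because preview2 rewords the SCHEMA_NOT_FOUND message again, leaving only the stable bracketed error-class tag to assert on for 3.4+. A standalone sketch of the resulting gate, with stand-ins for the suite's fields:

```scala
// Stand-ins for the suite's SPARK_ENGINE_RUNTIME_VERSION and captured message;
// plain string comparison happens to suffice for these one-dot versions.
val sparkRuntimeVersion = "4.0"
val errorMessage = "[SCHEMA_NOT_FOUND] ..."
if (sparkRuntimeVersion >= "3.4") {
  assert(errorMessage.contains("[SCHEMA_NOT_FOUND]")) // stable across rewordings
}
```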
diff --git a/pom.xml b/pom.xml
index bc9abe337..9632b773f 100644
--- a/pom.xml
+++ b/pom.xml
@@ -458,15 +458,6 @@
                 <artifactId>spark-core_${scala.binary.version}</artifactId>
                 <version>${spark.version}</version>
                 <exclusions>
-                    <!--  Use log4j2 -->
-                    <exclusion>
-                        <groupId>log4j</groupId>
-                        <artifactId>log4j</artifactId>
-                    </exclusion>
-                    <exclusion>
-                        <groupId>org.slf4j</groupId>
-                        <artifactId>slf4j-log4j12</artifactId>
-                    </exclusion>
                     <!-- SPARK-40511 upgrade SLF4J2, which is not compatible 
w/ SLF4J1 -->
                     <exclusion>
                         <groupId>org.apache.logging.log4j</groupId>
@@ -485,6 +476,13 @@
                 <groupId>org.apache.spark</groupId>
                 <artifactId>spark-sql_${scala.binary.version}</artifactId>
                 <version>${spark.version}</version>
+                <exclusions>
+                    <!-- SPARK-40511 upgrade SLF4J2, which is not compatible w/ SLF4J1 -->
+                    <exclusion>
+                        <groupId>org.apache.logging.log4j</groupId>
+                        <artifactId>log4j-slf4j2-impl</artifactId>
+                    </exclusion>
+                </exclusions>
             </dependency>
 
             <dependency>
@@ -499,15 +497,6 @@
                 <version>${spark.version}</version>
                 <type>test-jar</type>
                 <exclusions>
-                    <!--  Use log4j2 -->
-                    <exclusion>
-                        <groupId>log4j</groupId>
-                        <artifactId>log4j</artifactId>
-                    </exclusion>
-                    <exclusion>
-                        <groupId>org.slf4j</groupId>
-                        <artifactId>slf4j-log4j12</artifactId>
-                    </exclusion>
                     <!-- SPARK-40511 upgrade SLF4J2, which is not compatible 
w/ SLF4J1 -->
                     <exclusion>
                         <groupId>org.apache.logging.log4j</groupId>
@@ -528,6 +517,13 @@
                 <artifactId>spark-sql_${scala.binary.version}</artifactId>
                 <version>${spark.version}</version>
                 <type>test-jar</type>
+                <exclusions>
+                    <!-- SPARK-40511 upgrade SLF4J2, which is not compatible w/ SLF4J1 -->
+                    <exclusion>
+                        <groupId>org.apache.logging.log4j</groupId>
+                        <artifactId>log4j-slf4j2-impl</artifactId>
+                    </exclusion>
+                </exclusions>
             </dependency>
 
             <dependency>
@@ -2016,9 +2012,10 @@
                 <module>extensions/spark/kyuubi-spark-connector-hive</module>
             </modules>
             <properties>
-                <spark.version>4.0.0-preview1</spark.version>
+                <spark.version>4.0.0-preview2</spark.version>
                 <spark.binary.version>4.0</spark.binary.version>
                 <antlr4.version>4.13.1</antlr4.version>
+                <!-- TODO: update once Delta support Spark 4.0.0-preview2 -->
                 <delta.version>4.0.0rc1</delta.version>
                 
<delta.artifact>delta-spark_${scala.binary.version}</delta.artifact>
                 <!-- TODO: update once Hudi support Spark 4.0 -->
@@ -2027,7 +2024,7 @@
                <iceberg.artifact>iceberg-spark-runtime-3.5_${scala.binary.version}</iceberg.artifact>
                 <!-- TODO: update once Paimon support Spark 4.0 -->
                 <paimon.artifact>paimon-spark-3.5</paimon.artifact>
-                <maven.plugin.scalatest.exclude.tags>org.scalatest.tags.Slow,org.apache.kyuubi.tags.IcebergTest,org.apache.kyuubi.tags.PaimonTest,org.apache.kyuubi.tags.HudiTest</maven.plugin.scalatest.exclude.tags>
+                <maven.plugin.scalatest.exclude.tags>org.scalatest.tags.Slow,org.apache.kyuubi.tags.DeltaTest,org.apache.kyuubi.tags.IcebergTest,org.apache.kyuubi.tags.PaimonTest,org.apache.kyuubi.tags.HudiTest</maven.plugin.scalatest.exclude.tags>
                <spark.archive.name>spark-${spark.version}-bin-hadoop3.tgz</spark.archive.name>
             </properties>
         </profile>
