This is an automated email from the ASF dual-hosted git repository.
chengpan pushed a commit to branch branch-1.9
in repository https://gitbox.apache.org/repos/asf/kyuubi.git
The following commit(s) were added to refs/heads/branch-1.9 by this push:
new 65761590b [KYUUBI #6699] Bump Spark 4.0.0-preview2
65761590b is described below
commit 65761590bb7c61d3e3406397365c5c1b4127988c
Author: Cheng Pan <[email protected]>
AuthorDate: Mon Sep 23 17:42:48 2024 +0800
[KYUUBI #6699] Bump Spark 4.0.0-preview2
Spark 4.0.0-preview2 RC1 passed the vote
https://lists.apache.org/thread/4ctj2mlgs4q2yb4hdw2jy4z34p5yw2b1
- [ ] Bugfix (non-breaking change which fixes an issue)
- [x] New feature (non-breaking change which adds functionality)
- [ ] Breaking change (fix or feature that would cause existing functionality to change)
Pass GHA.
---
- [x] This patch was not authored or co-authored using [Generative Tooling](https://www.apache.org/legal/generative-tooling.html)
Closes #6699 from pan3793/spark-4.0.0-preview2.
Closes #6699
2db1f645d [Cheng Pan] 4.0.0-preview2
42055bb1e [Cheng Pan] fix
d29c0ef83 [Cheng Pan] disable delta test
98d323b95 [Cheng Pan] fix
2e782c00b [Cheng Pan] log4j-slf4j2-impl
fde4bb6ba [Cheng Pan] spark-4.0.0-preview2
Authored-by: Cheng Pan <[email protected]>
Signed-off-by: Cheng Pan <[email protected]>
(cherry picked from commit 1bfc8c584039b3ca28012bc292312d3521530b12)
Signed-off-by: Cheng Pan <[email protected]>
---
.../spark/connector/hive/command/DropNamespaceSuite.scala | 3 ++-
.../org/apache/kyuubi/engine/spark/SparkSQLEngine.scala | 2 +-
.../kyuubi/engine/spark/util/SparkCatalogUtils.scala | 2 +-
.../engine/spark/operation/SparkOperationSuite.scala | 1 -
pom.xml | 14 ++++++++++++++
5 files changed, 18 insertions(+), 4 deletions(-)
diff --git a/extensions/spark/kyuubi-spark-connector-hive/src/test/scala/org/apache/kyuubi/spark/connector/hive/command/DropNamespaceSuite.scala b/extensions/spark/kyuubi-spark-connector-hive/src/test/scala/org/apache/kyuubi/spark/connector/hive/command/DropNamespaceSuite.scala
index eebfbe488..9f4a64efc 100644
--- a/extensions/spark/kyuubi-spark-connector-hive/src/test/scala/org/apache/kyuubi/spark/connector/hive/command/DropNamespaceSuite.scala
+++ b/extensions/spark/kyuubi-spark-connector-hive/src/test/scala/org/apache/kyuubi/spark/connector/hive/command/DropNamespaceSuite.scala
@@ -63,7 +63,8 @@ trait DropNamespaceSuiteBase extends DDLCommandTestUtils {
sql(s"DROP NAMESPACE $catalogName.unknown")
}.getMessage
assert(message.contains(s"'unknown' not found") ||
- message.contains(s"The schema `unknown` cannot be found"))
+ message.contains(s"The schema `unknown` cannot be found") ||
+ message.contains("SCHEMA_NOT_FOUND"))
}
test("drop non-empty namespace with a non-cascading mode") {
diff --git a/externals/kyuubi-spark-sql-engine/src/main/scala/org/apache/kyuubi/engine/spark/SparkSQLEngine.scala b/externals/kyuubi-spark-sql-engine/src/main/scala/org/apache/kyuubi/engine/spark/SparkSQLEngine.scala
index 3707b72d4..f396072f2 100644
--- a/externals/kyuubi-spark-sql-engine/src/main/scala/org/apache/kyuubi/engine/spark/SparkSQLEngine.scala
+++ b/externals/kyuubi-spark-sql-engine/src/main/scala/org/apache/kyuubi/engine/spark/SparkSQLEngine.scala
@@ -263,7 +263,7 @@ object SparkSQLEngine extends Logging {
// "Cannot mutate ReadOnlySQLConf" exception when task calling
HiveResult.getBinaryFormatter.
// Here we follow the HiveResult.getBinaryFormatter behavior to set it to
UTF8 if configuration
// is absent to reserve the legacy behavior for compatibility.
- _sparkConf.setIfMissing("spark.sql.binaryOutputStyle", "UTF8")
+ _sparkConf.setIfMissing("spark.sql.binaryOutputStyle", "UTF-8")
_sparkConf.setIfMissing("spark.master", "local")
_sparkConf.set(
"spark.redaction.regex",
diff --git a/externals/kyuubi-spark-sql-engine/src/main/scala/org/apache/kyuubi/engine/spark/util/SparkCatalogUtils.scala b/externals/kyuubi-spark-sql-engine/src/main/scala/org/apache/kyuubi/engine/spark/util/SparkCatalogUtils.scala
index ff4564e54..b9a5028ac 100644
--- a/externals/kyuubi-spark-sql-engine/src/main/scala/org/apache/kyuubi/engine/spark/util/SparkCatalogUtils.scala
+++ b/externals/kyuubi-spark-sql-engine/src/main/scala/org/apache/kyuubi/engine/spark/util/SparkCatalogUtils.scala
@@ -102,7 +102,7 @@ object SparkCatalogUtils extends Logging {
private def getGlobalTempViewManager(
spark: SparkSession,
schemaPattern: String): Seq[String] = {
- val database = spark.sharedState.globalTempViewManager.database
+ val database = spark.conf.get("spark.sql.globalTempDatabase")
Option(database).filter(_.matches(schemaPattern)).toSeq
}
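[Editor's sketch, not part of the patch: the change above stops reading the database name through sharedState.globalTempViewManager and instead queries the spark.sql.globalTempDatabase configuration, which the engine can read at runtime. A minimal illustration of the conf-based lookup (object name assumed); the conf is static and defaults to "global_temp" unless overridden at startup.]

import org.apache.spark.sql.SparkSession

object GlobalTempDbDemo {
  def main(args: Array[String]): Unit = {
    val spark = SparkSession.builder().master("local[1]").getOrCreate()
    // spark.sql.globalTempDatabase is a static SQL conf: it cannot be
    // changed after startup, but it can be read through spark.conf.
    println(spark.conf.get("spark.sql.globalTempDatabase")) // global_temp
    spark.stop()
  }
}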
diff --git a/externals/kyuubi-spark-sql-engine/src/test/scala/org/apache/kyuubi/engine/spark/operation/SparkOperationSuite.scala b/externals/kyuubi-spark-sql-engine/src/test/scala/org/apache/kyuubi/engine/spark/operation/SparkOperationSuite.scala
index fb9873fd0..0a910405e 100644
--- a/externals/kyuubi-spark-sql-engine/src/test/scala/org/apache/kyuubi/engine/spark/operation/SparkOperationSuite.scala
+++ b/externals/kyuubi-spark-sql-engine/src/test/scala/org/apache/kyuubi/engine/spark/operation/SparkOperationSuite.scala
@@ -514,7 +514,6 @@ class SparkOperationSuite extends WithSparkSQLEngine with HiveMetadataTests with
assert(status.getStatusCode === TStatusCode.ERROR_STATUS)
if (SPARK_ENGINE_RUNTIME_VERSION >= "3.4") {
assert(errorMessage.contains("[SCHEMA_NOT_FOUND]"))
- assert(errorMessage.contains(s"The schema `$dbName` cannot be found."))
} else {
assert(errorMessage.contains(s"Database '$dbName' not found"))
}
diff --git a/pom.xml b/pom.xml
index 41517ed0b..1117c9161 100644
--- a/pom.xml
+++ b/pom.xml
@@ -467,6 +467,13 @@
<groupId>org.apache.spark</groupId>
<artifactId>spark-sql_${scala.binary.version}</artifactId>
<version>${spark.version}</version>
+ <exclusions>
+ <!-- SPARK-40511 upgrade SLF4J2, which is not compatible w/ SLF4J1 -->
+ <exclusion>
+ <groupId>org.apache.logging.log4j</groupId>
+ <artifactId>log4j-slf4j2-impl</artifactId>
+ </exclusion>
+ </exclusions>
</dependency>
<dependency>
@@ -510,6 +517,13 @@
<artifactId>spark-sql_${scala.binary.version}</artifactId>
<version>${spark.version}</version>
<type>test-jar</type>
+ <exclusions>
+ <!-- SPARK-40511 upgrade SLF4J2, which is not compatible w/ SLF4J1 -->
+ <exclusion>
+ <groupId>org.apache.logging.log4j</groupId>
+ <artifactId>log4j-slf4j2-impl</artifactId>
+ </exclusion>
+ </exclusions>
</dependency>
<dependency>