This is an automated email from the ASF dual-hosted git repository.
chengpan pushed a commit to branch branch-1.9
in repository https://gitbox.apache.org/repos/asf/kyuubi.git
The following commit(s) were added to refs/heads/branch-1.9 by this push:
new 6d6604338 [KYUUBI #6424] TPC-H/DS connector support Spark 4.0
6d6604338 is described below
commit 6d660433802deebd1bc1866c6d9f3953e4e87bf6
Author: Cheng Pan <[email protected]>
AuthorDate: Mon May 27 07:02:52 2024 +0000
[KYUUBI #6424] TPC-H/DS connector support Spark 4.0
# :mag: Description
Adapt changes in SPARK-45857
## Types of changes :bookmark:
- [ ] Bugfix (non-breaking change which fixes an issue)
- [ ] New feature (non-breaking change which adds functionality)
- [ ] Breaking change (fix or feature that would cause existing
functionality to change)
## Test Plan 🧪
```
build/mvn -pl
':kyuubi-spark-connector-tpch_2.13,:kyuubi-spark-connector-tpcds_2.13' \
-Pscala-2.13 -Pspark-master -am clean install -DskipTests
build/mvn -pl
':kyuubi-spark-connector-tpch_2.13,:kyuubi-spark-connector-tpcds_2.13' \
-Pscala-2.13 -Pspark-master test
```
```
[INFO]
------------------------------------------------------------------------
[INFO] Reactor Summary for Kyuubi Spark TPC-DS Connector 1.10.0-SNAPSHOT:
[INFO]
[INFO] Kyuubi Spark TPC-DS Connector ...................... SUCCESS [
53.699 s]
[INFO] Kyuubi Spark TPC-H Connector ....................... SUCCESS [
30.511 s]
[INFO]
------------------------------------------------------------------------
[INFO] BUILD SUCCESS
[INFO]
------------------------------------------------------------------------
[INFO] Total time: 01:24 min
[INFO] Finished at: 2024-05-27T06:01:58Z
[INFO]
------------------------------------------------------------------------
```
---
# Checklist 📝
- [x] This patch was not authored or co-authored using [Generative
Tooling](https://www.apache.org/legal/generative-tooling.html)
**Be nice. Be informative.**
Closes #6424 from pan3793/tpc-conn-4.
Closes #6424
9012a177f [Cheng Pan] TPC-H/DS connector support Spark 4.0
Authored-by: Cheng Pan <[email protected]>
Signed-off-by: Cheng Pan <[email protected]>
(cherry picked from commit 82441671a5f9fb134e5b828ed0c6c5b3db9d1940)
Signed-off-by: Cheng Pan <[email protected]>
---
.../scala/org/apache/kyuubi/spark/connector/tpcds/TPCDSCatalog.scala | 2 +-
.../main/scala/org/apache/kyuubi/spark/connector/tpch/TPCHCatalog.scala | 2 +-
2 files changed, 2 insertions(+), 2 deletions(-)
diff --git
a/extensions/spark/kyuubi-spark-connector-tpcds/src/main/scala/org/apache/kyuubi/spark/connector/tpcds/TPCDSCatalog.scala
b/extensions/spark/kyuubi-spark-connector-tpcds/src/main/scala/org/apache/kyuubi/spark/connector/tpcds/TPCDSCatalog.scala
index 2e1d21131..2438dbbfe 100644
---
a/extensions/spark/kyuubi-spark-connector-tpcds/src/main/scala/org/apache/kyuubi/spark/connector/tpcds/TPCDSCatalog.scala
+++
b/extensions/spark/kyuubi-spark-connector-tpcds/src/main/scala/org/apache/kyuubi/spark/connector/tpcds/TPCDSCatalog.scala
@@ -58,7 +58,7 @@ class TPCDSCatalog extends TableCatalog with
SupportsNamespaces with Logging {
override def listTables(namespace: Array[String]): Array[Identifier] =
namespace match {
case Array(db) if databases contains db =>
tables.map(Identifier.of(namespace, _))
- case _ => throw new NoSuchNamespaceException(namespace.mkString("."))
+ case _ => throw new NoSuchNamespaceException(namespace)
}
override def loadTable(ident: Identifier): SparkTable = (ident.namespace,
ident.name) match {
diff --git
a/extensions/spark/kyuubi-spark-connector-tpch/src/main/scala/org/apache/kyuubi/spark/connector/tpch/TPCHCatalog.scala
b/extensions/spark/kyuubi-spark-connector-tpch/src/main/scala/org/apache/kyuubi/spark/connector/tpch/TPCHCatalog.scala
index 8ba5e0b0b..7c3845272 100644
---
a/extensions/spark/kyuubi-spark-connector-tpch/src/main/scala/org/apache/kyuubi/spark/connector/tpch/TPCHCatalog.scala
+++
b/extensions/spark/kyuubi-spark-connector-tpch/src/main/scala/org/apache/kyuubi/spark/connector/tpch/TPCHCatalog.scala
@@ -58,7 +58,7 @@ class TPCHCatalog extends TableCatalog with
SupportsNamespaces with Logging {
override def listTables(namespace: Array[String]): Array[Identifier] =
namespace match {
case Array(db) if databases contains db =>
tables.map(Identifier.of(namespace, _))
- case _ => throw new NoSuchNamespaceException(namespace.mkString("."))
+ case _ => throw new NoSuchNamespaceException(namespace)
}
override def loadTable(ident: Identifier): SparkTable = (ident.namespace,
ident.name) match {