This is an automated email from the ASF dual-hosted git repository.
chengpan pushed a commit to branch master
in repository https://gitbox.apache.org/repos/asf/kyuubi.git
The following commit(s) were added to refs/heads/master by this push:
new 6933a9158 [KYUUBI #6451] Bump Hudi 0.15.0 and enable Hudi authZ test
for Spark 3.5
6933a9158 is described below
commit 6933a915882139401dbe26204cd69aa33eab20aa
Author: Cheng Pan <[email protected]>
AuthorDate: Wed Jun 5 12:33:29 2024 +0800
[KYUUBI #6451] Bump Hudi 0.15.0 and enable Hudi authZ test for Spark 3.5
# :mag: Description
Kyuubi uses the Hudi Spark bundle jar in the authZ module for testing. Hudi
0.15 brings Spark 3.5 and Scala 2.13 support, which also allows removing the hacky profile
`spark-3.5` workaround.
## Types of changes :bookmark:
- [ ] Bugfix (non-breaking change which fixes an issue)
- [ ] New feature (non-breaking change which adds functionality)
- [ ] Breaking change (fix or feature that would cause existing
functionality to change)
## Test Plan 🧪
Pass GHA.
---
# Checklist 📋
- [x] This patch was not authored or co-authored using [Generative
Tooling](https://www.apache.org/legal/generative-tooling.html)
**Be nice. Be informative.**
Closes #6451 from pan3793/hudi-0.15.
Closes #6451
98d6e97c5 [Cheng Pan] fix
2d31307da [Cheng Pan] remove spark-authz-hudi-test
8896f8c3f [Cheng Pan] Enable hudi test
7e9a7c7ae [Cheng Pan] Bump Hudi 0.15.0
Authored-by: Cheng Pan <[email protected]>
Signed-off-by: Cheng Pan <[email protected]>
---
.github/workflows/master.yml | 2 +-
extensions/spark/kyuubi-spark-authz/pom.xml | 23 +++---------
.../HudiCatalogRangerSparkExtensionSuite.scala | 43 +++++++---------------
pom.xml | 4 +-
4 files changed, 21 insertions(+), 51 deletions(-)
diff --git a/.github/workflows/master.yml b/.github/workflows/master.yml
index 41394a520..0999b97ea 100644
--- a/.github/workflows/master.yml
+++ b/.github/workflows/master.yml
@@ -105,7 +105,7 @@ jobs:
fi
TEST_MODULES="dev/kyuubi-codecov"
./build/mvn clean install ${MVN_OPT} -pl ${TEST_MODULES} -am \
- -Pjava-${{ matrix.java }} -Pspark-${{ matrix.spark }}
-Pspark-authz-hudi-test ${{ matrix.spark-archive }} ${{ matrix.exclude-tags }}
+ -Pjava-${{ matrix.java }} -Pspark-${{ matrix.spark }} ${{
matrix.spark-archive }} ${{ matrix.exclude-tags }}
- name: Code coverage
if: |
matrix.java == 8 &&
diff --git a/extensions/spark/kyuubi-spark-authz/pom.xml
b/extensions/spark/kyuubi-spark-authz/pom.xml
index 71048dee8..c3fde22ec 100644
--- a/extensions/spark/kyuubi-spark-authz/pom.xml
+++ b/extensions/spark/kyuubi-spark-authz/pom.xml
@@ -380,6 +380,12 @@
<artifactId>${delta.artifact}_${scala.binary.version}</artifactId>
<scope>test</scope>
</dependency>
+
+ <dependency>
+ <groupId>org.apache.hudi</groupId>
+
<artifactId>hudi-spark${hudi.spark.binary.version}-bundle_${scala.binary.version}</artifactId>
+ <scope>test</scope>
+ </dependency>
</dependencies>
<build>
@@ -393,23 +399,6 @@
</build>
<profiles>
- <!--
- Add spark-authz-hudi-test profile here to avoid import Apache Hudi
when enable scala-2.13.
- Can remove this profile after Apache Hudi support Scala 2.13.
- https://issues.apache.org/jira/browse/HUDI-6296
- -->
- <profile>
- <id>spark-authz-hudi-test</id>
- <dependencies>
- <dependency>
- <groupId>org.apache.hudi</groupId>
-
<artifactId>hudi-spark${hudi.spark.binary.version}-bundle_${scala.binary.version}</artifactId>
- <version>${hudi.version}</version>
- <scope>test</scope>
- </dependency>
- </dependencies>
- </profile>
-
<profile>
<id>gen-policy</id>
<build>
diff --git
a/extensions/spark/kyuubi-spark-authz/src/test/scala/org/apache/kyuubi/plugin/spark/authz/ranger/HudiCatalogRangerSparkExtensionSuite.scala
b/extensions/spark/kyuubi-spark-authz/src/test/scala/org/apache/kyuubi/plugin/spark/authz/ranger/HudiCatalogRangerSparkExtensionSuite.scala
index 72d4130ef..8852aec1d 100644
---
a/extensions/spark/kyuubi-spark-authz/src/test/scala/org/apache/kyuubi/plugin/spark/authz/ranger/HudiCatalogRangerSparkExtensionSuite.scala
+++
b/extensions/spark/kyuubi-spark-authz/src/test/scala/org/apache/kyuubi/plugin/spark/authz/ranger/HudiCatalogRangerSparkExtensionSuite.scala
@@ -17,7 +17,6 @@
package org.apache.kyuubi.plugin.spark.authz.ranger
import org.apache.spark.SparkConf
-import org.scalatest.Outcome
import org.apache.kyuubi.Utils
import org.apache.kyuubi.plugin.spark.authz.AccessControlException
@@ -34,16 +33,9 @@ import
org.apache.kyuubi.util.AssertionUtils.interceptEndsWith
@HudiTest
class HudiCatalogRangerSparkExtensionSuite extends RangerSparkExtensionSuite {
override protected val catalogImpl: String = "in-memory"
- // TODO: Apache Hudi not support Spark 3.5 and Scala 2.13 yet,
- // should change after Apache Hudi support Spark 3.5 and Scala 2.13.
- private def isSupportedVersion = !isSparkV35OrGreater && !isScalaV213
override protected val sqlExtensions: String =
- if (isSupportedVersion) {
- "org.apache.spark.sql.hudi.HoodieSparkSessionExtension"
- } else {
- ""
- }
+ "org.apache.spark.sql.hudi.HoodieSparkSessionExtension"
override protected val extraSparkConf: SparkConf =
new SparkConf()
@@ -55,32 +47,23 @@ class HudiCatalogRangerSparkExtensionSuite extends
RangerSparkExtensionSuite {
val outputTable1 = "outputTable_hoodie"
val index1 = "table_hoodie_index1"
- override def withFixture(test: NoArgTest): Outcome = {
- assume(isSupportedVersion)
- test()
- }
-
override def beforeAll(): Unit = {
- if (isSupportedVersion) {
- if (isSparkV32OrGreater) {
- spark.conf.set(
- s"spark.sql.catalog.$sparkCatalog",
- "org.apache.spark.sql.hudi.catalog.HoodieCatalog")
- spark.conf.set(s"spark.sql.catalog.$sparkCatalog.type", "hadoop")
- spark.conf.set(
- s"spark.sql.catalog.$sparkCatalog.warehouse",
- Utils.createTempDir("hudi-hadoop").toString)
- }
- super.beforeAll()
+ if (isSparkV32OrGreater) {
+ spark.conf.set(
+ s"spark.sql.catalog.$sparkCatalog",
+ "org.apache.spark.sql.hudi.catalog.HoodieCatalog")
+ spark.conf.set(s"spark.sql.catalog.$sparkCatalog.type", "hadoop")
+ spark.conf.set(
+ s"spark.sql.catalog.$sparkCatalog.warehouse",
+ Utils.createTempDir("hudi-hadoop").toString)
}
+ super.beforeAll()
}
override def afterAll(): Unit = {
- if (isSupportedVersion) {
- super.afterAll()
- spark.sessionState.catalog.reset()
- spark.sessionState.conf.clear()
- }
+ super.afterAll()
+ spark.sessionState.catalog.reset()
+ spark.sessionState.conf.clear()
}
test("AlterTableCommand") {
diff --git a/pom.xml b/pom.xml
index 6838f71d1..576020f23 100644
--- a/pom.xml
+++ b/pom.xml
@@ -156,7 +156,7 @@
<hive.archive.download.skip>false</hive.archive.download.skip>
<httpclient.version>4.5.14</httpclient.version>
<httpcore.version>4.4.16</httpcore.version>
- <hudi.version>0.14.0</hudi.version>
+ <hudi.version>0.15.0</hudi.version>
<hudi.spark.binary.version>${spark.binary.version}</hudi.spark.binary.version>
<iceberg.version>1.5.2</iceberg.version>
<jackson.version>2.15.4</jackson.version>
@@ -2028,8 +2028,6 @@
<properties>
<delta.artifact>delta-spark</delta.artifact>
<delta.version>3.2.0</delta.version>
- <!-- Remove this when Hudi supports Spark 3.5 -->
- <hudi.spark.binary.version>3.4</hudi.spark.binary.version>
<spark.version>3.5.1</spark.version>
<spark.binary.version>3.5</spark.binary.version>
<maven.plugin.scalatest.exclude.tags>org.scalatest.tags.Slow</maven.plugin.scalatest.exclude.tags>