This is an automated email from the ASF dual-hosted git repository.

chengpan pushed a commit to branch master
in repository https://gitbox.apache.org/repos/asf/kyuubi.git


The following commit(s) were added to refs/heads/master by this push:
     new 6876f8205 [KYUUBI #4744] [TEST] Remove Hudi integration tests
6876f8205 is described below

commit 6876f82053fe68eb247c6958b2af052072e0a3b5
Author: Cheng Pan <[email protected]>
AuthorDate: Thu Apr 20 22:59:20 2023 +0800

    [KYUUBI #4744] [TEST] Remove Hudi integration tests
    
    ### _Why are the changes needed?_
    
    This PR aims to remove Hudi integration tests from the Kyuubi project.
    
    Actually, there is no obvious benefit to running Hudi tests w/ Kyuubi, 
since the real work happens in the compute engine and its Hudi integration. 
Besides, Hudi's horrible dependency management brings significant maintenance 
efforts to the Kyuubi community.
    
    This change only affects tests, does not affect any functionality.
    
    ### _How was this patch tested?_
    - [ ] Add some test cases that check the changes thoroughly including 
negative and positive cases if possible
    
    - [ ] Add screenshots for manual tests if appropriate
    
    - [x] [Run 
test](https://kyuubi.readthedocs.io/en/master/develop_tools/testing.html#running-tests)
 locally before making a pull request
    
    Closes #4744 from pan3793/remove-hudi.
    
    Closes #4744
    
    ea99f747e [Cheng Pan] Remove Hudi integration tests
    
    Authored-by: Cheng Pan <[email protected]>
    Signed-off-by: Cheng Pan <[email protected]>
---
 .github/workflows/master.yml                       |   4 +-
 externals/kyuubi-spark-sql-engine/pom.xml          |  36 ----
 .../spark/operation/SparkHudiOperationSuite.scala  |  30 ----
 .../test/java/org/apache/kyuubi/tags/HudiTest.java |  29 ----
 .../scala/org/apache/kyuubi/HudiSuiteMixin.scala   |  43 -----
 .../kyuubi/operation/HudiMetadataTests.scala       | 193 ---------------------
 kyuubi-server/pom.xml                              |  36 ----
 .../operation/datalake/HudiOperationSuite.scala    |  34 ----
 pom.xml                                            | 162 +----------------
 9 files changed, 6 insertions(+), 561 deletions(-)

diff --git a/.github/workflows/master.yml b/.github/workflows/master.yml
index 04ecb1a60..6bafea0dc 100644
--- a/.github/workflows/master.yml
+++ b/.github/workflows/master.yml
@@ -57,12 +57,12 @@ jobs:
           - java: 8
             spark: '3.3'
             spark-archive: 
'-Dspark.archive.mirror=https://archive.apache.org/dist/spark/spark-3.1.3 
-Dspark.archive.name=spark-3.1.3-bin-hadoop3.2.tgz'
-            exclude-tags: 
'-Dmaven.plugin.scalatest.exclude.tags=org.scalatest.tags.Slow,org.apache.kyuubi.tags.DeltaTest,org.apache.kyuubi.tags.HudiTest,org.apache.kyuubi.tags.IcebergTest'
+            exclude-tags: 
'-Dmaven.plugin.scalatest.exclude.tags=org.scalatest.tags.Slow,org.apache.kyuubi.tags.DeltaTest,org.apache.kyuubi.tags.IcebergTest'
             comment: 'verify-on-spark-3.1-binary'
           - java: 8
             spark: '3.3'
             spark-archive: 
'-Dspark.archive.mirror=https://archive.apache.org/dist/spark/spark-3.2.4 
-Dspark.archive.name=spark-3.2.4-bin-hadoop3.2.tgz'
-            exclude-tags: 
'-Dmaven.plugin.scalatest.exclude.tags=org.scalatest.tags.Slow,org.apache.kyuubi.tags.DeltaTest,org.apache.kyuubi.tags.HudiTest,org.apache.kyuubi.tags.IcebergTest'
+            exclude-tags: 
'-Dmaven.plugin.scalatest.exclude.tags=org.scalatest.tags.Slow,org.apache.kyuubi.tags.DeltaTest,org.apache.kyuubi.tags.IcebergTest'
             comment: 'verify-on-spark-3.2-binary'
     env:
       SPARK_LOCAL_IP: localhost
diff --git a/externals/kyuubi-spark-sql-engine/pom.xml 
b/externals/kyuubi-spark-sql-engine/pom.xml
index 8c984e4ca..62fe39c00 100644
--- a/externals/kyuubi-spark-sql-engine/pom.xml
+++ b/externals/kyuubi-spark-sql-engine/pom.xml
@@ -146,42 +146,6 @@
             <scope>test</scope>
         </dependency>
 
-        <dependency>
-            <groupId>org.apache.parquet</groupId>
-            <artifactId>parquet-avro</artifactId>
-            <scope>test</scope>
-        </dependency>
-
-        <dependency>
-            <groupId>org.apache.spark</groupId>
-            <artifactId>spark-avro_${scala.binary.version}</artifactId>
-            <scope>test</scope>
-        </dependency>
-
-        <dependency>
-            <groupId>org.apache.hudi</groupId>
-            <artifactId>hudi-common</artifactId>
-            <scope>test</scope>
-        </dependency>
-
-        <dependency>
-            <groupId>org.apache.hudi</groupId>
-            <artifactId>hudi-spark-common_${scala.binary.version}</artifactId>
-            <scope>test</scope>
-        </dependency>
-
-        <dependency>
-            <groupId>org.apache.hudi</groupId>
-            <artifactId>hudi-spark_${scala.binary.version}</artifactId>
-            <scope>test</scope>
-        </dependency>
-
-        <dependency>
-            <groupId>org.apache.hudi</groupId>
-            <artifactId>hudi-spark3.1.x_${scala.binary.version}</artifactId>
-            <scope>test</scope>
-        </dependency>
-
         <dependency>
             <groupId>io.delta</groupId>
             <artifactId>delta-core_${scala.binary.version}</artifactId>
diff --git 
a/externals/kyuubi-spark-sql-engine/src/test/scala/org/apache/kyuubi/engine/spark/operation/SparkHudiOperationSuite.scala
 
b/externals/kyuubi-spark-sql-engine/src/test/scala/org/apache/kyuubi/engine/spark/operation/SparkHudiOperationSuite.scala
deleted file mode 100644
index c5e8be37a..000000000
--- 
a/externals/kyuubi-spark-sql-engine/src/test/scala/org/apache/kyuubi/engine/spark/operation/SparkHudiOperationSuite.scala
+++ /dev/null
@@ -1,30 +0,0 @@
-/*
- * Licensed to the Apache Software Foundation (ASF) under one or more
- * contributor license agreements.  See the NOTICE file distributed with
- * this work for additional information regarding copyright ownership.
- * The ASF licenses this file to You under the Apache License, Version 2.0
- * (the "License"); you may not use this file except in compliance with
- * the License.  You may obtain a copy of the License at
- *
- *    http://www.apache.org/licenses/LICENSE-2.0
- *
- * Unless required by applicable law or agreed to in writing, software
- * distributed under the License is distributed on an "AS IS" BASIS,
- * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
- * See the License for the specific language governing permissions and
- * limitations under the License.
- */
-
-package org.apache.kyuubi.engine.spark.operation
-
-import org.apache.kyuubi.engine.spark.WithSparkSQLEngine
-import org.apache.kyuubi.operation.HudiMetadataTests
-import org.apache.kyuubi.tags.HudiTest
-
-@HudiTest
-class SparkHudiOperationSuite extends WithSparkSQLEngine with 
HudiMetadataTests {
-
-  override protected def jdbcUrl: String = getJdbcUrl
-
-  override def withKyuubiConf: Map[String, String] = extraConfigs
-}
diff --git a/kyuubi-common/src/test/java/org/apache/kyuubi/tags/HudiTest.java 
b/kyuubi-common/src/test/java/org/apache/kyuubi/tags/HudiTest.java
deleted file mode 100644
index 346f146fa..000000000
--- a/kyuubi-common/src/test/java/org/apache/kyuubi/tags/HudiTest.java
+++ /dev/null
@@ -1,29 +0,0 @@
-/*
- * Licensed to the Apache Software Foundation (ASF) under one or more
- * contributor license agreements.  See the NOTICE file distributed with
- * this work for additional information regarding copyright ownership.
- * The ASF licenses this file to You under the Apache License, Version 2.0
- * (the "License"); you may not use this file except in compliance with
- * the License.  You may obtain a copy of the License at
- *
- *    http://www.apache.org/licenses/LICENSE-2.0
- *
- * Unless required by applicable law or agreed to in writing, software
- * distributed under the License is distributed on an "AS IS" BASIS,
- * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
- * See the License for the specific language governing permissions and
- * limitations under the License.
- */
-
-package org.apache.kyuubi.tags;
-
-import java.lang.annotation.ElementType;
-import java.lang.annotation.Retention;
-import java.lang.annotation.RetentionPolicy;
-import java.lang.annotation.Target;
-import org.scalatest.TagAnnotation;
-
-@TagAnnotation
-@Retention(RetentionPolicy.RUNTIME)
-@Target({ElementType.METHOD, ElementType.TYPE})
-public @interface HudiTest {}
diff --git 
a/kyuubi-common/src/test/scala/org/apache/kyuubi/HudiSuiteMixin.scala 
b/kyuubi-common/src/test/scala/org/apache/kyuubi/HudiSuiteMixin.scala
deleted file mode 100644
index 17cc5d27f..000000000
--- a/kyuubi-common/src/test/scala/org/apache/kyuubi/HudiSuiteMixin.scala
+++ /dev/null
@@ -1,43 +0,0 @@
-/*
- * Licensed to the Apache Software Foundation (ASF) under one or more
- * contributor license agreements.  See the NOTICE file distributed with
- * this work for additional information regarding copyright ownership.
- * The ASF licenses this file to You under the Apache License, Version 2.0
- * (the "License"); you may not use this file except in compliance with
- * the License.  You may obtain a copy of the License at
- *
- *    http://www.apache.org/licenses/LICENSE-2.0
- *
- * Unless required by applicable law or agreed to in writing, software
- * distributed under the License is distributed on an "AS IS" BASIS,
- * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
- * See the License for the specific language governing permissions and
- * limitations under the License.
- */
-
-package org.apache.kyuubi
-
-import java.nio.file.Path
-
-trait HudiSuiteMixin extends DataLakeSuiteMixin {
-
-  override protected def format: String = "hudi"
-
-  override protected def catalog: String = "spark_catalog"
-
-  override protected def warehouse: Path = Utils.createTempDir()
-
-  override protected def extraJars: String = {
-    System.getProperty("java.class.path")
-      .split(":")
-      .filter(i => i.contains("hudi") || i.contains("spark-avro"))
-      .mkString(",")
-  }
-
-  override protected def extraConfigs = Map(
-    "spark.sql.catalogImplementation" -> "in-memory",
-    "spark.sql.defaultCatalog" -> catalog,
-    "spark.sql.extensions" -> 
"org.apache.spark.sql.hudi.HoodieSparkSessionExtension",
-    "spark.serializer" -> "org.apache.spark.serializer.KryoSerializer",
-    "spark.jars" -> extraJars)
-}
diff --git 
a/kyuubi-common/src/test/scala/org/apache/kyuubi/operation/HudiMetadataTests.scala
 
b/kyuubi-common/src/test/scala/org/apache/kyuubi/operation/HudiMetadataTests.scala
deleted file mode 100644
index e6870a4e3..000000000
--- 
a/kyuubi-common/src/test/scala/org/apache/kyuubi/operation/HudiMetadataTests.scala
+++ /dev/null
@@ -1,193 +0,0 @@
-/*
- * Licensed to the Apache Software Foundation (ASF) under one or more
- * contributor license agreements.  See the NOTICE file distributed with
- * this work for additional information regarding copyright ownership.
- * The ASF licenses this file to You under the Apache License, Version 2.0
- * (the "License"); you may not use this file except in compliance with
- * the License.  You may obtain a copy of the License at
- *
- *    http://www.apache.org/licenses/LICENSE-2.0
- *
- * Unless required by applicable law or agreed to in writing, software
- * distributed under the License is distributed on an "AS IS" BASIS,
- * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
- * See the License for the specific language governing permissions and
- * limitations under the License.
- */
-
-package org.apache.kyuubi.operation
-
-import org.apache.kyuubi.HudiSuiteMixin
-import org.apache.kyuubi.operation.meta.ResultSetSchemaConstant._
-
-trait HudiMetadataTests extends HiveJDBCTestHelper with HudiSuiteMixin {
-
-  test("get catalogs") {
-    withJdbcStatement() { statement =>
-      val metaData = statement.getConnection.getMetaData
-      val catalogs = metaData.getCatalogs
-      catalogs.next()
-      assert(catalogs.getString(TABLE_CAT) === "spark_catalog")
-      assert(!catalogs.next())
-    }
-  }
-
-  test("get schemas") {
-    val dbs = Seq("db1", "db2", "db33", "db44")
-    val dbDflts = Seq("default", "global_temp")
-
-    val catalog = "spark_catalog"
-    withDatabases(dbs: _*) { statement =>
-      dbs.foreach(db => statement.execute(s"CREATE DATABASE IF NOT EXISTS 
$db"))
-      val metaData = statement.getConnection.getMetaData
-
-      Seq("", "*", "%", null, ".*", "_*", "_%", ".%") foreach { pattern =>
-        checkGetSchemas(metaData.getSchemas(catalog, pattern), dbs ++ dbDflts, 
catalog)
-      }
-
-      Seq("db%", "db.*") foreach { pattern =>
-        checkGetSchemas(metaData.getSchemas(catalog, pattern), dbs, catalog)
-      }
-
-      Seq("db_", "db.") foreach { pattern =>
-        checkGetSchemas(metaData.getSchemas(catalog, pattern), dbs.take(2), 
catalog)
-      }
-
-      checkGetSchemas(metaData.getSchemas(catalog, "db1"), Seq("db1"), catalog)
-      checkGetSchemas(metaData.getSchemas(catalog, "db_not_exist"), Seq.empty, 
catalog)
-    }
-  }
-
-  test("get tables") {
-    val table = "table_1_test"
-    val schema = "default"
-    val tableType = "TABLE"
-
-    withJdbcStatement(table) { statement =>
-      statement.execute(
-        s"""
-           | create table $table (
-           |  id int,
-           |  name string,
-           |  price double,
-           |  ts long
-           | ) using $format
-           | options (
-           |   primaryKey = 'id',
-           |   preCombineField = 'ts',
-           |   hoodie.bootstrap.index.class =
-           |   'org.apache.hudi.common.bootstrap.index.NoOpBootstrapIndex'
-           | )
-       """.stripMargin)
-
-      val metaData = statement.getConnection.getMetaData
-      val rs1 = metaData.getTables(null, null, null, null)
-      assert(rs1.next())
-      val catalogName = rs1.getString(TABLE_CAT)
-      assert(catalogName === "spark_catalog" || catalogName === null)
-      assert(rs1.getString(TABLE_SCHEM) === schema)
-      assert(rs1.getString(TABLE_NAME) == table)
-      assert(rs1.getString(TABLE_TYPE) == tableType)
-      assert(!rs1.next())
-
-      val rs2 = metaData.getTables(null, null, "table%", Array("TABLE"))
-      assert(rs2.next())
-      assert(rs2.getString(TABLE_NAME) == table)
-      assert(!rs2.next())
-
-      val rs3 = metaData.getTables(null, "default", "*", Array("VIEW"))
-      assert(!rs3.next())
-    }
-  }
-
-  test("get columns type") {
-    val dataTypes = Seq(
-      "boolean",
-      "int",
-      "bigint",
-      "float",
-      "double",
-      "decimal(38,20)",
-      "decimal(10,2)",
-      "string",
-      "array<bigint>",
-      "array<string>",
-      "date",
-      "timestamp",
-      "struct<`X`: bigint, `Y`: double>",
-      "binary",
-      "struct<`X`: string>")
-    val cols = dataTypes.zipWithIndex.map { case (dt, idx) => s"c$idx" -> dt }
-    val (colNames, _) = cols.unzip
-
-    val metadataCols = Seq(
-      "_hoodie_commit_time",
-      "_hoodie_commit_seqno",
-      "_hoodie_record_key",
-      "_hoodie_partition_path",
-      "_hoodie_file_name")
-
-    val defaultPkCol = "uuid"
-
-    val reservedCols = metadataCols :+ defaultPkCol
-
-    val tableName = "hudi_get_col_operation"
-    val ddl =
-      s"""
-         |CREATE TABLE IF NOT EXISTS $catalog.$defaultSchema.$tableName (
-         |  $defaultPkCol string,
-         |  ${cols.map { case (cn, dt) => cn + " " + dt }.mkString(",\n")}
-         |)
-         |USING hudi""".stripMargin
-
-    withJdbcStatement(tableName) { statement =>
-      statement.execute(ddl)
-
-      val metaData = statement.getConnection.getMetaData
-
-      Seq("%", null, ".*", "c.*") foreach { columnPattern =>
-        val rowSet = metaData.getColumns(catalog, defaultSchema, tableName, 
columnPattern)
-
-        import java.sql.Types._
-        val expectedJavaTypes = Seq(
-          BOOLEAN,
-          INTEGER,
-          BIGINT,
-          FLOAT,
-          DOUBLE,
-          DECIMAL,
-          DECIMAL,
-          VARCHAR,
-          ARRAY,
-          ARRAY,
-          DATE,
-          TIMESTAMP,
-          STRUCT,
-          BINARY,
-          STRUCT)
-
-        var pos = 0
-        while (rowSet.next()) {
-          assert(rowSet.getString(TABLE_CAT) === catalog)
-          assert(rowSet.getString(TABLE_SCHEM) === defaultSchema)
-          assert(rowSet.getString(TABLE_NAME) === tableName)
-          rowSet.getString(COLUMN_NAME) match {
-            case name if reservedCols.contains(name) =>
-              assert(rowSet.getInt(DATA_TYPE) === VARCHAR)
-              assert(rowSet.getString(TYPE_NAME) equalsIgnoreCase "STRING")
-            case _ =>
-              assert(rowSet.getString(COLUMN_NAME) === colNames(pos))
-              assert(rowSet.getInt(DATA_TYPE) === expectedJavaTypes(pos))
-              assert(rowSet.getString(TYPE_NAME) equalsIgnoreCase 
dataTypes(pos))
-              pos += 1
-          }
-        }
-
-        assert(pos === dataTypes.size, "all columns should have been verified")
-      }
-
-      val rowSet = metaData.getColumns(catalog, "*", "not_exist", "not_exist")
-      assert(!rowSet.next())
-    }
-  }
-}
diff --git a/kyuubi-server/pom.xml b/kyuubi-server/pom.xml
index 7408ac5dd..c4585b131 100644
--- a/kyuubi-server/pom.xml
+++ b/kyuubi-server/pom.xml
@@ -427,42 +427,6 @@
             <scope>test</scope>
         </dependency>
 
-        <dependency>
-            <groupId>org.apache.spark</groupId>
-            <artifactId>spark-avro_${scala.binary.version}</artifactId>
-            <scope>test</scope>
-        </dependency>
-
-        <dependency>
-            <groupId>org.apache.parquet</groupId>
-            <artifactId>parquet-avro</artifactId>
-            <scope>test</scope>
-        </dependency>
-
-        <dependency>
-            <groupId>org.apache.hudi</groupId>
-            <artifactId>hudi-common</artifactId>
-            <scope>test</scope>
-        </dependency>
-
-        <dependency>
-            <groupId>org.apache.hudi</groupId>
-            <artifactId>hudi-spark-common_${scala.binary.version}</artifactId>
-            <scope>test</scope>
-        </dependency>
-
-        <dependency>
-            <groupId>org.apache.hudi</groupId>
-            <artifactId>hudi-spark_${scala.binary.version}</artifactId>
-            <scope>test</scope>
-        </dependency>
-
-        <dependency>
-            <groupId>org.apache.hudi</groupId>
-            <artifactId>hudi-spark3.1.x_${scala.binary.version}</artifactId>
-            <scope>test</scope>
-        </dependency>
-
         <dependency>
             <groupId>io.delta</groupId>
             <artifactId>delta-core_${scala.binary.version}</artifactId>
diff --git 
a/kyuubi-server/src/test/scala/org/apache/kyuubi/operation/datalake/HudiOperationSuite.scala
 
b/kyuubi-server/src/test/scala/org/apache/kyuubi/operation/datalake/HudiOperationSuite.scala
deleted file mode 100644
index 0c507504d..000000000
--- 
a/kyuubi-server/src/test/scala/org/apache/kyuubi/operation/datalake/HudiOperationSuite.scala
+++ /dev/null
@@ -1,34 +0,0 @@
-/*
- * Licensed to the Apache Software Foundation (ASF) under one or more
- * contributor license agreements.  See the NOTICE file distributed with
- * this work for additional information regarding copyright ownership.
- * The ASF licenses this file to You under the Apache License, Version 2.0
- * (the "License"); you may not use this file except in compliance with
- * the License.  You may obtain a copy of the License at
- *
- *    http://www.apache.org/licenses/LICENSE-2.0
- *
- * Unless required by applicable law or agreed to in writing, software
- * distributed under the License is distributed on an "AS IS" BASIS,
- * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
- * See the License for the specific language governing permissions and
- * limitations under the License.
- */
-
-package org.apache.kyuubi.operation.datalake
-
-import org.apache.kyuubi.WithKyuubiServer
-import org.apache.kyuubi.config.KyuubiConf
-import org.apache.kyuubi.operation.HudiMetadataTests
-import org.apache.kyuubi.tags.HudiTest
-
-@HudiTest
-class HudiOperationSuite extends WithKyuubiServer with HudiMetadataTests {
-  override protected val conf: KyuubiConf = {
-    val kyuubiConf = KyuubiConf().set(KyuubiConf.ENGINE_IDLE_TIMEOUT, 20000L)
-    extraConfigs.foreach { case (k, v) => kyuubiConf.set(k, v) }
-    kyuubiConf
-  }
-
-  override def jdbcUrl: String = getJdbcUrl
-}
diff --git a/pom.xml b/pom.xml
index 1feae0322..a6e5d2df5 100644
--- a/pom.xml
+++ b/pom.xml
@@ -156,7 +156,6 @@
         <hive.archive.download.skip>false</hive.archive.download.skip>
         <httpclient.version>4.5.14</httpclient.version>
         <httpcore.version>4.4.16</httpcore.version>
-        <hudi.version>0.12.0</hudi.version>
         <iceberg.version>1.2.0</iceberg.version>
         <jackson.version>2.14.2</jackson.version>
         <jakarta.servlet-api.version>4.0.4</jakarta.servlet-api.version>
@@ -225,7 +224,7 @@
         <maven.plugin.scala.version>4.8.0</maven.plugin.scala.version>
         <maven.plugin.surefire.version>3.0.0-M8</maven.plugin.surefire.version>
         <maven.plugin.scalatest.version>2.2.0</maven.plugin.scalatest.version>
-        
<maven.plugin.scalatest.exclude.tags>org.scalatest.tags.Slow,org.apache.kyuubi.tags.HudiTest</maven.plugin.scalatest.exclude.tags>
+        
<maven.plugin.scalatest.exclude.tags>org.scalatest.tags.Slow</maven.plugin.scalatest.exclude.tags>
         
<maven.plugin.scalatest.include.tags></maven.plugin.scalatest.include.tags>
         <maven.plugin.spotless.version>2.30.0</maven.plugin.spotless.version>
         <maven.plugin.jacoco.version>0.8.7</maven.plugin.jacoco.version>
@@ -1240,159 +1239,6 @@
                 <version>${iceberg.version}</version>
             </dependency>
 
-            <!-- Hudi dependency  -->
-            <dependency>
-                <groupId>org.apache.parquet</groupId>
-                <artifactId>parquet-avro</artifactId>
-                <version>${parquet.version}</version>
-            </dependency>
-
-            <dependency>
-                <groupId>org.apache.spark</groupId>
-                <artifactId>spark-avro_${scala.binary.version}</artifactId>
-                <version>${spark.version}</version>
-            </dependency>
-
-            <dependency>
-                <groupId>org.apache.hudi</groupId>
-                <artifactId>hudi-common</artifactId>
-                <version>${hudi.version}</version>
-                <exclusions>
-                    <exclusion>
-                        <groupId>com.fasterxml.jackson.core</groupId>
-                        <artifactId>*</artifactId>
-                    </exclusion>
-                    <exclusion>
-                        <groupId>org.apache.hbase</groupId>
-                        <artifactId>hbase-server</artifactId>
-                    </exclusion>
-                    <exclusion>
-                        <groupId>org.apache.hbase</groupId>
-                        <artifactId>hbase-client</artifactId>
-                    </exclusion>
-                </exclusions>
-            </dependency>
-
-            <dependency>
-                <groupId>org.apache.hudi</groupId>
-                
<artifactId>hudi-spark-common_${scala.binary.version}</artifactId>
-                <version>${hudi.version}</version>
-                <exclusions>
-                    <exclusion>
-                        <groupId>org.scala-lang</groupId>
-                        <artifactId>scala-library</artifactId>
-                    </exclusion>
-                    <exclusion>
-                        <groupId>org.apache.hudi</groupId>
-                        <artifactId>hudi-timeline-service</artifactId>
-                    </exclusion>
-                    <exclusion>
-                        <groupId>io.dropwizard.metrics</groupId>
-                        <artifactId>*</artifactId>
-                    </exclusion>
-                    <exclusion>
-                        <groupId>io.prometheus</groupId>
-                        <artifactId>*</artifactId>
-                    </exclusion>
-                    <exclusion>
-                        <groupId>org.apache.logging.log4j</groupId>
-                        <artifactId>*</artifactId>
-                    </exclusion>
-                    <exclusion>
-                        <groupId>org.apache.curator</groupId>
-                        <artifactId>*</artifactId>
-                    </exclusion>
-                    <exclusion>
-                        <groupId>org.apache.hadoop</groupId>
-                        <artifactId>*</artifactId>
-                    </exclusion>
-                    <exclusion>
-                        <groupId>org.apache.hbase</groupId>
-                        <artifactId>hbase-server</artifactId>
-                    </exclusion>
-                    <exclusion>
-                        <groupId>org.apache.hbase</groupId>
-                        <artifactId>hbase-client</artifactId>
-                    </exclusion>
-                    <exclusion>
-                        <groupId>org.apache.orc</groupId>
-                        <artifactId>*</artifactId>
-                    </exclusion>
-                    <exclusion>
-                        <groupId>org.apache.hudi</groupId>
-                        <artifactId>hudi-aws</artifactId>
-                    </exclusion>
-                    <exclusion>
-                        <groupId>commons-logging</groupId>
-                        <artifactId>commons-logging</artifactId>
-                    </exclusion>
-                    <exclusion>
-                        <groupId>com.fasterxml.jackson.core</groupId>
-                        <artifactId>*</artifactId>
-                    </exclusion>
-                </exclusions>
-            </dependency>
-
-            <dependency>
-                <groupId>org.apache.hudi</groupId>
-                <artifactId>hudi-spark_${scala.binary.version}</artifactId>
-                <version>${hudi.version}</version>
-                <exclusions>
-                    <exclusion>
-                        <groupId>org.scala-lang</groupId>
-                        <artifactId>scala-library</artifactId>
-                    </exclusion>
-                    <exclusion>
-                        <groupId>org.apache.hudi</groupId>
-                        <artifactId>hudi-spark-common_2.11</artifactId>
-                    </exclusion>
-                    <exclusion>
-                        <groupId>org.apache.hudi</groupId>
-                        <artifactId>hudi-spark2_2.11</artifactId>
-                    </exclusion>
-                    <exclusion>
-                        <groupId>org.apache.hudi</groupId>
-                        <artifactId>hudi-spark2-common</artifactId>
-                    </exclusion>
-                    <exclusion>
-                        <groupId>org.apache.curator</groupId>
-                        <artifactId>*</artifactId>
-                    </exclusion>
-                    <exclusion>
-                        <groupId>com.fasterxml.jackson.core</groupId>
-                        <artifactId>*</artifactId>
-                    </exclusion>
-                    <exclusion>
-                        <groupId>com.fasterxml.jackson.module</groupId>
-                        <artifactId>*</artifactId>
-                    </exclusion>
-                    <exclusion>
-                        <groupId>org.apache.logging.log4j</groupId>
-                        <artifactId>*</artifactId>
-                    </exclusion>
-                </exclusions>
-            </dependency>
-
-            <dependency>
-                <groupId>org.apache.hudi</groupId>
-                
<artifactId>hudi-spark3.1.x_${scala.binary.version}</artifactId>
-                <version>${hudi.version}</version>
-                <exclusions>
-                    <exclusion>
-                        <groupId>org.apache.hudi</groupId>
-                        <artifactId>hudi-spark-common_2.11</artifactId>
-                    </exclusion>
-                    <exclusion>
-                        <groupId>org.scala-lang</groupId>
-                        <artifactId>scala-library</artifactId>
-                    </exclusion>
-                    <exclusion>
-                        <groupId>com.fasterxml.jackson.core</groupId>
-                        <artifactId>*</artifactId>
-                    </exclusion>
-                </exclusions>
-            </dependency>
-
             <dependency>
                 <groupId>io.delta</groupId>
                 <artifactId>delta-core_${scala.binary.version}</artifactId>
@@ -2347,7 +2193,7 @@
                 <spark.binary.version>3.2</spark.binary.version>
                 <delta.version>2.0.2</delta.version>
                 
<spark.archive.name>spark-${spark.version}-bin-hadoop3.2.tgz</spark.archive.name>
-                
<maven.plugin.scalatest.exclude.tags>org.scalatest.tags.Slow,org.apache.kyuubi.tags.HudiTest</maven.plugin.scalatest.exclude.tags>
+                
<maven.plugin.scalatest.exclude.tags>org.scalatest.tags.Slow</maven.plugin.scalatest.exclude.tags>
             </properties>
         </profile>
 
@@ -2360,7 +2206,7 @@
                 <module>extensions/spark/kyuubi-spark-connector-kudu</module>
             </modules>
             <properties>
-                
<maven.plugin.scalatest.exclude.tags>org.scalatest.tags.Slow,org.apache.kyuubi.tags.HudiTest</maven.plugin.scalatest.exclude.tags>
+                
<maven.plugin.scalatest.exclude.tags>org.scalatest.tags.Slow</maven.plugin.scalatest.exclude.tags>
             </properties>
         </profile>
 
@@ -2370,7 +2216,7 @@
                 <spark.version>3.5.0-SNAPSHOT</spark.version>
                 <!-- https://github.com/ThreeTen/threeten-extra/issues/226 -->
                 <threeten.version>1.7.0</threeten.version>
-                
<maven.plugin.scalatest.exclude.tags>org.scalatest.tags.Slow,org.apache.kyuubi.tags.DeltaTest,org.apache.kyuubi.tags.IcebergTest,org.apache.kyuubi.tags.HudiTest,org.apache.kyuubi.tags.PySparkTest</maven.plugin.scalatest.exclude.tags>
+                
<maven.plugin.scalatest.exclude.tags>org.scalatest.tags.Slow,org.apache.kyuubi.tags.DeltaTest,org.apache.kyuubi.tags.IcebergTest,org.apache.kyuubi.tags.PySparkTest</maven.plugin.scalatest.exclude.tags>
             </properties>
             <dependencyManagement>
                 <dependencies>

Reply via email to