This is an automated email from the ASF dual-hosted git repository.
yihua pushed a commit to branch master
in repository https://gitbox.apache.org/repos/asf/hudi.git
The following commit(s) were added to refs/heads/master by this push:
new badee10d290 [HUDI-9167] Remove redundant classes in hudi-utilities-slim-bundle (#12962)
badee10d290 is described below
commit badee10d290538b93642452bce0aeaae0ff00b8d
Author: Y Ethan Guo <[email protected]>
AuthorDate: Thu Mar 27 17:01:18 2025 -0700
[HUDI-9167] Remove redundant classes in hudi-utilities-slim-bundle (#12962)
---
packaging/hudi-spark-bundle/pom.xml | 9 -------
packaging/hudi-utilities-slim-bundle/pom.xml | 37 ----------------------------
2 files changed, 46 deletions(-)
diff --git a/packaging/hudi-spark-bundle/pom.xml b/packaging/hudi-spark-bundle/pom.xml
index 708883c58ea..088c02ffc76 100644
--- a/packaging/hudi-spark-bundle/pom.xml
+++ b/packaging/hudi-spark-bundle/pom.xml
@@ -90,7 +90,6 @@
<include>org.eclipse.jetty.websocket:*</include>
<include>org.jetbrains.kotlin:*</include>
<include>org.rocksdb:rocksdbjni</include>
- <include>org.antlr:stringtemplate</include>
<!-- Bundle Jackson JSR310 library since it is not present
in spark 2.x. For spark 3.x this will
bundle the same JSR310 version that is included in
spark runtime -->
<include>com.fasterxml.jackson.datatype:jackson-datatype-jsr310</include>
@@ -99,8 +98,6 @@
<include>com.github.davidmoten:hilbert-curve</include>
<include>com.github.ben-manes.caffeine:caffeine</include>
<include>org.apache.parquet:parquet-avro</include>
- <include>com.twitter:bijection-avro_${scala.binary.version}</include>
- <include>com.twitter:bijection-core_${scala.binary.version}</include>
<include>com.twitter:chill-protobuf</include>
<include>io.dropwizard.metrics:metrics-core</include>
@@ -114,8 +111,6 @@
<include>com.uber.m3:tally-m3</include>
<include>com.uber.m3:tally-core</include>
- <include>com.yammer.metrics:metrics-core</include>
-
<include>org.apache.hive:hive-common</include>
<include>org.apache.hive:hive-service</include>
<include>org.apache.hive:hive-service-rpc</include>
@@ -142,10 +137,6 @@
<pattern>javax.servlet.</pattern>
<shadedPattern>org.apache.hudi.javax.servlet.</shadedPattern>
</relocation>
- <relocation>
- <pattern>com.yammer.metrics.</pattern>
- <shadedPattern>org.apache.hudi.com.yammer.metrics.</shadedPattern>
- </relocation>
<relocation>
<pattern>com.beust.jcommander.</pattern>
<shadedPattern>org.apache.hudi.com.beust.jcommander.</shadedPattern>
diff --git a/packaging/hudi-utilities-slim-bundle/pom.xml b/packaging/hudi-utilities-slim-bundle/pom.xml
index 24fad51e6c6..bbc5a069927 100644
--- a/packaging/hudi-utilities-slim-bundle/pom.xml
+++ b/packaging/hudi-utilities-slim-bundle/pom.xml
@@ -91,30 +91,13 @@
</transformers>
<artifactSet>
<includes combine.children="append">
- <include>org.apache.hudi:hudi-hadoop-common</include>
- <include>org.apache.hudi:hudi-common</include>
- <include>org.apache.hudi:hudi-client-common</include>
<include>org.apache.hudi:hudi-utilities_${scala.binary.version}</include>
- <include>org.apache.hudi:hudi-hadoop-mr</include>
- <include>org.apache.hudi:hudi-timeline-service</include>
- <include>com.yammer.metrics:metrics-core</include>
- <include>com.beust:jcommander</include>
- <include>io.javalin:javalin</include>
- <!-- Spark only has mortbay jetty -->
- <include>org.eclipse.jetty:*</include>
- <include>org.eclipse.jetty.websocket:*</include>
- <include>org.jetbrains.kotlin:*</include>
- <include>org.rocksdb:rocksdbjni</include>
<include>org.antlr:stringtemplate</include>
-
- <include>com.github.davidmoten:guava-mini</include>
- <include>com.github.davidmoten:hilbert-curve</include>
<!-- SPARK-43489 Spark 3.5+ has marked protobuf as provided -->
<include>com.google.protobuf:protobuf-java</include>
<include>com.twitter:bijection-avro_${scala.binary.version}</include>
<include>com.twitter:bijection-core_${scala.binary.version}</include>
- <include>com.twitter:chill-protobuf</include>
<include>io.confluent:kafka-avro-serializer</include>
<include>io.confluent:kafka-schema-serializer</include>
<include>io.confluent:kafka-json-schema-serializer</include>
@@ -124,27 +107,11 @@
<include>io.confluent:kafka-schema-registry-client</include>
<include>io.confluent:kafka-protobuf-serializer</include>
<include>io.confluent:kafka-protobuf-provider</include>
- <include>io.dropwizard.metrics:metrics-core</include>
- <include>io.dropwizard.metrics:metrics-graphite</include>
- <include>io.dropwizard.metrics:metrics-jmx</include>
- <include>io.prometheus:simpleclient</include>
- <include>io.prometheus:simpleclient_httpserver</include>
- <include>io.prometheus:simpleclient_dropwizard</include>
- <include>io.prometheus:simpleclient_pushgateway</include>
- <include>io.prometheus:simpleclient_common</include>
- <include>com.uber.m3:tally-m3</include>
- <include>com.uber.m3:tally-core</include>
<include>org.apache.spark:spark-streaming-kafka-0-10_${scala.binary.version}</include>
<include>org.apache.spark:spark-token-provider-kafka-0-10_${scala.binary.version}</include>
<include>org.apache.kafka:kafka_${scala.binary.version}</include>
<include>com.101tec:zkclient</include>
<include>org.apache.kafka:kafka-clients</include>
- <include>org.apache.curator:curator-framework</include>
- <include>org.apache.curator:curator-client</include>
- <include>org.apache.curator:curator-recipes</include>
- <include>commons-codec:commons-codec</include>
- <include>commons-io:commons-io</include>
- <include>org.openjdk.jol:jol-core</include>
</includes>
</artifactSet>
<relocations combine.children="append">
@@ -155,10 +122,6 @@
<pattern>org.apache.spark.sql.avro.</pattern>
<shadedPattern>org.apache.hudi.org.apache.spark.sql.avro.</shadedPattern>
</relocation>
- <relocation>
- <pattern>com.yammer.metrics.</pattern>
- <shadedPattern>org.apache.hudi.com.yammer.metrics.</shadedPattern>
- </relocation>
<relocation>
<pattern>com.beust.jcommander.</pattern>
<shadedPattern>org.apache.hudi.com.beust.jcommander.</shadedPattern>