This is an automated email from the ASF dual-hosted git repository.

jshao pushed a commit to branch main
in repository https://gitbox.apache.org/repos/asf/gravitino.git


The following commit(s) were added to refs/heads/main by this push:
     new c780c37d87 [#9106] refactor(bundle-jars): refactor fileset bundle JARs 
(#9147)
c780c37d87 is described below

commit c780c37d87f6005575977eefa26bfa0ca55852b5
Author: mchades <[email protected]>
AuthorDate: Mon Dec 1 11:00:16 2025 +0800

    [#9106] refactor(bundle-jars): refactor fileset bundle JARs (#9147)
    
    ### What changes were proposed in this pull request?
    
    - change `gravitino-{fs}.jar` from fat jar to thin jar
    - bundle `gravitino-{fs}.jar` with `filesystem-hadoop3-runtime` and
    `iceberg-rest-server`
    
    ### Why are the changes needed?
    
    Fix: #9106
    
    ### Does this PR introduce _any_ user-facing change?
    
    yes
    
    ### How was this patch tested?
    
    CI pass
    
    ---------
    
    Co-authored-by: Copilot <[email protected]>
---
 LICENSE.bin                                        |  1 +
 bundles/aliyun-bundle/build.gradle.kts             | 15 +++++
 bundles/aliyun/build.gradle.kts                    | 68 +++-------------------
 bundles/aws-bundle/build.gradle.kts                | 14 ++++-
 bundles/aws/build.gradle.kts                       | 54 +++++------------
 bundles/azure-bundle/build.gradle.kts              | 12 ++++
 bundles/azure/build.gradle.kts                     | 63 ++++----------------
 bundles/gcp-bundle/build.gradle.kts                | 12 ++++
 bundles/gcp/build.gradle.kts                       | 60 ++++---------------
 .../iceberg-aliyun-bundle}/build.gradle.kts        | 23 +++-----
 .../iceberg-aws-bundle}/build.gradle.kts           | 29 ++++-----
 .../iceberg-azure-bundle}/build.gradle.kts         | 26 ++++-----
 .../iceberg-gcp-bundle}/build.gradle.kts           | 21 +++----
 .../filesystem-hadoop3-runtime/build.gradle.kts    |  4 ++
 .../credential/CredentialProviderDelegator.java    |  3 +-
 dev/docker/gravitino/gravitino-dependency.sh       | 49 +++-------------
 .../iceberg-rest-server-dependency.sh              | 54 +++--------------
 docs/fileset-catalog-with-adls.md                  | 23 ++++----
 docs/fileset-catalog-with-gcs.md                   | 20 +++----
 docs/fileset-catalog-with-oss.md                   | 21 ++++---
 docs/fileset-catalog-with-s3.md                    | 21 ++++---
 docs/iceberg-rest-service.md                       | 18 ++----
 docs/lakehouse-iceberg-catalog.md                  | 28 ++++++---
 docs/security/credential-vending.md                | 18 +++---
 gradle/libs.versions.toml                          |  2 +
 iceberg/iceberg-rest-server/build.gradle.kts       | 18 +++---
 .../integration/test/IcebergRESTADLSTokenIT.java   | 21 +------
 .../test/IcebergRESTAzureAccountKeyIT.java         | 21 +------
 .../iceberg/integration/test/IcebergRESTGCSIT.java | 21 +------
 .../iceberg/integration/test/IcebergRESTOSSIT.java | 23 +-------
 .../integration/test/IcebergRESTOSSSecretIT.java   | 23 +-------
 .../integration/test/IcebergRESTS3TokenIT.java     | 21 +------
 settings.gradle.kts                                |  8 +--
 33 files changed, 262 insertions(+), 553 deletions(-)

diff --git a/LICENSE.bin b/LICENSE.bin
index 753915e4d4..8df23ccdf2 100644
--- a/LICENSE.bin
+++ b/LICENSE.bin
@@ -384,6 +384,7 @@
    OGNL
    Google FlatBuffers
    IPAddress
+   Aliyun SDK OSS
 
    This product bundles various third-party components also under the
    Apache Software Foundation License 1.1
diff --git a/bundles/aliyun-bundle/build.gradle.kts 
b/bundles/aliyun-bundle/build.gradle.kts
index 588bbdf14f..2a4df4042f 100644
--- a/bundles/aliyun-bundle/build.gradle.kts
+++ b/bundles/aliyun-bundle/build.gradle.kts
@@ -26,11 +26,16 @@ plugins {
 
 dependencies {
   implementation(project(":bundles:aliyun"))
+
+  implementation(libs.aliyun.credentials.sdk)
   implementation(libs.commons.collections3)
   implementation(libs.hadoop3.client.api)
   implementation(libs.hadoop3.client.runtime)
   implementation(libs.hadoop3.oss)
   implementation(libs.httpclient)
+  // Aliyun oss SDK depends on this package, and JDK >= 9 requires manual add
+  // 
https://www.alibabacloud.com/help/en/oss/developer-reference/java-installation?spm=a2c63.p38356.0.i1
+  implementation(libs.sun.activation)
 }
 
 tasks.withType(ShadowJar::class.java) {
@@ -41,6 +46,16 @@ tasks.withType(ShadowJar::class.java) {
 
   dependencies {
     exclude(dependency("org.slf4j:slf4j-api"))
+
+    // Exclude Gravitino modules to prevent class duplication and "Split 
Packages" issues.
+    // These modules (api, common, catalogs) are already provided by the 
Gravitino server and gravitino-filesystem-hadoop3-runtime.
+    // Including them here would cause the Relocation rules below to 
incorrectly modify
+    // method signatures (e.g., JsonUtils.anyFieldMapper returning a shaded 
ObjectMapper),
+    // leading to java.lang.NoSuchMethodError at runtime.
+    exclude(project(":api"))
+    exclude(project(":common"))
+    exclude(project(":catalogs:catalog-common"))
+    exclude(project(":catalogs:hadoop-common"))
   }
 
   // Relocate dependencies to avoid conflicts
diff --git a/bundles/aliyun/build.gradle.kts b/bundles/aliyun/build.gradle.kts
index 76e73c8669..c12ba1fb8b 100644
--- a/bundles/aliyun/build.gradle.kts
+++ b/bundles/aliyun/build.gradle.kts
@@ -16,89 +16,39 @@
  * specific language governing permissions and limitations
  * under the License.
  */
-import com.github.jengelman.gradle.plugins.shadow.tasks.ShadowJar
 
 plugins {
   `maven-publish`
   id("java")
-  alias(libs.plugins.shadow)
 }
 
 dependencies {
-  compileOnly(project(":api"))
-  compileOnly(libs.hadoop3.client.api)
-  compileOnly(libs.hadoop3.client.runtime)
-  compileOnly(libs.hadoop3.oss)
-  compileOnly(project(":common"))
-
+  implementation(project(":api")) {
+    exclude("*")
+  }
   implementation(project(":catalogs:catalog-common")) {
     exclude("*")
   }
   implementation(project(":catalogs:hadoop-common")) {
     exclude("*")
   }
+  implementation(project(":common")) {
+    exclude("*")
+  }
 
-  implementation(libs.aliyun.credentials.sdk)
-  implementation(libs.commons.collections3)
-
-  // oss needs StringUtils from commons-lang3 or the following error will 
occur in 3.3.0
-  // java.lang.NoClassDefFoundError: org/apache/commons/lang3/StringUtils
-  // 
org.apache.hadoop.fs.aliyun.oss.AliyunOSSFileSystemStore.initialize(AliyunOSSFileSystemStore.java:111)
-  // 
org.apache.hadoop.fs.aliyun.oss.AliyunOSSFileSystem.initialize(AliyunOSSFileSystem.java:323)
-  // org.apache.hadoop.fs.FileSystem.createFileSystem(FileSystem.java:3611)
   implementation(libs.commons.lang3)
   implementation(libs.guava)
-
-  implementation(libs.httpclient)
   implementation(libs.jackson.databind)
-  implementation(libs.jackson.annotations)
-  implementation(libs.jackson.datatype.jdk8)
-  implementation(libs.jackson.datatype.jsr310)
 
-  // Aliyun oss SDK depends on this package, and JDK >= 9 requires manual add
-  // 
https://www.alibabacloud.com/help/en/oss/developer-reference/java-installation?spm=a2c63.p38356.0.i1
-  implementation(libs.sun.activation)
+  compileOnly(libs.aliyun.credentials.sdk)
+  compileOnly(libs.hadoop3.client.api)
+  compileOnly(libs.hadoop3.oss)
 
-  testImplementation(project(":api"))
-  testImplementation(project(":core"))
-  testImplementation(project(":common"))
   testImplementation(libs.junit.jupiter.api)
   testImplementation(libs.junit.jupiter.params)
   testRuntimeOnly(libs.junit.jupiter.engine)
 }
 
-tasks.withType(ShadowJar::class.java) {
-  isZip64 = true
-  configurations = listOf(project.configurations.runtimeClasspath.get())
-  archiveClassifier.set("")
-  mergeServiceFiles()
-
-  dependencies {
-    exclude(dependency("org.slf4j:slf4j-api"))
-  }
-
-  // Relocate dependencies to avoid conflicts
-  relocate("com.aliyun", "org.apache.gravitino.aliyun.shaded.com.aliyun")
-  relocate("com.fasterxml.jackson", 
"org.apache.gravitino.aliyun.shaded.com.fasterxml.jackson")
-  relocate("com.google", 
"org.apache.gravitino.aliyun.shaded.com.google.common")
-  relocate("com.sun.activation", 
"org.apache.gravitino.aliyun.shaded.com.sun.activation")
-  relocate("com.sun.istack", 
"org.apache.gravitino.aliyun.shaded.com.sun.istack")
-  relocate("com.sun.xml", "org.apache.gravitino.aliyun.shaded.com.sun.xml")
-  relocate("okhttp3", "org.apache.gravitino.aliyun.shaded.okhttp3")
-  relocate("okio", "org.apache.gravitino.aliyun.shaded.okio")
-  relocate("org.apache.commons", 
"org.apache.gravitino.aliyun.shaded.org.apache.commons")
-  relocate("org.apache.http", 
"org.apache.gravitino.aliyun.shaded.org.apache.http")
-  relocate("org.checkerframework", 
"org.apache.gravitino.aliyun.shaded.org.checkerframework")
-  relocate("org.jacoco.agent.rt", 
"org.apache.gravitino.aliyun.shaded.org.jacoco.agent.rt")
-
-  mergeServiceFiles()
-}
-
-tasks.jar {
-  dependsOn(tasks.named("shadowJar"))
-  archiveClassifier.set("empty")
-}
-
 tasks.compileJava {
   dependsOn(":catalogs:catalog-fileset:runtimeJars")
 }
diff --git a/bundles/aws-bundle/build.gradle.kts 
b/bundles/aws-bundle/build.gradle.kts
index 4495b16d5c..b7ee492719 100644
--- a/bundles/aws-bundle/build.gradle.kts
+++ b/bundles/aws-bundle/build.gradle.kts
@@ -28,6 +28,9 @@ plugins {
 dependencies {
   implementation(project(":bundles:aws"))
   implementation(libs.hadoop3.aws)
+  implementation(libs.aws.iam)
+  implementation(libs.aws.policy)
+  implementation(libs.aws.sts)
   implementation(libs.hadoop3.client.api)
   implementation(libs.hadoop3.client.runtime)
 }
@@ -39,6 +42,16 @@ tasks.withType(ShadowJar::class.java) {
 
   dependencies {
     exclude(dependency("org.slf4j:slf4j-api"))
+
+    // Exclude Gravitino modules to prevent class duplication and "Split 
Packages" issues.
+    // These modules (api, common, catalogs) are already provided by the 
Gravitino server and gravitino-filesystem-hadoop3-runtime.
+    // Including them here would cause the Relocation rules below to 
incorrectly modify
+    // method signatures (e.g., JsonUtils.anyFieldMapper returning a shaded 
ObjectMapper),
+    // leading to java.lang.NoSuchMethodError at runtime.
+    exclude(project(":api"))
+    exclude(project(":common"))
+    exclude(project(":catalogs:catalog-common"))
+    exclude(project(":catalogs:hadoop-common"))
   }
 
   relocate("com.fasterxml.jackson", 
"org.apache.gravitino.aws.shaded.com.fasterxml.jackson")
@@ -51,7 +64,6 @@ tasks.withType(ShadowJar::class.java) {
   relocate("org.checkerframework", 
"org.apache.gravitino.aws.shaded.org.checkerframework")
   relocate("org.reactivestreams", 
"org.apache.gravitino.aws.shaded.org.reactivestreams")
   relocate("org.wildfly.openssl", 
"org.apache.gravitino.aws.shaded.org.wildfly.openssl")
-  relocate("software.amazon", 
"org.apache.gravitino.aws.shaded.software.amazon")
 
   mergeServiceFiles()
 }
diff --git a/bundles/aws/build.gradle.kts b/bundles/aws/build.gradle.kts
index 1bc4c55adb..27ba900d5d 100644
--- a/bundles/aws/build.gradle.kts
+++ b/bundles/aws/build.gradle.kts
@@ -16,69 +16,41 @@
  * specific language governing permissions and limitations
  * under the License.
  */
-import com.github.jengelman.gradle.plugins.shadow.tasks.ShadowJar
 
 plugins {
   `maven-publish`
   id("java")
-  alias(libs.plugins.shadow)
 }
 
 dependencies {
-  compileOnly(project(":api"))
-  compileOnly(libs.hadoop3.aws)
-  compileOnly(libs.hadoop3.client.api)
-  compileOnly(libs.hadoop3.client.runtime)
-  compileOnly(project(":common"))
-
+  implementation(project(":api")) {
+    exclude("*")
+  }
   implementation(project(":catalogs:catalog-common")) {
     exclude("*")
   }
   implementation(project(":catalogs:hadoop-common")) {
     exclude("*")
   }
+  implementation(project(":common")) {
+    exclude("*")
+  }
 
-  implementation(libs.aws.iam)
-  implementation(libs.aws.policy)
-  implementation(libs.aws.sts)
   implementation(libs.commons.lang3)
   implementation(libs.guava)
 
-  testImplementation(project(":api"))
-  testImplementation(project(":core"))
-  testImplementation(project(":common"))
+  compileOnly(libs.aws.iam)
+  compileOnly(libs.aws.policy)
+  compileOnly(libs.aws.sts)
+  compileOnly(libs.hadoop3.aws)
+  compileOnly(libs.hadoop3.client.api)
+
+  testImplementation(libs.aws.iam)
   testImplementation(libs.junit.jupiter.api)
   testImplementation(libs.junit.jupiter.params)
   testRuntimeOnly(libs.junit.jupiter.engine)
 }
 
-tasks.withType(ShadowJar::class.java) {
-  isZip64 = true
-  configurations = listOf(project.configurations.runtimeClasspath.get())
-  archiveClassifier.set("")
-
-  dependencies {
-    exclude(dependency("org.slf4j:slf4j-api"))
-  }
-
-  relocate("com.google.common", 
"org.apache.gravitino.aws.shaded.com.google.common")
-  relocate("com.google.errorprone", 
"org.apache.gravitino.aws.shaded.com.google.errorprone")
-  relocate("com.google.thirdparty", 
"org.apache.gravitino.aws.shaded.com.google.thirdparty")
-  relocate("org.apache.commons", 
"org.apache.gravitino.aws.shaded.org.apache.commons")
-  relocate("org.apache.http", 
"org.apache.gravitino.aws.shaded.org.apache.http")
-  relocate("org.checkerframework", 
"org.apache.gravitino.aws.shaded.org.checkerframework")
-  relocate("org.reactivestreams", 
"org.apache.gravitino.aws.shaded.org.reactivestreams")
-  relocate("org.wildfly.openssl", 
"org.apache.gravitino.aws.shaded.org.wildfly.openssl")
-  relocate("software.amazon", 
"org.apache.gravitino.aws.shaded.software.amazon")
-
-  mergeServiceFiles()
-}
-
-tasks.jar {
-  dependsOn(tasks.named("shadowJar"))
-  archiveClassifier.set("empty")
-}
-
 tasks.compileJava {
   dependsOn(":catalogs:catalog-fileset:runtimeJars")
 }
diff --git a/bundles/azure-bundle/build.gradle.kts 
b/bundles/azure-bundle/build.gradle.kts
index df6be64a83..632cc83d53 100644
--- a/bundles/azure-bundle/build.gradle.kts
+++ b/bundles/azure-bundle/build.gradle.kts
@@ -31,6 +31,8 @@ dependencies {
     exclude(group = "commons-logging", module = "commons-logging")
   }
 
+  implementation(libs.azure.identity)
+  implementation(libs.azure.storage.file.datalake)
   implementation(libs.hadoop3.abs)
   implementation(libs.hadoop3.client.api)
   implementation(libs.hadoop3.client.runtime)
@@ -43,6 +45,16 @@ tasks.withType(ShadowJar::class.java) {
 
   dependencies {
     exclude(dependency("org.slf4j:slf4j-api"))
+
+    // Exclude Gravitino modules to prevent class duplication and "Split 
Packages" issues.
+    // These modules (api, common, catalogs) are already provided by the 
Gravitino server and gravitino-filesystem-hadoop3-runtime.
+    // Including them here would cause the Relocation rules below to 
incorrectly modify
+    // method signatures (e.g., JsonUtils.anyFieldMapper returning a shaded 
ObjectMapper),
+    // leading to java.lang.NoSuchMethodError at runtime.
+    exclude(project(":api"))
+    exclude(project(":common"))
+    exclude(project(":catalogs:catalog-common"))
+    exclude(project(":catalogs:hadoop-common"))
   }
 
   // Relocate dependencies to avoid conflicts
diff --git a/bundles/azure/build.gradle.kts b/bundles/azure/build.gradle.kts
index aeb3b7406d..031c7c5e8c 100644
--- a/bundles/azure/build.gradle.kts
+++ b/bundles/azure/build.gradle.kts
@@ -16,79 +16,40 @@
  * specific language governing permissions and limitations
  * under the License.
  */
-import com.github.jengelman.gradle.plugins.shadow.tasks.ShadowJar
 
 plugins {
   `maven-publish`
   id("java")
-  alias(libs.plugins.shadow)
 }
 
 dependencies {
-  compileOnly(project(":api"))
-  compileOnly(libs.hadoop3.abs)
-  compileOnly(libs.hadoop3.client.api)
-  compileOnly(libs.hadoop3.client.runtime)
-  compileOnly(project(":common"))
-
+  implementation(project(":api")) {
+    exclude("*")
+  }
   implementation(project(":catalogs:catalog-common")) {
     exclude("*")
   }
   implementation(project(":catalogs:hadoop-common")) {
     exclude("*")
   }
-
-  implementation(libs.azure.identity)
-  implementation(libs.azure.storage.file.datalake)
+  implementation(project(":common")) {
+    exclude("*")
+  }
 
   implementation(libs.commons.lang3)
-  // runtime used
-  implementation(libs.commons.logging)
   implementation(libs.guava)
 
-  testImplementation(project(":api"))
-  testImplementation(project(":core"))
-  testImplementation(project(":common"))
+  compileOnly(libs.azure.identity)
+  compileOnly(libs.azure.storage.file.datalake)
+  compileOnly(libs.hadoop3.abs)
+  compileOnly(libs.hadoop3.client.api)
+
+  testImplementation(libs.azure.identity)
   testImplementation(libs.junit.jupiter.api)
   testImplementation(libs.junit.jupiter.params)
   testRuntimeOnly(libs.junit.jupiter.engine)
 }
 
-tasks.withType(ShadowJar::class.java) {
-  isZip64 = true
-  configurations = listOf(project.configurations.runtimeClasspath.get())
-  archiveClassifier.set("")
-
-  dependencies {
-    exclude(dependency("org.slf4j:slf4j-api"))
-  }
-
-  // Relocate dependencies to avoid conflicts
-  relocate("com.azure", "org.apache.gravitino.azure.shaded.com.azure")
-  relocate("com.ctc.wstx", "org.apache.gravitino.azure.shaded.com.ctc.wstx")
-  relocate("com.fasterxml", "org.apache.gravitino.azure.shaded.com.fasterxml")
-  relocate("com.google", "org.apache.gravitino.azure.shaded.com.google.common")
-  relocate("com.microsoft.aad", 
"org.apache.gravitino.azure.shaded.com.microsoft.aad")
-  relocate("com.nimbusds", "org.apache.gravitino.azure.shaded.com.nimbusds")
-  relocate("com.sun.jna", "org.apache.gravitino.azure.shaded.com.sun.jna")
-  relocate("io.netty", "org.apache.gravitino.azure.shaded.io.netty")
-  relocate("net.minidev", "org.apache.gravitino.azure.shaded.net.minidev")
-  relocate("net.jcip.annotations", 
"org.apache.gravitino.azure.shaded.net.jcip.annotations")
-  relocate("org.apache.commons", 
"org.apache.gravitino.azure.shaded.org.apache.commons")
-  relocate("org.checkerframework", 
"org.apache.gravitino.azure.shaded.org.checkerframework")
-  relocate("org.codehaus.stax2", 
"org.apache.gravitino.azure.shaded.org.codehaus.stax2")
-  relocate("org.objectweb.asm", 
"org.apache.gravitino.azure.shaded.org.objectweb.asm")
-  relocate("org.reactivestreams", 
"org.apache.gravitino.azure.shaded.org.reactivestreams")
-  relocate("reactor", "org.apache.gravitino.azure.shaded.reactor")
-
-  mergeServiceFiles()
-}
-
-tasks.jar {
-  dependsOn(tasks.named("shadowJar"))
-  archiveClassifier.set("empty")
-}
-
 tasks.compileJava {
   dependsOn(":catalogs:catalog-fileset:runtimeJars")
 }
diff --git a/bundles/gcp-bundle/build.gradle.kts 
b/bundles/gcp-bundle/build.gradle.kts
index e8df40e4eb..cc0f2e1d7a 100644
--- a/bundles/gcp-bundle/build.gradle.kts
+++ b/bundles/gcp-bundle/build.gradle.kts
@@ -29,6 +29,8 @@ dependencies {
     // There is already a dependency on commons-logging v1.2 in hadoop-gcs, so 
exclude the one.
     exclude(group = "commons-logging", module = "commons-logging")
   }
+  implementation(libs.google.auth.credentials)
+  implementation(libs.google.auth.http)
   implementation(libs.hadoop3.client.api)
   implementation(libs.hadoop3.client.runtime)
   implementation(libs.hadoop3.gcs)
@@ -41,6 +43,16 @@ tasks.withType(ShadowJar::class.java) {
 
   dependencies {
     exclude(dependency("org.slf4j:slf4j-api"))
+
+    // Exclude Gravitino modules to prevent class duplication and "Split 
Packages" issues.
+    // These modules (api, common, catalogs) are already provided by the 
Gravitino server and gravitino-filesystem-hadoop3-runtime.
+    // Including them here would cause the Relocation rules below to 
incorrectly modify
+    // method signatures (e.g., JsonUtils.anyFieldMapper returning a shaded 
ObjectMapper),
+    // leading to java.lang.NoSuchMethodError at runtime.
+    exclude(project(":api"))
+    exclude(project(":common"))
+    exclude(project(":catalogs:catalog-common"))
+    exclude(project(":catalogs:hadoop-common"))
   }
 
   // Relocate dependencies to avoid conflicts
diff --git a/bundles/gcp/build.gradle.kts b/bundles/gcp/build.gradle.kts
index 90234f7538..80658d352a 100644
--- a/bundles/gcp/build.gradle.kts
+++ b/bundles/gcp/build.gradle.kts
@@ -16,74 +16,38 @@
  * specific language governing permissions and limitations
  * under the License.
  */
-import com.github.jengelman.gradle.plugins.shadow.tasks.ShadowJar
 
 plugins {
   `maven-publish`
   id("java")
-  alias(libs.plugins.shadow)
 }
 
 dependencies {
-  compileOnly(project(":api"))
-  compileOnly(libs.hadoop3.client.api)
-  compileOnly(libs.hadoop3.client.runtime)
-  compileOnly(libs.hadoop3.gcs)
-  compileOnly(project(":common"))
-
+  implementation(project(":api")) {
+    exclude("*")
+  }
   implementation(project(":catalogs:catalog-common")) {
     exclude("*")
   }
   implementation(project(":catalogs:hadoop-common")) {
     exclude("*")
   }
+  implementation(project(":common")) {
+    exclude("*")
+  }
+
   implementation(libs.commons.lang3)
-  // runtime used
-  implementation(libs.commons.logging)
-  implementation(libs.google.auth.credentials)
-  implementation(libs.google.auth.http)
+  implementation(libs.guava)
+
+  compileOnly(libs.hadoop3.client.api)
+  compileOnly(libs.hadoop3.gcs)
+  compileOnly(libs.google.auth.http)
 
-  testImplementation(project(":api"))
-  testImplementation(project(":core"))
-  testImplementation(project(":common"))
   testImplementation(libs.junit.jupiter.api)
   testImplementation(libs.junit.jupiter.params)
   testRuntimeOnly(libs.junit.jupiter.engine)
 }
 
-tasks.withType(ShadowJar::class.java) {
-  isZip64 = true
-  configurations = listOf(project.configurations.runtimeClasspath.get())
-  archiveClassifier.set("")
-
-  dependencies {
-    exclude(dependency("org.slf4j:slf4j-api"))
-  }
-
-  // Relocate dependencies to avoid conflicts
-  relocate("com.google.api", "org.apache.gravitino.gcp.shaded.com.google.api")
-  relocate("com.google.auth", 
"org.apache.gravitino.gcp.shaded.com.google.auth")
-  relocate("com.google.auto", 
"org.apache.gravitino.gcp.shaded.com.google.auto")
-  relocate("com.google.common", 
"org.apache.gravitino.gcp.shaded.com.google.common")
-  relocate("com.google.errorprone", 
"org.apache.gravitino.gcp.shaded.com.google.errorprone")
-  relocate("com.google.gson", 
"org.apache.gravitino.gcp.shaded.com.google.gson")
-  relocate("com.google.j2objc", 
"org.apache.gravitino.gcp.shaded.com.google.j2objc")
-  relocate("com.google.thirdparty", 
"org.apache.gravitino.gcp.shaded.com.google.thirdparty")
-  relocate("io.grpc", "org.apache.gravitino.gcp.shaded.io.grpc")
-  relocate("io.opencensus", "org.apache.gravitino.gcp.shaded.io.opencensus")
-  relocate("org.apache.commons", 
"org.apache.gravitino.gcp.shaded.org.apache.commons")
-  relocate("org.apache.http", 
"org.apache.gravitino.gcp.shaded.org.apache.http")
-  relocate("org.apache.httpcomponents", 
"org.apache.gravitino.gcp.shaded.org.apache.httpcomponents")
-  relocate("org.checkerframework", 
"org.apache.gravitino.gcp.shaded.org.checkerframework")
-
-  mergeServiceFiles()
-}
-
-tasks.jar {
-  dependsOn(tasks.named("shadowJar"))
-  archiveClassifier.set("empty")
-}
-
 tasks.compileJava {
   dependsOn(":catalogs:catalog-fileset:runtimeJars")
 }
diff --git a/clients/filesystem-hadoop3-runtime/build.gradle.kts 
b/bundles/iceberg-aliyun-bundle/build.gradle.kts
similarity index 60%
copy from clients/filesystem-hadoop3-runtime/build.gradle.kts
copy to bundles/iceberg-aliyun-bundle/build.gradle.kts
index 6a98d621e5..0492861ad2 100644
--- a/clients/filesystem-hadoop3-runtime/build.gradle.kts
+++ b/bundles/iceberg-aliyun-bundle/build.gradle.kts
@@ -16,7 +16,6 @@
  * specific language governing permissions and limitations
  * under the License.
  */
-
 import com.github.jengelman.gradle.plugins.shadow.tasks.ShadowJar
 
 plugins {
@@ -26,25 +25,21 @@ plugins {
 }
 
 dependencies {
-  implementation(project(":clients:filesystem-hadoop3")) {
-    exclude(group = "org.slf4j")
-  }
-  implementation(project(":clients:client-java-runtime", configuration = 
"shadow"))
-  implementation(libs.commons.lang3)
+  implementation(libs.aliyun.credentials.sdk)
+  implementation(libs.aliyun.sdk.oss)
+  // Aliyun oss SDK depends on this package, and JDK >= 9 requires manual add
+  // 
https://www.alibabacloud.com/help/en/oss/developer-reference/java-installation?spm=a2c63.p38356.0.i1
+  implementation(libs.sun.activation)
 }
 
-tasks.withType<ShadowJar>(ShadowJar::class.java) {
+tasks.withType(ShadowJar::class.java) {
   isZip64 = true
   configurations = listOf(project.configurations.runtimeClasspath.get())
   archiveClassifier.set("")
 
-  // Relocate dependencies to avoid conflicts
-  relocate("com.google", "org.apache.gravitino.shaded.com.google")
-  relocate("com.github.benmanes.caffeine", 
"org.apache.gravitino.shaded.com.github.benmanes.caffeine")
-  // relocate common lang3 package
-  relocate("org.apache.commons.lang3", 
"org.apache.gravitino.shaded.org.apache.commons.lang3")
-  relocate("org.apache.hc", "org.apache.gravitino.shaded.org.apache.hc")
-  relocate("org.checkerframework", 
"org.apache.gravitino.shaded.org.checkerframework")
+  dependencies {
+    exclude(dependency("org.slf4j:slf4j-api"))
+  }
 
   mergeServiceFiles()
 }
diff --git a/clients/filesystem-hadoop3-runtime/build.gradle.kts 
b/bundles/iceberg-aws-bundle/build.gradle.kts
similarity index 60%
copy from clients/filesystem-hadoop3-runtime/build.gradle.kts
copy to bundles/iceberg-aws-bundle/build.gradle.kts
index 6a98d621e5..10074b96e7 100644
--- a/clients/filesystem-hadoop3-runtime/build.gradle.kts
+++ b/bundles/iceberg-aws-bundle/build.gradle.kts
@@ -16,7 +16,6 @@
  * specific language governing permissions and limitations
  * under the License.
  */
-
 import com.github.jengelman.gradle.plugins.shadow.tasks.ShadowJar
 
 plugins {
@@ -26,25 +25,29 @@ plugins {
 }
 
 dependencies {
-  implementation(project(":clients:filesystem-hadoop3")) {
-    exclude(group = "org.slf4j")
+  // used by Gravitino credential vending
+  implementation(libs.aws.policy) {
+    exclude("*")
   }
-  implementation(project(":clients:client-java-runtime", configuration = 
"shadow"))
-  implementation(libs.commons.lang3)
+  implementation(libs.iceberg.aws.bundle)
 }
 
-tasks.withType<ShadowJar>(ShadowJar::class.java) {
+tasks.withType(ShadowJar::class.java) {
   isZip64 = true
   configurations = listOf(project.configurations.runtimeClasspath.get())
   archiveClassifier.set("")
 
-  // Relocate dependencies to avoid conflicts
-  relocate("com.google", "org.apache.gravitino.shaded.com.google")
-  relocate("com.github.benmanes.caffeine", 
"org.apache.gravitino.shaded.com.github.benmanes.caffeine")
-  // relocate common lang3 package
-  relocate("org.apache.commons.lang3", 
"org.apache.gravitino.shaded.org.apache.commons.lang3")
-  relocate("org.apache.hc", "org.apache.gravitino.shaded.org.apache.hc")
-  relocate("org.checkerframework", 
"org.apache.gravitino.shaded.org.checkerframework")
+  dependencies {
+    exclude(dependency("org.slf4j:slf4j-api"))
+  }
+
+  // Iceberg AWS bundle includes Log4j (before 1.10.1), so exclude to avoid 
conflicts
+  // see https://github.com/apache/iceberg/pull/14225
+  exclude("org/apache/log4j/**")
+  exclude("org/apache/logging/log4j/**")
+  exclude("log4j.properties")
+  exclude("log4j2.xml")
+  exclude("log4j2.component.properties")
 
   mergeServiceFiles()
 }
diff --git a/clients/filesystem-hadoop3-runtime/build.gradle.kts 
b/bundles/iceberg-azure-bundle/build.gradle.kts
similarity index 60%
copy from clients/filesystem-hadoop3-runtime/build.gradle.kts
copy to bundles/iceberg-azure-bundle/build.gradle.kts
index 6a98d621e5..26092c1a45 100644
--- a/clients/filesystem-hadoop3-runtime/build.gradle.kts
+++ b/bundles/iceberg-azure-bundle/build.gradle.kts
@@ -16,7 +16,6 @@
  * specific language governing permissions and limitations
  * under the License.
  */
-
 import com.github.jengelman.gradle.plugins.shadow.tasks.ShadowJar
 
 plugins {
@@ -25,26 +24,25 @@ plugins {
   alias(libs.plugins.shadow)
 }
 
+configurations.shadow {
+  // Force a newer version of ASM to support class files compiled with higher 
JDK versions.
+  resolutionStrategy.force("org.ow2.asm:asm:9.6", 
"org.ow2.asm:asm-commons:9.6")
+}
+
 dependencies {
-  implementation(project(":clients:filesystem-hadoop3")) {
-    exclude(group = "org.slf4j")
-  }
-  implementation(project(":clients:client-java-runtime", configuration = 
"shadow"))
-  implementation(libs.commons.lang3)
+  // The iceberg-azure-bundle already includes the dependencies
+  // required by Gravitino for credential vending.
+  implementation(libs.iceberg.azure.bundle)
 }
 
-tasks.withType<ShadowJar>(ShadowJar::class.java) {
+tasks.withType(ShadowJar::class.java) {
   isZip64 = true
   configurations = listOf(project.configurations.runtimeClasspath.get())
   archiveClassifier.set("")
 
-  // Relocate dependencies to avoid conflicts
-  relocate("com.google", "org.apache.gravitino.shaded.com.google")
-  relocate("com.github.benmanes.caffeine", 
"org.apache.gravitino.shaded.com.github.benmanes.caffeine")
-  // relocate common lang3 package
-  relocate("org.apache.commons.lang3", 
"org.apache.gravitino.shaded.org.apache.commons.lang3")
-  relocate("org.apache.hc", "org.apache.gravitino.shaded.org.apache.hc")
-  relocate("org.checkerframework", 
"org.apache.gravitino.shaded.org.checkerframework")
+  dependencies {
+    exclude(dependency("org.slf4j:slf4j-api"))
+  }
 
   mergeServiceFiles()
 }
diff --git a/clients/filesystem-hadoop3-runtime/build.gradle.kts 
b/bundles/iceberg-gcp-bundle/build.gradle.kts
similarity index 60%
copy from clients/filesystem-hadoop3-runtime/build.gradle.kts
copy to bundles/iceberg-gcp-bundle/build.gradle.kts
index 6a98d621e5..6ef87fbbc9 100644
--- a/clients/filesystem-hadoop3-runtime/build.gradle.kts
+++ b/bundles/iceberg-gcp-bundle/build.gradle.kts
@@ -16,7 +16,6 @@
  * specific language governing permissions and limitations
  * under the License.
  */
-
 import com.github.jengelman.gradle.plugins.shadow.tasks.ShadowJar
 
 plugins {
@@ -26,25 +25,19 @@ plugins {
 }
 
 dependencies {
-  implementation(project(":clients:filesystem-hadoop3")) {
-    exclude(group = "org.slf4j")
-  }
-  implementation(project(":clients:client-java-runtime", configuration = 
"shadow"))
-  implementation(libs.commons.lang3)
+  implementation(libs.google.auth.credentials)
+  implementation(libs.google.auth.http)
+  implementation(libs.iceberg.gcp.bundle)
 }
 
-tasks.withType<ShadowJar>(ShadowJar::class.java) {
+tasks.withType(ShadowJar::class.java) {
   isZip64 = true
   configurations = listOf(project.configurations.runtimeClasspath.get())
   archiveClassifier.set("")
 
-  // Relocate dependencies to avoid conflicts
-  relocate("com.google", "org.apache.gravitino.shaded.com.google")
-  relocate("com.github.benmanes.caffeine", 
"org.apache.gravitino.shaded.com.github.benmanes.caffeine")
-  // relocate common lang3 package
-  relocate("org.apache.commons.lang3", 
"org.apache.gravitino.shaded.org.apache.commons.lang3")
-  relocate("org.apache.hc", "org.apache.gravitino.shaded.org.apache.hc")
-  relocate("org.checkerframework", 
"org.apache.gravitino.shaded.org.checkerframework")
+  dependencies {
+    exclude(dependency("org.slf4j:slf4j-api"))
+  }
 
   mergeServiceFiles()
 }
diff --git a/clients/filesystem-hadoop3-runtime/build.gradle.kts 
b/clients/filesystem-hadoop3-runtime/build.gradle.kts
index 6a98d621e5..0916f94a70 100644
--- a/clients/filesystem-hadoop3-runtime/build.gradle.kts
+++ b/clients/filesystem-hadoop3-runtime/build.gradle.kts
@@ -26,6 +26,10 @@ plugins {
 }
 
 dependencies {
+  implementation(project(":bundles:aliyun"))
+  implementation(project(":bundles:aws"))
+  implementation(project(":bundles:azure"))
+  implementation(project(":bundles:gcp"))
   implementation(project(":clients:filesystem-hadoop3")) {
     exclude(group = "org.slf4j")
   }
diff --git 
a/common/src/main/java/org/apache/gravitino/credential/CredentialProviderDelegator.java
 
b/common/src/main/java/org/apache/gravitino/credential/CredentialProviderDelegator.java
index d87b7880e6..91832c6ba4 100644
--- 
a/common/src/main/java/org/apache/gravitino/credential/CredentialProviderDelegator.java
+++ 
b/common/src/main/java/org/apache/gravitino/credential/CredentialProviderDelegator.java
@@ -93,7 +93,8 @@ public abstract class CredentialProviderDelegator<T extends 
Credential>
   @SuppressWarnings("unchecked")
   private CredentialGenerator<T> loadGenerator() {
     try {
-      Class<?> generatorClass = Class.forName(getGeneratorClassName());
+      ClassLoader classLoader = Thread.currentThread().getContextClassLoader();
+      Class<?> generatorClass = Class.forName(getGeneratorClassName(), true, 
classLoader);
       Constructor<?> constructor = generatorClass.getDeclaredConstructor();
       constructor.setAccessible(true);
       return (CredentialGenerator<T>) constructor.newInstance();
diff --git a/dev/docker/gravitino/gravitino-dependency.sh 
b/dev/docker/gravitino/gravitino-dependency.sh
index 32429950f6..870acc70f2 100755
--- a/dev/docker/gravitino/gravitino-dependency.sh
+++ b/dev/docker/gravitino/gravitino-dependency.sh
@@ -48,21 +48,6 @@ download_gcs_connector() {
   rm "${temp_file}"
 }
 
-download_aliyun_jars() {
-  local aliyun_sdk_version="3.10.2"
-  local aliyun_sdk="aliyun_java_sdk_${aliyun_sdk_version}.zip"
-  local bundle_dir="${1}"
-  local target_dir="${2}"
-  if [ ! -f "${bundle_dir}/${aliyun_sdk}" ]; then
-    curl -L -s -o "${bundle_dir}/${aliyun_sdk}" 
https://gosspublic.alicdn.com/sdks/java/${aliyun_sdk}
-  fi
-  rm -rf "${bundle_dir}/aliyun"
-  unzip -q "${bundle_dir}/${aliyun_sdk}" -d "${bundle_dir}/aliyun"
-  cp 
"${bundle_dir}/aliyun/aliyun_java_sdk_${aliyun_sdk_version}/aliyun-sdk-oss-3.10.2.jar"
 ${target_dir}
-  cp 
"${bundle_dir}/aliyun/aliyun_java_sdk_${aliyun_sdk_version}/lib/hamcrest-core-1.1.jar"
 ${target_dir}
-  cp 
"${bundle_dir}/aliyun/aliyun_java_sdk_${aliyun_sdk_version}/lib/jdom2-2.0.6.jar"
 ${target_dir}
-}
-
 # Build the Gravitino project
 ${gravitino_home}/gradlew clean build -x test
 
@@ -96,21 +81,6 @@ cp "${gravitino_staging_dir}/${pg_driver}" 
"${gravitino_package_dir}/catalogs/la
 cp "${gravitino_staging_dir}/${pg_driver}" "${gravitino_iceberg_rest_dir}"
 cp "${gravitino_staging_dir}/${pg_driver}" "${gravitino_package_dir}/libs/"
 
-iceberg_version="1.10.0"
-iceberg_aws_bundle="iceberg-aws-bundle-${iceberg_version}.jar"
-wget 
"https://repo1.maven.org/maven2/org/apache/iceberg/iceberg-aws-bundle/${iceberg_version}/${iceberg_aws_bundle}";
 -O "${gravitino_staging_dir}/${iceberg_aws_bundle}"
-cp "${gravitino_staging_dir}/${iceberg_aws_bundle}" 
"${gravitino_iceberg_rest_dir}"
-
-iceberg_gcp_bundle="iceberg-gcp-bundle-${iceberg_version}.jar"
-wget 
"https://repo1.maven.org/maven2/org/apache/iceberg/iceberg-gcp-bundle/${iceberg_version}/${iceberg_gcp_bundle}";
 -O "${gravitino_staging_dir}/${iceberg_gcp_bundle}"
-cp "${gravitino_staging_dir}/${iceberg_gcp_bundle}" 
"${gravitino_iceberg_rest_dir}"
-
-iceberg_azure_bundle="iceberg-azure-bundle-${iceberg_version}.jar"
-wget 
"https://repo1.maven.org/maven2/org/apache/iceberg/iceberg-azure-bundle/${iceberg_version}/${iceberg_azure_bundle}";
 -O "${gravitino_staging_dir}/${iceberg_azure_bundle}"
-cp "${gravitino_staging_dir}/${iceberg_azure_bundle}" 
"${gravitino_iceberg_rest_dir}"
-
-download_aliyun_jars "${gravitino_staging_dir}" "${gravitino_iceberg_rest_dir}"
-
 echo "Finish downloading"
 
 mkdir -p "${gravitino_dir}/packages/gravitino/bin"
@@ -121,21 +91,18 @@ 
fileset_lib_dir="${gravitino_dir}/packages/gravitino/catalogs/fileset/libs"
 
 # Copy the Aliyun, AWS, GCP and Azure bundles to the Fileset catalog libs
 find ${gravitino_home}/bundles/aliyun-bundle/build/libs/ -name 
'gravitino-aliyun-bundle-*.jar' ! -name '*-empty.jar' -exec cp -v {} 
"${fileset_lib_dir}" \;
-find ${gravitino_home}/bundles/aws-bundle/build/libs/ -name 
'gravitino-aws-*.jar' ! -name '*-empty.jar' -exec cp -v {} "${fileset_lib_dir}" 
\;
-find ${gravitino_home}/bundles/gcp-bundle/build/libs/ -name 
'gravitino-gcp-*.jar' ! -name '*-empty.jar' -exec cp -v {} "${fileset_lib_dir}" 
\;
-find ${gravitino_home}/bundles/azure-bundle/build/libs/ -name 
'gravitino-azure-*.jar' ! -name '*-empty.jar' -exec cp -v {} 
"${fileset_lib_dir}" \;
+find ${gravitino_home}/bundles/aws-bundle/build/libs/ -name 
'gravitino-aws-bundle-*.jar' ! -name '*-empty.jar' -exec cp -v {} 
"${fileset_lib_dir}" \;
+find ${gravitino_home}/bundles/gcp-bundle/build/libs/ -name 
'gravitino-gcp-bundle-*.jar' ! -name '*-empty.jar' -exec cp -v {} 
"${fileset_lib_dir}" \;
+find ${gravitino_home}/bundles/azure-bundle/build/libs/ -name 
'gravitino-azure-bundle-*.jar' ! -name '*-empty.jar' -exec cp -v {} 
"${fileset_lib_dir}" \;
 
-find ${gravitino_home}/bundles/gcp/build/libs/ -name 'gravitino-gcp-*.jar' ! 
-name '*-empty.jar' -exec cp -v {} "${gravitino_iceberg_rest_dir}" \;
-find ${gravitino_home}/bundles/aws/build/libs/ -name 'gravitino-aws-*.jar' ! 
-name '*-empty.jar' -exec cp -v {} "${gravitino_iceberg_rest_dir}" \;
-find ${gravitino_home}/bundles/azure/build/libs/ -name 'gravitino-azure-*.jar' 
! -name '*-empty.jar' -exec cp -v {} "${gravitino_iceberg_rest_dir}" \;
-find ${gravitino_home}/bundles/aliyun/build/libs/ -name 
'gravitino-aliyun-*.jar' ! -name '*-empty.jar' -exec cp -v {} 
"${gravitino_iceberg_rest_dir}" \;
+# Copy the Aliyun, AWS, GCP and Azure bundles to the Iceberg REST server libs
+find ${gravitino_home}/bundles/iceberg-gcp-bundle/build/libs/ -name 
'gravitino-iceberg-gcp-bundle-*.jar' ! -name '*-empty.jar' -exec cp -v {} 
"${gravitino_iceberg_rest_dir}" \;
+find ${gravitino_home}/bundles/iceberg-aws-bundle/build/libs/ -name 
'gravitino-iceberg-aws-bundle-*.jar' ! -name '*-empty.jar' -exec cp -v {} 
"${gravitino_iceberg_rest_dir}" \;
+find ${gravitino_home}/bundles/iceberg-azure-bundle/build/libs/ -name 
'gravitino-iceberg-azure-bundle-*.jar' ! -name '*-empty.jar' -exec cp -v {} 
"${gravitino_iceberg_rest_dir}" \;
+find ${gravitino_home}/bundles/iceberg-aliyun-bundle/build/libs/ -name 
'gravitino-iceberg-aliyun-bundle-*.jar' ! -name '*-empty.jar' -exec cp -v {} 
"${gravitino_iceberg_rest_dir}" \;
 
 download_gcs_connector
 
-# Temporary rm log4j from Gravition to prevent class conflict with Iceberg AWS 
bundle jar
-rm -f ${gravitino_iceberg_rest_dir}/log4j-api-*.jar
-rm -f ${gravitino_iceberg_rest_dir}/log4j-core-*.jar
-
 # Keeping the container running at all times
 cat <<EOF >> "${gravitino_dir}/packages/gravitino/bin/gravitino.sh"
 
diff --git a/dev/docker/iceberg-rest-server/iceberg-rest-server-dependency.sh 
b/dev/docker/iceberg-rest-server/iceberg-rest-server-dependency.sh
index e0e2000d1f..96ce550e92 100755
--- a/dev/docker/iceberg-rest-server/iceberg-rest-server-dependency.sh
+++ b/dev/docker/iceberg-rest-server/iceberg-rest-server-dependency.sh
@@ -22,21 +22,6 @@ iceberg_rest_server_dir="$(dirname "${BASH_SOURCE-$0}")"
 iceberg_rest_server_dir="$(cd "${iceberg_rest_server_dir}">/dev/null; pwd)"
 gravitino_home="$(cd "${iceberg_rest_server_dir}/../../..">/dev/null; pwd)"
 
-download_aliyun_jars() {
-  local aliyun_sdk_version="3.10.2"
-  local aliyun_sdk="aliyun_java_sdk_${aliyun_sdk_version}.zip"
-  local bundle_dir="${1}"
-  local target_dir="${2}"
-  if [ ! -f "${bundle_dir}/${aliyun_sdk}" ]; then
-    curl -L -s -o "${bundle_dir}/${aliyun_sdk}" 
https://gosspublic.alicdn.com/sdks/java/${aliyun_sdk}
-  fi
-  rm -rf "${bundle_dir}/aliyun"
-  unzip -q "${bundle_dir}/${aliyun_sdk}" -d "${bundle_dir}/aliyun"
-  cp 
"${bundle_dir}/aliyun/aliyun_java_sdk_${aliyun_sdk_version}/aliyun-sdk-oss-3.10.2.jar"
 ${target_dir}
-  cp 
"${bundle_dir}/aliyun/aliyun_java_sdk_${aliyun_sdk_version}/lib/hamcrest-core-1.1.jar"
 ${target_dir}
-  cp 
"${bundle_dir}/aliyun/aliyun_java_sdk_${aliyun_sdk_version}/lib/jdom2-2.0.6.jar"
 ${target_dir}
-}
-
 # Prepare the Iceberg REST server packages
 cd ${gravitino_home}
 ./gradlew clean assembleIcebergRESTServer -x test
@@ -50,37 +35,18 @@ tar xfz gravitino-iceberg-rest-server-*.tar.gz
 cp -r gravitino-iceberg-rest-server*-bin 
${iceberg_rest_server_dir}/packages/gravitino-iceberg-rest-server
 
 cd ${gravitino_home}
-./gradlew :bundles:gcp:jar
-./gradlew :bundles:aws:jar
-./gradlew :bundles:azure:jar
-./gradlew :bundles:aliyun:jar
+./gradlew :bundles:iceberg-gcp-bundle:shadowJar
+./gradlew :bundles:iceberg-aws-bundle:shadowJar
+./gradlew :bundles:iceberg-azure-bundle:shadowJar
+./gradlew :bundles:iceberg-aliyun-bundle:shadowJar
 
 # prepare bundle jar
 cd ${iceberg_rest_server_dir}
 mkdir -p bundles
-find ${gravitino_home}/bundles/gcp/build/libs/ -name 'gravitino-gcp-*.jar' ! 
-name '*-empty.jar' -exec cp -v {} bundles/ \;
-find ${gravitino_home}/bundles/aws/build/libs/ -name 'gravitino-aws-*.jar' ! 
-name '*-empty.jar' -exec cp -v {} bundles/ \;
-find ${gravitino_home}/bundles/azure/build/libs/ -name 'gravitino-azure-*.jar' 
! -name '*-empty.jar' -exec cp -v {} bundles/ \;
-find ${gravitino_home}/bundles/aliyun/build/libs/ -name 
'gravitino-aliyun-*.jar' ! -name '*-empty.jar' -exec cp -v {} bundles/ \;
-
-iceberg_version="1.10.0"
-
-iceberg_gcp_bundle="iceberg-gcp-bundle-${iceberg_version}.jar"
-if [ ! -f "bundles/${iceberg_gcp_bundle}" ]; then
-  curl -L -s -o bundles/${iceberg_gcp_bundle} 
https://repo1.maven.org/maven2/org/apache/iceberg/iceberg-gcp-bundle/${iceberg_version}/${iceberg_gcp_bundle}
-fi
-
-iceberg_aws_bundle="iceberg-aws-bundle-${iceberg_version}.jar"
-if [ ! -f "bundles/${iceberg_aws_bundle}" ]; then
-  curl -L -s -o bundles/${iceberg_aws_bundle} 
https://repo1.maven.org/maven2/org/apache/iceberg/iceberg-aws-bundle/${iceberg_version}/${iceberg_aws_bundle}
-fi
-
-iceberg_azure_bundle="iceberg-azure-bundle-${iceberg_version}.jar"
-if [ ! -f "bundles/${iceberg_azure_bundle}" ]; then
-  curl -L -s -o bundles/${iceberg_azure_bundle} 
https://repo1.maven.org/maven2/org/apache/iceberg/iceberg-azure-bundle/${iceberg_version}/${iceberg_azure_bundle}
-fi
-
-download_aliyun_jars "bundles" 
"${iceberg_rest_server_dir}/packages/gravitino-iceberg-rest-server/libs/"
+find ${gravitino_home}/bundles/iceberg-gcp-bundle/build/libs/ -name 
'gravitino-iceberg-gcp-bundle-*.jar' ! -name '*-empty.jar' -exec cp -v {} 
bundles/ \;
+find ${gravitino_home}/bundles/iceberg-aws-bundle/build/libs/ -name 
'gravitino-iceberg-aws-bundle-*.jar' ! -name '*-empty.jar' -exec cp -v {} 
bundles/ \;
+find ${gravitino_home}/bundles/iceberg-azure-bundle/build/libs/ -name 
'gravitino-iceberg-azure-bundle-*.jar' ! -name '*-empty.jar' -exec cp -v {} 
bundles/ \;
+find ${gravitino_home}/bundles/iceberg-aliyun-bundle/build/libs/ -name 
'gravitino-iceberg-aliyun-bundle-*.jar' ! -name '*-empty.jar' -exec cp -v {} 
bundles/ \;
 
 # download jdbc driver
 if [ ! -f "bundles/sqlite-jdbc-3.42.0.0.jar" ]; then
@@ -89,9 +55,5 @@ fi
 
 cp bundles/*jar 
${iceberg_rest_server_dir}/packages/gravitino-iceberg-rest-server/libs/
 
-# Temporary rm log4j from Gravition to prevent class conflict with Iceberg AWS 
bundle jar
-rm -f 
${iceberg_rest_server_dir}/packages/gravitino-iceberg-rest-server/libs/log4j-api-*.jar
-rm -f 
${iceberg_rest_server_dir}/packages/gravitino-iceberg-rest-server/libs/log4j-core-*.jar
-
 cp start-iceberg-rest-server.sh 
${iceberg_rest_server_dir}/packages/gravitino-iceberg-rest-server/bin/
 cp rewrite_config.py 
${iceberg_rest_server_dir}/packages/gravitino-iceberg-rest-server/bin/
diff --git a/docs/fileset-catalog-with-adls.md 
b/docs/fileset-catalog-with-adls.md
index fe03a83a7c..31021ad631 100644
--- a/docs/fileset-catalog-with-adls.md
+++ b/docs/fileset-catalog-with-adls.md
@@ -260,7 +260,7 @@ fs.mkdirs(filesetPath);
 
 Similar to Spark configurations, you need to add ADLS (bundle) jars to the 
classpath according to your environment.
 
-If your wants to custom your hadoop version or there is already a hadoop 
version in your project, you can add the following dependencies to your 
`pom.xml`:
+If you want to customize your Hadoop version or there is already a Hadoop version in your project, you can add the following dependencies to your `pom.xml`:
 
 ```xml
   <dependency>
@@ -280,14 +280,12 @@ If your wants to custom your hadoop version or there is 
already a hadoop version
     <artifactId>gravitino-filesystem-hadoop3-runtime</artifactId>
     <version>${GRAVITINO_VERSION}</version>
   </dependency>
-
-  <dependency>
-    <groupId>org.apache.gravitino</groupId>
-    <artifactId>gravitino-azure</artifactId>
-    <version>${GRAVITINO_VERSION}</version>
-  </dependency>
 ```
 
+:::note
+Since version 1.1.0, the `gravitino-azure` JAR is no longer required, as it is 
now included in the `gravitino-filesystem-hadoop3-runtime` JAR.
+:::
+
 Or use the bundle jar with Hadoop environment if there is no Hadoop 
environment:
 
 ```xml
@@ -327,7 +325,7 @@ catalog_name = "your_adls_catalog"
 schema_name = "your_adls_schema"
 fileset_name = "your_adls_fileset"
 # JDK8 as follows, JDK17 will be slightly different, you need to add '--conf 
\"spark.driver.extraJavaOptions=--add-opens=java.base/sun.nio.ch=ALL-UNNAMED\" 
--conf 
\"spark.executor.extraJavaOptions=--add-opens=java.base/sun.nio.ch=ALL-UNNAMED\"'
 to the submit args.
-os.environ["PYSPARK_SUBMIT_ARGS"] = "--jars 
/path/to/gravitino-azure-{gravitino-version}.jar,/path/to/gravitino-filesystem-hadoop3-runtime-{gravitino-version}.jar,/path/to/hadoop-azure-3.3.4.jar,/path/to/azure-storage-7.0.1.jar,/path/to/wildfly-openssl-1.0.7.Final.jar
 --master local[1] pyspark-shell"
+os.environ["PYSPARK_SUBMIT_ARGS"] = "--jars 
/path/to/gravitino-filesystem-hadoop3-runtime-{gravitino-version}.jar,/path/to/hadoop-azure-3.3.4.jar,/path/to/azure-storage-7.0.1.jar,/path/to/wildfly-openssl-1.0.7.Final.jar
 --master local[1] pyspark-shell"
 spark = SparkSession.builder
     .appName("adls_fileset_test")
     .config("spark.hadoop.fs.AbstractFileSystem.gvfs.impl", 
"org.apache.gravitino.filesystem.hadoop.Gvfs")
@@ -374,9 +372,10 @@ os.environ["PYSPARK_SUBMIT_ARGS"] = (
 )
 ```
 
-- 
[`gravitino-azure-bundle-${gravitino-version}.jar`](https://mvnrepository.com/artifact/org.apache.gravitino/gravitino-azure-bundle)
 is the Gravitino ADLS jar with Hadoop environment(3.3.1), `hadoop-azure.jar` 
and all packages needed to access ADLS.
-- 
[`gravitino-azure-${gravitino-version}.jar`](https://mvnrepository.com/artifact/org.apache.gravitino/gravitino-azure)
 is a condensed version of the Gravitino ADLS bundle jar without Hadoop 
environment and `hadoop-azure.jar`.
-- `hadoop-azure-3.3.4.jar` and `azure-storage-7.0.1.jar` can be found in the 
Hadoop distribution in the `${HADOOP_HOME}/share/hadoop/tools/lib` directory.
+- 
[`gravitino-azure-bundle-${gravitino-version}.jar`](https://mvnrepository.com/artifact/org.apache.gravitino/gravitino-azure-bundle):
 A "fat" JAR that includes `gravitino-azure` functionality and all necessary 
dependencies like `hadoop-azure` (3.3.1) and other packages needed to access 
ADLS. Use this if your Spark environment doesn't have a pre-existing Hadoop 
setup.
+- 
[`gravitino-filesystem-hadoop3-runtime-${gravitino-version}.jar`](https://mvnrepository.com/artifact/org.apache.gravitino/gravitino-filesystem-hadoop3-runtime):
 A "fat" JAR that bundles Gravitino's virtual filesystem client and includes 
the functionality of `gravitino-azure`. It is required for accessing Gravitino 
filesets.
+- `hadoop-azure-3.3.4.jar`, `azure-storage-7.0.1.jar`, and 
`wildfly-openssl-1.0.7.Final.jar`: Standard Hadoop dependencies for ADLS 
access. If you are running in an existing Hadoop environment, you need to 
provide these JARs. They are typically located in the 
`${HADOOP_HOME}/share/hadoop/tools/lib` directory.
+- 
[`gravitino-azure-${gravitino-version}.jar`](https://mvnrepository.com/artifact/org.apache.gravitino/gravitino-azure):
 A "thin" JAR that only provides the Azure integration code. Its functionality 
is already included in the `gravitino-azure-bundle` and 
`gravitino-filesystem-hadoop3-runtime` JARs, so you do not need to add it as a 
direct dependency unless you want to manage all Hadoop and Azure dependencies 
manually.
 
 Please choose the correct jar according to your environment.
 
@@ -423,7 +422,7 @@ The following are examples of how to use the `hadoop fs` 
command to access the f
 
 2. Add the necessary jars to the Hadoop classpath.
 
-For ADLS, you need to add 
`gravitino-filesystem-hadoop3-runtime-${gravitino-version}.jar`, 
`gravitino-azure-${gravitino-version}.jar` and 
`hadoop-azure-${hadoop-version}.jar` located at 
`${HADOOP_HOME}/share/hadoop/tools/lib/` to the Hadoop classpath. 
+For ADLS, you need to add 
`gravitino-filesystem-hadoop3-runtime-${gravitino-version}.jar` and 
`hadoop-azure-${hadoop-version}.jar` located at 
`${HADOOP_HOME}/share/hadoop/tools/lib/` to the Hadoop classpath. 
 
 3. Run the following command to access the fileset:
 
diff --git a/docs/fileset-catalog-with-gcs.md b/docs/fileset-catalog-with-gcs.md
index 57f643a272..3379dac852 100644
--- a/docs/fileset-catalog-with-gcs.md
+++ b/docs/fileset-catalog-with-gcs.md
@@ -271,14 +271,12 @@ If your wants to custom your hadoop version or there is 
already a hadoop version
     <artifactId>gravitino-filesystem-hadoop3-runtime</artifactId>
     <version>${GRAVITINO_VERSION}</version>
   </dependency>
-
-  <dependency>
-    <groupId>org.apache.gravitino</groupId>
-    <artifactId>gravitino-gcp</artifactId>
-    <version>${GRAVITINO_VERSION}</version>
-  </dependency>
 ```
 
+:::note
+Since version 1.1.0, the `gravitino-gcp` JAR is no longer required, as it is 
now included in the `gravitino-filesystem-hadoop3-runtime` JAR.
+:::
+
 Or use the bundle jar with Hadoop environment if there is no Hadoop 
environment:
 
 ```xml
@@ -319,7 +317,7 @@ schema_name = "your_gcs_schema"
 fileset_name = "your_gcs_fileset"
 
 # JDK8 as follows, JDK17 will be slightly different, you need to add '--conf 
\"spark.driver.extraJavaOptions=--add-opens=java.base/sun.nio.ch=ALL-UNNAMED\" 
--conf 
\"spark.executor.extraJavaOptions=--add-opens=java.base/sun.nio.ch=ALL-UNNAMED\"'
 to the submit args.
-os.environ["PYSPARK_SUBMIT_ARGS"] = "--jars 
/path/to/gravitino-gcp-{gravitino-version}.jar,/path/to/gravitino-filesystem-hadoop3-runtime-{gravitino-version}.jar,/path/to/gcs-connector-hadoop3-2.2.22-shaded.jar
 --master local[1] pyspark-shell"
+os.environ["PYSPARK_SUBMIT_ARGS"] = "--jars 
/path/to/gravitino-filesystem-hadoop3-runtime-{gravitino-version}.jar,/path/to/gcs-connector-hadoop3-2.2.22-shaded.jar
 --master local[1] pyspark-shell"
 spark = SparkSession.builder
     .appName("gcs_fielset_test")
     .config("spark.hadoop.fs.AbstractFileSystem.gvfs.impl", 
"org.apache.gravitino.filesystem.hadoop.Gvfs")
@@ -365,8 +363,10 @@ os.environ["PYSPARK_SUBMIT_ARGS"] = (
 )
 ```
 
-- 
[`gravitino-gcp-bundle-${gravitino-version}.jar`](https://mvnrepository.com/artifact/org.apache.gravitino/gravitino-gcp-bundle)
 is the Gravitino GCP jar with Hadoop environment(3.3.1) and `gcs-connector`.
-- 
[`gravitino-gcp-${gravitino-version}.jar`](https://mvnrepository.com/artifact/org.apache.gravitino/gravitino-gcp)
 is a condensed version of the Gravitino GCP bundle jar without Hadoop 
environment and 
[`gcs-connector`](https://github.com/GoogleCloudDataproc/hadoop-connectors/releases/download/v2.2.22/gcs-connector-hadoop3-2.2.22-shaded.jar)
 
+- 
[`gravitino-gcp-bundle-${gravitino-version}.jar`](https://mvnrepository.com/artifact/org.apache.gravitino/gravitino-gcp-bundle):
 A "fat" JAR that includes `gravitino-gcp` functionality and all necessary 
dependencies like `gcs-connector` (hadoop3-2.2.22). Use this if your Spark 
environment doesn't have a pre-existing Hadoop setup.
+- 
[`gravitino-filesystem-hadoop3-runtime-${gravitino-version}.jar`](https://mvnrepository.com/artifact/org.apache.gravitino/gravitino-filesystem-hadoop3-runtime):
 A "fat" JAR that bundles Gravitino's virtual filesystem client and includes 
the functionality of `gravitino-gcp`. It is required for accessing Gravitino 
filesets.
+- 
[`gcs-connector-hadoop3-2.2.22-shaded.jar`](https://github.com/GoogleCloudDataproc/hadoop-connectors/releases/download/v2.2.22/gcs-connector-hadoop3-2.2.22-shaded.jar):
 Standard Hadoop dependency for GCS access. If you are running in an existing 
Hadoop environment, you need to provide this JAR.
+- 
[`gravitino-gcp-${gravitino-version}.jar`](https://mvnrepository.com/artifact/org.apache.gravitino/gravitino-gcp):
 A "thin" JAR that only provides the GCP integration code. Its functionality is 
already included in the `gravitino-gcp-bundle` and 
`gravitino-filesystem-hadoop3-runtime` JARs, so you do not need to add it as a 
direct dependency unless you want to manage all Hadoop and GCP dependencies 
manually.
 
 Please choose the correct jar according to your environment.
 
@@ -409,7 +409,7 @@ The following are examples of how to use the `hadoop fs` 
command to access the f
 
 2. Add the necessary jars to the Hadoop classpath.
 
-For GCS, you need to add 
`gravitino-filesystem-hadoop3-runtime-${gravitino-version}.jar`, 
`gravitino-gcp-${gravitino-version}.jar` and 
[`gcs-connector-hadoop3-2.2.22-shaded.jar`](https://github.com/GoogleCloudDataproc/hadoop-connectors/releases/download/v2.2.22/gcs-connector-hadoop3-2.2.22-shaded.jar)
 to Hadoop classpath.
+For GCS, you need to add 
`gravitino-filesystem-hadoop3-runtime-${gravitino-version}.jar` and 
[`gcs-connector-hadoop3-2.2.22-shaded.jar`](https://github.com/GoogleCloudDataproc/hadoop-connectors/releases/download/v2.2.22/gcs-connector-hadoop3-2.2.22-shaded.jar)
 to Hadoop classpath.
 
 3. Run the following command to access the fileset:
 
diff --git a/docs/fileset-catalog-with-oss.md b/docs/fileset-catalog-with-oss.md
index 471b16c11d..5bc1ce34ad 100644
--- a/docs/fileset-catalog-with-oss.md
+++ b/docs/fileset-catalog-with-oss.md
@@ -286,14 +286,12 @@ If your wants to custom your hadoop version or there is 
already a hadoop version
     <artifactId>gravitino-filesystem-hadoop3-runtime</artifactId>
     <version>${GRAVITINO_VERSION}</version>
   </dependency>
-
-  <dependency>
-    <groupId>org.apache.gravitino</groupId>
-    <artifactId>gravitino-aliyun</artifactId>
-    <version>${GRAVITINO_VERSION}</version>
-  </dependency>
 ```
 
+:::note
+Since version 1.1.0, the `gravitino-aliyun` JAR is no longer required, as it 
is now included in the `gravitino-filesystem-hadoop3-runtime` JAR.
+:::
+
 Or use the bundle jar with Hadoop environment if there is no Hadoop 
environment:
 
 ```xml
@@ -335,7 +333,7 @@ fileset_name = "your_oss_fileset"
 
 # JDK8 as follows, JDK17 will be slightly different, you need to add '--conf 
\"spark.driver.extraJavaOptions=--add-opens=java.base/sun.nio.ch=ALL-UNNAMED\" 
--conf 
\"spark.executor.extraJavaOptions=--add-opens=java.base/sun.nio.ch=ALL-UNNAMED\"'
 to the submit args.
 os.environ["PYSPARK_SUBMIT_ARGS"] = (
-    "--jars /path/to/gravitino-aliyun-{gravitino-version}.jar,"
+    "--jars "
     "/path/to/gravitino-filesystem-hadoop3-runtime-{gravitino-version}.jar,"
     "/path/to/aliyun-sdk-oss-3.13.0.jar,"
     "/path/to/hadoop-aliyun-3.3.4.jar,"
@@ -374,9 +372,10 @@ If your Spark **without Hadoop environment**, you can use 
the following code sni
 os.environ["PYSPARK_SUBMIT_ARGS"] = "--jars 
/path/to/gravitino-aliyun-bundle-{gravitino-version}.jar,/path/to/gravitino-filesystem-hadoop3-runtime-{gravitino-version}.jar,
 --master local[1] pyspark-shell"
 ```
 
-- 
[`gravitino-aliyun-bundle-${gravitino-version}.jar`](https://mvnrepository.com/artifact/org.apache.gravitino/gravitino-aliyun-bundle)
 is the Gravitino Aliyun jar with Hadoop environment(3.3.1) and `hadoop-oss` 
jar.
-- 
[`gravitino-aliyun-${gravitino-version}.jar`](https://mvnrepository.com/artifact/org.apache.gravitino/gravitino-aliyun)
 is a condensed version of the Gravitino Aliyun bundle jar without Hadoop 
environment and `hadoop-aliyun` jar.
--`hadoop-aliyun-3.3.4.jar`, `jdom2-2.0.6.jar`, and `aliyun-sdk-oss-3.13.0.jar` 
can be found in the Hadoop distribution in the 
`${HADOOP_HOME}/share/hadoop/tools/lib` directory.
+- 
[`gravitino-aliyun-bundle-${gravitino-version}.jar`](https://mvnrepository.com/artifact/org.apache.gravitino/gravitino-aliyun-bundle):
 A "fat" JAR that includes `gravitino-aliyun` functionality and all necessary 
dependencies like `hadoop-aliyun` (3.3.1) and `aliyun-sdk-oss`. Use this if 
your Spark environment doesn't have a pre-existing Hadoop setup.
+- 
[`gravitino-filesystem-hadoop3-runtime-${gravitino-version}.jar`](https://mvnrepository.com/artifact/org.apache.gravitino/gravitino-filesystem-hadoop3-runtime):
 A "fat" JAR that bundles Gravitino's virtual filesystem client and includes 
the functionality of `gravitino-aliyun`. It is required for accessing Gravitino 
filesets.
+- `hadoop-aliyun-3.3.4.jar`, `jdom2-2.0.6.jar`, and 
`aliyun-sdk-oss-3.13.0.jar`: Standard Hadoop dependencies for OSS access. If 
you are running in an existing Hadoop environment, you need to provide these 
JARs. They are typically located in the `${HADOOP_HOME}/share/hadoop/tools/lib` 
directory.
+- 
[`gravitino-aliyun-${gravitino-version}.jar`](https://mvnrepository.com/artifact/org.apache.gravitino/gravitino-aliyun):
 A "thin" JAR that only provides the Aliyun integration code. Its functionality 
is already included in the `gravitino-aliyun-bundle` and 
`gravitino-filesystem-hadoop3-runtime` JARs, so you do not need to add it as a 
direct dependency unless you want to manage all Hadoop and Aliyun dependencies 
manually.
 
 Please choose the correct jar according to your environment.
 
@@ -429,7 +428,7 @@ The following are examples of how to use the `hadoop fs` 
command to access the f
 
 2. Add the necessary jars to the Hadoop classpath.
 
-For OSS, you need to add 
`gravitino-filesystem-hadoop3-runtime-${gravitino-version}.jar`, 
`gravitino-aliyun-${gravitino-version}.jar` and 
`hadoop-aliyun-${hadoop-version}.jar` located at 
`${HADOOP_HOME}/share/hadoop/tools/lib/` to Hadoop classpath. 
+For OSS, you need to add 
`gravitino-filesystem-hadoop3-runtime-${gravitino-version}.jar` and 
`hadoop-aliyun-${hadoop-version}.jar` located at 
`${HADOOP_HOME}/share/hadoop/tools/lib/` to Hadoop classpath. 
 
 3. Run the following command to access the fileset:
 
diff --git a/docs/fileset-catalog-with-s3.md b/docs/fileset-catalog-with-s3.md
index 774fe9ab54..bcf34e03f9 100644
--- a/docs/fileset-catalog-with-s3.md
+++ b/docs/fileset-catalog-with-s3.md
@@ -288,14 +288,12 @@ Similar to Spark configurations, you need to add S3 
(bundle) jars to the classpa
     <artifactId>gravitino-filesystem-hadoop3-runtime</artifactId>
     <version>${GRAVITINO_VERSION}</version>
   </dependency>
-
-  <dependency>
-    <groupId>org.apache.gravitino</groupId>
-    <artifactId>gravitino-aws</artifactId>
-    <version>${GRAVITINO_VERSION}</version>
-  </dependency>
 ```
 
+:::note
+Since version 1.1.0, the `gravitino-aws` JAR is no longer required, as it is 
now included in the `gravitino-filesystem-hadoop3-runtime` JAR.
+:::
+
 Or use the bundle jar with Hadoop environment if there is no Hadoop 
environment:
 
 
@@ -337,7 +335,7 @@ schema_name = "your_s3_schema"
 fileset_name = "your_s3_fileset"
 
 # JDK8 as follows, JDK17 will be slightly different, you need to add '--conf 
\"spark.driver.extraJavaOptions=--add-opens=java.base/sun.nio.ch=ALL-UNNAMED\" 
--conf 
\"spark.executor.extraJavaOptions=--add-opens=java.base/sun.nio.ch=ALL-UNNAMED\"'
 to the submit args.
-os.environ["PYSPARK_SUBMIT_ARGS"] = "--jars 
/path/to/gravitino-aws-${gravitino-version}.jar,/path/to/gravitino-filesystem-hadoop3-runtime-${gravitino-version}-SNAPSHOT.jar,/path/to/hadoop-aws-3.3.4.jar,/path/to/aws-java-sdk-bundle-1.12.262.jar
 --master local[1] pyspark-shell"
+os.environ["PYSPARK_SUBMIT_ARGS"] = "--jars 
/path/to/gravitino-filesystem-hadoop3-runtime-${gravitino-version}-SNAPSHOT.jar,/path/to/hadoop-aws-3.3.4.jar,/path/to/aws-java-sdk-bundle-1.12.262.jar
 --master local[1] pyspark-shell"
 spark = SparkSession.builder
     .appName("s3_fileset_test")
     .config("spark.hadoop.fs.AbstractFileSystem.gvfs.impl", 
"org.apache.gravitino.filesystem.hadoop.Gvfs")
@@ -369,9 +367,10 @@ If your Spark **without Hadoop environment**, you can use 
the following code sni
 os.environ["PYSPARK_SUBMIT_ARGS"] = "--jars 
/path/to/gravitino-aws-bundle-${gravitino-version}.jar,/path/to/gravitino-filesystem-hadoop3-runtime-${gravitino-version}-SNAPSHOT.jar
 --master local[1] pyspark-shell"
 ```
 
-- 
[`gravitino-aws-bundle-${gravitino-version}.jar`](https://mvnrepository.com/artifact/org.apache.gravitino/gravitino-aws-bundle)
 is the Gravitino AWS jar with Hadoop environment(3.3.1) and `hadoop-aws` jar.
-- 
[`gravitino-aws-${gravitino-version}.jar`](https://mvnrepository.com/artifact/org.apache.gravitino/gravitino-aws)
 is a condensed version of the Gravitino AWS bundle jar without Hadoop 
environment and `hadoop-aws` jar.
-- `hadoop-aws-3.3.4.jar` and `aws-java-sdk-bundle-1.12.262.jar` can be found 
in the Hadoop distribution in the `${HADOOP_HOME}/share/hadoop/tools/lib` 
directory.
+- 
[`gravitino-aws-bundle-${gravitino-version}.jar`](https://mvnrepository.com/artifact/org.apache.gravitino/gravitino-aws-bundle):
 A "fat" JAR that includes `gravitino-aws` functionality and all necessary 
dependencies like `hadoop-aws` (3.3.1) and the `AWS SDK`. Use this if your 
Spark environment doesn't have a pre-existing Hadoop setup.
+- [`gravitino-filesystem-hadoop3-runtime-${gravitino-version}.jar`](https://mvnrepository.com/artifact/org.apache.gravitino/gravitino-filesystem-hadoop3-runtime): A "fat" JAR that bundles Gravitino's virtual filesystem client and includes the functionality of `gravitino-aws`. It is required for accessing Gravitino filesets.
+- `hadoop-aws-3.3.4.jar` and `aws-java-sdk-bundle-1.12.262.jar`: Standard 
Hadoop dependencies for S3 access. If you are running in an existing Hadoop 
environment, you need to provide these JARs. They are typically located in the 
`${HADOOP_HOME}/share/hadoop/tools/lib` directory.
+- 
[`gravitino-aws-${gravitino-version}.jar`](https://mvnrepository.com/artifact/org.apache.gravitino/gravitino-aws):
 A "thin" JAR that only provides the AWS integration code. Its functionality is 
already included in the `gravitino-aws-bundle` and 
`gravitino-filesystem-hadoop3-runtime` JARs, so you do not need to add it as a 
direct dependency unless you want to manage all Hadoop and AWS dependencies 
manually.
 
 Please choose the correct jar according to your environment.
 
@@ -424,7 +423,7 @@ The following are examples of how to use the `hadoop fs` 
command to access the f
 
 2. Add the necessary jars to the Hadoop classpath. 
 
-For S3, you need to add 
`gravitino-filesystem-hadoop3-runtime-${gravitino-version}.jar`, 
`gravitino-aws-${gravitino-version}.jar` and `hadoop-aws-${hadoop-version}.jar` 
located at `${HADOOP_HOME}/share/hadoop/tools/lib/` to Hadoop classpath. 
+For S3, you need to add 
`gravitino-filesystem-hadoop3-runtime-${gravitino-version}.jar` and 
`hadoop-aws-${hadoop-version}.jar` located at 
`${HADOOP_HOME}/share/hadoop/tools/lib/` to Hadoop classpath. 
 
 3. Run the following command to access the fileset:
 
diff --git a/docs/iceberg-rest-service.md b/docs/iceberg-rest-service.md
index c809faef36..7acb4ada26 100644
--- a/docs/iceberg-rest-service.md
+++ b/docs/iceberg-rest-service.md
@@ -299,12 +299,10 @@ For other Iceberg s3 properties not managed by Gravitino 
like `s3.sse.type`, you
 
 Please refer to [S3 
credentials](./security/credential-vending.md#s3-credentials) for credential 
related configurations.
 
-:::caution
-To resolve Log4j class conflict issues that may arise when using Iceberg AWS 
1.10 bundle jars alongside the Gravitino Iceberg REST server, it is recommended 
to remove the Log4j JAR files (specifically log4j-core and log4j-api) from the 
`iceberg-rest-server/libs` directory.
-:::
-
 :::info
-To configure the JDBC catalog backend, set the 
`gravitino.iceberg-rest.warehouse` parameter to 
`s3://{bucket_name}/${prefix_name}`. For the Hive catalog backend, set 
`gravitino.iceberg-rest.warehouse` to `s3a://{bucket_name}/${prefix_name}`. 
Additionally, download the [Iceberg AWS 
bundle](https://mvnrepository.com/artifact/org.apache.iceberg/iceberg-aws-bundle)
 and place it in the classpath of Iceberg REST server.
+ - For the JDBC catalog backend, set the `gravitino.iceberg-rest.warehouse` 
parameter to `s3://{bucket_name}/${prefix_name}`. 
+ - For the Hive catalog backend, set `gravitino.iceberg-rest.warehouse` to 
`s3a://{bucket_name}/${prefix_name}`. 
+ - Additionally, download the [Gravitino Iceberg AWS 
bundle](https://mvnrepository.com/artifact/org.apache.gravitino/gravitino-iceberg-aws-bundle)
 and place it in the classpath of Iceberg REST server.
 :::
 
 #### OSS configuration
@@ -319,11 +317,7 @@ For other Iceberg OSS properties not managed by Gravitino 
like `client.security-
 
 Please refer to [OSS 
credentials](./security/credential-vending.md#oss-credentials) for credential 
related configurations.
 
-Additionally, Iceberg doesn't provide Iceberg Aliyun bundle jar which contains 
OSS packages, there are two alternatives to use OSS packages:
-1. Use [Gravitino Aliyun bundle jar with hadoop 
packages](https://mvnrepository.com/artifact/org.apache.gravitino/gravitino-aliyun-bundle).
-2. Use [Aliyun JAVA 
SDK](https://gosspublic.alicdn.com/sdks/java/aliyun_java_sdk_3.10.2.zip) and 
extract `aliyun-sdk-oss-3.10.2.jar`, `hamcrest-core-1.1.jar`, `jdom2-2.0.6.jar` 
jars.
-
-Please place the above jars in the classpath of Iceberg REST server, please 
refer to [server management](#server-management) for classpath details.
+Additionally, please download the [Gravitino Iceberg Aliyun bundle 
jar](https://mvnrepository.com/artifact/org.apache.gravitino/gravitino-iceberg-aliyun-bundle)
 and place it in the classpath of Iceberg REST server, you can refer to [server 
management](#server-management) for classpath details.
 
 :::info
 Please set the `gravitino.iceberg-rest.warehouse` parameter to 
`oss://{bucket_name}/${prefix_name}`.
@@ -346,7 +340,7 @@ Please ensure that the credential file can be accessed by 
the Gravitino server.
 :::
 
 :::info
-Please set `gravitino.iceberg-rest.warehouse` to 
`gs://{bucket_name}/${prefix_name}`, and download [Iceberg gcp 
bundle](https://mvnrepository.com/artifact/org.apache.iceberg/iceberg-gcp-bundle)
 and place it to the classpath of Gravitino Iceberg REST server, 
`iceberg-rest-server/libs` for the auxiliary server, `libs` for the standalone 
server.
+Please set `gravitino.iceberg-rest.warehouse` to 
`gs://{bucket_name}/${prefix_name}`, and download [Gravitino Iceberg GCP 
bundle](https://mvnrepository.com/artifact/org.apache.gravitino/gravitino-iceberg-gcp-bundle)
 and place it to the classpath of Gravitino Iceberg REST server, 
`iceberg-rest-server/libs` for the auxiliary server, `libs` for the standalone 
server.
 :::
 
 #### ADLS
@@ -360,7 +354,7 @@ For other Iceberg ADLS properties not managed by Gravitino 
like `adls.read.block
 Please refer to [ADLS 
credentials](./security/credential-vending.md#adls-credentials) for credential 
related configurations.
 
 :::info
-Please set `gravitino.iceberg-rest.warehouse` to 
`abfs[s]://{container-name}@{storage-account-name}.dfs.core.windows.net/{path}`,
 and download the [Iceberg Azure 
bundle](https://mvnrepository.com/artifact/org.apache.iceberg/iceberg-azure-bundle)
 and place it in the classpath of Iceberg REST server.
+Please set `gravitino.iceberg-rest.warehouse` to 
`abfs[s]://{container-name}@{storage-account-name}.dfs.core.windows.net/{path}`,
 and download the [Gravitino Iceberg Azure 
bundle](https://mvnrepository.com/artifact/org.apache.gravitino/gravitino-iceberg-azure-bundle)
 and place it in the classpath of Iceberg REST server.
 :::
 
 #### HDFS configuration
diff --git a/docs/lakehouse-iceberg-catalog.md 
b/docs/lakehouse-iceberg-catalog.md
index 92564118f0..90729ecdb4 100644
--- a/docs/lakehouse-iceberg-catalog.md
+++ b/docs/lakehouse-iceberg-catalog.md
@@ -83,12 +83,14 @@ Supports using static access-key-id and secret-access-key 
to access S3 data.
 
 For other Iceberg s3 properties not managed by Gravitino like `s3.sse.type`, 
you could config it directly by `gravitino.bypass.s3.sse.type`.
 
-:::caution
-To resolve Log4j class conflict issues that may arise when using Iceberg AWS 
1.9 bundle jars alongside the Gravitino server, it is recommended to remove the 
Log4j JAR files (specifically log4j-core and log4j-api) from the 
`catalogs/lakehouse-iceberg/libs` directory.
+:::info
+ - For the JDBC catalog backend, set the `warehouse` parameter to 
`s3://{bucket_name}/${prefix_name}`. 
+ - For the Hive catalog backend, set `warehouse` to 
`s3a://{bucket_name}/${prefix_name}`. 
+ - Additionally, download the [Gravitino Iceberg AWS 
bundle](https://mvnrepository.com/artifact/org.apache.gravitino/gravitino-iceberg-aws-bundle)
 and place it in the `catalogs/lakehouse-iceberg/libs/` directory.
 :::
 
-:::info
-To configure the JDBC catalog backend, set the `warehouse` parameter to 
`s3://{bucket_name}/${prefix_name}`. For the Hive catalog backend, set 
`warehouse` to `s3a://{bucket_name}/${prefix_name}`. Additionally, download the 
[Iceberg AWS 
bundle](https://mvnrepository.com/artifact/org.apache.iceberg/iceberg-aws-bundle)
 and place it in the `catalogs/lakehouse-iceberg/libs/` directory.
+:::note
+Since Gravitino 1.1.0, the Gravitino Iceberg AWS bundle jar has already 
included the Iceberg AWS bundle jar, so there is no need to download and 
include it separately.
 :::
 
 #### OSS
@@ -105,7 +107,11 @@ Gravitino Iceberg REST service supports using static 
access-key-id and secret-ac
 For other Iceberg OSS properties not managed by Gravitino like 
`client.security-token`, you could config it directly by 
`gravitino.bypass.client.security-token`.
 
 :::info
-Please set the `warehouse` parameter to `oss://{bucket_name}/${prefix_name}`. 
Additionally, download the [Aliyun OSS 
SDK](https://gosspublic.alicdn.com/sdks/java/aliyun_java_sdk_3.10.2.zip) and 
copy `aliyun-sdk-oss-3.10.2.jar`, `hamcrest-core-1.1.jar`, `jdom2-2.0.6.jar` in 
the `catalogs/lakehouse-iceberg/libs/` directory.
+Please set the `warehouse` parameter to `oss://{bucket_name}/${prefix_name}`. 
Additionally, download the [Gravitino Iceberg Aliyun 
bundle](https://mvnrepository.com/artifact/org.apache.gravitino/gravitino-iceberg-aliyun-bundle)
 and place it in the `catalogs/lakehouse-iceberg/libs/` directory.
+:::
+
+:::note
+Since Gravitino 1.1.0, the Gravitino Iceberg Aliyun bundle jar has already 
included the necessary Iceberg Aliyun dependency jars, so there is no need to 
download and include them separately.
 :::
 
 #### GCS
@@ -121,7 +127,11 @@ For other Iceberg GCS properties not managed by Gravitino 
like `gcs.project-id`,
 Please make sure the credential file is accessible by Gravitino, like using 
`export 
GOOGLE_APPLICATION_CREDENTIALS=/xx/application_default_credentials.json` before 
Gravitino server is started.
 
 :::info
-Please set `warehouse` to `gs://{bucket_name}/${prefix_name}`, and download 
[Iceberg GCP bundle 
jar](https://mvnrepository.com/artifact/org.apache.iceberg/iceberg-gcp-bundle) 
and place it to `catalogs/lakehouse-iceberg/libs/`.
+Please set `warehouse` to `gs://{bucket_name}/${prefix_name}`, and download 
[Gravitino Iceberg GCP bundle 
jar](https://mvnrepository.com/artifact/org.apache.gravitino/gravitino-iceberg-gcp-bundle)
 and place it to `catalogs/lakehouse-iceberg/libs/`.
+:::
+
+:::note
+Since Gravitino 1.1.0, the Gravitino Iceberg GCP bundle jar has already 
included the Iceberg GCP bundle jar, so there is no need to download and 
include it separately.
 :::
 
 #### ADLS
@@ -137,7 +147,11 @@ Supports using Azure account name and secret key to access 
ADLS data.
 For other Iceberg ADLS properties not managed by Gravitino like 
`adls.read.block-size-bytes`, you could config it directly by 
`gravitino.iceberg-rest.adls.read.block-size-bytes`.
 
 :::info
-Please set `warehouse` to 
`abfs[s]://{container-name}@{storage-account-name}.dfs.core.windows.net/{path}`,
 and download the [Iceberg Azure 
bundle](https://mvnrepository.com/artifact/org.apache.iceberg/iceberg-azure-bundle)
 and place it to `catalogs/lakehouse-iceberg/libs/`.
+Please set `warehouse` to 
`abfs[s]://{container-name}@{storage-account-name}.dfs.core.windows.net/{path}`,
 and download the [Gravitino Iceberg Azure 
bundle](https://mvnrepository.com/artifact/org.apache.gravitino/gravitino-iceberg-azure-bundle)
 and place it to `catalogs/lakehouse-iceberg/libs/`.
+:::
+
+:::note
+Since Gravitino 1.1.0, the Gravitino Iceberg Azure bundle jar has already 
included the Iceberg Azure bundle jar, so there is no need to download and 
include it separately.
 :::
 
 #### Other storages
diff --git a/docs/security/credential-vending.md 
b/docs/security/credential-vending.md
index 28cb71da43..21bcdfef74 100644
--- a/docs/security/credential-vending.md
+++ b/docs/security/credential-vending.md
@@ -159,7 +159,7 @@ Gravitino supports custom credentials, you can implement 
the `org.apache.graviti
 
 ## Deployment
 
-Besides setting credentials related configuration, please download Gravitino 
cloud bundle jar and place it in the classpath of Iceberg REST server or Hadoop 
catalog.
+Besides setting credentials related configuration, please download the related 
cloud bundle jar and place it in the classpath of Iceberg REST server or 
Fileset catalog.
 
 For Fileset catalog, please use Gravitino cloud bundle jar with Hadoop and 
cloud packages:
 
@@ -168,15 +168,15 @@ For Fileset catalog, please use Gravitino cloud bundle 
jar with Hadoop and cloud
 - [Gravitino GCP bundle jar with Hadoop and cloud 
packages](https://mvnrepository.com/artifact/org.apache.gravitino/gravitino-gcp-bundle)
 - [Gravitino Azure bundle jar with Hadoop and cloud 
packages](https://mvnrepository.com/artifact/org.apache.gravitino/gravitino-azure-bundle)
 
-For Iceberg REST catalog server, please use the Gravitino cloud bundle jar 
without Hadoop and cloud packages. Additionally, download the corresponding 
Iceberg cloud packages.
+For Iceberg REST catalog server, please download the corresponding Gravitino 
cloud packages.
 
-- [Gravitino AWS bundle jar without Hadoop and cloud 
packages](https://mvnrepository.com/artifact/org.apache.gravitino/gravitino-aws)
-- [Gravitino Aliyun bundle jar without Hadoop and cloud 
packages](https://mvnrepository.com/artifact/org.apache.gravitino/gravitino-aliyun)
-- [Gravitino GCP bundle jar without Hadoop and cloud 
packages](https://mvnrepository.com/artifact/org.apache.gravitino/gravitino-gcp)
-- [Gravitino Azure bundle jar without Hadoop and cloud 
packages](https://mvnrepository.com/artifact/org.apache.gravitino/gravitino-azure)
+- [Gravitino Iceberg AWS bundle 
JAR](https://mvnrepository.com/artifact/org.apache.gravitino/gravitino-iceberg-aws-bundle)
+- [Gravitino Iceberg GCP bundle 
JAR](https://mvnrepository.com/artifact/org.apache.gravitino/gravitino-iceberg-gcp-bundle)
+- [Gravitino Iceberg Aliyun bundle 
JAR](https://mvnrepository.com/artifact/org.apache.gravitino/gravitino-iceberg-aliyun-bundle)
+- [Gravitino Iceberg Azure bundle 
JAR](https://mvnrepository.com/artifact/org.apache.gravitino/gravitino-iceberg-azure-bundle)
 
 :::note
-For OSS, Iceberg doesn't provide Iceberg Aliyun bundle jar which contains OSS 
packages, you could provide the OSS jar by yourself or use [Gravitino Aliyun 
bundle jar with Hadoop and cloud 
packages](https://mvnrepository.com/artifact/org.apache.gravitino/gravitino-aliyun-bundle),
 please refer to [OSS 
configuration](../iceberg-rest-service.md#oss-configuration) for more details.
+Since Gravitino 1.1.0, the above Gravitino Iceberg cloud bundle jars have 
already included the Iceberg cloud bundle jars, so there is no need to download 
and include them separately.
 :::
 
 The classpath of the server:
@@ -190,7 +190,7 @@ The classpath of the server:
 
 Suppose the Iceberg table data is stored in S3, follow the steps below:
 
-1. Download the [Gravitino AWS bundle jar without hadoop 
packages](https://mvnrepository.com/artifact/org.apache.gravitino/gravitino-aws),
 and place it to the classpath of Iceberg REST server.
+1. Download the [Gravitino Iceberg AWS bundle 
JAR](https://mvnrepository.com/artifact/org.apache.gravitino/gravitino-iceberg-aws-bundle),
 and place it in the classpath of Iceberg REST server.
 
 2. Add s3 token credential configurations.
 
@@ -204,7 +204,7 @@ gravitino.iceberg-rest.s3-region = {region_name}
 gravitino.iceberg-rest.s3-role-arn = {role_arn}
 ```
 
-3. Exploring the Iceberg table with Spark client with credential vending 
enabled.
+3. Explore the Iceberg table with a Spark client with credential vending 
enabled.
 
 ```shell
 ./bin/spark-sql -v \
diff --git a/gradle/libs.versions.toml b/gradle/libs.versions.toml
index 4d650636eb..84b47ab09d 100644
--- a/gradle/libs.versions.toml
+++ b/gradle/libs.versions.toml
@@ -120,6 +120,7 @@ datanucleus-jdo = "3.2.0-m3"
 hudi = "0.15.0"
 google-auth = "1.28.0"
 aliyun-credentials = "0.3.12"
+aliyun-sdk-oss = "3.10.2"
 openlineage = "1.29.0"
 jcstress = "0.8.15"
 jmh-plugin = "0.7.3"
@@ -299,6 +300,7 @@ google-auth-http = { group = "com.google.auth", name = 
"google-auth-library-oaut
 google-auth-credentials = { group = "com.google.auth", name = 
"google-auth-library-credentials", version.ref = "google-auth" }
 
 aliyun-credentials-sdk = { group='com.aliyun', name='credentials-java', 
version.ref='aliyun-credentials' }
+aliyun-sdk-oss = { module = "com.aliyun.oss:aliyun-sdk-oss", version.ref = 
"aliyun-sdk-oss" }
 flinkjdbc = {group='org.apache.flink',name='flink-connector-jdbc', 
version.ref='flinkjdbc'}
 concurrent-trees = { group = "com.googlecode.concurrent-trees", name = 
"concurrent-trees", version.ref = "concurrent-trees" }
 jcasbin = { group='org.casbin', name='jcasbin', version.ref="jcasbin" }
diff --git a/iceberg/iceberg-rest-server/build.gradle.kts 
b/iceberg/iceberg-rest-server/build.gradle.kts
index 62c8f63f41..25db4d9db8 100644
--- a/iceberg/iceberg-rest-server/build.gradle.kts
+++ b/iceberg/iceberg-rest-server/build.gradle.kts
@@ -32,6 +32,10 @@ val scalaCollectionCompatVersion: String = 
libs.versions.scala.collection.compat
 
 dependencies {
   implementation(project(":api"))
+  implementation(project(":bundles:aliyun"))
+  implementation(project(":bundles:aws"))
+  implementation(project(":bundles:azure"))
+  implementation(project(":bundles:gcp"))
   implementation(project(":catalogs:catalog-common"))
   implementation(project(":clients:client-java"))
   implementation(project(":core")) {
@@ -70,11 +74,10 @@ dependencies {
   annotationProcessor(libs.lombok)
   compileOnly(libs.lombok)
 
-  // Iceberg doesn't provide Aliyun bundle jar, use Gravitino Aliyun bundle to 
provide OSS packages
-  testImplementation(project(":bundles:aliyun-bundle"))
-  testImplementation(project(":bundles:aws", configuration = "shadow"))
-  testImplementation(project(":bundles:gcp", configuration = "shadow"))
-  testImplementation(project(":bundles:azure", configuration = "shadow"))
+  testImplementation(project(":bundles:iceberg-aliyun-bundle"))
+  testImplementation(project(":bundles:iceberg-aws-bundle"))
+  testImplementation(project(":bundles:iceberg-gcp-bundle"))
+  testImplementation(project(":bundles:iceberg-azure-bundle"))
   testImplementation(project(":integration-test-common", "testArtifacts"))
   testImplementation(project(":server"))
 
@@ -91,11 +94,6 @@ dependencies {
   testImplementation(libs.h2db)
   testImplementation(libs.mysql.driver)
   testImplementation(libs.postgresql.driver)
-  testImplementation(libs.iceberg.aws.bundle)
-  testImplementation(libs.iceberg.gcp.bundle)
-  testImplementation(libs.iceberg.azure.bundle) {
-    exclude("com.google.guava", "guava")
-  }
   testImplementation(libs.jersey.test.framework.core) {
     exclude(group = "org.junit.jupiter")
   }
diff --git 
a/iceberg/iceberg-rest-server/src/test/java/org/apache/gravitino/iceberg/integration/test/IcebergRESTADLSTokenIT.java
 
b/iceberg/iceberg-rest-server/src/test/java/org/apache/gravitino/iceberg/integration/test/IcebergRESTADLSTokenIT.java
index b4c70a3829..1305c5af69 100644
--- 
a/iceberg/iceberg-rest-server/src/test/java/org/apache/gravitino/iceberg/integration/test/IcebergRESTADLSTokenIT.java
+++ 
b/iceberg/iceberg-rest-server/src/test/java/org/apache/gravitino/iceberg/integration/test/IcebergRESTADLSTokenIT.java
@@ -19,7 +19,6 @@
 
 package org.apache.gravitino.iceberg.integration.test;
 
-import java.io.IOException;
 import java.util.HashMap;
 import java.util.Map;
 import org.apache.gravitino.abs.credential.ADLSLocationUtils;
@@ -28,7 +27,6 @@ import org.apache.gravitino.credential.ADLSTokenCredential;
 import org.apache.gravitino.credential.CredentialConstants;
 import org.apache.gravitino.iceberg.common.IcebergConfig;
 import org.apache.gravitino.integration.test.util.BaseIT;
-import org.apache.gravitino.integration.test.util.DownloaderUtils;
 import org.apache.gravitino.integration.test.util.ITUtils;
 import org.apache.gravitino.storage.AzureProperties;
 import org.junit.jupiter.api.Assertions;
@@ -67,12 +65,6 @@ public class IcebergRESTADLSTokenIT extends 
IcebergRESTJdbcCatalogIT {
     if (ITUtils.isEmbedded()) {
       return;
     }
-    try {
-      downloadIcebergAzureBundleJar();
-    } catch (IOException e) {
-      LOG.warn("Download Iceberg Azure bundle jar failed,", e);
-      throw new RuntimeException(e);
-    }
     copyAzureBundleJar();
   }
 
@@ -116,21 +108,10 @@ public class IcebergRESTADLSTokenIT extends 
IcebergRESTJdbcCatalogIT {
     return configMap;
   }
 
-  private void downloadIcebergAzureBundleJar() throws IOException {
-    String icebergBundleJarUri =
-        String.format(
-            "https://repo1.maven.org/maven2/org/apache/iceberg/";
-                + "iceberg-azure-bundle/%s/iceberg-azure-bundle-%s.jar",
-            ITUtils.icebergVersion(), ITUtils.icebergVersion());
-    String gravitinoHome = System.getenv("GRAVITINO_HOME");
-    String targetDir = String.format("%s/iceberg-rest-server/libs/", 
gravitinoHome);
-    DownloaderUtils.downloadFile(icebergBundleJarUri, targetDir);
-  }
-
   private void copyAzureBundleJar() {
     String gravitinoHome = System.getenv("GRAVITINO_HOME");
     String targetDir = String.format("%s/iceberg-rest-server/libs/", 
gravitinoHome);
-    BaseIT.copyBundleJarsToDirectory("azure", targetDir);
+    BaseIT.copyBundleJarsToDirectory("iceberg-azure-bundle", targetDir);
   }
 
   @Test
diff --git 
a/iceberg/iceberg-rest-server/src/test/java/org/apache/gravitino/iceberg/integration/test/IcebergRESTAzureAccountKeyIT.java
 
b/iceberg/iceberg-rest-server/src/test/java/org/apache/gravitino/iceberg/integration/test/IcebergRESTAzureAccountKeyIT.java
index e74a8db677..3b5c7fff97 100644
--- 
a/iceberg/iceberg-rest-server/src/test/java/org/apache/gravitino/iceberg/integration/test/IcebergRESTAzureAccountKeyIT.java
+++ 
b/iceberg/iceberg-rest-server/src/test/java/org/apache/gravitino/iceberg/integration/test/IcebergRESTAzureAccountKeyIT.java
@@ -19,7 +19,6 @@
 
 package org.apache.gravitino.iceberg.integration.test;
 
-import java.io.IOException;
 import java.util.HashMap;
 import java.util.Map;
 import org.apache.gravitino.catalog.lakehouse.iceberg.IcebergConstants;
@@ -27,7 +26,6 @@ import 
org.apache.gravitino.credential.AzureAccountKeyCredential;
 import org.apache.gravitino.credential.CredentialConstants;
 import org.apache.gravitino.iceberg.common.IcebergConfig;
 import org.apache.gravitino.integration.test.util.BaseIT;
-import org.apache.gravitino.integration.test.util.DownloaderUtils;
 import org.apache.gravitino.integration.test.util.ITUtils;
 import org.apache.gravitino.storage.AzureProperties;
 import org.junit.jupiter.api.condition.EnabledIfEnvironmentVariable;
@@ -57,12 +55,6 @@ public class IcebergRESTAzureAccountKeyIT extends 
IcebergRESTJdbcCatalogIT {
     if (ITUtils.isEmbedded()) {
       return;
     }
-    try {
-      downloadIcebergAzureBundleJar();
-    } catch (IOException e) {
-      LOG.warn("Download Iceberg Azure bundle jar failed,", e);
-      throw new RuntimeException(e);
-    }
     copyAzureBundleJar();
   }
 
@@ -99,20 +91,9 @@ public class IcebergRESTAzureAccountKeyIT extends 
IcebergRESTJdbcCatalogIT {
     return configMap;
   }
 
-  private void downloadIcebergAzureBundleJar() throws IOException {
-    String icebergBundleJarUri =
-        String.format(
-            "https://repo1.maven.org/maven2/org/apache/iceberg/";
-                + "iceberg-azure-bundle/%s/iceberg-azure-bundle-%s.jar",
-            ITUtils.icebergVersion(), ITUtils.icebergVersion());
-    String gravitinoHome = System.getenv("GRAVITINO_HOME");
-    String targetDir = String.format("%s/iceberg-rest-server/libs/", 
gravitinoHome);
-    DownloaderUtils.downloadFile(icebergBundleJarUri, targetDir);
-  }
-
   private void copyAzureBundleJar() {
     String gravitinoHome = System.getenv("GRAVITINO_HOME");
     String targetDir = String.format("%s/iceberg-rest-server/libs/", 
gravitinoHome);
-    BaseIT.copyBundleJarsToDirectory("azure", targetDir);
+    BaseIT.copyBundleJarsToDirectory("iceberg-azure-bundle", targetDir);
   }
 }
diff --git 
a/iceberg/iceberg-rest-server/src/test/java/org/apache/gravitino/iceberg/integration/test/IcebergRESTGCSIT.java
 
b/iceberg/iceberg-rest-server/src/test/java/org/apache/gravitino/iceberg/integration/test/IcebergRESTGCSIT.java
index 1ada58189e..4c97810a4f 100644
--- 
a/iceberg/iceberg-rest-server/src/test/java/org/apache/gravitino/iceberg/integration/test/IcebergRESTGCSIT.java
+++ 
b/iceberg/iceberg-rest-server/src/test/java/org/apache/gravitino/iceberg/integration/test/IcebergRESTGCSIT.java
@@ -19,7 +19,6 @@
 
 package org.apache.gravitino.iceberg.integration.test;
 
-import java.io.IOException;
 import java.util.HashMap;
 import java.util.Map;
 import org.apache.gravitino.catalog.lakehouse.iceberg.IcebergConstants;
@@ -27,7 +26,6 @@ import org.apache.gravitino.credential.CredentialConstants;
 import org.apache.gravitino.credential.GCSTokenCredential;
 import org.apache.gravitino.iceberg.common.IcebergConfig;
 import org.apache.gravitino.integration.test.util.BaseIT;
-import org.apache.gravitino.integration.test.util.DownloaderUtils;
 import org.apache.gravitino.integration.test.util.ITUtils;
 import org.apache.gravitino.storage.GCSProperties;
 import org.junit.jupiter.api.condition.EnabledIfEnvironmentVariable;
@@ -49,12 +47,6 @@ public class IcebergRESTGCSIT extends 
IcebergRESTJdbcCatalogIT {
     if (ITUtils.isEmbedded()) {
       return;
     }
-
-    try {
-      downloadIcebergBundleJar();
-    } catch (IOException e) {
-      throw new RuntimeException(e);
-    }
     copyGCSBundleJar();
   }
 
@@ -89,17 +81,6 @@ public class IcebergRESTGCSIT extends 
IcebergRESTJdbcCatalogIT {
   private void copyGCSBundleJar() {
     String gravitinoHome = System.getenv("GRAVITINO_HOME");
     String targetDir = String.format("%s/iceberg-rest-server/libs/", 
gravitinoHome);
-    BaseIT.copyBundleJarsToDirectory("gcp", targetDir);
-  }
-
-  private void downloadIcebergBundleJar() throws IOException {
-    String icebergBundleJarUri =
-        String.format(
-            "https://repo1.maven.org/maven2/org/apache/iceberg/";
-                + "iceberg-gcp-bundle/%s/iceberg-gcp-bundle-%s.jar",
-            ITUtils.icebergVersion(), ITUtils.icebergVersion());
-    String gravitinoHome = System.getenv("GRAVITINO_HOME");
-    String targetDir = String.format("%s/iceberg-rest-server/libs/", 
gravitinoHome);
-    DownloaderUtils.downloadFile(icebergBundleJarUri, targetDir);
+    BaseIT.copyBundleJarsToDirectory("iceberg-gcp-bundle", targetDir);
   }
 }
diff --git 
a/iceberg/iceberg-rest-server/src/test/java/org/apache/gravitino/iceberg/integration/test/IcebergRESTOSSIT.java
 
b/iceberg/iceberg-rest-server/src/test/java/org/apache/gravitino/iceberg/integration/test/IcebergRESTOSSIT.java
index a867071dda..28e35c2657 100644
--- 
a/iceberg/iceberg-rest-server/src/test/java/org/apache/gravitino/iceberg/integration/test/IcebergRESTOSSIT.java
+++ 
b/iceberg/iceberg-rest-server/src/test/java/org/apache/gravitino/iceberg/integration/test/IcebergRESTOSSIT.java
@@ -19,7 +19,6 @@
 
 package org.apache.gravitino.iceberg.integration.test;
 
-import java.io.IOException;
 import java.util.HashMap;
 import java.util.Map;
 import org.apache.gravitino.catalog.lakehouse.iceberg.IcebergConstants;
@@ -27,7 +26,6 @@ import org.apache.gravitino.credential.CredentialConstants;
 import org.apache.gravitino.credential.OSSTokenCredential;
 import org.apache.gravitino.iceberg.common.IcebergConfig;
 import org.apache.gravitino.integration.test.util.BaseIT;
-import org.apache.gravitino.integration.test.util.DownloaderUtils;
 import org.apache.gravitino.integration.test.util.ITUtils;
 import org.apache.gravitino.storage.OSSProperties;
 import org.junit.jupiter.api.condition.EnabledIfEnvironmentVariable;
@@ -61,12 +59,6 @@ public class IcebergRESTOSSIT extends 
IcebergRESTJdbcCatalogIT {
     if (ITUtils.isEmbedded()) {
       return;
     }
-    try {
-      downloadIcebergForAliyunJar();
-    } catch (IOException e) {
-      LOG.warn("Download Iceberg Aliyun bundle jar failed,", e);
-      throw new RuntimeException(e);
-    }
     copyAliyunOSSJar();
   }
 
@@ -112,22 +104,9 @@ public class IcebergRESTOSSIT extends 
IcebergRESTJdbcCatalogIT {
     return configMap;
   }
 
-  private void downloadIcebergForAliyunJar() throws IOException {
-    String icebergBundleJarUri =
-        String.format(
-            "https://repo1.maven.org/maven2/org/apache/iceberg/";
-                + "iceberg-aliyun/%s/iceberg-aliyun-%s.jar",
-            ITUtils.icebergVersion(), ITUtils.icebergVersion());
-    String gravitinoHome = System.getenv("GRAVITINO_HOME");
-    String targetDir = String.format("%s/iceberg-rest-server/libs/", 
gravitinoHome);
-    DownloaderUtils.downloadFile(icebergBundleJarUri, targetDir);
-  }
-
   private void copyAliyunOSSJar() {
     String gravitinoHome = System.getenv("GRAVITINO_HOME");
     String targetDir = String.format("%s/iceberg-rest-server/libs/", 
gravitinoHome);
-    // Iceberg doesn't provide Iceberg Aliyun bundle jar, so use Gravitino 
aliyun bundle to provide
-    // OSS packages.
-    BaseIT.copyBundleJarsToDirectory("aliyun-bundle", targetDir);
+    BaseIT.copyBundleJarsToDirectory("iceberg-aliyun-bundle", targetDir);
   }
 }
diff --git 
a/iceberg/iceberg-rest-server/src/test/java/org/apache/gravitino/iceberg/integration/test/IcebergRESTOSSSecretIT.java
 
b/iceberg/iceberg-rest-server/src/test/java/org/apache/gravitino/iceberg/integration/test/IcebergRESTOSSSecretIT.java
index 38239bb1d9..5a8f93cff8 100644
--- 
a/iceberg/iceberg-rest-server/src/test/java/org/apache/gravitino/iceberg/integration/test/IcebergRESTOSSSecretIT.java
+++ 
b/iceberg/iceberg-rest-server/src/test/java/org/apache/gravitino/iceberg/integration/test/IcebergRESTOSSSecretIT.java
@@ -19,7 +19,6 @@
 
 package org.apache.gravitino.iceberg.integration.test;
 
-import java.io.IOException;
 import java.util.HashMap;
 import java.util.Map;
 import org.apache.gravitino.catalog.lakehouse.iceberg.IcebergConstants;
@@ -27,7 +26,6 @@ import org.apache.gravitino.credential.CredentialConstants;
 import org.apache.gravitino.credential.OSSSecretKeyCredential;
 import org.apache.gravitino.iceberg.common.IcebergConfig;
 import org.apache.gravitino.integration.test.util.BaseIT;
-import org.apache.gravitino.integration.test.util.DownloaderUtils;
 import org.apache.gravitino.integration.test.util.ITUtils;
 import org.apache.gravitino.storage.OSSProperties;
 import org.junit.jupiter.api.condition.EnabledIfEnvironmentVariable;
@@ -54,12 +52,6 @@ public class IcebergRESTOSSSecretIT extends 
IcebergRESTJdbcCatalogIT {
     if (ITUtils.isEmbedded()) {
       return;
     }
-    try {
-      downloadIcebergForAliyunJar();
-    } catch (IOException e) {
-      LOG.warn("Download Iceberg Aliyun bundle jar failed,", e);
-      throw new RuntimeException(e);
-    }
     copyAliyunOSSJar();
   }
 
@@ -97,22 +89,9 @@ public class IcebergRESTOSSSecretIT extends 
IcebergRESTJdbcCatalogIT {
     return configMap;
   }
 
-  private void downloadIcebergForAliyunJar() throws IOException {
-    String icebergBundleJarUri =
-        String.format(
-            "https://repo1.maven.org/maven2/org/apache/iceberg/";
-                + "iceberg-aliyun/%s/iceberg-aliyun-%s.jar",
-            ITUtils.icebergVersion(), ITUtils.icebergVersion());
-    String gravitinoHome = System.getenv("GRAVITINO_HOME");
-    String targetDir = String.format("%s/iceberg-rest-server/libs/", 
gravitinoHome);
-    DownloaderUtils.downloadFile(icebergBundleJarUri, targetDir);
-  }
-
   private void copyAliyunOSSJar() {
     String gravitinoHome = System.getenv("GRAVITINO_HOME");
     String targetDir = String.format("%s/iceberg-rest-server/libs/", 
gravitinoHome);
-    // Iceberg doesn't provide Iceberg Aliyun bundle jar, so use Gravitino 
aliyun bundle to provide
-    // OSS packages.
-    BaseIT.copyBundleJarsToDirectory("aliyun-bundle", targetDir);
+    BaseIT.copyBundleJarsToDirectory("iceberg-aliyun-bundle", targetDir);
   }
 }
diff --git 
a/iceberg/iceberg-rest-server/src/test/java/org/apache/gravitino/iceberg/integration/test/IcebergRESTS3TokenIT.java
 
b/iceberg/iceberg-rest-server/src/test/java/org/apache/gravitino/iceberg/integration/test/IcebergRESTS3TokenIT.java
index b393833db8..743f3bbc48 100644
--- 
a/iceberg/iceberg-rest-server/src/test/java/org/apache/gravitino/iceberg/integration/test/IcebergRESTS3TokenIT.java
+++ 
b/iceberg/iceberg-rest-server/src/test/java/org/apache/gravitino/iceberg/integration/test/IcebergRESTS3TokenIT.java
@@ -20,7 +20,6 @@
 package org.apache.gravitino.iceberg.integration.test;
 
 import com.google.common.collect.ImmutableList;
-import java.io.IOException;
 import java.util.HashMap;
 import java.util.List;
 import java.util.Map;
@@ -29,7 +28,6 @@ import org.apache.gravitino.credential.CredentialConstants;
 import org.apache.gravitino.credential.S3TokenCredential;
 import org.apache.gravitino.iceberg.common.IcebergConfig;
 import org.apache.gravitino.integration.test.util.BaseIT;
-import org.apache.gravitino.integration.test.util.DownloaderUtils;
 import org.apache.gravitino.integration.test.util.ITUtils;
 import org.apache.gravitino.storage.S3Properties;
 import org.apache.iceberg.TableProperties;
@@ -62,12 +60,6 @@ public class IcebergRESTS3TokenIT extends 
IcebergRESTJdbcCatalogIT {
     if (ITUtils.isEmbedded()) {
       return;
     }
-    try {
-      downloadIcebergAwsBundleJar();
-    } catch (IOException e) {
-      LOG.warn("Download Iceberg AWS bundle jar failed,", e);
-      throw new RuntimeException(e);
-    }
     copyS3BundleJar();
   }
 
@@ -110,21 +102,10 @@ public class IcebergRESTS3TokenIT extends 
IcebergRESTJdbcCatalogIT {
     return configMap;
   }
 
-  private void downloadIcebergAwsBundleJar() throws IOException {
-    String icebergBundleJarUri =
-        String.format(
-            "https://repo1.maven.org/maven2/org/apache/iceberg/";
-                + "iceberg-aws-bundle/%s/iceberg-aws-bundle-%s.jar",
-            ITUtils.icebergVersion(), ITUtils.icebergVersion());
-    String gravitinoHome = System.getenv("GRAVITINO_HOME");
-    String targetDir = String.format("%s/iceberg-rest-server/libs/", 
gravitinoHome);
-    DownloaderUtils.downloadFile(icebergBundleJarUri, targetDir);
-  }
-
   private void copyS3BundleJar() {
     String gravitinoHome = System.getenv("GRAVITINO_HOME");
     String targetDir = String.format("%s/iceberg-rest-server/libs/", 
gravitinoHome);
-    BaseIT.copyBundleJarsToDirectory("aws", targetDir);
+    BaseIT.copyBundleJarsToDirectory("iceberg-aws-bundle", targetDir);
   }
 
   /**
diff --git a/settings.gradle.kts b/settings.gradle.kts
index 98f6df0361..cde9547aad 100644
--- a/settings.gradle.kts
+++ b/settings.gradle.kts
@@ -80,10 +80,10 @@ project(":spark-connector:spark-runtime-3.5").projectDir = 
file("spark-connector
 include("web:web", "web:integration-test")
 include("docs")
 include("integration-test-common")
-include(":bundles:aws", ":bundles:aws-bundle")
-include(":bundles:gcp", ":bundles:gcp-bundle")
-include(":bundles:aliyun", ":bundles:aliyun-bundle")
-include(":bundles:azure", ":bundles:azure-bundle")
+include(":bundles:aws", ":bundles:aws-bundle", ":bundles:iceberg-aws-bundle")
+include(":bundles:gcp", ":bundles:gcp-bundle", ":bundles:iceberg-gcp-bundle")
+include(":bundles:aliyun", ":bundles:aliyun-bundle", 
":bundles:iceberg-aliyun-bundle")
+include(":bundles:azure", ":bundles:azure-bundle", 
":bundles:iceberg-azure-bundle")
 include(":catalogs:hadoop-common")
 include(":lineage")
 include(":mcp-server")


Reply via email to