This is an automated email from the ASF dual-hosted git repository.

yufei pushed a commit to branch release/1.0.x
in repository https://gitbox.apache.org/repos/asf/polaris.git

commit 8b25377bb44d6ae599f4779c988e3586628511c5
Author: Yun Zou <yunzou.colost...@gmail.com>
AuthorDate: Wed Jun 18 13:54:54 2025 -0700

    Reuse shadowJar for spark client bundle jar maven publish (#1857)
    
    * fix spark client
    
    * fix test failure and address feedback
    
    * fix error
    
    * update regression test
    
    * update classifier name
    
    * address comment
    
    * add change
    
    * update doc
    
    * update build and readme
    
    * add back jar
    
    * update dependency
    
    * add change
    
    * update
    
    * update tests
    
    * remove merge service file
    
    * update readme
    
    * update readme
---
 .../src/main/kotlin/publishing/PublishingHelperPlugin.kt   |  5 -----
 plugins/spark/README.md                                    |  8 +++++---
 plugins/spark/v3.5/spark/build.gradle.kts                  | 14 ++++++++++----
 site/content/in-dev/unreleased/polaris-spark-client.md     | 11 +++++++++++
 4 files changed, 26 insertions(+), 12 deletions(-)

diff --git a/build-logic/src/main/kotlin/publishing/PublishingHelperPlugin.kt 
b/build-logic/src/main/kotlin/publishing/PublishingHelperPlugin.kt
index d4d412a30..04b04225e 100644
--- a/build-logic/src/main/kotlin/publishing/PublishingHelperPlugin.kt
+++ b/build-logic/src/main/kotlin/publishing/PublishingHelperPlugin.kt
@@ -133,11 +133,6 @@ constructor(private val softwareComponentFactory: 
SoftwareComponentFactory) : Pl
 
                 suppressPomMetadataWarningsFor("testFixturesApiElements")
                 suppressPomMetadataWarningsFor("testFixturesRuntimeElements")
-
-                if (project.tasks.findByName("createPolarisSparkJar") != null) 
{
-                  // if the project contains spark client jar, also publish 
the jar to maven
-                  artifact(project.tasks.named("createPolarisSparkJar").get())
-                }
               }
 
               if (
diff --git a/plugins/spark/README.md b/plugins/spark/README.md
index c7d6bc876..3f4acc31c 100644
--- a/plugins/spark/README.md
+++ b/plugins/spark/README.md
@@ -29,15 +29,17 @@ Right now, the plugin only provides support for Spark 3.5, 
Scala version 2.12 an
 and depends on iceberg-spark-runtime 1.9.0.
 
 # Build Plugin Jar
-A task createPolarisSparkJar is added to build a jar for the Polaris Spark 
plugin, the jar is named as:
+A shadowJar task is added to build a jar for the Polaris Spark plugin, the jar 
is named as:
 `polaris-spark-<sparkVersion>_<scalaVersion>-<polarisVersion>-bundle.jar`. For 
example:
 `polaris-spark-3.5_2.12-0.11.0-beta-incubating-SNAPSHOT-bundle.jar`.
 
-- `./gradlew :polaris-spark-3.5_2.12:createPolarisSparkJar` -- build jar for 
Spark 3.5 with Scala version 2.12.
-- `./gradlew :polaris-spark-3.5_2.13:createPolarisSparkJar` -- build jar for 
Spark 3.5 with Scala version 2.13.
+- `./gradlew :polaris-spark-3.5_2.12:shadowJar` -- build jar for Spark 3.5 
with Scala version 2.12.
+- `./gradlew :polaris-spark-3.5_2.13:shadowJar` -- build jar for Spark 3.5 
with Scala version 2.13.
 
 The result jar is located at plugins/spark/v3.5/build/<scala_version>/libs 
after the build.
 
+The shadowJar task is also executed automatically when you run `gradlew 
assemble` or `gradlew build`.
+
 # Start Spark with Local Polaris Service using built Jar
 Once the jar is built, we can manually test it with Spark and a local Polaris 
service.
 
diff --git a/plugins/spark/v3.5/spark/build.gradle.kts 
b/plugins/spark/v3.5/spark/build.gradle.kts
index a2a54e26b..c328bb23e 100644
--- a/plugins/spark/v3.5/spark/build.gradle.kts
+++ b/plugins/spark/v3.5/spark/build.gradle.kts
@@ -19,7 +19,10 @@
 
 import com.github.jengelman.gradle.plugins.shadow.tasks.ShadowJar
 
-plugins { id("polaris-client") }
+plugins {
+  id("polaris-client")
+  id("com.gradleup.shadow")
+}
 
 // get version information
 val sparkMajorVersion = "3.5"
@@ -112,7 +115,7 @@ dependencies {
   }
 }
 
-tasks.register<ShadowJar>("createPolarisSparkJar") {
+tasks.named<ShadowJar>("shadowJar") {
   archiveClassifier = "bundle"
   isZip64 = true
 
@@ -135,8 +138,11 @@ tasks.register<ShadowJar>("createPolarisSparkJar") {
     exclude(dependency("org.apache.avro:avro*.*"))
   }
 
-  relocate("com.fasterxml", "org.apache.polaris.shaded.com.fasterxml.jackson")
+  relocate("com.fasterxml", "org.apache.polaris.shaded.com.fasterxml")
   relocate("org.apache.avro", "org.apache.polaris.shaded.org.apache.avro")
 }
 
-tasks.withType(Jar::class).named("sourcesJar") { 
dependsOn("createPolarisSparkJar") }
+// ensure the shadowJar job is run for both `assemble` and `build` task
+tasks.named("assemble") { dependsOn("shadowJar") }
+
+tasks.named("build") { dependsOn("shadowJar") }
diff --git a/site/content/in-dev/unreleased/polaris-spark-client.md 
b/site/content/in-dev/unreleased/polaris-spark-client.md
index 4ceb536a9..a34bceece 100644
--- a/site/content/in-dev/unreleased/polaris-spark-client.md
+++ b/site/content/in-dev/unreleased/polaris-spark-client.md
@@ -128,3 +128,14 @@ The Polaris Spark client has the following functionality 
limitations:
 3) Rename a Delta table is not supported.
 4) ALTER TABLE ... SET LOCATION is not supported for DELTA table.
 5) For other non-Iceberg tables like csv, it is not supported.
+
+## Iceberg Spark Client compatibility with Polaris Spark Client
+The Polaris Spark client today depends on a specific Iceberg client version, 
and the version dependency is described
+in the following table:
+
+| Spark Client Version | Iceberg Spark Client Version |
+|----------------------|------------------------------|
+| 1.0.0                | 1.9.0                        |
+
+The Iceberg dependency is automatically downloaded when the Polaris package is 
downloaded, so there is no need to
+add the Iceberg Spark client in the `packages` configuration.

Reply via email to