This is an automated email from the ASF dual-hosted git repository.

dongjoon pushed a commit to branch main
in repository https://gitbox.apache.org/repos/asf/spark-connect-swift.git


The following commit(s) were added to refs/heads/main by this push:
     new 6076c74  [SPARK-55381] Use Spark `4.0.2` instead of `4.0.1`
6076c74 is described below

commit 6076c744828ff1fd6658fae852bb2972f8d042e6
Author: Dongjoon Hyun <[email protected]>
AuthorDate: Thu Feb 5 18:25:44 2026 -0800

    [SPARK-55381] Use Spark `4.0.2` instead of `4.0.1`
    
    ### What changes were proposed in this pull request?
    
    This PR aims to upgrade CI to use Spark `4.0.2` instead of `4.0.1`.
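
    Since only the pinned Spark version changes, the bump can be sanity-checked locally before CI picks it up. A minimal sketch, assuming the official `apache/spark` image layout with Spark under `/opt/spark` (illustrative only, not part of this patch):

    ```bash
    # Pull the bumped image and print its Spark version to confirm it resolves to 4.0.2.
    docker pull apache/spark:4.0.2
    docker run --rm apache/spark:4.0.2 /opt/spark/bin/spark-submit --version
    ```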
    
    ### Why are the changes needed?
    
    To test against the latest Spark 4.0.2 release.
    
    ### Does this PR introduce _any_ user-facing change?
    
    No, this only changes the test infrastructure.
    
    ### How was this patch tested?
    
    Pass the CIs.
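
    For reference, the CI steps touched by this patch can be reproduced locally roughly as follows (a sketch distilled from the workflow diff below; the exact test filters vary by job):

    ```bash
    # Download and unpack the Spark 4.0.2 distribution, mirroring the workflow steps.
    curl -LO https://www.apache.org/dyn/closer.lua/spark/spark-4.0.2/spark-4.0.2-bin-hadoop3.tgz?action=download
    tar xvfz spark-4.0.2-bin-hadoop3.tgz && rm spark-4.0.2-bin-hadoop3.tgz
    mv spark-4.0.2-bin-hadoop3 /tmp/spark

    # Start a local Spark Connect server, then run the Swift test suites against it.
    cd /tmp/spark/sbin && ./start-connect-server.sh && cd -
    swift test -c release   # CI narrows this with filters such as --filter DataFrameWriterV2Tests
    ```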
    
    ### Was this patch authored or co-authored using generative AI tooling?
    
    Generated-by: `Gemini 3 Pro (High)` on `Antigravity`
    
    Closes #285 from dongjoon-hyun/SPARK-55381.
    
    Authored-by: Dongjoon Hyun <[email protected]>
    Signed-off-by: Dongjoon Hyun <[email protected]>
---
 .github/workflows/build_and_test.yml | 16 ++++++++--------
 1 file changed, 8 insertions(+), 8 deletions(-)

diff --git a/.github/workflows/build_and_test.yml b/.github/workflows/build_and_test.yml
index 5c977db..cc0bb92 100644
--- a/.github/workflows/build_and_test.yml
+++ b/.github/workflows/build_and_test.yml
@@ -84,7 +84,7 @@ jobs:
       SPARK_REMOTE: "sc://localhost:15003"
     services:
       spark:
-        image: apache/spark:4.0.1
+        image: apache/spark:4.0.2
         env:
           SPARK_NO_DAEMONIZE: 1
         ports:
@@ -165,9 +165,9 @@ jobs:
       run: swift test --filter NOTHING -c release
     - name: Test
       run: |
-        curl -LO https://www.apache.org/dyn/closer.lua/spark/spark-4.0.1/spark-4.0.1-bin-hadoop3.tgz?action=download
-        tar xvfz spark-4.0.1-bin-hadoop3.tgz && rm spark-4.0.1-bin-hadoop3.tgz
-        mv spark-4.0.1-bin-hadoop3 /tmp/spark
+        curl -LO https://www.apache.org/dyn/closer.lua/spark/spark-4.0.2/spark-4.0.2-bin-hadoop3.tgz?action=download
+        tar xvfz spark-4.0.2-bin-hadoop3.tgz && rm spark-4.0.2-bin-hadoop3.tgz
+        mv spark-4.0.2-bin-hadoop3 /tmp/spark
         cd /tmp/spark/sbin
         ./start-connect-server.sh
         cd -
@@ -260,11 +260,11 @@ jobs:
       run: swift test --filter NOTHING -c release
     - name: Test
       run: |
-        curl -LO https://www.apache.org/dyn/closer.lua/spark/spark-4.0.1/spark-4.0.1-bin-hadoop3.tgz?action=download
-        tar xvfz spark-4.0.1-bin-hadoop3.tgz && rm spark-4.0.1-bin-hadoop3.tgz
-        mv spark-4.0.1-bin-hadoop3 /tmp/spark
+        curl -LO https://www.apache.org/dyn/closer.lua/spark/spark-4.0.2/spark-4.0.2-bin-hadoop3.tgz?action=download
+        tar xvfz spark-4.0.2-bin-hadoop3.tgz && rm spark-4.0.2-bin-hadoop3.tgz
+        mv spark-4.0.2-bin-hadoop3 /tmp/spark
         cd /tmp/spark/sbin
-        ./start-connect-server.sh --packages org.apache.spark:spark-connect_2.13:4.0.1,org.apache.iceberg:iceberg-spark-runtime-4.0_2.13:1.10.1 -c spark.sql.catalog.local=org.apache.iceberg.spark.SparkCatalog -c spark.sql.catalog.local.type=hadoop -c spark.sql.catalog.local.warehouse=/tmp/spark/warehouse -c spark.sql.defaultCatalog=local
+        ./start-connect-server.sh --packages org.apache.spark:spark-connect_2.13:4.0.2,org.apache.iceberg:iceberg-spark-runtime-4.0_2.13:1.10.1 -c spark.sql.catalog.local=org.apache.iceberg.spark.SparkCatalog -c spark.sql.catalog.local.type=hadoop -c spark.sql.catalog.local.warehouse=/tmp/spark/warehouse -c spark.sql.defaultCatalog=local
         cd -
         swift test --filter DataFrameWriterV2Tests -c release
         swift test --filter IcebergTest -c release


---------------------------------------------------------------------
To unsubscribe, e-mail: [email protected]
For additional commands, e-mail: [email protected]
