This is an automated email from the ASF dual-hosted git repository.
dongjoon pushed a commit to branch master
in repository https://gitbox.apache.org/repos/asf/spark.git
The following commit(s) were added to refs/heads/master by this push:
new d2be5d2fab67 [SPARK-55604][INFRA] Make `actions/*` GitHub Actions jobs up-to-date
d2be5d2fab67 is described below
commit d2be5d2fab678fa0e6d30341c044e7b0b461547e
Author: Dongjoon Hyun <[email protected]>
AuthorDate: Thu Feb 19 07:34:11 2026 -0800
[SPARK-55604][INFRA] Make `actions/*` GitHub Actions jobs up-to-date
### What changes were proposed in this pull request?
This PR aims to make `actions/*` GitHub Actions jobs up-to-date.
### Why are the changes needed?
To keep the CIs up-to-date.
| Action | Old Version | New Version |
| :--- | :--- | :--- |
| `actions/checkout` | `v4` | **`v6`** |
| `actions/cache` | `v4` | **`v5`** |
| `actions/setup-java` | `v4` | **`v5`** |
| `actions/setup-python` | `v5` | **`v6`** |
| `actions/upload-artifact` | `v4` | **`v6`** |
| `actions/download-artifact` | `v5` | **`v6`** |
| `actions/github-script` | `v7` | **`v8`** |
| `actions/stale` | `c201d45...` (v1.1.0) | **`v10`** |
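Each affected workflow only changes the version tag in its `uses:` reference; a checkout step, for example, is bumped as sketched below (illustrative snippet, mirroring the existing workflow steps rather than listing every affected file). Note that `actions/stale` additionally moves from a commit-SHA pin to the `v10` tag.
```yaml
# Before
- name: Checkout Spark repository
  uses: actions/checkout@v4
  with:
    fetch-depth: 0

# After
- name: Checkout Spark repository
  uses: actions/checkout@v6
  with:
    fetch-depth: 0
```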
### How was this patch tested?
Pass the CIs.
### Was this patch authored or co-authored using generative AI tooling?
Generated-by: `Gemini 3 Pro (High)` on `Antigravity`
Closes #54377 from dongjoon-hyun/SPARK-55604.
Authored-by: Dongjoon Hyun <[email protected]>
Signed-off-by: Dongjoon Hyun <[email protected]>
---
.github/workflows/benchmark.yml | 24 ++---
.github/workflows/build_and_test.yml | 116 ++++++++++++------------
.github/workflows/build_infra_images_cache.yml | 2 +-
.github/workflows/build_python_connect.yml | 14 +--
.github/workflows/build_python_connect40.yml | 14 +--
.github/workflows/build_sparkr_window.yml | 10 +-
.github/workflows/maven_test.yml | 14 +--
.github/workflows/notify_test_workflow.yml | 2 +-
.github/workflows/pages.yml | 6 +-
.github/workflows/publish_snapshot.yml | 8 +-
.github/workflows/python_hosted_runner_test.yml | 14 +--
.github/workflows/release.yml | 6 +-
.github/workflows/stale.yml | 2 +-
.github/workflows/test_report.yml | 2 +-
.github/workflows/update_build_status.yml | 2 +-
15 files changed, 118 insertions(+), 118 deletions(-)
diff --git a/.github/workflows/benchmark.yml b/.github/workflows/benchmark.yml
index 3c2bf490f0d7..c45796850d8b 100644
--- a/.github/workflows/benchmark.yml
+++ b/.github/workflows/benchmark.yml
@@ -78,12 +78,12 @@ jobs:
SPARK_LOCAL_IP: localhost
steps:
- name: Checkout Spark repository
- uses: actions/checkout@v4
+ uses: actions/checkout@v6
# In order to get diff files
with:
fetch-depth: 0
- name: Cache SBT and Maven
- uses: actions/cache@v4
+ uses: actions/cache@v5
with:
path: |
build/apache-maven-*
@@ -93,7 +93,7 @@ jobs:
restore-keys: |
build-
- name: Cache Coursier local repository
- uses: actions/cache@v4
+ uses: actions/cache@v5
with:
path: ~/.cache/coursier
key: benchmark-coursier-${{ inputs.jdk }}-${{ hashFiles('**/pom.xml', '**/plugins.sbt') }}
@@ -101,7 +101,7 @@ jobs:
benchmark-coursier-${{ inputs.jdk }}
- name: Cache TPC-DS generated data
id: cache-tpcds-sf-1
- uses: actions/cache@v4
+ uses: actions/cache@v5
with:
path: |
./tpcds-sf-1
@@ -109,7 +109,7 @@ jobs:
key: tpcds-${{ hashFiles('.github/workflows/benchmark.yml', 'sql/core/src/test/scala/org/apache/spark/sql/TPCDSSchema.scala') }}
- name: Checkout tpcds-kit repository
if: steps.cache-tpcds-sf-1.outputs.cache-hit != 'true'
- uses: actions/checkout@v4
+ uses: actions/checkout@v6
with:
repository: databricks/tpcds-kit
ref: 1b7fb7529edae091684201fab142d956d6afd881
@@ -119,7 +119,7 @@ jobs:
run: cd tpcds-kit/tools && make OS=LINUX
- name: Install Java ${{ inputs.jdk }}
if: steps.cache-tpcds-sf-1.outputs.cache-hit != 'true'
- uses: actions/setup-java@v4
+ uses: actions/setup-java@v5
with:
distribution: zulu
java-version: ${{ inputs.jdk }}
@@ -151,12 +151,12 @@ jobs:
SPARK_TPCDS_DATA_TEXT: ${{ github.workspace }}/tpcds-sf-1-text
steps:
- name: Checkout Spark repository
- uses: actions/checkout@v4
+ uses: actions/checkout@v6
# In order to get diff files
with:
fetch-depth: 0
- name: Cache SBT and Maven
- uses: actions/cache@v4
+ uses: actions/cache@v5
with:
path: |
build/apache-maven-*
@@ -166,21 +166,21 @@ jobs:
restore-keys: |
build-
- name: Cache Coursier local repository
- uses: actions/cache@v4
+ uses: actions/cache@v5
with:
path: ~/.cache/coursier
key: benchmark-coursier-${{ inputs.jdk }}-${{ hashFiles('**/pom.xml', '**/plugins.sbt') }}
restore-keys: |
benchmark-coursier-${{ inputs.jdk }}
- name: Install Java ${{ inputs.jdk }}
- uses: actions/setup-java@v4
+ uses: actions/setup-java@v5
with:
distribution: zulu
java-version: ${{ inputs.jdk }}
- name: Cache TPC-DS generated data
if: contains(inputs.class, 'TPCDSQueryBenchmark') || contains(inputs.class, 'LZ4TPCDSDataBenchmark') || contains(inputs.class, 'ZStandardTPCDSDataBenchmark') || contains(inputs.class, '*')
id: cache-tpcds-sf-1
- uses: actions/cache@v4
+ uses: actions/cache@v5
with:
path: |
./tpcds-sf-1
@@ -224,7 +224,7 @@ jobs:
echo "Error: Failed to push after 5 attempts."
exit 1
- name: Upload benchmark results
- uses: actions/upload-artifact@v4
+ uses: actions/upload-artifact@v6
with:
name: benchmark-results-${{ inputs.jdk }}-${{ inputs.scala }}-${{ matrix.split }}
path: target/benchmark-results-${{ inputs.jdk }}-${{ inputs.scala }}.tar
diff --git a/.github/workflows/build_and_test.yml b/.github/workflows/build_and_test.yml
index ecbb304c382b..6cc57ea0f52a 100644
--- a/.github/workflows/build_and_test.yml
+++ b/.github/workflows/build_and_test.yml
@@ -75,7 +75,7 @@ jobs:
image_pyspark_url_link: ${{ steps.infra-image-link.outputs.image_pyspark_url_link }}
steps:
- name: Checkout Spark repository
- uses: actions/checkout@v4
+ uses: actions/checkout@v6
with:
fetch-depth: 0
repository: apache/spark
@@ -320,7 +320,7 @@ jobs:
SKIP_PACKAGING: true
steps:
- name: Checkout Spark repository
- uses: actions/checkout@v4
+ uses: actions/checkout@v6
# In order to fetch changed files
with:
fetch-depth: 0
@@ -335,7 +335,7 @@ jobs:
git -c user.name='Apache Spark Test Account' -c user.email='[email protected]' commit -m "Merged commit" --allow-empty
# Cache local repositories. Note that GitHub Actions cache has a 10G limit.
- name: Cache SBT and Maven
- uses: actions/cache@v4
+ uses: actions/cache@v5
with:
path: |
build/apache-maven-*
@@ -345,7 +345,7 @@ jobs:
restore-keys: |
build-
- name: Cache Coursier local repository
- uses: actions/cache@v4
+ uses: actions/cache@v5
with:
path: ~/.cache/coursier
key: ${{ matrix.java }}-${{ matrix.hadoop }}-coursier-${{ hashFiles('**/pom.xml', '**/plugins.sbt') }}
@@ -357,12 +357,12 @@ jobs:
./dev/free_disk_space
fi
- name: Install Java ${{ matrix.java }}
- uses: actions/setup-java@v4
+ uses: actions/setup-java@v5
with:
distribution: zulu
java-version: ${{ matrix.java }}
- name: Install Python 3.12
- uses: actions/setup-python@v5
+ uses: actions/setup-python@v6
# We should install one Python that is higher than 3+ for SQL and Yarn because:
# - SQL component also has Python related tests, for example, IntegratedUDFTestUtils.
# - Yarn has a Python specific test too, for example, YarnClusterSuite.
@@ -392,7 +392,7 @@ jobs:
./dev/run-tests --parallelism 1 --modules "$MODULES_TO_TEST"
--included-tags "$INCLUDED_TAGS" --excluded-tags "$EXCLUDED_TAGS"
- name: Upload test results to report
if: always()
- uses: actions/upload-artifact@v4
+ uses: actions/upload-artifact@v6
with:
name: test-results-${{ matrix.modules }}-${{ matrix.comment }}-${{ matrix.java }}-${{ matrix.hadoop }}-${{ matrix.hive }}
path: |
@@ -407,13 +407,13 @@ jobs:
**/target/surefire-reports/*.xml
- name: Upload unit tests log files
if: ${{ !success() }}
- uses: actions/upload-artifact@v4
+ uses: actions/upload-artifact@v6
with:
name: unit-tests-log-${{ matrix.modules }}-${{ matrix.comment }}-${{ matrix.java }}-${{ matrix.hadoop }}-${{ matrix.hive }}
path: "**/target/*.log"
- name: Upload yarn app log files
if: ${{ !success() && contains(matrix.modules, 'yarn') }}
- uses: actions/upload-artifact@v4
+ uses: actions/upload-artifact@v6
with:
name: yarn-app-log-${{ matrix.comment }}-${{ matrix.java }}-${{ matrix.hadoop }}-${{ matrix.hive }}
path: "**/target/test/data/"
@@ -438,7 +438,7 @@ jobs:
username: ${{ github.actor }}
password: ${{ secrets.GITHUB_TOKEN }}
- name: Checkout Spark repository
- uses: actions/checkout@v4
+ uses: actions/checkout@v6
# In order to fetch changed files
with:
fetch-depth: 0
@@ -585,7 +585,7 @@ jobs:
PYSPARK_TEST_TIMEOUT: 450
steps:
- name: Checkout Spark repository
- uses: actions/checkout@v4
+ uses: actions/checkout@v6
# In order to fetch changed files
with:
fetch-depth: 0
@@ -603,7 +603,7 @@ jobs:
git -c user.name='Apache Spark Test Account' -c user.email='[email protected]' commit -m "Merged commit" --allow-empty
# Cache local repositories. Note that GitHub Actions cache has a 10G limit.
- name: Cache SBT and Maven
- uses: actions/cache@v4
+ uses: actions/cache@v5
with:
path: |
build/apache-maven-*
@@ -613,7 +613,7 @@ jobs:
restore-keys: |
build-
- name: Cache Coursier local repository
- uses: actions/cache@v4
+ uses: actions/cache@v5
with:
path: ~/.cache/coursier
key: pyspark-coursier-${{ hashFiles('**/pom.xml', '**/plugins.sbt') }}
@@ -623,7 +623,7 @@ jobs:
shell: 'script -q -e -c "bash {0}"'
run: ./dev/free_disk_space_container
- name: Install Java ${{ matrix.java }}
- uses: actions/setup-java@v4
+ uses: actions/setup-java@v5
with:
distribution: zulu
java-version: ${{ matrix.java }}
@@ -677,7 +677,7 @@ jobs:
- name: Upload test results to report
env: ${{ fromJSON(inputs.envs) }}
if: always()
- uses: actions/upload-artifact@v4
+ uses: actions/upload-artifact@v6
with:
name: test-results-${{ matrix.modules }}--${{ matrix.java }}-${{ inputs.hadoop }}-hive2.3-${{ env.PYTHON_TO_TEST }}
path: |
@@ -693,7 +693,7 @@ jobs:
- name: Upload unit tests log files
env: ${{ fromJSON(inputs.envs) }}
if: ${{ !success() }}
- uses: actions/upload-artifact@v4
+ uses: actions/upload-artifact@v6
with:
name: unit-tests-log-${{ matrix.modules }}--${{ matrix.java }}-${{ inputs.hadoop }}-hive2.3-${{ env.PYTHON_TO_TEST }}
path: "**/target/unit-tests.log"
@@ -717,7 +717,7 @@ jobs:
SKIP_PACKAGING: true
steps:
- name: Checkout Spark repository
- uses: actions/checkout@v4
+ uses: actions/checkout@v6
# In order to fetch changed files
with:
fetch-depth: 0
@@ -735,7 +735,7 @@ jobs:
git -c user.name='Apache Spark Test Account' -c user.email='[email protected]' commit -m "Merged commit" --allow-empty
# Cache local repositories. Note that GitHub Actions cache has a 10G limit.
- name: Cache SBT and Maven
- uses: actions/cache@v4
+ uses: actions/cache@v5
with:
path: |
build/apache-maven-*
@@ -745,7 +745,7 @@ jobs:
restore-keys: |
build-
- name: Cache Coursier local repository
- uses: actions/cache@v4
+ uses: actions/cache@v5
with:
path: ~/.cache/coursier
key: sparkr-coursier-${{ hashFiles('**/pom.xml', '**/plugins.sbt') }}
@@ -754,7 +754,7 @@ jobs:
- name: Free up disk space
run: ./dev/free_disk_space_container
- name: Install Java ${{ inputs.java }}
- uses: actions/setup-java@v4
+ uses: actions/setup-java@v5
with:
distribution: zulu
java-version: ${{ inputs.java }}
@@ -768,7 +768,7 @@ jobs:
./dev/run-tests --parallelism 1 --modules sparkr
- name: Upload test results to report
if: always()
- uses: actions/upload-artifact@v4
+ uses: actions/upload-artifact@v6
with:
name: test-results-sparkr--${{ inputs.java }}-${{ inputs.hadoop }}-hive2.3
path: |
@@ -789,7 +789,7 @@ jobs:
runs-on: ubuntu-latest
steps:
- name: Checkout Spark repository
- uses: actions/checkout@v4
+ uses: actions/checkout@v6
with:
fetch-depth: 0
repository: apache/spark
@@ -814,7 +814,7 @@ jobs:
input: sql/connect/common/src/main
against: 'https://github.com/apache/spark.git#branch=branch-4.0,subdir=sql/connect/common/src/main'
- name: Install Python 3.12
- uses: actions/setup-python@v5
+ uses: actions/setup-python@v6
with:
python-version: '3.12'
- name: Install dependencies for Python CodeGen check
@@ -846,7 +846,7 @@ jobs:
image: ${{ needs.precondition.outputs.image_lint_url_link }}
steps:
- name: Checkout Spark repository
- uses: actions/checkout@v4
+ uses: actions/checkout@v6
with:
fetch-depth: 0
repository: apache/spark
@@ -863,7 +863,7 @@ jobs:
git -c user.name='Apache Spark Test Account' -c user.email='[email protected]' commit -m "Merged commit" --allow-empty
# Cache local repositories. Note that GitHub Actions cache has a 10G limit.
- name: Cache SBT and Maven
- uses: actions/cache@v4
+ uses: actions/cache@v5
with:
path: |
build/apache-maven-*
@@ -873,14 +873,14 @@ jobs:
restore-keys: |
build-
- name: Cache Coursier local repository
- uses: actions/cache@v4
+ uses: actions/cache@v5
with:
path: ~/.cache/coursier
key: docs-coursier-${{ hashFiles('**/pom.xml', '**/plugins.sbt') }}
restore-keys: |
docs-coursier-
- name: Cache Maven local repository
- uses: actions/cache@v4
+ uses: actions/cache@v5
with:
path: ~/.m2/repository
key: docs-maven-${{ hashFiles('**/pom.xml') }}
@@ -889,7 +889,7 @@ jobs:
- name: Free up disk space
run: ./dev/free_disk_space_container
- name: Install Java ${{ inputs.java }}
- uses: actions/setup-java@v4
+ uses: actions/setup-java@v5
with:
distribution: zulu
java-version: ${{ inputs.java }}
@@ -991,8 +991,8 @@ jobs:
runs-on: ubuntu-latest
timeout-minutes: 120
steps:
- - uses: actions/checkout@v4
- - uses: actions/setup-java@v4
+ - uses: actions/checkout@v6
+ - uses: actions/setup-java@v5
with:
distribution: zulu
java-version: 17
@@ -1009,8 +1009,8 @@ jobs:
runs-on: ubuntu-latest
timeout-minutes: 120
steps:
- - uses: actions/checkout@v4
- - uses: actions/setup-java@v4
+ - uses: actions/checkout@v6
+ - uses: actions/setup-java@v5
with:
distribution: zulu
java-version: 25
@@ -1039,7 +1039,7 @@ jobs:
image: ${{ needs.precondition.outputs.image_docs_url_link }}
steps:
- name: Checkout Spark repository
- uses: actions/checkout@v4
+ uses: actions/checkout@v6
with:
fetch-depth: 0
repository: apache/spark
@@ -1056,7 +1056,7 @@ jobs:
git -c user.name='Apache Spark Test Account' -c user.email='[email protected]' commit -m "Merged commit" --allow-empty
# Cache local repositories. Note that GitHub Actions cache has a 10G limit.
- name: Cache SBT and Maven
- uses: actions/cache@v4
+ uses: actions/cache@v5
with:
path: |
build/apache-maven-*
@@ -1066,14 +1066,14 @@ jobs:
restore-keys: |
build-
- name: Cache Coursier local repository
- uses: actions/cache@v4
+ uses: actions/cache@v5
with:
path: ~/.cache/coursier
key: docs-coursier-${{ hashFiles('**/pom.xml', '**/plugins.sbt') }}
restore-keys: |
docs-coursier-
- name: Cache Maven local repository
- uses: actions/cache@v4
+ uses: actions/cache@v5
with:
path: ~/.m2/repository
key: docs-maven-${{ hashFiles('**/pom.xml') }}
@@ -1082,7 +1082,7 @@ jobs:
- name: Free up disk space
run: ./dev/free_disk_space_container
- name: Install Java ${{ inputs.java }}
- uses: actions/setup-java@v4
+ uses: actions/setup-java@v5
with:
distribution: zulu
java-version: ${{ inputs.java }}
@@ -1195,7 +1195,7 @@ jobs:
run: tar cjf site.tar.bz2 docs/_site
- name: Upload documentation
if: github.repository != 'apache/spark'
- uses: actions/upload-artifact@v4
+ uses: actions/upload-artifact@v6
with:
name: site
path: site.tar.bz2
@@ -1212,7 +1212,7 @@ jobs:
SPARK_LOCAL_IP: localhost
steps:
- name: Checkout Spark repository
- uses: actions/checkout@v4
+ uses: actions/checkout@v6
with:
fetch-depth: 0
repository: apache/spark
@@ -1224,7 +1224,7 @@ jobs:
git -c user.name='Apache Spark Test Account' -c user.email='[email protected]' merge --no-commit --progress --squash FETCH_HEAD
git -c user.name='Apache Spark Test Account' -c user.email='[email protected]' commit -m "Merged commit" --allow-empty
- name: Cache SBT and Maven
- uses: actions/cache@v4
+ uses: actions/cache@v5
with:
path: |
build/apache-maven-*
@@ -1234,26 +1234,26 @@ jobs:
restore-keys: |
build-
- name: Cache Coursier local repository
- uses: actions/cache@v4
+ uses: actions/cache@v5
with:
path: ~/.cache/coursier
key: tpcds-coursier-${{ hashFiles('**/pom.xml', '**/plugins.sbt') }}
restore-keys: |
tpcds-coursier-
- name: Install Java ${{ inputs.java }}
- uses: actions/setup-java@v4
+ uses: actions/setup-java@v5
with:
distribution: zulu
java-version: ${{ inputs.java }}
- name: Cache TPC-DS generated data
id: cache-tpcds-sf-1
- uses: actions/cache@v4
+ uses: actions/cache@v5
with:
path: ./tpcds-sf-1
key: tpcds-${{ hashFiles('.github/workflows/build_and_test.yml', 'sql/core/src/test/scala/org/apache/spark/sql/TPCDSSchema.scala') }}
- name: Checkout tpcds-kit repository
if: steps.cache-tpcds-sf-1.outputs.cache-hit != 'true'
- uses: actions/checkout@v4
+ uses: actions/checkout@v6
with:
repository: databricks/tpcds-kit
ref: 1b7fb7529edae091684201fab142d956d6afd881
@@ -1293,7 +1293,7 @@ jobs:
SPARK_TPCDS_DATA=`pwd`/tpcds-sf-1 build/sbt "sql/testOnly org.apache.spark.sql.TPCDSCollationQueryTestSuite"
- name: Upload test results to report
if: always()
- uses: actions/upload-artifact@v4
+ uses: actions/upload-artifact@v6
with:
name: test-results-tpcds--${{ inputs.java }}-${{ inputs.hadoop }}-hive2.3
path: |
@@ -1308,7 +1308,7 @@ jobs:
**/target/surefire-reports/*.xml
- name: Upload unit tests log files
if: ${{ !success() }}
- uses: actions/upload-artifact@v4
+ uses: actions/upload-artifact@v6
with:
name: unit-tests-log-tpcds--${{ inputs.java }}-${{ inputs.hadoop }}-hive2.3
path: "**/target/unit-tests.log"
@@ -1329,7 +1329,7 @@ jobs:
SKIP_PACKAGING: true
steps:
- name: Checkout Spark repository
- uses: actions/checkout@v4
+ uses: actions/checkout@v6
with:
fetch-depth: 0
repository: apache/spark
@@ -1342,7 +1342,7 @@ jobs:
git -c user.name='Apache Spark Test Account' -c user.email='[email protected]' merge --no-commit --progress --squash FETCH_HEAD
git -c user.name='Apache Spark Test Account' -c user.email='[email protected]' commit -m "Merged commit" --allow-empty
- name: Cache SBT and Maven
- uses: actions/cache@v4
+ uses: actions/cache@v5
with:
path: |
build/apache-maven-*
@@ -1352,14 +1352,14 @@ jobs:
restore-keys: |
build-
- name: Cache Coursier local repository
- uses: actions/cache@v4
+ uses: actions/cache@v5
with:
path: ~/.cache/coursier
key: docker-integration-coursier-${{ hashFiles('**/pom.xml', '**/plugins.sbt') }}
restore-keys: |
docker-integration-coursier-
- name: Install Java ${{ inputs.java }}
- uses: actions/setup-java@v4
+ uses: actions/setup-java@v5
with:
distribution: zulu
java-version: ${{ inputs.java }}
@@ -1369,7 +1369,7 @@ jobs:
./dev/run-tests --parallelism 1 --modules docker-integration-tests
--included-tags org.apache.spark.tags.DockerTest
- name: Upload test results to report
if: always()
- uses: actions/upload-artifact@v4
+ uses: actions/upload-artifact@v6
with:
name: test-results-docker-integration--${{ inputs.java }}-${{ inputs.hadoop }}-hive2.3
path: |
@@ -1384,7 +1384,7 @@ jobs:
**/target/surefire-reports/*.xml
- name: Upload unit tests log files
if: ${{ !success() }}
- uses: actions/upload-artifact@v4
+ uses: actions/upload-artifact@v6
with:
name: unit-tests-log-docker-integration--${{ inputs.java }}-${{ inputs.hadoop }}-hive2.3
path: "**/target/unit-tests.log"
@@ -1397,7 +1397,7 @@ jobs:
timeout-minutes: 120
steps:
- name: Checkout Spark repository
- uses: actions/checkout@v4
+ uses: actions/checkout@v6
with:
fetch-depth: 0
repository: apache/spark
@@ -1410,7 +1410,7 @@ jobs:
git -c user.name='Apache Spark Test Account' -c user.email='[email protected]' merge --no-commit --progress --squash FETCH_HEAD
git -c user.name='Apache Spark Test Account' -c user.email='[email protected]' commit -m "Merged commit" --allow-empty
- name: Cache SBT and Maven
- uses: actions/cache@v4
+ uses: actions/cache@v5
with:
path: |
build/apache-maven-*
@@ -1420,7 +1420,7 @@ jobs:
restore-keys: |
build-
- name: Cache Coursier local repository
- uses: actions/cache@v4
+ uses: actions/cache@v5
with:
path: ~/.cache/coursier
key: k8s-integration-coursier-${{ hashFiles('**/pom.xml', '**/plugins.sbt') }}
@@ -1432,7 +1432,7 @@ jobs:
./dev/free_disk_space
fi
- name: Install Java ${{ inputs.java }}
- uses: actions/setup-java@v4
+ uses: actions/setup-java@v5
with:
distribution: zulu
java-version: ${{ inputs.java }}
@@ -1470,7 +1470,7 @@ jobs:
build/sbt -Phadoop-3 -Psparkr -Pkubernetes -Pvolcano -Pkubernetes-integration-tests -Dspark.kubernetes.test.volcanoMaxConcurrencyJobNum=1 -Dtest.exclude.tags=local "kubernetes-integration-tests/test"
- name: Upload Spark on K8S integration tests log files
if: ${{ !success() }}
- uses: actions/upload-artifact@v4
+ uses: actions/upload-artifact@v6
with:
name: spark-on-kubernetes-it-log
path: "**/target/integration-tests.log"
@@ -1482,7 +1482,7 @@ jobs:
runs-on: ubuntu-latest
timeout-minutes: 120
steps:
- - uses: actions/checkout@v4
+ - uses: actions/checkout@v6
- name: Use Node.js
uses: actions/setup-node@v6
with:
diff --git a/.github/workflows/build_infra_images_cache.yml b/.github/workflows/build_infra_images_cache.yml
index 210e413bbbc5..befcee540876 100644
--- a/.github/workflows/build_infra_images_cache.yml
+++ b/.github/workflows/build_infra_images_cache.yml
@@ -54,7 +54,7 @@ jobs:
packages: write
steps:
- name: Checkout Spark repository
- uses: actions/checkout@v4
+ uses: actions/checkout@v6
- name: Set up QEMU
uses: docker/setup-qemu-action@v3
- name: Set up Docker Buildx
diff --git a/.github/workflows/build_python_connect.yml b/.github/workflows/build_python_connect.yml
index 631ec06a4e22..80fef3a5e4f6 100644
--- a/.github/workflows/build_python_connect.yml
+++ b/.github/workflows/build_python_connect.yml
@@ -33,9 +33,9 @@ jobs:
if: github.repository == 'apache/spark'
steps:
- name: Checkout Spark repository
- uses: actions/checkout@v4
+ uses: actions/checkout@v6
- name: Cache SBT and Maven
- uses: actions/cache@v4
+ uses: actions/cache@v5
with:
path: |
build/apache-maven-*
@@ -45,19 +45,19 @@ jobs:
restore-keys: |
build-spark-connect-python-only-
- name: Cache Coursier local repository
- uses: actions/cache@v4
+ uses: actions/cache@v5
with:
path: ~/.cache/coursier
key: coursier-build-spark-connect-python-only-${{ hashFiles('**/pom.xml') }}
restore-keys: |
coursier-build-spark-connect-python-only-
- name: Install Java 17
- uses: actions/setup-java@v4
+ uses: actions/setup-java@v5
with:
distribution: zulu
java-version: 17
- name: Install Python 3.11
- uses: actions/setup-python@v5
+ uses: actions/setup-python@v6
with:
python-version: '3.11'
architecture: x64
@@ -126,7 +126,7 @@ jobs:
mv pyspark.back python/pyspark
- name: Upload test results to report
if: always()
- uses: actions/upload-artifact@v4
+ uses: actions/upload-artifact@v6
with:
name: test-results-spark-connect-python-only
path: |
@@ -134,7 +134,7 @@ jobs:
**/target/surefire-reports/*.xml
- name: Upload Spark Connect server log file
if: ${{ !success() }}
- uses: actions/upload-artifact@v4
+ uses: actions/upload-artifact@v6
with:
name: unit-tests-log-spark-connect-python-only
path: logs/*.out
diff --git a/.github/workflows/build_python_connect40.yml b/.github/workflows/build_python_connect40.yml
index a9f9d0ecc168..dc01b2085272 100644
--- a/.github/workflows/build_python_connect40.yml
+++ b/.github/workflows/build_python_connect40.yml
@@ -33,11 +33,11 @@ jobs:
if: github.repository == 'apache/spark'
steps:
- name: Checkout Spark repository
- uses: actions/checkout@v4
+ uses: actions/checkout@v6
with:
fetch-depth: 0
- name: Cache SBT and Maven
- uses: actions/cache@v4
+ uses: actions/cache@v5
with:
path: |
build/apache-maven-*
@@ -47,19 +47,19 @@ jobs:
restore-keys: |
build-spark-connect-python-only-
- name: Cache Coursier local repository
- uses: actions/cache@v4
+ uses: actions/cache@v5
with:
path: ~/.cache/coursier
key: coursier-build-spark-connect-python-only-${{ hashFiles('**/pom.xml') }}
restore-keys: |
coursier-build-spark-connect-python-only-
- name: Install Java 17
- uses: actions/setup-java@v4
+ uses: actions/setup-java@v5
with:
distribution: zulu
java-version: 17
- name: Install Python 3.11
- uses: actions/setup-python@v5
+ uses: actions/setup-python@v6
with:
python-version: '3.11'
architecture: x64
@@ -106,7 +106,7 @@ jobs:
./python/run-tests --parallelism=1 --python-executables=python3 --modules pyspark-pandas-connect,pyspark-pandas-slow-connect
- name: Upload test results to report
if: always()
- uses: actions/upload-artifact@v4
+ uses: actions/upload-artifact@v6
with:
name: test-results-spark-connect-python-only
path: |
@@ -114,7 +114,7 @@ jobs:
**/target/surefire-reports/*.xml
- name: Upload Spark Connect server log file
if: ${{ !success() }}
- uses: actions/upload-artifact@v4
+ uses: actions/upload-artifact@v6
with:
name: unit-tests-log-spark-connect-python-only
path: logs/*.out
diff --git a/.github/workflows/build_sparkr_window.yml b/.github/workflows/build_sparkr_window.yml
index 8bbcdf79bd58..ebd44178a317 100644
--- a/.github/workflows/build_sparkr_window.yml
+++ b/.github/workflows/build_sparkr_window.yml
@@ -31,23 +31,23 @@ jobs:
if: github.repository == 'apache/spark'
steps:
- name: Download winutils Hadoop binary
- uses: actions/checkout@v4
+ uses: actions/checkout@v6
with:
repository: cdarlint/winutils
- name: Move Hadoop winutil into home directory
run: |
Move-Item -Path hadoop-3.3.6 -Destination ~\
- name: Checkout Spark repository
- uses: actions/checkout@v4
+ uses: actions/checkout@v6
- name: Cache Maven local repository
- uses: actions/cache@v4
+ uses: actions/cache@v5
with:
path: ~/.m2/repository
key: build-sparkr-windows-maven-${{ hashFiles('**/pom.xml') }}
restore-keys: |
build-sparkr-windows-maven-
- name: Install Java 17
- uses: actions/setup-java@v4
+ uses: actions/setup-java@v5
with:
distribution: zulu
java-version: 17
@@ -65,7 +65,7 @@ jobs:
# includes Python 3.7, which Spark does not support. Therefore, we simply install the proper Python
# for simplicity, see SPARK-47116.
- name: Install Python 3.11
- uses: actions/setup-python@v5
+ uses: actions/setup-python@v6
with:
python-version: '3.11'
architecture: x64
diff --git a/.github/workflows/maven_test.yml b/.github/workflows/maven_test.yml
index 15cea30bb3c2..fbad46aac022 100644
--- a/.github/workflows/maven_test.yml
+++ b/.github/workflows/maven_test.yml
@@ -130,7 +130,7 @@ jobs:
GITHUB_PREV_SHA: ${{ github.event.before }}
steps:
- name: Checkout Spark repository
- uses: actions/checkout@v4
+ uses: actions/checkout@v6
# In order to fetch changed files
with:
fetch-depth: 0
@@ -147,7 +147,7 @@ jobs:
- name: Cache SBT and Maven
# TODO(SPARK-54466): https://github.com/actions/runner-images/issues/13341
if: ${{ runner.os != 'macOS' }}
- uses: actions/cache@v4
+ uses: actions/cache@v5
with:
path: |
build/apache-maven-*
@@ -159,19 +159,19 @@ jobs:
- name: Cache Maven local repository
# TODO(SPARK-54466): https://github.com/actions/runner-images/issues/13341
if: ${{ runner.os != 'macOS' }}
- uses: actions/cache@v4
+ uses: actions/cache@v5
with:
path: ~/.m2/repository
key: java${{ matrix.java }}-maven-${{ hashFiles('**/pom.xml') }}
restore-keys: |
java${{ matrix.java }}-maven-
- name: Install Java ${{ matrix.java }}
- uses: actions/setup-java@v4
+ uses: actions/setup-java@v5
with:
distribution: zulu
java-version: ${{ matrix.java }}
- name: Install Python 3.12
- uses: actions/setup-python@v5
+ uses: actions/setup-python@v6
# We should install one Python that is higher than 3+ for SQL and Yarn because:
# - SQL component also has Python related tests, for example, IntegratedUDFTestUtils.
# - Yarn has a Python specific test too, for example, YarnClusterSuite.
@@ -238,7 +238,7 @@ jobs:
rm -rf ~/.m2/repository/org/apache/spark
- name: Upload test results to report
if: always()
- uses: actions/upload-artifact@v4
+ uses: actions/upload-artifact@v6
with:
name: test-results-${{ matrix.modules }}-${{ matrix.comment }}-${{ matrix.java }}-${{ matrix.hadoop }}-${{ matrix.hive }}
path: |
@@ -246,7 +246,7 @@ jobs:
**/target/surefire-reports/*.xml
- name: Upload unit tests log files
if: failure()
- uses: actions/upload-artifact@v4
+ uses: actions/upload-artifact@v6
with:
name: unit-tests-log-${{ matrix.modules }}-${{ matrix.comment }}-${{ matrix.java }}-${{ matrix.hadoop }}-${{ matrix.hive }}
path: "**/target/unit-tests.log"
diff --git a/.github/workflows/notify_test_workflow.yml b/.github/workflows/notify_test_workflow.yml
index 93b627763445..53a9fd19cd09 100644
--- a/.github/workflows/notify_test_workflow.yml
+++ b/.github/workflows/notify_test_workflow.yml
@@ -36,7 +36,7 @@ jobs:
checks: write
steps:
- name: "Notify test workflow"
- uses: actions/github-script@v7
+ uses: actions/github-script@v8
with:
github-token: ${{ secrets.GITHUB_TOKEN }}
script: |
diff --git a/.github/workflows/pages.yml b/.github/workflows/pages.yml
index 2b54c08ddfe4..276b822b4dc1 100644
--- a/.github/workflows/pages.yml
+++ b/.github/workflows/pages.yml
@@ -43,17 +43,17 @@ jobs:
if: github.repository == 'apache/spark'
steps:
- name: Checkout Spark repository
- uses: actions/checkout@v4
+ uses: actions/checkout@v6
with:
repository: apache/spark
ref: 'master'
- name: Install Java 17
- uses: actions/setup-java@v4
+ uses: actions/setup-java@v5
with:
distribution: zulu
java-version: 17
- name: Install Python 3.11
- uses: actions/setup-python@v5
+ uses: actions/setup-python@v6
with:
python-version: '3.11'
architecture: x64
diff --git a/.github/workflows/publish_snapshot.yml b/.github/workflows/publish_snapshot.yml
index 2a78ed7dfb8d..57c16337e110 100644
--- a/.github/workflows/publish_snapshot.yml
+++ b/.github/workflows/publish_snapshot.yml
@@ -42,11 +42,11 @@ jobs:
branch: ${{ fromJSON( inputs.branch || '["master", "branch-4.1", "branch-4.0", "branch-3.5"]' ) }}
steps:
- name: Checkout Spark repository
- uses: actions/checkout@v4
+ uses: actions/checkout@v6
with:
ref: ${{ matrix.branch }}
- name: Cache Maven local repository
- uses: actions/cache@v4
+ uses: actions/cache@v5
with:
path: ~/.m2/repository
key: snapshot-maven-${{ hashFiles('**/pom.xml') }}
@@ -54,13 +54,13 @@ jobs:
snapshot-maven-
- name: Install Java 8 for branch-3.x
if: matrix.branch == 'branch-3.5'
- uses: actions/setup-java@v4
+ uses: actions/setup-java@v5
with:
distribution: temurin
java-version: 8
- name: Install Java 17
if: matrix.branch != 'branch-3.5'
- uses: actions/setup-java@v4
+ uses: actions/setup-java@v5
with:
distribution: temurin
java-version: 17
diff --git a/.github/workflows/python_hosted_runner_test.yml b/.github/workflows/python_hosted_runner_test.yml
index b32d2d1a5628..eac38ee1e911 100644
--- a/.github/workflows/python_hosted_runner_test.yml
+++ b/.github/workflows/python_hosted_runner_test.yml
@@ -104,7 +104,7 @@ jobs:
BRANCH: ${{ inputs.branch }}
steps:
- name: Checkout Spark repository
- uses: actions/checkout@v4
+ uses: actions/checkout@v6
# In order to fetch changed files
with:
fetch-depth: 0
@@ -121,7 +121,7 @@ jobs:
- name: Cache SBT and Maven
# TODO(SPARK-54466): https://github.com/actions/runner-images/issues/13341
if: ${{ runner.os != 'macOS' }}
- uses: actions/cache@v4
+ uses: actions/cache@v5
with:
path: |
build/apache-maven-*
@@ -133,19 +133,19 @@ jobs:
- name: Cache Coursier local repository
# TODO(SPARK-54466): https://github.com/actions/runner-images/issues/13341
if: ${{ runner.os != 'macOS' }}
- uses: actions/cache@v4
+ uses: actions/cache@v5
with:
path: ~/.cache/coursier
key: pyspark-coursier-${{ hashFiles('**/pom.xml', '**/plugins.sbt') }}
restore-keys: |
pyspark-coursier-
- name: Install Java ${{ matrix.java }}
- uses: actions/setup-java@v4
+ uses: actions/setup-java@v5
with:
distribution: zulu
java-version: ${{ matrix.java }}
- name: Install Python ${{matrix.python}}
- uses: actions/setup-python@v5
+ uses: actions/setup-python@v6
with:
python-version: ${{matrix.python}}
architecture: ${{ inputs.arch }}
@@ -170,7 +170,7 @@ jobs:
- name: Upload test results to report
env: ${{ fromJSON(inputs.envs) }}
if: always()
- uses: actions/upload-artifact@v4
+ uses: actions/upload-artifact@v6
with:
name: test-results-${{ inputs.os }}-${{ matrix.modules }}--${{ matrix.java }}-${{ inputs.hadoop }}-hive2.3-${{ env.PYTHON_TO_TEST }}
path: |
@@ -179,7 +179,7 @@ jobs:
- name: Upload unit tests log files
env: ${{ fromJSON(inputs.envs) }}
if: ${{ !success() }}
- uses: actions/upload-artifact@v4
+ uses: actions/upload-artifact@v6
with:
name: unit-tests-log-${{ inputs.os }}-${{ matrix.modules }}--${{ matrix.java }}-${{ inputs.hadoop }}-hive2.3-${{ env.PYTHON_TO_TEST }}
path: "**/target/unit-tests.log"
diff --git a/.github/workflows/release.yml b/.github/workflows/release.yml
index 594b2e4a6c39..67af207c1ce6 100644
--- a/.github/workflows/release.yml
+++ b/.github/workflows/release.yml
@@ -115,7 +115,7 @@ jobs:
)
steps:
- name: Checkout Spark repository
- uses: actions/checkout@v4
+ uses: actions/checkout@v6
with:
repository: apache/spark
ref: "${{ inputs.branch }}"
@@ -283,13 +283,13 @@ jobs:
fi
- name: Upload logs
if: always()
- uses: actions/upload-artifact@v4
+ uses: actions/upload-artifact@v6
with:
name: build-logs
path: logs.zip
- name: Upload output
if: always()
- uses: actions/upload-artifact@v4
+ uses: actions/upload-artifact@v6
with:
name: build-output
path: output.zip
diff --git a/.github/workflows/stale.yml b/.github/workflows/stale.yml
index e2db95083efe..0fef90959075 100644
--- a/.github/workflows/stale.yml
+++ b/.github/workflows/stale.yml
@@ -28,7 +28,7 @@ jobs:
if: github.repository == 'apache/spark'
runs-on: ubuntu-latest
steps:
- - uses: actions/stale@c201d45ef4b0ccbd3bb0616f93bae13e73d0a080 # [email protected]
+ - uses: actions/stale@v10
with:
repo-token: ${{ secrets.GITHUB_TOKEN }}
stale-pr-message: >
diff --git a/.github/workflows/test_report.yml b/.github/workflows/test_report.yml
index 47e09da1bbaf..8b0af24e3cbe 100644
--- a/.github/workflows/test_report.yml
+++ b/.github/workflows/test_report.yml
@@ -36,7 +36,7 @@ jobs:
contents: read
steps:
- name: Download test results to report
- uses: actions/download-artifact@v5
+ uses: actions/download-artifact@v6
with:
github-token: ${{ secrets.GITHUB_TOKEN }}
run-id: ${{ github.event.workflow_run.id }}
diff --git a/.github/workflows/update_build_status.yml b/.github/workflows/update_build_status.yml
index 542fa567dea6..82c9a6d17b2f 100644
--- a/.github/workflows/update_build_status.yml
+++ b/.github/workflows/update_build_status.yml
@@ -32,7 +32,7 @@ jobs:
checks: write
steps:
- name: "Update build status"
- uses: actions/github-script@v7
+ uses: actions/github-script@v8
with:
github-token: ${{ secrets.GITHUB_TOKEN }}
script: |
---------------------------------------------------------------------
To unsubscribe, e-mail: [email protected]
For additional commands, e-mail: [email protected]