martin-g commented on code in PR #9:
URL: https://github.com/apache/spark-docker/pull/9#discussion_r994270823
##########
.github/workflows/main.yml:
##########
@@ -76,29 +69,136 @@ jobs:
IMAGE_NAME=spark
IMAGE_PATH=${{ matrix.spark_version }}/$TAG
UNIQUE_IMAGE_TAG=${{ matrix.spark_version }}-$TAG
+ IMAGE_URL=$TEST_REPO/$IMAGE_NAME:$UNIQUE_IMAGE_TAG
- # Unique image tag in each version: scala2.12-java11-python3-ubuntu
+ # Unique image tag in each version:
3.3.0-scala2.12-java11-python3-ubuntu
echo "UNIQUE_IMAGE_TAG=${UNIQUE_IMAGE_TAG}" >> $GITHUB_ENV
# Test repo: ghcr.io/apache/spark-docker
echo "TEST_REPO=${TEST_REPO}" >> $GITHUB_ENV
# Image name: spark
echo "IMAGE_NAME=${IMAGE_NAME}" >> $GITHUB_ENV
# Image dockerfile path: 3.3.0/scala2.12-java11-python3-ubuntu
echo "IMAGE_PATH=${IMAGE_PATH}" >> $GITHUB_ENV
+ # Image URL:
ghcr.io/apache/spark-docker/spark:3.3.0-scala2.12-java11-python3-ubuntu
+ echo "IMAGE_URL=${IMAGE_URL}" >> $GITHUB_ENV
- name: Print Image tags
run: |
echo "UNIQUE_IMAGE_TAG: "${UNIQUE_IMAGE_TAG}
echo "TEST_REPO: "${TEST_REPO}
echo "IMAGE_NAME: "${IMAGE_NAME}
echo "IMAGE_PATH: "${IMAGE_PATH}
+ echo "IMAGE_URL: "${IMAGE_URL}
- - name: Build and push test image
+ - name: Build image
uses: docker/build-push-action@v2
with:
context: ${{ env.IMAGE_PATH }}
- tags: ${{ env.TEST_REPO }}/${{ env.IMAGE_NAME }}:${{
env.UNIQUE_IMAGE_TAG }}
+ tags: ${{ env.IMAGE_URL }}
platforms: linux/amd64,linux/arm64
- - name: Image digest
- run: echo ${{ steps.docker_build.outputs.digest }}
+ - name: Test - Checkout Spark repository
+ uses: actions/checkout@v2
+ with:
+ fetch-depth: 0
+ repository: apache/spark
+ ref: v${{ matrix.spark_version }}
+ path: ${{ github.workspace }}/spark
+
+ - name: Test - Cherry pick commits
+ # Apache Spark enable resource limited k8s IT since v3.3.1, cherrpick
patches for old release
Review Comment:
```suggestion
# Apache Spark enable resource limited k8s IT since v3.3.1,
cherry-pick patches for old release
```
##########
.github/workflows/main.yml:
##########
@@ -76,29 +69,136 @@ jobs:
IMAGE_NAME=spark
IMAGE_PATH=${{ matrix.spark_version }}/$TAG
UNIQUE_IMAGE_TAG=${{ matrix.spark_version }}-$TAG
+ IMAGE_URL=$TEST_REPO/$IMAGE_NAME:$UNIQUE_IMAGE_TAG
- # Unique image tag in each version: scala2.12-java11-python3-ubuntu
+ # Unique image tag in each version:
3.3.0-scala2.12-java11-python3-ubuntu
echo "UNIQUE_IMAGE_TAG=${UNIQUE_IMAGE_TAG}" >> $GITHUB_ENV
# Test repo: ghcr.io/apache/spark-docker
echo "TEST_REPO=${TEST_REPO}" >> $GITHUB_ENV
# Image name: spark
echo "IMAGE_NAME=${IMAGE_NAME}" >> $GITHUB_ENV
# Image dockerfile path: 3.3.0/scala2.12-java11-python3-ubuntu
echo "IMAGE_PATH=${IMAGE_PATH}" >> $GITHUB_ENV
+ # Image URL:
ghcr.io/apache/spark-docker/spark:3.3.0-scala2.12-java11-python3-ubuntu
+ echo "IMAGE_URL=${IMAGE_URL}" >> $GITHUB_ENV
- name: Print Image tags
run: |
echo "UNIQUE_IMAGE_TAG: "${UNIQUE_IMAGE_TAG}
echo "TEST_REPO: "${TEST_REPO}
echo "IMAGE_NAME: "${IMAGE_NAME}
echo "IMAGE_PATH: "${IMAGE_PATH}
+ echo "IMAGE_URL: "${IMAGE_URL}
- - name: Build and push test image
+ - name: Build image
uses: docker/build-push-action@v2
with:
context: ${{ env.IMAGE_PATH }}
- tags: ${{ env.TEST_REPO }}/${{ env.IMAGE_NAME }}:${{
env.UNIQUE_IMAGE_TAG }}
+ tags: ${{ env.IMAGE_URL }}
platforms: linux/amd64,linux/arm64
- - name: Image digest
- run: echo ${{ steps.docker_build.outputs.digest }}
+ - name: Test - Checkout Spark repository
+ uses: actions/checkout@v2
Review Comment:
There is a newer version — v3.
##########
.github/workflows/main.yml:
##########
@@ -76,29 +69,136 @@ jobs:
IMAGE_NAME=spark
IMAGE_PATH=${{ matrix.spark_version }}/$TAG
UNIQUE_IMAGE_TAG=${{ matrix.spark_version }}-$TAG
+ IMAGE_URL=$TEST_REPO/$IMAGE_NAME:$UNIQUE_IMAGE_TAG
- # Unique image tag in each version: scala2.12-java11-python3-ubuntu
+ # Unique image tag in each version:
3.3.0-scala2.12-java11-python3-ubuntu
echo "UNIQUE_IMAGE_TAG=${UNIQUE_IMAGE_TAG}" >> $GITHUB_ENV
# Test repo: ghcr.io/apache/spark-docker
echo "TEST_REPO=${TEST_REPO}" >> $GITHUB_ENV
# Image name: spark
echo "IMAGE_NAME=${IMAGE_NAME}" >> $GITHUB_ENV
# Image dockerfile path: 3.3.0/scala2.12-java11-python3-ubuntu
echo "IMAGE_PATH=${IMAGE_PATH}" >> $GITHUB_ENV
+ # Image URL:
ghcr.io/apache/spark-docker/spark:3.3.0-scala2.12-java11-python3-ubuntu
+ echo "IMAGE_URL=${IMAGE_URL}" >> $GITHUB_ENV
- name: Print Image tags
run: |
echo "UNIQUE_IMAGE_TAG: "${UNIQUE_IMAGE_TAG}
echo "TEST_REPO: "${TEST_REPO}
echo "IMAGE_NAME: "${IMAGE_NAME}
echo "IMAGE_PATH: "${IMAGE_PATH}
+ echo "IMAGE_URL: "${IMAGE_URL}
- - name: Build and push test image
+ - name: Build image
uses: docker/build-push-action@v2
with:
context: ${{ env.IMAGE_PATH }}
- tags: ${{ env.TEST_REPO }}/${{ env.IMAGE_NAME }}:${{
env.UNIQUE_IMAGE_TAG }}
+ tags: ${{ env.IMAGE_URL }}
platforms: linux/amd64,linux/arm64
- - name: Image digest
- run: echo ${{ steps.docker_build.outputs.digest }}
+ - name: Test - Checkout Spark repository
+ uses: actions/checkout@v2
+ with:
+ fetch-depth: 0
+ repository: apache/spark
+ ref: v${{ matrix.spark_version }}
+ path: ${{ github.workspace }}/spark
+
+ - name: Test - Cherry pick commits
+ # Apache Spark enable resource limited k8s IT since v3.3.1, cherrpick
patches for old release
+ # https://github.com/apache/spark/pull/36087#issuecomment-1251756266
+ if: matrix.spark_version == '3.3.0'
+ working-directory: ${{ github.workspace }}/spark
+ run: |
+ # SPARK-38802: Add driverRequestCores/executorRequestCores supported
+ #
https://github.com/apache/spark/commit/83963828b54bffe99527a004057272bc584cbc26
+ git -c user.name='Apache Spark Test Account' -c
user.email='[email protected]' cherry-pick
83963828b54bffe99527a004057272bc584cbc26
+ # SPARK-38803: Lower minio cpu to 250m
+ #
https://github.com/apache/spark/commit/5ea2b386eb866e20540660cdb6ed43792cb29969
+ git -c user.name='Apache Spark Test Account' -c
user.email='[email protected]' cherry-pick
5ea2b386eb866e20540660cdb6ed43792cb29969
+
+ - name: Test - Install Java ${{ inputs.java }}
+ uses: actions/setup-java@v1
+ with:
+ java-version: ${{ matrix.java_version }}
+
+ - name: Test - Cache Scala, SBT and Maven
+ uses: actions/cache@v2
Review Comment:
v3
##########
.github/workflows/main.yml:
##########
@@ -76,29 +69,136 @@ jobs:
IMAGE_NAME=spark
IMAGE_PATH=${{ matrix.spark_version }}/$TAG
UNIQUE_IMAGE_TAG=${{ matrix.spark_version }}-$TAG
+ IMAGE_URL=$TEST_REPO/$IMAGE_NAME:$UNIQUE_IMAGE_TAG
- # Unique image tag in each version: scala2.12-java11-python3-ubuntu
+ # Unique image tag in each version:
3.3.0-scala2.12-java11-python3-ubuntu
echo "UNIQUE_IMAGE_TAG=${UNIQUE_IMAGE_TAG}" >> $GITHUB_ENV
# Test repo: ghcr.io/apache/spark-docker
echo "TEST_REPO=${TEST_REPO}" >> $GITHUB_ENV
# Image name: spark
echo "IMAGE_NAME=${IMAGE_NAME}" >> $GITHUB_ENV
# Image dockerfile path: 3.3.0/scala2.12-java11-python3-ubuntu
echo "IMAGE_PATH=${IMAGE_PATH}" >> $GITHUB_ENV
+ # Image URL:
ghcr.io/apache/spark-docker/spark:3.3.0-scala2.12-java11-python3-ubuntu
+ echo "IMAGE_URL=${IMAGE_URL}" >> $GITHUB_ENV
- name: Print Image tags
run: |
echo "UNIQUE_IMAGE_TAG: "${UNIQUE_IMAGE_TAG}
echo "TEST_REPO: "${TEST_REPO}
echo "IMAGE_NAME: "${IMAGE_NAME}
echo "IMAGE_PATH: "${IMAGE_PATH}
+ echo "IMAGE_URL: "${IMAGE_URL}
- - name: Build and push test image
+ - name: Build image
uses: docker/build-push-action@v2
with:
context: ${{ env.IMAGE_PATH }}
- tags: ${{ env.TEST_REPO }}/${{ env.IMAGE_NAME }}:${{
env.UNIQUE_IMAGE_TAG }}
+ tags: ${{ env.IMAGE_URL }}
platforms: linux/amd64,linux/arm64
- - name: Image digest
- run: echo ${{ steps.docker_build.outputs.digest }}
+ - name: Test - Checkout Spark repository
+ uses: actions/checkout@v2
+ with:
+ fetch-depth: 0
+ repository: apache/spark
+ ref: v${{ matrix.spark_version }}
+ path: ${{ github.workspace }}/spark
+
+ - name: Test - Cherry pick commits
+ # Apache Spark enable resource limited k8s IT since v3.3.1, cherrpick
patches for old release
+ # https://github.com/apache/spark/pull/36087#issuecomment-1251756266
+ if: matrix.spark_version == '3.3.0'
+ working-directory: ${{ github.workspace }}/spark
+ run: |
+ # SPARK-38802: Add driverRequestCores/executorRequestCores supported
+ #
https://github.com/apache/spark/commit/83963828b54bffe99527a004057272bc584cbc26
+ git -c user.name='Apache Spark Test Account' -c
user.email='[email protected]' cherry-pick
83963828b54bffe99527a004057272bc584cbc26
+ # SPARK-38803: Lower minio cpu to 250m
+ #
https://github.com/apache/spark/commit/5ea2b386eb866e20540660cdb6ed43792cb29969
+ git -c user.name='Apache Spark Test Account' -c
user.email='[email protected]' cherry-pick
5ea2b386eb866e20540660cdb6ed43792cb29969
+
+ - name: Test - Install Java ${{ inputs.java }}
+ uses: actions/setup-java@v1
Review Comment:
v3?
##########
.github/workflows/main.yml:
##########
@@ -76,29 +69,136 @@ jobs:
IMAGE_NAME=spark
IMAGE_PATH=${{ matrix.spark_version }}/$TAG
UNIQUE_IMAGE_TAG=${{ matrix.spark_version }}-$TAG
+ IMAGE_URL=$TEST_REPO/$IMAGE_NAME:$UNIQUE_IMAGE_TAG
- # Unique image tag in each version: scala2.12-java11-python3-ubuntu
+ # Unique image tag in each version:
3.3.0-scala2.12-java11-python3-ubuntu
echo "UNIQUE_IMAGE_TAG=${UNIQUE_IMAGE_TAG}" >> $GITHUB_ENV
# Test repo: ghcr.io/apache/spark-docker
echo "TEST_REPO=${TEST_REPO}" >> $GITHUB_ENV
# Image name: spark
echo "IMAGE_NAME=${IMAGE_NAME}" >> $GITHUB_ENV
# Image dockerfile path: 3.3.0/scala2.12-java11-python3-ubuntu
echo "IMAGE_PATH=${IMAGE_PATH}" >> $GITHUB_ENV
+ # Image URL:
ghcr.io/apache/spark-docker/spark:3.3.0-scala2.12-java11-python3-ubuntu
+ echo "IMAGE_URL=${IMAGE_URL}" >> $GITHUB_ENV
- name: Print Image tags
run: |
echo "UNIQUE_IMAGE_TAG: "${UNIQUE_IMAGE_TAG}
echo "TEST_REPO: "${TEST_REPO}
echo "IMAGE_NAME: "${IMAGE_NAME}
echo "IMAGE_PATH: "${IMAGE_PATH}
+ echo "IMAGE_URL: "${IMAGE_URL}
- - name: Build and push test image
+ - name: Build image
uses: docker/build-push-action@v2
with:
context: ${{ env.IMAGE_PATH }}
- tags: ${{ env.TEST_REPO }}/${{ env.IMAGE_NAME }}:${{
env.UNIQUE_IMAGE_TAG }}
+ tags: ${{ env.IMAGE_URL }}
platforms: linux/amd64,linux/arm64
- - name: Image digest
- run: echo ${{ steps.docker_build.outputs.digest }}
+ - name: Test - Checkout Spark repository
+ uses: actions/checkout@v2
+ with:
+ fetch-depth: 0
+ repository: apache/spark
+ ref: v${{ matrix.spark_version }}
+ path: ${{ github.workspace }}/spark
+
+ - name: Test - Cherry pick commits
+ # Apache Spark enable resource limited k8s IT since v3.3.1, cherrpick
patches for old release
+ # https://github.com/apache/spark/pull/36087#issuecomment-1251756266
+ if: matrix.spark_version == '3.3.0'
Review Comment:
```suggestion
if: startsWith(matrix.spark_version, '3.3.')
```
to make it future-proof?
--
This is an automated message from the Apache Git Service.
To respond to the message, please log on to GitHub and use the
URL above to go to the specific comment.
To unsubscribe, e-mail: [email protected]
For queries about this service, please contact Infrastructure at:
[email protected]
---------------------------------------------------------------------
To unsubscribe, e-mail: [email protected]
For additional commands, e-mail: [email protected]