This is an automated email from the ASF dual-hosted git repository.

dongjoon pushed a commit to branch master
in repository https://gitbox.apache.org/repos/asf/spark.git


The following commit(s) were added to refs/heads/master by this push:
     new f979bc8d37d2 [SPARK-50554][INFRA] Add a separate docker file for Python 3.11 daily coverage build
f979bc8d37d2 is described below

commit f979bc8d37d241d65986dfcf2105db481fef6eef
Author: Ruifeng Zheng <[email protected]>
AuthorDate: Thu Dec 12 10:07:48 2024 -0800

    [SPARK-50554][INFRA] Add a separate docker file for Python 3.11 daily coverage build
    
    ### What changes were proposed in this pull request?
    1. Add a separate Dockerfile for Python 3.11;
    2. Use it in the daily coverage build (it will be used for other jobs later).
    
    ### Why are the changes needed?
    To isolate the environments.
    
    ### Does this PR introduce _any_ user-facing change?
    No, infra-only.
    
    ### How was this patch tested?
    PR builder with:
    ```
    default: '{"PYSPARK_IMAGE_TO_TEST": "python-311", "PYTHON_TO_TEST": "python3.11", "PYSPARK_CODECOV": "true"}'
    ```
    
    https://github.com/zhengruifeng/spark/runs/34308729714
    
    ### Was this patch authored or co-authored using generative AI tooling?
    No.
    
    Closes #49165 from zhengruifeng/py_image_311.
    
    Authored-by: Ruifeng Zheng <[email protected]>
    Signed-off-by: Dongjoon Hyun <[email protected]>
---
 .github/workflows/build_coverage.yml           |  1 +
 .github/workflows/build_infra_images_cache.yml | 14 +++++
 dev/spark-test-image/python-311/Dockerfile     | 80 ++++++++++++++++++++++++++
 3 files changed, 95 insertions(+)

diff --git a/.github/workflows/build_coverage.yml b/.github/workflows/build_coverage.yml
index 64f65bd777a0..8da65927f37f 100644
--- a/.github/workflows/build_coverage.yml
+++ b/.github/workflows/build_coverage.yml
@@ -36,6 +36,7 @@ jobs:
       hadoop: hadoop3
       envs: >-
         {
+          "PYSPARK_IMAGE_TO_TEST": "python-311",
           "PYTHON_TO_TEST": "python3.11",
           "PYSPARK_CODECOV": "true"
         }
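
The `envs` block above is passed to the reusable build workflow as a JSON string, with the new `PYSPARK_IMAGE_TO_TEST` key selecting the per-version test image. As an illustration only (not part of the committed workflow), the block can be sanity-checked locally as JSON:

```
# Illustration only: verify the coverage-job envs block parses as valid JSON.
echo '{"PYSPARK_IMAGE_TO_TEST": "python-311", "PYTHON_TO_TEST": "python3.11", "PYSPARK_CODECOV": "true"}' | python3 -m json.tool
```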
diff --git a/.github/workflows/build_infra_images_cache.yml b/.github/workflows/build_infra_images_cache.yml
index df927da57c24..cd57e070e21b 100644
--- a/.github/workflows/build_infra_images_cache.yml
+++ b/.github/workflows/build_infra_images_cache.yml
@@ -33,6 +33,7 @@ on:
     - 'dev/spark-test-image/pypy-310/Dockerfile'
     - 'dev/spark-test-image/python-309/Dockerfile'
     - 'dev/spark-test-image/python-310/Dockerfile'
+    - 'dev/spark-test-image/python-311/Dockerfile'
     - 'dev/spark-test-image/python-312/Dockerfile'
     - 'dev/spark-test-image/python-313/Dockerfile'
     - '.github/workflows/build_infra_images_cache.yml'
@@ -146,6 +147,19 @@ jobs:
       - name: Image digest (PySpark with Python 3.10)
         if: hashFiles('dev/spark-test-image/python-310/Dockerfile') != ''
         run: echo ${{ steps.docker_build_pyspark_python_310.outputs.digest }}
+      - name: Build and push (PySpark with Python 3.11)
+        if: hashFiles('dev/spark-test-image/python-311/Dockerfile') != ''
+        id: docker_build_pyspark_python_311
+        uses: docker/build-push-action@v6
+        with:
+          context: ./dev/spark-test-image/python-311/
+          push: true
+          tags: ghcr.io/apache/spark/apache-spark-github-action-image-pyspark-python-311-cache:${{ github.ref_name }}-static
+          cache-from: type=registry,ref=ghcr.io/apache/spark/apache-spark-github-action-image-pyspark-python-311-cache:${{ github.ref_name }}
+          cache-to: type=registry,ref=ghcr.io/apache/spark/apache-spark-github-action-image-pyspark-python-311-cache:${{ github.ref_name }},mode=max
+      - name: Image digest (PySpark with Python 3.11)
+        if: hashFiles('dev/spark-test-image/python-311/Dockerfile') != ''
+        run: echo ${{ steps.docker_build_pyspark_python_311.outputs.digest }}
       - name: Build and push (PySpark with Python 3.12)
         if: hashFiles('dev/spark-test-image/python-312/Dockerfile') != ''
         id: docker_build_pyspark_python_312
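
For readers less familiar with docker/build-push-action, the new Python 3.11 step above is roughly equivalent to the buildx invocation sketched below; this is an approximation of what the action does with these inputs, not a command taken from the repository, and `github.ref_name` is substituted with "master" for illustration:

```
# Approximate local equivalent of the "Build and push (PySpark with Python 3.11)" step.
docker buildx build \
  --push \
  --tag ghcr.io/apache/spark/apache-spark-github-action-image-pyspark-python-311-cache:master-static \
  --cache-from type=registry,ref=ghcr.io/apache/spark/apache-spark-github-action-image-pyspark-python-311-cache:master \
  --cache-to type=registry,ref=ghcr.io/apache/spark/apache-spark-github-action-image-pyspark-python-311-cache:master,mode=max \
  ./dev/spark-test-image/python-311/
```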
diff --git a/dev/spark-test-image/python-311/Dockerfile b/dev/spark-test-image/python-311/Dockerfile
new file mode 100644
index 000000000000..6287caca648f
--- /dev/null
+++ b/dev/spark-test-image/python-311/Dockerfile
@@ -0,0 +1,80 @@
+#
+# Licensed to the Apache Software Foundation (ASF) under one or more
+# contributor license agreements.  See the NOTICE file distributed with
+# this work for additional information regarding copyright ownership.
+# The ASF licenses this file to You under the Apache License, Version 2.0
+# (the "License"); you may not use this file except in compliance with
+# the License.  You may obtain a copy of the License at
+#
+#    http://www.apache.org/licenses/LICENSE-2.0
+#
+# Unless required by applicable law or agreed to in writing, software
+# distributed under the License is distributed on an "AS IS" BASIS,
+# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+# See the License for the specific language governing permissions and
+# limitations under the License.
+#
+
+# Image for building and testing Spark branches. Based on Ubuntu 22.04.
+# See also in https://hub.docker.com/_/ubuntu
+FROM ubuntu:jammy-20240911.1
+LABEL org.opencontainers.image.authors="Apache Spark project <[email protected]>"
+LABEL org.opencontainers.image.licenses="Apache-2.0"
+LABEL org.opencontainers.image.ref.name="Apache Spark Infra Image For PySpark with Python 3.11"
+# Overwrite this label to avoid exposing the underlying Ubuntu OS version label
+LABEL org.opencontainers.image.version=""
+
+ENV FULL_REFRESH_DATE 20241212
+
+ENV DEBIAN_FRONTEND noninteractive
+ENV DEBCONF_NONINTERACTIVE_SEEN true
+
+RUN apt-get update && apt-get install -y \
+    build-essential \
+    ca-certificates \
+    curl \
+    gfortran \
+    git \
+    gnupg \
+    libcurl4-openssl-dev \
+    libfontconfig1-dev \
+    libfreetype6-dev \
+    libfribidi-dev \
+    libgit2-dev \
+    libharfbuzz-dev \
+    libjpeg-dev \
+    liblapack-dev \
+    libopenblas-dev \
+    libpng-dev \
+    libpython3-dev \
+    libssl-dev \
+    libtiff5-dev \
+    libxml2-dev \
+    openjdk-17-jdk-headless \
+    pkg-config \
+    qpdf \
+    tzdata \
+    software-properties-common \
+    wget \
+    zlib1g-dev
+
+# Install Python 3.11
+RUN add-apt-repository ppa:deadsnakes/ppa
+RUN apt-get update && apt-get install -y \
+    python3.11 \
+    && apt-get autoremove --purge -y \
+    && apt-get clean \
+    && rm -rf /var/lib/apt/lists/*
+
+
+ARG BASIC_PIP_PKGS="numpy pyarrow>=18.0.0 six==1.16.0 pandas==2.2.3 scipy plotly>=4.8 mlflow>=2.8.1 coverage matplotlib openpyxl memory-profiler>=0.61.0 scikit-learn>=1.3.2"
+# Python deps for Spark Connect
+ARG CONNECT_PIP_PKGS="grpcio==1.67.0 grpcio-status==1.67.0 protobuf==5.28.3 googleapis-common-protos==1.65.0 graphviz==0.20.3"
+
+# Install Python 3.11 packages
+RUN curl -sS https://bootstrap.pypa.io/get-pip.py | python3.11
+RUN python3.11 -m pip install --ignore-installed blinker>=1.6.2 # mlflow needs this
+RUN python3.11 -m pip install $BASIC_PIP_PKGS unittest-xml-reporting $CONNECT_PIP_PKGS && \
+    python3.11 -m pip install torch torchvision --index-url https://download.pytorch.org/whl/cpu && \
+    python3.11 -m pip install deepspeed torcheval && \
+    python3.11 -m pip cache purge
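
To sanity-check the new Dockerfile locally, a minimal build-and-smoke-test could look like the sketch below; the local tag name is chosen for illustration and is not used by the workflows in this commit:

```
# Hypothetical local smoke test of the new image (tag name is illustrative).
docker build -t spark-test-image-python-311 dev/spark-test-image/python-311/
docker run --rm spark-test-image-python-311 \
    python3.11 -c "import pandas, pyarrow, grpc; print(pandas.__version__, pyarrow.__version__)"
```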

