This is an automated email from the ASF dual-hosted git repository.

yangjie01 pushed a commit to branch master
in repository https://gitbox.apache.org/repos/asf/spark.git


The following commit(s) were added to refs/heads/master by this push:
     new 30ace9fc412e [SPARK-55309][BUILD] Upgrade protobuf to 33.5
30ace9fc412e is described below

commit 30ace9fc412e448a2252551cc24fff858eda80fd
Author: yangjie01 <[email protected]>
AuthorDate: Tue Feb 3 13:31:43 2026 +0800

    [SPARK-55309][BUILD] Upgrade protobuf to 33.5
    
    ### What changes were proposed in this pull request?
    This PR aims to upgrade protobuf from 33.0 to 33.5:
    - For Java, upgrading from version 4.33.0 to 4.33.5
    - For Python, upgrading from version 6.33.0 to 6.33.5
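    
    As a quick local sanity check (a minimal sketch, not part of this
    patch), the pinned Python runtime can be confirmed after installation:
    
        import google.protobuf
    
        # After this upgrade the pinned runtime should report 6.33.5.
        assert google.protobuf.__version__ == "6.33.5"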
    
    ### Why are the changes needed?
    The new version includes a fix for CVE-2026-0994. The full release
    notes are as follows:
    
    - https://github.com/protocolbuffers/protobuf/releases/tag/v33.5
    - https://github.com/protocolbuffers/protobuf/releases/tag/v33.4
    - https://github.com/protocolbuffers/protobuf/releases/tag/v33.3
    - https://github.com/protocolbuffers/protobuf/releases/tag/v33.2
    - https://github.com/protocolbuffers/protobuf/releases/tag/v33.1
    
    ### Does this PR introduce _any_ user-facing change?
    No
    
    ### How was this patch tested?
    Pass GitHub Actions
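    
    For an additional local spot check (a sketch; it assumes PySpark and a
    6.33.5 protobuf runtime are installed), importing any regenerated
    module exercises the version guard baked into the gencode:
    
        # Import triggers _runtime_version.ValidateProtobufRuntimeVersion,
        # which raises if the installed protobuf runtime is older than the
        # 6.33.5 gencode in these regenerated files.
        from pyspark.sql.connect.proto import base_pb2  # noqa: F401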
    
    ### Was this patch authored or co-authored using generative AI tooling?
    No
    
    Closes #54090 from LuciferYang/protobuf-33.5.
    
    Authored-by: yangjie01 <[email protected]>
    Signed-off-by: yangjie01 <[email protected]>
---
 .github/workflows/build_and_test.yml                    |  4 ++--
 .github/workflows/build_python_connect.yml              |  2 +-
 .github/workflows/build_python_connect40.yml            |  2 +-
 .github/workflows/maven_test.yml                        |  2 +-
 .github/workflows/pages.yml                             |  2 +-
 .github/workflows/python_hosted_runner_test.yml         |  2 +-
 dev/create-release/spark-rm/Dockerfile                  |  2 +-
 dev/infra/Dockerfile                                    |  2 +-
 dev/requirements.txt                                    |  2 +-
 dev/spark-test-image/docs/Dockerfile                    |  2 +-
 dev/spark-test-image/python-310/Dockerfile              |  2 +-
 dev/spark-test-image/python-311/Dockerfile              |  2 +-
 dev/spark-test-image/python-312-pandas-3/Dockerfile     |  2 +-
 dev/spark-test-image/python-312/Dockerfile              |  2 +-
 dev/spark-test-image/python-313/Dockerfile              |  2 +-
 dev/spark-test-image/python-314-nogil/Dockerfile        |  2 +-
 dev/spark-test-image/python-314/Dockerfile              |  2 +-
 pom.xml                                                 |  2 +-
 project/SparkBuild.scala                                |  2 +-
 python/pyspark/sql/connect/proto/base_pb2.py            |  4 ++--
 python/pyspark/sql/connect/proto/catalog_pb2.py         |  4 ++--
 python/pyspark/sql/connect/proto/commands_pb2.py        |  4 ++--
 python/pyspark/sql/connect/proto/common_pb2.py          |  4 ++--
 python/pyspark/sql/connect/proto/example_plugins_pb2.py |  4 ++--
 python/pyspark/sql/connect/proto/expressions_pb2.py     |  4 ++--
 python/pyspark/sql/connect/proto/ml_common_pb2.py       |  4 ++--
 python/pyspark/sql/connect/proto/ml_pb2.py              |  4 ++--
 python/pyspark/sql/connect/proto/pipelines_pb2.py       |  4 ++--
 python/pyspark/sql/connect/proto/relations_pb2.py       |  4 ++--
 python/pyspark/sql/connect/proto/types_pb2.py           |  4 ++--
 python/pyspark/sql/streaming/proto/StateMessage_pb2.py  |  4 ++--
 sql/connect/common/src/main/buf.gen.yaml                | 10 +++++-----
 sql/core/src/main/buf.gen.yaml                          |  2 +-
 33 files changed, 50 insertions(+), 50 deletions(-)

diff --git a/.github/workflows/build_and_test.yml b/.github/workflows/build_and_test.yml
index dffb75ab3450..4a0d4f358bec 100644
--- a/.github/workflows/build_and_test.yml
+++ b/.github/workflows/build_and_test.yml
@@ -368,7 +368,7 @@ jobs:
     - name: Install Python packages (Python 3.12)
      if: (contains(matrix.modules, 'sql') && !contains(matrix.modules, 'sql-')) || contains(matrix.modules, 'connect') || contains(matrix.modules, 'yarn')
       run: |
-        python3.12 -m pip install 'numpy>=1.22' pyarrow 'pandas==2.3.3' pyyaml scipy unittest-xml-reporting 'lxml==4.9.4' 'grpcio==1.76.0' 'grpcio-status==1.76.0' 'protobuf==6.33.0' 'zstandard==0.25.0'
+        python3.12 -m pip install 'numpy>=1.22' pyarrow 'pandas==2.3.3' pyyaml scipy unittest-xml-reporting 'lxml==4.9.4' 'grpcio==1.76.0' 'grpcio-status==1.76.0' 'protobuf==6.33.5' 'zstandard==0.25.0'
         python3.12 -m pip list
     # Run the tests.
     - name: Run tests
@@ -818,7 +818,7 @@ jobs:
         python-version: '3.12'
     - name: Install dependencies for Python CodeGen check
       run: |
-        python3.12 -m pip install 'black==23.12.1' 'protobuf==6.33.0' 'mypy==1.8.0' 'mypy-protobuf==3.3.0'
+        python3.12 -m pip install 'black==23.12.1' 'protobuf==6.33.5' 'mypy==1.8.0' 'mypy-protobuf==3.3.0'
         python3.12 -m pip list
     - name: Python CodeGen check for branch-3.5
       if: inputs.branch == 'branch-3.5'
diff --git a/.github/workflows/build_python_connect.yml b/.github/workflows/build_python_connect.yml
index fcd177f15f1f..631ec06a4e22 100644
--- a/.github/workflows/build_python_connect.yml
+++ b/.github/workflows/build_python_connect.yml
@@ -72,7 +72,7 @@ jobs:
           python packaging/client/setup.py sdist
           cd dist
           pip install pyspark*client-*.tar.gz
-          pip install 'grpcio==1.76.0' 'grpcio-status==1.76.0' 'protobuf==6.33.0' 'googleapis-common-protos==1.71.0' 'graphviz==0.20.3' 'six==1.16.0' 'pandas==2.3.3' scipy 'plotly<6.0.0' 'mlflow>=2.8.1' coverage matplotlib openpyxl 'memory-profiler>=0.61.0' 'scikit-learn>=1.3.2' 'graphviz==0.20.3' 'torch<2.6.0' torchvision torcheval deepspeed unittest-xml-reporting 'zstandard==0.25.0'
+          pip install 'grpcio==1.76.0' 'grpcio-status==1.76.0' 'protobuf==6.33.5' 'googleapis-common-protos==1.71.0' 'graphviz==0.20.3' 'six==1.16.0' 'pandas==2.3.3' scipy 'plotly<6.0.0' 'mlflow>=2.8.1' coverage matplotlib openpyxl 'memory-profiler>=0.61.0' 'scikit-learn>=1.3.2' 'graphviz==0.20.3' 'torch<2.6.0' torchvision torcheval deepspeed unittest-xml-reporting 'zstandard==0.25.0'
       - name: List Python packages
         run: python -m pip list
       - name: Run tests (local)
diff --git a/.github/workflows/build_python_connect40.yml b/.github/workflows/build_python_connect40.yml
index c4ffa08e6ccd..a9f9d0ecc168 100644
--- a/.github/workflows/build_python_connect40.yml
+++ b/.github/workflows/build_python_connect40.yml
@@ -71,7 +71,7 @@ jobs:
          pip install 'numpy' 'pyarrow>=18.0.0' 'pandas==2.2.3' scipy unittest-xml-reporting 'plotly<6.0.0' 'mlflow>=2.8.1' coverage 'matplotlib' openpyxl 'memory-profiler==0.61.0' 'scikit-learn>=1.3.2'
 
           # Add Python deps for Spark Connect.
-          pip install 'grpcio==1.76.0' 'grpcio-status==1.76.0' 'protobuf==6.33.0' 'googleapis-common-protos==1.71.0' 'graphviz==0.20.3' 'zstandard==0.25.0'
+          pip install 'grpcio==1.76.0' 'grpcio-status==1.76.0' 'protobuf==6.33.5' 'googleapis-common-protos==1.71.0' 'graphviz==0.20.3' 'zstandard==0.25.0'
 
           # Add torch as a testing dependency for TorchDistributor
           pip install 'torch==2.0.1' 'torchvision==0.15.2' torcheval
diff --git a/.github/workflows/maven_test.yml b/.github/workflows/maven_test.yml
index 31c783190004..25471790c315 100644
--- a/.github/workflows/maven_test.yml
+++ b/.github/workflows/maven_test.yml
@@ -181,7 +181,7 @@ jobs:
       - name: Install Python packages (Python 3.12)
        if: contains(matrix.modules, 'resource-managers#yarn') || (contains(matrix.modules, 'sql#core')) || contains(matrix.modules, 'connect')
         run: |
-          python3.12 -m pip install 'numpy>=1.22' pyarrow 'pandas==2.3.3' pyyaml scipy unittest-xml-reporting 'grpcio==1.76.0' 'grpcio-status==1.76.0' 'protobuf==6.33.0' 'zstandard==0.25.0'
+          python3.12 -m pip install 'numpy>=1.22' pyarrow 'pandas==2.3.3' pyyaml scipy unittest-xml-reporting 'grpcio==1.76.0' 'grpcio-status==1.76.0' 'protobuf==6.33.5' 'zstandard==0.25.0'
           python3.12 -m pip list
       # Run the tests using script command.
      # BSD's script command doesn't support -c option, and the usage is different from Linux's one.
diff --git a/.github/workflows/pages.yml b/.github/workflows/pages.yml
index 2bba3dcaf176..2b54c08ddfe4 100644
--- a/.github/workflows/pages.yml
+++ b/.github/workflows/pages.yml
@@ -63,7 +63,7 @@ jobs:
          pip install 'sphinx==4.5.0' mkdocs 'pydata_sphinx_theme>=0.13' sphinx-copybutton nbsphinx numpydoc jinja2 markupsafe 'pyzmq<24.0.0' \
             ipython ipython_genutils sphinx_plotly_directive 'numpy>=1.22' pyarrow 'pandas==2.3.3' 'plotly>=4.8' 'docutils<0.18.0' \
             'flake8==3.9.0' 'mypy==1.8.0' 'pytest==7.1.3' 'pytest-mypy-plugins==1.9.3' 'black==23.12.1' \
-            'pandas-stubs==1.2.0.53' 'grpcio==1.76.0' 'grpcio-status==1.76.0' 'protobuf==6.33.0' 'grpc-stubs==1.24.11' 'googleapis-common-protos-stubs==2.2.0' \
+            'pandas-stubs==1.2.0.53' 'grpcio==1.76.0' 'grpcio-status==1.76.0' 'protobuf==6.33.5' 'grpc-stubs==1.24.11' 'googleapis-common-protos-stubs==2.2.0' \
             'sphinxcontrib-applehelp==1.0.4' 'sphinxcontrib-devhelp==1.0.2' 'sphinxcontrib-htmlhelp==2.0.1' 'sphinxcontrib-qthelp==1.0.3' 'sphinxcontrib-serializinghtml==1.1.5'
       - name: Install Ruby for documentation generation
         uses: ruby/setup-ruby@v1
diff --git a/.github/workflows/python_hosted_runner_test.yml b/.github/workflows/python_hosted_runner_test.yml
index b82c7447464f..49dbb9a1c941 100644
--- a/.github/workflows/python_hosted_runner_test.yml
+++ b/.github/workflows/python_hosted_runner_test.yml
@@ -153,7 +153,7 @@ jobs:
          python${{matrix.python}} -m pip install --ignore-installed 'blinker>=1.6.2'
          python${{matrix.python}} -m pip install --ignore-installed 'six==1.16.0'
          python${{matrix.python}} -m pip install numpy 'pyarrow>=22.0.0' 'six==1.16.0' 'pandas==2.3.3' scipy 'plotly<6.0.0' 'mlflow>=2.8.1' coverage matplotlib openpyxl 'memory-profiler>=0.61.0' 'scikit-learn>=1.3.2' unittest-xml-reporting && \
-          python${{matrix.python}} -m pip install 'grpcio==1.76.0' 'grpcio-status==1.76.0' 'protobuf==6.33.0' 'googleapis-common-protos==1.71.0' 'zstandard==0.25.0' 'graphviz==0.20.3' && \
+          python${{matrix.python}} -m pip install 'grpcio==1.76.0' 'grpcio-status==1.76.0' 'protobuf==6.33.5' 'googleapis-common-protos==1.71.0' 'zstandard==0.25.0' 'graphviz==0.20.3' && \
           python${{matrix.python}} -m pip cache purge
       - name: List Python packages
         run: python${{matrix.python}} -m pip list
diff --git a/dev/create-release/spark-rm/Dockerfile b/dev/create-release/spark-rm/Dockerfile
index c5a6a7f44422..56d63a1d33fa 100644
--- a/dev/create-release/spark-rm/Dockerfile
+++ b/dev/create-release/spark-rm/Dockerfile
@@ -43,7 +43,7 @@ ARG BASIC_PIP_PKGS="numpy pyarrow>=18.0.0 six==1.16.0 pandas==2.3.3 scipy plotly
     mlflow>=2.8.1 coverage matplotlib openpyxl memory-profiler>=0.61.0 scikit-learn>=1.3.2 twine==3.4.1"
 
 # Python deps for Spark Connect
-ARG CONNECT_PIP_PKGS="grpcio==1.76.0 grpcio-status==1.76.0 protobuf==6.33.0 \
+ARG CONNECT_PIP_PKGS="grpcio==1.76.0 grpcio-status==1.76.0 protobuf==6.33.5 \
     googleapis-common-protos==1.71.0 graphviz==0.20.3"
 
 # Install Python 3.10 packages
diff --git a/dev/infra/Dockerfile b/dev/infra/Dockerfile
index 655e93d9eecc..9945a681d0d8 100644
--- a/dev/infra/Dockerfile
+++ b/dev/infra/Dockerfile
@@ -97,7 +97,7 @@ RUN pypy3 -m pip install numpy 'six==1.16.0' 'pandas==2.3.3' scipy coverage matp
 
 ARG BASIC_PIP_PKGS="numpy pyarrow>=18.0.0 six==1.16.0 pandas==2.3.3 scipy 
plotly>=4.8 mlflow>=2.8.1 coverage matplotlib openpyxl memory-profiler>=0.61.0 
scikit-learn>=1.3.2"
 # Python deps for Spark Connect
-ARG CONNECT_PIP_PKGS="grpcio==1.76.0 grpcio-status==1.76.0 protobuf==6.33.0 
googleapis-common-protos==1.71.0 graphviz==0.20.3"
+ARG CONNECT_PIP_PKGS="grpcio==1.76.0 grpcio-status==1.76.0 protobuf==6.33.5 
googleapis-common-protos==1.71.0 graphviz==0.20.3"
 
 # Install Python 3.10 packages
 RUN curl -sS https://bootstrap.pypa.io/get-pip.py | python3.10
diff --git a/dev/requirements.txt b/dev/requirements.txt
index 0cfbd650d9f0..840d104bd8ab 100644
--- a/dev/requirements.txt
+++ b/dev/requirements.txt
@@ -64,7 +64,7 @@ py
 grpcio>=1.76.0
 grpcio-status>=1.76.0
 googleapis-common-protos>=1.71.0
-protobuf==6.33.0
+protobuf==6.33.5
 zstandard>=0.25.0
 
 # Spark Connect python proto generation plugin (optional)
diff --git a/dev/spark-test-image/docs/Dockerfile b/dev/spark-test-image/docs/Dockerfile
index e268ea7a8351..5e04b17e4b7d 100644
--- a/dev/spark-test-image/docs/Dockerfile
+++ b/dev/spark-test-image/docs/Dockerfile
@@ -91,6 +91,6 @@ RUN curl -sS https://bootstrap.pypa.io/get-pip.py | python3.11
 RUN python3.11 -m pip install 'sphinx==4.5.0' mkdocs 'pydata_sphinx_theme>=0.13' sphinx-copybutton nbsphinx numpydoc jinja2 markupsafe 'pyzmq<24.0.0' \
   ipython ipython_genutils sphinx_plotly_directive 'numpy>=1.22' pyarrow 'pandas==2.3.3' 'plotly>=4.8' 'docutils<0.18.0' \
   'flake8==3.9.0' 'mypy==1.8.0' 'pytest==7.1.3' 'pytest-mypy-plugins==1.9.3' 'black==23.12.1' \
-  'pandas-stubs==1.2.0.53' 'grpcio==1.76.0' 'grpcio-status==1.76.0' 'protobuf==6.33.0' 'grpc-stubs==1.24.11' 'googleapis-common-protos-stubs==2.2.0' \
+  'pandas-stubs==1.2.0.53' 'grpcio==1.76.0' 'grpcio-status==1.76.0' 'protobuf==6.33.5' 'grpc-stubs==1.24.11' 'googleapis-common-protos-stubs==2.2.0' \
   'sphinxcontrib-applehelp==1.0.4' 'sphinxcontrib-devhelp==1.0.2' 'sphinxcontrib-htmlhelp==2.0.1' 'sphinxcontrib-qthelp==1.0.3' 'sphinxcontrib-serializinghtml==1.1.5' \
   && python3.11 -m pip cache purge
diff --git a/dev/spark-test-image/python-310/Dockerfile b/dev/spark-test-image/python-310/Dockerfile
index 35882ccf842a..8db320a41355 100644
--- a/dev/spark-test-image/python-310/Dockerfile
+++ b/dev/spark-test-image/python-310/Dockerfile
@@ -58,7 +58,7 @@ RUN apt-get update && apt-get install -y \
 
 ARG BASIC_PIP_PKGS="numpy pyarrow>=22.0.0 six==1.16.0 pandas==2.3.3 scipy 
plotly<6.0.0 mlflow>=2.8.1 coverage matplotlib openpyxl memory-profiler>=0.61.0 
scikit-learn>=1.3.2"
 # Python deps for Spark Connect
-ARG CONNECT_PIP_PKGS="grpcio==1.76.0 grpcio-status==1.76.0 protobuf==6.33.0 
googleapis-common-protos==1.71.0 zstandard==0.25.0 graphviz==0.20.3"
+ARG CONNECT_PIP_PKGS="grpcio==1.76.0 grpcio-status==1.76.0 protobuf==6.33.5 
googleapis-common-protos==1.71.0 zstandard==0.25.0 graphviz==0.20.3"
 
 # Install Python 3.10 packages
 RUN curl -sS https://bootstrap.pypa.io/get-pip.py | python3.10
diff --git a/dev/spark-test-image/python-311/Dockerfile b/dev/spark-test-image/python-311/Dockerfile
index 48b65b572062..4ec4e70498d0 100644
--- a/dev/spark-test-image/python-311/Dockerfile
+++ b/dev/spark-test-image/python-311/Dockerfile
@@ -57,7 +57,7 @@ RUN apt-get update && apt-get install -y \
 
 ARG BASIC_PIP_PKGS="numpy pyarrow>=22.0.0 six==1.16.0 pandas==2.3.3 scipy 
plotly<6.0.0 mlflow>=2.8.1 coverage matplotlib openpyxl memory-profiler>=0.61.0 
scikit-learn>=1.3.2 pystack psutil"
 # Python deps for Spark Connect
-ARG CONNECT_PIP_PKGS="grpcio==1.76.0 grpcio-status==1.76.0 protobuf==6.33.0 
googleapis-common-protos==1.71.0 zstandard==0.25.0 graphviz==0.20.3"
+ARG CONNECT_PIP_PKGS="grpcio==1.76.0 grpcio-status==1.76.0 protobuf==6.33.5 
googleapis-common-protos==1.71.0 zstandard==0.25.0 graphviz==0.20.3"
 
 # Install Python 3.11 packages
 RUN curl -sS https://bootstrap.pypa.io/get-pip.py | python3.11
diff --git a/dev/spark-test-image/python-312-pandas-3/Dockerfile b/dev/spark-test-image/python-312-pandas-3/Dockerfile
index 316596ee7486..6b2d61be529e 100644
--- a/dev/spark-test-image/python-312-pandas-3/Dockerfile
+++ b/dev/spark-test-image/python-312-pandas-3/Dockerfile
@@ -60,7 +60,7 @@ RUN apt-get update && apt-get install -y \
 # Note that mlflow is execluded since it requires pandas<3
 ARG BASIC_PIP_PKGS="numpy pyarrow>=22.0.0 six==1.16.0 pandas>=3 scipy 
plotly<6.0.0 coverage matplotlib openpyxl memory-profiler>=0.61.0 
scikit-learn>=1.3.2"
 # Python deps for Spark Connect
-ARG CONNECT_PIP_PKGS="grpcio==1.76.0 grpcio-status==1.76.0 protobuf==6.33.0 
googleapis-common-protos==1.71.0 zstandard==0.25.0 graphviz==0.20.3"
+ARG CONNECT_PIP_PKGS="grpcio==1.76.0 grpcio-status==1.76.0 protobuf==6.33.5 
googleapis-common-protos==1.71.0 zstandard==0.25.0 graphviz==0.20.3"
 
 # Install Python 3.12 packages
 RUN curl -sS https://bootstrap.pypa.io/get-pip.py | python3.12
diff --git a/dev/spark-test-image/python-312/Dockerfile b/dev/spark-test-image/python-312/Dockerfile
index 4d15f5203124..eae01b72e054 100644
--- a/dev/spark-test-image/python-312/Dockerfile
+++ b/dev/spark-test-image/python-312/Dockerfile
@@ -57,7 +57,7 @@ RUN apt-get update && apt-get install -y \
 
 ARG BASIC_PIP_PKGS="numpy pyarrow>=22.0.0 six==1.16.0 pandas==2.3.3 scipy 
plotly<6.0.0 mlflow>=2.8.1 coverage matplotlib openpyxl memory-profiler>=0.61.0 
scikit-learn>=1.3.2"
 # Python deps for Spark Connect
-ARG CONNECT_PIP_PKGS="grpcio==1.76.0 grpcio-status==1.76.0 protobuf==6.33.0 
googleapis-common-protos==1.71.0 zstandard==0.25.0 graphviz==0.20.3"
+ARG CONNECT_PIP_PKGS="grpcio==1.76.0 grpcio-status==1.76.0 protobuf==6.33.5 
googleapis-common-protos==1.71.0 zstandard==0.25.0 graphviz==0.20.3"
 
 # Install Python 3.12 packages
 RUN curl -sS https://bootstrap.pypa.io/get-pip.py | python3.12
diff --git a/dev/spark-test-image/python-313/Dockerfile b/dev/spark-test-image/python-313/Dockerfile
index 4bf2dee2e620..0280d9cbeaa8 100644
--- a/dev/spark-test-image/python-313/Dockerfile
+++ b/dev/spark-test-image/python-313/Dockerfile
@@ -57,7 +57,7 @@ RUN apt-get update && apt-get install -y \
 
 ARG BASIC_PIP_PKGS="numpy pyarrow>=22.0.0 six==1.16.0 pandas==2.3.3 scipy 
plotly<6.0.0 mlflow>=2.8.1 coverage matplotlib openpyxl memory-profiler>=0.61.0 
scikit-learn>=1.3.2"
 # Python deps for Spark Connect
-ARG CONNECT_PIP_PKGS="grpcio==1.76.0 grpcio-status==1.76.0 protobuf==6.33.0 
googleapis-common-protos==1.71.0 zstandard==0.25.0 graphviz==0.20.3"
+ARG CONNECT_PIP_PKGS="grpcio==1.76.0 grpcio-status==1.76.0 protobuf==6.33.5 
googleapis-common-protos==1.71.0 zstandard==0.25.0 graphviz==0.20.3"
 
 # Install Python 3.13 packages
 RUN curl -sS https://bootstrap.pypa.io/get-pip.py | python3.13
diff --git a/dev/spark-test-image/python-314-nogil/Dockerfile b/dev/spark-test-image/python-314-nogil/Dockerfile
index 1145e1260e5c..b745557fb496 100644
--- a/dev/spark-test-image/python-314-nogil/Dockerfile
+++ b/dev/spark-test-image/python-314-nogil/Dockerfile
@@ -56,7 +56,7 @@ RUN apt-get update && apt-get install -y \
 
 
 ARG BASIC_PIP_PKGS="numpy pyarrow>=22.0.0 six==1.16.0 pandas==2.3.3 scipy 
plotly<6.0.0 mlflow>=2.8.1 coverage matplotlib openpyxl memory-profiler>=0.61.0 
scikit-learn>=1.3.2"
-ARG CONNECT_PIP_PKGS="grpcio==1.76.0 grpcio-status==1.76.0 protobuf==6.33.0 
googleapis-common-protos==1.71.0 zstandard==0.25.0 graphviz==0.20.3"
+ARG CONNECT_PIP_PKGS="grpcio==1.76.0 grpcio-status==1.76.0 protobuf==6.33.5 
googleapis-common-protos==1.71.0 zstandard==0.25.0 graphviz==0.20.3"
 
 
 # Install Python 3.14 packages
diff --git a/dev/spark-test-image/python-314/Dockerfile b/dev/spark-test-image/python-314/Dockerfile
index 8644df708ec6..df2a73b12fb8 100644
--- a/dev/spark-test-image/python-314/Dockerfile
+++ b/dev/spark-test-image/python-314/Dockerfile
@@ -57,7 +57,7 @@ RUN apt-get update && apt-get install -y \
 
 ARG BASIC_PIP_PKGS="numpy pyarrow>=22.0.0 six==1.16.0 pandas==2.3.3 scipy 
plotly<6.0.0 mlflow>=2.8.1 coverage matplotlib openpyxl memory-profiler>=0.61.0 
scikit-learn>=1.3.2"
 # Python deps for Spark Connect
-ARG CONNECT_PIP_PKGS="grpcio==1.76.0 grpcio-status==1.76.0 protobuf==6.33.0 
googleapis-common-protos==1.71.0 zstandard==0.25.0 graphviz==0.20.3"
+ARG CONNECT_PIP_PKGS="grpcio==1.76.0 grpcio-status==1.76.0 protobuf==6.33.5 
googleapis-common-protos==1.71.0 zstandard==0.25.0 graphviz==0.20.3"
 
 # Install Python 3.14 packages
 RUN curl -sS https://bootstrap.pypa.io/get-pip.py | python3.14
diff --git a/pom.xml b/pom.xml
index 69474c1ee1c9..388f8ba377cb 100644
--- a/pom.xml
+++ b/pom.xml
@@ -129,7 +129,7 @@
    <!-- make sure to update IsolatedClientLoader whenever this version is changed -->
     <hadoop.version>3.4.2</hadoop.version>
    <!-- SPARK-41247: When updating `protobuf.version`, also need to update `protoVersion` in `SparkBuild.scala` -->
-    <protobuf.version>4.33.0</protobuf.version>
+    <protobuf.version>4.33.5</protobuf.version>
     <protoc-jar-maven-plugin.version>3.11.4</protoc-jar-maven-plugin.version>
     <zookeeper.version>3.9.4</zookeeper.version>
     <curator.version>5.9.0</curator.version>
diff --git a/project/SparkBuild.scala b/project/SparkBuild.scala
index 02ebdce375cc..912d959afa66 100644
--- a/project/SparkBuild.scala
+++ b/project/SparkBuild.scala
@@ -91,7 +91,7 @@ object BuildCommons {
 
   // Google Protobuf version used for generating the protobuf.
   // SPARK-41247: needs to be consistent with `protobuf.version` in `pom.xml`.
-  val protoVersion = "4.33.0"
+  val protoVersion = "4.33.5"
 }
 
 object SparkBuild extends PomBuild {
diff --git a/python/pyspark/sql/connect/proto/base_pb2.py b/python/pyspark/sql/connect/proto/base_pb2.py
index 32b2840dffad..08950134babb 100644
--- a/python/pyspark/sql/connect/proto/base_pb2.py
+++ b/python/pyspark/sql/connect/proto/base_pb2.py
@@ -18,7 +18,7 @@
 # Generated by the protocol buffer compiler.  DO NOT EDIT!
 # NO CHECKED-IN PROTOBUF GENCODE
 # source: spark/connect/base.proto
-# Protobuf Python Version: 6.33.0
+# Protobuf Python Version: 6.33.5
 """Generated protocol buffer code."""
 from google.protobuf import descriptor as _descriptor
 from google.protobuf import descriptor_pool as _descriptor_pool
@@ -27,7 +27,7 @@ from google.protobuf import symbol_database as _symbol_database
 from google.protobuf.internal import builder as _builder
 
 _runtime_version.ValidateProtobufRuntimeVersion(
-    _runtime_version.Domain.PUBLIC, 6, 33, 0, "", "spark/connect/base.proto"
+    _runtime_version.Domain.PUBLIC, 6, 33, 5, "", "spark/connect/base.proto"
 )
 # @@protoc_insertion_point(imports)
 
diff --git a/python/pyspark/sql/connect/proto/catalog_pb2.py b/python/pyspark/sql/connect/proto/catalog_pb2.py
index 054b367bd3b3..f99c6d18cda1 100644
--- a/python/pyspark/sql/connect/proto/catalog_pb2.py
+++ b/python/pyspark/sql/connect/proto/catalog_pb2.py
@@ -18,7 +18,7 @@
 # Generated by the protocol buffer compiler.  DO NOT EDIT!
 # NO CHECKED-IN PROTOBUF GENCODE
 # source: spark/connect/catalog.proto
-# Protobuf Python Version: 6.33.0
+# Protobuf Python Version: 6.33.5
 """Generated protocol buffer code."""
 from google.protobuf import descriptor as _descriptor
 from google.protobuf import descriptor_pool as _descriptor_pool
@@ -27,7 +27,7 @@ from google.protobuf import symbol_database as _symbol_database
 from google.protobuf.internal import builder as _builder
 
 _runtime_version.ValidateProtobufRuntimeVersion(
-    _runtime_version.Domain.PUBLIC, 6, 33, 0, "", "spark/connect/catalog.proto"
+    _runtime_version.Domain.PUBLIC, 6, 33, 5, "", "spark/connect/catalog.proto"
 )
 # @@protoc_insertion_point(imports)
 
diff --git a/python/pyspark/sql/connect/proto/commands_pb2.py b/python/pyspark/sql/connect/proto/commands_pb2.py
index 53c192fda6e2..53a15ae9c30a 100644
--- a/python/pyspark/sql/connect/proto/commands_pb2.py
+++ b/python/pyspark/sql/connect/proto/commands_pb2.py
@@ -18,7 +18,7 @@
 # Generated by the protocol buffer compiler.  DO NOT EDIT!
 # NO CHECKED-IN PROTOBUF GENCODE
 # source: spark/connect/commands.proto
-# Protobuf Python Version: 6.33.0
+# Protobuf Python Version: 6.33.5
 """Generated protocol buffer code."""
 from google.protobuf import descriptor as _descriptor
 from google.protobuf import descriptor_pool as _descriptor_pool
@@ -27,7 +27,7 @@ from google.protobuf import symbol_database as _symbol_database
 from google.protobuf.internal import builder as _builder
 
 _runtime_version.ValidateProtobufRuntimeVersion(
-    _runtime_version.Domain.PUBLIC, 6, 33, 0, "", "spark/connect/commands.proto"
+    _runtime_version.Domain.PUBLIC, 6, 33, 5, "", "spark/connect/commands.proto"
 )
 # @@protoc_insertion_point(imports)
 
diff --git a/python/pyspark/sql/connect/proto/common_pb2.py b/python/pyspark/sql/connect/proto/common_pb2.py
index 8abd8fa6dc04..9868a198e685 100644
--- a/python/pyspark/sql/connect/proto/common_pb2.py
+++ b/python/pyspark/sql/connect/proto/common_pb2.py
@@ -18,7 +18,7 @@
 # Generated by the protocol buffer compiler.  DO NOT EDIT!
 # NO CHECKED-IN PROTOBUF GENCODE
 # source: spark/connect/common.proto
-# Protobuf Python Version: 6.33.0
+# Protobuf Python Version: 6.33.5
 """Generated protocol buffer code."""
 from google.protobuf import descriptor as _descriptor
 from google.protobuf import descriptor_pool as _descriptor_pool
@@ -27,7 +27,7 @@ from google.protobuf import symbol_database as _symbol_database
 from google.protobuf.internal import builder as _builder
 
 _runtime_version.ValidateProtobufRuntimeVersion(
-    _runtime_version.Domain.PUBLIC, 6, 33, 0, "", "spark/connect/common.proto"
+    _runtime_version.Domain.PUBLIC, 6, 33, 5, "", "spark/connect/common.proto"
 )
 # @@protoc_insertion_point(imports)
 
diff --git a/python/pyspark/sql/connect/proto/example_plugins_pb2.py b/python/pyspark/sql/connect/proto/example_plugins_pb2.py
index 423768ee63d6..47041d38ae46 100644
--- a/python/pyspark/sql/connect/proto/example_plugins_pb2.py
+++ b/python/pyspark/sql/connect/proto/example_plugins_pb2.py
@@ -18,7 +18,7 @@
 # Generated by the protocol buffer compiler.  DO NOT EDIT!
 # NO CHECKED-IN PROTOBUF GENCODE
 # source: spark/connect/example_plugins.proto
-# Protobuf Python Version: 6.33.0
+# Protobuf Python Version: 6.33.5
 """Generated protocol buffer code."""
 from google.protobuf import descriptor as _descriptor
 from google.protobuf import descriptor_pool as _descriptor_pool
@@ -27,7 +27,7 @@ from google.protobuf import symbol_database as _symbol_database
 from google.protobuf.internal import builder as _builder
 
 _runtime_version.ValidateProtobufRuntimeVersion(
-    _runtime_version.Domain.PUBLIC, 6, 33, 0, "", "spark/connect/example_plugins.proto"
+    _runtime_version.Domain.PUBLIC, 6, 33, 5, "", "spark/connect/example_plugins.proto"
 )
 # @@protoc_insertion_point(imports)
 
diff --git a/python/pyspark/sql/connect/proto/expressions_pb2.py b/python/pyspark/sql/connect/proto/expressions_pb2.py
index 0c466aeb67a0..2aed56b108de 100644
--- a/python/pyspark/sql/connect/proto/expressions_pb2.py
+++ b/python/pyspark/sql/connect/proto/expressions_pb2.py
@@ -18,7 +18,7 @@
 # Generated by the protocol buffer compiler.  DO NOT EDIT!
 # NO CHECKED-IN PROTOBUF GENCODE
 # source: spark/connect/expressions.proto
-# Protobuf Python Version: 6.33.0
+# Protobuf Python Version: 6.33.5
 """Generated protocol buffer code."""
 from google.protobuf import descriptor as _descriptor
 from google.protobuf import descriptor_pool as _descriptor_pool
@@ -27,7 +27,7 @@ from google.protobuf import symbol_database as _symbol_database
 from google.protobuf.internal import builder as _builder
 
 _runtime_version.ValidateProtobufRuntimeVersion(
-    _runtime_version.Domain.PUBLIC, 6, 33, 0, "", "spark/connect/expressions.proto"
+    _runtime_version.Domain.PUBLIC, 6, 33, 5, "", "spark/connect/expressions.proto"
 )
 # @@protoc_insertion_point(imports)
 
diff --git a/python/pyspark/sql/connect/proto/ml_common_pb2.py b/python/pyspark/sql/connect/proto/ml_common_pb2.py
index de547fc2a102..136f7c21d595 100644
--- a/python/pyspark/sql/connect/proto/ml_common_pb2.py
+++ b/python/pyspark/sql/connect/proto/ml_common_pb2.py
@@ -18,7 +18,7 @@
 # Generated by the protocol buffer compiler.  DO NOT EDIT!
 # NO CHECKED-IN PROTOBUF GENCODE
 # source: spark/connect/ml_common.proto
-# Protobuf Python Version: 6.33.0
+# Protobuf Python Version: 6.33.5
 """Generated protocol buffer code."""
 from google.protobuf import descriptor as _descriptor
 from google.protobuf import descriptor_pool as _descriptor_pool
@@ -27,7 +27,7 @@ from google.protobuf import symbol_database as _symbol_database
 from google.protobuf.internal import builder as _builder
 
 _runtime_version.ValidateProtobufRuntimeVersion(
-    _runtime_version.Domain.PUBLIC, 6, 33, 0, "", "spark/connect/ml_common.proto"
+    _runtime_version.Domain.PUBLIC, 6, 33, 5, "", "spark/connect/ml_common.proto"
 )
 # @@protoc_insertion_point(imports)
 
diff --git a/python/pyspark/sql/connect/proto/ml_pb2.py b/python/pyspark/sql/connect/proto/ml_pb2.py
index 3bd141815c8e..48b6fde046fd 100644
--- a/python/pyspark/sql/connect/proto/ml_pb2.py
+++ b/python/pyspark/sql/connect/proto/ml_pb2.py
@@ -18,7 +18,7 @@
 # Generated by the protocol buffer compiler.  DO NOT EDIT!
 # NO CHECKED-IN PROTOBUF GENCODE
 # source: spark/connect/ml.proto
-# Protobuf Python Version: 6.33.0
+# Protobuf Python Version: 6.33.5
 """Generated protocol buffer code."""
 from google.protobuf import descriptor as _descriptor
 from google.protobuf import descriptor_pool as _descriptor_pool
@@ -27,7 +27,7 @@ from google.protobuf import symbol_database as _symbol_database
 from google.protobuf.internal import builder as _builder
 
 _runtime_version.ValidateProtobufRuntimeVersion(
-    _runtime_version.Domain.PUBLIC, 6, 33, 0, "", "spark/connect/ml.proto"
+    _runtime_version.Domain.PUBLIC, 6, 33, 5, "", "spark/connect/ml.proto"
 )
 # @@protoc_insertion_point(imports)
 
diff --git a/python/pyspark/sql/connect/proto/pipelines_pb2.py b/python/pyspark/sql/connect/proto/pipelines_pb2.py
index 7a30def861d2..ddd60cf1ac7c 100644
--- a/python/pyspark/sql/connect/proto/pipelines_pb2.py
+++ b/python/pyspark/sql/connect/proto/pipelines_pb2.py
@@ -18,7 +18,7 @@
 # Generated by the protocol buffer compiler.  DO NOT EDIT!
 # NO CHECKED-IN PROTOBUF GENCODE
 # source: spark/connect/pipelines.proto
-# Protobuf Python Version: 6.33.0
+# Protobuf Python Version: 6.33.5
 """Generated protocol buffer code."""
 from google.protobuf import descriptor as _descriptor
 from google.protobuf import descriptor_pool as _descriptor_pool
@@ -27,7 +27,7 @@ from google.protobuf import symbol_database as _symbol_database
 from google.protobuf.internal import builder as _builder
 
 _runtime_version.ValidateProtobufRuntimeVersion(
-    _runtime_version.Domain.PUBLIC, 6, 33, 0, "", "spark/connect/pipelines.proto"
+    _runtime_version.Domain.PUBLIC, 6, 33, 5, "", "spark/connect/pipelines.proto"
 )
 # @@protoc_insertion_point(imports)
 
diff --git a/python/pyspark/sql/connect/proto/relations_pb2.py b/python/pyspark/sql/connect/proto/relations_pb2.py
index 79d834033634..835587c3333d 100644
--- a/python/pyspark/sql/connect/proto/relations_pb2.py
+++ b/python/pyspark/sql/connect/proto/relations_pb2.py
@@ -18,7 +18,7 @@
 # Generated by the protocol buffer compiler.  DO NOT EDIT!
 # NO CHECKED-IN PROTOBUF GENCODE
 # source: spark/connect/relations.proto
-# Protobuf Python Version: 6.33.0
+# Protobuf Python Version: 6.33.5
 """Generated protocol buffer code."""
 from google.protobuf import descriptor as _descriptor
 from google.protobuf import descriptor_pool as _descriptor_pool
@@ -27,7 +27,7 @@ from google.protobuf import symbol_database as _symbol_database
 from google.protobuf.internal import builder as _builder
 
 _runtime_version.ValidateProtobufRuntimeVersion(
-    _runtime_version.Domain.PUBLIC, 6, 33, 0, "", "spark/connect/relations.proto"
+    _runtime_version.Domain.PUBLIC, 6, 33, 5, "", "spark/connect/relations.proto"
 )
 # @@protoc_insertion_point(imports)
 
diff --git a/python/pyspark/sql/connect/proto/types_pb2.py b/python/pyspark/sql/connect/proto/types_pb2.py
index fc5b14d068a8..8fda65044262 100644
--- a/python/pyspark/sql/connect/proto/types_pb2.py
+++ b/python/pyspark/sql/connect/proto/types_pb2.py
@@ -18,7 +18,7 @@
 # Generated by the protocol buffer compiler.  DO NOT EDIT!
 # NO CHECKED-IN PROTOBUF GENCODE
 # source: spark/connect/types.proto
-# Protobuf Python Version: 6.33.0
+# Protobuf Python Version: 6.33.5
 """Generated protocol buffer code."""
 from google.protobuf import descriptor as _descriptor
 from google.protobuf import descriptor_pool as _descriptor_pool
@@ -27,7 +27,7 @@ from google.protobuf import symbol_database as _symbol_database
 from google.protobuf.internal import builder as _builder
 
 _runtime_version.ValidateProtobufRuntimeVersion(
-    _runtime_version.Domain.PUBLIC, 6, 33, 0, "", "spark/connect/types.proto"
+    _runtime_version.Domain.PUBLIC, 6, 33, 5, "", "spark/connect/types.proto"
 )
 # @@protoc_insertion_point(imports)
 
diff --git a/python/pyspark/sql/streaming/proto/StateMessage_pb2.py b/python/pyspark/sql/streaming/proto/StateMessage_pb2.py
index 1305a6213c13..4090febe34d1 100644
--- a/python/pyspark/sql/streaming/proto/StateMessage_pb2.py
+++ b/python/pyspark/sql/streaming/proto/StateMessage_pb2.py
@@ -18,7 +18,7 @@
 # Generated by the protocol buffer compiler.  DO NOT EDIT!
 # NO CHECKED-IN PROTOBUF GENCODE
 # source: org/apache/spark/sql/execution/streaming/StateMessage.proto
-# Protobuf Python Version: 6.33.0
+# Protobuf Python Version: 6.33.5
 """Generated protocol buffer code."""
 from google.protobuf import descriptor as _descriptor
 from google.protobuf import descriptor_pool as _descriptor_pool
@@ -30,7 +30,7 @@ _runtime_version.ValidateProtobufRuntimeVersion(
     _runtime_version.Domain.PUBLIC,
     6,
     33,
-    0,
+    5,
     "",
     "org/apache/spark/sql/execution/streaming/StateMessage.proto",
 )
diff --git a/sql/connect/common/src/main/buf.gen.yaml b/sql/connect/common/src/main/buf.gen.yaml
index beaa7f1949e2..fd3aedfbf021 100644
--- a/sql/connect/common/src/main/buf.gen.yaml
+++ b/sql/connect/common/src/main/buf.gen.yaml
@@ -16,18 +16,18 @@
 #
 version: v1
 plugins:
-  - plugin: buf.build/protocolbuffers/cpp:v33.0
+  - plugin: buf.build/protocolbuffers/cpp:v33.5
     out: gen/proto/cpp
-  - plugin: buf.build/protocolbuffers/csharp:v33.0
+  - plugin: buf.build/protocolbuffers/csharp:v33.5
     out: gen/proto/csharp
-  - plugin: buf.build/protocolbuffers/java:v33.0
+  - plugin: buf.build/protocolbuffers/java:v33.5
     out: gen/proto/java
   - plugin: buf.build/grpc/ruby:v1.76.0
     out: gen/proto/ruby
-  - plugin: buf.build/protocolbuffers/ruby:v33.0
+  - plugin: buf.build/protocolbuffers/ruby:v33.5
     out: gen/proto/ruby
    # Building the Python build and building the mypy interfaces.
-  - plugin: buf.build/protocolbuffers/python:v33.0
+  - plugin: buf.build/protocolbuffers/python:v33.5
     out: gen/proto/python
   - plugin: buf.build/grpc/python:v1.76.0
     out: gen/proto/python
diff --git a/sql/core/src/main/buf.gen.yaml b/sql/core/src/main/buf.gen.yaml
index 5f87a840c6a4..dba2183b1190 100644
--- a/sql/core/src/main/buf.gen.yaml
+++ b/sql/core/src/main/buf.gen.yaml
@@ -17,7 +17,7 @@
 version: v1
 plugins:
    # Building the Python build and building the mypy interfaces.
-  - plugin: buf.build/protocolbuffers/python:v33.0
+  - plugin: buf.build/protocolbuffers/python:v33.5
     out: gen/proto/python
   - name: mypy
     out: gen/proto/python


---------------------------------------------------------------------
To unsubscribe, e-mail: [email protected]
For additional commands, e-mail: [email protected]

