This is an automated email from the ASF dual-hosted git repository.

gurwls223 pushed a commit to branch master
in repository https://gitbox.apache.org/repos/asf/spark.git


The following commit(s) were added to refs/heads/master by this push:
     new cb84939e02ff [SPARK-50596][PYTHON] Upgrade Py4J from 0.10.9.7 to 
0.10.9.8
cb84939e02ff is described below

commit cb84939e02ffdbe19830ff67216348470b0bfa6b
Author: Hyukjin Kwon <[email protected]>
AuthorDate: Wed Dec 18 16:41:05 2024 +0900

    [SPARK-50596][PYTHON] Upgrade Py4J from 0.10.9.7 to 0.10.9.8
    
    ### What changes were proposed in this pull request?
    
    This PR aims to upgrade Py4J to 0.10.9.8, with relevant changes.
    
    ### Why are the changes needed?
    
    Py4J 0.10.9.8 has several bug fixes, especially 
https://github.com/py4j/py4j/pull/538
    
    Release notes: https://www.py4j.org/changelog.html
    
    ### Does this PR introduce _any_ user-facing change?
    
    No.
    
    ### How was this patch tested?
    
    Existing test cases
    
    Closes #49214 from HyukjinKwon/SPARK-50596.
    
    Authored-by: Hyukjin Kwon <[email protected]>
    Signed-off-by: Hyukjin Kwon <[email protected]>
---
 .github/workflows/build_python_connect.yml              |   4 ++--
 .github/workflows/build_python_connect35.yml            |   2 +-
 bin/pyspark                                             |   2 +-
 bin/pyspark2.cmd                                        |   2 +-
 core/pom.xml                                            |   2 +-
 .../scala/org/apache/spark/api/python/PythonUtils.scala |   2 +-
 dev/deps/spark-deps-hadoop-3-hive-2.3                   |   2 +-
 python/docs/Makefile                                    |   2 +-
 python/docs/make2.bat                                   |   2 +-
 python/docs/source/getting_started/install.rst          |   2 +-
 python/lib/py4j-0.10.9.7-src.zip                        | Bin 42424 -> 0 bytes
 python/lib/py4j-0.10.9.8-src.zip                        | Bin 0 -> 42908 bytes
 python/packaging/classic/setup.py                       |   2 +-
 sbin/spark-config.sh                                    |   2 +-
 14 files changed, 13 insertions(+), 13 deletions(-)

diff --git a/.github/workflows/build_python_connect.yml 
b/.github/workflows/build_python_connect.yml
index 471ad31279da..b15e15abed1c 100644
--- a/.github/workflows/build_python_connect.yml
+++ b/.github/workflows/build_python_connect.yml
@@ -82,7 +82,7 @@ jobs:
           sed -i 's/rootLogger.level = info/rootLogger.level = warn/g' 
conf/log4j2.properties
 
           # Start a Spark Connect server for local
-          
PYTHONPATH="python/lib/pyspark.zip:python/lib/py4j-0.10.9.7-src.zip:$PYTHONPATH"
 ./sbin/start-connect-server.sh \
+          
PYTHONPATH="python/lib/pyspark.zip:python/lib/py4j-0.10.9.8-src.zip:$PYTHONPATH"
 ./sbin/start-connect-server.sh \
             --driver-java-options 
"-Dlog4j.configurationFile=file:$GITHUB_WORKSPACE/conf/log4j2.properties" \
             --jars "`find connector/protobuf/target -name 
spark-protobuf-*SNAPSHOT.jar`,`find connector/avro/target -name 
spark-avro*SNAPSHOT.jar`"
 
@@ -101,7 +101,7 @@ jobs:
           mv pyspark.back python/pyspark
 
           # Start a Spark Connect server for local-cluster
-          
PYTHONPATH="python/lib/pyspark.zip:python/lib/py4j-0.10.9.7-src.zip:$PYTHONPATH"
 ./sbin/start-connect-server.sh \
+          
PYTHONPATH="python/lib/pyspark.zip:python/lib/py4j-0.10.9.8-src.zip:$PYTHONPATH"
 ./sbin/start-connect-server.sh \
             --master "local-cluster[2, 4, 1024]" \
             --driver-java-options 
"-Dlog4j.configurationFile=file:$GITHUB_WORKSPACE/conf/log4j2.properties" \
             --jars "`find connector/protobuf/target -name 
spark-protobuf-*SNAPSHOT.jar`,`find connector/avro/target -name 
spark-avro*SNAPSHOT.jar`"
diff --git a/.github/workflows/build_python_connect35.yml 
b/.github/workflows/build_python_connect35.yml
index b292553f9909..080932c6c09c 100644
--- a/.github/workflows/build_python_connect35.yml
+++ b/.github/workflows/build_python_connect35.yml
@@ -85,7 +85,7 @@ jobs:
           sed -i 's/rootLogger.level = info/rootLogger.level = warn/g' 
conf/log4j2.properties
 
           # Start a Spark Connect server for local
-          
PYTHONPATH="python/lib/pyspark.zip:python/lib/py4j-0.10.9.7-src.zip:$PYTHONPATH"
 ./sbin/start-connect-server.sh \
+          
PYTHONPATH="python/lib/pyspark.zip:python/lib/py4j-0.10.9.8-src.zip:$PYTHONPATH"
 ./sbin/start-connect-server.sh \
             --driver-java-options 
"-Dlog4j.configurationFile=file:$GITHUB_WORKSPACE/conf/log4j2.properties" \
             --jars "`find connector/protobuf/target -name 
spark-protobuf-*SNAPSHOT.jar`,`find connector/avro/target -name 
spark-avro*SNAPSHOT.jar`"
 
diff --git a/bin/pyspark b/bin/pyspark
index 2f08f7836915..d719875b3659 100755
--- a/bin/pyspark
+++ b/bin/pyspark
@@ -77,7 +77,7 @@ fi
 
 # Add the PySpark classes to the Python path:
 export PYTHONPATH="${SPARK_HOME}/python/:$PYTHONPATH"
-export PYTHONPATH="${SPARK_HOME}/python/lib/py4j-0.10.9.7-src.zip:$PYTHONPATH"
+export PYTHONPATH="${SPARK_HOME}/python/lib/py4j-0.10.9.8-src.zip:$PYTHONPATH"
 
 # Load the PySpark shell.py script when ./pyspark is used interactively:
 export OLD_PYTHONSTARTUP="$PYTHONSTARTUP"
diff --git a/bin/pyspark2.cmd b/bin/pyspark2.cmd
index 232813b4ffdd..97a440697766 100644
--- a/bin/pyspark2.cmd
+++ b/bin/pyspark2.cmd
@@ -30,7 +30,7 @@ if "x%PYSPARK_DRIVER_PYTHON%"=="x" (
 )
 
 set PYTHONPATH=%SPARK_HOME%\python;%PYTHONPATH%
-set PYTHONPATH=%SPARK_HOME%\python\lib\py4j-0.10.9.7-src.zip;%PYTHONPATH%
+set PYTHONPATH=%SPARK_HOME%\python\lib\py4j-0.10.9.8-src.zip;%PYTHONPATH%
 
 set OLD_PYTHONSTARTUP=%PYTHONSTARTUP%
 set PYTHONSTARTUP=%SPARK_HOME%\python\pyspark\shell.py
diff --git a/core/pom.xml b/core/pom.xml
index 7805a3f37ae5..5bc007fa068a 100644
--- a/core/pom.xml
+++ b/core/pom.xml
@@ -426,7 +426,7 @@
     <dependency>
       <groupId>net.sf.py4j</groupId>
       <artifactId>py4j</artifactId>
-      <version>0.10.9.7</version>
+      <version>0.10.9.8</version>
     </dependency>
     <dependency>
       <groupId>org.apache.spark</groupId>
diff --git a/core/src/main/scala/org/apache/spark/api/python/PythonUtils.scala 
b/core/src/main/scala/org/apache/spark/api/python/PythonUtils.scala
index 045ed0e4c01c..9a944a44f655 100644
--- a/core/src/main/scala/org/apache/spark/api/python/PythonUtils.scala
+++ b/core/src/main/scala/org/apache/spark/api/python/PythonUtils.scala
@@ -34,7 +34,7 @@ import org.apache.spark.util.ArrayImplicits.SparkArrayOps
 import org.apache.spark.util.Utils
 
 private[spark] object PythonUtils extends Logging {
-  val PY4J_ZIP_NAME = "py4j-0.10.9.7-src.zip"
+  val PY4J_ZIP_NAME = "py4j-0.10.9.8-src.zip"
 
   /** Get the PYTHONPATH for PySpark, either from SPARK_HOME, if it is set, or 
from our JAR */
   def sparkPythonPath: String = {
diff --git a/dev/deps/spark-deps-hadoop-3-hive-2.3 
b/dev/deps/spark-deps-hadoop-3-hive-2.3
index a1b9d66f6657..4045ee137f83 100644
--- a/dev/deps/spark-deps-hadoop-3-hive-2.3
+++ b/dev/deps/spark-deps-hadoop-3-hive-2.3
@@ -247,7 +247,7 @@ 
parquet-format-structures/1.15.0//parquet-format-structures-1.15.0.jar
 parquet-hadoop/1.15.0//parquet-hadoop-1.15.0.jar
 parquet-jackson/1.15.0//parquet-jackson-1.15.0.jar
 pickle/1.5//pickle-1.5.jar
-py4j/0.10.9.7//py4j-0.10.9.7.jar
+py4j/0.10.9.8//py4j-0.10.9.8.jar
 remotetea-oncrpc/1.1.2//remotetea-oncrpc-1.1.2.jar
 rocksdbjni/9.7.3//rocksdbjni-9.7.3.jar
 scala-collection-compat_2.13/2.7.0//scala-collection-compat_2.13-2.7.0.jar
diff --git a/python/docs/Makefile b/python/docs/Makefile
index 428b0d24b568..f49adb0df80a 100644
--- a/python/docs/Makefile
+++ b/python/docs/Makefile
@@ -21,7 +21,7 @@ SPHINXBUILD   ?= sphinx-build
 SOURCEDIR     ?= source
 BUILDDIR      ?= build
 
-export PYTHONPATH=$(realpath ..):$(realpath ../lib/py4j-0.10.9.7-src.zip)
+export PYTHONPATH=$(realpath ..):$(realpath ../lib/py4j-0.10.9.8-src.zip)
 
 # Put it first so that "make" without argument is like "make help".
 help:
diff --git a/python/docs/make2.bat b/python/docs/make2.bat
index 41e33cd07d41..4127a045bf2f 100644
--- a/python/docs/make2.bat
+++ b/python/docs/make2.bat
@@ -25,7 +25,7 @@ if "%SPHINXBUILD%" == "" (
 set SOURCEDIR=source
 set BUILDDIR=build
 
-set PYTHONPATH=..;..\lib\py4j-0.10.9.7-src.zip
+set PYTHONPATH=..;..\lib\py4j-0.10.9.8-src.zip
 
 if "%1" == "" goto help
 
diff --git a/python/docs/source/getting_started/install.rst 
b/python/docs/source/getting_started/install.rst
index d0dc285b5257..2b9f28135bb1 100644
--- a/python/docs/source/getting_started/install.rst
+++ b/python/docs/source/getting_started/install.rst
@@ -177,7 +177,7 @@ PySpark requires the following dependencies.
 ========================== ========================= 
=============================
 Package                    Supported version         Note
 ========================== ========================= 
=============================
-`py4j`                     >=0.10.9.7                Required to interact with 
JVM
+`py4j`                     >=0.10.9.8                Required to interact with 
JVM
 ========================== ========================= 
=============================
 
 Additional libraries that enhance functionality but are not included in the 
installation packages:
diff --git a/python/lib/py4j-0.10.9.7-src.zip b/python/lib/py4j-0.10.9.7-src.zip
deleted file mode 100644
index 6abba4efa0f4..000000000000
Binary files a/python/lib/py4j-0.10.9.7-src.zip and /dev/null differ
diff --git a/python/lib/py4j-0.10.9.8-src.zip b/python/lib/py4j-0.10.9.8-src.zip
new file mode 100644
index 000000000000..b587e6d62052
Binary files /dev/null and b/python/lib/py4j-0.10.9.8-src.zip differ
diff --git a/python/packaging/classic/setup.py 
b/python/packaging/classic/setup.py
index d799af121634..09f194278cdc 100755
--- a/python/packaging/classic/setup.py
+++ b/python/packaging/classic/setup.py
@@ -343,7 +343,7 @@ try:
         license="http://www.apache.org/licenses/LICENSE-2.0";,
         # Don't forget to update python/docs/source/getting_started/install.rst
         # if you're updating the versions or dependencies.
-        install_requires=["py4j==0.10.9.7"],
+        install_requires=["py4j==0.10.9.8"],
         extras_require={
             "ml": ["numpy>=%s" % _minimum_numpy_version],
             "mllib": ["numpy>=%s" % _minimum_numpy_version],
diff --git a/sbin/spark-config.sh b/sbin/spark-config.sh
index 0bea4a45040e..825ea0c63c6f 100755
--- a/sbin/spark-config.sh
+++ b/sbin/spark-config.sh
@@ -28,6 +28,6 @@ export 
SPARK_CONF_DIR="${SPARK_CONF_DIR:-"${SPARK_HOME}/conf"}"
 # Add the PySpark classes to the PYTHONPATH:
 if [ -z "${PYSPARK_PYTHONPATH_SET}" ]; then
   export PYTHONPATH="${SPARK_HOME}/python:${PYTHONPATH}"
-  export 
PYTHONPATH="${SPARK_HOME}/python/lib/py4j-0.10.9.7-src.zip:${PYTHONPATH}"
+  export 
PYTHONPATH="${SPARK_HOME}/python/lib/py4j-0.10.9.8-src.zip:${PYTHONPATH}"
   export PYSPARK_PYTHONPATH_SET=1
 fi


---------------------------------------------------------------------
To unsubscribe, e-mail: [email protected]
For additional commands, e-mail: [email protected]

Reply via email to