This is an automated email from the ASF dual-hosted git repository.

dongjoon pushed a commit to branch master
in repository https://gitbox.apache.org/repos/asf/spark.git


The following commit(s) were added to refs/heads/master by this push:
     new 209222193a04 [SPARK-55407][PYSPARK] Replace logger.warn with logger.warning
209222193a04 is described below

commit 209222193a04e883dd0ed1783c51e92f3e3f65ae
Author: Tian Gao <[email protected]>
AuthorDate: Sun Feb 8 12:31:45 2026 -0800

    [SPARK-55407][PYSPARK] Replace logger.warn with logger.warning
    
    ### What changes were proposed in this pull request?
    
    Replace all `logger.warn` usage with `logger.warning`
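
    For context, any remaining call sites can be located with a small script along these lines (a standalone sketch, not part of the patch; the search path is an assumption about the repo layout):

    ```python
    # Hypothetical checker: list any remaining `logger.warn(` call sites.
    import pathlib
    import re

    PATTERN = re.compile(r"\blogger\.warn\(")  # will not match logger.warning(

    for path in pathlib.Path("python/pyspark").rglob("*.py"):
        for lineno, line in enumerate(path.read_text().splitlines(), start=1):
            if PATTERN.search(line):
                print(f"{path}:{lineno}: {line.strip()}")
    ```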
    
    ### Why are the changes needed?
    
    `logger.warn` is obsolete and should not be used anymore: https://docs.python.org/3/library/logging.html#logging.Logger.warning
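
    As a minimal standalone illustration (plain `logging`, independent of PySpark; `DeprecationWarning`s are hidden by default, so the sketch surfaces them explicitly):

    ```python
    import logging
    import warnings

    logging.basicConfig(level=logging.WARNING)
    logger = logging.getLogger("example")

    # Documented API: emits a WARNING-level record.
    logger.warning("something looks off: %s", "details")

    # `warn` is an obsolete alias for `warning`; recent CPython versions
    # also emit a DeprecationWarning when it is called.
    with warnings.catch_warnings(record=True) as caught:
        warnings.simplefilter("always")
        logger.warn("same record, deprecated spelling")
        print([str(w.message) for w in caught])
    ```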
    
    ### Does this PR introduce _any_ user-facing change?
    
    No.
    
    ### How was this patch tested?
    
    No behavioral change; the existing logger tests were updated for the new method name.
    
    ### Was this patch authored or co-authored using generative AI tooling?
    
    No.
    
    Closes #54190 from gaogaotiantian/fix-warn.
    
    Authored-by: Tian Gao <[email protected]>
    Signed-off-by: Dongjoon Hyun <[email protected]>
---
 python/pyspark/logger/tests/test_logger.py    | 4 ++--
 python/pyspark/sql/connect/client/reattach.py | 4 ++--
 python/pyspark/sql/connect/dataframe.py       | 4 ++--
 python/pyspark/sql/connect/plan.py            | 2 +-
 python/pyspark/sql/connect/session.py         | 6 +++---
 5 files changed, 10 insertions(+), 10 deletions(-)

diff --git a/python/pyspark/logger/tests/test_logger.py b/python/pyspark/logger/tests/test_logger.py
index 9a331b49c690..28bd29b1ca33 100644
--- a/python/pyspark/logger/tests/test_logger.py
+++ b/python/pyspark/logger/tests/test_logger.py
@@ -71,8 +71,8 @@ class LoggerTestsMixin:
         self.assertTrue("msg" in log_json["exception"])
         self.assertTrue("stacktrace" in log_json["exception"])
 
-    def test_log_warn(self):
-        self.logger.warn("This is an warn log", user="test_user_warn", action="test_action_warn")
+    def test_log_warning(self):
+        self.logger.warning("This is an warn log", user="test_user_warn", action="test_action_warn")
         log_json = json.loads(self.handler.stream.getvalue().strip())
 
         self.assertEqual(log_json["msg"], "This is an warn log")
diff --git a/python/pyspark/sql/connect/client/reattach.py b/python/pyspark/sql/connect/client/reattach.py
index 2abe95bd2510..e9108bc4c809 100644
--- a/python/pyspark/sql/connect/client/reattach.py
+++ b/python/pyspark/sql/connect/client/reattach.py
@@ -212,7 +212,7 @@ class ExecutePlanResponseReattachableIterator(Generator):
                     with attempt:
                         self._stub.ReleaseExecute(request, metadata=self._metadata)
             except Exception as e:
-                logger.warn(f"ReleaseExecute failed with exception: {e}.")
+                logger.warning(f"ReleaseExecute failed with exception: {e}.")
 
         with self._lock:
             if self._release_thread_pool_instance is not None:
@@ -239,7 +239,7 @@ class ExecutePlanResponseReattachableIterator(Generator):
                     with attempt:
                         self._stub.ReleaseExecute(request, metadata=self._metadata)
             except Exception as e:
-                logger.warn(f"ReleaseExecute failed with exception: {e}.")
+                logger.warning(f"ReleaseExecute failed with exception: {e}.")
 
         with self._lock:
             if self._release_thread_pool_instance is not None:
diff --git a/python/pyspark/sql/connect/dataframe.py b/python/pyspark/sql/connect/dataframe.py
index 0df13c1020d7..7efb0887e573 100644
--- a/python/pyspark/sql/connect/dataframe.py
+++ b/python/pyspark/sql/connect/dataframe.py
@@ -1901,7 +1901,7 @@ class DataFrame(ParentDataFrame):
             try:
                 self._cached_schema_serialized = CPickleSerializer().dumps(self._schema)
             except Exception as e:
-                logger.warn(f"DataFrame schema pickle dumps failed with 
exception: {e}.")
+                logger.warning(f"DataFrame schema pickle dumps failed with 
exception: {e}.")
                 self._cached_schema_serialized = None
         return self._cached_schema
 
@@ -1913,7 +1913,7 @@ class DataFrame(ParentDataFrame):
             try:
                 return CPickleSerializer().loads(self._cached_schema_serialized)
             except Exception as e:
-                logger.warn(f"DataFrame schema pickle loads failed with 
exception: {e}.")
+                logger.warning(f"DataFrame schema pickle loads failed with 
exception: {e}.")
         # In case of pickle ser/de failure, fallback to deepcopy approach.
         return copy.deepcopy(_schema)
 
diff --git a/python/pyspark/sql/connect/plan.py b/python/pyspark/sql/connect/plan.py
index ca470598de76..8c214ea5da2b 100644
--- a/python/pyspark/sql/connect/plan.py
+++ b/python/pyspark/sql/connect/plan.py
@@ -733,7 +733,7 @@ class CachedRemoteRelation(LogicalPlan):
                         metadata = session.client._builder.metadata()
                         channel(req, metadata=metadata)  # type: ignore[arg-type]
             except Exception as e:
-                logger.warn(f"RemoveRemoteCachedRelation failed with 
exception: {e}.")
+                logger.warning(f"RemoveRemoteCachedRelation failed with 
exception: {e}.")
 
 
 class Hint(LogicalPlan):
diff --git a/python/pyspark/sql/connect/session.py b/python/pyspark/sql/connect/session.py
index 572be49dd307..8c73c5dcee41 100644
--- a/python/pyspark/sql/connect/session.py
+++ b/python/pyspark/sql/connect/session.py
@@ -943,12 +943,12 @@ class SparkSession:
                 try:
                     self.client.release_session()
                 except Exception as e:
-                    logger.warn(f"session.stop(): Session could not be 
released. Error: ${e}")
+                    logger.warning(f"session.stop(): Session could not be 
released. Error: ${e}")
 
             try:
                 self.client.close()
             except Exception as e:
-                logger.warn(f"session.stop(): Client could not be closed. 
Error: ${e}")
+                logger.warning(f"session.stop(): Client could not be closed. 
Error: ${e}")
 
             if self is SparkSession._default_session:
                 SparkSession._default_session = None
@@ -964,7 +964,7 @@ class SparkSession:
                     try:
                         PySparkSession._activeSession.stop()
                     except Exception as e:
-                        logger.warn(
+                        logger.warning(
                             "session.stop(): Local Spark Connect Server could 
not be stopped. "
                             f"Error: ${e}"
                         )


---------------------------------------------------------------------
To unsubscribe, e-mail: [email protected]
For additional commands, e-mail: [email protected]
