This is an automated email from the ASF dual-hosted git repository.

xinrong pushed a commit to branch master
in repository https://gitbox.apache.org/repos/asf/spark.git


The following commit(s) were added to refs/heads/master by this push:
     new 29a70117b27 [SPARK-41225][CONNECT][PYTHON][FOLLOW-UP] Disable 
unsupported functions
29a70117b27 is described below

commit 29a70117b272582d11e7b7b8951dff1be91d3de7
Author: Martin Grund <martin.gr...@databricks.com>
AuthorDate: Fri Dec 9 14:55:50 2022 -0800

    [SPARK-41225][CONNECT][PYTHON][FOLLOW-UP] Disable unsupported functions
    
    ### What changes were proposed in this pull request?
    This patch adds method stubs for unsupported functions in the Python client 
for Spark Connect in the `Column` class that will throw a 
`NotImplementedError` when called. This is to give a clear indication to the 
users that these methods will be implemented in the future.
    
    ### Why are the changes needed?
    UX
    
    ### Does this PR introduce _any_ user-facing change?
    NO
    
    ### How was this patch tested?
    UT
    
    Closes #39009 from grundprinzip/SPARK-41225-v2.
    
    Authored-by: Martin Grund <martin.gr...@databricks.com>
    Signed-off-by: Xinrong Meng <xinr...@apache.org>
---
 python/pyspark/sql/connect/column.py               | 36 ++++++++++++++++++++++
 .../sql/tests/connect/test_connect_column.py       | 25 +++++++++++++++
 2 files changed, 61 insertions(+)

diff --git a/python/pyspark/sql/connect/column.py 
b/python/pyspark/sql/connect/column.py
index 63e95c851db..f1a909b89fc 100644
--- a/python/pyspark/sql/connect/column.py
+++ b/python/pyspark/sql/connect/column.py
@@ -786,3 +786,39 @@ class Column:
 
     def __repr__(self) -> str:
         return "Column<'%s'>" % self._expr.__repr__()
+
+    def otherwise(self, *args: Any, **kwargs: Any) -> None:
+        raise NotImplementedError("otherwise() is not yet implemented.")
+
+    def over(self, *args: Any, **kwargs: Any) -> None:
+        raise NotImplementedError("over() is not yet implemented.")
+
+    def isin(self, *args: Any, **kwargs: Any) -> None:
+        raise NotImplementedError("isin() is not yet implemented.")
+
+    def when(self, *args: Any, **kwargs: Any) -> None:
+        raise NotImplementedError("when() is not yet implemented.")
+
+    def getItem(self, *args: Any, **kwargs: Any) -> None:
+        raise NotImplementedError("getItem() is not yet implemented.")
+
+    def astype(self, *args: Any, **kwargs: Any) -> None:
+        raise NotImplementedError("astype() is not yet implemented.")
+
+    def between(self, *args: Any, **kwargs: Any) -> None:
+        raise NotImplementedError("between() is not yet implemented.")
+
+    def getField(self, *args: Any, **kwargs: Any) -> None:
+        raise NotImplementedError("getField() is not yet implemented.")
+
+    def withField(self, *args: Any, **kwargs: Any) -> None:
+        raise NotImplementedError("withField() is not yet implemented.")
+
+    def dropFields(self, *args: Any, **kwargs: Any) -> None:
+        raise NotImplementedError("dropFields() is not yet implemented.")
+
+    def __getitem__(self, k: Any) -> None:
+        raise NotImplementedError("apply() - __getitem__ is not yet 
implemented.")
+
+    def __iter__(self) -> None:
+        raise TypeError("Column is not iterable")
diff --git a/python/pyspark/sql/tests/connect/test_connect_column.py 
b/python/pyspark/sql/tests/connect/test_connect_column.py
index c73f1b5b0c7..734b0bbf226 100644
--- a/python/pyspark/sql/tests/connect/test_connect_column.py
+++ b/python/pyspark/sql/tests/connect/test_connect_column.py
@@ -119,6 +119,31 @@ class SparkConnectTests(SparkConnectSQLTestCase):
                 df.select(df.id.cast(x)).toPandas(), 
df2.select(df2.id.cast(x)).toPandas()
             )
 
+    def test_unsupported_functions(self):
+        # SPARK-41225: Disable unsupported functions.
+        c = self.connect.range(1).id
+        for f in (
+            "otherwise",
+            "over",
+            "isin",
+            "when",
+            "getItem",
+            "astype",
+            "between",
+            "getField",
+            "withField",
+            "dropFields",
+        ):
+            with self.assertRaises(NotImplementedError):
+                getattr(c, f)()
+
+        with self.assertRaises(NotImplementedError):
+            c["a"]
+
+        with self.assertRaises(TypeError):
+            for x in c:
+                pass
+
 
 if __name__ == "__main__":
     import unittest


---------------------------------------------------------------------
To unsubscribe, e-mail: commits-unsubscr...@spark.apache.org
For additional commands, e-mail: commits-h...@spark.apache.org

Reply via email to