techaddict commented on code in PR #39249:
URL: https://github.com/apache/spark/pull/39249#discussion_r1058694818
##########
python/pyspark/sql/connect/column.py:
##########
@@ -390,3 +391,61 @@ def __nonzero__(self) -> None:
Column.__doc__ = PySparkColumn.__doc__
+
+
+def _test() -> None:
+ import os
+ import sys
+ import doctest
+ from pyspark.sql import SparkSession as PySparkSession
+ from pyspark.testing.connectutils import should_test_connect,
connect_requirement_message
+
+ os.chdir(os.environ["SPARK_HOME"])
+
+ if should_test_connect:
+ import pyspark.sql.connect.column
+
+ globs = pyspark.sql.connect.column.__dict__.copy()
+ # Works around to create a regular Spark session
+ sc = SparkContext("local[4]", "sql.connect.column tests",
conf=SparkConf())
+ globs["_spark"] = PySparkSession(sc, options={"spark.app.name":
"sql.connect.column tests"})
+
+ # Creates a remote Spark session.
+ os.environ["SPARK_REMOTE"] = "sc://localhost"
+ globs["spark"] =
PySparkSession.builder.remote("sc://localhost").getOrCreate()
+
+ # TODO(SPARK-41751): Support bitwiseAND
+ del pyspark.sql.connect.column.Column.bitwiseAND.__doc__
+ del pyspark.sql.connect.column.Column.bitwiseOR.__doc__
+ del pyspark.sql.connect.column.Column.bitwiseXOR.__doc__
+ del pyspark.sql.connect.column.Column.eqNullSafe.__doc__
+ del pyspark.sql.connect.column.Column.isNotNull.__doc__
+ del pyspark.sql.connect.column.Column.isNull.__doc__
+ del pyspark.sql.connect.column.Column.isin.__doc__
+ # TODO(SPARK-41756): Fix createDataFrame
+ del pyspark.sql.connect.column.Column.getField.__doc__
+ del pyspark.sql.connect.column.Column.getItem.__doc__
+ # TODO(SPARK-41292): Support Window functions
Review Comment:
Right, it failed — let me file a new JIRA:
```
Failed example:
    window = Window.partitionBy("name").orderBy("age").rowsBetween(Window.unboundedPreceding, Window.currentRow)
Exception raised:
Traceback (most recent call last):
File
"/usr/local/Cellar/[email protected]/3.10.8/Frameworks/Python.framework/Versions/3.10/lib/python3.10/doctest.py",
line 1350, in __run
exec(compile(example.source, filename, "single",
File "<doctest pyspark.sql.connect.column.Column.over[1]>", line 1, in
<module>
    window = Window.partitionBy("name").orderBy("age").rowsBetween(Window.unboundedPreceding, Window.currentRow)
File "/Users/s.singh/personal/spark-oss/python/pyspark/sql/utils.py",
line 346, in wrapped
raise NotImplementedError()
NotImplementedError
```
--
This is an automated message from the Apache Git Service.
To respond to the message, please log on to GitHub and use the
URL above to go to the specific comment.
To unsubscribe, e-mail: [email protected]
For queries about this service, please contact Infrastructure at:
[email protected]
---------------------------------------------------------------------
To unsubscribe, e-mail: [email protected]
For additional commands, e-mail: [email protected]