HyukjinKwon commented on code in PR #39347:
URL: https://github.com/apache/spark/pull/39347#discussion_r1060221643
##########
python/pyspark/sql/connect/functions.py:
##########
@@ -2317,3 +2318,141 @@ def unwrap_udt(col: "ColumnOrName") -> Column:
unwrap_udt.__doc__ = pysparkfuncs.unwrap_udt.__doc__
+
+
+def _test() -> None:
+    import os
+    import sys
+    import doctest
+    from pyspark import SparkConf, SparkContext
+    from pyspark.sql import SparkSession as PySparkSession
+    from pyspark.testing.connectutils import should_test_connect, connect_requirement_message
+
+    os.chdir(os.environ["SPARK_HOME"])
+
+    if should_test_connect:
+        import pyspark.sql.connect.functions
+
+        globs = pyspark.sql.connect.functions.__dict__.copy()
+        # Workaround to create a regular Spark session
+        sc = SparkContext("local[4]", "sql.connect.functions tests", conf=SparkConf())
+        globs["_spark"] = PySparkSession(
+            sc, options={"spark.app.name": "sql.connect.functions tests"}
+        )
+
+        # TODO(SPARK-41833): Fix collect() output
+        del pyspark.sql.connect.functions.array.__doc__
+        del pyspark.sql.connect.functions.array_distinct.__doc__
+        del pyspark.sql.connect.functions.array_except.__doc__
+        del pyspark.sql.connect.functions.array_intersect.__doc__
+        del pyspark.sql.connect.functions.array_remove.__doc__
+        del pyspark.sql.connect.functions.array_repeat.__doc__
+        del pyspark.sql.connect.functions.array_sort.__doc__
+        del pyspark.sql.connect.functions.array_union.__doc__
+        del pyspark.sql.connect.functions.collect_list.__doc__
+        del pyspark.sql.connect.functions.collect_set.__doc__
+        del pyspark.sql.connect.functions.concat.__doc__
+        del pyspark.sql.connect.functions.create_map.__doc__
+        del pyspark.sql.connect.functions.date_trunc.__doc__
+        del pyspark.sql.connect.functions.from_utc_timestamp.__doc__
+        del pyspark.sql.connect.functions.from_csv.__doc__
+        del pyspark.sql.connect.functions.from_json.__doc__
+        del pyspark.sql.connect.functions.isnull.__doc__
+        del pyspark.sql.connect.functions.reverse.__doc__
+        del pyspark.sql.connect.functions.sequence.__doc__
+        del pyspark.sql.connect.functions.slice.__doc__
+        del pyspark.sql.connect.functions.sort_array.__doc__
+        del pyspark.sql.connect.functions.split.__doc__
+        del pyspark.sql.connect.functions.struct.__doc__
+        del pyspark.sql.connect.functions.to_timestamp.__doc__
+        del pyspark.sql.connect.functions.to_utc_timestamp.__doc__
+        del pyspark.sql.connect.functions.unhex.__doc__
+
+        # TODO(SPARK-41825): DataFrame.show formats int as double
+        del pyspark.sql.connect.functions.coalesce.__doc__
+        del pyspark.sql.connect.functions.sum_distinct.__doc__
+
+        # TODO(SPARK-41834): Implement DataFrame.conf
+        del pyspark.sql.connect.functions.from_unixtime.__doc__
+        del pyspark.sql.connect.functions.timestamp_seconds.__doc__
+        del pyspark.sql.connect.functions.unix_timestamp.__doc__
+
+        # TODO(SPARK-41757): Fix the string representation of the Column class
+        del pyspark.sql.connect.functions.col.__doc__
+
+        # TODO: Support data type Timestamp(NANOSECOND, null)
+        del pyspark.sql.connect.functions.hour.__doc__
+        del pyspark.sql.connect.functions.minute.__doc__
+        del pyspark.sql.connect.functions.second.__doc__
+        del pyspark.sql.connect.functions.window.__doc__
+        del pyspark.sql.connect.functions.window_time.__doc__
+
+        # TODO(SPARK-41838): Fix Dataset.show
+        del pyspark.sql.connect.functions.posexplode_outer.__doc__
+        del pyspark.sql.connect.functions.explode_outer.__doc__
+
+        # TODO(SPARK-41837): Fix the createDataFrame data type conversion error
+        del pyspark.sql.connect.functions.to_csv.__doc__
+        del pyspark.sql.connect.functions.to_json.__doc__
+
+        # TODO(SPARK-41835): Implement the `transform_keys` function
+        del pyspark.sql.connect.functions.transform_keys.__doc__
+
+        # TODO(SPARK-41836): Implement the `transform_values` function
+        del pyspark.sql.connect.functions.transform_values.__doc__
+
+        # TODO(SPARK-41839): Implement SparkSession.sparkContext
+        del pyspark.sql.connect.functions.monotonically_increasing_id.__doc__
+
+        # TODO(SPARK-41840): Fix "'Column' object is not callable"
+        del pyspark.sql.connect.functions.first.__doc__
+        del pyspark.sql.connect.functions.last.__doc__
+        del pyspark.sql.connect.functions.max_by.__doc__
+        del pyspark.sql.connect.functions.median.__doc__
+        del pyspark.sql.connect.functions.min_by.__doc__
+
+        del pyspark.sql.connect.functions.broadcast.__doc__
Review Comment:
Were they all also because of SPARK-41840?
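
For context on the pattern under review: doctest collects examples from a function's docstring, so deleting `__doc__` leaves nothing to collect and that function's examples are silently skipped, while the rest of the module still runs under `doctest.testmod`. A minimal standalone sketch of the mechanism (the `add` and `sub` functions are hypothetical stand-ins, not part of this PR):

    import doctest


    def add(a: int, b: int) -> int:
        """Add two numbers.

        >>> add(1, 2)
        3
        """
        return a + b


    def sub(a: int, b: int) -> int:
        """Subtract b from a; the expected output below is deliberately wrong.

        >>> sub(5, 2)
        999
        """
        return a - b


    # A function's __doc__ is a deletable attribute (it reads back as None
    # afterwards), so doctest finds no examples on sub and skips it entirely.
    # The failing example above therefore never runs; this is the same trick
    # the PR uses to exclude not-yet-passing docstrings from testmod().
    del sub.__doc__

    results = doctest.testmod()
    assert (results.attempted, results.failed) == (1, 0)

In effect, the block of `del` statements in the diff is a per-function skip list, which is why each group is annotated with the JIRA ticket that must be fixed before the corresponding doctests can be re-enabled.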
--
This is an automated message from the Apache Git Service.
To respond to the message, please log on to GitHub and use the URL above to go to the specific comment.
To unsubscribe, e-mail: [email protected]
For queries about this service, please contact Infrastructure at:
[email protected]