This is an automated email from the ASF dual-hosted git repository.

ruifengz pushed a commit to branch master
in repository https://gitbox.apache.org/repos/asf/spark.git


The following commit(s) were added to refs/heads/master by this push:
     new 0436d6ca40b [SPARK-41965][PYTHON][DOCS][WIP] Add DataFrameWriterV2 to 
PySpark API references
0436d6ca40b is described below

commit 0436d6ca40b6d90cf9545740fb34a70fc3cbfabc
Author: Ruifeng Zheng <[email protected]>
AuthorDate: Sat Jan 14 11:11:11 2023 +0800

    [SPARK-41965][PYTHON][DOCS][WIP] Add DataFrameWriterV2 to PySpark API 
references
    
    ### What changes were proposed in this pull request?
    Add DataFrameWriterV2 to PySpark API references
    
    ### Why are the changes needed?
    DataFrameWriterV2 was not included in the API references
    
    ### Does this PR introduce _any_ user-facing change?
    doc-only
    
    ### How was this patch tested?
    CI
    
    Closes #39493 from zhengruifeng/python_doc_dfwv2_II.
    
    Authored-by: Ruifeng Zheng <[email protected]>
    Signed-off-by: Ruifeng Zheng <[email protected]>
---
 python/docs/source/reference/pyspark.sql/core_classes.rst |  1 +
 python/docs/source/reference/pyspark.sql/io.rst           | 11 +++++++++++
 python/pyspark/sql/__init__.py                            |  3 ++-
 python/pyspark/sql/readwriter.py                          |  2 +-
 4 files changed, 15 insertions(+), 2 deletions(-)

diff --git a/python/docs/source/reference/pyspark.sql/core_classes.rst 
b/python/docs/source/reference/pyspark.sql/core_classes.rst
index bc4df5087f5..90c5c412797 100644
--- a/python/docs/source/reference/pyspark.sql/core_classes.rst
+++ b/python/docs/source/reference/pyspark.sql/core_classes.rst
@@ -37,5 +37,6 @@ Core Classes
     Window
     DataFrameReader
     DataFrameWriter
+    DataFrameWriterV2
     UDFRegistration
     udf.UserDefinedFunction
diff --git a/python/docs/source/reference/pyspark.sql/io.rst 
b/python/docs/source/reference/pyspark.sql/io.rst
index 52e4593eead..e687ca0d27d 100644
--- a/python/docs/source/reference/pyspark.sql/io.rst
+++ b/python/docs/source/reference/pyspark.sql/io.rst
@@ -52,3 +52,14 @@ Input/Output
     DataFrameWriter.saveAsTable
     DataFrameWriter.sortBy
     DataFrameWriter.text
+    DataFrameWriterV2.using
+    DataFrameWriterV2.option
+    DataFrameWriterV2.options
+    DataFrameWriterV2.tableProperty
+    DataFrameWriterV2.partitionedBy
+    DataFrameWriterV2.create
+    DataFrameWriterV2.replace
+    DataFrameWriterV2.createOrReplace
+    DataFrameWriterV2.append
+    DataFrameWriterV2.overwrite
+    DataFrameWriterV2.overwritePartitions
diff --git a/python/pyspark/sql/__init__.py b/python/pyspark/sql/__init__.py
index 3e967c5695c..22149e8adb8 100644
--- a/python/pyspark/sql/__init__.py
+++ b/python/pyspark/sql/__init__.py
@@ -47,7 +47,7 @@ from pyspark.sql.catalog import Catalog
 from pyspark.sql.dataframe import DataFrame, DataFrameNaFunctions, 
DataFrameStatFunctions
 from pyspark.sql.group import GroupedData
 from pyspark.sql.observation import Observation
-from pyspark.sql.readwriter import DataFrameReader, DataFrameWriter
+from pyspark.sql.readwriter import DataFrameReader, DataFrameWriter, 
DataFrameWriterV2
 from pyspark.sql.window import Window, WindowSpec
 from pyspark.sql.pandas.group_ops import PandasCogroupedOps
 
@@ -69,5 +69,6 @@ __all__ = [
     "WindowSpec",
     "DataFrameReader",
     "DataFrameWriter",
+    "DataFrameWriterV2",
     "PandasCogroupedOps",
 ]
diff --git a/python/pyspark/sql/readwriter.py b/python/pyspark/sql/readwriter.py
index 5f29ccc8013..db6084b826f 100644
--- a/python/pyspark/sql/readwriter.py
+++ b/python/pyspark/sql/readwriter.py
@@ -31,7 +31,7 @@ if TYPE_CHECKING:
     from pyspark.sql.dataframe import DataFrame
     from pyspark.sql.streaming import StreamingQuery
 
-__all__ = ["DataFrameReader", "DataFrameWriter"]
+__all__ = ["DataFrameReader", "DataFrameWriter", "DataFrameWriterV2"]
 
 PathOrPaths = Union[str, List[str]]
 TupleOrListOfString = Union[List[str], Tuple[str, ...]]


---------------------------------------------------------------------
To unsubscribe, e-mail: [email protected]
For additional commands, e-mail: [email protected]

Reply via email to