HyukjinKwon commented on code in PR #38212:
URL: https://github.com/apache/spark/pull/38212#discussion_r994595663


##########
python/pyspark/sql/protobuf/functions.py:
##########
@@ -0,0 +1,189 @@
+#
+# Licensed to the Apache Software Foundation (ASF) under one or more
+# contributor license agreements.  See the NOTICE file distributed with
+# this work for additional information regarding copyright ownership.
+# The ASF licenses this file to You under the Apache License, Version 2.0
+# (the "License"); you may not use this file except in compliance with
+# the License.  You may obtain a copy of the License at
+#
+#    http://www.apache.org/licenses/LICENSE-2.0
+#
+# Unless required by applicable law or agreed to in writing, software
+# distributed under the License is distributed on an "AS IS" BASIS,
+# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+# See the License for the specific language governing permissions and
+# limitations under the License.
+#
+
+"""
+A collection of built-in Protobuf functions.
+"""
+
+
+from typing import Dict, Optional, TYPE_CHECKING
+from pyspark import SparkContext
+from pyspark.sql.column import Column, _to_java_column
+from pyspark.util import _print_missing_jar
+
+if TYPE_CHECKING:
+    from pyspark.sql._typing import ColumnOrName
+
+
+def from_protobuf(
+    data: "ColumnOrName",
+    descFilePath: str,
+    messageName: str,
+    options: Optional[Dict[str, str]] = None,
+) -> Column:
+    """
+    Converts a binary column of Protobuf format into its corresponding Catalyst value.
+    The specified schema must match the data read; otherwise the behavior is undefined:
+    it may fail or return an arbitrary result.
+    To deserialize the data with a compatible and evolved schema, the expected
+    Protobuf schema can be set via the Protobuf descriptor file option.
+
+    .. versionadded:: 3.4.0
+
+    Parameters
+    ----------
+    data : :class:`~pyspark.sql.Column` or str
+        the binary column.
+    descFilePath : str
+        the path to the Protobuf descriptor file, in Message `GeneratedMessageV3` format.
+    messageName : str
+        the Protobuf message name to look for in the descriptor file.
+    options : dict, optional
+        options to control how the Protobuf record is parsed.
+
+    Notes
+    -----
+    Protobuf is a built-in but external data source module since Spark 3.4.0. Please deploy the
+    application as per the deployment section of the "Protobuf Data Source Guide".
+
+    Examples
+    --------
+    >>> from pyspark.sql import Row
+    >>> from pyspark.sql.types import *
+    >>> from pyspark.sql.protobuf.functions import from_protobuf, to_protobuf
+    >>> data = [Row(key="1", value=Row(age=2, name="Alice", score=109200))]
+    >>> schema = StructType([StructField("key", StringType(), False), \
+    StructField("value", StructType([StructField("age", IntegerType(), False), \
+    StructField("name", StringType(), False), StructField("score", LongType(), False)]), False)])
+    >>> df = spark.createDataFrame(data, schema)
+    >>> descFilePath = 'connector/protobuf/src/test/resources/protobuf/pyspark_test.desc'
+    >>> messageName = 'SimpleMessage'
+    >>> protobufDf = df.select(to_protobuf(df.value, descFilePath, messageName).alias("protobuf"))
+    >>> protobufDf.collect()
+    [Row(protobuf=bytearray(b'\\x08\\x02\\x12\\x05Alice\\x18\\x90\\xd5\\x06'))]
+
+    >>> descFilePath = 'connector/protobuf/src/test/resources/protobuf/pyspark_test.desc'
+    >>> messageName = 'SimpleMessage'
+    >>> df = protobufDf.select(from_protobuf(protobufDf.protobuf, \
+    descFilePath, messageName).alias("value"))
+    >>> df.collect()

Review Comment:
   Let's show `show()`.
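   
   For reference, a minimal sketch of what the doctest could look like with `show()`
   in place of `collect()`; the rendered table below is assumed from the example data
   above, not verified output:
   
       >>> df.show()
       +------------------+
       |             value|
       +------------------+
       |{2, Alice, 109200}|
       +------------------+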



##########
python/pyspark/sql/protobuf/functions.py:
##########
@@ -0,0 +1,189 @@
+#
+# Licensed to the Apache Software Foundation (ASF) under one or more
+# contributor license agreements.  See the NOTICE file distributed with
+# this work for additional information regarding copyright ownership.
+# The ASF licenses this file to You under the Apache License, Version 2.0
+# (the "License"); you may not use this file except in compliance with
+# the License.  You may obtain a copy of the License at
+#
+#    http://www.apache.org/licenses/LICENSE-2.0
+#
+# Unless required by applicable law or agreed to in writing, software
+# distributed under the License is distributed on an "AS IS" BASIS,
+# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+# See the License for the specific language governing permissions and
+# limitations under the License.
+#
+
+"""
+A collection of built-in Protobuf functions.
+"""
+
+
+from typing import Dict, Optional, TYPE_CHECKING
+from pyspark import SparkContext
+from pyspark.sql.column import Column, _to_java_column
+from pyspark.util import _print_missing_jar
+
+if TYPE_CHECKING:
+    from pyspark.sql._typing import ColumnOrName
+
+
+def from_protobuf(
+    data: "ColumnOrName",
+    descFilePath: str,
+    messageName: str,
+    options: Optional[Dict[str, str]] = None,
+) -> Column:
+    """
+    Converts a binary column of Protobuf format into its corresponding Catalyst value.
+    The specified schema must match the data read; otherwise the behavior is undefined:
+    it may fail or return an arbitrary result.
+    To deserialize the data with a compatible and evolved schema, the expected
+    Protobuf schema can be set via the Protobuf descriptor file option.
+
+    .. versionadded:: 3.4.0
+
+    Parameters
+    ----------
+    data : :class:`~pyspark.sql.Column` or str
+        the binary column.
+    descFilePath : str
+        the path to the Protobuf descriptor file, in Message `GeneratedMessageV3` format.
+    messageName : str
+        the Protobuf message name to look for in the descriptor file.
+    options : dict, optional
+        options to control how the Protobuf record is parsed.
+
+    Notes
+    -----
+    Protobuf is a built-in but external data source module since Spark 3.4.0. Please deploy the
+    application as per the deployment section of the "Protobuf Data Source Guide".
+
+    Examples
+    --------
+    >>> from pyspark.sql import Row
+    >>> from pyspark.sql.types import *
+    >>> from pyspark.sql.protobuf.functions import from_protobuf, to_protobuf
+    >>> data = [Row(key="1", value=Row(age=2, name="Alice", score=109200))]
+    >>> schema = StructType([StructField("key", StringType(), False), \
+    StructField("value", StructType([StructField("age", IntegerType(), False), \
+    StructField("name", StringType(), False), StructField("score", LongType(), False)]), False)])
+    >>> df = spark.createDataFrame(data, schema)
+    >>> descFilePath = 'connector/protobuf/src/test/resources/protobuf/pyspark_test.desc'
+    >>> messageName = 'SimpleMessage'
+    >>> protobufDf = df.select(to_protobuf(df.value, descFilePath, messageName).alias("protobuf"))
+    >>> protobufDf.collect()

Review Comment:
   ditto let's use `show()`
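   
   Likewise, a sketch of the `show()` form for the binary column; the hex rendering
   and 20-character truncation shown are assumed from Spark's default display of
   binary values, not verified output:
   
       >>> protobufDf.show()
       +--------------------+
       |            protobuf|
       +--------------------+
       |[08 02 12 05 41 6...|
       +--------------------+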



-- 
This is an automated message from the Apache Git Service.
To respond to the message, please log on to GitHub and use the
URL above to go to the specific comment.

To unsubscribe, e-mail: [email protected]

For queries about this service, please contact Infrastructure at:
[email protected]

