HyukjinKwon commented on a change in pull request #33954:
URL: https://github.com/apache/spark/pull/33954#discussion_r706067769



##########
File path: python/pyspark/pandas/typedef/typehints.py
##########
@@ -538,6 +634,165 @@ def infer_return_type(f: Callable) -> Union[SeriesType, DataFrameType, ScalarTyp
         return ScalarType(*types)
 
 
+# TODO: once pandas exposes a typing module like numpy.typing, we should deprecate
+#   this logic and migrate to it by implementing the typing module in pandas API on Spark.
+
+
+def create_type_for_series_type(param: Any) -> Type[SeriesType]:
+    """
+    Supported syntax:
+
+    >>> str(pd.Series[float]).endswith("SeriesType[float]")
+    True
+    """
+    from pyspark.pandas.typedef import NameTypeHolder
+
+    if isinstance(param, ExtensionDtype):
+        new_class = type("NameType", (NameTypeHolder,), {})  # type: Type[NameTypeHolder]
+        new_class.tpe = param
+    else:
+        new_class = param.type if isinstance(param, np.dtype) else param
+
+    return SeriesType[new_class]  # type: ignore
+
+
+# TODO: Remove this variadic-generic hack using tuple once we drop support for Python 3.9
+#   and below. See also PEP 646. One problem is that pandas doesn't inherit Generic[T],
+#   so we might have to keep this hack only for monkey-patching pandas DataFrame.
+def create_tuple_for_frame_type(params: Any) -> object:

Review comment:
       MultiIndex will be handled separately in SPARK-36711.
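
       For context, here is a minimal standalone sketch (not part of this patch, and not the reviewer's code) that mirrors the branching in `create_type_for_series_type` above. The `*Sketch` names are hypothetical stand-ins for `SeriesType` and `NameTypeHolder` from `pyspark.pandas.typedef`, so the snippet runs with only pandas and NumPy installed.

       ```python
       from typing import Any, Generic, TypeVar

       import numpy as np
       import pandas as pd
       from pandas.api.extensions import ExtensionDtype

       T = TypeVar("T")


       class NameTypeHolderSketch:
           """Stand-in for pyspark.pandas.typedef.NameTypeHolder; holds a resolved dtype."""

           tpe: Any = None


       class SeriesTypeSketch(Generic[T]):
           """Stand-in for pyspark.pandas.typedef.SeriesType."""


       def create_type_for_series_type_sketch(param: Any) -> Any:
           # Same branching as the patch: ExtensionDtype instances are wrapped in a
           # freshly created holder class so the dtype survives as a class attribute;
           # np.dtype instances are unwrapped to their scalar type; plain Python or
           # NumPy types pass through unchanged.
           if isinstance(param, ExtensionDtype):
               new_class: Any = type("NameType", (NameTypeHolderSketch,), {})
               new_class.tpe = param
           else:
               new_class = param.type if isinstance(param, np.dtype) else param
           return SeriesTypeSketch[new_class]


       print(create_type_for_series_type_sketch(float))
       print(create_type_for_series_type_sketch(np.dtype("int64")))
       print(create_type_for_series_type_sketch(pd.CategoricalDtype()))
       ```

       The runtime `type("NameType", ...)` call creates a distinct holder class per parameter, which is why extension dtypes (which are instances, not classes) can still be carried inside a `Generic` subscription.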



