zero323 commented on a change in pull request #35399:
URL: https://github.com/apache/spark/pull/35399#discussion_r799927023
##########
File path: python/pyspark/ml/wrapper.py
##########
@@ -17,57 +17,76 @@
from abc import ABCMeta, abstractmethod
+from typing import Any, Generic, Optional, List, Type, TypeVar, TYPE_CHECKING
+
from pyspark import since
from pyspark import SparkContext
from pyspark.sql import DataFrame
from pyspark.ml import Estimator, Predictor, PredictionModel, Transformer, Model
from pyspark.ml.base import _PredictorParams
-from pyspark.ml.param import Params
-from pyspark.ml.util import _jvm
+from pyspark.ml.param import Param, Params
+from pyspark.ml.util import _jvm # type: ignore[attr-defined]
from pyspark.ml.common import inherit_doc, _java2py, _py2java
+if TYPE_CHECKING:
+ from pyspark.ml._typing import ParamMap
+ from py4j.java_gateway import JavaObject, JavaClass
+
+
+T = TypeVar("T")
+JW = TypeVar("JW", bound="JavaWrapper")
+JM = TypeVar("JM", bound="JavaTransformer")
+JP = TypeVar("JP", bound="JavaParams")
+
+
class JavaWrapper:
"""
Wrapper class for a Java companion object
"""
- def __init__(self, java_obj=None):
+ def __init__(self, java_obj: Optional["JavaObject"] = None):
super(JavaWrapper, self).__init__()
self._java_obj = java_obj
- def __del__(self):
+ def __del__(self) -> None:
if SparkContext._active_spark_context and self._java_obj is not None:
- SparkContext._active_spark_context._gateway.detach(self._java_obj)
+            SparkContext._active_spark_context._gateway.detach(  # type: ignore[union-attr]
Review comment:
Making an exception to the `assert ... is not None` pattern here, to improve readability.
--
This is an automated message from the Apache Git Service.
To respond to the message, please log on to GitHub and use the
URL above to go to the specific comment.
To unsubscribe, e-mail: [email protected]
For queries about this service, please contact Infrastructure at:
[email protected]
---------------------------------------------------------------------
To unsubscribe, e-mail: [email protected]
For additional commands, e-mail: [email protected]