This is an automated email from the ASF dual-hosted git repository.

dongjoon pushed a commit to branch master
in repository https://gitbox.apache.org/repos/asf/spark.git


The following commit(s) were added to refs/heads/master by this push:
     new b724955bb7c2 [SPARK-45446][PYTHON] Fix imports according to PEP8: 
pyspark.errors and pyspark.ml
b724955bb7c2 is described below

commit b724955bb7c2995fbbd9c7fe550e44f16397cb5b
Author: Hyukjin Kwon <[email protected]>
AuthorDate: Sat Oct 7 15:20:22 2023 -0700

    [SPARK-45446][PYTHON] Fix imports according to PEP8: pyspark.errors and 
pyspark.ml
    
    ### What changes were proposed in this pull request?
    
    This PR proposes to fix imports according to PEP8 in `pyspark.errors` and 
`pyspark.ml`, see https://peps.python.org/pep-0008/#imports.
    
    ### Why are the changes needed?
    
    I have not been fixing them as they are too minor. However, this practice 
is being propagated across all PySpark packages, and I think we should 
fix them all so other users do not follow the non-standard practice.
    
    ### Does this PR introduce _any_ user-facing change?
    
    No.
    
    ### How was this patch tested?
    
    Existing linters and tests should cover this change.
    
    ### Was this patch authored or co-authored using generative AI tooling?
    
    No.
    
    Closes #43255 from HyukjinKwon/SPARK-45446.
    
    Authored-by: Hyukjin Kwon <[email protected]>
    Signed-off-by: Dongjoon Hyun <[email protected]>
---
 python/pyspark/errors/exceptions/connect.py        |  1 -
 python/pyspark/ml/_typing.pyi                      |  2 +-
 python/pyspark/ml/base.py                          |  2 --
 python/pyspark/ml/classification.py                |  2 --
 python/pyspark/ml/clustering.py                    |  1 -
 python/pyspark/ml/common.py                        |  6 ++---
 python/pyspark/ml/connect/__init__.py              |  2 --
 python/pyspark/ml/connect/base.py                  |  5 ++--
 python/pyspark/ml/connect/classification.py        | 17 ++++++--------
 python/pyspark/ml/connect/evaluation.py            |  3 ++-
 python/pyspark/ml/connect/feature.py               |  5 ++--
 python/pyspark/ml/connect/io_utils.py              |  3 +--
 python/pyspark/ml/connect/pipeline.py              |  3 ++-
 python/pyspark/ml/connect/summarizer.py            |  3 ++-
 python/pyspark/ml/connect/tuning.py                |  2 --
 python/pyspark/ml/connect/util.py                  |  3 ++-
 python/pyspark/ml/evaluation.py                    |  1 -
 python/pyspark/ml/linalg/__init__.py               | 27 +++++++++++-----------
 python/pyspark/ml/regression.py                    |  2 --
 python/pyspark/ml/stat.py                          |  2 --
 .../tests/connect/test_connect_classification.py   |  1 +
 .../ml/tests/connect/test_connect_evaluation.py    |  1 +
 .../ml/tests/connect/test_connect_feature.py       |  1 +
 .../ml/tests/connect/test_connect_function.py      |  1 -
 .../ml/tests/connect/test_connect_pipeline.py      |  1 +
 .../ml/tests/connect/test_connect_summarizer.py    |  1 +
 .../ml/tests/connect/test_connect_tuning.py        |  2 +-
 .../connect/test_legacy_mode_classification.py     |  2 ++
 .../tests/connect/test_legacy_mode_evaluation.py   |  3 ++-
 .../ml/tests/connect/test_legacy_mode_feature.py   |  3 ++-
 .../ml/tests/connect/test_legacy_mode_pipeline.py  |  2 ++
 .../tests/connect/test_legacy_mode_summarizer.py   |  1 +
 .../ml/tests/connect/test_legacy_mode_tuning.py    |  1 +
 .../tests/connect/test_parity_torch_data_loader.py |  2 +-
 .../tests/connect/test_parity_torch_distributor.py |  1 -
 python/pyspark/ml/tests/test_dl_util.py            |  3 ++-
 python/pyspark/ml/tests/test_functions.py          |  3 ++-
 python/pyspark/ml/tests/test_model_cache.py        |  2 +-
 python/pyspark/ml/tests/tuning/test_tuning.py      |  1 +
 python/pyspark/ml/torch/data.py                    |  4 +++-
 python/pyspark/ml/torch/tests/test_data_loader.py  |  3 ++-
 python/pyspark/ml/tuning.py                        |  2 --
 python/pyspark/ml/util.py                          |  2 --
 python/pyspark/ml/wrapper.py                       |  1 -
 44 files changed, 66 insertions(+), 70 deletions(-)

diff --git a/python/pyspark/errors/exceptions/connect.py 
b/python/pyspark/errors/exceptions/connect.py
index dd645000e7b7..423fb2c6f0ac 100644
--- a/python/pyspark/errors/exceptions/connect.py
+++ b/python/pyspark/errors/exceptions/connect.py
@@ -18,7 +18,6 @@ import pyspark.sql.connect.proto as pb2
 import json
 from typing import Dict, List, Optional, TYPE_CHECKING
 
-
 from pyspark.errors.exceptions.base import (
     AnalysisException as BaseAnalysisException,
     IllegalArgumentException as BaseIllegalArgumentException,
diff --git a/python/pyspark/ml/_typing.pyi b/python/pyspark/ml/_typing.pyi
index 12d831f1e8c7..a5237dad7521 100644
--- a/python/pyspark/ml/_typing.pyi
+++ b/python/pyspark/ml/_typing.pyi
@@ -20,13 +20,13 @@ from typing import Any, Dict, List, TypeVar, Tuple, Union
 from typing_extensions import Literal
 
 from numpy import ndarray
+from py4j.java_gateway import JavaObject
 
 import pyspark.ml.base
 import pyspark.ml.param
 import pyspark.ml.util
 from pyspark.ml.linalg import Vector
 import pyspark.ml.wrapper
-from py4j.java_gateway import JavaObject
 
 ParamMap = Dict[pyspark.ml.param.Param, Any]
 PipelineStage = Union[pyspark.ml.base.Estimator, pyspark.ml.base.Transformer]
diff --git a/python/pyspark/ml/base.py b/python/pyspark/ml/base.py
index b94358d26fd4..224ef34fd5ed 100644
--- a/python/pyspark/ml/base.py
+++ b/python/pyspark/ml/base.py
@@ -16,10 +16,8 @@
 #
 
 from abc import ABCMeta, abstractmethod
-
 import copy
 import threading
-
 from typing import (
     Any,
     Callable,
diff --git a/python/pyspark/ml/classification.py 
b/python/pyspark/ml/classification.py
index 81d7a1d51dab..263a108a216d 100644
--- a/python/pyspark/ml/classification.py
+++ b/python/pyspark/ml/classification.py
@@ -22,7 +22,6 @@ import uuid
 import warnings
 from abc import ABCMeta, abstractmethod
 from multiprocessing.pool import ThreadPool
-
 from typing import (
     Any,
     Dict,
@@ -95,7 +94,6 @@ from pyspark.sql.functions import udf, when
 from pyspark.sql.types import ArrayType, DoubleType
 from pyspark.storagelevel import StorageLevel
 
-
 if TYPE_CHECKING:
     from pyspark.ml._typing import P, ParamMap
     from py4j.java_gateway import JavaObject
diff --git a/python/pyspark/ml/clustering.py b/python/pyspark/ml/clustering.py
index 41108782a473..7f9e87e61243 100644
--- a/python/pyspark/ml/clustering.py
+++ b/python/pyspark/ml/clustering.py
@@ -17,7 +17,6 @@
 
 import sys
 import warnings
-
 from typing import Any, Dict, List, Optional, TYPE_CHECKING
 
 import numpy as np
diff --git a/python/pyspark/ml/common.py b/python/pyspark/ml/common.py
index dd6fee467e69..91593515a2de 100644
--- a/python/pyspark/ml/common.py
+++ b/python/pyspark/ml/common.py
@@ -17,9 +17,6 @@
 
 from typing import Any, Callable, TYPE_CHECKING
 
-if TYPE_CHECKING:
-    from pyspark.ml._typing import C, JavaObjectOrPickleDump
-
 import py4j.protocol
 from py4j.protocol import Py4JJavaError
 from py4j.java_gateway import JavaObject
@@ -30,6 +27,9 @@ from pyspark import RDD, SparkContext
 from pyspark.serializers import CPickleSerializer, AutoBatchedSerializer
 from pyspark.sql import DataFrame, SparkSession
 
+if TYPE_CHECKING:
+    from pyspark.ml._typing import C, JavaObjectOrPickleDump
+
 # Hack for support float('inf') in Py4j
 _old_smart_decode = py4j.protocol.smart_decode
 
diff --git a/python/pyspark/ml/connect/__init__.py 
b/python/pyspark/ml/connect/__init__.py
index 0760daa8e11b..875a5370d996 100644
--- a/python/pyspark/ml/connect/__init__.py
+++ b/python/pyspark/ml/connect/__init__.py
@@ -25,14 +25,12 @@ from pyspark.ml.connect.base import (
     Transformer,
     Model,
 )
-
 from pyspark.ml.connect import (
     feature,
     evaluation,
     tuning,
 )
 from pyspark.ml.connect.evaluation import Evaluator
-
 from pyspark.ml.connect.pipeline import Pipeline, PipelineModel
 
 __all__ = [
diff --git a/python/pyspark/ml/connect/base.py 
b/python/pyspark/ml/connect/base.py
index fdfcddf601c9..cbddc8fcfd1c 100644
--- a/python/pyspark/ml/connect/base.py
+++ b/python/pyspark/ml/connect/base.py
@@ -16,9 +16,6 @@
 #
 
 from abc import ABCMeta, abstractmethod
-
-import pandas as pd
-
 from typing import (
     Any,
     Generic,
@@ -31,6 +28,8 @@ from typing import (
     Callable,
 )
 
+import pandas as pd
+
 from pyspark import since
 from pyspark.ml.common import inherit_doc
 from pyspark.sql.dataframe import DataFrame
diff --git a/python/pyspark/ml/connect/classification.py 
b/python/pyspark/ml/connect/classification.py
index ca6e01e9577c..8b816f51ca27 100644
--- a/python/pyspark/ml/connect/classification.py
+++ b/python/pyspark/ml/connect/classification.py
@@ -14,17 +14,17 @@
 # See the License for the specific language governing permissions and
 # limitations under the License.
 #
-
-from pyspark import keyword_only
-from pyspark.ml.connect.base import _PredictorParams
-
-from pyspark.ml.param.shared import HasProbabilityCol
-
 from typing import Any, Dict, Union, List, Tuple, Callable, Optional
+import math
+
+import torch
+import torch.nn as torch_nn
 import numpy as np
 import pandas as pd
-import math
 
+from pyspark import keyword_only
+from pyspark.ml.connect.base import _PredictorParams
+from pyspark.ml.param.shared import HasProbabilityCol
 from pyspark.sql import DataFrame
 from pyspark.ml.common import inherit_doc
 from pyspark.ml.torch.distributor import TorchDistributor
@@ -43,9 +43,6 @@ from pyspark.ml.connect.base import Predictor, PredictionModel
 from pyspark.ml.connect.io_utils import ParamsReadWrite, CoreModelReadWrite
 from pyspark.sql import functions as sf
 
-import torch
-import torch.nn as torch_nn
-
 
 class _LogisticRegressionParams(
     _PredictorParams,
diff --git a/python/pyspark/ml/connect/evaluation.py 
b/python/pyspark/ml/connect/evaluation.py
index d728867f0867..15af4dc3aa78 100644
--- a/python/pyspark/ml/connect/evaluation.py
+++ b/python/pyspark/ml/connect/evaluation.py
@@ -14,9 +14,10 @@
 # See the License for the specific language governing permissions and
 # limitations under the License.
 #
+from typing import Any, Union, List, Tuple
+
 import numpy as np
 import pandas as pd
-from typing import Any, Union, List, Tuple
 
 from pyspark import keyword_only
 from pyspark.ml.param import Param, Params, TypeConverters
diff --git a/python/pyspark/ml/connect/feature.py 
b/python/pyspark/ml/connect/feature.py
index 42b470246d50..1ffc6f1613af 100644
--- a/python/pyspark/ml/connect/feature.py
+++ b/python/pyspark/ml/connect/feature.py
@@ -15,11 +15,12 @@
 # limitations under the License.
 #
 
-import numpy as np
-import pandas as pd
 import pickle
 from typing import Any, Union, List, Tuple, Callable, Dict, Optional
 
+import numpy as np
+import pandas as pd
+
 from pyspark import keyword_only
 from pyspark.sql import DataFrame
 from pyspark.ml.param.shared import HasInputCol, HasOutputCol
diff --git a/python/pyspark/ml/connect/io_utils.py 
b/python/pyspark/ml/connect/io_utils.py
index a09a244862c5..c401e3e76676 100644
--- a/python/pyspark/ml/connect/io_utils.py
+++ b/python/pyspark/ml/connect/io_utils.py
@@ -22,11 +22,10 @@ import tempfile
 import time
 from urllib.parse import urlparse
 from typing import Any, Dict, List
+
 from pyspark.ml.base import Params
 from pyspark.sql import SparkSession
 from pyspark.sql.utils import is_remote
-
-
 from pyspark import __version__ as pyspark_version
 
 
diff --git a/python/pyspark/ml/connect/pipeline.py 
b/python/pyspark/ml/connect/pipeline.py
index 0f7b3643950f..32f44fc8007a 100644
--- a/python/pyspark/ml/connect/pipeline.py
+++ b/python/pyspark/ml/connect/pipeline.py
@@ -14,9 +14,10 @@
 # See the License for the specific language governing permissions and
 # limitations under the License.
 #
-import pandas as pd
 from typing import Any, Dict, List, Optional, Union, cast, TYPE_CHECKING
 
+import pandas as pd
+
 from pyspark import keyword_only, since
 from pyspark.ml.connect.base import Estimator, Model, Transformer
 from pyspark.ml.connect.io_utils import (
diff --git a/python/pyspark/ml/connect/summarizer.py 
b/python/pyspark/ml/connect/summarizer.py
index dd42c521e14b..2f3e16ef4d18 100644
--- a/python/pyspark/ml/connect/summarizer.py
+++ b/python/pyspark/ml/connect/summarizer.py
@@ -15,9 +15,10 @@
 # limitations under the License.
 #
 
+from typing import Any, Union, List, Dict
+
 import numpy as np
 import pandas as pd
-from typing import Any, Union, List, Dict
 
 from pyspark.sql import DataFrame
 from pyspark.ml.connect.util import aggregate_dataframe
diff --git a/python/pyspark/ml/connect/tuning.py 
b/python/pyspark/ml/connect/tuning.py
index 871e448966c6..97106646f74b 100644
--- a/python/pyspark/ml/connect/tuning.py
+++ b/python/pyspark/ml/connect/tuning.py
@@ -16,7 +16,6 @@
 #
 
 from multiprocessing.pool import ThreadPool
-
 from typing import (
     Any,
     Callable,
@@ -45,7 +44,6 @@ from pyspark.ml.param.shared import HasParallelism, HasSeed
 from pyspark.sql.functions import col, lit, rand
 from pyspark.sql.dataframe import DataFrame
 from pyspark.sql import SparkSession
-
 from pyspark.sql.utils import is_remote
 
 
diff --git a/python/pyspark/ml/connect/util.py 
b/python/pyspark/ml/connect/util.py
index d05893ad2a04..c139482784f3 100644
--- a/python/pyspark/ml/connect/util.py
+++ b/python/pyspark/ml/connect/util.py
@@ -15,9 +15,10 @@
 # limitations under the License.
 #
 
-import pandas as pd
 from typing import Any, Union, List, Tuple, Callable, Iterable
 
+import pandas as pd
+
 from pyspark import cloudpickle
 from pyspark.sql import DataFrame
 from pyspark.sql.functions import col, pandas_udf
diff --git a/python/pyspark/ml/evaluation.py b/python/pyspark/ml/evaluation.py
index 19d123debaea..c6445c7f0241 100644
--- a/python/pyspark/ml/evaluation.py
+++ b/python/pyspark/ml/evaluation.py
@@ -17,7 +17,6 @@
 
 import sys
 from abc import abstractmethod, ABCMeta
-
 from typing import Any, Dict, Optional, TYPE_CHECKING
 
 from pyspark import since, keyword_only
diff --git a/python/pyspark/ml/linalg/__init__.py 
b/python/pyspark/ml/linalg/__init__.py
index 0fde63925161..d470f8b8b5c4 100644
--- a/python/pyspark/ml/linalg/__init__.py
+++ b/python/pyspark/ml/linalg/__init__.py
@@ -26,20 +26,6 @@ SciPy is available in their environment.
 import sys
 import array
 import struct
-
-import numpy as np
-
-from pyspark.sql.types import (
-    UserDefinedType,
-    StructField,
-    StructType,
-    ArrayType,
-    DoubleType,
-    IntegerType,
-    ByteType,
-    BooleanType,
-)
-
 from typing import (
     Any,
     Callable,
@@ -56,6 +42,19 @@ from typing import (
     Union,
 )
 
+import numpy as np
+
+from pyspark.sql.types import (
+    UserDefinedType,
+    StructField,
+    StructType,
+    ArrayType,
+    DoubleType,
+    IntegerType,
+    ByteType,
+    BooleanType,
+)
+
 
 __all__ = [
     "Vector",
diff --git a/python/pyspark/ml/regression.py b/python/pyspark/ml/regression.py
index a4ce961c92e8..d08e241b41d2 100644
--- a/python/pyspark/ml/regression.py
+++ b/python/pyspark/ml/regression.py
@@ -16,9 +16,7 @@
 #
 
 import sys
-
 from typing import Any, Dict, Generic, List, Optional, TypeVar, TYPE_CHECKING
-
 from abc import ABCMeta
 
 from pyspark import keyword_only, since
diff --git a/python/pyspark/ml/stat.py b/python/pyspark/ml/stat.py
index e0c4d05a32af..3ac77b409821 100644
--- a/python/pyspark/ml/stat.py
+++ b/python/pyspark/ml/stat.py
@@ -16,10 +16,8 @@
 #
 
 import sys
-
 from typing import Optional, Tuple, TYPE_CHECKING
 
-
 from pyspark import since, SparkContext
 from pyspark.ml.common import _java2py, _py2java
 from pyspark.ml.linalg import Matrix, Vector
diff --git a/python/pyspark/ml/tests/connect/test_connect_classification.py 
b/python/pyspark/ml/tests/connect/test_connect_classification.py
index 1c777fc3d40a..ccf7c346be72 100644
--- a/python/pyspark/ml/tests/connect/test_connect_classification.py
+++ b/python/pyspark/ml/tests/connect/test_connect_classification.py
@@ -17,6 +17,7 @@
 #
 
 import unittest
+
 from pyspark.sql import SparkSession
 from pyspark.testing.connectutils import should_test_connect, 
connect_requirement_message
 
diff --git a/python/pyspark/ml/tests/connect/test_connect_evaluation.py 
b/python/pyspark/ml/tests/connect/test_connect_evaluation.py
index 0512619d2bfe..7f3b6bd0198c 100644
--- a/python/pyspark/ml/tests/connect/test_connect_evaluation.py
+++ b/python/pyspark/ml/tests/connect/test_connect_evaluation.py
@@ -16,6 +16,7 @@
 #
 
 import unittest
+
 from pyspark.sql import SparkSession
 from pyspark.testing.connectutils import should_test_connect, 
connect_requirement_message
 
diff --git a/python/pyspark/ml/tests/connect/test_connect_feature.py 
b/python/pyspark/ml/tests/connect/test_connect_feature.py
index bd5eebe6e420..cf450cc743ae 100644
--- a/python/pyspark/ml/tests/connect/test_connect_feature.py
+++ b/python/pyspark/ml/tests/connect/test_connect_feature.py
@@ -16,6 +16,7 @@
 #
 
 import unittest
+
 from pyspark.sql import SparkSession
 from pyspark.testing.connectutils import should_test_connect, 
connect_requirement_message
 
diff --git a/python/pyspark/ml/tests/connect/test_connect_function.py 
b/python/pyspark/ml/tests/connect/test_connect_function.py
index 7da3d3f1addd..2b2cd3bc3a89 100644
--- a/python/pyspark/ml/tests/connect/test_connect_function.py
+++ b/python/pyspark/ml/tests/connect/test_connect_function.py
@@ -20,7 +20,6 @@ import unittest
 from pyspark.sql import SparkSession as PySparkSession
 from pyspark.sql.dataframe import DataFrame as SDF
 from pyspark.ml import functions as SF
-
 from pyspark.testing.sqlutils import SQLTestUtils
 from pyspark.testing.connectutils import (
     should_test_connect,
diff --git a/python/pyspark/ml/tests/connect/test_connect_pipeline.py 
b/python/pyspark/ml/tests/connect/test_connect_pipeline.py
index d2d960d6b749..6925b2482f24 100644
--- a/python/pyspark/ml/tests/connect/test_connect_pipeline.py
+++ b/python/pyspark/ml/tests/connect/test_connect_pipeline.py
@@ -17,6 +17,7 @@
 #
 
 import unittest
+
 from pyspark.sql import SparkSession
 from pyspark.testing.connectutils import should_test_connect, 
connect_requirement_message
 
diff --git a/python/pyspark/ml/tests/connect/test_connect_summarizer.py 
b/python/pyspark/ml/tests/connect/test_connect_summarizer.py
index 107f8348d7e1..866a3468388d 100644
--- a/python/pyspark/ml/tests/connect/test_connect_summarizer.py
+++ b/python/pyspark/ml/tests/connect/test_connect_summarizer.py
@@ -16,6 +16,7 @@
 #
 
 import unittest
+
 from pyspark.sql import SparkSession
 from pyspark.testing.connectutils import should_test_connect, 
connect_requirement_message
 
diff --git a/python/pyspark/ml/tests/connect/test_connect_tuning.py 
b/python/pyspark/ml/tests/connect/test_connect_tuning.py
index a38b081636a4..d7dbb00b5e17 100644
--- a/python/pyspark/ml/tests/connect/test_connect_tuning.py
+++ b/python/pyspark/ml/tests/connect/test_connect_tuning.py
@@ -17,8 +17,8 @@
 #
 
 import unittest
-from pyspark.sql import SparkSession
 
+from pyspark.sql import SparkSession
 from pyspark.testing.connectutils import should_test_connect, 
connect_requirement_message
 
 if should_test_connect:
diff --git a/python/pyspark/ml/tests/connect/test_legacy_mode_classification.py 
b/python/pyspark/ml/tests/connect/test_legacy_mode_classification.py
index bc48b4bddd49..26eb4230df35 100644
--- a/python/pyspark/ml/tests/connect/test_legacy_mode_classification.py
+++ b/python/pyspark/ml/tests/connect/test_legacy_mode_classification.py
@@ -18,7 +18,9 @@
 import os
 import tempfile
 import unittest
+
 import numpy as np
+
 from pyspark.sql import SparkSession
 from pyspark.testing.connectutils import should_test_connect, 
connect_requirement_message
 
diff --git a/python/pyspark/ml/tests/connect/test_legacy_mode_evaluation.py 
b/python/pyspark/ml/tests/connect/test_legacy_mode_evaluation.py
index 19442667b2bf..6a6d6b183d1b 100644
--- a/python/pyspark/ml/tests/connect/test_legacy_mode_evaluation.py
+++ b/python/pyspark/ml/tests/connect/test_legacy_mode_evaluation.py
@@ -16,9 +16,10 @@
 # limitations under the License.
 #
 import unittest
-import numpy as np
 import tempfile
 
+import numpy as np
+
 from pyspark.sql import SparkSession
 from pyspark.testing.connectutils import should_test_connect, 
connect_requirement_message
 
diff --git a/python/pyspark/ml/tests/connect/test_legacy_mode_feature.py 
b/python/pyspark/ml/tests/connect/test_legacy_mode_feature.py
index 7315590cdd3e..f440fa9d682f 100644
--- a/python/pyspark/ml/tests/connect/test_legacy_mode_feature.py
+++ b/python/pyspark/ml/tests/connect/test_legacy_mode_feature.py
@@ -18,10 +18,11 @@
 
 import os
 import pickle
-import numpy as np
 import tempfile
 import unittest
 
+import numpy as np
+
 from pyspark.sql import SparkSession
 from pyspark.testing.connectutils import should_test_connect, 
connect_requirement_message
 
diff --git a/python/pyspark/ml/tests/connect/test_legacy_mode_pipeline.py 
b/python/pyspark/ml/tests/connect/test_legacy_mode_pipeline.py
index 34a2ed851bb9..0fd0fd63ffce 100644
--- a/python/pyspark/ml/tests/connect/test_legacy_mode_pipeline.py
+++ b/python/pyspark/ml/tests/connect/test_legacy_mode_pipeline.py
@@ -18,7 +18,9 @@
 import os
 import tempfile
 import unittest
+
 import numpy as np
+
 from pyspark.sql import SparkSession
 from pyspark.testing.connectutils import should_test_connect, 
connect_requirement_message
 
diff --git a/python/pyspark/ml/tests/connect/test_legacy_mode_summarizer.py 
b/python/pyspark/ml/tests/connect/test_legacy_mode_summarizer.py
index 2e6299dabdf4..7f09eb9f0742 100644
--- a/python/pyspark/ml/tests/connect/test_legacy_mode_summarizer.py
+++ b/python/pyspark/ml/tests/connect/test_legacy_mode_summarizer.py
@@ -17,6 +17,7 @@
 #
 
 import unittest
+
 import numpy as np
 
 from pyspark.sql import SparkSession
diff --git a/python/pyspark/ml/tests/connect/test_legacy_mode_tuning.py 
b/python/pyspark/ml/tests/connect/test_legacy_mode_tuning.py
index 5f714eeb169d..63f31bb2ccdf 100644
--- a/python/pyspark/ml/tests/connect/test_legacy_mode_tuning.py
+++ b/python/pyspark/ml/tests/connect/test_legacy_mode_tuning.py
@@ -18,6 +18,7 @@
 
 import tempfile
 import unittest
+
 import numpy as np
 
 from pyspark.ml.param import Param, Params
diff --git a/python/pyspark/ml/tests/connect/test_parity_torch_data_loader.py 
b/python/pyspark/ml/tests/connect/test_parity_torch_data_loader.py
index 68a281dbefab..1984efdc6c6e 100644
--- a/python/pyspark/ml/tests/connect/test_parity_torch_data_loader.py
+++ b/python/pyspark/ml/tests/connect/test_parity_torch_data_loader.py
@@ -16,8 +16,8 @@
 #
 
 import unittest
-from pyspark.sql import SparkSession
 
+from pyspark.sql import SparkSession
 from pyspark.ml.torch.tests.test_data_loader import 
TorchDistributorDataLoaderUnitTests
 
 have_torch = True
diff --git a/python/pyspark/ml/tests/connect/test_parity_torch_distributor.py 
b/python/pyspark/ml/tests/connect/test_parity_torch_distributor.py
index b855332f96c4..70aa80ba6d11 100644
--- a/python/pyspark/ml/tests/connect/test_parity_torch_distributor.py
+++ b/python/pyspark/ml/tests/connect/test_parity_torch_distributor.py
@@ -26,7 +26,6 @@ except ImportError:
     have_torch = False
 
 from pyspark.sql import SparkSession
-
 from pyspark.ml.torch.tests.test_distributor import (
     TorchDistributorBaselineUnitTestsMixin,
     TorchDistributorLocalUnitTestsMixin,
diff --git a/python/pyspark/ml/tests/test_dl_util.py 
b/python/pyspark/ml/tests/test_dl_util.py
index c9634b6c79fb..e5e2c6bc191d 100644
--- a/python/pyspark/ml/tests/test_dl_util.py
+++ b/python/pyspark/ml/tests/test_dl_util.py
@@ -18,9 +18,10 @@ from contextlib import contextmanager
 import os
 import textwrap
 from typing import Any, BinaryIO, Callable, Iterator
-
 import unittest
+
 from parameterized import parameterized
+
 from pyspark import cloudpickle
 from pyspark.ml.dl_util import FunctionPickler
 
diff --git a/python/pyspark/ml/tests/test_functions.py 
b/python/pyspark/ml/tests/test_functions.py
index e3c7982f92bc..7df0a2639414 100644
--- a/python/pyspark/ml/tests/test_functions.py
+++ b/python/pyspark/ml/tests/test_functions.py
@@ -14,9 +14,10 @@
 # See the License for the specific language governing permissions and
 # limitations under the License.
 #
-import numpy as np
 import unittest
 
+import numpy as np
+
 from pyspark.ml.functions import predict_batch_udf
 from pyspark.sql.functions import array, struct, col
 from pyspark.sql.types import ArrayType, DoubleType, IntegerType, StructType, 
StructField, FloatType
diff --git a/python/pyspark/ml/tests/test_model_cache.py 
b/python/pyspark/ml/tests/test_model_cache.py
index f37c73e8d5e2..9ad8ac544274 100644
--- a/python/pyspark/ml/tests/test_model_cache.py
+++ b/python/pyspark/ml/tests/test_model_cache.py
@@ -15,10 +15,10 @@
 # limitations under the License.
 #
 import unittest
+from uuid import uuid4
 
 from pyspark.ml.model_cache import ModelCache
 from pyspark.testing.mlutils import SparkSessionTestCase
-from uuid import uuid4
 
 
 class ModelCacheTests(SparkSessionTestCase):
diff --git a/python/pyspark/ml/tests/tuning/test_tuning.py 
b/python/pyspark/ml/tests/tuning/test_tuning.py
index ac0f846afe1c..83acedc4b581 100644
--- a/python/pyspark/ml/tests/tuning/test_tuning.py
+++ b/python/pyspark/ml/tests/tuning/test_tuning.py
@@ -19,6 +19,7 @@ import tempfile
 import unittest
 
 import numpy as np
+
 from pyspark.ml import Estimator, Model
 from pyspark.ml.classification import LogisticRegression
 from pyspark.ml.evaluation import (
diff --git a/python/pyspark/ml/torch/data.py b/python/pyspark/ml/torch/data.py
index 0a5597fbd241..826526d56c8d 100644
--- a/python/pyspark/ml/torch/data.py
+++ b/python/pyspark/ml/torch/data.py
@@ -15,9 +15,11 @@
 # limitations under the License.
 #
 
+from typing import Any, Callable, Iterator
+
 import torch
 import numpy as np
-from typing import Any, Callable, Iterator
+
 from pyspark.sql.types import StructType
 
 
diff --git a/python/pyspark/ml/torch/tests/test_data_loader.py 
b/python/pyspark/ml/torch/tests/test_data_loader.py
index 67ab6e378cea..00f5f0a8c8d8 100644
--- a/python/pyspark/ml/torch/tests/test_data_loader.py
+++ b/python/pyspark/ml/torch/tests/test_data_loader.py
@@ -15,9 +15,10 @@
 # limitations under the License.
 #
 
-import numpy as np
 import unittest
 
+import numpy as np
+
 have_torch = True
 try:
     import torch  # noqa: F401
diff --git a/python/pyspark/ml/tuning.py b/python/pyspark/ml/tuning.py
index fa5d604981b1..ae028b2f3996 100644
--- a/python/pyspark/ml/tuning.py
+++ b/python/pyspark/ml/tuning.py
@@ -19,7 +19,6 @@ import os
 import sys
 import itertools
 from multiprocessing.pool import ThreadPool
-
 from typing import (
     Any,
     Callable,
@@ -58,7 +57,6 @@ from pyspark.ml.util import (
 from pyspark.ml.wrapper import JavaParams, JavaEstimator, JavaWrapper
 from pyspark.sql.functions import col, lit, rand, UserDefinedFunction
 from pyspark.sql.types import BooleanType
-
 from pyspark.sql.dataframe import DataFrame
 
 if TYPE_CHECKING:
diff --git a/python/pyspark/ml/util.py b/python/pyspark/ml/util.py
index 595894047905..8ed8c9ffdea4 100644
--- a/python/pyspark/ml/util.py
+++ b/python/pyspark/ml/util.py
@@ -20,7 +20,6 @@ import os
 import time
 import uuid
 import functools
-
 from typing import (
     Any,
     Callable,
@@ -35,7 +34,6 @@ from typing import (
     TYPE_CHECKING,
 )
 
-
 from pyspark import SparkContext, since
 from pyspark.ml.common import inherit_doc
 from pyspark.sql import SparkSession
diff --git a/python/pyspark/ml/wrapper.py b/python/pyspark/ml/wrapper.py
index 6edddb0026d4..ea2a38cd9101 100644
--- a/python/pyspark/ml/wrapper.py
+++ b/python/pyspark/ml/wrapper.py
@@ -16,7 +16,6 @@
 #
 
 from abc import ABCMeta, abstractmethod
-
 from typing import Any, Generic, Optional, List, Type, TypeVar, TYPE_CHECKING
 
 from pyspark import since


---------------------------------------------------------------------
To unsubscribe, e-mail: [email protected]
For additional commands, e-mail: [email protected]

Reply via email to