This is an automated email from the ASF dual-hosted git repository.

ruifengz pushed a commit to branch master
in repository https://gitbox.apache.org/repos/asf/spark.git


The following commit(s) were added to refs/heads/master by this push:
     new f74d3fbbd41d [SPARK-55474][PYTHON][TESTS] Remove test files from 
ignore list of ruff
f74d3fbbd41d is described below

commit f74d3fbbd41d3d17e084b0740af091421de5da95
Author: Tian Gao <[email protected]>
AuthorDate: Wed Feb 18 09:53:07 2026 +0800

    [SPARK-55474][PYTHON][TESTS] Remove test files from ignore list of ruff
    
    ### What changes were proposed in this pull request?
    
    Removed test files from ignore list of ruff for `F403` and `F401`.
    
    ### Why are the changes needed?
    
    We used to need it because we used `from ... import *` in our tests. We
don't do that anymore (with a few rare cases that we can ignore inline).
    
    During the time we ignored these warnings, we accumulated some real unused
imports. These are cleared with this PR.
    
    ### Does this PR introduce _any_ user-facing change?
    
    No.
    
    ### How was this patch tested?
    
    CI.
    
    ### Was this patch authored or co-authored using generative AI tooling?
    
    No.
    
    Closes #54251 from gaogaotiantian/fix-test-import.
    
    Authored-by: Tian Gao <[email protected]>
    Signed-off-by: Ruifeng Zheng <[email protected]>
---
 pyproject.toml                                             | 14 --------------
 python/pyspark/errors/tests/test_traceback.py              |  4 ++--
 python/pyspark/ml/tests/connect/test_parity_functions.py   |  2 +-
 python/pyspark/ml/tests/test_functions.py                  |  4 ++--
 python/pyspark/pandas/tests/computation/test_apply_func.py |  1 -
 python/pyspark/pandas/tests/computation/test_stats.py      |  2 +-
 .../tests/connect/indexes/test_parity_indexing_adv.py      |  2 +-
 python/pyspark/pandas/tests/data_type_ops/test_as_type.py  |  1 -
 python/pyspark/pandas/tests/data_type_ops/test_num_ops.py  |  1 -
 python/pyspark/pandas/tests/groupby/test_groupby.py        |  2 +-
 python/pyspark/pandas/tests/indexes/test_basic.py          |  2 +-
 python/pyspark/pandas/tests/indexes/test_indexing_adv.py   |  2 +-
 .../pyspark/pandas/tests/indexes/test_indexing_loc_2d.py   |  2 +-
 python/pyspark/pandas/tests/test_typedef.py                |  2 --
 .../pyspark/pandas/tests/window/test_groupby_expanding.py  |  2 +-
 .../pandas/tests/window/test_groupby_expanding_adv.py      |  2 +-
 python/pyspark/pandas/tests/window/test_rolling_adv.py     |  2 +-
 python/pyspark/resource/tests/test_resources.py            |  2 +-
 python/pyspark/sql/tests/arrow/test_arrow.py               |  4 ++--
 python/pyspark/sql/tests/arrow/test_arrow_cogrouped_map.py |  8 ++------
 python/pyspark/sql/tests/arrow/test_arrow_grouped_map.py   |  8 ++------
 python/pyspark/sql/tests/arrow/test_arrow_map.py           |  8 ++++----
 python/pyspark/sql/tests/arrow/test_arrow_python_udf.py    |  6 +++---
 python/pyspark/sql/tests/arrow/test_arrow_udf.py           |  4 ++--
 python/pyspark/sql/tests/arrow/test_arrow_udtf.py          |  3 ++-
 .../sql/tests/connect/test_connect_dataframe_property.py   |  2 +-
 .../pandas/helper/helper_pandas_transform_with_state.py    |  2 +-
 .../pandas/streaming/test_pandas_transform_with_state.py   |  4 ++--
 .../test_pandas_transform_with_state_state_variable.py     |  6 +++---
 .../tests/pandas/streaming/test_transform_with_state.py    |  4 ++--
 .../streaming/test_transform_with_state_state_variable.py  |  4 ++--
 .../pyspark/sql/tests/pandas/streaming/test_tws_tester.py  |  4 ++--
 python/pyspark/sql/tests/pandas/test_converter.py          |  2 +-
 .../pyspark/sql/tests/pandas/test_pandas_cogrouped_map.py  |  6 +++---
 python/pyspark/sql/tests/pandas/test_pandas_grouped_map.py |  6 +++---
 .../sql/tests/pandas/test_pandas_grouped_map_with_state.py |  6 +++---
 python/pyspark/sql/tests/pandas/test_pandas_map.py         |  7 ++++---
 python/pyspark/sql/tests/pandas/test_pandas_sqlmetrics.py  |  4 ++--
 python/pyspark/sql/tests/pandas/test_pandas_udf.py         |  4 ++--
 .../sql/tests/pandas/test_pandas_udf_grouped_agg.py        |  6 +++---
 python/pyspark/sql/tests/pandas/test_pandas_udf_scalar.py  |  4 +++-
 .../pyspark/sql/tests/pandas/test_pandas_udf_typehints.py  |  4 ++--
 .../test_pandas_udf_typehints_with_future_annotations.py   |  4 ++--
 python/pyspark/sql/tests/pandas/test_pandas_udf_window.py  |  6 +++---
 .../streaming/test_streaming_offline_state_repartition.py  |  4 ++--
 python/pyspark/sql/tests/test_collection.py                |  6 +++---
 python/pyspark/sql/tests/test_column.py                    |  3 ++-
 python/pyspark/sql/tests/test_creation.py                  |  4 ++--
 python/pyspark/sql/tests/test_dataframe.py                 |  2 ++
 python/pyspark/sql/tests/test_group.py                     |  4 ++--
 python/pyspark/sql/tests/test_listener.py                  |  4 ++--
 python/pyspark/sql/tests/test_python_datasource.py         |  2 ++
 .../pyspark/sql/tests/test_python_streaming_datasource.py  |  6 +-----
 python/pyspark/sql/tests/test_resources.py                 |  4 ++--
 python/pyspark/sql/tests/test_udtf.py                      |  4 ++--
 python/pyspark/sql/tests/test_utils.py                     | 10 ++++++++--
 python/pyspark/testing/connectutils.py                     | 10 ----------
 python/pyspark/testing/pandasutils.py                      |  1 -
 python/pyspark/testing/sqlutils.py                         |  4 ----
 python/pyspark/testing/tests/test_no_tests.py              |  2 --
 python/pyspark/testing/tests/test_skip_class.py            |  1 -
 python/pyspark/testing/utils.py                            |  4 ++--
 python/pyspark/tests/test_memory_profiler.py               |  6 +++---
 python/pyspark/tests/test_rdd.py                           |  4 ++--
 64 files changed, 113 insertions(+), 147 deletions(-)

diff --git a/pyproject.toml b/pyproject.toml
index 08ced23191aa..68a6cba0acb5 100644
--- a/pyproject.toml
+++ b/pyproject.toml
@@ -55,20 +55,6 @@ ignore = [
     "python/pyspark/errors/error_classes.py" = ["E501"]
     # Examples contain some unused variables.
     "examples/src/main/python/sql/datasource.py" = ["F841"]
-    # Exclude * imports in test files
-    "python/pyspark/errors/tests/*.py" = ["F403"]
-    "python/pyspark/logger/tests/*.py" = ["F403"]
-    "python/pyspark/logger/tests/connect/*.py" = ["F403"]
-    "python/pyspark/ml/tests/*.py" = ["F403"]
-    "python/pyspark/mllib/tests/*.py" = ["F403"]
-    "python/pyspark/pandas/tests/*.py" = ["F401", "F403"]
-    "python/pyspark/pandas/tests/connect/*.py" = ["F401", "F403"]
-    "python/pyspark/resource/tests/*.py" = ["F403"]
-    "python/pyspark/sql/tests/*.py" = ["F403"]
-    "python/pyspark/streaming/tests/*.py" = ["F403"]
-    "python/pyspark/tests/*.py" = ["F403"]
-    "python/pyspark/testing/*.py" = ["F401"]
-    "python/pyspark/testing/tests/*.py" = ["F403"]
 
 [tool.black]
 # When changing the version, we have to update
diff --git a/python/pyspark/errors/tests/test_traceback.py 
b/python/pyspark/errors/tests/test_traceback.py
index ceccfc60fa35..070ce666700a 100644
--- a/python/pyspark/errors/tests/test_traceback.py
+++ b/python/pyspark/errors/tests/test_traceback.py
@@ -29,8 +29,8 @@ from pyspark.errors.exceptions.base import AnalysisException
 from pyspark.errors.exceptions.tblib import Traceback
 from pyspark.sql.datasource import DataSource, DataSourceReader
 from pyspark.sql.session import SparkSession
-from pyspark.testing.sqlutils import (
-    ReusedSQLTestCase,
+from pyspark.testing.sqlutils import ReusedSQLTestCase
+from pyspark.testing.utils import (
     have_pandas,
     have_pyarrow,
     pandas_requirement_message,
diff --git a/python/pyspark/ml/tests/connect/test_parity_functions.py 
b/python/pyspark/ml/tests/connect/test_parity_functions.py
index 0d2a3f135794..ac5321556ae8 100644
--- a/python/pyspark/ml/tests/connect/test_parity_functions.py
+++ b/python/pyspark/ml/tests/connect/test_parity_functions.py
@@ -22,7 +22,7 @@ from pyspark.ml.tests.test_functions import (
     PredictBatchUDFTestsMixin,
 )
 from pyspark.testing.connectutils import ReusedConnectTestCase
-from pyspark.testing.sqlutils import (
+from pyspark.testing.utils import (
     have_pandas,
     have_pyarrow,
     pandas_requirement_message,
diff --git a/python/pyspark/ml/tests/test_functions.py 
b/python/pyspark/ml/tests/test_functions.py
index 6d4bb0ce0a29..883b6881e39e 100644
--- a/python/pyspark/ml/tests/test_functions.py
+++ b/python/pyspark/ml/tests/test_functions.py
@@ -23,12 +23,12 @@ from pyspark.ml.linalg import DenseVector
 from pyspark.ml.functions import array_to_vector, vector_to_array, 
predict_batch_udf
 from pyspark.sql.functions import array, struct, col
 from pyspark.sql.types import ArrayType, DoubleType, IntegerType, StructType, 
StructField, FloatType
-from pyspark.testing.sqlutils import (
+from pyspark.testing.sqlutils import ReusedSQLTestCase
+from pyspark.testing.utils import (
     have_pandas,
     have_pyarrow,
     pandas_requirement_message,
     pyarrow_requirement_message,
-    ReusedSQLTestCase,
 )
 
 
diff --git a/python/pyspark/pandas/tests/computation/test_apply_func.py 
b/python/pyspark/pandas/tests/computation/test_apply_func.py
index d436fc1f50d2..60c41309e1ef 100644
--- a/python/pyspark/pandas/tests/computation/test_apply_func.py
+++ b/python/pyspark/pandas/tests/computation/test_apply_func.py
@@ -15,7 +15,6 @@
 # limitations under the License.
 #
 from datetime import datetime
-import sys
 from typing import List
 
 import numpy as np
diff --git a/python/pyspark/pandas/tests/computation/test_stats.py 
b/python/pyspark/pandas/tests/computation/test_stats.py
index e7f505276916..05e9bdd3c679 100644
--- a/python/pyspark/pandas/tests/computation/test_stats.py
+++ b/python/pyspark/pandas/tests/computation/test_stats.py
@@ -19,7 +19,7 @@ import numpy as np
 import pandas as pd
 
 from pyspark import pandas as ps
-from pyspark.testing.pandasutils import PandasOnSparkTestCase, 
SPARK_CONF_ARROW_ENABLED
+from pyspark.testing.pandasutils import PandasOnSparkTestCase
 from pyspark.testing.sqlutils import SQLTestUtils
 
 
diff --git 
a/python/pyspark/pandas/tests/connect/indexes/test_parity_indexing_adv.py 
b/python/pyspark/pandas/tests/connect/indexes/test_parity_indexing_adv.py
index eaafe9f82448..0af2d091d87e 100644
--- a/python/pyspark/pandas/tests/connect/indexes/test_parity_indexing_adv.py
+++ b/python/pyspark/pandas/tests/connect/indexes/test_parity_indexing_adv.py
@@ -29,7 +29,7 @@ class IndexingAdvParityTests(
 
 
 if __name__ == "__main__":
-    from pyspark.pandas.tests.connect.indexes.test_parity_indexing import *
+    from pyspark.pandas.tests.connect.indexes.test_parity_indexing import *  # 
noqa: F403
 
     from pyspark.testing import main
 
diff --git a/python/pyspark/pandas/tests/data_type_ops/test_as_type.py 
b/python/pyspark/pandas/tests/data_type_ops/test_as_type.py
index e3860cba4a8f..2c543f6b6715 100644
--- a/python/pyspark/pandas/tests/data_type_ops/test_as_type.py
+++ b/python/pyspark/pandas/tests/data_type_ops/test_as_type.py
@@ -24,7 +24,6 @@ from pyspark import pandas as ps
 from pyspark.testing.pandasutils import PandasOnSparkTestCase
 from pyspark.pandas.tests.data_type_ops.testing_utils import OpsTestBase
 from pyspark.pandas.typedef.typehints import (
-    extension_dtypes_available,
     extension_float_dtypes_available,
     extension_object_dtypes_available,
 )
diff --git a/python/pyspark/pandas/tests/data_type_ops/test_num_ops.py 
b/python/pyspark/pandas/tests/data_type_ops/test_num_ops.py
index 9d647e39a136..9088262308c2 100644
--- a/python/pyspark/pandas/tests/data_type_ops/test_num_ops.py
+++ b/python/pyspark/pandas/tests/data_type_ops/test_num_ops.py
@@ -28,7 +28,6 @@ from pyspark.pandas.tests.data_type_ops.testing_utils import 
OpsTestBase
 from pyspark.pandas.typedef.typehints import (
     extension_dtypes_available,
     extension_float_dtypes_available,
-    extension_object_dtypes_available,
 )
 from pyspark.sql.types import DecimalType, IntegralType
 
diff --git a/python/pyspark/pandas/tests/groupby/test_groupby.py 
b/python/pyspark/pandas/tests/groupby/test_groupby.py
index 8df5f7e5f1ab..92bb84a5c341 100644
--- a/python/pyspark/pandas/tests/groupby/test_groupby.py
+++ b/python/pyspark/pandas/tests/groupby/test_groupby.py
@@ -20,7 +20,7 @@ import numpy as np
 import pandas as pd
 
 from pyspark import pandas as ps
-from pyspark.pandas.groupby import is_multi_agg_with_relabel, SeriesGroupBy
+from pyspark.pandas.groupby import is_multi_agg_with_relabel
 from pyspark.testing.pandasutils import PandasOnSparkTestCase, TestUtils
 
 
diff --git a/python/pyspark/pandas/tests/indexes/test_basic.py 
b/python/pyspark/pandas/tests/indexes/test_basic.py
index bc9a32e6bbc1..7255522c6e78 100644
--- a/python/pyspark/pandas/tests/indexes/test_basic.py
+++ b/python/pyspark/pandas/tests/indexes/test_basic.py
@@ -23,7 +23,7 @@ import pandas as pd
 import pyspark.pandas as ps
 from pyspark.loose_version import LooseVersion
 from pyspark.pandas.exceptions import PandasNotImplementedError
-from pyspark.testing.pandasutils import PandasOnSparkTestCase, TestUtils, 
SPARK_CONF_ARROW_ENABLED
+from pyspark.testing.pandasutils import PandasOnSparkTestCase, TestUtils
 
 
 class IndexBasicMixin:
diff --git a/python/pyspark/pandas/tests/indexes/test_indexing_adv.py 
b/python/pyspark/pandas/tests/indexes/test_indexing_adv.py
index d299fb01dfd7..fdebdcbd0002 100644
--- a/python/pyspark/pandas/tests/indexes/test_indexing_adv.py
+++ b/python/pyspark/pandas/tests/indexes/test_indexing_adv.py
@@ -23,7 +23,7 @@ import pandas as pd
 
 from pyspark import pandas as ps
 from pyspark.pandas.exceptions import SparkPandasNotImplementedError
-from pyspark.testing.pandasutils import PandasOnSparkTestCase, compare_both
+from pyspark.testing.pandasutils import PandasOnSparkTestCase
 from pyspark.testing.sqlutils import SQLTestUtils
 
 
diff --git a/python/pyspark/pandas/tests/indexes/test_indexing_loc_2d.py 
b/python/pyspark/pandas/tests/indexes/test_indexing_loc_2d.py
index 03568290198d..9cf2c2f27c5f 100644
--- a/python/pyspark/pandas/tests/indexes/test_indexing_loc_2d.py
+++ b/python/pyspark/pandas/tests/indexes/test_indexing_loc_2d.py
@@ -20,7 +20,7 @@ import numpy as np
 import pandas as pd
 
 from pyspark import pandas as ps
-from pyspark.pandas.exceptions import SparkPandasIndexingError, 
SparkPandasNotImplementedError
+from pyspark.pandas.exceptions import SparkPandasIndexingError
 from pyspark.testing.pandasutils import PandasOnSparkTestCase
 from pyspark.testing.sqlutils import SQLTestUtils
 
diff --git a/python/pyspark/pandas/tests/test_typedef.py 
b/python/pyspark/pandas/tests/test_typedef.py
index 0c6a33393a21..7c9d2a600037 100644
--- a/python/pyspark/pandas/tests/test_typedef.py
+++ b/python/pyspark/pandas/tests/test_typedef.py
@@ -15,8 +15,6 @@
 # limitations under the License.
 #
 
-import os
-import sys
 import unittest
 import datetime
 import decimal
diff --git a/python/pyspark/pandas/tests/window/test_groupby_expanding.py 
b/python/pyspark/pandas/tests/window/test_groupby_expanding.py
index 95ef9c201367..e37986307f7a 100644
--- a/python/pyspark/pandas/tests/window/test_groupby_expanding.py
+++ b/python/pyspark/pandas/tests/window/test_groupby_expanding.py
@@ -19,7 +19,7 @@ import numpy as np
 import pandas as pd
 
 import pyspark.pandas as ps
-from pyspark.testing.pandasutils import PandasOnSparkTestCase, TestUtils
+from pyspark.testing.pandasutils import PandasOnSparkTestCase
 
 
 class GroupByExpandingTestingFuncMixin:
diff --git a/python/pyspark/pandas/tests/window/test_groupby_expanding_adv.py 
b/python/pyspark/pandas/tests/window/test_groupby_expanding_adv.py
index 8f855b3113cf..4de1bce2bae8 100644
--- a/python/pyspark/pandas/tests/window/test_groupby_expanding_adv.py
+++ b/python/pyspark/pandas/tests/window/test_groupby_expanding_adv.py
@@ -15,7 +15,7 @@
 # limitations under the License.
 #
 
-from pyspark.testing.pandasutils import PandasOnSparkTestCase, TestUtils
+from pyspark.testing.pandasutils import PandasOnSparkTestCase
 from pyspark.pandas.tests.window.test_groupby_expanding import 
GroupByExpandingTestingFuncMixin
 
 
diff --git a/python/pyspark/pandas/tests/window/test_rolling_adv.py 
b/python/pyspark/pandas/tests/window/test_rolling_adv.py
index 472011d8b91b..47bdab8705b6 100644
--- a/python/pyspark/pandas/tests/window/test_rolling_adv.py
+++ b/python/pyspark/pandas/tests/window/test_rolling_adv.py
@@ -15,7 +15,7 @@
 # limitations under the License.
 #
 
-from pyspark.testing.pandasutils import PandasOnSparkTestCase, TestUtils
+from pyspark.testing.pandasutils import PandasOnSparkTestCase
 from pyspark.pandas.tests.window.test_rolling import RollingTestingFuncMixin
 
 
diff --git a/python/pyspark/resource/tests/test_resources.py 
b/python/pyspark/resource/tests/test_resources.py
index 7b151a40dded..e62a56d8d475 100644
--- a/python/pyspark/resource/tests/test_resources.py
+++ b/python/pyspark/resource/tests/test_resources.py
@@ -17,7 +17,7 @@
 import unittest
 from pyspark.resource import ExecutorResourceRequests, ResourceProfileBuilder, 
TaskResourceRequests
 from pyspark.sql import SparkSession
-from pyspark.testing.sqlutils import (
+from pyspark.testing.utils import (
     have_pandas,
     have_pyarrow,
     pandas_requirement_message,
diff --git a/python/pyspark/sql/tests/arrow/test_arrow.py 
b/python/pyspark/sql/tests/arrow/test_arrow.py
index 136f530f805a..4189860181a8 100644
--- a/python/pyspark/sql/tests/arrow/test_arrow.py
+++ b/python/pyspark/sql/tests/arrow/test_arrow.py
@@ -56,8 +56,8 @@ from pyspark.sql.pandas.types import (
     to_arrow_schema,
 )
 from pyspark.testing.objects import ExamplePoint, ExamplePointUDT
-from pyspark.testing.sqlutils import (
-    ReusedSQLTestCase,
+from pyspark.testing.sqlutils import ReusedSQLTestCase
+from pyspark.testing.utils import (
     have_pandas,
     have_pyarrow,
     pandas_requirement_message,
diff --git a/python/pyspark/sql/tests/arrow/test_arrow_cogrouped_map.py 
b/python/pyspark/sql/tests/arrow/test_arrow_cogrouped_map.py
index 45c8a455a44a..8e7bf39b33ba 100644
--- a/python/pyspark/sql/tests/arrow/test_arrow_cogrouped_map.py
+++ b/python/pyspark/sql/tests/arrow/test_arrow_cogrouped_map.py
@@ -22,12 +22,8 @@ import logging
 from pyspark.errors import PythonException
 from pyspark.sql import Row
 from pyspark.sql import functions as sf
-from pyspark.testing.sqlutils import (
-    ReusedSQLTestCase,
-    have_pyarrow,
-    pyarrow_requirement_message,
-)
-from pyspark.testing.utils import assertDataFrameEqual
+from pyspark.testing.sqlutils import ReusedSQLTestCase
+from pyspark.testing.utils import assertDataFrameEqual, have_pyarrow, 
pyarrow_requirement_message
 from pyspark.util import is_remote_only
 
 if have_pyarrow:
diff --git a/python/pyspark/sql/tests/arrow/test_arrow_grouped_map.py 
b/python/pyspark/sql/tests/arrow/test_arrow_grouped_map.py
index d34c31220be6..3f11c87f2d41 100644
--- a/python/pyspark/sql/tests/arrow/test_arrow_grouped_map.py
+++ b/python/pyspark/sql/tests/arrow/test_arrow_grouped_map.py
@@ -25,12 +25,8 @@ from pyspark.errors import PythonException
 from pyspark.sql import Row, functions as sf
 from pyspark.sql.functions import array, col, explode, lit, mean, stddev
 from pyspark.sql.window import Window
-from pyspark.testing.sqlutils import (
-    ReusedSQLTestCase,
-    have_pyarrow,
-    pyarrow_requirement_message,
-)
-from pyspark.testing.utils import assertDataFrameEqual
+from pyspark.testing.sqlutils import ReusedSQLTestCase
+from pyspark.testing.utils import assertDataFrameEqual, have_pyarrow, 
pyarrow_requirement_message
 from pyspark.util import is_remote_only
 
 if have_pyarrow:
diff --git a/python/pyspark/sql/tests/arrow/test_arrow_map.py 
b/python/pyspark/sql/tests/arrow/test_arrow_map.py
index a15bdcca7362..af2626084925 100644
--- a/python/pyspark/sql/tests/arrow/test_arrow_map.py
+++ b/python/pyspark/sql/tests/arrow/test_arrow_map.py
@@ -20,15 +20,15 @@ import unittest
 import logging
 
 from pyspark.sql.utils import PythonException
-from pyspark.testing.sqlutils import (
-    ReusedSQLTestCase,
+from pyspark.testing.sqlutils import ReusedSQLTestCase
+from pyspark.sql import Row
+from pyspark.testing.utils import (
+    assertDataFrameEqual,
     have_pandas,
     have_pyarrow,
     pandas_requirement_message,
     pyarrow_requirement_message,
 )
-from pyspark.sql import Row
-from pyspark.testing.utils import assertDataFrameEqual
 from pyspark.util import is_remote_only
 
 if have_pyarrow:
diff --git a/python/pyspark/sql/tests/arrow/test_arrow_python_udf.py 
b/python/pyspark/sql/tests/arrow/test_arrow_python_udf.py
index 84190f1b1074..bddbe3b84e02 100644
--- a/python/pyspark/sql/tests/arrow/test_arrow_python_udf.py
+++ b/python/pyspark/sql/tests/arrow/test_arrow_python_udf.py
@@ -33,14 +33,14 @@ from pyspark.sql.types import (
     StructType,
     VarcharType,
 )
-from pyspark.testing.sqlutils import (
+from pyspark.testing.sqlutils import ReusedSQLTestCase
+from pyspark.testing.utils import (
+    assertDataFrameEqual,
     have_pandas,
     have_pyarrow,
     pandas_requirement_message,
     pyarrow_requirement_message,
-    ReusedSQLTestCase,
 )
-from pyspark.testing.utils import assertDataFrameEqual
 from pyspark.util import PythonEvalType
 
 
diff --git a/python/pyspark/sql/tests/arrow/test_arrow_udf.py 
b/python/pyspark/sql/tests/arrow/test_arrow_udf.py
index 81ba715f9485..a7f2ef197fa7 100644
--- a/python/pyspark/sql/tests/arrow/test_arrow_udf.py
+++ b/python/pyspark/sql/tests/arrow/test_arrow_udf.py
@@ -33,8 +33,8 @@ from pyspark.sql.types import (
 )
 from pyspark.errors import ParseException, PySparkTypeError
 from pyspark.util import PythonEvalType
-from pyspark.testing.sqlutils import (
-    ReusedSQLTestCase,
+from pyspark.testing.sqlutils import ReusedSQLTestCase
+from pyspark.testing.utils import (
     have_pyarrow,
     pyarrow_requirement_message,
 )
diff --git a/python/pyspark/sql/tests/arrow/test_arrow_udtf.py 
b/python/pyspark/sql/tests/arrow/test_arrow_udtf.py
index 1a7589467590..0e366d23632b 100644
--- a/python/pyspark/sql/tests/arrow/test_arrow_udtf.py
+++ b/python/pyspark/sql/tests/arrow/test_arrow_udtf.py
@@ -22,7 +22,8 @@ from pyspark.errors import PySparkAttributeError
 from pyspark.errors import PythonException
 from pyspark.sql.functions import arrow_udtf, lit
 from pyspark.sql.types import Row, StructType, StructField, IntegerType
-from pyspark.testing.sqlutils import ReusedSQLTestCase, have_pyarrow, 
pyarrow_requirement_message
+from pyspark.testing.sqlutils import ReusedSQLTestCase
+from pyspark.testing.utils import have_pyarrow, pyarrow_requirement_message
 from pyspark.testing import assertDataFrameEqual
 from pyspark.util import is_remote_only
 
diff --git 
a/python/pyspark/sql/tests/connect/test_connect_dataframe_property.py 
b/python/pyspark/sql/tests/connect/test_connect_dataframe_property.py
index d2a264a67406..a01293c74b47 100644
--- a/python/pyspark/sql/tests/connect/test_connect_dataframe_property.py
+++ b/python/pyspark/sql/tests/connect/test_connect_dataframe_property.py
@@ -30,7 +30,7 @@ from pyspark.sql.utils import is_remote
 from pyspark.sql import functions as SF
 from pyspark.testing.connectutils import should_test_connect, 
ReusedMixedTestCase
 from pyspark.testing.pandasutils import PandasOnSparkTestUtils
-from pyspark.testing.sqlutils import (
+from pyspark.testing.utils import (
     have_pandas,
     have_pyarrow,
     pandas_requirement_message,
diff --git 
a/python/pyspark/sql/tests/pandas/helper/helper_pandas_transform_with_state.py 
b/python/pyspark/sql/tests/pandas/helper/helper_pandas_transform_with_state.py
index 2ae88297b810..a16b6d3ca80f 100644
--- 
a/python/pyspark/sql/tests/pandas/helper/helper_pandas_transform_with_state.py
+++ 
b/python/pyspark/sql/tests/pandas/helper/helper_pandas_transform_with_state.py
@@ -39,7 +39,7 @@ from pyspark.sql.types import (
     ArrayType,
     MapType,
 )
-from pyspark.testing.sqlutils import have_pandas
+from pyspark.testing.utils import have_pandas
 
 if have_pandas:
     import pandas as pd
diff --git 
a/python/pyspark/sql/tests/pandas/streaming/test_pandas_transform_with_state.py 
b/python/pyspark/sql/tests/pandas/streaming/test_pandas_transform_with_state.py
index 51de1e0bff67..77cbe84f57e7 100644
--- 
a/python/pyspark/sql/tests/pandas/streaming/test_pandas_transform_with_state.py
+++ 
b/python/pyspark/sql/tests/pandas/streaming/test_pandas_transform_with_state.py
@@ -40,8 +40,8 @@ from pyspark.sql.types import (
     MapType,
     DoubleType,
 )
-from pyspark.testing.sqlutils import (
-    ReusedSQLTestCase,
+from pyspark.testing.sqlutils import ReusedSQLTestCase
+from pyspark.testing.utils import (
     have_pandas,
     have_pyarrow,
     pandas_requirement_message,
diff --git 
a/python/pyspark/sql/tests/pandas/streaming/test_pandas_transform_with_state_state_variable.py
 
b/python/pyspark/sql/tests/pandas/streaming/test_pandas_transform_with_state_state_variable.py
index 820903b1d1c6..da98085af5d3 100644
--- 
a/python/pyspark/sql/tests/pandas/streaming/test_pandas_transform_with_state_state_variable.py
+++ 
b/python/pyspark/sql/tests/pandas/streaming/test_pandas_transform_with_state_state_variable.py
@@ -36,8 +36,8 @@ from pyspark.sql.types import (
     IntegerType,
 )
 from pyspark.testing import assertDataFrameEqual
-from pyspark.testing.sqlutils import (
-    ReusedSQLTestCase,
+from pyspark.testing.sqlutils import ReusedSQLTestCase
+from pyspark.testing.utils import (
     have_pandas,
     have_pyarrow,
     pandas_requirement_message,
@@ -1010,7 +1010,7 @@ class TransformWithStateInPandasStateVariableTests(
 
 
 if __name__ == "__main__":
-    from pyspark.sql.tests.pandas.streaming.test_pandas_transform_with_state 
import *
+    from pyspark.sql.tests.pandas.streaming.test_pandas_transform_with_state 
import *  # noqa: F403
 
     from pyspark.testing import main
 
diff --git 
a/python/pyspark/sql/tests/pandas/streaming/test_transform_with_state.py 
b/python/pyspark/sql/tests/pandas/streaming/test_transform_with_state.py
index ac96c6d6b83a..995e7f5b2fab 100644
--- a/python/pyspark/sql/tests/pandas/streaming/test_transform_with_state.py
+++ b/python/pyspark/sql/tests/pandas/streaming/test_transform_with_state.py
@@ -18,10 +18,10 @@
 import os
 import unittest
 from pyspark import SparkConf
-from pyspark.testing.sqlutils import (
+from pyspark.testing.sqlutils import ReusedSQLTestCase
+from pyspark.testing.utils import (
     have_pyarrow,
     pyarrow_requirement_message,
-    ReusedSQLTestCase,
 )
 from pyspark.sql.tests.pandas.streaming.test_pandas_transform_with_state 
import (
     TransformWithStateTestsMixin,
diff --git 
a/python/pyspark/sql/tests/pandas/streaming/test_transform_with_state_state_variable.py
 
b/python/pyspark/sql/tests/pandas/streaming/test_transform_with_state_state_variable.py
index 437f11dcb714..16edb07c2f31 100644
--- 
a/python/pyspark/sql/tests/pandas/streaming/test_transform_with_state_state_variable.py
+++ 
b/python/pyspark/sql/tests/pandas/streaming/test_transform_with_state_state_variable.py
@@ -18,10 +18,10 @@
 import os
 import unittest
 from pyspark import SparkConf
-from pyspark.testing.sqlutils import (
+from pyspark.testing.sqlutils import ReusedSQLTestCase
+from pyspark.testing.utils import (
     have_pyarrow,
     pyarrow_requirement_message,
-    ReusedSQLTestCase,
 )
 
 from 
pyspark.sql.tests.pandas.streaming.test_pandas_transform_with_state_state_variable
 import (
diff --git a/python/pyspark/sql/tests/pandas/streaming/test_tws_tester.py 
b/python/pyspark/sql/tests/pandas/streaming/test_tws_tester.py
index 6263ac30b9f1..3f7c9bdc7248 100644
--- a/python/pyspark/sql/tests/pandas/streaming/test_tws_tester.py
+++ b/python/pyspark/sql/tests/pandas/streaming/test_tws_tester.py
@@ -47,8 +47,8 @@ from pyspark.sql.types import (
 )
 from pyspark.errors import PySparkValueError, PySparkAssertionError
 from pyspark.errors.exceptions.base import IllegalArgumentException
-from pyspark.testing.sqlutils import (
-    ReusedSQLTestCase,
+from pyspark.testing.sqlutils import ReusedSQLTestCase
+from pyspark.testing.utils import (
     have_pandas,
     have_pyarrow,
     pandas_requirement_message,
diff --git a/python/pyspark/sql/tests/pandas/test_converter.py 
b/python/pyspark/sql/tests/pandas/test_converter.py
index 30828b1ccb3b..3fbe4109fd15 100644
--- a/python/pyspark/sql/tests/pandas/test_converter.py
+++ b/python/pyspark/sql/tests/pandas/test_converter.py
@@ -24,7 +24,7 @@ from pyspark.sql.types import (
     StructType,
     Row,
 )
-from pyspark.testing.sqlutils import (
+from pyspark.testing.utils import (
     have_pandas,
     have_pyarrow,
     pandas_requirement_message,
diff --git a/python/pyspark/sql/tests/pandas/test_pandas_cogrouped_map.py 
b/python/pyspark/sql/tests/pandas/test_pandas_cogrouped_map.py
index 88b5b9138f71..cfaa7e850d57 100644
--- a/python/pyspark/sql/tests/pandas/test_pandas_cogrouped_map.py
+++ b/python/pyspark/sql/tests/pandas/test_pandas_cogrouped_map.py
@@ -32,14 +32,14 @@ from pyspark.sql.types import (
 )
 from pyspark.sql.window import Window
 from pyspark.errors import IllegalArgumentException, PythonException
-from pyspark.testing.sqlutils import (
-    ReusedSQLTestCase,
+from pyspark.testing.sqlutils import ReusedSQLTestCase
+from pyspark.testing.utils import (
+    assertDataFrameEqual,
     have_pandas,
     have_pyarrow,
     pandas_requirement_message,
     pyarrow_requirement_message,
 )
-from pyspark.testing.utils import assertDataFrameEqual
 from pyspark.util import is_remote_only
 
 if have_pandas:
diff --git a/python/pyspark/sql/tests/pandas/test_pandas_grouped_map.py 
b/python/pyspark/sql/tests/pandas/test_pandas_grouped_map.py
index 76def82729b9..3367d6a3ae02 100644
--- a/python/pyspark/sql/tests/pandas/test_pandas_grouped_map.py
+++ b/python/pyspark/sql/tests/pandas/test_pandas_grouped_map.py
@@ -46,14 +46,14 @@ from pyspark.sql.types import (
     YearMonthIntervalType,
 )
 from pyspark.errors import PythonException, PySparkTypeError, PySparkValueError
-from pyspark.testing.sqlutils import (
-    ReusedSQLTestCase,
+from pyspark.testing.sqlutils import ReusedSQLTestCase
+from pyspark.testing.utils import (
+    assertDataFrameEqual,
     have_pandas,
     have_pyarrow,
     pandas_requirement_message,
     pyarrow_requirement_message,
 )
-from pyspark.testing.utils import assertDataFrameEqual
 from pyspark.util import is_remote_only
 
 if have_pyarrow and have_pandas:
diff --git 
a/python/pyspark/sql/tests/pandas/test_pandas_grouped_map_with_state.py 
b/python/pyspark/sql/tests/pandas/test_pandas_grouped_map_with_state.py
index dddc210e0ddd..7d536e9a9175 100644
--- a/python/pyspark/sql/tests/pandas/test_pandas_grouped_map_with_state.py
+++ b/python/pyspark/sql/tests/pandas/test_pandas_grouped_map_with_state.py
@@ -33,14 +33,14 @@ from pyspark.sql.types import (
     Row,
     DecimalType,
 )
-from pyspark.testing.sqlutils import (
-    ReusedSQLTestCase,
+from pyspark.testing.sqlutils import ReusedSQLTestCase
+from pyspark.testing.utils import (
+    eventually,
     have_pandas,
     have_pyarrow,
     pandas_requirement_message,
     pyarrow_requirement_message,
 )
-from pyspark.testing.utils import eventually
 
 if have_pandas:
     import pandas as pd
diff --git a/python/pyspark/sql/tests/pandas/test_pandas_map.py 
b/python/pyspark/sql/tests/pandas/test_pandas_map.py
index 43c41ed54ebd..1df1828d2fb2 100644
--- a/python/pyspark/sql/tests/pandas/test_pandas_map.py
+++ b/python/pyspark/sql/tests/pandas/test_pandas_map.py
@@ -27,14 +27,15 @@ from pyspark.sql.functions import col, encode, lit
 from pyspark.errors import PythonException
 from pyspark.sql.session import SparkSession
 from pyspark.sql.types import StructType
-from pyspark.testing.sqlutils import (
-    ReusedSQLTestCase,
+from pyspark.testing.sqlutils import ReusedSQLTestCase
+from pyspark.testing.utils import (
+    assertDataFrameEqual,
+    eventually,
     have_pandas,
     have_pyarrow,
     pandas_requirement_message,
     pyarrow_requirement_message,
 )
-from pyspark.testing.utils import assertDataFrameEqual, eventually
 from pyspark.util import is_remote_only
 
 if have_pandas:
diff --git a/python/pyspark/sql/tests/pandas/test_pandas_sqlmetrics.py 
b/python/pyspark/sql/tests/pandas/test_pandas_sqlmetrics.py
index fc1fa70fe6c2..d4739c76bb9f 100644
--- a/python/pyspark/sql/tests/pandas/test_pandas_sqlmetrics.py
+++ b/python/pyspark/sql/tests/pandas/test_pandas_sqlmetrics.py
@@ -17,8 +17,8 @@
 
 import unittest
 from pyspark.sql.functions import pandas_udf
-from pyspark.testing.sqlutils import (
-    ReusedSQLTestCase,
+from pyspark.testing.sqlutils import ReusedSQLTestCase
+from pyspark.testing.utils import (
     have_pandas,
     have_pyarrow,
     pandas_requirement_message,
diff --git a/python/pyspark/sql/tests/pandas/test_pandas_udf.py 
b/python/pyspark/sql/tests/pandas/test_pandas_udf.py
index 6fced7557158..370629696807 100644
--- a/python/pyspark/sql/tests/pandas/test_pandas_udf.py
+++ b/python/pyspark/sql/tests/pandas/test_pandas_udf.py
@@ -29,8 +29,8 @@ from pyspark.sql.types import (
 )
 from pyspark.errors import ParseException, PythonException, PySparkTypeError
 from pyspark.util import PythonEvalType
-from pyspark.testing.sqlutils import (
-    ReusedSQLTestCase,
+from pyspark.testing.sqlutils import ReusedSQLTestCase
+from pyspark.testing.utils import (
     have_pandas,
     have_pyarrow,
     pandas_requirement_message,
diff --git a/python/pyspark/sql/tests/pandas/test_pandas_udf_grouped_agg.py 
b/python/pyspark/sql/tests/pandas/test_pandas_udf_grouped_agg.py
index fb8a229da97e..fae97348a49c 100644
--- a/python/pyspark/sql/tests/pandas/test_pandas_udf_grouped_agg.py
+++ b/python/pyspark/sql/tests/pandas/test_pandas_udf_grouped_agg.py
@@ -33,14 +33,14 @@ from pyspark.sql.functions import (
 )
 from pyspark.sql.types import ArrayType, YearMonthIntervalType
 from pyspark.errors import AnalysisException, PySparkNotImplementedError, 
PythonException
-from pyspark.testing.sqlutils import (
-    ReusedSQLTestCase,
+from pyspark.testing.sqlutils import ReusedSQLTestCase
+from pyspark.testing.utils import (
+    assertDataFrameEqual,
     have_pandas,
     have_pyarrow,
     pandas_requirement_message,
     pyarrow_requirement_message,
 )
-from pyspark.testing.utils import assertDataFrameEqual
 
 
 if have_pandas:
diff --git a/python/pyspark/sql/tests/pandas/test_pandas_udf_scalar.py 
b/python/pyspark/sql/tests/pandas/test_pandas_udf_scalar.py
index a9656bfce4a6..ad3a3bea3e53 100644
--- a/python/pyspark/sql/tests/pandas/test_pandas_udf_scalar.py
+++ b/python/pyspark/sql/tests/pandas/test_pandas_udf_scalar.py
@@ -65,12 +65,14 @@ from pyspark.testing.sqlutils import (
     ReusedSQLTestCase,
     test_compiled,
     test_not_compiled_message,
+)
+from pyspark.testing.utils import (
+    assertDataFrameEqual,
     have_pandas,
     have_pyarrow,
     pandas_requirement_message,
     pyarrow_requirement_message,
 )
-from pyspark.testing.utils import assertDataFrameEqual
 
 if have_pandas:
     import pandas as pd
diff --git a/python/pyspark/sql/tests/pandas/test_pandas_udf_typehints.py 
b/python/pyspark/sql/tests/pandas/test_pandas_udf_typehints.py
index d368bff06629..cdeb12d5db4e 100644
--- a/python/pyspark/sql/tests/pandas/test_pandas_udf_typehints.py
+++ b/python/pyspark/sql/tests/pandas/test_pandas_udf_typehints.py
@@ -19,8 +19,8 @@ from inspect import signature
 from typing import Union, Iterator, Tuple, get_type_hints
 
 from pyspark.sql.functions import mean, lit
-from pyspark.testing.sqlutils import (
-    ReusedSQLTestCase,
+from pyspark.testing.sqlutils import ReusedSQLTestCase
+from pyspark.testing.utils import (
     have_pandas,
     have_pyarrow,
     pandas_requirement_message,
diff --git 
a/python/pyspark/sql/tests/pandas/test_pandas_udf_typehints_with_future_annotations.py
 
b/python/pyspark/sql/tests/pandas/test_pandas_udf_typehints_with_future_annotations.py
index ecf3c91f1f91..94dcaa3b6caf 100644
--- 
a/python/pyspark/sql/tests/pandas/test_pandas_udf_typehints_with_future_annotations.py
+++ 
b/python/pyspark/sql/tests/pandas/test_pandas_udf_typehints_with_future_annotations.py
@@ -21,8 +21,8 @@ from inspect import signature
 from typing import Union, Iterator, Tuple, get_type_hints
 
 from pyspark.sql.functions import mean, lit
-from pyspark.testing.sqlutils import (
-    ReusedSQLTestCase,
+from pyspark.testing.sqlutils import ReusedSQLTestCase
+from pyspark.testing.utils import (
     have_pandas,
     have_pyarrow,
     pandas_requirement_message,
diff --git a/python/pyspark/sql/tests/pandas/test_pandas_udf_window.py 
b/python/pyspark/sql/tests/pandas/test_pandas_udf_window.py
index 27d1f3dc21c6..6d4edc61d6a1 100644
--- a/python/pyspark/sql/tests/pandas/test_pandas_udf_window.py
+++ b/python/pyspark/sql/tests/pandas/test_pandas_udf_window.py
@@ -30,14 +30,14 @@ from pyspark.sql.types import (
     FloatType,
     DoubleType,
 )
-from pyspark.testing.sqlutils import (
-    ReusedSQLTestCase,
+from pyspark.testing.sqlutils import ReusedSQLTestCase
+from pyspark.testing.utils import (
+    assertDataFrameEqual,
     have_pandas,
     have_pyarrow,
     pandas_requirement_message,
     pyarrow_requirement_message,
 )
-from pyspark.testing.utils import assertDataFrameEqual
 from pyspark.sql import Row
 from pyspark.util import is_remote_only
 
diff --git 
a/python/pyspark/sql/tests/streaming/test_streaming_offline_state_repartition.py
 
b/python/pyspark/sql/tests/streaming/test_streaming_offline_state_repartition.py
index 42b64aad367f..10a8ea7d6b7e 100644
--- 
a/python/pyspark/sql/tests/streaming/test_streaming_offline_state_repartition.py
+++ 
b/python/pyspark/sql/tests/streaming/test_streaming_offline_state_repartition.py
@@ -28,8 +28,8 @@ from 
pyspark.sql.tests.pandas.helper.helper_pandas_transform_with_state import (
     StatefulProcessorCompositeTypeFactory,
 )
 from pyspark.sql.types import LongType, StringType, StructType, StructField
-from pyspark.testing.sqlutils import (
-    ReusedSQLTestCase,
+from pyspark.testing.sqlutils import ReusedSQLTestCase
+from pyspark.testing.utils import (
     have_pandas,
     have_pyarrow,
     pandas_requirement_message,
diff --git a/python/pyspark/sql/tests/test_collection.py 
b/python/pyspark/sql/tests/test_collection.py
index fb04d59989db..c6d44d614474 100644
--- a/python/pyspark/sql/tests/test_collection.py
+++ b/python/pyspark/sql/tests/test_collection.py
@@ -32,14 +32,14 @@ from pyspark.sql.types import (
     FloatType,
     DayTimeIntervalType,
 )
-from pyspark.testing.sqlutils import (
-    ReusedSQLTestCase,
+from pyspark.testing.sqlutils import ReusedSQLTestCase
+from pyspark.testing.utils import (
+    assertDataFrameEqual,
     have_pyarrow,
     have_pandas,
     pandas_requirement_message,
     pyarrow_requirement_message,
 )
-from pyspark.testing.utils import assertDataFrameEqual
 
 
 class DataFrameCollectionTestsMixin:
diff --git a/python/pyspark/sql/tests/test_column.py 
b/python/pyspark/sql/tests/test_column.py
index e711b478d05e..1983b291a5a3 100644
--- a/python/pyspark/sql/tests/test_column.py
+++ b/python/pyspark/sql/tests/test_column.py
@@ -25,7 +25,8 @@ from pyspark.sql import Column, Row
 from pyspark.sql import functions as sf
 from pyspark.sql.types import StructType, StructField, IntegerType, LongType
 from pyspark.errors import AnalysisException, PySparkTypeError, 
PySparkValueError
-from pyspark.testing.sqlutils import ReusedSQLTestCase, have_pandas, 
pandas_requirement_message
+from pyspark.testing.sqlutils import ReusedSQLTestCase
+from pyspark.testing.utils import have_pandas, pandas_requirement_message
 
 
 class ColumnTestsMixin:
diff --git a/python/pyspark/sql/tests/test_creation.py 
b/python/pyspark/sql/tests/test_creation.py
index 439bb60dbea3..906dab969201 100644
--- a/python/pyspark/sql/tests/test_creation.py
+++ b/python/pyspark/sql/tests/test_creation.py
@@ -36,8 +36,8 @@ from pyspark.errors import (
     PySparkValueError,
 )
 from pyspark.testing import assertDataFrameEqual
-from pyspark.testing.sqlutils import (
-    ReusedSQLTestCase,
+from pyspark.testing.sqlutils import ReusedSQLTestCase
+from pyspark.testing.utils import (
     have_pandas,
     have_pyarrow,
     pandas_requirement_message,
diff --git a/python/pyspark/sql/tests/test_dataframe.py 
b/python/pyspark/sql/tests/test_dataframe.py
index 7aa25605b2ee..d7cc1860fa89 100644
--- a/python/pyspark/sql/tests/test_dataframe.py
+++ b/python/pyspark/sql/tests/test_dataframe.py
@@ -56,6 +56,8 @@ from pyspark.testing import assertDataFrameEqual
 from pyspark.testing.sqlutils import (
     ReusedSQLTestCase,
     SPARK_HOME,
+)
+from pyspark.testing.utils import (
     have_pyarrow,
     have_pandas,
     pandas_requirement_message,
diff --git a/python/pyspark/sql/tests/test_group.py 
b/python/pyspark/sql/tests/test_group.py
index 9d67fe2a3024..af2ee6801f4d 100644
--- a/python/pyspark/sql/tests/test_group.py
+++ b/python/pyspark/sql/tests/test_group.py
@@ -19,8 +19,8 @@ import unittest
 from pyspark.sql import Row
 from pyspark.sql import functions as sf
 from pyspark.errors import AnalysisException
-from pyspark.testing.sqlutils import (
-    ReusedSQLTestCase,
+from pyspark.testing.sqlutils import ReusedSQLTestCase
+from pyspark.testing.utils import (
     have_pandas,
     have_pyarrow,
     pandas_requirement_message,
diff --git a/python/pyspark/sql/tests/test_listener.py 
b/python/pyspark/sql/tests/test_listener.py
index 05480808c74d..b83b4a96f128 100644
--- a/python/pyspark/sql/tests/test_listener.py
+++ b/python/pyspark/sql/tests/test_listener.py
@@ -18,8 +18,8 @@
 import os
 import unittest
 from pyspark.sql import SparkSession
-from pyspark.testing.sqlutils import (
-    SQLTestUtils,
+from pyspark.testing.sqlutils import SQLTestUtils
+from pyspark.testing.utils import (
     have_pyarrow,
     have_pandas,
     pandas_requirement_message,
diff --git a/python/pyspark/sql/tests/test_python_datasource.py 
b/python/pyspark/sql/tests/test_python_datasource.py
index eefabb3e7ea0..d78e7ecb0c03 100644
--- a/python/pyspark/sql/tests/test_python_datasource.py
+++ b/python/pyspark/sql/tests/test_python_datasource.py
@@ -59,6 +59,8 @@ from pyspark.testing import assertDataFrameEqual
 from pyspark.testing.sqlutils import (
     SPARK_HOME,
     ReusedSQLTestCase,
+)
+from pyspark.testing.utils import (
     have_pyarrow,
     pyarrow_requirement_message,
 )
diff --git a/python/pyspark/sql/tests/test_python_streaming_datasource.py 
b/python/pyspark/sql/tests/test_python_streaming_datasource.py
index 5f6aaf10fe01..1ed7795a40e3 100644
--- a/python/pyspark/sql/tests/test_python_streaming_datasource.py
+++ b/python/pyspark/sql/tests/test_python_streaming_datasource.py
@@ -37,13 +37,9 @@ from pyspark.sql.streaming.datasource import (
 )
 from pyspark.sql.streaming import StreamingQueryException
 from pyspark.sql.types import Row
-from pyspark.testing.sqlutils import (
-    have_pyarrow,
-    pyarrow_requirement_message,
-)
 from pyspark.errors import PySparkException
 from pyspark.testing import assertDataFrameEqual
-from pyspark.testing.utils import eventually
+from pyspark.testing.utils import eventually, have_pyarrow, 
pyarrow_requirement_message
 from pyspark.testing.sqlutils import ReusedSQLTestCase
 
 
diff --git a/python/pyspark/sql/tests/test_resources.py 
b/python/pyspark/sql/tests/test_resources.py
index a7a26ed02dc3..3adb33a9904b 100644
--- a/python/pyspark/sql/tests/test_resources.py
+++ b/python/pyspark/sql/tests/test_resources.py
@@ -19,13 +19,13 @@ import unittest
 from pyspark import TaskContext
 from pyspark.resource import TaskResourceRequests, ResourceProfileBuilder
 from pyspark.sql import SparkSession
-from pyspark.testing.sqlutils import (
+from pyspark.testing.utils import (
+    ReusedPySparkTestCase,
     have_pandas,
     have_pyarrow,
     pandas_requirement_message,
     pyarrow_requirement_message,
 )
-from pyspark.testing.utils import ReusedPySparkTestCase
 
 
 @unittest.skipIf(
diff --git a/python/pyspark/sql/tests/test_udtf.py 
b/python/pyspark/sql/tests/test_udtf.py
index 8795f3039821..de201b842c78 100644
--- a/python/pyspark/sql/tests/test_udtf.py
+++ b/python/pyspark/sql/tests/test_udtf.py
@@ -68,12 +68,12 @@ from pyspark.sql.types import (
 from pyspark.logger import PySparkLogger
 from pyspark.testing import assertDataFrameEqual, assertSchemaEqual
 from pyspark.testing.objects import ExamplePoint, ExamplePointUDT
-from pyspark.testing.sqlutils import (
+from pyspark.testing.sqlutils import ReusedSQLTestCase
+from pyspark.testing.utils import (
     have_pandas,
     have_pyarrow,
     pandas_requirement_message,
     pyarrow_requirement_message,
-    ReusedSQLTestCase,
 )
 from pyspark.util import is_remote_only
 
diff --git a/python/pyspark/sql/tests/test_utils.py 
b/python/pyspark/sql/tests/test_utils.py
index f2397e1365fe..239e028ced98 100644
--- a/python/pyspark/sql/tests/test_utils.py
+++ b/python/pyspark/sql/tests/test_utils.py
@@ -29,7 +29,14 @@ from pyspark.errors import (
     SparkUpgradeException,
     PySparkTypeError,
 )
-from pyspark.testing.utils import assertDataFrameEqual, assertSchemaEqual, 
_context_diff, have_numpy
+from pyspark.testing.utils import (
+    assertDataFrameEqual,
+    assertSchemaEqual,
+    _context_diff,
+    have_numpy,
+    have_pandas,
+    have_pyarrow,
+)
 from pyspark.testing.sqlutils import ReusedSQLTestCase
 from pyspark.sql import Row
 import pyspark.sql.functions as F
@@ -47,7 +54,6 @@ from pyspark.sql.types import (
     IntegerType,
     BooleanType,
 )
-from pyspark.testing.sqlutils import have_pandas, have_pyarrow
 
 
 class UtilsTestsMixin:
diff --git a/python/pyspark/testing/connectutils.py 
b/python/pyspark/testing/connectutils.py
index 08c86561b57a..32db9fe87ec4 100644
--- a/python/pyspark/testing/connectutils.py
+++ b/python/pyspark/testing/connectutils.py
@@ -28,16 +28,6 @@ from pyspark.loose_version import LooseVersion
 from pyspark.util import is_remote_only
 from pyspark.testing.utils import (
     have_pandas,
-    pandas_requirement_message,
-    pyarrow_requirement_message,
-    have_graphviz,
-    graphviz_requirement_message,
-    grpc_requirement_message,
-    have_grpc,
-    grpc_status_requirement_message,
-    have_grpc_status,
-    googleapis_common_protos_requirement_message,
-    have_googleapis_common_protos,
     connect_requirement_message,
     should_test_connect,
     PySparkErrorTestUtils,
diff --git a/python/pyspark/testing/pandasutils.py 
b/python/pyspark/testing/pandasutils.py
index 9a019d88f509..0cd8f7f8cf46 100644
--- a/python/pyspark/testing/pandasutils.py
+++ b/python/pyspark/testing/pandasutils.py
@@ -47,7 +47,6 @@ def _assert_pandas_equal(
     right: Union[pd.DataFrame, pd.Series, pd.Index],
     checkExact: bool,
 ):
-    from pandas.core.dtypes.common import is_numeric_dtype
     from pandas.testing import assert_frame_equal, assert_index_equal, 
assert_series_equal
 
     if isinstance(left, pd.DataFrame) and isinstance(right, pd.DataFrame):
diff --git a/python/pyspark/testing/sqlutils.py 
b/python/pyspark/testing/sqlutils.py
index 00b3a1ef504d..7d81c4b4e70f 100644
--- a/python/pyspark/testing/sqlutils.py
+++ b/python/pyspark/testing/sqlutils.py
@@ -27,10 +27,6 @@ from pyspark.sql.types import Row
 from pyspark.testing.utils import (
     ReusedPySparkTestCase,
     PySparkErrorTestUtils,
-    have_pandas,
-    pandas_requirement_message,
-    have_pyarrow,
-    pyarrow_requirement_message,
 )
 from pyspark.find_spark_home import _find_spark_home
 
diff --git a/python/pyspark/testing/tests/test_no_tests.py 
b/python/pyspark/testing/tests/test_no_tests.py
index f60a6f5a3f08..b929b8b61ce8 100644
--- a/python/pyspark/testing/tests/test_no_tests.py
+++ b/python/pyspark/testing/tests/test_no_tests.py
@@ -14,8 +14,6 @@
 # See the License for the specific language governing permissions and
 # limitations under the License.
 #
-import sys
-
 
 if __name__ == "__main__":
     from pyspark.testing import main
diff --git a/python/pyspark/testing/tests/test_skip_class.py 
b/python/pyspark/testing/tests/test_skip_class.py
index 271776fdad0b..4642de35be65 100644
--- a/python/pyspark/testing/tests/test_skip_class.py
+++ b/python/pyspark/testing/tests/test_skip_class.py
@@ -14,7 +14,6 @@
 # See the License for the specific language governing permissions and
 # limitations under the License.
 #
-import sys
 import unittest
 
 
diff --git a/python/pyspark/testing/utils.py b/python/pyspark/testing/utils.py
index c0f6f5e5db2f..bc58873e384b 100644
--- a/python/pyspark/testing/utils.py
+++ b/python/pyspark/testing/utils.py
@@ -105,7 +105,7 @@ grpc_status_requirement_message = "" if have_grpc_status 
else "No module named '
 googleapis_common_protos_requirement_message = ""
 
 try:
-    from google.rpc import error_details_pb2
+    from google.rpc import error_details_pb2  # noqa: F401
 except ImportError as e:
     googleapis_common_protos_requirement_message = str(e)
 have_googleapis_common_protos = not 
googleapis_common_protos_requirement_message
@@ -960,7 +960,7 @@ def assertDataFrameEqual(
 
     has_arrow = False
     try:
-        import pyarrow
+        import pyarrow  # noqa: F401
 
         has_arrow = True
     except ImportError:
diff --git a/python/pyspark/tests/test_memory_profiler.py 
b/python/pyspark/tests/test_memory_profiler.py
index c23ea5e5bc32..e260e5997990 100644
--- a/python/pyspark/tests/test_memory_profiler.py
+++ b/python/pyspark/tests/test_memory_profiler.py
@@ -31,14 +31,14 @@ from pyspark.profiler import has_memory_profiler
 from pyspark.sql import SparkSession
 from pyspark.sql.functions import col, pandas_udf, udf
 from pyspark.sql.window import Window
-from pyspark.testing.sqlutils import (
+from pyspark.testing.sqlutils import ReusedSQLTestCase
+from pyspark.testing.utils import (
+    PySparkTestCase,
     have_pandas,
     have_pyarrow,
     pandas_requirement_message,
     pyarrow_requirement_message,
-    ReusedSQLTestCase,
 )
-from pyspark.testing.utils import PySparkTestCase
 
 
 def _do_computation(spark, *, action=lambda df: df.collect(), use_arrow=False):
diff --git a/python/pyspark/tests/test_rdd.py b/python/pyspark/tests/test_rdd.py
index b37038f66f82..f9346f04a4a8 100644
--- a/python/pyspark/tests/test_rdd.py
+++ b/python/pyspark/tests/test_rdd.py
@@ -36,8 +36,8 @@ from pyspark.serializers import (
     NoOpSerializer,
 )
 from pyspark.sql import SparkSession
-from pyspark.testing.utils import ReusedPySparkTestCase, QuietTest, have_numpy
-from pyspark.testing.sqlutils import SPARK_HOME, have_pandas
+from pyspark.testing.utils import ReusedPySparkTestCase, QuietTest, 
have_numpy, have_pandas
+from pyspark.testing.sqlutils import SPARK_HOME
 
 
 global_func = lambda: "Hi"  # noqa: E731


---------------------------------------------------------------------
To unsubscribe, e-mail: [email protected]
For additional commands, e-mail: [email protected]

Reply via email to