This is an automated email from the ASF dual-hosted git repository.
gurwls223 pushed a commit to branch master
in repository https://gitbox.apache.org/repos/asf/spark.git
The following commit(s) were added to refs/heads/master by this push:
new 8d6450229127 [SPARK-55667][PYTHON][CONNECT] Move check_dependencies to
__init__
8d6450229127 is described below
commit 8d645022912715201da122b654155b9f1edb9316
Author: Tian Gao <[email protected]>
AuthorDate: Thu Mar 12 09:25:35 2026 +0900
[SPARK-55667][PYTHON][CONNECT] Move check_dependencies to __init__
### What changes were proposed in this pull request?
* Modified `check_dependencies` so it does not need `__name__` to check
whether this is a doctest call. Now it checks whether `sys.modules['__main__']`
is initialized to determine if we are running a doctest. If we are running a
module under `pyspark.sql.connect` or `pyspark.ml.connect` directly, this
function will be executed before the module is initialized. Otherwise the
`__main__` module should have a `__spec__` or `__file__` associated to it.
* Removed all the `check_dependencies` usages in different files and moved
it to the `__init__` file of `pyspark.sql.connect` and `pyspark.ml.connect`
### Why are the changes needed?
There are some downsides to having `check_dependencies` scattered in
different files
* People don't really know what it's about. Some files have it and some
don't.
* It's against PEP 8, which says all imports should happen before any
executable code. We may have some exceptions but we should not do it in too
many files.
* When we add a new file in the future, people might forget to do this (or
they don't know if they should do it)
With this change, people can still get a decent error message when they
don't have the dependencies while they are using connect. Doctests are skipped
properly.
### Does this PR introduce _any_ user-facing change?
No.
### How was this patch tested?
Did some manual test but still need to check CI.
### Was this patch authored or co-authored using generative AI tooling?
Only the removing part by Cursor (claude-4.6-opus-high)
Closes #54463 from gaogaotiantian/refactor-check-dependency.
Authored-by: Tian Gao <[email protected]>
Signed-off-by: Hyukjin Kwon <[email protected]>
---
python/pyspark/ml/connect/__init__.py | 5 +++++
python/pyspark/ml/connect/proto.py | 4 ----
python/pyspark/ml/connect/readwrite.py | 4 ----
python/pyspark/ml/connect/serialize.py | 4 ----
python/pyspark/sql/connect/__init__.py | 4 ++++
python/pyspark/sql/connect/avro/functions.py | 3 ---
python/pyspark/sql/connect/catalog.py | 3 ---
python/pyspark/sql/connect/client/__init__.py | 4 ----
python/pyspark/sql/connect/client/artifact.py | 3 ---
python/pyspark/sql/connect/client/core.py | 3 ---
python/pyspark/sql/connect/client/reattach.py | 3 ---
python/pyspark/sql/connect/column.py | 4 ----
python/pyspark/sql/connect/conf.py | 3 ---
python/pyspark/sql/connect/conversion.py | 5 -----
python/pyspark/sql/connect/dataframe.py | 3 ---
python/pyspark/sql/connect/datasource.py | 4 ----
python/pyspark/sql/connect/expressions.py | 4 ----
python/pyspark/sql/connect/functions/builtin.py | 4 ----
.../pyspark/sql/connect/functions/partitioning.py | 4 ----
python/pyspark/sql/connect/group.py | 4 ----
python/pyspark/sql/connect/merge.py | 4 ----
python/pyspark/sql/connect/observation.py | 4 ----
python/pyspark/sql/connect/plan.py | 3 ---
python/pyspark/sql/connect/protobuf/functions.py | 4 ----
python/pyspark/sql/connect/readwriter.py | 4 ----
python/pyspark/sql/connect/session.py | 3 ---
python/pyspark/sql/connect/streaming/query.py | 4 ----
python/pyspark/sql/connect/streaming/readwriter.py | 4 ----
python/pyspark/sql/connect/types.py | 4 ----
python/pyspark/sql/connect/udf.py | 4 ----
python/pyspark/sql/connect/udtf.py | 4 ----
python/pyspark/sql/connect/utils.py | 22 ++++++++++++----------
python/pyspark/sql/connect/window.py | 4 ----
.../sql/tests/connect/test_connect_function.py | 4 +---
.../sql/tests/test_connect_compatibility.py | 6 +++---
35 files changed, 25 insertions(+), 128 deletions(-)
diff --git a/python/pyspark/ml/connect/__init__.py
b/python/pyspark/ml/connect/__init__.py
index 6a5453db0be9..c4bc8c9d84d2 100644
--- a/python/pyspark/ml/connect/__init__.py
+++ b/python/pyspark/ml/connect/__init__.py
@@ -16,6 +16,11 @@
#
"""Spark Connect Python Client - ML module"""
+
+from pyspark.sql.connect.utils import check_dependencies
+
+check_dependencies()
+
from pyspark.ml.connect.base import (
Estimator,
Transformer,
diff --git a/python/pyspark/ml/connect/proto.py
b/python/pyspark/ml/connect/proto.py
index 7cffd32631ba..eecf971440fb 100644
--- a/python/pyspark/ml/connect/proto.py
+++ b/python/pyspark/ml/connect/proto.py
@@ -14,10 +14,6 @@
# See the License for the specific language governing permissions and
# limitations under the License.
#
-from pyspark.sql.connect.utils import check_dependencies
-
-check_dependencies(__name__)
-
from typing import Optional, TYPE_CHECKING, List
import pyspark.sql.connect.proto as pb2
diff --git a/python/pyspark/ml/connect/readwrite.py
b/python/pyspark/ml/connect/readwrite.py
index 43b724780a54..efe6d60e0e2c 100644
--- a/python/pyspark/ml/connect/readwrite.py
+++ b/python/pyspark/ml/connect/readwrite.py
@@ -14,10 +14,6 @@
# See the License for the specific language governing permissions and
# limitations under the License.
#
-from pyspark.sql.connect.utils import check_dependencies
-
-check_dependencies(__name__)
-
from typing import cast, Type, TYPE_CHECKING, Union, Dict, Any
import pyspark.sql.connect.proto as pb2
diff --git a/python/pyspark/ml/connect/serialize.py
b/python/pyspark/ml/connect/serialize.py
index 37102d463b05..42bedfb330b1 100644
--- a/python/pyspark/ml/connect/serialize.py
+++ b/python/pyspark/ml/connect/serialize.py
@@ -14,10 +14,6 @@
# See the License for the specific language governing permissions and
# limitations under the License.
#
-from pyspark.sql.connect.utils import check_dependencies
-
-check_dependencies(__name__)
-
from typing import Any, List, TYPE_CHECKING, Mapping, Dict
import pyspark.sql.connect.proto as pb2
diff --git a/python/pyspark/sql/connect/__init__.py
b/python/pyspark/sql/connect/__init__.py
index 93236e8294fe..cff3174b321d 100644
--- a/python/pyspark/sql/connect/__init__.py
+++ b/python/pyspark/sql/connect/__init__.py
@@ -16,3 +16,7 @@
#
"""Spark Connect client"""
+
+from pyspark.sql.connect.utils import check_dependencies
+
+check_dependencies()
diff --git a/python/pyspark/sql/connect/avro/functions.py
b/python/pyspark/sql/connect/avro/functions.py
index 55067c33dd49..611c1ac1d1f3 100644
--- a/python/pyspark/sql/connect/avro/functions.py
+++ b/python/pyspark/sql/connect/avro/functions.py
@@ -20,9 +20,6 @@ A collections of builtin avro functions
"""
from pyspark.errors import PySparkTypeError
-from pyspark.sql.connect.utils import check_dependencies
-
-check_dependencies(__name__)
from typing import Dict, Optional, TYPE_CHECKING
diff --git a/python/pyspark/sql/connect/catalog.py
b/python/pyspark/sql/connect/catalog.py
index 85bf3caaf94d..3575f9a187c8 100644
--- a/python/pyspark/sql/connect/catalog.py
+++ b/python/pyspark/sql/connect/catalog.py
@@ -15,9 +15,6 @@
# limitations under the License.
#
from pyspark.errors import PySparkTypeError
-from pyspark.sql.connect.utils import check_dependencies
-
-check_dependencies(__name__)
from typing import Any, Callable, List, Optional, TYPE_CHECKING
diff --git a/python/pyspark/sql/connect/client/__init__.py
b/python/pyspark/sql/connect/client/__init__.py
index adb4148f60d4..a07a3e396ec0 100644
--- a/python/pyspark/sql/connect/client/__init__.py
+++ b/python/pyspark/sql/connect/client/__init__.py
@@ -15,9 +15,5 @@
# limitations under the License.
#
-from pyspark.sql.connect.utils import check_dependencies
-
-check_dependencies(__name__)
-
from pyspark.sql.connect.client.core import * # noqa: F403
from pyspark.sql.connect.logging import getLogLevel # noqa: F401
diff --git a/python/pyspark/sql/connect/client/artifact.py
b/python/pyspark/sql/connect/client/artifact.py
index a37642186fda..3e70cb790548 100644
--- a/python/pyspark/sql/connect/client/artifact.py
+++ b/python/pyspark/sql/connect/client/artifact.py
@@ -15,11 +15,8 @@
# limitations under the License.
#
from pyspark.errors import PySparkRuntimeError, PySparkValueError
-from pyspark.sql.connect.utils import check_dependencies
from pyspark.sql.connect.logging import logger
-check_dependencies(__name__)
-
import hashlib
import importlib
import io
diff --git a/python/pyspark/sql/connect/client/core.py
b/python/pyspark/sql/connect/client/core.py
index ab7979a28326..e86d07aed6d3 100644
--- a/python/pyspark/sql/connect/client/core.py
+++ b/python/pyspark/sql/connect/client/core.py
@@ -24,9 +24,6 @@ import atexit
import pyspark
from pyspark.sql.connect.proto.base_pb2 import FetchErrorDetailsResponse
-from pyspark.sql.connect.utils import check_dependencies
-
-check_dependencies(__name__)
import concurrent.futures
import logging
diff --git a/python/pyspark/sql/connect/client/reattach.py
b/python/pyspark/sql/connect/client/reattach.py
index 62593384c7d0..7e168ec0fdd6 100644
--- a/python/pyspark/sql/connect/client/reattach.py
+++ b/python/pyspark/sql/connect/client/reattach.py
@@ -15,9 +15,6 @@
# limitations under the License.
#
from pyspark.sql.connect.client.retries import Retrying, RetryException
-from pyspark.sql.connect.utils import check_dependencies
-
-check_dependencies(__name__)
from threading import RLock
import uuid
diff --git a/python/pyspark/sql/connect/column.py
b/python/pyspark/sql/connect/column.py
index 93c85e1b095d..ad011d723ed9 100644
--- a/python/pyspark/sql/connect/column.py
+++ b/python/pyspark/sql/connect/column.py
@@ -14,10 +14,6 @@
# See the License for the specific language governing permissions and
# limitations under the License.
#
-from pyspark.sql.connect.utils import check_dependencies
-
-check_dependencies(__name__)
-
import datetime
import decimal
import warnings
diff --git a/python/pyspark/sql/connect/conf.py
b/python/pyspark/sql/connect/conf.py
index 84d7ad34fb36..890bad25877e 100644
--- a/python/pyspark/sql/connect/conf.py
+++ b/python/pyspark/sql/connect/conf.py
@@ -15,9 +15,6 @@
# limitations under the License.
#
from pyspark.errors import PySparkValueError, PySparkTypeError
-from pyspark.sql.connect.utils import check_dependencies
-
-check_dependencies(__name__)
from typing import Any, Dict, Optional, Union, cast
import warnings
diff --git a/python/pyspark/sql/connect/conversion.py
b/python/pyspark/sql/connect/conversion.py
index efc07f428309..137b04510f0f 100644
--- a/python/pyspark/sql/connect/conversion.py
+++ b/python/pyspark/sql/connect/conversion.py
@@ -14,11 +14,6 @@
# See the License for the specific language governing permissions and
# limitations under the License.
#
-from pyspark.sql.connect.utils import check_dependencies
-
-check_dependencies(__name__)
-
-
from typing import TYPE_CHECKING
import pyspark.sql.connect.proto as pb2
diff --git a/python/pyspark/sql/connect/dataframe.py
b/python/pyspark/sql/connect/dataframe.py
index 9cb12d8388a5..89846e36e718 100644
--- a/python/pyspark/sql/connect/dataframe.py
+++ b/python/pyspark/sql/connect/dataframe.py
@@ -22,9 +22,6 @@ from pyspark.errors.exceptions.base import (
)
from pyspark.resource import ResourceProfile
from pyspark.sql.connect.logging import logger
-from pyspark.sql.connect.utils import check_dependencies
-
-check_dependencies(__name__)
from typing import (
Any,
diff --git a/python/pyspark/sql/connect/datasource.py
b/python/pyspark/sql/connect/datasource.py
index b687c7aea227..c12606aa323b 100644
--- a/python/pyspark/sql/connect/datasource.py
+++ b/python/pyspark/sql/connect/datasource.py
@@ -14,10 +14,6 @@
# See the License for the specific language governing permissions and
# limitations under the License.
#
-from pyspark.sql.connect.utils import check_dependencies
-
-check_dependencies(__name__)
-
from typing import Type, TYPE_CHECKING
from pyspark.sql.datasource import DataSourceRegistration as
PySparkDataSourceRegistration
diff --git a/python/pyspark/sql/connect/expressions.py
b/python/pyspark/sql/connect/expressions.py
index 0e8bb3c30924..793b562cdd99 100644
--- a/python/pyspark/sql/connect/expressions.py
+++ b/python/pyspark/sql/connect/expressions.py
@@ -14,10 +14,6 @@
# See the License for the specific language governing permissions and
# limitations under the License.
#
-from pyspark.sql.connect.utils import check_dependencies
-
-check_dependencies(__name__)
-
from typing import (
cast,
TYPE_CHECKING,
diff --git a/python/pyspark/sql/connect/functions/builtin.py
b/python/pyspark/sql/connect/functions/builtin.py
index 330e323b5b20..0510de9361f1 100644
--- a/python/pyspark/sql/connect/functions/builtin.py
+++ b/python/pyspark/sql/connect/functions/builtin.py
@@ -14,10 +14,6 @@
# See the License for the specific language governing permissions and
# limitations under the License.
#
-from pyspark.sql.connect.utils import check_dependencies
-
-check_dependencies(__name__)
-
import decimal
import inspect
import warnings
diff --git a/python/pyspark/sql/connect/functions/partitioning.py
b/python/pyspark/sql/connect/functions/partitioning.py
index 70f9aed8c213..8497345f7507 100644
--- a/python/pyspark/sql/connect/functions/partitioning.py
+++ b/python/pyspark/sql/connect/functions/partitioning.py
@@ -14,10 +14,6 @@
# See the License for the specific language governing permissions and
# limitations under the License.
#
-from pyspark.sql.connect.utils import check_dependencies
-
-check_dependencies(__name__)
-
from typing import Union, TYPE_CHECKING
from pyspark.errors import PySparkTypeError
diff --git a/python/pyspark/sql/connect/group.py
b/python/pyspark/sql/connect/group.py
index afaf286a7e9c..dfd8e756ec6e 100644
--- a/python/pyspark/sql/connect/group.py
+++ b/python/pyspark/sql/connect/group.py
@@ -15,10 +15,6 @@
# limitations under the License.
#
-from pyspark.sql.connect.utils import check_dependencies
-
-check_dependencies(__name__)
-
import warnings
from typing import (
Dict,
diff --git a/python/pyspark/sql/connect/merge.py
b/python/pyspark/sql/connect/merge.py
index 913b5099b776..7a1c684ca37a 100644
--- a/python/pyspark/sql/connect/merge.py
+++ b/python/pyspark/sql/connect/merge.py
@@ -14,10 +14,6 @@
# See the License for the specific language governing permissions and
# limitations under the License.
#
-from pyspark.sql.connect.utils import check_dependencies
-
-check_dependencies(__name__)
-
import sys
from typing import Dict, Optional, TYPE_CHECKING, Callable
diff --git a/python/pyspark/sql/connect/observation.py
b/python/pyspark/sql/connect/observation.py
index 1ce4235a67ce..efb526d5ca8a 100644
--- a/python/pyspark/sql/connect/observation.py
+++ b/python/pyspark/sql/connect/observation.py
@@ -14,10 +14,6 @@
# See the License for the specific language governing permissions and
# limitations under the License.
#
-from pyspark.sql.connect.utils import check_dependencies
-
-check_dependencies(__name__)
-
from typing import Any, Dict, Optional
import uuid
diff --git a/python/pyspark/sql/connect/plan.py
b/python/pyspark/sql/connect/plan.py
index 75354e727678..36118359f9e6 100644
--- a/python/pyspark/sql/connect/plan.py
+++ b/python/pyspark/sql/connect/plan.py
@@ -18,9 +18,6 @@
# mypy: disable-error-code="operator"
from pyspark.resource import ResourceProfile
-from pyspark.sql.connect.utils import check_dependencies
-
-check_dependencies(__name__)
from typing import (
Any,
diff --git a/python/pyspark/sql/connect/protobuf/functions.py
b/python/pyspark/sql/connect/protobuf/functions.py
index ebe1f70fe8c7..24bfba19aa5b 100644
--- a/python/pyspark/sql/connect/protobuf/functions.py
+++ b/python/pyspark/sql/connect/protobuf/functions.py
@@ -19,10 +19,6 @@
A collections of builtin protobuf functions
"""
-from pyspark.sql.connect.utils import check_dependencies
-
-check_dependencies(__name__)
-
from typing import Dict, Optional, TYPE_CHECKING
from pyspark.sql.protobuf import functions as PyProtobufFunctions
diff --git a/python/pyspark/sql/connect/readwriter.py
b/python/pyspark/sql/connect/readwriter.py
index 6cc38aca4fc4..105d07629d50 100644
--- a/python/pyspark/sql/connect/readwriter.py
+++ b/python/pyspark/sql/connect/readwriter.py
@@ -14,10 +14,6 @@
# See the License for the specific language governing permissions and
# limitations under the License.
#
-from pyspark.sql.connect.utils import check_dependencies
-
-check_dependencies(__name__)
-
from typing import Dict
from typing import Optional, Union, List, overload, Tuple, cast, Callable
from typing import TYPE_CHECKING
diff --git a/python/pyspark/sql/connect/session.py
b/python/pyspark/sql/connect/session.py
index f9a360ec6054..ab3fb03ca552 100644
--- a/python/pyspark/sql/connect/session.py
+++ b/python/pyspark/sql/connect/session.py
@@ -15,9 +15,6 @@
# limitations under the License.
#
import uuid
-from pyspark.sql.connect.utils import check_dependencies
-
-check_dependencies(__name__)
import json
import threading
diff --git a/python/pyspark/sql/connect/streaming/query.py
b/python/pyspark/sql/connect/streaming/query.py
index c4e9c512ec58..13df13c4a613 100644
--- a/python/pyspark/sql/connect/streaming/query.py
+++ b/python/pyspark/sql/connect/streaming/query.py
@@ -14,10 +14,6 @@
# See the License for the specific language governing permissions and
# limitations under the License.
#
-from pyspark.sql.connect.utils import check_dependencies
-
-check_dependencies(__name__)
-
import json
import sys
import warnings
diff --git a/python/pyspark/sql/connect/streaming/readwriter.py
b/python/pyspark/sql/connect/streaming/readwriter.py
index b2813db8a805..ca9506329b6a 100644
--- a/python/pyspark/sql/connect/streaming/readwriter.py
+++ b/python/pyspark/sql/connect/streaming/readwriter.py
@@ -14,10 +14,6 @@
# See the License for the specific language governing permissions and
# limitations under the License.
#
-from pyspark.sql.connect.utils import check_dependencies
-
-check_dependencies(__name__)
-
import json
import re
import sys
diff --git a/python/pyspark/sql/connect/types.py
b/python/pyspark/sql/connect/types.py
index d3352b618d7c..e8b292eb090b 100644
--- a/python/pyspark/sql/connect/types.py
+++ b/python/pyspark/sql/connect/types.py
@@ -14,10 +14,6 @@
# See the License for the specific language governing permissions and
# limitations under the License.
#
-from pyspark.sql.connect.utils import check_dependencies
-
-check_dependencies(__name__)
-
import json
from typing import Any, Dict, Optional, List
diff --git a/python/pyspark/sql/connect/udf.py
b/python/pyspark/sql/connect/udf.py
index 3d61471e8251..5fec5a83e2e3 100644
--- a/python/pyspark/sql/connect/udf.py
+++ b/python/pyspark/sql/connect/udf.py
@@ -17,10 +17,6 @@
"""
User-defined function related classes and functions
"""
-from pyspark.sql.connect.utils import check_dependencies
-
-check_dependencies(__name__)
-
import warnings
import sys
import functools
diff --git a/python/pyspark/sql/connect/udtf.py
b/python/pyspark/sql/connect/udtf.py
index f04993207167..a6cb8a148e89 100644
--- a/python/pyspark/sql/connect/udtf.py
+++ b/python/pyspark/sql/connect/udtf.py
@@ -17,10 +17,6 @@
"""
User-defined table function related classes and functions
"""
-from pyspark.sql.connect.utils import check_dependencies
-
-check_dependencies(__name__)
-
import warnings
from typing import List, Type, TYPE_CHECKING, Optional, Union, Any
diff --git a/python/pyspark/sql/connect/utils.py
b/python/pyspark/sql/connect/utils.py
index 0e0e04244653..34413a3d2c3b 100644
--- a/python/pyspark/sql/connect/utils.py
+++ b/python/pyspark/sql/connect/utils.py
@@ -21,23 +21,25 @@ from pyspark.sql.pandas.utils import
require_minimum_pandas_version, require_min
from pyspark.errors import PySparkImportError
-def check_dependencies(mod_name: str) -> None:
- if mod_name == "__main__" or mod_name == "pyspark.sql.connect.utils":
+def check_dependencies() -> None:
+ main_module = sys.modules["__main__"]
+ if main_module.__spec__ is None and not hasattr(main_module, "__file__"):
+ # The main module is not initialized at all at this point. We must be
running doctests.
from pyspark.testing.connectutils import should_test_connect,
connect_requirement_message
if not should_test_connect:
print(
- f"Skipping {mod_name} doctests: {connect_requirement_message}",
+ f"Skipping doctests: {connect_requirement_message}",
file=sys.stderr,
)
sys.exit(0)
- else:
- require_minimum_pandas_version()
- require_minimum_pyarrow_version()
- require_minimum_grpc_version()
- require_minimum_grpcio_status_version()
- require_minimum_googleapis_common_protos_version()
- require_minimum_zstandard_version()
+
+ require_minimum_pandas_version()
+ require_minimum_pyarrow_version()
+ require_minimum_grpc_version()
+ require_minimum_grpcio_status_version()
+ require_minimum_googleapis_common_protos_version()
+ require_minimum_zstandard_version()
def require_minimum_grpc_version() -> None:
diff --git a/python/pyspark/sql/connect/window.py
b/python/pyspark/sql/connect/window.py
index 952258e8db48..130172094377 100644
--- a/python/pyspark/sql/connect/window.py
+++ b/python/pyspark/sql/connect/window.py
@@ -14,10 +14,6 @@
# See the License for the specific language governing permissions and
# limitations under the License.
#
-from pyspark.sql.connect.utils import check_dependencies
-
-check_dependencies(__name__)
-
from typing import TYPE_CHECKING, Any, Union, Sequence, List, Optional, Tuple,
cast, Iterable
from pyspark.sql.column import Column
diff --git a/python/pyspark/sql/tests/connect/test_connect_function.py
b/python/pyspark/sql/tests/connect/test_connect_function.py
index e53ed6c70a70..e806012fd49e 100644
--- a/python/pyspark/sql/tests/connect/test_connect_function.py
+++ b/python/pyspark/sql/tests/connect/test_connect_function.py
@@ -2578,9 +2578,7 @@ class SparkConnectFunctionTests(ReusedMixedTestCase,
PandasOnSparkTestUtils):
)
# Functions in Spark Connect we do not expect to be available in
classic PySpark
- cf_excluded_fn = {
- "check_dependencies", # internal helper function
- }
+ cf_excluded_fn = set()
self.assertEqual(
cf_fn - sf_fn,
diff --git a/python/pyspark/sql/tests/test_connect_compatibility.py
b/python/pyspark/sql/tests/test_connect_compatibility.py
index 56b212387fe4..94ebd958843b 100644
--- a/python/pyspark/sql/tests/test_connect_compatibility.py
+++ b/python/pyspark/sql/tests/test_connect_compatibility.py
@@ -380,7 +380,7 @@ class ConnectCompatibilityTestsMixin:
expected_missing_connect_properties = set()
expected_missing_classic_properties = set()
expected_missing_connect_methods = set()
- expected_missing_classic_methods = {"check_dependencies"}
+ expected_missing_classic_methods = set()
self.check_compatibility(
ClassicFunctions,
ConnectFunctions,
@@ -418,7 +418,7 @@ class ConnectCompatibilityTestsMixin:
"cast",
"get_active_spark_context",
}
- expected_missing_classic_methods = {"lit", "check_dependencies"}
+ expected_missing_classic_methods = {"lit"}
self.check_compatibility(
ClassicAvro,
ConnectAvro,
@@ -456,7 +456,7 @@ class ConnectCompatibilityTestsMixin:
"try_remote_protobuf_functions",
"get_active_spark_context",
}
- expected_missing_classic_methods = {"lit", "check_dependencies"}
+ expected_missing_classic_methods = {"lit"}
self.check_compatibility(
ClassicProtobuf,
ConnectProtobuf,
---------------------------------------------------------------------
To unsubscribe, e-mail: [email protected]
For additional commands, e-mail: [email protected]