This is an automated email from the ASF dual-hosted git repository.
ruifengz pushed a commit to branch master
in repository https://gitbox.apache.org/repos/asf/spark.git
The following commit(s) were added to refs/heads/master by this push:
new 7bf0deea93e6 [SPARK-52238][TESTS][FOLLOW-UP] Fix test failure in classic-only envs
7bf0deea93e6 is described below
commit 7bf0deea93e6358dcbd368635d5822f621f3e0b7
Author: Ruifeng Zheng <[email protected]>
AuthorDate: Mon Jun 9 10:58:19 2025 +0800
[SPARK-52238][TESTS][FOLLOW-UP] Fix test failure in classic-only envs
### What changes were proposed in this pull request?
Fix the import error in classic-only envs (a sketch of the guard pattern follows this list):
- pypy3.10
- python3.11-classic-only
- python3.13-nogil
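The fix guards the Connect-only imports behind `should_test_connect` from `pyspark.testing.connectutils`, so modules that transitively import `grpc` are never loaded in classic-only environments. A minimal sketch of the pattern, using the names touched by this change (the `ExampleConnectOnlyTests` class and its test body are illustrative only; the real edits are in the diff below):
```
import unittest

from pyspark.testing.connectutils import (
    should_test_connect,
    connect_requirement_message,
)

# Only import Connect-dependent modules (which pull in grpc) when the
# environment can actually run Spark Connect tests.
if should_test_connect:
    from pyspark.pipelines.block_connect_access import (
        block_spark_connect_execution_and_analysis,
    )


# Skip the whole class when Spark Connect prerequisites are missing, so the
# guarded names above are never referenced.
@unittest.skipIf(not should_test_connect, connect_requirement_message)
class ExampleConnectOnlyTests(unittest.TestCase):  # illustrative class name
    def test_guarded_import_is_available(self):
        self.assertTrue(callable(block_spark_connect_execution_and_analysis))
```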
### Why are the changes needed?
https://github.com/apache/spark/actions/runs/15430761771/job/43428094456
```
Starting test(python3.11): pyspark.pipelines.tests.test_block_connect_access (temp output: /__w/spark/spark/python/target/e57e05b9-80f9-4149-9a9c-898cf497bd80/python3.11__pyspark.pipelines.tests.test_block_connect_access__wv66e6_5.log)
Traceback (most recent call last):
  File "<frozen runpy>", line 198, in _run_module_as_main
  File "<frozen runpy>", line 88, in _run_code
  File "/__w/spark/spark/python/pyspark/pipelines/tests/test_block_connect_access.py", line 21, in <module>
    from pyspark.pipelines.block_connect_access import block_spark_connect_execution_and_analysis
  File "/__w/spark/spark/python/pyspark/pipelines/block_connect_access.py", line 21, in <module>
    from pyspark.sql.connect.proto.base_pb2_grpc import SparkConnectServiceStub
  File "/__w/spark/spark/python/pyspark/sql/connect/proto/__init__.py", line 18, in <module>
    from pyspark.sql.connect.proto.base_pb2_grpc import *
  File "/__w/spark/spark/python/pyspark/sql/connect/proto/base_pb2_grpc.py", line 19, in <module>
    import grpc
ModuleNotFoundError: No module named 'grpc'
```
### Does this PR introduce _any_ user-facing change?
No, test-only.
### How was this patch tested?
PR builder with
```
default: '{"PYSPARK_IMAGE_TO_TEST": "python-311-classic-only",
"PYTHON_TO_TEST": "python3.11"}'
```
### Was this patch authored or co-authored using generative AI tooling?
No
Closes #51094 from zhengruifeng/fix_pp_no_grpc.
Authored-by: Ruifeng Zheng <[email protected]>
Signed-off-by: Ruifeng Zheng <[email protected]>
---
.../pipelines/tests/test_block_connect_access.py | 4 +++-
python/pyspark/pipelines/tests/test_cli.py | 25 ++++++++++++++--------
python/pyspark/pipelines/tests/test_init_cli.py | 25 ++++++++++++++--------
3 files changed, 35 insertions(+), 19 deletions(-)
diff --git a/python/pyspark/pipelines/tests/test_block_connect_access.py b/python/pyspark/pipelines/tests/test_block_connect_access.py
index 5467dded430c..1ad1881b7127 100644
--- a/python/pyspark/pipelines/tests/test_block_connect_access.py
+++ b/python/pyspark/pipelines/tests/test_block_connect_access.py
@@ -18,13 +18,15 @@ import unittest
from pyspark.errors import PySparkException
from pyspark.testing.connectutils import ReusedConnectTestCase
-from pyspark.pipelines.block_connect_access import block_spark_connect_execution_and_analysis
from pyspark.testing.connectutils import (
    ReusedConnectTestCase,
    should_test_connect,
    connect_requirement_message,
)
+if should_test_connect:
+    from pyspark.pipelines.block_connect_access import block_spark_connect_execution_and_analysis
+
@unittest.skipIf(not should_test_connect, connect_requirement_message)
class BlockSparkConnectAccessTests(ReusedConnectTestCase):
diff --git a/python/pyspark/pipelines/tests/test_cli.py b/python/pyspark/pipelines/tests/test_cli.py
index 61755bfd8e62..3f372fba2606 100644
--- a/python/pyspark/pipelines/tests/test_cli.py
+++ b/python/pyspark/pipelines/tests/test_cli.py
@@ -21,18 +21,25 @@ import textwrap
from pathlib import Path
from pyspark.errors import PySparkException
-from pyspark.pipelines.cli import (
-    change_dir,
-    find_pipeline_spec,
-    load_pipeline_spec,
-    register_definitions,
-    unpack_pipeline_spec,
-    DefinitionsGlob,
-    PipelineSpec,
+from pyspark.testing.connectutils import (
+    should_test_connect,
+    connect_requirement_message,
)
-from pyspark.pipelines.tests.local_graph_element_registry import LocalGraphElementRegistry
+if should_test_connect:
+    from pyspark.pipelines.cli import (
+        change_dir,
+        find_pipeline_spec,
+        load_pipeline_spec,
+        register_definitions,
+        unpack_pipeline_spec,
+        DefinitionsGlob,
+        PipelineSpec,
+    )
+    from pyspark.pipelines.tests.local_graph_element_registry import LocalGraphElementRegistry
+
[email protected](not should_test_connect, connect_requirement_message)
class CLIUtilityTests(unittest.TestCase):
    def test_load_pipeline_spec(self):
        with tempfile.NamedTemporaryFile(mode="w") as tmpfile:
diff --git a/python/pyspark/pipelines/tests/test_init_cli.py b/python/pyspark/pipelines/tests/test_init_cli.py
index 97d4eabdca08..484ee4cb78e5 100644
--- a/python/pyspark/pipelines/tests/test_init_cli.py
+++ b/python/pyspark/pipelines/tests/test_init_cli.py
@@ -18,18 +18,25 @@ import unittest
import tempfile
from pathlib import Path
-from pyspark.pipelines.cli import (
-    change_dir,
-    find_pipeline_spec,
-    load_pipeline_spec,
-    init,
-    register_definitions,
+from pyspark.testing.connectutils import (
+    ReusedConnectTestCase,
+    should_test_connect,
+    connect_requirement_message,
)
-from pyspark.pipelines.tests.local_graph_element_registry import LocalGraphElementRegistry
-from pyspark.testing.sqlutils import ReusedSQLTestCase
+if should_test_connect:
+    from pyspark.pipelines.cli import (
+        change_dir,
+        find_pipeline_spec,
+        load_pipeline_spec,
+        init,
+        register_definitions,
+    )
+    from pyspark.pipelines.tests.local_graph_element_registry import LocalGraphElementRegistry
-class InitCLITests(ReusedSQLTestCase):
+
[email protected](not should_test_connect, connect_requirement_message)
+class InitCLITests(ReusedConnectTestCase):
    def test_init(self):
        with tempfile.TemporaryDirectory() as temp_dir:
            project_name = "test_project"
---------------------------------------------------------------------
To unsubscribe, e-mail: [email protected]
For additional commands, e-mail: [email protected]