LuciferYang commented on code in PR #53106:
URL: https://github.com/apache/spark/pull/53106#discussion_r2540037303
##########
sql/connect/server/src/test/scala/org/apache/spark/sql/connect/pipelines/PythonPipelineSuite.scala:
##########
@@ -1111,4 +1104,13 @@ class PythonPipelineSuite
| return spark.range(5)
|""".stripMargin)
}
+
+ override protected def test(testName: String, testTags: Tag*)(testFun: =>
Any)(implicit
+ pos: Position): Unit = {
+ if (PythonTestDepsChecker.isConnectDepsAvailable) {
Review Comment:
```
WARNING: Using incubator modules: jdk.incubator.vector
Traceback (most recent call last):
File
"/Users/yangjie01/SourceCode/git/spark-mine-sbt/python/pyspark/sql/connect/utils.py",
line 105, in require_minimum_zstandard_version
import zstandard # noqa
^^^^^^^^^^^^^^^^
ModuleNotFoundError: No module named 'zstandard'
The above exception was the direct cause of the following exception:
Traceback (most recent call last):
File "<string>", line 1, in <module>
File
"/Users/yangjie01/SourceCode/git/spark-mine-sbt/python/pyspark/sql/connect/utils.py",
line 40, in check_dependencies
require_minimum_zstandard_version()
File
"/Users/yangjie01/SourceCode/git/spark-mine-sbt/python/pyspark/sql/connect/utils.py",
line 107, in require_minimum_zstandard_version
raise PySparkImportError(
pyspark.errors.exceptions.base.PySparkImportError: [PACKAGE_NOT_INSTALLED]
zstandard >= 0.25.0 must be installed; however, it was not found.
[info] PythonPipelineSuite:
[info] org.apache.spark.sql.connect.pipelines.PythonPipelineSuite ***
ABORTED *** (8 milliseconds)
[info]
org.apache.spark.sql.connect.PythonTestDepsChecker.isConnectDepsAvailable was
false (PythonPipelineSuite.scala:121)
[info] org.scalatest.exceptions.TestCanceledException:
[info] at
org.scalatest.Assertions.newTestCanceledException(Assertions.scala:475)
[info] at
org.scalatest.Assertions.newTestCanceledException$(Assertions.scala:474)
[info] at
org.scalatest.Assertions$.newTestCanceledException(Assertions.scala:1231)
[info] at
org.scalatest.Assertions$AssertionsHelper.macroAssume(Assertions.scala:1310)
[info] at
org.apache.spark.sql.connect.pipelines.PythonPipelineSuite.test(PythonPipelineSuite.scala:1110)
[info] at
org.apache.spark.sql.connect.pipelines.PythonPipelineSuite.<init>(PythonPipelineSuite.scala:121)
[info] at
java.base/jdk.internal.reflect.NativeConstructorAccessorImpl.newInstance0(Native
Method)
[info] at
java.base/jdk.internal.reflect.NativeConstructorAccessorImpl.newInstance(NativeConstructorAccessorImpl.java:77)
[info] at
java.base/jdk.internal.reflect.DelegatingConstructorAccessorImpl.newInstance(DelegatingConstructorAccessorImpl.java:45)
[info] at
java.base/java.lang.reflect.Constructor.newInstanceWithCaller(Constructor.java:500)
[info] at
java.base/java.lang.reflect.ReflectAccess.newInstance(ReflectAccess.java:128)
[info] at
java.base/jdk.internal.reflect.ReflectionFactory.newInstance(ReflectionFactory.java:347)
[info] at java.base/java.lang.Class.newInstance(Class.java:645)
[info] at
org.scalatest.tools.Framework$ScalaTestTask.execute(Framework.scala:454)
[info] at sbt.ForkMain$Run.lambda$runTest$1(ForkMain.java:414)
[info] at
java.base/java.util.concurrent.FutureTask.run(FutureTask.java:264)
[info] at
java.base/java.util.concurrent.ThreadPoolExecutor.runWorker(ThreadPoolExecutor.java:1136)
[info] at
java.base/java.util.concurrent.ThreadPoolExecutor$Worker.run(ThreadPoolExecutor.java:635)
[info] at java.base/java.lang.Thread.run(Thread.java:840)
[info] Run completed in 4 seconds, 395 milliseconds.
[info] Total number of tests run: 0
[info] Suites: completed 0, aborted 1
[info] Tests: succeeded 0, failed 0, canceled 0, ignored 0, pending 0
[info] *** 1 SUITE ABORTED ***
[error] Error during tests:
[error] org.apache.spark.sql.connect.pipelines.PythonPipelineSuite
[error] (connect / Test / testOnly) sbt.TestsFailedException: Tests
unsuccessful
[error] Total time: 205 s (0:03:25.0), completed 2025年11月19日 上午8:25:50
yangjie01@localhost spark-mine-sbt % git diff
diff --git
a/sql/connect/server/src/test/scala/org/apache/spark/sql/connect/pipelines/PythonPipelineSuite.scala
b/sql/connect/server/src/test/scala/org/apache/spark/sql/connect/pipelines/PythonPipelineSuite.scala
index 98b33c3296f..9d9aee4e994 100644
---
a/sql/connect/server/src/test/scala/org/apache/spark/sql/connect/pipelines/PythonPipelineSuite.scala
+++
b/sql/connect/server/src/test/scala/org/apache/spark/sql/connect/pipelines/PythonPipelineSuite.scala
@@ -1107,10 +1107,7 @@ class PythonPipelineSuite
override protected def test(testName: String, testTags: Tag*)(testFun: =>
Any)(implicit
pos: Position): Unit = {
- if (PythonTestDepsChecker.isConnectDepsAvailable) {
- super.test(testName, testTags: _*)(testFun)
- } else {
- super.ignore(testName, testTags: _*)(testFun)
- }
+ assume(PythonTestDepsChecker.isConnectDepsAvailable)
+ super.test(testName, testTags: _*)(testFun)
}
}
yangjie01@localhost spark-mine-sbt % build/sbt clean "connect/testOnly
org.apache.spark.sql.connect.pipelines.PythonPipelineSuite"
```
--
This is an automated message from the Apache Git Service.
To respond to the message, please log on to GitHub and use the
URL above to go to the specific comment.
To unsubscribe, e-mail: [email protected]
For queries about this service, please contact Infrastructure at:
[email protected]
---------------------------------------------------------------------
To unsubscribe, e-mail: [email protected]
For additional commands, e-mail: [email protected]