LuciferYang commented on code in PR #53935:
URL: https://github.com/apache/spark/pull/53935#discussion_r2752578537
##########
sql/connect/client/jvm/src/test/scala/org/apache/spark/sql/connect/ClientE2ETestSuite.scala:
##########
@@ -1616,6 +1616,17 @@ class ClientE2ETestSuite
assert(metrics2 === Map("min(extra)" -> -1, "avg(extra)" -> 48,
"max(extra)" -> 97))
}
+ test("SPARK-55150: observation errors leads to empty result in connect
mode") {
Review Comment:
This test case will fail in non-ANSI mode; it can be reproduced as follows:
`SPARK_ANSI_SQL_MODE="false" build/sbt clean "connect-client-jvm/testOnly
org.apache.spark.sql.connect.ClientE2ETestSuite" -Phive`
```
[info] - SPARK-55150: observation errors leads to empty result in connect
mode *** FAILED *** (149 milliseconds)
[info] Map("sum_id" -> 45, "sum_id_div_by_zero" -> null) was not empty
(ClientE2ETestSuite.scala:1627)
[info] org.scalatest.exceptions.TestFailedException:
[info] at
org.scalatest.Assertions.newAssertionFailedException(Assertions.scala:472)
[info] at
org.scalatest.Assertions.newAssertionFailedException$(Assertions.scala:471)
[info] at
org.scalatest.Assertions$.newAssertionFailedException(Assertions.scala:1231)
[info] at
org.scalatest.Assertions$AssertionsHelper.macroAssert(Assertions.scala:1295)
[info] at
org.apache.spark.sql.connect.ClientE2ETestSuite.$anonfun$new$216(ClientE2ETestSuite.scala:1627)
[info] at org.scalatest.OutcomeOf.outcomeOf(OutcomeOf.scala:85)
[info] at org.scalatest.OutcomeOf.outcomeOf$(OutcomeOf.scala:83)
[info] at org.scalatest.OutcomeOf$.outcomeOf(OutcomeOf.scala:104)
[info] at org.scalatest.Transformer.apply(Transformer.scala:22)
[info] at org.scalatest.Transformer.apply(Transformer.scala:20)
[info] at
org.scalatest.funsuite.AnyFunSuiteLike$$anon$1.apply(AnyFunSuiteLike.scala:226)
[info] at org.scalatest.TestSuite.withFixture(TestSuite.scala:196)
[info] at org.scalatest.TestSuite.withFixture$(TestSuite.scala:195)
[info] at
org.scalatest.funsuite.AnyFunSuite.withFixture(AnyFunSuite.scala:1564)
[info] at
org.scalatest.funsuite.AnyFunSuiteLike.invokeWithFixture$1(AnyFunSuiteLike.scala:224)
[info] at
org.scalatest.funsuite.AnyFunSuiteLike.$anonfun$runTest$1(AnyFunSuiteLike.scala:236)
[info] at org.scalatest.SuperEngine.runTestImpl(Engine.scala:306)
[info] at
org.scalatest.funsuite.AnyFunSuiteLike.runTest(AnyFunSuiteLike.scala:236)
[info] at
org.scalatest.funsuite.AnyFunSuiteLike.runTest$(AnyFunSuiteLike.scala:218)
[info] at
org.apache.spark.sql.connect.test.QueryTest.org$scalatest$BeforeAndAfterEach$$super$runTest(QueryTest.scala:32)
[info] at
org.scalatest.BeforeAndAfterEach.runTest(BeforeAndAfterEach.scala:234)
[info] at
org.scalatest.BeforeAndAfterEach.runTest$(BeforeAndAfterEach.scala:227)
[info] at
org.apache.spark.sql.connect.test.QueryTest.runTest(QueryTest.scala:32)
[info] at
org.scalatest.funsuite.AnyFunSuiteLike.$anonfun$runTests$1(AnyFunSuiteLike.scala:269)
[info] at
org.scalatest.SuperEngine.$anonfun$runTestsInBranch$1(Engine.scala:413)
[info] at scala.collection.immutable.List.foreach(List.scala:323)
[info] at org.scalatest.SuperEngine.traverseSubNodes$1(Engine.scala:401)
[info] at org.scalatest.SuperEngine.runTestsInBranch(Engine.scala:396)
[info] at org.scalatest.SuperEngine.runTestsImpl(Engine.scala:475)
[info] at
org.scalatest.funsuite.AnyFunSuiteLike.runTests(AnyFunSuiteLike.scala:269)
[info] at
org.scalatest.funsuite.AnyFunSuiteLike.runTests$(AnyFunSuiteLike.scala:268)
[info] at
org.scalatest.funsuite.AnyFunSuite.runTests(AnyFunSuite.scala:1564)
[info] at org.scalatest.Suite.run(Suite.scala:1114)
[info] at org.scalatest.Suite.run$(Suite.scala:1096)
[info] at
org.scalatest.funsuite.AnyFunSuite.org$scalatest$funsuite$AnyFunSuiteLike$$super$run(AnyFunSuite.scala:1564)
[info] at
org.scalatest.funsuite.AnyFunSuiteLike.$anonfun$run$1(AnyFunSuiteLike.scala:273)
[info] at org.scalatest.SuperEngine.runImpl(Engine.scala:535)
[info] at
org.scalatest.funsuite.AnyFunSuiteLike.run(AnyFunSuiteLike.scala:273)
[info] at
org.scalatest.funsuite.AnyFunSuiteLike.run$(AnyFunSuiteLike.scala:272)
[info] at
org.apache.spark.sql.connect.test.QueryTest.org$scalatest$BeforeAndAfterAll$$super$run(QueryTest.scala:32)
[info] at
org.scalatest.BeforeAndAfterAll.liftedTree1$1(BeforeAndAfterAll.scala:213)
[info] at org.scalatest.BeforeAndAfterAll.run(BeforeAndAfterAll.scala:210)
[info] at org.scalatest.BeforeAndAfterAll.run$(BeforeAndAfterAll.scala:208)
[info] at
org.apache.spark.sql.connect.test.QueryTest.run(QueryTest.scala:32)
[info] at
org.scalatest.tools.Framework.org$scalatest$tools$Framework$$runSuite(Framework.scala:321)
[info] at
org.scalatest.tools.Framework$ScalaTestTask.execute(Framework.scala:517)
[info] at sbt.ForkMain$Run.lambda$runTest$1(ForkMain.java:414)
[info] at
java.base/java.util.concurrent.FutureTask.run(FutureTask.java:264)
[info] at
java.base/java.util.concurrent.ThreadPoolExecutor.runWorker(ThreadPoolExecutor.java:1136)
[info] at
java.base/java.util.concurrent.ThreadPoolExecutor$Worker.run(ThreadPoolExecutor.java:635)
[info] at java.base/java.lang.Thread.run(Thread.java:840)
[info] - SPARK-48852: trim function on a string column returns correct
results (59 milliseconds)
[info] - SPARK-49673: new batch size, multiple batches (962 milliseconds)
[info] - SPARK-50965: Multiple positional parameterized nodes in the parsed
logical plan (205 milliseconds)
[info] - SPARK-50965: Multiple named parameterized nodes in the parsed
logical plan (129 milliseconds)
[info] - SPARK-50965: Multiple named and positional parameterized nodes in
the parsed logical plan (157 milliseconds)
[info] - SPARK-52770: Support Time type (70 milliseconds)
[info] - SPARK-53054: DataFrameReader defaults to spark.sql.sources.default
(301 milliseconds)
[info] - SPARK-52930: the nullability of arrays should be preserved using
typedlit (106 milliseconds)
[info] - SPARK-52930: the nullability of map values should be preserved
using typedlit (133 milliseconds)
[info] - SPARK-54043: DirectShufflePartitionID should be supported (337
milliseconds)
[info] - SPARK-53490: struct type in observed metrics (40 milliseconds)
[info] - SPARK-53490: array type in observed metrics (110 milliseconds)
[info] - SPARK-53490: map type in observed metrics (73 milliseconds)
[info] - SPARK-53553: null value handling in literals (35 milliseconds)
[info] - SQL Script result (128 milliseconds)
[info] - SQL Script schema (324 milliseconds)
[info] - SQL Script empty result (162 milliseconds)
[info] - SQL Script no result (106 milliseconds)
[info] - Arrow batch result chunking (1 second, 179 milliseconds)
[info] - Plan compression works correctly (667 milliseconds)
No more client connections.
[info] Run completed in 1 minute, 14 seconds.
[info] Total number of tests run: 116
[info] Suites: completed 1, aborted 0
[info] Tests: succeeded 115, failed 1, canceled 0, ignored 0, pending 0
[info] *** 1 TEST FAILED ***
[error] Failed tests:
[error] org.apache.spark.sql.connect.ClientE2ETestSuite
[error] (connect-client-jvm / Test / testOnly) sbt.TestsFailedException:
Tests unsuccessful
```
The daily test for non-ANSI mode has failed:
- https://github.com/apache/spark/actions/runs/21574398887/job/62158995793
<img width="1684" height="904" alt="Image"
src="https://github.com/user-attachments/assets/eb7481b3-0372-47b5-aa42-9800d1889469"
/>
--
This is an automated message from the Apache Git Service.
To respond to the message, please log on to GitHub and use the
URL above to go to the specific comment.
To unsubscribe, e-mail: [email protected]
For queries about this service, please contact Infrastructure at:
[email protected]
---------------------------------------------------------------------
To unsubscribe, e-mail: [email protected]
For additional commands, e-mail: [email protected]