Yang Jie created SPARK-45211:
--------------------------------

             Summary: Scala 2.13 daily test failed
                 Key: SPARK-45211
                 URL: https://issues.apache.org/jira/browse/SPARK-45211
             Project: Spark
          Issue Type: Bug
          Components: Connect
    Affects Versions: 4.0.0
            Reporter: Yang Jie


* [https://github.com/apache/spark/actions/runs/6215331575/job/16868131377]

{code:java}
[info] - abandoned query gets INVALID_HANDLE.OPERATION_ABANDONED error *** 
FAILED *** (157 milliseconds)
19991[info]   Expected exception org.apache.spark.SparkException to be thrown, 
but java.lang.StackOverflowError was thrown (ReattachableExecuteSuite.scala:172)
19992[info]   org.scalatest.exceptions.TestFailedException:
19993[info]   at 
org.scalatest.Assertions.newAssertionFailedException(Assertions.scala:472)
19994[info]   at 
org.scalatest.Assertions.newAssertionFailedException$(Assertions.scala:471)
19995[info]   at 
org.scalatest.funsuite.AnyFunSuite.newAssertionFailedException(AnyFunSuite.scala:1564)
19996[info]   at org.scalatest.Assertions.intercept(Assertions.scala:756)
19997[info]   at org.scalatest.Assertions.intercept$(Assertions.scala:746)
19998[info]   at 
org.scalatest.funsuite.AnyFunSuite.intercept(AnyFunSuite.scala:1564)
19999[info]   at 
org.apache.spark.sql.connect.execution.ReattachableExecuteSuite.$anonfun$new$18(ReattachableExecuteSuite.scala:172)
20000[info]   at 
org.apache.spark.sql.connect.execution.ReattachableExecuteSuite.$anonfun$new$18$adapted(ReattachableExecuteSuite.scala:168)
20001[info]   at 
org.apache.spark.sql.connect.SparkConnectServerTest.withCustomBlockingStub(SparkConnectServerTest.scala:222)
20002[info]   at 
org.apache.spark.sql.connect.SparkConnectServerTest.withCustomBlockingStub$(SparkConnectServerTest.scala:216)
20003[info]   at 
org.apache.spark.sql.connect.execution.ReattachableExecuteSuite.withCustomBlockingStub(ReattachableExecuteSuite.scala:30)
20004[info]   at 
org.apache.spark.sql.connect.execution.ReattachableExecuteSuite.$anonfun$new$16(ReattachableExecuteSuite.scala:168)
20005[info]   at 
org.apache.spark.sql.connect.execution.ReattachableExecuteSuite.$anonfun$new$16$adapted(ReattachableExecuteSuite.scala:151)
20006[info]   at 
org.apache.spark.sql.connect.SparkConnectServerTest.withClient(SparkConnectServerTest.scala:199)
20007[info]   at 
org.apache.spark.sql.connect.SparkConnectServerTest.withClient$(SparkConnectServerTest.scala:191)
20008[info]   at 
org.apache.spark.sql.connect.execution.ReattachableExecuteSuite.withClient(ReattachableExecuteSuite.scala:30)
20009[info]   at 
org.apache.spark.sql.connect.execution.ReattachableExecuteSuite.$anonfun$new$15(ReattachableExecuteSuite.scala:151)
20010[info]   at 
scala.runtime.java8.JFunction0$mcV$sp.apply(JFunction0$mcV$sp.scala:18)
20011[info]   at 
org.scalatest.enablers.Timed$$anon$1.timeoutAfter(Timed.scala:127)
20012[info]   at 
org.scalatest.concurrent.TimeLimits$.failAfterImpl(TimeLimits.scala:282)
20013[info]   at 
org.scalatest.concurrent.TimeLimits.failAfter(TimeLimits.scala:231)
20014[info]   at 
org.scalatest.concurrent.TimeLimits.failAfter$(TimeLimits.scala:230)
20015[info]   at 
org.apache.spark.SparkFunSuite.failAfter(SparkFunSuite.scala:69)
20016[info]   at 
org.apache.spark.SparkFunSuite.$anonfun$test$2(SparkFunSuite.scala:155)
20017[info]   at org.scalatest.OutcomeOf.outcomeOf(OutcomeOf.scala:85)
20018[info]   at org.scalatest.OutcomeOf.outcomeOf$(OutcomeOf.scala:83)
20019[info]   at org.scalatest.OutcomeOf$.outcomeOf(OutcomeOf.scala:104)
20020[info]   at org.scalatest.Transformer.apply(Transformer.scala:22)
20021[info]   at org.scalatest.Transformer.apply(Transformer.scala:20)
20022[info]   at 
org.scalatest.funsuite.AnyFunSuiteLike$$anon$1.apply(AnyFunSuiteLike.scala:226)
20023[info]   at 
org.apache.spark.SparkFunSuite.withFixture(SparkFunSuite.scala:227)
20024[info]   at 
org.scalatest.funsuite.AnyFunSuiteLike.invokeWithFixture$1(AnyFunSuiteLike.scala:224)
20025[info]   at 
org.scalatest.funsuite.AnyFunSuiteLike.$anonfun$runTest$1(AnyFunSuiteLike.scala:236)
20026[info]   at org.scalatest.SuperEngine.runTestImpl(Engine.scala:306)
20027[info]   at 
org.scalatest.funsuite.AnyFunSuiteLike.runTest(AnyFunSuiteLike.scala:236)
20028[info]   at 
org.scalatest.funsuite.AnyFunSuiteLike.runTest$(AnyFunSuiteLike.scala:218)
20029[info]   at 
org.apache.spark.SparkFunSuite.org$scalatest$BeforeAndAfterEach$$super$runTest(SparkFunSuite.scala:69)
20030[info]   at 
org.scalatest.BeforeAndAfterEach.runTest(BeforeAndAfterEach.scala:234)
20031[info]   at 
org.scalatest.BeforeAndAfterEach.runTest$(BeforeAndAfterEach.scala:227)
20032[info]   at org.apache.spark.SparkFunSuite.runTest(SparkFunSuite.scala:69)
20033[info]   at 
org.scalatest.funsuite.AnyFunSuiteLike.$anonfun$runTests$1(AnyFunSuiteLike.scala:269)
20034[info]   at 
org.scalatest.SuperEngine.$anonfun$runTestsInBranch$1(Engine.scala:413)
20035[info]   at scala.collection.immutable.List.foreach(List.scala:333)
20036[info]   at org.scalatest.SuperEngine.traverseSubNodes$1(Engine.scala:401)
20037[info]   at org.scalatest.SuperEngine.runTestsInBranch(Engine.scala:396)
20038[info]   at org.scalatest.SuperEngine.runTestsImpl(Engine.scala:475)
20039[info]   at 
org.scalatest.funsuite.AnyFunSuiteLike.runTests(AnyFunSuiteLike.scala:269)
20040[info]   at 
org.scalatest.funsuite.AnyFunSuiteLike.runTests$(AnyFunSuiteLike.scala:268)
20041[info]   at 
org.scalatest.funsuite.AnyFunSuite.runTests(AnyFunSuite.scala:1564)
20042[info]   at org.scalatest.Suite.run(Suite.scala:1114)
20043[info]   at org.scalatest.Suite.run$(Suite.scala:1096)
20044[info]   at 
org.scalatest.funsuite.AnyFunSuite.org$scalatest$funsuite$AnyFunSuiteLike$$super$run(AnyFunSuite.scala:1564)
20045[info]   at 
org.scalatest.funsuite.AnyFunSuiteLike.$anonfun$run$1(AnyFunSuiteLike.scala:273)
20046[info]   at org.scalatest.SuperEngine.runImpl(Engine.scala:535)
20047[info]   at 
org.scalatest.funsuite.AnyFunSuiteLike.run(AnyFunSuiteLike.scala:273)
20048[info]   at 
org.scalatest.funsuite.AnyFunSuiteLike.run$(AnyFunSuiteLike.scala:272)
20049[info]   at 
org.apache.spark.SparkFunSuite.org$scalatest$BeforeAndAfterAll$$super$run(SparkFunSuite.scala:69)
20050[info]   at 
org.scalatest.BeforeAndAfterAll.liftedTree1$1(BeforeAndAfterAll.scala:213)
20051[info]   at 
org.scalatest.BeforeAndAfterAll.run(BeforeAndAfterAll.scala:210)
20052[info]   at 
org.scalatest.BeforeAndAfterAll.run$(BeforeAndAfterAll.scala:208)
20053[info]   at org.apache.spark.SparkFunSuite.run(SparkFunSuite.scala:69)
20054[info]   at 
org.scalatest.tools.Framework.org$scalatest$tools$Framework$$runSuite(Framework.scala:321)
20055[info]   at 
org.scalatest.tools.Framework$ScalaTestTask.execute(Framework.scala:517)
20056[info]   at sbt.ForkMain$Run.lambda$runTest$1(ForkMain.java:414)
20057[info]   at java.util.concurrent.FutureTask.run(FutureTask.java:266)
20058[info]   at 
java.util.concurrent.ThreadPoolExecutor.runWorker(ThreadPoolExecutor.java:1149)
20059[info]   at 
java.util.concurrent.ThreadPoolExecutor$Worker.run(ThreadPoolExecutor.java:624)
20060[info]   at java.lang.Thread.run(Thread.java:750)
20061[info]   Cause: java.lang.StackOverflowError:
20062[info]   at 
org.apache.spark.sql.connect.client.WrappedCloseableIterator.hasNext(CloseableIterator.scala:36)
20063[info]   at 
org.apache.spark.sql.connect.client.WrappedCloseableIterator.hasNext(CloseableIterator.scala:36)
20064[info]   at 
org.apache.spark.sql.connect.client.WrappedCloseableIterator.hasNext(CloseableIterator.scala:36)
20065[info]   at 
org.apache.spark.sql.connect.client.WrappedCloseableIterator.hasNext(CloseableIterator.scala:36)
20066[info]   at 
org.apache.spark.sql.connect.client.WrappedCloseableIterator.hasNext(CloseableIterator.scala:36)
20067[info]   at 
org.apache.spark.sql.connect.client.WrappedCloseableIterator.hasNext(CloseableIterator.scala:36)
20068[info]   at 
org.apache.spark.sql.connect.client.WrappedCloseableIterator.hasNext(CloseableIterator.scala:36)
20069[info]   at 
org.apache.spark.sql.connect.client.WrappedCloseableIterator.hasNext(CloseableIterator.scala:36)
20070[info]   at 
org.apache.spark.sql.connect.client.WrappedCloseableIterator.hasNext(CloseableIterator.scala:36)
20071[info]   at 
org.apache.spark.sql.connect.client.WrappedCloseableIterator.hasNext(CloseableIterator.scala:36)
20072[info]   at 
org.apache.spark.sql.connect.client.WrappedCloseableIterator.hasNext(CloseableIterator.scala:36)
20073[info]   at 
org.apache.spark.sql.connect.client.WrappedCloseableIterator.hasNext(CloseableIterator.scala:36)
20074[info]   at 
org.apache.spark.sql.connect.client.WrappedCloseableIterator.hasNext(CloseableIterator.scala:36)
20075[info]   at 
org.apache.spark.sql.connect.client.WrappedCloseableIterator.hasNext(CloseableIterator.scala:36)
20076[info]   at 
org.apache.spark.sql.connect.client.WrappedCloseableIterator.hasNext(CloseableIterator.scala:36)
20077[info]   at 
org.apache.spark.sql.connect.client.WrappedCloseableIterator.hasNext(CloseableIterator.scala:36)
20078[info]   at 
org.apache.spark.sql.connect.client.WrappedCloseableIterator.hasNext(CloseableIterator.scala:36)
20079[info]   at 
org.apache.spark.sql.connect.client.WrappedCloseableIterator.hasNext(CloseableIterator.scala:36)
20080[info]   at 
org.apache.spark.sql.connect.client.WrappedCloseableIterator.hasNext(CloseableIterator.scala:36)
20081[info]   at 
org.apache.spark.sql.connect.client.WrappedCloseableIterator.hasNext(CloseableIterator.scala:36)
20082[info]   at 
org.apache.spark.sql.connect.client.WrappedCloseableIterator.hasNext(CloseableIterator.scala:36)
20083[info]   at 
org.apache.spark.sql.connect.client.WrappedCloseableIterator.hasNext(CloseableIterator.scala:36)
20084[info]   at 
org.apache.spark.sql.connect.client.WrappedCloseableIterator.hasNext(CloseableIterator.scala:36)
20085[info]   at 
org.apache.spark.sql.connect.client.WrappedCloseableIterator.hasNext(CloseableIterator.scala:36)
20086[info]   at 
org.apache.spark.sql.connect.client.WrappedCloseableIterator.hasNext(CloseableIterator.scala:36)
 {code}



--
This message was sent by Atlassian Jira
(v8.20.10#820010)

---------------------------------------------------------------------
To unsubscribe, e-mail: issues-unsubscr...@spark.apache.org
For additional commands, e-mail: issues-h...@spark.apache.org

Reply via email to