[ 
https://issues.apache.org/jira/browse/SPARK-50207?page=com.atlassian.jira.plugin.system.issuetabpanels:comment-tabpanel&focusedCommentId=17894873#comment-17894873
 ] 

Pengfei Xu commented on SPARK-50207:
------------------------------------

Duplicate of https://issues.apache.org/jira/browse/SPARK-49696.

> Disable `SparkSessionJobTaggingAndCancellationSuite.Cancellation APIs in 
> SparkSession are isolated`
> ---------------------------------------------------------------------------------------------------
>
>                 Key: SPARK-50207
>                 URL: https://issues.apache.org/jira/browse/SPARK-50207
>             Project: Spark
>          Issue Type: Sub-task
>          Components: Connect, Tests
>    Affects Versions: 4.0.0, 3.5.2
>            Reporter: Pengfei Xu
>            Priority: Blocker
>              Labels: pull-request-available
>
> * [https://github.com/apache/spark/actions/runs/10915451051/job/30295259985]
> {code:java}
> [info] SparkSessionJobTaggingAndCancellationSuite:
> [info] - Tags are not inherited by new sessions (31 milliseconds)
> [info] - Tags are inherited by cloned sessions (30 milliseconds)
> [info] - Tags set from session are prefixed with session UUID (68 
> milliseconds)
> [info] - Cancellation APIs in SparkSession are isolated *** FAILED *** (109 
> milliseconds)
> [info]   ArrayBuffer({"spark.driver.port"="43083", 
> "spark.app.startTime"="1726636674325", 
> "spark.rdd.scope"="{"id":"798732","name":"Exchange"}", 
> "spark.testing"="true", 
> "spark.hadoop.fs.s3a.connection.establish.timeout"="30000", 
> "spark.memory.debugFill"="true", "spark.master.rest.enabled"="false", 
> "spark.sql.execution.id"="58981", 
> "spark.sql.warehouse.dir"="file:/home/runner/work/spark/spark/sql/core/spark-warehouse",
>  "spark.rdd.scope.noOverride"="true", "spark.master"="local[2]", 
> "spark.job.interruptOnCancel"="true", "spark.executor.id"="driver", 
> "spark.app.name"="test", "spark.driver.host"="localhost", 
> "spark.app.id"="local-1726636674363", "spark.port.maxRetries"="100", 
> "spark.job.tags"="spark-session-fdcdce98-fa3b-460c-b7a7-67797d0701c9-execution-root-id-58981,spark-session-fdcdce98-fa3b-460c-b7a7-67797d0701c9-one,spark-session-fdcdce98-fa3b-460c-b7a7-67797d0701c9",
>  "spark.executor.extraJavaOptions"="-Djava.net.preferIPv6Addresses=false 
> -XX:+IgnoreUnrecognizedVMOptions --add-modules=jdk.incubator.vector 
> --add-opens=java.base/java.lang=ALL-UNNAMED 
> --add-opens=java.base/java.lang.invoke=ALL-UNNAMED 
> --add-opens=java.base/java.lang.reflect=ALL-UNNAMED 
> --add-opens=java.base/java.io=ALL-UNNAMED 
> --add-opens=java.base/java.net=ALL-UNNAMED 
> --add-opens=java.base/java.nio=ALL-UNNAMED 
> --add-opens=java.base/java.util=ALL-UNNAMED 
> --add-opens=java.base/java.util.concurrent=ALL-UNNAMED 
> --add-opens=java.base/java.util.concurrent.atomic=ALL-UNNAMED 
> --add-opens=java.base/jdk.internal.ref=ALL-UNNAMED 
> --add-opens=java.base/sun.nio.ch=ALL-UNNAMED 
> --add-opens=java.base/sun.nio.cs=ALL-UNNAMED 
> --add-opens=java.base/sun.security.action=ALL-UNNAMED 
> --add-opens=java.base/sun.util.calendar=ALL-UNNAMED 
> --add-opens=java.security.jgss/sun.security.krb5=ALL-UNNAMED 
> -Djdk.reflect.useDirectMethodHandle=false 
> -Dio.netty.tryReflectionSetAccessible=true", 
> "spark.unsafe.exceptionOnMemoryLeak"="true", 
> "spark.sql.execution.root.id"="58981", 
> "spark.ui.showConsoleProgress"="false", 
> "spark.test.home"="/home/runner/work/spark/spark", 
> "spark.ui.enabled"="false", 
> "spark.driver.extraJavaOptions"="-Djava.net.preferIPv6Addresses=false 
> -XX:+IgnoreUnrecognizedVMOptions --add-modules=jdk.incubator.vector 
> --add-opens=java.base/java.lang=ALL-UNNAMED 
> --add-opens=java.base/java.lang.invoke=ALL-UNNAMED 
> --add-opens=java.base/java.lang.reflect=ALL-UNNAMED 
> --add-opens=java.base/java.io=ALL-UNNAMED 
> --add-opens=java.base/java.net=ALL-UNNAMED 
> --add-opens=java.base/java.nio=ALL-UNNAMED 
> --add-opens=java.base/java.util=ALL-UNNAMED 
> --add-opens=java.base/java.util.concurrent=ALL-UNNAMED 
> --add-opens=java.base/java.util.concurrent.atomic=ALL-UNNAMED 
> --add-opens=java.base/jdk.internal.ref=ALL-UNNAMED 
> --add-opens=java.base/sun.nio.ch=ALL-UNNAMED 
> --add-opens=java.base/sun.nio.cs=ALL-UNNAMED 
> --add-opens=java.base/sun.security.action=ALL-UNNAMED 
> --add-opens=java.base/sun.util.calendar=ALL-UNNAMED 
> --add-opens=java.security.jgss/sun.security.krb5=ALL-UNNAMED 
> -Djdk.reflect.useDirectMethodHandle=false 
> -Dio.netty.tryReflectionSetAccessible=true"}, {"spark.driver.port"="43083", 
> "spark.app.startTime"="1726636674325", 
> "spark.rdd.scope"="{"id":"798748","name":"Exchange"}", 
> "spark.testing"="true", 
> "spark.hadoop.fs.s3a.connection.establish.timeout"="30000", 
> "spark.memory.debugFill"="true", "spark.master.rest.enabled"="false", 
> "spark.sql.execution.id"="58983", 
> "spark.sql.warehouse.dir"="file:/home/runner/work/spark/spark/sql/core/spark-warehouse",
>  "spark.rdd.scope.noOverride"="true", "spark.master"="local[2]", 
> "spark.job.interruptOnCancel"="true", "spark.executor.id"="driver", 
> "spark.app.name"="test", "spark.driver.host"="localhost", 
> "spark.app.id"="local-1726636674363", "spark.port.maxRetries"="100", 
> "spark.job.tags"="spark-session-fdcdce98-fa3b-460c-b7a7-67797d0701c9-execution-root-id-58981,spark-session-63c3177e-eba2-4523-abef-bc355ae3c16c-boo,spark-session-fdcdce98-fa3b-460c-b7a7-67797d0701c9,spark-session-63c3177e-eba2-4523-abef-bc355ae3c16c,spark-session-fdcdce98-fa3b-460c-b7a7-67797d0701c9-one",
>  "spark.executor.extraJavaOptions"="-Djava.net.preferIPv6Addresses=false 
> -XX:+IgnoreUnrecognizedVMOptions --add-modules=jdk.incubator.vector 
> --add-opens=java.base/java.lang=ALL-UNNAMED 
> --add-opens=java.base/java.lang.invoke=ALL-UNNAMED 
> --add-opens=java.base/java.lang.reflect=ALL-UNNAMED 
> --add-opens=java.base/java.io=ALL-UNNAMED 
> --add-opens=java.base/java.net=ALL-UNNAMED 
> --add-opens=java.base/java.nio=ALL-UNNAMED 
> --add-opens=java.base/java.util=ALL-UNNAMED 
> --add-opens=java.base/java.util.concurrent=ALL-UNNAMED 
> --add-opens=java.base/java.util.concurrent.atomic=ALL-UNNAMED 
> --add-opens=java.base/jdk.internal.ref=ALL-UNNAMED 
> --add-opens=java.base/sun.nio.ch=ALL-UNNAMED 
> --add-opens=java.base/sun.nio.cs=ALL-UNNAMED 
> --add-opens=java.base/sun.security.action=ALL-UNNAMED 
> --add-opens=java.base/sun.util.calendar=ALL-UNNAMED 
> --add-opens=java.security.jgss/sun.security.krb5=ALL-UNNAMED 
> -Djdk.reflect.useDirectMethodHandle=false 
> -Dio.netty.tryReflectionSetAccessible=true", 
> "spark.unsafe.exceptionOnMemoryLeak"="true", 
> "spark.sql.execution.root.id"="58981", 
> "spark.ui.showConsoleProgress"="false", 
> "spark.test.home"="/home/runner/work/spark/spark", 
> "spark.ui.enabled"="false", 
> "spark.driver.extraJavaOptions"="-Djava.net.preferIPv6Addresses=false 
> -XX:+IgnoreUnrecognizedVMOptions --add-modules=jdk.incubator.vector 
> --add-opens=java.base/java.lang=ALL-UNNAMED 
> --add-opens=java.base/java.lang.invoke=ALL-UNNAMED 
> --add-opens=java.base/java.lang.reflect=ALL-UNNAMED 
> --add-opens=java.base/java.io=ALL-UNNAMED 
> --add-opens=java.base/java.net=ALL-UNNAMED 
> --add-opens=java.base/java.nio=ALL-UNNAMED 
> --add-opens=java.base/java.util=ALL-UNNAMED 
> --add-opens=java.base/java.util.concurrent=ALL-UNNAMED 
> --add-opens=java.base/java.util.concurrent.atomic=ALL-UNNAMED 
> --add-opens=java.base/jdk.internal.ref=ALL-UNNAMED 
> --add-opens=java.base/sun.nio.ch=ALL-UNNAMED 
> --add-opens=java.base/sun.nio.cs=ALL-UNNAMED 
> --add-opens=java.base/sun.security.action=ALL-UNNAMED 
> --add-opens=java.base/sun.util.calendar=ALL-UNNAMED 
> --add-opens=java.security.jgss/sun.security.krb5=ALL-UNNAMED 
> -Djdk.reflect.useDirectMethodHandle=false 
> -Dio.netty.tryReflectionSetAccessible=true"}) had size 2 instead of expected 
> size 1 (SparkSessionJobTaggingAndCancellationSuite.scala:202)
> [info]   org.scalatest.exceptions.TestFailedException:
> [info]   at 
> org.scalatest.Assertions.newAssertionFailedException(Assertions.scala:472)
> [info]   at 
> org.scalatest.Assertions.newAssertionFailedException$(Assertions.scala:471)
> [info]   at 
> org.scalatest.Assertions$.newAssertionFailedException(Assertions.scala:1231)
> [info]   at 
> org.scalatest.Assertions$AssertionsHelper.macroAssert(Assertions.scala:1295)
> [info]   at 
> org.apache.spark.sql.SparkSessionJobTaggingAndCancellationSuite.$anonfun$new$13(SparkSessionJobTaggingAndCancellationSuite.scala:202)
> [info]   at scala.collection.immutable.List.foreach(List.scala:334)
> [info]   at 
> org.apache.spark.sql.SparkSessionJobTaggingAndCancellationSuite.$anonfun$new$6(SparkSessionJobTaggingAndCancellationSuite.scala:199)
> [info]   at org.scalatest.enablers.Timed$$anon$1.timeoutAfter(Timed.scala:127)
> [info]   at 
> org.scalatest.concurrent.TimeLimits$.failAfterImpl(TimeLimits.scala:282)
> [info]   at 
> org.scalatest.concurrent.TimeLimits.failAfter(TimeLimits.scala:231)
> [info]   at 
> org.scalatest.concurrent.TimeLimits.failAfter$(TimeLimits.scala:230)
> [info]   at org.apache.spark.SparkFunSuite.failAfter(SparkFunSuite.scala:69)
> [info]   at 
> org.apache.spark.SparkFunSuite.$anonfun$test$2(SparkFunSuite.scala:155)
> [info]   at org.scalatest.OutcomeOf.outcomeOf(OutcomeOf.scala:85)
> [info]   at org.scalatest.OutcomeOf.outcomeOf$(OutcomeOf.scala:83)
> [info]   at org.scalatest.OutcomeOf$.outcomeOf(OutcomeOf.scala:104)
> [info]   at org.scalatest.Transformer.apply(Transformer.scala:22)
> [info]   at org.scalatest.Transformer.apply(Transformer.scala:20)
> [info]   at 
> org.scalatest.funsuite.AnyFunSuiteLike$$anon$1.apply(AnyFunSuiteLike.scala:226)
> [info]   at 
> org.apache.spark.SparkFunSuite.withFixture(SparkFunSuite.scala:227)
> [info]   at 
> org.scalatest.funsuite.AnyFunSuiteLike.invokeWithFixture$1(AnyFunSuiteLike.scala:224)
> [info]   at 
> org.scalatest.funsuite.AnyFunSuiteLike.$anonfun$runTest$1(AnyFunSuiteLike.scala:236)
> [info]   at org.scalatest.SuperEngine.runTestImpl(Engine.scala:306)
> [info]   at 
> org.scalatest.funsuite.AnyFunSuiteLike.runTest(AnyFunSuiteLike.scala:236)
> [info]   at 
> org.scalatest.funsuite.AnyFunSuiteLike.runTest$(AnyFunSuiteLike.scala:218)
> [info]   at 
> org.apache.spark.SparkFunSuite.org$scalatest$BeforeAndAfterEach$$super$runTest(SparkFunSuite.scala:69)
> [info]   at 
> org.scalatest.BeforeAndAfterEach.runTest(BeforeAndAfterEach.scala:234)
> [info]   at 
> org.scalatest.BeforeAndAfterEach.runTest$(BeforeAndAfterEach.scala:227)
> [info]   at org.apache.spark.SparkFunSuite.runTest(SparkFunSuite.scala:69)
> [info]   at 
> org.scalatest.funsuite.AnyFunSuiteLike.$anonfun$runTests$1(AnyFunSuiteLike.scala:269)
> [info]   at 
> org.scalatest.SuperEngine.$anonfun$runTestsInBranch$1(Engine.scala:413)
> [info]   at scala.collection.immutable.List.foreach(List.scala:334)
> [info]   at org.scalatest.SuperEngine.traverseSubNodes$1(Engine.scala:401)
> [info]   at org.scalatest.SuperEngine.runTestsInBranch(Engine.scala:396)
> [info]   at org.scalatest.SuperEngine.runTestsImpl(Engine.scala:475)
> [info]   at 
> org.scalatest.funsuite.AnyFunSuiteLike.runTests(AnyFunSuiteLike.scala:269)
> [info]   at 
> org.scalatest.funsuite.AnyFunSuiteLike.runTests$(AnyFunSuiteLike.scala:268)
> [info]   at 
> org.scalatest.funsuite.AnyFunSuite.runTests(AnyFunSuite.scala:1564)
> [info]   at org.scalatest.Suite.run(Suite.scala:1114)
> [info]   at org.scalatest.Suite.run$(Suite.scala:1096)
> [info]   at 
> org.scalatest.funsuite.AnyFunSuite.org$scalatest$funsuite$AnyFunSuiteLike$$super$run(AnyFunSuite.scala:1564)
> [info]   at 
> org.scalatest.funsuite.AnyFunSuiteLike.$anonfun$run$1(AnyFunSuiteLike.scala:273)
> [info]   at org.scalatest.SuperEngine.runImpl(Engine.scala:535)
> [info]   at 
> org.scalatest.funsuite.AnyFunSuiteLike.run(AnyFunSuiteLike.scala:273)
> [info]   at 
> org.scalatest.funsuite.AnyFunSuiteLike.run$(AnyFunSuiteLike.scala:272)
> [info]   at 
> org.apache.spark.SparkFunSuite.org$scalatest$BeforeAndAfterAll$$super$run(SparkFunSuite.scala:69)
> [info]   at 
> org.scalatest.BeforeAndAfterAll.liftedTree1$1(BeforeAndAfterAll.scala:213)
> [info]   at org.scalatest.BeforeAndAfterAll.run(BeforeAndAfterAll.scala:210)
> [info]   at org.scalatest.BeforeAndAfterAll.run$(BeforeAndAfterAll.scala:208)
> [info]   at org.apache.spark.SparkFunSuite.run(SparkFunSuite.scala:69)
> [info]   at 
> org.scalatest.tools.Framework.org$scalatest$tools$Framework$$runSuite(Framework.scala:321)
> [info]   at 
> org.scalatest.tools.Framework$ScalaTestTask.execute(Framework.scala:517)
> [info]   at sbt.ForkMain$Run.lambda$runTest$1(ForkMain.java:414)
> [info]   at java.base/java.util.concurrent.FutureTask.run(FutureTask.java:264)
> [info]   at 
> java.base/java.util.concurrent.ThreadPoolExecutor.runWorker(ThreadPoolExecutor.java:1136)
> [info]   at 
> java.base/java.util.concurrent.ThreadPoolExecutor$Worker.run(ThreadPoolExecutor.java:635)
> [info]   at java.base/java.lang.Thread.run(Thread.java:840) {code}



--
This message was sent by Atlassian Jira
(v8.20.10#820010)

---------------------------------------------------------------------
To unsubscribe, e-mail: issues-unsubscribe@spark.apache.org
For additional commands, e-mail: issues-help@spark.apache.org

Reply via email to