[ https://issues.apache.org/jira/browse/SPARK-51606?page=com.atlassian.jira.plugin.system.issuetabpanels:comment-tabpanel&focusedCommentId=17938496#comment-17938496 ]
Yang Jie commented on SPARK-51606:
----------------------------------
cc [~gurwls223]
> After exiting the --remote local Connect shell, the SparkConnectServer does not terminate
> ------------------------------------------------------------------------------------------
>
>                 Key: SPARK-51606
>                 URL: https://issues.apache.org/jira/browse/SPARK-51606
>             Project: Spark
>          Issue Type: Bug
>          Components: Connect
>    Affects Versions: 4.0.0, 4.1.0
>            Reporter: Yang Jie
>            Priority: Major
>
> {code:java}
> bin/spark-shell --remote local
> WARNING: Using incubator modules: jdk.incubator.vector
> Using Spark's default log4j profile: org/apache/spark/log4j2-defaults.properties
> 25/03/26 15:43:55 INFO SparkSession: Spark Connect server started with the log file: /Users/yangjie01/Tools/spark-4.1.0-SNAPSHOT-bin-3.4.1/logs/spark-cb51ad74-00e1-4567-9746-3dc9a7888ecb-org.apache.spark.sql.connect.service.SparkConnectServer-1-local.out
> 25/03/26 15:43:56 INFO BaseAllocator: Debug mode disabled. Enable with the VM option -Darrow.memory.debug.allocator=true.
> 25/03/26 15:43:56 INFO DefaultAllocationManagerOption: allocation manager type not specified, using netty as the default type
> 25/03/26 15:43:56 INFO CheckAllocator: Using DefaultAllocationManager at memory/netty/DefaultAllocationManagerFactory.class
> Welcome to
>       ____              __
>      / __/__  ___ _____/ /__
>     _\ \/ _ \/ _ `/ __/ '_/
>    /___/ .__/\_,_/_/ /_/\_\   version 4.1.0-SNAPSHOT
>       /_/
> Type in expressions to have them evaluated.
> Spark connect server version 4.1.0-SNAPSHOT.
> Spark session available as 'spark'.
>
> scala> exit
> Bye!
> 25/03/26 15:44:00 INFO ShutdownHookManager: Shutdown hook called
> 25/03/26 15:44:00 INFO ShutdownHookManager: Deleting directory /private/var/folders/j2/cfn7w6795538n_416_27rkqm0000gn/T/spark-ad8dfdf4-cf2b-413f-a9e3-d6e310dff1ea
>
> bin/spark-shell --remote local
> WARNING: Using incubator modules: jdk.incubator.vector
> Using Spark's default log4j profile: org/apache/spark/log4j2-defaults.properties
> 25/03/26 15:44:04 INFO SparkSession: Spark Connect server started with the log file: /Users/yangjie01/Tools/spark-4.1.0-SNAPSHOT-bin-3.4.1/logs/spark-a7b9a1dc-1e16-4e0e-b7c1-8f957d730df3-org.apache.spark.sql.connect.service.SparkConnectServer-1-local.out
> 25/03/26 15:44:05 INFO BaseAllocator: Debug mode disabled. Enable with the VM option -Darrow.memory.debug.allocator=true.
> 25/03/26 15:44:05 INFO DefaultAllocationManagerOption: allocation manager type not specified, using netty as the default type
> 25/03/26 15:44:05 INFO CheckAllocator: Using DefaultAllocationManager at memory/netty/DefaultAllocationManagerFactory.class
> Exception in thread "main" org.apache.spark.SparkException: org.sparkproject.io.grpc.StatusRuntimeException: UNAUTHENTICATED: Invalid authentication token
>     at org.apache.spark.sql.connect.client.GrpcExceptionConverter.toThrowable(GrpcExceptionConverter.scala:162)
>     at org.apache.spark.sql.connect.client.GrpcExceptionConverter.convert(GrpcExceptionConverter.scala:61)
>     at org.apache.spark.sql.connect.client.CustomSparkConnectBlockingStub.analyzePlan(CustomSparkConnectBlockingStub.scala:75)
>     at org.apache.spark.sql.connect.client.SparkConnectClient.analyze(SparkConnectClient.scala:110)
>     at org.apache.spark.sql.connect.client.SparkConnectClient.analyze(SparkConnectClient.scala:256)
>     at org.apache.spark.sql.connect.client.SparkConnectClient.analyze(SparkConnectClient.scala:227)
>     at org.apache.spark.sql.connect.SparkSession.version$lzycompute(SparkSession.scala:92)
>     at org.apache.spark.sql.connect.SparkSession.version(SparkSession.scala:91)
>     at org.apache.spark.sql.application.ConnectRepl$$anon$1.<init>(ConnectRepl.scala:106)
>     at org.apache.spark.sql.application.ConnectRepl$.$anonfun$doMain$1(ConnectRepl.scala:105)
>     at org.apache.spark.sql.connect.SparkSession$.withLocalConnectServer(SparkSession.scala:824)
>     at org.apache.spark.sql.application.ConnectRepl$.doMain(ConnectRepl.scala:67)
>     at org.apache.spark.sql.application.ConnectRepl$.main(ConnectRepl.scala:57)
>     at org.apache.spark.sql.application.ConnectRepl.main(ConnectRepl.scala)
>     at java.base/jdk.internal.reflect.NativeMethodAccessorImpl.invoke0(Native Method)
>     at java.base/jdk.internal.reflect.NativeMethodAccessorImpl.invoke(NativeMethodAccessorImpl.java:77)
>     at java.base/jdk.internal.reflect.DelegatingMethodAccessorImpl.invoke(DelegatingMethodAccessorImpl.java:43)
>     at java.base/java.lang.reflect.Method.invoke(Method.java:569)
>     at org.apache.spark.deploy.JavaMainApplication.start(SparkApplication.scala:52)
>     at org.apache.spark.deploy.SparkSubmit.org$apache$spark$deploy$SparkSubmit$$runMain(SparkSubmit.scala:1027)
>     at org.apache.spark.deploy.SparkSubmit.doRunMain$1(SparkSubmit.scala:204)
>     at org.apache.spark.deploy.SparkSubmit.submit(SparkSubmit.scala:227)
>     at org.apache.spark.deploy.SparkSubmit.doSubmit(SparkSubmit.scala:96)
>     at org.apache.spark.deploy.SparkSubmit$$anon$2.doSubmit(SparkSubmit.scala:1132)
>     at org.apache.spark.deploy.SparkSubmit$.main(SparkSubmit.scala:1141)
>     at org.apache.spark.deploy.SparkSubmit.main(SparkSubmit.scala)
> 25/03/26 15:44:05 INFO ShutdownHookManager: Shutdown hook called
> 25/03/26 15:44:05 INFO ShutdownHookManager: Deleting directory /private/var/folders/j2/cfn7w6795538n_416_27rkqm0000gn/T/spark-17717972-4070-43c7-b306-a060de27a16e
>
> jps
> 12611 SparkSubmit
>
> ps -ef | grep 12611
>   501 12611     1   0  3:43PM ttys002    0:04.77 /Users/yangjie01/Tools/zulu17/bin/java -cp hive-jackson/*:/Users/yangjie01/Tools/spark-4.1.0-SNAPSHOT-bin-3.4.1/conf/:/Users/yangjie01/Tools/spark-4.1.0-SNAPSHOT-bin-3.4.1/jars/slf4j-api-2.0.17.jar:/Users/yangjie01/Tools/spark-4.1.0-SNAPSHOT-bin-3.4.1/jars/* -Dscala.usejavacp=true -Xmx1g -XX:+IgnoreUnrecognizedVMOptions --add-modules=jdk.incubator.vector --add-opens=java.base/java.lang=ALL-UNNAMED --add-opens=java.base/java.lang.invoke=ALL-UNNAMED --add-opens=java.base/java.lang.reflect=ALL-UNNAMED --add-opens=java.base/java.io=ALL-UNNAMED --add-opens=java.base/java.net=ALL-UNNAMED --add-opens=java.base/java.nio=ALL-UNNAMED --add-opens=java.base/java.util=ALL-UNNAMED --add-opens=java.base/java.util.concurrent=ALL-UNNAMED --add-opens=java.base/java.util.concurrent.atomic=ALL-UNNAMED --add-opens=java.base/jdk.internal.ref=ALL-UNNAMED --add-opens=java.base/sun.nio.ch=ALL-UNNAMED --add-opens=java.base/sun.nio.cs=ALL-UNNAMED --add-opens=java.base/sun.security.action=ALL-UNNAMED --add-opens=java.base/sun.util.calendar=ALL-UNNAMED --add-opens=java.security.jgss/sun.security.krb5=ALL-UNNAMED -Djdk.reflect.useDirectMethodHandle=false -Dio.netty.tryReflectionSetAccessible=true --enable-native-access=ALL-UNNAMED -Dderby.connection.requireAuthentication=false org.apache.spark.deploy.SparkSubmit --master local --conf spark.app.submitTime=1742975034128 --conf spark.sql.artifact.isolation.alwaysApplyClassloader=true --conf spark.app.name=Spark shell --conf spark.sql.artifact.isolation.enabled=true --conf spark.submit.deployMode=client --conf spark.ui.showConsoleProgress=true --class org.apache.spark.sql.connect.service.SparkConnectServer --name Spark Connect server spark-internal{code}
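>
> The orphaned server also explains the second failure: each shell launch appears to mint a fresh Connect authentication token, while the leftover SparkConnectServer from the first run is still listening on the default local port with the old token, so the new client's AnalyzePlan call is rejected with UNAUTHENTICATED. Until this is fixed, the stale server can be located with jps (pid 12611 above) and killed by hand.
> A minimal sketch of the kind of fix, assuming the REPL spawns the local server as a child process: bind the server's lifetime to the shell JVM with a shutdown hook. All names below are hypothetical and illustrative; this is not Spark's actual withLocalConnectServer implementation.
> {code:scala}
> import java.util.concurrent.TimeUnit
>
> // Hypothetical helper, not Spark-internal API: ties the spawned
> // SparkConnectServer child process to the lifetime of the shell JVM.
> object LocalConnectServerLifecycle {
>   @volatile private var serverProcess: Option[Process] = None
>
>   def start(command: Seq[String]): Process = synchronized {
>     val proc = new ProcessBuilder(command: _*).inheritIO().start()
>     serverProcess = Some(proc)
>     // Invoked when the REPL JVM exits (e.g. after typing "exit"),
>     // so the server cannot outlive the shell that started it.
>     Runtime.getRuntime.addShutdownHook(new Thread(() => stop()))
>     proc
>   }
>
>   def stop(): Unit = synchronized {
>     serverProcess.foreach { proc =>
>       proc.destroy()                          // graceful termination first
>       if (!proc.waitFor(10, TimeUnit.SECONDS)) {
>         proc.destroyForcibly()                // escalate if it does not exit
>       }
>     }
>     serverProcess = None
>   }
> }
> {code}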