LuciferYang commented on PR #39623:
URL: https://github.com/apache/spark/pull/39623#issuecomment-1385110864

```
SparkListenerJobStart(21,1673947875879,WrappedArray(org.apache.spark.scheduler.StageInfo@21de138),{
  spark.driver.port=55977,
  spark.app.startTime=1673947842913,
  spark.rdd.scope={"id":"164","name":"collect"},
  spark.sql.parquet.fieldId.read.enabled=true,
  spark.sql.shuffle.partitions=5,
  spark.sql.codegen.factoryMode=CODEGEN_ONLY,
  spark.sql.optimizer.dynamicPartitionPruning.reuseBroadcastOnly=true,
  spark.sql.adaptive.enabled=false,
  spark.sql.execution.id=22,
  spark.hadoop.fs.file.impl=org.apache.spark.DebugFilesystem,
  spark.sql.warehouse.dir=file:/Users/yangjie01/SourceCode/git/spark-mine-12/spark-warehouse/org.apache.spark.sql.DynamicPartitionPruningV1SuiteAEOff,
  spark.rdd.scope.noOverride=true,
  spark.master=local[2],
  spark.sql.codegen.fallback=false,
  spark.executor.id=driver,
  spark.app.name=test-sql-context,
  spark.sql.optimizer.excludedRules=org.apache.spark.sql.catalyst.optimizer.ConvertToLocalRelation,
  spark.driver.host=localhost,
  spark.app.id=local-1673947844680,
  spark.sql.testkey=true,
  spark.job.description=Listing leaf files and directories for 100 paths:<br/>file:/Users/yangjie01/SourceCode/git/spark-mine-12/spark-warehouse/org.apache.spark.sql.DynamicPartitionPruningV1SuiteAEOff/fact/one=26/two=27/three=28, ...,
  __fetch_continuous_blocks_in_batch_enabled=true,
  spark.executor.extraJavaOptions=-Djava.net.preferIPv6Addresses=false -XX:+IgnoreUnrecognizedVMOptions --add-opens=java.base/java.lang=ALL-UNNAMED --add-opens=java.base/java.lang.invoke=ALL-UNNAMED --add-opens=java.base/java.lang.reflect=ALL-UNNAMED --add-opens=java.base/java.io=ALL-UNNAMED --add-opens=java.base/java.net=ALL-UNNAMED --add-opens=java.base/java.nio=ALL-UNNAMED --add-opens=java.base/java.util=ALL-UNNAMED --add-opens=java.base/java.util.concurrent=ALL-UNNAMED --add-opens=java.base/java.util.concurrent.atomic=ALL-UNNAMED --add-opens=java.base/sun.nio.ch=ALL-UNNAMED --add-opens=java.base/sun.nio.cs=ALL-UNNAMED --add-opens=java.base/sun.security.action=ALL-UNNAMED --add-opens=java.base/sun.util.calendar=ALL-UNNAMED --add-opens=java.security.jgss/sun.security.krb5=ALL-UNNAMED -Djdk.reflect.useDirectMethodHandle=false,
  spark.unsafe.exceptionOnMemoryLeak=true,
  spark.sql.execution.root.id=22,
  spark.driver.extraJavaOptions=-Djava.net.preferIPv6Addresses=false -XX:+IgnoreUnrecognizedVMOptions --add-opens=java.base/java.lang=ALL-UNNAMED --add-opens=java.base/java.lang.invoke=ALL-UNNAMED --add-opens=java.base/java.lang.reflect=ALL-UNNAMED --add-opens=java.base/java.io=ALL-UNNAMED --add-opens=java.base/java.net=ALL-UNNAMED --add-opens=java.base/java.nio=ALL-UNNAMED --add-opens=java.base/java.util=ALL-UNNAMED --add-opens=java.base/java.util.concurrent=ALL-UNNAMED --add-opens=java.base/java.util.concurrent.atomic=ALL-UNNAMED --add-opens=java.base/sun.nio.ch=ALL-UNNAMED --add-opens=java.base/sun.nio.cs=ALL-UNNAMED --add-opens=java.base/sun.security.action=ALL-UNNAMED --add-opens=java.base/sun.util.calendar=ALL-UNNAMED --add-opens=java.security.jgss/sun.security.krb5=ALL-UNNAMED -Djdk.reflect.useDirectMethodHandle=false})
17:31:15.888 ERROR org.apache.spark.scheduler.AsyncEventQueue: Listener SQLAppStatusListener threw an exception
java.lang.NullPointerException
	at org.apache.spark.status.protobuf.StoreTypes$SQLExecutionUIData$Builder.setDescription(StoreTypes.java:46500)
	at org.apache.spark.status.protobuf.sql.SQLExecutionUIDataSerializer.serialize(SQLExecutionUIDataSerializer.scala:34)
	at org.apache.spark.status.protobuf.sql.SQLExecutionUIDataSerializer.serialize(SQLExecutionUIDataSerializer.scala:28)
	at org.apache.spark.status.protobuf.KVStoreProtobufSerializer.serialize(KVStoreProtobufSerializer.scala:30)
	at org.apache.spark.util.kvstore.RocksDB.write(RocksDB.java:188)
	at org.apache.spark.status.ElementTrackingStore.write(ElementTrackingStore.scala:123)
	at org.apache.spark.status.ElementTrackingStore.write(ElementTrackingStore.scala:127)
	at org.apache.spark.status.LiveEntity.write(LiveEntity.scala:50)
	at org.apache.spark.sql.execution.ui.SQLAppStatusListener.update(SQLAppStatusListener.scala:462)
	at org.apache.spark.sql.execution.ui.SQLAppStatusListener.onJobStart(SQLAppStatusListener.scala:126)
	at org.apache.spark.scheduler.SparkListenerBus.doPostEvent(SparkListenerBus.scala:37)
	at org.apache.spark.scheduler.SparkListenerBus.doPostEvent$(SparkListenerBus.scala:28)
	at org.apache.spark.scheduler.AsyncEventQueue.doPostEvent(AsyncEventQueue.scala:37)
	at org.apache.spark.scheduler.AsyncEventQueue.doPostEvent(AsyncEventQueue.scala:37)
	at org.apache.spark.util.ListenerBus.postToAll(ListenerBus.scala:117)
	at org.apache.spark.util.ListenerBus.postToAll$(ListenerBus.scala:101)
	at org.apache.spark.scheduler.AsyncEventQueue.super$postToAll(AsyncEventQueue.scala:105)
	at org.apache.spark.scheduler.AsyncEventQueue.$anonfun$dispatch$1(AsyncEventQueue.scala:105)
	at scala.runtime.java8.JFunction0$mcJ$sp.apply(JFunction0$mcJ$sp.java:23)
	at scala.util.DynamicVariable.withValue(DynamicVariable.scala:62)
	at org.apache.spark.scheduler.AsyncEventQueue.org$apache$spark$scheduler$AsyncEventQueue$$dispatch(AsyncEventQueue.scala:100)
	at org.apache.spark.scheduler.AsyncEventQueue$$anon$2.$anonfun$run$1(AsyncEventQueue.scala:96)
	at org.apache.spark.util.Utils$.tryOrStopSparkContext(Utils.scala:1444)
	at org.apache.spark.scheduler.AsyncEventQueue$$anon$2.run(AsyncEventQueue.scala:96)
```
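
For reference, this `NullPointerException` is the standard behavior of protobuf-java generated builders: every generated string setter throws when handed `null`, so `SQLExecutionUIData.description` must have been `null` when the serializer ran. Below is a minimal sketch of the null-guard pattern, assuming the method names visible in the stack trace; the helper structure and the `setExecutionId`/`setDetails` calls are illustrative, not the actual patch in this PR:

```scala
import org.apache.spark.sql.execution.ui.SQLExecutionUIData
import org.apache.spark.status.protobuf.StoreTypes

// Sketch only: generated protobuf-java setters throw NullPointerException
// on null input, so nullable string fields must be skipped when unset.
// Only the builder and setDescription names come from the stack trace;
// the other setters are assumptions for illustration.
private[sql] object SQLExecutionUIDataSerializerSketch {
  def serialize(ui: SQLExecutionUIData): Array[Byte] = {
    val builder = StoreTypes.SQLExecutionUIData.newBuilder()
    builder.setExecutionId(ui.executionId)
    // description can be null, which is exactly what triggers the NPE
    // above; wrap it in Option so the setter only runs for non-null values.
    Option(ui.description).foreach(builder.setDescription)
    Option(ui.details).foreach(builder.setDetails)
    builder.build().toByteArray
  }
}
```

One caveat with this approach: skipping the setter leaves the proto field at its default (empty string), so the deserializer would also need to decide whether an unset field should round-trip back to `null` or to `""`.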



