panbingkun commented on PR #45326:
URL: https://github.com/apache/spark/pull/45326#issuecomment-2147009357
```
"grpc-default-executor-1" #82 daemon prio=5 os_prio=31 cpu=9.92ms
elapsed=678.54s tid=0x0000000128b9c800 nid=0xe10b runnable [0x000000030cdbc000]
java.lang.Thread.State: RUNNABLE
at java.io.FileOutputStream.writeBytes([email protected]/Native Method)
at
java.io.FileOutputStream.write([email protected]/FileOutputStream.java:349)
at
java.io.BufferedOutputStream.flushBuffer([email protected]/BufferedOutputStream.java:81)
at
java.io.BufferedOutputStream.flush([email protected]/BufferedOutputStream.java:142)
- locked <0x00000007c1713350> (a java.io.BufferedOutputStream)
at java.io.PrintStream.write([email protected]/PrintStream.java:570)
- locked <0x00000007c1713328> (a java.io.PrintStream)
at
org.apache.logging.log4j.core.util.CloseShieldOutputStream.write(CloseShieldOutputStream.java:53)
at
org.apache.logging.log4j.core.appender.OutputStreamManager.writeToDestination(OutputStreamManager.java:263)
- locked <0x00000007c1a31050> (a
org.apache.logging.log4j.core.appender.OutputStreamManager)
at
org.apache.logging.log4j.core.appender.OutputStreamManager.flushBuffer(OutputStreamManager.java:296)
- locked <0x00000007c1a31050> (a
org.apache.logging.log4j.core.appender.OutputStreamManager)
at
org.apache.logging.log4j.core.appender.OutputStreamManager.flush(OutputStreamManager.java:307)
- locked <0x00000007c1a31050> (a
org.apache.logging.log4j.core.appender.OutputStreamManager)
at
org.apache.logging.log4j.core.appender.AbstractOutputStreamAppender.directEncodeEvent(AbstractOutputStreamAppender.java:229)
at
org.apache.logging.log4j.core.appender.AbstractOutputStreamAppender.tryAppend(AbstractOutputStreamAppender.java:220)
at
org.apache.logging.log4j.core.appender.AbstractOutputStreamAppender.append(AbstractOutputStreamAppender.java:211)
at
org.apache.logging.log4j.core.config.AppenderControl.tryCallAppender(AppenderControl.java:160)
at
org.apache.logging.log4j.core.config.AppenderControl.callAppender0(AppenderControl.java:133)
at
org.apache.logging.log4j.core.config.AppenderControl.callAppenderPreventRecursion(AppenderControl.java:124)
at
org.apache.logging.log4j.core.config.AppenderControl.callAppender(AppenderControl.java:88)
at
org.apache.logging.log4j.core.config.LoggerConfig.callAppenders(LoggerConfig.java:705)
at
org.apache.logging.log4j.core.config.LoggerConfig.processLogEvent(LoggerConfig.java:663)
at
org.apache.logging.log4j.core.config.LoggerConfig.log(LoggerConfig.java:639)
at
org.apache.logging.log4j.core.config.LoggerConfig.log(LoggerConfig.java:575)
at
org.apache.logging.log4j.core.config.AwaitCompletionReliabilityStrategy.log(AwaitCompletionReliabilityStrategy.java:92)
at org.apache.logging.log4j.core.Logger.log(Logger.java:169)
at
org.apache.logging.log4j.spi.AbstractLogger.tryLogMessage(AbstractLogger.java:2906)
at
org.apache.logging.log4j.spi.AbstractLogger.logMessageTrackRecursion(AbstractLogger.java:2859)
at
org.apache.logging.log4j.spi.AbstractLogger.logMessageSafely(AbstractLogger.java:2841)
at
org.apache.logging.log4j.spi.AbstractLogger.logMessage(AbstractLogger.java:2625)
at
org.apache.logging.log4j.spi.AbstractLogger.logIfEnabled(AbstractLogger.java:2373)
at org.apache.logging.slf4j.Log4jLogger.info(Log4jLogger.java:173)
at
org.apache.hadoop.hive.metastore.HiveMetaStore$HMSHandler.logAuditEvent(HiveMetaStore.java:310)
at
org.apache.hadoop.hive.metastore.HiveMetaStore$HMSHandler.logInfo(HiveMetaStore.java:783)
at
org.apache.hadoop.hive.metastore.HiveMetaStore$HMSHandler.startFunction(HiveMetaStore.java:788)
at
org.apache.hadoop.hive.metastore.HiveMetaStore$HMSHandler.get_database(HiveMetaStore.java:960)
at
jdk.internal.reflect.NativeMethodAccessorImpl.invoke0([email protected]/Native
Method)
at
jdk.internal.reflect.NativeMethodAccessorImpl.invoke([email protected]/NativeMethodAccessorImpl.java:77)
at
jdk.internal.reflect.DelegatingMethodAccessorImpl.invoke([email protected]/DelegatingMethodAccessorImpl.java:43)
at java.lang.reflect.Method.invoke([email protected]/Method.java:568)
at
org.apache.hadoop.hive.metastore.RetryingHMSHandler.invokeInternal(RetryingHMSHandler.java:148)
at
org.apache.hadoop.hive.metastore.RetryingHMSHandler.invoke(RetryingHMSHandler.java:107)
at jdk.proxy2.$Proxy31.get_database(jdk.proxy2/Unknown Source)
at
org.apache.hadoop.hive.metastore.HiveMetaStoreClient.getDatabase(HiveMetaStoreClient.java:1302)
at
jdk.internal.reflect.NativeMethodAccessorImpl.invoke0([email protected]/Native
Method)
at
jdk.internal.reflect.NativeMethodAccessorImpl.invoke([email protected]/NativeMethodAccessorImpl.java:77)
at
jdk.internal.reflect.DelegatingMethodAccessorImpl.invoke([email protected]/DelegatingMethodAccessorImpl.java:43)
at java.lang.reflect.Method.invoke([email protected]/Method.java:568)
at
org.apache.hadoop.hive.metastore.RetryingMetaStoreClient.invoke(RetryingMetaStoreClient.java:173)
at jdk.proxy2.$Proxy32.getDatabase(jdk.proxy2/Unknown Source)
at org.apache.hadoop.hive.ql.metadata.Hive.getDatabase(Hive.java:1563)
at
org.apache.hadoop.hive.ql.metadata.Hive.databaseExists(Hive.java:1552)
at
org.apache.spark.sql.hive.client.Shim_v2_0.databaseExists(HiveShim.scala:913)
at
org.apache.spark.sql.hive.client.HiveClientImpl.$anonfun$databaseExists$1(HiveClientImpl.scala:401)
at
org.apache.spark.sql.hive.client.HiveClientImpl$$Lambda$1692/0x0000007001b7a560.apply$mcZ$sp(Unknown
Source)
at
scala.runtime.java8.JFunction0$mcZ$sp.apply(JFunction0$mcZ$sp.scala:17)
at
org.apache.spark.sql.hive.client.HiveClientImpl.$anonfun$withHiveState$1(HiveClientImpl.scala:298)
at
org.apache.spark.sql.hive.client.HiveClientImpl$$Lambda$1693/0x0000007001b7a848.apply(Unknown
Source)
at
org.apache.spark.sql.hive.client.HiveClientImpl.liftedTree1$1(HiveClientImpl.scala:229)
at
org.apache.spark.sql.hive.client.HiveClientImpl.retryLocked(HiveClientImpl.scala:228)
- locked <0x00000007c2bdfb88> (a
org.apache.spark.sql.hive.client.IsolatedClientLoader)
at
org.apache.spark.sql.hive.client.HiveClientImpl.withHiveState(HiveClientImpl.scala:278)
at
org.apache.spark.sql.hive.client.HiveClientImpl.databaseExists(HiveClientImpl.scala:401)
at
org.apache.spark.sql.hive.HiveExternalCatalog.$anonfun$databaseExists$1(HiveExternalCatalog.scala:192)
at
org.apache.spark.sql.hive.HiveExternalCatalog$$Lambda$1656/0x0000007001b50000.apply$mcZ$sp(Unknown
Source)
at
scala.runtime.java8.JFunction0$mcZ$sp.apply(JFunction0$mcZ$sp.scala:17)
at
org.apache.spark.sql.hive.HiveExternalCatalog.withClient(HiveExternalCatalog.scala:100)
- locked <0x00000007c2d986b0> (a
org.apache.spark.sql.hive.HiveExternalCatalog)
at
org.apache.spark.sql.hive.HiveExternalCatalog.databaseExists(HiveExternalCatalog.scala:192)
at
org.apache.spark.sql.catalyst.catalog.ExternalCatalogWithListener.databaseExists(ExternalCatalogWithListener.scala:69)
at
org.apache.spark.sql.catalyst.catalog.SessionCatalog.databaseExists(SessionCatalog.scala:321)
at
org.apache.spark.sql.catalyst.catalog.SessionCatalog.requireDbExists(SessionCatalog.scala:251)
at
org.apache.spark.sql.catalyst.catalog.SessionCatalog.getTableRawMetadata(SessionCatalog.scala:542)
at
org.apache.spark.sql.catalyst.catalog.SessionCatalog.getTableMetadata(SessionCatalog.scala:528)
at
org.apache.spark.sql.execution.datasources.v2.V2SessionCatalog.loadTable(V2SessionCatalog.scala:90)
at
org.apache.spark.sql.connector.catalog.CatalogV2Util$.getTable(CatalogV2Util.scala:368)
at
org.apache.spark.sql.connector.catalog.CatalogV2Util$.loadTable(CatalogV2Util.scala:349)
at
org.apache.spark.sql.catalyst.analysis.Analyzer$ResolveRelations$.$anonfun$resolveRelation$6(Analyzer.scala:1293)
at
org.apache.spark.sql.catalyst.analysis.Analyzer$ResolveRelations$$$Lambda$2191/0x0000007001fc7ba8.apply(Unknown
Source)
at scala.Option.orElse(Option.scala:477)
at
org.apache.spark.sql.catalyst.analysis.Analyzer$ResolveRelations$.$anonfun$resolveRelation$2(Analyzer.scala:1292)
at
org.apache.spark.sql.catalyst.analysis.Analyzer$ResolveRelations$$$Lambda$2189/0x0000007001fc7508.apply(Unknown
Source)
at scala.Option.orElse(Option.scala:477)
at
org.apache.spark.sql.catalyst.analysis.Analyzer$ResolveRelations$.org$apache$spark$sql$catalyst$analysis$Analyzer$ResolveRelations$$resolveRelation(Analyzer.scala:1275)
at
org.apache.spark.sql.catalyst.analysis.Analyzer$ResolveRelations$$anonfun$apply$14.applyOrElse(Analyzer.scala:1125)
at
org.apache.spark.sql.catalyst.analysis.Analyzer$ResolveRelations$$anonfun$apply$14.applyOrElse(Analyzer.scala:1089)
at
org.apache.spark.sql.catalyst.plans.logical.AnalysisHelper.$anonfun$resolveOperatorsUpWithPruning$3(AnalysisHelper.scala:138)
at
org.apache.spark.sql.catalyst.plans.logical.AnalysisHelper$$Lambda$1229/0x00000070019bd030.apply(Unknown
Source)
at
org.apache.spark.sql.catalyst.trees.CurrentOrigin$.withOrigin(origin.scala:84)
at
org.apache.spark.sql.catalyst.plans.logical.AnalysisHelper.$anonfun$resolveOperatorsUpWithPruning$1(AnalysisHelper.scala:138)
at
org.apache.spark.sql.catalyst.plans.logical.AnalysisHelper$$Lambda$1227/0x00000070019bc990.apply(Unknown
Source)
at
org.apache.spark.sql.catalyst.plans.logical.AnalysisHelper$.allowInvokingTransformsInAnalyzer(AnalysisHelper.scala:386)
at
org.apache.spark.sql.catalyst.plans.logical.AnalysisHelper.resolveOperatorsUpWithPruning(AnalysisHelper.scala:134)
at
org.apache.spark.sql.catalyst.plans.logical.AnalysisHelper.resolveOperatorsUpWithPruning$(AnalysisHelper.scala:130)
at
org.apache.spark.sql.catalyst.plans.logical.LogicalPlan.resolveOperatorsUpWithPruning(LogicalPlan.scala:35)
at
org.apache.spark.sql.catalyst.analysis.Analyzer$ResolveRelations$.apply(Analyzer.scala:1089)
at
org.apache.spark.sql.catalyst.analysis.Analyzer$ResolveRelations$.apply(Analyzer.scala:1048)
at
org.apache.spark.sql.catalyst.rules.RuleExecutor.$anonfun$execute$2(RuleExecutor.scala:226)
at
org.apache.spark.sql.catalyst.rules.RuleExecutor$$Lambda$1200/0x00000070019acbd8.apply(Unknown
Source)
at scala.collection.LinearSeqOps.foldLeft(LinearSeq.scala:183)
at scala.collection.LinearSeqOps.foldLeft$(LinearSeq.scala:179)
at scala.collection.immutable.List.foldLeft(List.scala:79)
at
org.apache.spark.sql.catalyst.rules.RuleExecutor.$anonfun$execute$1(RuleExecutor.scala:223)
at
org.apache.spark.sql.catalyst.rules.RuleExecutor.$anonfun$execute$1$adapted(RuleExecutor.scala:215)
at
org.apache.spark.sql.catalyst.rules.RuleExecutor$$Lambda$1199/0x000000700199fbd0.apply(Unknown
Source)
at scala.collection.immutable.List.foreach(List.scala:334)
at
org.apache.spark.sql.catalyst.rules.RuleExecutor.execute(RuleExecutor.scala:215)
at
org.apache.spark.sql.catalyst.analysis.Analyzer.org$apache$spark$sql$catalyst$analysis$Analyzer$$executeSameContext(Analyzer.scala:225)
at
org.apache.spark.sql.catalyst.analysis.Analyzer.$anonfun$execute$1(Analyzer.scala:221)
at
org.apache.spark.sql.catalyst.analysis.Analyzer$$Lambda$1193/0x000000700196f8a0.apply(Unknown
Source)
at
org.apache.spark.sql.catalyst.analysis.AnalysisContext$.withNewAnalysisContext(Analyzer.scala:177)
at
org.apache.spark.sql.catalyst.analysis.Analyzer.execute(Analyzer.scala:221)
at
org.apache.spark.sql.catalyst.analysis.Analyzer.execute(Analyzer.scala:192)
at
org.apache.spark.sql.catalyst.rules.RuleExecutor.$anonfun$executeAndTrack$1(RuleExecutor.scala:186)
at
org.apache.spark.sql.catalyst.rules.RuleExecutor$$Lambda$1192/0x000000700196f178.apply(Unknown
Source)
at
org.apache.spark.sql.catalyst.QueryPlanningTracker$.withTracker(QueryPlanningTracker.scala:89)
at
org.apache.spark.sql.catalyst.rules.RuleExecutor.executeAndTrack(RuleExecutor.scala:186)
at
org.apache.spark.sql.catalyst.analysis.Analyzer.$anonfun$executeAndCheck$1(Analyzer.scala:213)
at
org.apache.spark.sql.catalyst.analysis.Analyzer$$Lambda$1191/0x000000700196eeb0.apply(Unknown
Source)
at
org.apache.spark.sql.catalyst.plans.logical.AnalysisHelper$.markInAnalyzer(AnalysisHelper.scala:393)
at
org.apache.spark.sql.catalyst.analysis.Analyzer.executeAndCheck(Analyzer.scala:212)
at
org.apache.spark.sql.execution.QueryExecution.$anonfun$analyzed$1(QueryExecution.scala:92)
at
org.apache.spark.sql.execution.QueryExecution$$Lambda$1164/0x000000700194ead8.apply(Unknown
Source)
at
org.apache.spark.sql.catalyst.QueryPlanningTracker.measurePhase(QueryPlanningTracker.scala:138)
at
org.apache.spark.sql.execution.QueryExecution.$anonfun$executePhase$2(QueryExecution.scala:225)
at
org.apache.spark.sql.execution.QueryExecution$$Lambda$1166/0x000000700194f068.apply(Unknown
Source)
at
org.apache.spark.sql.execution.QueryExecution$.withInternalError(QueryExecution.scala:599)
at
org.apache.spark.sql.execution.QueryExecution.$anonfun$executePhase$1(QueryExecution.scala:225)
at
org.apache.spark.sql.execution.QueryExecution$$Lambda$1165/0x000000700194eda0.apply(Unknown
Source)
at org.apache.spark.sql.SparkSession.withActive(SparkSession.scala:923)
at
org.apache.spark.sql.execution.QueryExecution.executePhase(QueryExecution.scala:224)
at
org.apache.spark.sql.execution.QueryExecution.analyzed$lzycompute(QueryExecution.scala:92)
- locked <0x00000007c91211a0> (a
org.apache.spark.sql.execution.QueryExecution)
at
org.apache.spark.sql.execution.QueryExecution.analyzed(QueryExecution.scala:89)
at
org.apache.spark.sql.execution.QueryExecution.assertAnalyzed(QueryExecution.scala:73)
at org.apache.spark.sql.Dataset$.$anonfun$ofRows$1(Dataset.scala:94)
at
org.apache.spark.sql.Dataset$$$Lambda$3378/0x0000007002331550.apply(Unknown
Source)
at org.apache.spark.sql.SparkSession.withActive(SparkSession.scala:923)
at org.apache.spark.sql.Dataset$.ofRows(Dataset.scala:92)
at
org.apache.spark.sql.connect.service.SparkConnectAnalyzeHandler.process(SparkConnectAnalyzeHandler.scala:124)
at
org.apache.spark.sql.connect.service.SparkConnectAnalyzeHandler.$anonfun$handle$1(SparkConnectAnalyzeHandler.scala:48)
at
org.apache.spark.sql.connect.service.SparkConnectAnalyzeHandler.$anonfun$handle$1$adapted(SparkConnectAnalyzeHandler.scala:47)
at
org.apache.spark.sql.connect.service.SparkConnectAnalyzeHandler$$Lambda$1019/0x0000007001648700.apply(Unknown
Source)
at
org.apache.spark.sql.connect.service.SessionHolder.$anonfun$withSession$2(SessionHolder.scala:329)
at
org.apache.spark.sql.connect.service.SessionHolder$$Lambda$1090/0x00000070017f47c8.apply(Unknown
Source)
at org.apache.spark.sql.SparkSession.withActive(SparkSession.scala:923)
at
org.apache.spark.sql.connect.service.SessionHolder.$anonfun$withSession$1(SessionHolder.scala:329)
at
org.apache.spark.sql.connect.service.SessionHolder$$Lambda$1085/0x00000070017f32b8.apply(Unknown
Source)
at
org.apache.spark.JobArtifactSet$.withActiveJobArtifactState(JobArtifactSet.scala:94)
at
org.apache.spark.sql.artifact.ArtifactManager.$anonfun$withResources$1(ArtifactManager.scala:79)
at
org.apache.spark.sql.artifact.ArtifactManager$$Lambda$1089/0x00000070017f4500.apply(Unknown
Source)
at org.apache.spark.util.Utils$.withContextClassLoader(Utils.scala:185)
at
org.apache.spark.sql.artifact.ArtifactManager.withResources(ArtifactManager.scala:78)
at
org.apache.spark.sql.connect.service.SessionHolder.withSession(SessionHolder.scala:328)
at
org.apache.spark.sql.connect.service.SparkConnectAnalyzeHandler.handle(SparkConnectAnalyzeHandler.scala:47)
at
org.apache.spark.sql.connect.service.SparkConnectService.analyzePlan(SparkConnectService.scala:97)
at
org.apache.spark.connect.proto.SparkConnectServiceGrpc$MethodHandlers.invoke(SparkConnectServiceGrpc.java:907)
at
org.sparkproject.connect.grpc.io.grpc.stub.ServerCalls$UnaryServerCallHandler$UnaryServerCallListener.onHalfClose(ServerCalls.java:182)
at
org.sparkproject.connect.grpc.io.grpc.internal.ServerCallImpl$ServerStreamListenerImpl.halfClosed(ServerCallImpl.java:356)
at
org.sparkproject.connect.grpc.io.grpc.internal.ServerImpl$JumpToApplicationThreadServerStreamListener$1HalfClosed.runInContext(ServerImpl.java:861)
at
org.sparkproject.connect.grpc.io.grpc.internal.ContextRunnable.run(ContextRunnable.java:37)
at
org.sparkproject.connect.grpc.io.grpc.internal.SerializingExecutor.run(SerializingExecutor.java:133)
at
java.util.concurrent.ThreadPoolExecutor.runWorker([email protected]/ThreadPoolExecutor.java:1136)
at
java.util.concurrent.ThreadPoolExecutor$Worker.run([email protected]/ThreadPoolExecutor.java:635)
at java.lang.Thread.run([email protected]/Thread.java:840)
```
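
For anyone wanting to capture an equivalent dump (including the `- locked <...>` monitor lines above), `jstack -l <pid>` produces this format directly. A minimal programmatic sketch using the standard `ThreadMXBean` API is below; the class name `DumpThreads` and the exact output formatting are just for illustration, not anything from Spark:

```java
import java.lang.management.ManagementFactory;
import java.lang.management.MonitorInfo;
import java.lang.management.ThreadInfo;
import java.lang.management.ThreadMXBean;

public class DumpThreads {
    public static void main(String[] args) {
        ThreadMXBean mx = ManagementFactory.getThreadMXBean();
        // lockedMonitors=true, lockedSynchronizers=true, so the dump
        // includes which monitors each thread currently holds
        for (ThreadInfo ti : mx.dumpAllThreads(true, true)) {
            System.out.printf("\"%s\" %s%n", ti.getThreadName(), ti.getThreadState());
            StackTraceElement[] frames = ti.getStackTrace();
            for (int depth = 0; depth < frames.length; depth++) {
                System.out.println("    at " + frames[depth]);
                // A MonitorInfo records the stack depth at which the
                // monitor was acquired, matching jstack's "- locked" lines.
                for (MonitorInfo mi : ti.getLockedMonitors()) {
                    if (mi.getLockedStackDepth() == depth) {
                        System.out.println("    - locked <"
                                + Integer.toHexString(mi.getIdentityHashCode())
                                + "> (a " + mi.getClassName() + ")");
                    }
                }
            }
            System.out.println();
        }
    }
}
```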