[
https://issues.apache.org/jira/browse/SPARK-7843?page=com.atlassian.jira.plugin.system.issuetabpanels:comment-tabpanel&focusedCommentId=14557616#comment-14557616
]
Yin Huai commented on SPARK-7843:
---------------------------------
In the Thrift server log, I see the following:
{code}
15/05/23 19:49:47 INFO metastore.HiveMetaStore: 1: get_table : db=default
tbl=test
15/05/23 19:49:47 ERROR metastore.RetryingHMSHandler:
java.lang.RuntimeException: java.util.NoSuchElementException
at
org.apache.hadoop.hive.metastore.HiveMetaStore$HMSHandler.logAuditEvent(HiveMetaStore.java:290)
at
org.apache.hadoop.hive.metastore.HiveMetaStore$HMSHandler.logInfo(HiveMetaStore.java:624)
at
org.apache.hadoop.hive.metastore.HiveMetaStore$HMSHandler.startFunction(HiveMetaStore.java:629)
at
org.apache.hadoop.hive.metastore.HiveMetaStore$HMSHandler.startTableFunction(HiveMetaStore.java:645)
at
org.apache.hadoop.hive.metastore.HiveMetaStore$HMSHandler.get_table(HiveMetaStore.java:1555)
at sun.reflect.NativeMethodAccessorImpl.invoke0(Native Method)
at
sun.reflect.NativeMethodAccessorImpl.invoke(NativeMethodAccessorImpl.java:57)
at
sun.reflect.DelegatingMethodAccessorImpl.invoke(DelegatingMethodAccessorImpl.java:43)
at java.lang.reflect.Method.invoke(Method.java:606)
at
org.apache.hadoop.hive.metastore.RetryingHMSHandler.invoke(RetryingHMSHandler.java:105)
at com.sun.proxy.$Proxy22.get_table(Unknown Source)
at
org.apache.hadoop.hive.metastore.HiveMetaStoreClient.getTable(HiveMetaStoreClient.java:997)
at sun.reflect.NativeMethodAccessorImpl.invoke0(Native Method)
at
sun.reflect.NativeMethodAccessorImpl.invoke(NativeMethodAccessorImpl.java:57)
at
sun.reflect.DelegatingMethodAccessorImpl.invoke(DelegatingMethodAccessorImpl.java:43)
at java.lang.reflect.Method.invoke(Method.java:606)
at
org.apache.hadoop.hive.metastore.RetryingMetaStoreClient.invoke(RetryingMetaStoreClient.java:89)
at com.sun.proxy.$Proxy23.getTable(Unknown Source)
at org.apache.hadoop.hive.ql.metadata.Hive.getTable(Hive.java:976)
at
org.apache.spark.sql.hive.client.ClientWrapper$$anonfun$getTableOption$1.apply(ClientWrapper.scala:185)
at
org.apache.spark.sql.hive.client.ClientWrapper$$anonfun$getTableOption$1.apply(ClientWrapper.scala:181)
at
org.apache.spark.sql.hive.client.ClientWrapper.withHiveState(ClientWrapper.scala:139)
at
org.apache.spark.sql.hive.client.ClientWrapper.getTableOption(ClientWrapper.scala:181)
at
org.apache.spark.sql.hive.client.ClientInterface$class.getTable(ClientInterface.scala:112)
at
org.apache.spark.sql.hive.client.ClientWrapper.getTable(ClientWrapper.scala:58)
at
org.apache.spark.sql.hive.HiveMetastoreCatalog.lookupRelation(HiveMetastoreCatalog.scala:227)
at
org.apache.spark.sql.hive.HiveContext$$anon$2.org$apache$spark$sql$catalyst$analysis$OverrideCatalog$$super$lookupRelation(HiveContext.scala:355)
at
org.apache.spark.sql.catalyst.analysis.OverrideCatalog$$anonfun$lookupRelation$3.apply(Catalog.scala:165)
at
org.apache.spark.sql.catalyst.analysis.OverrideCatalog$$anonfun$lookupRelation$3.apply(Catalog.scala:165)
at scala.Option.getOrElse(Option.scala:120)
at
org.apache.spark.sql.catalyst.analysis.OverrideCatalog$class.lookupRelation(Catalog.scala:165)
at
org.apache.spark.sql.hive.HiveContext$$anon$2.lookupRelation(HiveContext.scala:355)
at org.apache.spark.sql.SQLContext.table(SQLContext.scala:737)
at org.apache.spark.sql.hive.execution.DropTable.run(commands.scala:60)
at
org.apache.spark.sql.execution.ExecutedCommand.sideEffectResult$lzycompute(commands.scala:57)
at
org.apache.spark.sql.execution.ExecutedCommand.sideEffectResult(commands.scala:57)
at
org.apache.spark.sql.execution.ExecutedCommand.doExecute(commands.scala:68)
at
org.apache.spark.sql.execution.SparkPlan$$anonfun$execute$1.apply(SparkPlan.scala:88)
at
org.apache.spark.sql.execution.SparkPlan$$anonfun$execute$1.apply(SparkPlan.scala:88)
at
org.apache.spark.rdd.RDDOperationScope$.withScope(RDDOperationScope.scala:148)
at org.apache.spark.sql.execution.SparkPlan.execute(SparkPlan.scala:87)
at
org.apache.spark.sql.SQLContext$QueryExecution.toRdd$lzycompute(SQLContext.scala:922)
at
org.apache.spark.sql.SQLContext$QueryExecution.toRdd(SQLContext.scala:922)
at org.apache.spark.sql.DataFrame.<init>(DataFrame.scala:147)
at org.apache.spark.sql.DataFrame.<init>(DataFrame.scala:131)
at org.apache.spark.sql.DataFrame$.apply(DataFrame.scala:51)
at org.apache.spark.sql.SQLContext.sql(SQLContext.scala:727)
at
org.apache.spark.sql.hive.thriftserver.SparkExecuteStatementOperation.run(Shim13.scala:178)
at
org.apache.hive.service.cli.session.HiveSessionImpl.executeStatementInternal(HiveSessionImpl.java:231)
at
org.apache.hive.service.cli.session.HiveSessionImpl.executeStatementAsync(HiveSessionImpl.java:218)
at sun.reflect.NativeMethodAccessorImpl.invoke0(Native Method)
at
sun.reflect.NativeMethodAccessorImpl.invoke(NativeMethodAccessorImpl.java:57)
at
sun.reflect.DelegatingMethodAccessorImpl.invoke(DelegatingMethodAccessorImpl.java:43)
at java.lang.reflect.Method.invoke(Method.java:606)
at
org.apache.hive.service.cli.session.HiveSessionProxy.invoke(HiveSessionProxy.java:79)
at
org.apache.hive.service.cli.session.HiveSessionProxy.access$000(HiveSessionProxy.java:37)
at
org.apache.hive.service.cli.session.HiveSessionProxy$1.run(HiveSessionProxy.java:64)
at java.security.AccessController.doPrivileged(Native Method)
at javax.security.auth.Subject.doAs(Subject.java:415)
at
org.apache.hadoop.security.UserGroupInformation.doAs(UserGroupInformation.java:1121)
at
org.apache.hadoop.hive.shims.HadoopShimsSecure.doAs(HadoopShimsSecure.java:493)
at
org.apache.hive.service.cli.session.HiveSessionProxy.invoke(HiveSessionProxy.java:60)
at com.sun.proxy.$Proxy24.executeStatementAsync(Unknown Source)
at
org.apache.hive.service.cli.CLIService.executeStatementAsync(CLIService.java:233)
at
org.apache.hive.service.cli.thrift.ThriftCLIService.ExecuteStatement(ThriftCLIService.java:344)
at
org.apache.hive.service.cli.thrift.TCLIService$Processor$ExecuteStatement.getResult(TCLIService.java:1313)
at
org.apache.hive.service.cli.thrift.TCLIService$Processor$ExecuteStatement.getResult(TCLIService.java:1298)
at org.apache.thrift.ProcessFunction.process(ProcessFunction.java:39)
at org.apache.thrift.TBaseProcessor.process(TBaseProcessor.java:39)
at
org.apache.hive.service.auth.TSetIpAddressProcessor.process(TSetIpAddressProcessor.java:55)
at
org.apache.thrift.server.TThreadPoolServer$WorkerProcess.run(TThreadPoolServer.java:206)
at
java.util.concurrent.ThreadPoolExecutor.runWorker(ThreadPoolExecutor.java:1145)
at
java.util.concurrent.ThreadPoolExecutor$Worker.run(ThreadPoolExecutor.java:615)
at java.lang.Thread.run(Thread.java:745)
Caused by: java.util.NoSuchElementException
at java.util.HashMap$HashIterator.nextEntry(HashMap.java:929)
at java.util.HashMap$KeyIterator.next(HashMap.java:960)
at
org.apache.hadoop.security.UserGroupInformation.<init>(UserGroupInformation.java:430)
at
org.apache.hadoop.security.UserGroupInformation.getCurrentUser(UserGroupInformation.java:452)
at
org.apache.hadoop.hive.shims.HadoopShimsSecure.getUGIForConf(HadoopShimsSecure.java:433)
at
org.apache.hadoop.hive.metastore.HiveMetaStore$HMSHandler.logAuditEvent(HiveMetaStore.java:288)
... 73 more
{code}
It looks like we are hitting https://issues.apache.org/jira/browse/HADOOP-7101,
which affects Hadoop 1.0.4
(https://github.com/apache/hadoop/blob/release-1.0.4/src/core/org/apache/hadoop/security/UserGroupInformation.java#L449-453).
The issue was fixed in Hadoop 1.2.0 (see
https://github.com/apache/hadoop/blob/release-1.2.0/src/core/org/apache/hadoop/security/UserGroupInformation.java#L500-509).
> Several thrift server failures in Spark 1.4 sbt build with hadoop 1
> -------------------------------------------------------------------
>
> Key: SPARK-7843
> URL: https://issues.apache.org/jira/browse/SPARK-7843
> Project: Spark
> Issue Type: Bug
> Components: SQL
> Affects Versions: 1.4.0
> Reporter: Yin Huai
> Priority: Critical
> Attachments:
> HiveThriftBinaryServerSuite-spark-yhuai-org.apache.spark.sql.hive.thriftserver.HiveThriftServer2-1-Yins-MBP-2.out
>
>
> The following tests are failing all the time (starting from
> https://amplab.cs.berkeley.edu/jenkins/view/Spark/job/Spark-1.4-SBT/117/)
> {code}
> Test Result (8 failures / +8)
> org.apache.spark.sql.hive.thriftserver.HiveThriftBinaryServerSuite.JDBC query
> execution
> org.apache.spark.sql.hive.thriftserver.HiveThriftBinaryServerSuite.SPARK-3004
> regression: result set containing NULL
> org.apache.spark.sql.hive.thriftserver.HiveThriftBinaryServerSuite.SPARK-4292
> regression: result set iterator issue
> org.apache.spark.sql.hive.thriftserver.HiveThriftBinaryServerSuite.SPARK-4309
> regression: Date type support
> org.apache.spark.sql.hive.thriftserver.HiveThriftBinaryServerSuite.SPARK-4407
> regression: Complex type support
> org.apache.spark.sql.hive.thriftserver.HiveThriftBinaryServerSuite.test
> multiple session
> org.apache.spark.sql.hive.thriftserver.HiveThriftHttpServerSuite.JDBC query
> execution
> org.apache.spark.sql.hive.thriftserver.UISeleniumSuite.thrift server ui test
> {code}
--
This message was sent by Atlassian JIRA
(v6.3.4#6332)
---------------------------------------------------------------------
To unsubscribe, e-mail: [email protected]
For additional commands, e-mail: [email protected]