anass el created HIVE-21607: -------------------------------- Summary: NoSuchMethodError: org.apache.hive.common.util.HiveStringUtils.joinIgnoringEmpty Key: HIVE-21607 URL: https://issues.apache.org/jira/browse/HIVE-21607 Project: Hive Issue Type: Bug Reporter: anass el
Using Hive *1.2.1000.2.6.5.79-2* with Spark *1.6.3.2.6.5.79-2*: *10:39:23.252 [Driver] ERROR org.apache.spark.deploy.yarn.ApplicationMaster - User class threw exception: java.lang.NoSuchMethodError: org.apache.hive.common.util.HiveStringUtils.joinIgnoringEmpty([Ljava/lang/String;C)Ljava/lang/String;* *java.lang.NoSuchMethodError: org.apache.hive.common.util.HiveStringUtils.joinIgnoringEmpty([Ljava/lang/String;C)Ljava/lang/String;* *at org.apache.hadoop.hive.serde2.ColumnProjectionUtils.appendReadColumns(ColumnProjectionUtils.java:104) ~[hive-serde-1.2.1000.2.6.5.79-2.jar:1.2.1000.2.6.5.79-2]* *at org.apache.spark.sql.hive.HiveShim$.appendReadColumns(HiveShim.scala:78) ~[spark-hive_2.10-1.6.3.2.6.5.79-2.jar:1.6.3.2.6.5.79-2]* *at org.apache.spark.sql.hive.execution.HiveTableScan.addColumnMetadataToConf(HiveTableScan.scala:88) ~[spark-hive_2.10-1.6.3.2.6.5.79-2.jar:1.6.3.2.6.5.79-2]* *at org.apache.spark.sql.hive.execution.HiveTableScan.<init>(HiveTableScan.scala:74) ~[spark-hive_2.10-1.6.3.2.6.5.79-2.jar:1.6.3.2.6.5.79-2]* *at org.apache.spark.sql.hive.HiveStrategies$HiveTableScans$$anonfun$3.apply(HiveStrategies.scala:77) ~[spark-hive_2.10-1.6.3.2.6.5.79-2.jar:1.6.3.2.6.5.79-2]* *at org.apache.spark.sql.hive.HiveStrategies$HiveTableScans$$anonfun$3.apply(HiveStrategies.scala:77) ~[spark-hive_2.10-1.6.3.2.6.5.79-2.jar:1.6.3.2.6.5.79-2]* *at org.apache.spark.sql.execution.SparkPlanner.pruneFilterProject(SparkPlanner.scala:82) ~[spark-hdp-assembly.jar:1.6.3.2.6.5.79-2]* *at org.apache.spark.sql.hive.HiveStrategies$HiveTableScans$.apply(HiveStrategies.scala:73) ~[spark-hive_2.10-1.6.3.2.6.5.79-2.jar:1.6.3.2.6.5.79-2]* *at org.apache.spark.sql.catalyst.planning.QueryPlanner$$anonfun$1.apply(QueryPlanner.scala:58) ~[spark-hdp-assembly.jar:1.6.3.2.6.5.79-2]* *at org.apache.spark.sql.catalyst.planning.QueryPlanner$$anonfun$1.apply(QueryPlanner.scala:58) ~[spark-hdp-assembly.jar:1.6.3.2.6.5.79-2]* *at scala.collection.Iterator$$anon$13.hasNext(Iterator.scala:371) 
~[spark-hdp-assembly.jar:1.6.3.2.6.5.79-2]* *at org.apache.spark.sql.catalyst.planning.QueryPlanner.plan(QueryPlanner.scala:59) ~[spark-hdp-assembly.jar:1.6.3.2.6.5.79-2]* *at org.apache.spark.sql.catalyst.planning.QueryPlanner.planLater(QueryPlanner.scala:54) ~[spark-hdp-assembly.jar:1.6.3.2.6.5.79-2]* *at org.apache.spark.sql.execution.SparkStrategies$Aggregation$.apply(SparkStrategies.scala:217) ~[spark-hdp-assembly.jar:1.6.3.2.6.5.79-2]* *at org.apache.spark.sql.catalyst.planning.QueryPlanner$$anonfun$1.apply(QueryPlanner.scala:58) ~[spark-hdp-assembly.jar:1.6.3.2.6.5.79-2]* *at org.apache.spark.sql.catalyst.planning.QueryPlanner$$anonfun$1.apply(QueryPlanner.scala:58) ~[spark-hdp-assembly.jar:1.6.3.2.6.5.79-2]* *at scala.collection.Iterator$$anon$13.hasNext(Iterator.scala:371) ~[spark-hdp-assembly.jar:1.6.3.2.6.5.79-2]* *at org.apache.spark.sql.catalyst.planning.QueryPlanner.plan(QueryPlanner.scala:59) ~[spark-hdp-assembly.jar:1.6.3.2.6.5.79-2]* *at org.apache.spark.sql.catalyst.planning.QueryPlanner.planLater(QueryPlanner.scala:54) ~[spark-hdp-assembly.jar:1.6.3.2.6.5.79-2]* *at org.apache.spark.sql.execution.SparkStrategies$BasicOperators$.apply(SparkStrategies.scala:363) ~[spark-hdp-assembly.jar:1.6.3.2.6.5.79-2]* *at org.apache.spark.sql.catalyst.planning.QueryPlanner$$anonfun$1.apply(QueryPlanner.scala:58) ~[spark-hdp-assembly.jar:1.6.3.2.6.5.79-2]* *at org.apache.spark.sql.catalyst.planning.QueryPlanner$$anonfun$1.apply(QueryPlanner.scala:58) ~[spark-hdp-assembly.jar:1.6.3.2.6.5.79-2]* *at scala.collection.Iterator$$anon$13.hasNext(Iterator.scala:371) ~[spark-hdp-assembly.jar:1.6.3.2.6.5.79-2]* *at org.apache.spark.sql.catalyst.planning.QueryPlanner.plan(QueryPlanner.scala:59) ~[spark-hdp-assembly.jar:1.6.3.2.6.5.79-2]* *at org.apache.spark.sql.execution.QueryExecution.sparkPlan$lzycompute(QueryExecution.scala:47) ~[spark-hdp-assembly.jar:1.6.3.2.6.5.79-2]* *at org.apache.spark.sql.execution.QueryExecution.sparkPlan(QueryExecution.scala:45) 
~[spark-hdp-assembly.jar:1.6.3.2.6.5.79-2]* *at org.apache.spark.sql.execution.QueryExecution.executedPlan$lzycompute(QueryExecution.scala:52) ~[spark-hdp-assembly.jar:1.6.3.2.6.5.79-2]* *at org.apache.spark.sql.execution.QueryExecution.executedPlan(QueryExecution.scala:52) ~[spark-hdp-assembly.jar:1.6.3.2.6.5.79-2]* *at org.apache.spark.sql.execution.QueryExecution.toRdd$lzycompute(QueryExecution.scala:55) ~[spark-hdp-assembly.jar:1.6.3.2.6.5.79-2]* *at org.apache.spark.sql.execution.QueryExecution.toRdd(QueryExecution.scala:55) ~[spark-hdp-assembly.jar:1.6.3.2.6.5.79-2]* *at org.apache.spark.sql.DataFrame.rdd$lzycompute(DataFrame.scala:1638) ~[spark-hdp-assembly.jar:1.6.3.2.6.5.79-2]* *at org.apache.spark.sql.DataFrame.rdd(DataFrame.scala:1635) ~[spark-hdp-assembly.jar:1.6.3.2.6.5.79-2]* *at org.apache.spark.sql.DataFrame.toJavaRDD(DataFrame.scala:1649) ~[spark-hdp-assembly.jar:1.6.3.2.6.5.79-2]* *at test.SparkLoad.main(SparkLoad.java:56) ~[tests-batch1.jar:2.3.3]* *at sun.reflect.NativeMethodAccessorImpl.invoke0(Native Method) ~[?:1.8.0_162]* *at sun.reflect.NativeMethodAccessorImpl.invoke(NativeMethodAccessorImpl.java:62) ~[?:1.8.0_162]* *at sun.reflect.DelegatingMethodAccessorImpl.invoke(DelegatingMethodAccessorImpl.java:43) ~[?:1.8.0_162]* *at java.lang.reflect.Method.invoke(Method.java:498) ~[?:1.8.0_162]* *at org.apache.spark.deploy.yarn.ApplicationMaster$$anon$2.run(ApplicationMaster.scala:561) ~[spark-hdp-assembly.jar:1.6.3.2.6.5.79-2]* -- This message was sent by Atlassian JIRA (v7.6.3#76005)