rahultoall commented on issue #2302:
URL: https://github.com/apache/hudi/issues/2302#issuecomment-742278466


   @bvaradar there was a hudi-hadoop-mr-bundle-0.6.0.jar in Spark's jars directory, which I have now removed before restarting the cluster. I already have hudi-hadoop-mr-bundle-0.6.0.jar on the Hive aux path, but it still fails to register the table in Hive; the program fails with the error below.
   I started spark-shell with the following command:
   
    ./spark-shell --jars ../jars/hudi-spark-bundle_2.11-0.6.0.jar,../jars/spark-avro_2.11-2.4.7.jar --conf "spark.serializer=org.apache.spark.serializer.KryoSerializer" --conf "spark.sql.hive.convertMetastoreParquet=false"
   
   
   198754 [main] ERROR org.apache.hudi.hive.HiveSyncTool  - Got runtime exception when hive syncing
   org.apache.hudi.hive.HoodieHiveSyncException: Failed in executing SQL CREATE EXTERNAL TABLE  IF NOT EXISTS `hudi_poc`.`trips_hive`( `_hoodie_commit_time` string, `_hoodie_commit_seqno` string, `_hoodie_record_key` string, `_hoodie_partition_path` string, `_hoodie_file_name` string, `trip_id` bigint, `route_type` string, `destination` string, `createdDate` string) ROW FORMAT SERDE 'org.apache.hadoop.hive.ql.io.parquet.serde.ParquetHiveSerDe' STORED AS INPUTFORMAT 'org.apache.hudi.hadoop.HoodieParquetInputFormat' OUTPUTFORMAT 'org.apache.hadoop.hive.ql.io.parquet.MapredParquetOutputFormat' LOCATION '/rahul/hudi_tables/trips_hive'
           at org.apache.hudi.hive.HoodieHiveClient.updateHiveSQL(HoodieHiveClient.java:352)
           at org.apache.hudi.hive.HoodieHiveClient.createTable(HoodieHiveClient.java:262)
           at org.apache.hudi.hive.HiveSyncTool.syncSchema(HiveSyncTool.java:176)
           at org.apache.hudi.hive.HiveSyncTool.syncHoodieTable(HiveSyncTool.java:130)
           at org.apache.hudi.hive.HiveSyncTool.syncHoodieTable(HiveSyncTool.java:94)
           at org.apache.hudi.HoodieSparkSqlWriter$.org$apache$hudi$HoodieSparkSqlWriter$$syncHive(HoodieSparkSqlWriter.scala:321)
           at org.apache.hudi.HoodieSparkSqlWriter$$anonfun$metaSync$2.apply(HoodieSparkSqlWriter.scala:363)
           at org.apache.hudi.HoodieSparkSqlWriter$$anonfun$metaSync$2.apply(HoodieSparkSqlWriter.scala:359)
           at scala.collection.mutable.HashSet.foreach(HashSet.scala:78)
           at org.apache.hudi.HoodieSparkSqlWriter$.metaSync(HoodieSparkSqlWriter.scala:359)
           at org.apache.hudi.HoodieSparkSqlWriter$.commitAndPerformPostOperations(HoodieSparkSqlWriter.scala:417)
           at org.apache.hudi.HoodieSparkSqlWriter$.write(HoodieSparkSqlWriter.scala:205)
           at org.apache.hudi.DefaultSource.createRelation(DefaultSource.scala:125)
           at org.apache.spark.sql.execution.datasources.SaveIntoDataSourceCommand.run(SaveIntoDataSourceCommand.scala:45)
           at org.apache.spark.sql.execution.command.ExecutedCommandExec.sideEffectResult$lzycompute(commands.scala:70)
           at org.apache.spark.sql.execution.command.ExecutedCommandExec.sideEffectResult(commands.scala:68)
           at org.apache.spark.sql.execution.command.ExecutedCommandExec.doExecute(commands.scala:86)
           at org.apache.spark.sql.execution.SparkPlan$$anonfun$execute$1.apply(SparkPlan.scala:131)
           at org.apache.spark.sql.execution.SparkPlan$$anonfun$execute$1.apply(SparkPlan.scala:127)
           at org.apache.spark.sql.execution.SparkPlan$$anonfun$executeQuery$1.apply(SparkPlan.scala:155)
           at org.apache.spark.rdd.RDDOperationScope$.withScope(RDDOperationScope.scala:151)
           at org.apache.spark.sql.execution.SparkPlan.executeQuery(SparkPlan.scala:152)
           at org.apache.spark.sql.execution.SparkPlan.execute(SparkPlan.scala:127)
           at org.apache.spark.sql.execution.QueryExecution.toRdd$lzycompute(QueryExecution.scala:83)
           at org.apache.spark.sql.execution.QueryExecution.toRdd(QueryExecution.scala:81)
           at org.apache.spark.sql.DataFrameWriter$$anonfun$runCommand$1.apply(DataFrameWriter.scala:696)
           at org.apache.spark.sql.DataFrameWriter$$anonfun$runCommand$1.apply(DataFrameWriter.scala:696)
           at org.apache.spark.sql.execution.SQLExecution$$anonfun$withNewExecutionId$1.apply(SQLExecution.scala:80)
           at org.apache.spark.sql.execution.SQLExecution$.withSQLConfPropagated(SQLExecution.scala:127)
           at org.apache.spark.sql.execution.SQLExecution$.withNewExecutionId(SQLExecution.scala:75)
           at org.apache.spark.sql.DataFrameWriter.runCommand(DataFrameWriter.scala:696)
           at org.apache.spark.sql.DataFrameWriter.saveToV1Source(DataFrameWriter.scala:305)
           at org.apache.spark.sql.DataFrameWriter.save(DataFrameWriter.scala:291)
           at org.apache.spark.sql.DataFrameWriter.save(DataFrameWriter.scala:249)
           at $line33.$read$$iw$$iw$$iw$$iw$$iw$$iw$$iw$$iw$$iw$$iw$$iw$$iw.upsert_hive(<console>:59)
           at $line41.$read$$iw$$iw$$iw$$iw$$iw$$iw$$iw$$iw$$iw$$iw$$iw$$iw.<init>(<console>:44)
           at $line41.$read$$iw$$iw$$iw$$iw$$iw$$iw$$iw$$iw$$iw$$iw$$iw.<init>(<console>:49)
           at $line41.$read$$iw$$iw$$iw$$iw$$iw$$iw$$iw$$iw$$iw$$iw.<init>(<console>:51)
           at $line41.$read$$iw$$iw$$iw$$iw$$iw$$iw$$iw$$iw$$iw.<init>(<console>:53)
           at $line41.$read$$iw$$iw$$iw$$iw$$iw$$iw$$iw$$iw.<init>(<console>:55)
           at $line41.$read$$iw$$iw$$iw$$iw$$iw$$iw$$iw.<init>(<console>:57)
           at $line41.$read$$iw$$iw$$iw$$iw$$iw$$iw.<init>(<console>:59)
           at $line41.$read$$iw$$iw$$iw$$iw$$iw.<init>(<console>:61)
           at $line41.$read$$iw$$iw$$iw$$iw.<init>(<console>:63)
           at $line41.$read$$iw$$iw$$iw.<init>(<console>:65)
           at $line41.$read$$iw$$iw.<init>(<console>:67)
           at $line41.$read$$iw.<init>(<console>:69)
           at $line41.$read.<init>(<console>:71)
           at $line41.$read$.<init>(<console>:75)
           at $line41.$read$.<clinit>(<console>)
           at $line41.$eval$.$print$lzycompute(<console>:7)
           at $line41.$eval$.$print(<console>:6)
           at $line41.$eval.$print(<console>)
           at sun.reflect.NativeMethodAccessorImpl.invoke0(Native Method)
           at sun.reflect.NativeMethodAccessorImpl.invoke(NativeMethodAccessorImpl.java:62)
           at sun.reflect.DelegatingMethodAccessorImpl.invoke(DelegatingMethodAccessorImpl.java:43)
           at java.lang.reflect.Method.invoke(Method.java:483)
           at scala.tools.nsc.interpreter.IMain$ReadEvalPrint.call(IMain.scala:793)
           at scala.tools.nsc.interpreter.IMain$Request.loadAndRun(IMain.scala:1054)
           at scala.tools.nsc.interpreter.IMain$WrappedRequest$$anonfun$loadAndRunReq$1.apply(IMain.scala:645)
           at scala.tools.nsc.interpreter.IMain$WrappedRequest$$anonfun$loadAndRunReq$1.apply(IMain.scala:644)
           at scala.reflect.internal.util.ScalaClassLoader$class.asContext(ScalaClassLoader.scala:31)
           at scala.reflect.internal.util.AbstractFileClassLoader.asContext(AbstractFileClassLoader.scala:19)
           at scala.tools.nsc.interpreter.IMain$WrappedRequest.loadAndRunReq(IMain.scala:644)
           at scala.tools.nsc.interpreter.IMain.interpret(IMain.scala:576)
           at scala.tools.nsc.interpreter.IMain.interpret(IMain.scala:572)
           at scala.tools.nsc.interpreter.ILoop.interpretStartingWith(ILoop.scala:819)
           at scala.tools.nsc.interpreter.ILoop.command(ILoop.scala:691)
           at scala.tools.nsc.interpreter.ILoop.processLine(ILoop.scala:404)
           at scala.tools.nsc.interpreter.ILoop.loop(ILoop.scala:425)
           at org.apache.spark.repl.SparkILoop$$anonfun$process$1.apply$mcZ$sp(SparkILoop.scala:285)
           at org.apache.spark.repl.SparkILoop.runClosure(SparkILoop.scala:159)
           at org.apache.spark.repl.SparkILoop.process(SparkILoop.scala:182)
           at org.apache.spark.repl.Main$.doMain(Main.scala:78)
           at org.apache.spark.repl.Main$.main(Main.scala:58)
           at org.apache.spark.repl.Main.main(Main.scala)
           at sun.reflect.NativeMethodAccessorImpl.invoke0(Native Method)
           at sun.reflect.NativeMethodAccessorImpl.invoke(NativeMethodAccessorImpl.java:62)
           at sun.reflect.DelegatingMethodAccessorImpl.invoke(DelegatingMethodAccessorImpl.java:43)
           at java.lang.reflect.Method.invoke(Method.java:483)
           at org.apache.spark.deploy.JavaMainApplication.start(SparkApplication.scala:52)
           at org.apache.spark.deploy.SparkSubmit.org$apache$spark$deploy$SparkSubmit$$runMain(SparkSubmit.scala:845)
           at org.apache.spark.deploy.SparkSubmit.doRunMain$1(SparkSubmit.scala:161)
           at org.apache.spark.deploy.SparkSubmit.submit(SparkSubmit.scala:184)
           at org.apache.spark.deploy.SparkSubmit.doSubmit(SparkSubmit.scala:86)
           at org.apache.spark.deploy.SparkSubmit$$anon$2.doSubmit(SparkSubmit.scala:920)
           at org.apache.spark.deploy.SparkSubmit$.main(SparkSubmit.scala:929)
           at org.apache.spark.deploy.SparkSubmit.main(SparkSubmit.scala)
   Caused by: java.sql.SQLException: java.lang.ClassNotFoundException: org.apache.hudi.hadoop.HoodieParquetInputFormat
           at org.apache.hive.jdbc.HiveStatement.execute(HiveStatement.java:296)
           at org.apache.hudi.hive.HoodieHiveClient.updateHiveSQL(HoodieHiveClient.java:350)
           ... 87 more
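
   Note the Caused by: the DDL is executed through org.apache.hive.jdbc.HiveStatement, so the ClassNotFoundException is raised inside HiveServer2, not in Spark; hudi-hadoop-mr-bundle-0.6.0.jar therefore has to be visible to the HiveServer2 process itself (hive.aux.jars.path on that node, followed by a HiveServer2 restart). One way to confirm this is to run the same DDL over JDBC by hand, as in the sketch below, where the JDBC URL, user, jar path (resolved on the HiveServer2 host), and the scratch table name are all assumed placeholders:

    // Sketch: issue the failing DDL over Hive JDBC directly to confirm the
    // failure happens inside HiveServer2. URL, user, jar path, and the
    // `trips_hive_check` scratch table name are assumed placeholders.
    import java.sql.DriverManager

    Class.forName("org.apache.hive.jdbc.HiveDriver")
    val conn = DriverManager.getConnection("jdbc:hive2://localhost:10000", "hive", "")
    val stmt = conn.createStatement()
    // ADD JAR makes the input format visible to this session only; if the
    // CREATE TABLE succeeds after it, hive.aux.jars.path is not reaching the
    // HiveServer2 process.
    stmt.execute("ADD JAR /path/to/hudi-hadoop-mr-bundle-0.6.0.jar")
    stmt.execute(
      """CREATE EXTERNAL TABLE IF NOT EXISTS `hudi_poc`.`trips_hive_check`(
        |  `trip_id` bigint, `route_type` string, `destination` string, `createdDate` string)
        |ROW FORMAT SERDE 'org.apache.hadoop.hive.ql.io.parquet.serde.ParquetHiveSerDe'
        |STORED AS INPUTFORMAT 'org.apache.hudi.hadoop.HoodieParquetInputFormat'
        |OUTPUTFORMAT 'org.apache.hadoop.hive.ql.io.parquet.MapredParquetOutputFormat'
        |LOCATION '/rahul/hudi_tables/trips_hive'""".stripMargin)
    stmt.close()
    conn.close()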
   

