amaranathv commented on issue #764: Hoodie 0.4.7: Error upserting bucketType UPDATE for partition #, No value present
URL: https://github.com/apache/incubator-hudi/issues/764#issuecomment-510581260
 
 
   I am getting the same error.
   
   scala> .save("/datalake/888/888/888/hive/warehouse/test_hudi_spark_no_part_1_mor")
   19/07/11 12:31:45 WARN TaskSetManager: Lost task 0.0 in stage 304.0 (TID 464, 88888.uhc.com, executor 2): com.uber.hoodie.exception.HoodieUpsertException: Error upserting bucketType UPDATE for partition :0
           at com.uber.hoodie.table.HoodieCopyOnWriteTable.handleUpsertPartition(HoodieCopyOnWriteTable.java:274)
           at com.uber.hoodie.HoodieWriteClient.lambda$upsertRecordsInternal$7ef77fd$1(HoodieWriteClient.java:451)
           at org.apache.spark.api.java.JavaRDDLike$$anonfun$mapPartitionsWithIndex$1.apply(JavaRDDLike.scala:102)
           at org.apache.spark.api.java.JavaRDDLike$$anonfun$mapPartitionsWithIndex$1.apply(JavaRDDLike.scala:102)
           at org.apache.spark.rdd.RDD$$anonfun$mapPartitionsWithIndex$1$$anonfun$apply$26.apply(RDD.scala:844)
           at org.apache.spark.rdd.RDD$$anonfun$mapPartitionsWithIndex$1$$anonfun$apply$26.apply(RDD.scala:844)
           at org.apache.spark.rdd.MapPartitionsRDD.compute(MapPartitionsRDD.scala:38)
           at org.apache.spark.rdd.RDD.computeOrReadCheckpoint(RDD.scala:323)
           at org.apache.spark.rdd.RDD.iterator(RDD.scala:287)
           at org.apache.spark.rdd.MapPartitionsRDD.compute(MapPartitionsRDD.scala:38)
           at org.apache.spark.rdd.RDD.computeOrReadCheckpoint(RDD.scala:323)
           at org.apache.spark.rdd.RDD$$anonfun$8.apply(RDD.scala:336)
           at org.apache.spark.rdd.RDD$$anonfun$8.apply(RDD.scala:334)
           at org.apache.spark.storage.BlockManager$$anonfun$doPutIterator$1.apply(BlockManager.scala:1055)
           at org.apache.spark.storage.BlockManager$$anonfun$doPutIterator$1.apply(BlockManager.scala:1029)
           at org.apache.spark.storage.BlockManager.doPut(BlockManager.scala:969)
           at org.apache.spark.storage.BlockManager.doPutIterator(BlockManager.scala:1029)
           at org.apache.spark.storage.BlockManager.getOrElseUpdate(BlockManager.scala:760)
           at org.apache.spark.rdd.RDD.getOrCompute(RDD.scala:334)
           at org.apache.spark.rdd.RDD.iterator(RDD.scala:285)
           at org.apache.spark.rdd.MapPartitionsRDD.compute(MapPartitionsRDD.scala:38)
           at org.apache.spark.rdd.RDD.computeOrReadCheckpoint(RDD.scala:323)
           at org.apache.spark.rdd.RDD.iterator(RDD.scala:287)
           at org.apache.spark.scheduler.ResultTask.runTask(ResultTask.scala:87)
           at org.apache.spark.scheduler.Task.run(Task.scala:108)
           at org.apache.spark.executor.Executor$TaskRunner.run(Executor.scala:338)
           at java.util.concurrent.ThreadPoolExecutor.runWorker(ThreadPoolExecutor.java:1149)
           at java.util.concurrent.ThreadPoolExecutor$Worker.run(ThreadPoolExecutor.java:624)
           at java.lang.Thread.run(Thread.java:748)
   Caused by: com.uber.hoodie.exception.HoodieUpsertException: Failed to initialize HoodieAppendHandle for FileId: 951d569b-188d-46e4-ad94-a32525fac797-0 on commit 20190711123144 on HDFS path /datalake/optum/optuminsight/udw/hive/warehouse/test_hudi_spark_no_part_1_mor
           at com.uber.hoodie.io.HoodieAppendHandle.init(HoodieAppendHandle.java:141)
           at com.uber.hoodie.io.HoodieAppendHandle.doAppend(HoodieAppendHandle.java:193)
           at com.uber.hoodie.table.HoodieMergeOnReadTable.handleUpdate(HoodieMergeOnReadTable.java:118)
           at com.uber.hoodie.table.HoodieCopyOnWriteTable.handleUpsertPartition(HoodieCopyOnWriteTable.java:266)
           ... 28 more
   Caused by: java.lang.IllegalArgumentException: Can not create a Path from an empty string
           at org.apache.hadoop.fs.Path.checkPathArg(Path.java:130)
           at org.apache.hadoop.fs.Path.<init>(Path.java:138)
           at org.apache.hadoop.fs.Path.<init>(Path.java:92)
           at com.uber.hoodie.io.HoodieAppendHandle.createLogWriter(HoodieAppendHandle.java:277)
           at com.uber.hoodie.io.HoodieAppendHandle.init(HoodieAppendHandle.java:132)
           ... 31 more
   
   19/07/11 12:31:45 ERROR TaskSetManager: Task 0 in stage 304.0 failed 4 times; aborting job
   org.apache.spark.SparkException: Job aborted due to stage failure: Task 0 in stage 304.0 failed 4 times, most recent failure: Lost task 0.3 in stage 304.0 (TID 467, dbslt1829.uhc.com, executor 2): com.uber.hoodie.exception.HoodieUpsertException: Error upserting bucketType UPDATE for partition :0
           at com.uber.hoodie.table.HoodieCopyOnWriteTable.handleUpsertPartition(HoodieCopyOnWriteTable.java:274)
           at com.uber.hoodie.HoodieWriteClient.lambda$upsertRecordsInternal$7ef77fd$1(HoodieWriteClient.java:451)
           at org.apache.spark.api.java.JavaRDDLike$$anonfun$mapPartitionsWithIndex$1.apply(JavaRDDLike.scala:102)
           at org.apache.spark.api.java.JavaRDDLike$$anonfun$mapPartitionsWithIndex$1.apply(JavaRDDLike.scala:102)
           at org.apache.spark.rdd.RDD$$anonfun$mapPartitionsWithIndex$1$$anonfun$apply$26.apply(RDD.scala:844)
           at org.apache.spark.rdd.RDD$$anonfun$mapPartitionsWithIndex$1$$anonfun$apply$26.apply(RDD.scala:844)
           at org.apache.spark.rdd.MapPartitionsRDD.compute(MapPartitionsRDD.scala:38)
           at org.apache.spark.rdd.RDD.computeOrReadCheckpoint(RDD.scala:323)
           at org.apache.spark.rdd.RDD.iterator(RDD.scala:287)
           at org.apache.spark.rdd.MapPartitionsRDD.compute(MapPartitionsRDD.scala:38)
           at org.apache.spark.rdd.RDD.computeOrReadCheckpoint(RDD.scala:323)
           at org.apache.spark.rdd.RDD$$anonfun$8.apply(RDD.scala:336)
           at org.apache.spark.rdd.RDD$$anonfun$8.apply(RDD.scala:334)
           at org.apache.spark.storage.BlockManager$$anonfun$doPutIterator$1.apply(BlockManager.scala:1055)
           at org.apache.spark.storage.BlockManager$$anonfun$doPutIterator$1.apply(BlockManager.scala:1029)
           at org.apache.spark.storage.BlockManager.doPut(BlockManager.scala:969)
           at org.apache.spark.storage.BlockManager.doPutIterator(BlockManager.scala:1029)
           at org.apache.spark.storage.BlockManager.getOrElseUpdate(BlockManager.scala:760)
           at org.apache.spark.rdd.RDD.getOrCompute(RDD.scala:334)
           at org.apache.spark.rdd.RDD.iterator(RDD.scala:285)
           at org.apache.spark.rdd.MapPartitionsRDD.compute(MapPartitionsRDD.scala:38)
           at org.apache.spark.rdd.RDD.computeOrReadCheckpoint(RDD.scala:323)
           at org.apache.spark.rdd.RDD.iterator(RDD.scala:287)
           at org.apache.spark.scheduler.ResultTask.runTask(ResultTask.scala:87)
           at org.apache.spark.scheduler.Task.run(Task.scala:108)
           at org.apache.spark.executor.Executor$TaskRunner.run(Executor.scala:338)
           at java.util.concurrent.ThreadPoolExecutor.runWorker(ThreadPoolExecutor.java:1149)
           at java.util.concurrent.ThreadPoolExecutor$Worker.run(ThreadPoolExecutor.java:624)
           at java.lang.Thread.run(Thread.java:748)
   Caused by: com.uber.hoodie.exception.HoodieUpsertException: Failed to initialize HoodieAppendHandle for FileId: 951d569b-188d-46e4-ad94-a32525fac797-0 on commit 20190711123144 on HDFS path /datalake/888/99999/9999/hive/warehouse/test_hudi_spark_no_part_1_mor
           at com.uber.hoodie.io.HoodieAppendHandle.init(HoodieAppendHandle.java:141)
           at com.uber.hoodie.io.HoodieAppendHandle.doAppend(HoodieAppendHandle.java:193)
           at com.uber.hoodie.table.HoodieMergeOnReadTable.handleUpdate(HoodieMergeOnReadTable.java:118)
           at com.uber.hoodie.table.HoodieCopyOnWriteTable.handleUpsertPartition(HoodieCopyOnWriteTable.java:266)
           ... 28 more
   Caused by: java.lang.IllegalArgumentException: Can not create a Path from an empty string
           at org.apache.hadoop.fs.Path.checkPathArg(Path.java:130)
           at org.apache.hadoop.fs.Path.<init>(Path.java:138)
           at org.apache.hadoop.fs.Path.<init>(Path.java:92)
           at com.uber.hoodie.io.HoodieAppendHandle.createLogWriter(HoodieAppendHandle.java:277)
           at com.uber.hoodie.io.HoodieAppendHandle.init(HoodieAppendHandle.java:132)
           ... 31 more
   
   Driver stacktrace:
      at org.apache.spark.scheduler.DAGScheduler.org$apache$spark$scheduler$DAGScheduler$$failJobAndIndependentStages(DAGScheduler.scala:1517)
      at org.apache.spark.scheduler.DAGScheduler$$anonfun$abortStage$1.apply(DAGScheduler.scala:1505)
      at org.apache.spark.scheduler.DAGScheduler$$anonfun$abortStage$1.apply(DAGScheduler.scala:1504)
      at scala.collection.mutable.ResizableArray$class.foreach(ResizableArray.scala:59)
      at scala.collection.mutable.ArrayBuffer.foreach(ArrayBuffer.scala:48)
      at org.apache.spark.scheduler.DAGScheduler.abortStage(DAGScheduler.scala:1504)
      at org.apache.spark.scheduler.DAGScheduler$$anonfun$handleTaskSetFailed$1.apply(DAGScheduler.scala:814)
      at org.apache.spark.scheduler.DAGScheduler$$anonfun$handleTaskSetFailed$1.apply(DAGScheduler.scala:814)
      at scala.Option.foreach(Option.scala:257)
      at org.apache.spark.scheduler.DAGScheduler.handleTaskSetFailed(DAGScheduler.scala:814)
      at org.apache.spark.scheduler.DAGSchedulerEventProcessLoop.doOnReceive(DAGScheduler.scala:1732)
      at org.apache.spark.scheduler.DAGSchedulerEventProcessLoop.onReceive(DAGScheduler.scala:1687)
      at org.apache.spark.scheduler.DAGSchedulerEventProcessLoop.onReceive(DAGScheduler.scala:1676)
      at org.apache.spark.util.EventLoop$$anon$1.run(EventLoop.scala:48)
      at org.apache.spark.scheduler.DAGScheduler.runJob(DAGScheduler.scala:630)
      at org.apache.spark.SparkContext.runJob(SparkContext.scala:2029)
      at org.apache.spark.SparkContext.runJob(SparkContext.scala:2050)
      at org.apache.spark.SparkContext.runJob(SparkContext.scala:2069)
      at org.apache.spark.SparkContext.runJob(SparkContext.scala:2094)
      at org.apache.spark.rdd.RDD.count(RDD.scala:1158)
      at com.uber.hoodie.HoodieSparkSqlWriter$.write(HoodieSparkSqlWriter.scala:149)
      at com.uber.hoodie.DefaultSource.createRelation(DefaultSource.scala:90)
      at org.apache.spark.sql.execution.datasources.DataSource.write(DataSource.scala:469)
      at org.apache.spark.sql.execution.datasources.SaveIntoDataSourceCommand.run(SaveIntoDataSourceCommand.scala:50)
      at org.apache.spark.sql.execution.command.ExecutedCommandExec.sideEffectResult$lzycompute(commands.scala:58)
      at org.apache.spark.sql.execution.command.ExecutedCommandExec.sideEffectResult(commands.scala:56)
      at org.apache.spark.sql.execution.command.ExecutedCommandExec.doExecute(commands.scala:74)
      at org.apache.spark.sql.execution.SparkPlan$$anonfun$execute$1.apply(SparkPlan.scala:117)
      at org.apache.spark.sql.execution.SparkPlan$$anonfun$execute$1.apply(SparkPlan.scala:117)
      at org.apache.spark.sql.execution.SparkPlan$$anonfun$executeQuery$1.apply(SparkPlan.scala:138)
      at org.apache.spark.rdd.RDDOperationScope$.withScope(RDDOperationScope.scala:151)
      at org.apache.spark.sql.execution.SparkPlan.executeQuery(SparkPlan.scala:135)
      at org.apache.spark.sql.execution.SparkPlan.execute(SparkPlan.scala:116)
      at org.apache.spark.sql.execution.QueryExecution.toRdd$lzycompute(QueryExecution.scala:92)
      at org.apache.spark.sql.execution.QueryExecution.toRdd(QueryExecution.scala:92)
      at org.apache.spark.sql.DataFrameWriter.runCommand(DataFrameWriter.scala:609)
      at org.apache.spark.sql.DataFrameWriter.save(DataFrameWriter.scala:233)
      at org.apache.spark.sql.DataFrameWriter.save(DataFrameWriter.scala:217)
      ... 54 elided
   Caused by: com.uber.hoodie.exception.HoodieUpsertException: Error upserting bucketType UPDATE for partition :0
      at com.uber.hoodie.table.HoodieCopyOnWriteTable.handleUpsertPartition(HoodieCopyOnWriteTable.java:274)
      at com.uber.hoodie.HoodieWriteClient.lambda$upsertRecordsInternal$7ef77fd$1(HoodieWriteClient.java:451)
      at org.apache.spark.api.java.JavaRDDLike$$anonfun$mapPartitionsWithIndex$1.apply(JavaRDDLike.scala:102)
      at org.apache.spark.api.java.JavaRDDLike$$anonfun$mapPartitionsWithIndex$1.apply(JavaRDDLike.scala:102)
      at org.apache.spark.rdd.RDD$$anonfun$mapPartitionsWithIndex$1$$anonfun$apply$26.apply(RDD.scala:844)
      at org.apache.spark.rdd.RDD$$anonfun$mapPartitionsWithIndex$1$$anonfun$apply$26.apply(RDD.scala:844)
      at org.apache.spark.rdd.MapPartitionsRDD.compute(MapPartitionsRDD.scala:38)
      at org.apache.spark.rdd.RDD.computeOrReadCheckpoint(RDD.scala:323)
      at org.apache.spark.rdd.RDD.iterator(RDD.scala:287)
      at org.apache.spark.rdd.MapPartitionsRDD.compute(MapPartitionsRDD.scala:38)
      at org.apache.spark.rdd.RDD.computeOrReadCheckpoint(RDD.scala:323)
      at org.apache.spark.rdd.RDD$$anonfun$8.apply(RDD.scala:336)
      at org.apache.spark.rdd.RDD$$anonfun$8.apply(RDD.scala:334)
      at org.apache.spark.storage.BlockManager$$anonfun$doPutIterator$1.apply(BlockManager.scala:1055)
      at org.apache.spark.storage.BlockManager$$anonfun$doPutIterator$1.apply(BlockManager.scala:1029)
      at org.apache.spark.storage.BlockManager.doPut(BlockManager.scala:969)
      at org.apache.spark.storage.BlockManager.doPutIterator(BlockManager.scala:1029)
      at org.apache.spark.storage.BlockManager.getOrElseUpdate(BlockManager.scala:760)
      at org.apache.spark.rdd.RDD.getOrCompute(RDD.scala:334)
      at org.apache.spark.rdd.RDD.iterator(RDD.scala:285)
      at org.apache.spark.rdd.MapPartitionsRDD.compute(MapPartitionsRDD.scala:38)
      at org.apache.spark.rdd.RDD.computeOrReadCheckpoint(RDD.scala:323)
      at org.apache.spark.rdd.RDD.iterator(RDD.scala:287)
      at org.apache.spark.scheduler.ResultTask.runTask(ResultTask.scala:87)
      at org.apache.spark.scheduler.Task.run(Task.scala:108)
      at org.apache.spark.executor.Executor$TaskRunner.run(Executor.scala:338)
      at java.util.concurrent.ThreadPoolExecutor.runWorker(ThreadPoolExecutor.java:1149)
      at java.util.concurrent.ThreadPoolExecutor$Worker.run(ThreadPoolExecutor.java:624)
      at java.lang.Thread.run(Thread.java:748)
   Caused by: com.uber.hoodie.exception.HoodieUpsertException: Failed to initialize HoodieAppendHandle for FileId: 951d569b-188d-46e4-ad94-a32525fac797-0 on commit 20190711123144 on HDFS path /datalake/9999/999/999/hive/warehouse/test_hudi_spark_no_part_1_mor
      at com.uber.hoodie.io.HoodieAppendHandle.init(HoodieAppendHandle.java:141)
      at com.uber.hoodie.io.HoodieAppendHandle.doAppend(HoodieAppendHandle.java:193)
      at com.uber.hoodie.table.HoodieMergeOnReadTable.handleUpdate(HoodieMergeOnReadTable.java:118)
      at com.uber.hoodie.table.HoodieCopyOnWriteTable.handleUpsertPartition(HoodieCopyOnWriteTable.java:266)
      ... 28 more
   Caused by: java.lang.IllegalArgumentException: Can not create a Path from an empty string
      at org.apache.hadoop.fs.Path.checkPathArg(Path.java:130)
      at org.apache.hadoop.fs.Path.<init>(Path.java:138)
      at org.apache.hadoop.fs.Path.<init>(Path.java:92)
      at com.uber.hoodie.io.HoodieAppendHandle.createLogWriter(HoodieAppendHandle.java:277)
      at com.uber.hoodie.io.HoodieAppendHandle.init(HoodieAppendHandle.java:132)
      ... 31 more
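
   For anyone triaging: the innermost cause is an empty string reaching new Path("") in HoodieAppendHandle.createLogWriter, i.e. the merge-on-read log writer gets an empty partition-path string for this non-partitioned table. Below is a minimal spark-shell sketch of this kind of MOR write; the DataFrame and the column names id, ts and dt are placeholders of mine (the actual command above is truncated), and only the hoodie.datasource.write.* option keys come from the 0.4.x datasource API.

   // spark-shell sketch; `spark` is the session the shell provides.
   // Columns id, ts and dt are illustrative, not taken from the log.
   val df = spark.range(10).selectExpr(
     "id", "current_timestamp() as ts", "'2019-07-11' as dt")

   df.write
     .format("com.uber.hoodie")
     .option("hoodie.table.name", "test_hudi_spark_no_part_1_mor")
     .option("hoodie.datasource.write.storage.type", "MERGE_ON_READ")
     .option("hoodie.datasource.write.recordkey.field", "id")
     .option("hoodie.datasource.write.precombine.field", "ts")
     // A non-empty partition path field keeps the log writer from
     // building new Path("") in HoodieAppendHandle.createLogWriter;
     // the failing "no part" run presumably left this empty.
     .option("hoodie.datasource.write.partitionpath.field", "dt")
     .mode("append")
     .save("/tmp/test_hudi_spark_no_part_1_mor")

   If the table genuinely has no partition column, the non-partitioned key generator shipped with 0.4.x (hoodie.datasource.write.keygenerator.class = com.uber.hoodie.NonpartitionedKeyGenerator) may be worth trying instead, though this log alone does not confirm that it avoids the MOR log-writer issue.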
   

----------------------------------------------------------------
This is an automated message from the Apache Git Service.
To respond to the message, please log on to GitHub and use the
URL above to go to the specific comment.
 
For queries about this service, please contact Infrastructure at:
us...@infra.apache.org


With regards,
Apache Git Services
