aizain opened a new issue, #9023:
URL: https://github.com/apache/hudi/issues/9023

   **_Tips before filing an issue_**
   
   - Have you gone through our [FAQs](https://hudi.apache.org/learn/faq/)?
   
   - Join the mailing list to engage in conversations and get faster support at 
[email protected].
   
   - If you have triaged this as a bug, then file an 
[issue](https://issues.apache.org/jira/projects/HUDI/issues) directly.
   
   **Describe the problem you faced**
   
   A clear and concise description of the problem.
   
   **To Reproduce**
   
   Steps to reproduce the behavior:
   
   1. Start the Spark task.
   2. Process the data.
   3. Write to Hudi, performing an upsert.
   
   **Expected behavior**
   
   
   
   
   **Environment Description**
   
   * Hudi version :
   0.12.3
   
   * Spark version :
   2.4.3
   
   * Hive version :
   
   * Hadoop version :
   
   * Storage (HDFS/S3/GCS..) :
   
   * Running on Docker? (yes/no) :
   no
   
   **Additional context**
   
   .hoodie/20230620200048221.commit.requested
   
   .hoodie/20230620200048221.inflight
   {
     "partitionToWriteStats" : {
       "0" : [ {
         "fileId" : "",
         "path" : null,
         "prevCommit" : "null",
         "numWrites" : 0,
         "numDeletes" : 0,
         "numUpdateWrites" : 0,
         "numInserts" : 7,
         "totalWriteBytes" : 0,
         "totalWriteErrors" : 0,
         "tempPath" : null,
         "partitionPath" : null,
         "totalLogRecords" : 0,
         "totalLogFilesCompacted" : 0,
         "totalLogSizeCompacted" : 0,
         "totalUpdatedRecordsCompacted" : 0,
         "totalLogBlocks" : 0,
         "totalCorruptLogBlock" : 0,
         "totalRollbackBlocks" : 0,
         "fileSizeInBytes" : 0,
         "minEventTime" : null,
         "maxEventTime" : null,
         "runtimeStats" : null
       }, {
         "fileId" : "d2655ba8-c137-4faf-a5a2-ea51add34744-0",
         "path" : null,
         "prevCommit" : "20230620155330839",
         "numWrites" : 0,
         "numDeletes" : 0,
         "numUpdateWrites" : 7,
         "numInserts" : 0,
         "totalWriteBytes" : 0,
         "totalWriteErrors" : 0,
         "tempPath" : null,
         "partitionPath" : null,
         "totalLogRecords" : 0,
         "totalLogFilesCompacted" : 0,
         "totalLogSizeCompacted" : 0,
         "totalUpdatedRecordsCompacted" : 0,
         "totalLogBlocks" : 0,
         "totalCorruptLogBlock" : 0,
         "totalRollbackBlocks" : 0,
         "fileSizeInBytes" : 0,
         "minEventTime" : null,
         "maxEventTime" : null,
         "runtimeStats" : null
       } ],
       "1" : [ {
         "fileId" : "",
         "path" : null,
         "prevCommit" : "null",
         "numWrites" : 0,
         "numDeletes" : 0,
         "numUpdateWrites" : 0,
         "numInserts" : 6,
         "totalWriteBytes" : 0,
         "totalWriteErrors" : 0,
         "tempPath" : null,
         "partitionPath" : null,
         "totalLogRecords" : 0,
         "totalLogFilesCompacted" : 0,
         "totalLogSizeCompacted" : 0,
         "totalUpdatedRecordsCompacted" : 0,
         "totalLogBlocks" : 0,
         "totalCorruptLogBlock" : 0,
         "totalRollbackBlocks" : 0,
         "fileSizeInBytes" : 0,
         "minEventTime" : null,
         "maxEventTime" : null,
         "runtimeStats" : null
       }, {
         "fileId" : "9f7da90e-ea95-4f8d-8835-e572822fb38e-0",
         "path" : null,
         "prevCommit" : "20230620155330839",
         "numWrites" : 0,
         "numDeletes" : 0,
         "numUpdateWrites" : 5,
         "numInserts" : 0,
         "totalWriteBytes" : 0,
         "totalWriteErrors" : 0,
         "tempPath" : null,
         "partitionPath" : null,
         "totalLogRecords" : 0,
         "totalLogFilesCompacted" : 0,
         "totalLogSizeCompacted" : 0,
         "totalUpdatedRecordsCompacted" : 0,
         "totalLogBlocks" : 0,
         "totalCorruptLogBlock" : 0,
         "totalRollbackBlocks" : 0,
         "fileSizeInBytes" : 0,
         "minEventTime" : null,
         "maxEventTime" : null,
         "runtimeStats" : null
       } ],
       "2" : [ {
         "fileId" : "",
         "path" : null,
         "prevCommit" : "null",
         "numWrites" : 0,
         "numDeletes" : 0,
         "numUpdateWrites" : 0,
         "numInserts" : 6,
         "totalWriteBytes" : 0,
         "totalWriteErrors" : 0,
         "tempPath" : null,
         "partitionPath" : null,
         "totalLogRecords" : 0,
         "totalLogFilesCompacted" : 0,
         "totalLogSizeCompacted" : 0,
         "totalUpdatedRecordsCompacted" : 0,
         "totalLogBlocks" : 0,
         "totalCorruptLogBlock" : 0,
         "totalRollbackBlocks" : 0,
         "fileSizeInBytes" : 0,
         "minEventTime" : null,
         "maxEventTime" : null,
         "runtimeStats" : null
       }, {
         "fileId" : "2efbdb50-9d1e-4006-82e2-edbefb1a9719-0",
         "path" : null,
         "prevCommit" : "20230620155330839",
         "numWrites" : 0,
         "numDeletes" : 0,
         "numUpdateWrites" : 9,
         "numInserts" : 0,
         "totalWriteBytes" : 0,
         "totalWriteErrors" : 0,
         "tempPath" : null,
         "partitionPath" : null,
         "totalLogRecords" : 0,
         "totalLogFilesCompacted" : 0,
         "totalLogSizeCompacted" : 0,
         "totalUpdatedRecordsCompacted" : 0,
         "totalLogBlocks" : 0,
         "totalCorruptLogBlock" : 0,
         "totalRollbackBlocks" : 0,
         "fileSizeInBytes" : 0,
         "minEventTime" : null,
         "maxEventTime" : null,
         "runtimeStats" : null
       } ],
      ... ...
   ,
     "compacted" : false,
     "extraMetadata" : { },
     "operationType" : "UPSERT"
   
   
   .hoodie/20230620200051136.rollback.requested         
                                                                                
            
   
Objavro.schema{"type":"record","name":"HoodieRollbackPlan","namespace":"org.apache.hudi.avro.model","fields":[{"name":"instantToRollback","type":["null",{"type":"record","name":"HoodieInstantInfo","fields":[{"name":"commitTime","type":{"type":"string","avro.java.string":"String"}},{"name":"action","type":{"type":"string","avro.java.string":"String"}}]}],"doc":"Hoodie
 instant that needs to be rolled 
back","default":null},{"name":"RollbackRequests","type":["null",{"type":"array","items":{"type":"record","name":"HoodieRollbackRequest","fields":[{"name":"partitionPath","type":{"type":"string","avro.java.string":"String"}},{"name":"fileId","type":["null",{"type":"string","avro.java.string":"String"}],"default":null},{"name":"latestBaseInstant","type":["null",{"type":"string","avro.java.string":"String"}],"default":null},{"name":"filesToBeDeleted","type":{"type":"array","items":{"type":"string","avro.java.string":"String"},"default":[]},"default":[]},{"name":"logBlocksToBeDeleted","type
 
":["null",{"type":"map","values":"long","avro.java.string":"String"}],"default":null}]}}],"default":null},{"name":"version","type":["int","null"],"default":1}]}/W{%S<"20230620170717221
                      commit/W{%S%
   
   
   **Stacktrace**
   
   ```Add the stacktrace of the error.```
   
   org.apache.hudi.exception.HoodieRollbackException: Failed to rollback 
xxxxx//tablexxx commits 20230620200048221
        at 
org.apache.hudi.client.BaseHoodieWriteClient.rollback(BaseHoodieWriteClient.java:808)
        at 
org.apache.hudi.client.BaseHoodieWriteClient.rollbackFailedWrites(BaseHoodieWriteClient.java:1220)
        at 
org.apache.hudi.client.BaseHoodieWriteClient.rollbackFailedWrites(BaseHoodieWriteClient.java:1203)
        at 
org.apache.hudi.client.BaseHoodieWriteClient.rollbackFailedWrites(BaseHoodieWriteClient.java:1191)
        at 
org.apache.hudi.client.BaseHoodieWriteClient.lambda$startCommitWithTime$97cdbdca$1(BaseHoodieWriteClient.java:991)
        at 
org.apache.hudi.common.util.CleanerUtils.rollbackFailedWrites(CleanerUtils.java:153)
        at 
org.apache.hudi.client.BaseHoodieWriteClient.startCommitWithTime(BaseHoodieWriteClient.java:990)
        at 
org.apache.hudi.client.BaseHoodieWriteClient.startCommitWithTime(BaseHoodieWriteClient.java:983)
        at 
org.apache.hudi.HoodieSparkSqlWriter$.write(HoodieSparkSqlWriter.scala:339)
        at org.apache.hudi.DefaultSource.createRelation(DefaultSource.scala:145)
        at 
org.apache.spark.sql.execution.datasources.SaveIntoDataSourceCommand.run(SaveIntoDataSourceCommand.scala:45)
        at 
org.apache.spark.sql.execution.command.ExecutedCommandExec.sideEffectResult$lzycompute(commands.scala:70)
        at 
org.apache.spark.sql.execution.command.ExecutedCommandExec.sideEffectResult(commands.scala:68)
        at 
org.apache.spark.sql.execution.command.ExecutedCommandExec.doExecute(commands.scala:86)
        at 
org.apache.spark.sql.execution.SparkPlan$$anonfun$execute$1.apply(SparkPlan.scala:137)
        at 
org.apache.spark.sql.execution.SparkPlan$$anonfun$execute$1.apply(SparkPlan.scala:133)
        at 
org.apache.spark.sql.execution.SparkPlan$$anonfun$executeQuery$1.apply(SparkPlan.scala:161)
        at 
org.apache.spark.rdd.RDDOperationScope$.withScope(RDDOperationScope.scala:151)
        at 
org.apache.spark.sql.execution.SparkPlan.executeQuery(SparkPlan.scala:158)
        at org.apache.spark.sql.execution.SparkPlan.execute(SparkPlan.scala:133)
        at 
org.apache.spark.sql.execution.QueryExecution.toRdd$lzycompute(QueryExecution.scala:81)
        at 
org.apache.spark.sql.execution.QueryExecution.toRdd(QueryExecution.scala:81)
        at 
org.apache.spark.sql.DataFrameWriter$$anonfun$runCommand$1.apply(DataFrameWriter.scala:676)
        at 
org.apache.spark.sql.DataFrameWriter$$anonfun$runCommand$1.apply(DataFrameWriter.scala:676)
        at 
org.apache.spark.sql.execution.SQLExecution$$anonfun$withNewExecutionId$1.apply(SQLExecution.scala:78)
        at 
org.apache.spark.sql.execution.SQLExecution$.withSQLConfPropagated(SQLExecution.scala:125)
        at 
org.apache.spark.sql.execution.SQLExecution$.withNewExecutionId(SQLExecution.scala:73)
        at 
org.apache.spark.sql.DataFrameWriter.runCommand(DataFrameWriter.scala:676)
        at 
org.apache.spark.sql.DataFrameWriter.saveToV1Source(DataFrameWriter.scala:285)
        at org.apache.spark.sql.DataFrameWriter.save(DataFrameWriter.scala:271)
        at 
scala.collection.parallel.mutable.ParArray$ParArrayIterator.foreach_quick(ParArray.scala:143)
        at 
scala.collection.parallel.mutable.ParArray$ParArrayIterator.foreach(ParArray.scala:136)
        at 
scala.collection.parallel.ParIterableLike$Foreach.leaf(ParIterableLike.scala:972)
        at 
scala.collection.parallel.Task$$anonfun$tryLeaf$1.apply$mcV$sp(Tasks.scala:49)
        at 
scala.collection.parallel.Task$$anonfun$tryLeaf$1.apply(Tasks.scala:48)
        at 
scala.collection.parallel.Task$$anonfun$tryLeaf$1.apply(Tasks.scala:48)
        at scala.collection.parallel.Task$class.tryLeaf(Tasks.scala:51)
        at 
scala.collection.parallel.ParIterableLike$Foreach.tryLeaf(ParIterableLike.scala:969)
        at 
scala.collection.parallel.AdaptiveWorkStealingTasks$WrappedTask$class.internal(Tasks.scala:159)
        at 
scala.collection.parallel.AdaptiveWorkStealingForkJoinTasks$WrappedTask.internal(Tasks.scala:443)
        at 
scala.collection.parallel.AdaptiveWorkStealingTasks$WrappedTask$class.compute(Tasks.scala:149)
        at 
scala.collection.parallel.AdaptiveWorkStealingForkJoinTasks$WrappedTask.compute(Tasks.scala:443)
        at 
scala.concurrent.forkjoin.RecursiveAction.exec(RecursiveAction.java:160)
        at scala.concurrent.forkjoin.ForkJoinTask.doExec(ForkJoinTask.java:260)
        at 
scala.concurrent.forkjoin.ForkJoinPool$WorkQueue.runTask(ForkJoinPool.java:1339)
        at 
scala.concurrent.forkjoin.ForkJoinPool.runWorker(ForkJoinPool.java:1979)
        at 
scala.concurrent.forkjoin.ForkJoinWorkerThread.run(ForkJoinWorkerThread.java:107)
   Caused by: org.apache.hudi.exception.HoodieRollbackException: Error rolling 
back using marker files written for [==>20230620200048221__commit__INFLIGHT]
        at 
org.apache.hudi.table.action.rollback.MarkerBasedRollbackStrategy.getRollbackRequests(MarkerBasedRollbackStrategy.java:103)
        at 
org.apache.hudi.table.action.rollback.BaseRollbackPlanActionExecutor.requestRollback(BaseRollbackPlanActionExecutor.java:109)
        at 
org.apache.hudi.table.action.rollback.BaseRollbackPlanActionExecutor.execute(BaseRollbackPlanActionExecutor.java:132)
        at 
org.apache.hudi.table.HoodieSparkCopyOnWriteTable.scheduleRollback(HoodieSparkCopyOnWriteTable.java:207)
        at 
org.apache.hudi.client.BaseHoodieWriteClient.lambda$rollback$7(BaseHoodieWriteClient.java:782)
        at org.apache.hudi.common.util.Option.orElseGet(Option.java:142)
        at 
org.apache.hudi.client.BaseHoodieWriteClient.rollback(BaseHoodieWriteClient.java:782)
        ... 51 more
   Caused by: java.lang.IllegalArgumentException: No enum constant 
org.apache.hudi.common.table.marker.MarkerType.
        at java.lang.Enum.valueOf(Enum.java:238)
        at 
org.apache.hudi.common.table.marker.MarkerType.valueOf(MarkerType.java:25)
        at 
org.apache.hudi.common.util.MarkerUtils.readMarkerType(MarkerUtils.java:113)
        at 
org.apache.hudi.table.marker.MarkerBasedRollbackUtils.getAllMarkerPaths(MarkerBasedRollbackUtils.java:67)
        at 
org.apache.hudi.table.action.rollback.MarkerBasedRollbackStrategy.getRollbackRequests(MarkerBasedRollbackStrategy.java:76)
        ... 57 more


-- 
This is an automated message from the Apache Git Service.
To respond to the message, please log on to GitHub and use the
URL above to go to the specific comment.

To unsubscribe, e-mail: [email protected]

For queries about this service, please contact Infrastructure at:
[email protected]

Reply via email to