[ 
https://issues.apache.org/jira/browse/HUDI-3393?page=com.atlassian.jira.plugin.system.issuetabpanels:comment-tabpanel&focusedCommentId=17633820#comment-17633820
 ] 

Filipe Souza edited comment on HUDI-3393 at 11/14/22 2:06 PM:
--------------------------------------------------------------

I'm still having this issue in version 0.12.1.
Can you help me?
{code:java}
org.apache.hudi.exception.HoodieRollbackException: Failed to rollback 
vivo_recarga_current commits 20221111220422669
        at 
org.apache.hudi.client.BaseHoodieWriteClient.rollback(BaseHoodieWriteClient.java:785)
 ~[hudi-spark3.3-bundle_2.12-0.12.1.jar:0.12.1]
        at 
org.apache.hudi.client.BaseHoodieWriteClient.rollbackFailedWrites(BaseHoodieWriteClient.java:1194)
 ~[hudi-spark3.3-bundle_2.12-0.12.1.jar:0.12.1]
        at 
org.apache.hudi.client.BaseHoodieWriteClient.rollbackFailedWrites(BaseHoodieWriteClient.java:1177)
 ~[hudi-spark3.3-bundle_2.12-0.12.1.jar:0.12.1]
        at 
org.apache.hudi.client.BaseHoodieWriteClient.rollbackFailedWrites(BaseHoodieWriteClient.java:1165)
 ~[hudi-spark3.3-bundle_2.12-0.12.1.jar:0.12.1]
        at 
org.apache.hudi.client.BaseHoodieWriteClient.lambda$startCommitWithTime$97cdbdca$1(BaseHoodieWriteClient.java:966)
 ~[hudi-spark3.3-bundle_2.12-0.12.1.jar:0.12.1]
        at 
org.apache.hudi.common.util.CleanerUtils.rollbackFailedWrites(CleanerUtils.java:153)
 ~[hudi-spark3.3-bundle_2.12-0.12.1.jar:0.12.1]
        at 
org.apache.hudi.client.BaseHoodieWriteClient.startCommitWithTime(BaseHoodieWriteClient.java:965)
 ~[hudi-spark3.3-bundle_2.12-0.12.1.jar:0.12.1]
        at 
org.apache.hudi.client.BaseHoodieWriteClient.startCommitWithTime(BaseHoodieWriteClient.java:958)
 ~[hudi-spark3.3-bundle_2.12-0.12.1.jar:0.12.1]
        at 
org.apache.hudi.HoodieSparkSqlWriter$.write(HoodieSparkSqlWriter.scala:330) 
~[hudi-spark3.3-bundle_2.12-0.12.1.jar:0.12.1]
        at 
org.apache.spark.sql.hudi.command.InsertIntoHoodieTableCommand$.run(InsertIntoHoodieTableCommand.scala:101)
 ~[hudi-spark3.3-bundle_2.12-0.12.1.jar:0.12.1]
        at 
org.apache.spark.sql.hudi.command.InsertIntoHoodieTableCommand.run(InsertIntoHoodieTableCommand.scala:60)
 ~[hudi-spark3.3-bundle_2.12-0.12.1.jar:0.12.1]
        at 
org.apache.spark.sql.execution.command.ExecutedCommandExec.sideEffectResult$lzycompute(commands.scala:75)
 ~[spark-sql_2.12-3.3.0-amzn-0.jar:3.3.0-amzn-0]
        at 
org.apache.spark.sql.execution.command.ExecutedCommandExec.sideEffectResult(commands.scala:73)
 ~[spark-sql_2.12-3.3.0-amzn-0.jar:3.3.0-amzn-0]
        at 
org.apache.spark.sql.execution.command.ExecutedCommandExec.executeCollect(commands.scala:84)
 ~[spark-sql_2.12-3.3.0-amzn-0.jar:3.3.0-amzn-0]
        at 
org.apache.spark.sql.execution.QueryExecution$$anonfun$eagerlyExecuteCommands$1.$anonfun$applyOrElse$1(QueryExecution.scala:103)
 ~[spark-sql_2.12-3.3.0-amzn-0.jar:3.3.0-amzn-0]
        at 
org.apache.spark.sql.catalyst.QueryPlanningTracker$.withTracker(QueryPlanningTracker.scala:107)
 ~[spark-catalyst_2.12-3.3.0-amzn-0.jar:3.3.0-amzn-0]
        at 
org.apache.spark.sql.execution.SQLExecution$.withTracker(SQLExecution.scala:224)
 ~[spark-sql_2.12-3.3.0-amzn-0.jar:3.3.0-amzn-0]
        at 
org.apache.spark.sql.execution.SQLExecution$.executeQuery$1(SQLExecution.scala:114)
 ~[spark-sql_2.12-3.3.0-amzn-0.jar:3.3.0-amzn-0]
        at 
org.apache.spark.sql.execution.SQLExecution$.$anonfun$withNewExecutionId$7(SQLExecution.scala:139)
 ~[spark-sql_2.12-3.3.0-amzn-0.jar:3.3.0-amzn-0]
        at 
org.apache.spark.sql.catalyst.QueryPlanningTracker$.withTracker(QueryPlanningTracker.scala:107)
 ~[spark-catalyst_2.12-3.3.0-amzn-0.jar:3.3.0-amzn-0]
        at 
org.apache.spark.sql.execution.SQLExecution$.withTracker(SQLExecution.scala:224)
 ~[spark-sql_2.12-3.3.0-amzn-0.jar:3.3.0-amzn-0]
        at 
org.apache.spark.sql.execution.SQLExecution$.$anonfun$withNewExecutionId$6(SQLExecution.scala:139)
 ~[spark-sql_2.12-3.3.0-amzn-0.jar:3.3.0-amzn-0]
        at 
org.apache.spark.sql.execution.SQLExecution$.withSQLConfPropagated(SQLExecution.scala:245)
 ~[spark-sql_2.12-3.3.0-amzn-0.jar:3.3.0-amzn-0]
        at 
org.apache.spark.sql.execution.SQLExecution$.$anonfun$withNewExecutionId$1(SQLExecution.scala:138)
 ~[spark-sql_2.12-3.3.0-amzn-0.jar:3.3.0-amzn-0]
        at org.apache.spark.sql.SparkSession.withActive(SparkSession.scala:779) 
~[spark-sql_2.12-3.3.0-amzn-0.jar:3.3.0-amzn-0]
        at 
org.apache.spark.sql.execution.SQLExecution$.withNewExecutionId(SQLExecution.scala:68)
 ~[spark-sql_2.12-3.3.0-amzn-0.jar:3.3.0-amzn-0]
        at 
org.apache.spark.sql.execution.QueryExecution$$anonfun$eagerlyExecuteCommands$1.applyOrElse(QueryExecution.scala:100)
 ~[spark-sql_2.12-3.3.0-amzn-0.jar:3.3.0-amzn-0]
        at 
org.apache.spark.sql.execution.QueryExecution$$anonfun$eagerlyExecuteCommands$1.applyOrElse(QueryExecution.scala:96)
 ~[spark-sql_2.12-3.3.0-amzn-0.jar:3.3.0-amzn-0]
        at 
org.apache.spark.sql.catalyst.trees.TreeNode.$anonfun$transformDownWithPruning$1(TreeNode.scala:615)
 ~[spark-catalyst_2.12-3.3.0-amzn-0.jar:3.3.0-amzn-0]
        at 
org.apache.spark.sql.catalyst.trees.CurrentOrigin$.withOrigin(TreeNode.scala:177)
 ~[spark-catalyst_2.12-3.3.0-amzn-0.jar:3.3.0-amzn-0]
        at 
org.apache.spark.sql.catalyst.trees.TreeNode.transformDownWithPruning(TreeNode.scala:615)
 ~[spark-catalyst_2.12-3.3.0-amzn-0.jar:3.3.0-amzn-0]
        at 
org.apache.spark.sql.catalyst.plans.logical.LogicalPlan.org$apache$spark$sql$catalyst$plans$logical$AnalysisHelper$$super$transformDownWithPruning(LogicalPlan.scala:30)
 ~[spark-catalyst_2.12-3.3.0-amzn-0.jar:3.3.0-amzn-0]
        at 
org.apache.spark.sql.catalyst.plans.logical.AnalysisHelper.transformDownWithPruning(AnalysisHelper.scala:267)
 ~[spark-catalyst_2.12-3.3.0-amzn-0.jar:3.3.0-amzn-0]
        at 
org.apache.spark.sql.catalyst.plans.logical.AnalysisHelper.transformDownWithPruning$(AnalysisHelper.scala:263)
 ~[spark-catalyst_2.12-3.3.0-amzn-0.jar:3.3.0-amzn-0]
        at 
org.apache.spark.sql.catalyst.plans.logical.LogicalPlan.transformDownWithPruning(LogicalPlan.scala:30)
 ~[spark-catalyst_2.12-3.3.0-amzn-0.jar:3.3.0-amzn-0]
        at 
org.apache.spark.sql.catalyst.plans.logical.LogicalPlan.transformDownWithPruning(LogicalPlan.scala:30)
 ~[spark-catalyst_2.12-3.3.0-amzn-0.jar:3.3.0-amzn-0]
        at 
org.apache.spark.sql.catalyst.trees.TreeNode.transformDown(TreeNode.scala:591) 
~[spark-catalyst_2.12-3.3.0-amzn-0.jar:3.3.0-amzn-0]
        at 
org.apache.spark.sql.execution.QueryExecution.eagerlyExecuteCommands(QueryExecution.scala:96)
 ~[spark-sql_2.12-3.3.0-amzn-0.jar:3.3.0-amzn-0]
        at 
org.apache.spark.sql.execution.QueryExecution.commandExecuted$lzycompute(QueryExecution.scala:83)
 ~[spark-sql_2.12-3.3.0-amzn-0.jar:3.3.0-amzn-0]
        at 
org.apache.spark.sql.execution.QueryExecution.commandExecuted(QueryExecution.scala:81)
 ~[spark-sql_2.12-3.3.0-amzn-0.jar:3.3.0-amzn-0]
        at org.apache.spark.sql.Dataset.<init>(Dataset.scala:222) 
~[spark-sql_2.12-3.3.0-amzn-0.jar:3.3.0-amzn-0]
        at org.apache.spark.sql.Dataset$.$anonfun$ofRows$2(Dataset.scala:102) 
~[spark-sql_2.12-3.3.0-amzn-0.jar:3.3.0-amzn-0]
        at org.apache.spark.sql.SparkSession.withActive(SparkSession.scala:779) 
~[spark-sql_2.12-3.3.0-amzn-0.jar:3.3.0-amzn-0]
        at org.apache.spark.sql.Dataset$.ofRows(Dataset.scala:99) 
~[spark-sql_2.12-3.3.0-amzn-0.jar:3.3.0-amzn-0]
        at 
org.apache.spark.sql.SparkSession.$anonfun$sql$1(SparkSession.scala:622) 
~[spark-sql_2.12-3.3.0-amzn-0.jar:3.3.0-amzn-0]
        at org.apache.spark.sql.SparkSession.withActive(SparkSession.scala:779) 
~[spark-sql_2.12-3.3.0-amzn-0.jar:3.3.0-amzn-0]
        at org.apache.spark.sql.SparkSession.sql(SparkSession.scala:617) 
~[spark-sql_2.12-3.3.0-amzn-0.jar:3.3.0-amzn-0]
        at org.apache.spark.sql.SQLContext.sql(SQLContext.scala:651) 
~[spark-sql_2.12-3.3.0-amzn-0.jar:3.3.0-amzn-0]
        at 
org.apache.spark.sql.hive.thriftserver.SparkSQLDriver.run(SparkSQLDriver.scala:67)
 ~[spark-hive-thriftserver_2.12-3.3.0-amzn-0.jar:3.3.0-amzn-0]
        at 
org.apache.spark.sql.hive.thriftserver.SparkSQLCLIDriver.processCmd(SparkSQLCLIDriver.scala:384)
 ~[spark-hive-thriftserver_2.12-3.3.0-amzn-0.jar:3.3.0-amzn-0]
        at 
org.apache.spark.sql.hive.thriftserver.SparkSQLCLIDriver.$anonfun$processLine$1(SparkSQLCLIDriver.scala:504)
 ~[spark-hive-thriftserver_2.12-3.3.0-amzn-0.jar:3.3.0-amzn-0]
        at 
org.apache.spark.sql.hive.thriftserver.SparkSQLCLIDriver.$anonfun$processLine$1$adapted(SparkSQLCLIDriver.scala:498)
 ~[spark-hive-thriftserver_2.12-3.3.0-amzn-0.jar:3.3.0-amzn-0]
        at scala.collection.Iterator.foreach(Iterator.scala:943) 
~[scala-library-2.12.15.jar:?]
        at scala.collection.Iterator.foreach$(Iterator.scala:943) 
~[scala-library-2.12.15.jar:?]
        at scala.collection.AbstractIterator.foreach(Iterator.scala:1431) 
~[scala-library-2.12.15.jar:?]
        at scala.collection.IterableLike.foreach(IterableLike.scala:74) 
~[scala-library-2.12.15.jar:?]
        at scala.collection.IterableLike.foreach$(IterableLike.scala:73) 
~[scala-library-2.12.15.jar:?]
        at scala.collection.AbstractIterable.foreach(Iterable.scala:56) 
~[scala-library-2.12.15.jar:?]
        at 
org.apache.spark.sql.hive.thriftserver.SparkSQLCLIDriver.processLine(SparkSQLCLIDriver.scala:498)
 ~[spark-hive-thriftserver_2.12-3.3.0-amzn-0.jar:3.3.0-amzn-0]
        at org.apache.hadoop.hive.cli.CliDriver.processLine(CliDriver.java:336) 
~[hive-cli-2.3.9-amzn-2.jar:2.3.9-amzn-2]
        at 
org.apache.hadoop.hive.cli.CliDriver.processReader(CliDriver.java:474) 
~[hive-cli-2.3.9-amzn-2.jar:2.3.9-amzn-2]
        at org.apache.hadoop.hive.cli.CliDriver.processFile(CliDriver.java:490) 
~[hive-cli-2.3.9-amzn-2.jar:2.3.9-amzn-2]
        at 
org.apache.hadoop.hive.cli.CliDriver.processInitFiles(CliDriver.java:501) 
~[hive-cli-2.3.9-amzn-2.jar:2.3.9-amzn-2]
        at 
org.apache.spark.sql.hive.thriftserver.SparkSQLCLIDriver$.main(SparkSQLCLIDriver.scala:195)
 ~[spark-hive-thriftserver_2.12-3.3.0-amzn-0.jar:3.3.0-amzn-0]
        at 
org.apache.spark.sql.hive.thriftserver.SparkSQLCLIDriver.main(SparkSQLCLIDriver.scala)
 ~[spark-hive-thriftserver_2.12-3.3.0-amzn-0.jar:3.3.0-amzn-0]
        at sun.reflect.NativeMethodAccessorImpl.invoke0(Native Method) 
~[?:1.8.0_352]
        at 
sun.reflect.NativeMethodAccessorImpl.invoke(NativeMethodAccessorImpl.java:62) 
~[?:1.8.0_352]
        at 
sun.reflect.DelegatingMethodAccessorImpl.invoke(DelegatingMethodAccessorImpl.java:43)
 ~[?:1.8.0_352]
        at java.lang.reflect.Method.invoke(Method.java:498) ~[?:1.8.0_352]
        at 
org.apache.spark.deploy.JavaMainApplication.start(SparkApplication.scala:52) 
~[spark-core_2.12-3.3.0-amzn-0.jar:3.3.0-amzn-0]
        at 
org.apache.spark.deploy.SparkSubmit.org$apache$spark$deploy$SparkSubmit$$runMain(SparkSubmit.scala:1006)
 ~[spark-core_2.12-3.3.0-amzn-0.jar:3.3.0-amzn-0]
        at 
org.apache.spark.deploy.SparkSubmit.doRunMain$1(SparkSubmit.scala:180) 
~[spark-core_2.12-3.3.0-amzn-0.jar:3.3.0-amzn-0]
        at org.apache.spark.deploy.SparkSubmit.submit(SparkSubmit.scala:203) 
~[spark-core_2.12-3.3.0-amzn-0.jar:3.3.0-amzn-0]
        at org.apache.spark.deploy.SparkSubmit.doSubmit(SparkSubmit.scala:90) 
~[spark-core_2.12-3.3.0-amzn-0.jar:3.3.0-amzn-0]
        at 
org.apache.spark.deploy.SparkSubmit$$anon$2.doSubmit(SparkSubmit.scala:1095) 
~[spark-core_2.12-3.3.0-amzn-0.jar:3.3.0-amzn-0]
        at org.apache.spark.deploy.SparkSubmit$.main(SparkSubmit.scala:1104) 
~[spark-core_2.12-3.3.0-amzn-0.jar:3.3.0-amzn-0]
        at org.apache.spark.deploy.SparkSubmit.main(SparkSubmit.scala) 
~[spark-core_2.12-3.3.0-amzn-0.jar:3.3.0-amzn-0]
Caused by: org.apache.hudi.exception.HoodieRollbackException: Found commits 
after time :20221111220422669, please rollback greater commits first
        at 
org.apache.hudi.table.action.rollback.BaseRollbackActionExecutor.validateRollbackCommitSequence(BaseRollbackActionExecutor.java:171)
 ~[hudi-spark3.3-bundle_2.12-0.12.1.jar:0.12.1]
        at 
org.apache.hudi.table.action.rollback.BaseRollbackActionExecutor.doRollbackAndGetStats(BaseRollbackActionExecutor.java:210)
 ~[hudi-spark3.3-bundle_2.12-0.12.1.jar:0.12.1]
        at 
org.apache.hudi.table.action.rollback.BaseRollbackActionExecutor.runRollback(BaseRollbackActionExecutor.java:108)
 ~[hudi-spark3.3-bundle_2.12-0.12.1.jar:0.12.1]
        at 
org.apache.hudi.table.action.rollback.BaseRollbackActionExecutor.execute(BaseRollbackActionExecutor.java:135)
 ~[hudi-spark3.3-bundle_2.12-0.12.1.jar:0.12.1]
        at 
org.apache.hudi.table.HoodieSparkCopyOnWriteTable.rollback(HoodieSparkCopyOnWriteTable.java:281)
 ~[hudi-spark3.3-bundle_2.12-0.12.1.jar:0.12.1]
        at 
org.apache.hudi.client.BaseHoodieWriteClient.rollback(BaseHoodieWriteClient.java:768)
 ~[hudi-spark3.3-bundle_2.12-0.12.1.jar:0.12.1]
        ... 76 more {code}


was (Author: JIRAUSER295756):
I'm still having this issue on version 0.12.1.
Can you help me?
{code:java}
org.apache.hudi.exception.HoodieRollbackException: Failed to rollback 
vivo_recarga_current commits 20221111220422669
        at 
org.apache.hudi.client.BaseHoodieWriteClient.rollback(BaseHoodieWriteClient.java:785)
 ~[hudi-spark3.3-bundle_2.12-0.12.1.jar:0.12.1]
        at 
org.apache.hudi.client.BaseHoodieWriteClient.rollbackFailedWrites(BaseHoodieWriteClient.java:1194)
 ~[hudi-spark3.3-bundle_2.12-0.12.1.jar:0.12.1]
        at 
org.apache.hudi.client.BaseHoodieWriteClient.rollbackFailedWrites(BaseHoodieWriteClient.java:1177)
 ~[hudi-spark3.3-bundle_2.12-0.12.1.jar:0.12.1]
        at 
org.apache.hudi.client.BaseHoodieWriteClient.rollbackFailedWrites(BaseHoodieWriteClient.java:1165)
 ~[hudi-spark3.3-bundle_2.12-0.12.1.jar:0.12.1]
        at 
org.apache.hudi.client.BaseHoodieWriteClient.lambda$startCommitWithTime$97cdbdca$1(BaseHoodieWriteClient.java:966)
 ~[hudi-spark3.3-bundle_2.12-0.12.1.jar:0.12.1]
        at 
org.apache.hudi.common.util.CleanerUtils.rollbackFailedWrites(CleanerUtils.java:153)
 ~[hudi-spark3.3-bundle_2.12-0.12.1.jar:0.12.1]
        at 
org.apache.hudi.client.BaseHoodieWriteClient.startCommitWithTime(BaseHoodieWriteClient.java:965)
 ~[hudi-spark3.3-bundle_2.12-0.12.1.jar:0.12.1]
        at 
org.apache.hudi.client.BaseHoodieWriteClient.startCommitWithTime(BaseHoodieWriteClient.java:958)
 ~[hudi-spark3.3-bundle_2.12-0.12.1.jar:0.12.1]
        at 
org.apache.hudi.HoodieSparkSqlWriter$.write(HoodieSparkSqlWriter.scala:330) 
~[hudi-spark3.3-bundle_2.12-0.12.1.jar:0.12.1]
        at 
org.apache.spark.sql.hudi.command.InsertIntoHoodieTableCommand$.run(InsertIntoHoodieTableCommand.scala:101)
 ~[hudi-spark3.3-bundle_2.12-0.12.1.jar:0.12.1]
        at 
org.apache.spark.sql.hudi.command.InsertIntoHoodieTableCommand.run(InsertIntoHoodieTableCommand.scala:60)
 ~[hudi-spark3.3-bundle_2.12-0.12.1.jar:0.12.1]
        at 
org.apache.spark.sql.execution.command.ExecutedCommandExec.sideEffectResult$lzycompute(commands.scala:75)
 ~[spark-sql_2.12-3.3.0-amzn-0.jar:3.3.0-amzn-0]
        at 
org.apache.spark.sql.execution.command.ExecutedCommandExec.sideEffectResult(commands.scala:73)
 ~[spark-sql_2.12-3.3.0-amzn-0.jar:3.3.0-amzn-0]
        at 
org.apache.spark.sql.execution.command.ExecutedCommandExec.executeCollect(commands.scala:84)
 ~[spark-sql_2.12-3.3.0-amzn-0.jar:3.3.0-amzn-0]
        at 
org.apache.spark.sql.execution.QueryExecution$$anonfun$eagerlyExecuteCommands$1.$anonfun$applyOrElse$1(QueryExecution.scala:103)
 ~[spark-sql_2.12-3.3.0-amzn-0.jar:3.3.0-amzn-0]
        at 
org.apache.spark.sql.catalyst.QueryPlanningTracker$.withTracker(QueryPlanningTracker.scala:107)
 ~[spark-catalyst_2.12-3.3.0-amzn-0.jar:3.3.0-amzn-0]
        at 
org.apache.spark.sql.execution.SQLExecution$.withTracker(SQLExecution.scala:224)
 ~[spark-sql_2.12-3.3.0-amzn-0.jar:3.3.0-amzn-0]
        at 
org.apache.spark.sql.execution.SQLExecution$.executeQuery$1(SQLExecution.scala:114)
 ~[spark-sql_2.12-3.3.0-amzn-0.jar:3.3.0-amzn-0]
        at 
org.apache.spark.sql.execution.SQLExecution$.$anonfun$withNewExecutionId$7(SQLExecution.scala:139)
 ~[spark-sql_2.12-3.3.0-amzn-0.jar:3.3.0-amzn-0]
        at 
org.apache.spark.sql.catalyst.QueryPlanningTracker$.withTracker(QueryPlanningTracker.scala:107)
 ~[spark-catalyst_2.12-3.3.0-amzn-0.jar:3.3.0-amzn-0]
        at 
org.apache.spark.sql.execution.SQLExecution$.withTracker(SQLExecution.scala:224)
 ~[spark-sql_2.12-3.3.0-amzn-0.jar:3.3.0-amzn-0]
        at 
org.apache.spark.sql.execution.SQLExecution$.$anonfun$withNewExecutionId$6(SQLExecution.scala:139)
 ~[spark-sql_2.12-3.3.0-amzn-0.jar:3.3.0-amzn-0]
        at 
org.apache.spark.sql.execution.SQLExecution$.withSQLConfPropagated(SQLExecution.scala:245)
 ~[spark-sql_2.12-3.3.0-amzn-0.jar:3.3.0-amzn-0]
        at 
org.apache.spark.sql.execution.SQLExecution$.$anonfun$withNewExecutionId$1(SQLExecution.scala:138)
 ~[spark-sql_2.12-3.3.0-amzn-0.jar:3.3.0-amzn-0]
        at org.apache.spark.sql.SparkSession.withActive(SparkSession.scala:779) 
~[spark-sql_2.12-3.3.0-amzn-0.jar:3.3.0-amzn-0]
        at 
org.apache.spark.sql.execution.SQLExecution$.withNewExecutionId(SQLExecution.scala:68)
 ~[spark-sql_2.12-3.3.0-amzn-0.jar:3.3.0-amzn-0]
        at 
org.apache.spark.sql.execution.QueryExecution$$anonfun$eagerlyExecuteCommands$1.applyOrElse(QueryExecution.scala:100)
 ~[spark-sql_2.12-3.3.0-amzn-0.jar:3.3.0-amzn-0]
        at 
org.apache.spark.sql.execution.QueryExecution$$anonfun$eagerlyExecuteCommands$1.applyOrElse(QueryExecution.scala:96)
 ~[spark-sql_2.12-3.3.0-amzn-0.jar:3.3.0-amzn-0]
        at 
org.apache.spark.sql.catalyst.trees.TreeNode.$anonfun$transformDownWithPruning$1(TreeNode.scala:615)
 ~[spark-catalyst_2.12-3.3.0-amzn-0.jar:3.3.0-amzn-0]
        at 
org.apache.spark.sql.catalyst.trees.CurrentOrigin$.withOrigin(TreeNode.scala:177)
 ~[spark-catalyst_2.12-3.3.0-amzn-0.jar:3.3.0-amzn-0]
        at 
org.apache.spark.sql.catalyst.trees.TreeNode.transformDownWithPruning(TreeNode.scala:615)
 ~[spark-catalyst_2.12-3.3.0-amzn-0.jar:3.3.0-amzn-0]
        at 
org.apache.spark.sql.catalyst.plans.logical.LogicalPlan.org$apache$spark$sql$catalyst$plans$logical$AnalysisHelper$$super$transformDownWithPruning(LogicalPlan.scala:30)
 ~[spark-catalyst_2.12-3.3.0-amzn-0.jar:3.3.0-amzn-0]
        at 
org.apache.spark.sql.catalyst.plans.logical.AnalysisHelper.transformDownWithPruning(AnalysisHelper.scala:267)
 ~[spark-catalyst_2.12-3.3.0-amzn-0.jar:3.3.0-amzn-0]
        at 
org.apache.spark.sql.catalyst.plans.logical.AnalysisHelper.transformDownWithPruning$(AnalysisHelper.scala:263)
 ~[spark-catalyst_2.12-3.3.0-amzn-0.jar:3.3.0-amzn-0]
        at 
org.apache.spark.sql.catalyst.plans.logical.LogicalPlan.transformDownWithPruning(LogicalPlan.scala:30)
 ~[spark-catalyst_2.12-3.3.0-amzn-0.jar:3.3.0-amzn-0]
        at 
org.apache.spark.sql.catalyst.plans.logical.LogicalPlan.transformDownWithPruning(LogicalPlan.scala:30)
 ~[spark-catalyst_2.12-3.3.0-amzn-0.jar:3.3.0-amzn-0]
        at 
org.apache.spark.sql.catalyst.trees.TreeNode.transformDown(TreeNode.scala:591) 
~[spark-catalyst_2.12-3.3.0-amzn-0.jar:3.3.0-amzn-0]
        at 
org.apache.spark.sql.execution.QueryExecution.eagerlyExecuteCommands(QueryExecution.scala:96)
 ~[spark-sql_2.12-3.3.0-amzn-0.jar:3.3.0-amzn-0]
        at 
org.apache.spark.sql.execution.QueryExecution.commandExecuted$lzycompute(QueryExecution.scala:83)
 ~[spark-sql_2.12-3.3.0-amzn-0.jar:3.3.0-amzn-0]
        at 
org.apache.spark.sql.execution.QueryExecution.commandExecuted(QueryExecution.scala:81)
 ~[spark-sql_2.12-3.3.0-amzn-0.jar:3.3.0-amzn-0]
        at org.apache.spark.sql.Dataset.<init>(Dataset.scala:222) 
~[spark-sql_2.12-3.3.0-amzn-0.jar:3.3.0-amzn-0]
        at org.apache.spark.sql.Dataset$.$anonfun$ofRows$2(Dataset.scala:102) 
~[spark-sql_2.12-3.3.0-amzn-0.jar:3.3.0-amzn-0]
        at org.apache.spark.sql.SparkSession.withActive(SparkSession.scala:779) 
~[spark-sql_2.12-3.3.0-amzn-0.jar:3.3.0-amzn-0]
        at org.apache.spark.sql.Dataset$.ofRows(Dataset.scala:99) 
~[spark-sql_2.12-3.3.0-amzn-0.jar:3.3.0-amzn-0]
        at 
org.apache.spark.sql.SparkSession.$anonfun$sql$1(SparkSession.scala:622) 
~[spark-sql_2.12-3.3.0-amzn-0.jar:3.3.0-amzn-0]
        at org.apache.spark.sql.SparkSession.withActive(SparkSession.scala:779) 
~[spark-sql_2.12-3.3.0-amzn-0.jar:3.3.0-amzn-0]
        at org.apache.spark.sql.SparkSession.sql(SparkSession.scala:617) 
~[spark-sql_2.12-3.3.0-amzn-0.jar:3.3.0-amzn-0]
        at org.apache.spark.sql.SQLContext.sql(SQLContext.scala:651) 
~[spark-sql_2.12-3.3.0-amzn-0.jar:3.3.0-amzn-0]
        at 
org.apache.spark.sql.hive.thriftserver.SparkSQLDriver.run(SparkSQLDriver.scala:67)
 ~[spark-hive-thriftserver_2.12-3.3.0-amzn-0.jar:3.3.0-amzn-0]
        at 
org.apache.spark.sql.hive.thriftserver.SparkSQLCLIDriver.processCmd(SparkSQLCLIDriver.scala:384)
 ~[spark-hive-thriftserver_2.12-3.3.0-amzn-0.jar:3.3.0-amzn-0]
        at 
org.apache.spark.sql.hive.thriftserver.SparkSQLCLIDriver.$anonfun$processLine$1(SparkSQLCLIDriver.scala:504)
 ~[spark-hive-thriftserver_2.12-3.3.0-amzn-0.jar:3.3.0-amzn-0]
        at 
org.apache.spark.sql.hive.thriftserver.SparkSQLCLIDriver.$anonfun$processLine$1$adapted(SparkSQLCLIDriver.scala:498)
 ~[spark-hive-thriftserver_2.12-3.3.0-amzn-0.jar:3.3.0-amzn-0]
        at scala.collection.Iterator.foreach(Iterator.scala:943) 
~[scala-library-2.12.15.jar:?]
        at scala.collection.Iterator.foreach$(Iterator.scala:943) 
~[scala-library-2.12.15.jar:?]
        at scala.collection.AbstractIterator.foreach(Iterator.scala:1431) 
~[scala-library-2.12.15.jar:?]
        at scala.collection.IterableLike.foreach(IterableLike.scala:74) 
~[scala-library-2.12.15.jar:?]
        at scala.collection.IterableLike.foreach$(IterableLike.scala:73) 
~[scala-library-2.12.15.jar:?]
        at scala.collection.AbstractIterable.foreach(Iterable.scala:56) 
~[scala-library-2.12.15.jar:?]
        at 
org.apache.spark.sql.hive.thriftserver.SparkSQLCLIDriver.processLine(SparkSQLCLIDriver.scala:498)
 ~[spark-hive-thriftserver_2.12-3.3.0-amzn-0.jar:3.3.0-amzn-0]
        at org.apache.hadoop.hive.cli.CliDriver.processLine(CliDriver.java:336) 
~[hive-cli-2.3.9-amzn-2.jar:2.3.9-amzn-2]
        at 
org.apache.hadoop.hive.cli.CliDriver.processReader(CliDriver.java:474) 
~[hive-cli-2.3.9-amzn-2.jar:2.3.9-amzn-2]
        at org.apache.hadoop.hive.cli.CliDriver.processFile(CliDriver.java:490) 
~[hive-cli-2.3.9-amzn-2.jar:2.3.9-amzn-2]
        at 
org.apache.hadoop.hive.cli.CliDriver.processInitFiles(CliDriver.java:501) 
~[hive-cli-2.3.9-amzn-2.jar:2.3.9-amzn-2]
        at 
org.apache.spark.sql.hive.thriftserver.SparkSQLCLIDriver$.main(SparkSQLCLIDriver.scala:195)
 ~[spark-hive-thriftserver_2.12-3.3.0-amzn-0.jar:3.3.0-amzn-0]
        at 
org.apache.spark.sql.hive.thriftserver.SparkSQLCLIDriver.main(SparkSQLCLIDriver.scala)
 ~[spark-hive-thriftserver_2.12-3.3.0-amzn-0.jar:3.3.0-amzn-0]
        at sun.reflect.NativeMethodAccessorImpl.invoke0(Native Method) 
~[?:1.8.0_352]
        at 
sun.reflect.NativeMethodAccessorImpl.invoke(NativeMethodAccessorImpl.java:62) 
~[?:1.8.0_352]
        at 
sun.reflect.DelegatingMethodAccessorImpl.invoke(DelegatingMethodAccessorImpl.java:43)
 ~[?:1.8.0_352]
        at java.lang.reflect.Method.invoke(Method.java:498) ~[?:1.8.0_352]
        at 
org.apache.spark.deploy.JavaMainApplication.start(SparkApplication.scala:52) 
~[spark-core_2.12-3.3.0-amzn-0.jar:3.3.0-amzn-0]
        at 
org.apache.spark.deploy.SparkSubmit.org$apache$spark$deploy$SparkSubmit$$runMain(SparkSubmit.scala:1006)
 ~[spark-core_2.12-3.3.0-amzn-0.jar:3.3.0-amzn-0]
        at 
org.apache.spark.deploy.SparkSubmit.doRunMain$1(SparkSubmit.scala:180) 
~[spark-core_2.12-3.3.0-amzn-0.jar:3.3.0-amzn-0]
        at org.apache.spark.deploy.SparkSubmit.submit(SparkSubmit.scala:203) 
~[spark-core_2.12-3.3.0-amzn-0.jar:3.3.0-amzn-0]
        at org.apache.spark.deploy.SparkSubmit.doSubmit(SparkSubmit.scala:90) 
~[spark-core_2.12-3.3.0-amzn-0.jar:3.3.0-amzn-0]
        at 
org.apache.spark.deploy.SparkSubmit$$anon$2.doSubmit(SparkSubmit.scala:1095) 
~[spark-core_2.12-3.3.0-amzn-0.jar:3.3.0-amzn-0]
        at org.apache.spark.deploy.SparkSubmit$.main(SparkSubmit.scala:1104) 
~[spark-core_2.12-3.3.0-amzn-0.jar:3.3.0-amzn-0]
        at org.apache.spark.deploy.SparkSubmit.main(SparkSubmit.scala) 
~[spark-core_2.12-3.3.0-amzn-0.jar:3.3.0-amzn-0]
Caused by: org.apache.hudi.exception.HoodieRollbackException: Found commits 
after time :20221111220422669, please rollback greater commits first
        at 
org.apache.hudi.table.action.rollback.BaseRollbackActionExecutor.validateRollbackCommitSequence(BaseRollbackActionExecutor.java:171)
 ~[hudi-spark3.3-bundle_2.12-0.12.1.jar:0.12.1]
        at 
org.apache.hudi.table.action.rollback.BaseRollbackActionExecutor.doRollbackAndGetStats(BaseRollbackActionExecutor.java:210)
 ~[hudi-spark3.3-bundle_2.12-0.12.1.jar:0.12.1]
        at 
org.apache.hudi.table.action.rollback.BaseRollbackActionExecutor.runRollback(BaseRollbackActionExecutor.java:108)
 ~[hudi-spark3.3-bundle_2.12-0.12.1.jar:0.12.1]
        at 
org.apache.hudi.table.action.rollback.BaseRollbackActionExecutor.execute(BaseRollbackActionExecutor.java:135)
 ~[hudi-spark3.3-bundle_2.12-0.12.1.jar:0.12.1]
        at 
org.apache.hudi.table.HoodieSparkCopyOnWriteTable.rollback(HoodieSparkCopyOnWriteTable.java:281)
 ~[hudi-spark3.3-bundle_2.12-0.12.1.jar:0.12.1]
        at 
org.apache.hudi.client.BaseHoodieWriteClient.rollback(BaseHoodieWriteClient.java:768)
 ~[hudi-spark3.3-bundle_2.12-0.12.1.jar:0.12.1]
        ... 76 more {code}

> Deltastreamer fails during replace commit rollback and cannot proceed
> ---------------------------------------------------------------------
>
>                 Key: HUDI-3393
>                 URL: https://issues.apache.org/jira/browse/HUDI-3393
>             Project: Apache Hudi
>          Issue Type: Task
>          Components: deltastreamer
>            Reporter: sivabalan narayanan
>            Assignee: sivabalan narayanan
>            Priority: Critical
>             Fix For: 0.11.0
>
>         Attachments: image.png
>
>   Original Estimate: 3h
>  Remaining Estimate: 3h
>
> When a clustering commit failed mid-way and Deltastreamer restarted, 
> there could be one delta commit that succeeded. So the next time 
> clustering was attempted, it tried to roll back the older pending 
> clustering and failed with "found commits after X, rollback greater commits 
> first".
>  
> {code:java}
> [2022-02-08 09:01:55,808] {{ssh_operator.py:143}} INFO - 22/02/08 09:01:55 
> INFO Client:        client token: N/A       diagnostics: User class threw 
> exception: org.apache.hudi.exception.HoodieRollbackException: Found commits 
> after time :20220208081622, please rollback greater commits first    at 
> org.apache.hudi.table.action.rollback.BaseRollbackActionExecutor.validateRollbackCommitSequence(BaseRollbackActionExecutor.java:148)
>  at 
> org.apache.hudi.table.action.rollback.BaseRollbackActionExecutor.doRollbackAndGetStats(BaseRollbackActionExecutor.java:179)
>   at 
> org.apache.hudi.table.action.rollback.BaseRollbackActionExecutor.execute(BaseRollbackActionExecutor.java:102)
>         at 
> org.apache.hudi.table.HoodieSparkCopyOnWriteTable.rollback(HoodieSparkCopyOnWriteTable.java:237)
>      at 
> org.apache.hudi.client.AbstractHoodieWriteClient.rollbackInflightClustering(AbstractHoodieWriteClient.java:971)
>       at 
> org.apache.hudi.client.SparkRDDWriteClient.cluster(SparkRDDWriteClient.java:341)
>      at 
> org.apache.hudi.client.AbstractHoodieWriteClient.lambda$runAnyPendingClustering$1(AbstractHoodieWriteClient.java:475)
>         at 
> java.util.ArrayList$ArrayListSpliterator.forEachRemaining(ArrayList.java:1384)
>        at 
> java.util.stream.ReferencePipeline$Head.forEach(ReferencePipeline.java:647)  
> at 
> org.apache.hudi.client.AbstractHoodieWriteClient.runAnyPendingClustering(AbstractHoodieWriteClient.java:471)
>  at 
> org.apache.hudi.client.AbstractHoodieWriteClient.runTableServicesInline(AbstractHoodieWriteClient.java:453)
>   at 
> org.apache.hudi.client.AbstractHoodieWriteClient.commitStats(AbstractHoodieWriteClient.java:194)
>      at 
> org.apache.hudi.client.SparkRDDWriteClient.commit(SparkRDDWriteClient.java:121)
>       at 
> org.apache.hudi.client.SparkRDDWriteClient.commit(SparkRDDWriteClient.java:72)
>        at 
> org.apache.hudi.client.AbstractHoodieWriteClient.commit(AbstractHoodieWriteClient.java:162)
>   at 
> org.apache.hudi.utilities.deltastreamer.DeltaSync.writeToSink(DeltaSync.java:465)
>     at 
> org.apache.hudi.utilities.deltastreamer.DeltaSync.syncOnce(DeltaSync.java:282)
>        at 
> org.apache.hudi.utilities.deltastreamer.HoodieDeltaStreamer.lambda$sync$2(HoodieDeltaStreamer.java:172)
>       at org.apache.hudi.common.util.Option.ifPresent(Option.java:96) at 
> org.apache.hudi.utilities.deltastreamer.HoodieDeltaStreamer.sync(HoodieDeltaStreamer.java:170)
>        at 
> org.apache.hudi.utilities.deltastreamer.HoodieDeltaStreamer.main(HoodieDeltaStreamer.java:472)
>        at sun.reflect.NativeMethodAccessorImpl.invoke0(Native Method)  at 
> sun.reflect.NativeMethodAccessorImpl.invoke(NativeMethodAccessorImpl.java:62) 
>        at 
> sun.reflect.DelegatingMethodAccessorImpl.invoke(DelegatingMethodAccessorImpl.java:43)
>         at java.lang.reflect.Method.invoke(Method.java:498)     at 
> org.apache.spark.deploy.yarn.ApplicationMaster$$anon$2.run(ApplicationMaster.scala:735)​
>       ApplicationMaster host: ip-172-19-25-155.eu-west-1.compute.internal     
> ApplicationMaster RPC port: 38211       queue: default  start time: 
> 1644310580365       final status: FAILED    tracking URL: 
> http://ip-172-19-25-101.eu-west-1.compute.internal:20888/proxy/application_1643535866994_31483/
>    user: root22/02/08 09:01:55 ERROR Client: Application diagnostics message: 
> User class threw exception: 
> org.apache.hudi.exception.HoodieRollbackException: Found commits after time 
> :20220208081622, please rollback greater commits first      at 
> org.apache.hudi.table.action.rollback.BaseRollbackActionExecutor.validateRollbackCommitSequence(BaseRollbackActionExecutor.java:148)
>  at 
> org.apache.hudi.table.action.rollback.BaseRollbackActionExecutor.doRollbackAndGetStats(BaseRollbackActionExecutor.java:179)
>   at 
> org.apache.hudi.table.action.rollback.BaseRollbackActionExecutor.execute(BaseRollbackActionExecutor.java:102)
>         at 
> org.apache.hudi.table.HoodieSparkCopyOnWriteTable.rollback(HoodieSparkCopyOnWriteTable.java:237)
>      at 
> org.apache.hudi.client.AbstractHoodieWriteClient.rollbackInflightClustering(AbstractHoodieWriteClient.java:971)
>       at 
> org.apache.hudi.client.SparkRDDWriteClient.cluster(SparkRDDWriteClient.java:341)
>      at 
> org.apache.hudi.client.AbstractHoodieWriteClient.lambda$runAnyPendingClustering$1(AbstractHoodieWriteClient.java:475)
>         at 
> java.util.ArrayList$ArrayListSpliterator.forEachRemaining(ArrayList.java:1384)
>        at 
> java.util.stream.ReferencePipeline$Head.forEach(ReferencePipeline.java:647)  
> at 
> org.apache.hudi.client.AbstractHoodieWriteClient.runAnyPendingClustering(AbstractHoodieWriteClient.java:471)
>  at 
> org.apache.hudi.client.AbstractHoodieWriteClient.runTableServicesInline(AbstractHoodieWriteClient.java:453)
>   at 
> org.apache.hudi.client.AbstractHoodieWriteClient.commitStats(AbstractHoodieWriteClient.java:194)
>      at 
> org.apache.hudi.client.SparkRDDWriteClient.commit(SparkRDDWriteClient.java:121)
>       at 
> org.apache.hudi.client.SparkRDDWriteClient.commit(SparkRDDWriteClient.java:72)
>        at 
> org.apache.hudi.client.AbstractHoodieWriteClient.commit(AbstractHoodieWriteClient.java:162)
>   at 
> org.apache.hudi.utilities.deltastreamer.DeltaSync.writeToSink(DeltaSync.java:465)
>     at 
> org.apache.hudi.utilities.deltastreamer.DeltaSync.syncOnce(DeltaSync.java:282)
>        at 
> org.apache.hudi.utilities.deltastreamer.HoodieDeltaStreamer.lambda$sync$2(HoodieDeltaStreamer.java:172)
>       at org.apache.hudi.common.util.Option.ifPresent(Option.java:96) at 
> org.apache.hudi.utilities.deltastreamer.HoodieDeltaStreamer.sync(HoodieDeltaStreamer.java:170)
>        at 
> org.apache.hudi.utilities.deltastreamer.HoodieDeltaStreamer.main(HoodieDeltaStreamer.java:472)
>        at sun.reflect.NativeMethodAccessorImpl.invoke0(Native Method)  at 
> sun.reflect.NativeMethodAccessorImpl.invoke(NativeMethodAccessorImpl.java:62) 
>        at 
> sun.reflect.DelegatingMethodAccessorImpl.invoke(DelegatingMethodAccessorImpl.java:43)
>         at java.lang.reflect.Method.invoke(Method.java:498)     at 
> org.apache.spark.deploy.yarn.ApplicationMaster$$anon$2.run(ApplicationMaster.scala:735)
>  {code}



--
This message was sent by Atlassian Jira
(v8.20.10#820010)

Reply via email to