eyjian opened a new issue, #8971:
URL: https://github.com/apache/hudi/issues/8971

   hudi: 0.12.1
   spark: 3.1
   
   An INSERT into a Hudi table, submitted through the Livy Thrift server, fails at commit time: the post-commit automatic clean tries to roll back an earlier failed write, and that rollback aborts with a HoodieRollbackException because appending the rollback block to the table's HDFS log file hits an "Unexpected BlockUCState ... UNDER_RECOVERY but not UNDER_CONSTRUCTION" error:
   
   ```
   Caused by: java.sql.SQLException: java.util.concurrent.ExecutionException: java.lang.RuntimeException: org.apache.hudi.exception.HoodieRollbackException: Failed to rollback hdfs://hadoop-01/user/warehouse/test.db/t_test commits 20230611103836000
   at org.apache.hudi.client.BaseHoodieWriteClient.rollback(BaseHoodieWriteClient.java:793)
   at org.apache.hudi.client.BaseHoodieWriteClient.rollbackFailedWrites(BaseHoodieWriteClient.java:1203)
   at org.apache.hudi.client.BaseHoodieWriteClient.rollbackFailedWrites(BaseHoodieWriteClient.java:1186)
   at org.apache.hudi.client.BaseHoodieWriteClient.lambda$clean$33796fd2$1(BaseHoodieWriteClient.java:866)
   at org.apache.hudi.common.util.CleanerUtils.rollbackFailedWrites(CleanerUtils.java:146)
   at org.apache.hudi.client.BaseHoodieWriteClient.clean(BaseHoodieWriteClient.java:865)
   at org.apache.hudi.client.BaseHoodieWriteClient.clean(BaseHoodieWriteClient.java:847)
   at org.apache.hudi.client.BaseHoodieWriteClient.clean(BaseHoodieWriteClient.java:901)
   at org.apache.hudi.client.BaseHoodieWriteClient.autoCleanOnCommit(BaseHoodieWriteClient.java:622)
   at org.apache.hudi.client.BaseHoodieWriteClient.postCommit(BaseHoodieWriteClient.java:541)
   at org.apache.hudi.client.BaseHoodieWriteClient.commitStats(BaseHoodieWriteClient.java:239)
   at org.apache.hudi.client.SparkRDDWriteClient.commit(SparkRDDWriteClient.java:127)
   at org.apache.hudi.HoodieSparkSqlWriter$.commitAndPerformPostOperations(HoodieSparkSqlWriter.scala:727)
   at org.apache.hudi.HoodieSparkSqlWriter$.write(HoodieSparkSqlWriter.scala:351)
   at org.apache.spark.sql.hudi.command.InsertIntoHoodieTableCommand$.run(InsertIntoHoodieTableCommand.scala:101)
   at org.apache.spark.sql.hudi.command.InsertIntoHoodieTableCommand.run(InsertIntoHoodieTableCommand.scala:60)
   at org.apache.spark.sql.execution.command.ExecutedCommandExec.sideEffectResult$lzycompute(commands.scala:70)
   at org.apache.spark.sql.execution.command.ExecutedCommandExec.sideEffectResult(commands.scala:68)
   at org.apache.spark.sql.execution.command.ExecutedCommandExec.executeCollect(commands.scala:79)
   at org.apache.spark.sql.Dataset.$anonfun$logicalPlan$1(Dataset.scala:231)
   at org.apache.spark.sql.Dataset.$anonfun$withAction$1(Dataset.scala:3699)
   at org.apache.spark.sql.execution.SQLExecution$.$anonfun$withNewExecutionId$5(SQLExecution.scala:105)
   at org.apache.spark.sql.execution.SQLExecution$.withSQLConfPropagated(SQLExecution.scala:172)
   at org.apache.spark.sql.execution.SQLExecution$.$anonfun$withNewExecutionId$1(SQLExecution.scala:92)
   at org.apache.spark.sql.SparkSession.withActive(SparkSession.scala:801)
   at org.apache.spark.sql.execution.SQLExecution$.withNewExecutionId(SQLExecution.scala:66)
   at org.apache.spark.sql.Dataset.withAction(Dataset.scala:3697)
   at org.apache.spark.sql.Dataset.<init>(Dataset.scala:231)
   at org.apache.spark.sql.Dataset$.$anonfun$ofRows$2(Dataset.scala:102)
   at org.apache.spark.sql.SparkSession.withActive(SparkSession.scala:801)
   at org.apache.spark.sql.Dataset$.ofRows(Dataset.scala:96)
   at org.apache.spark.sql.SparkSession.$anonfun$sql$1(SparkSession.scala:623)
   at org.apache.spark.sql.SparkSession.withActive(SparkSession.scala:801)
   at org.apache.spark.sql.SparkSession.sql(SparkSession.scala:616)
   at org.apache.livy.thriftserver.session.SqlJob.executeSql(SqlJob.java:93)
   at org.apache.livy.thriftserver.session.SqlJob.call(SqlJob.java:73)
   at org.apache.livy.thriftserver.session.SqlJob.call(SqlJob.java:40)
   at org.apache.livy.rsc.driver.JobWrapper.call(JobWrapper.java:84)
   at org.apache.livy.rsc.driver.JobWrapper.call(JobWrapper.java:34)
   at java.util.concurrent.FutureTask.run(FutureTask.java:266)
   at java.util.concurrent.ThreadPoolExecutor.runWorker(ThreadPoolExecutor.java:1149)
   at java.util.concurrent.ThreadPoolExecutor$Worker.run(ThreadPoolExecutor.java:624)
   at java.lang.Thread.run(Thread.java:750)
   Caused by: org.apache.spark.SparkException: Job aborted due to stage failure: Task 5 in stage 48.0 failed 4 times, most recent failure: Lost task 5.3 in stage 48.0 (TID 3300) (9.19.19.15 executor 4): org.apache.hudi.exception.HoodieIOException: Error appending rollback block
   at org.apache.hudi.table.action.rollback.BaseRollbackHelper.lambda$maybeDeleteAndCollectStats$309309f3$1(BaseRollbackHelper.java:148)
   at org.apache.hudi.client.common.HoodieSparkEngineContext.lambda$flatMap$7d470b86$1(HoodieSparkEngineContext.java:137)
   at org.apache.spark.api.java.JavaRDDLike.$anonfun$flatMap$1(JavaRDDLike.scala:125)
   at scala.collection.Iterator$$anon$11.nextCur(Iterator.scala:484)
   at scala.collection.Iterator$$anon$11.hasNext(Iterator.scala:490)
   at scala.collection.Iterator.foreach(Iterator.scala:941)
   at scala.collection.Iterator.foreach$(Iterator.scala:941)
   at scala.collection.AbstractIterator.foreach(Iterator.scala:1429)
   at scala.collection.generic.Growable.$plus$plus$eq(Growable.scala:62)
   at scala.collection.generic.Growable.$plus$plus$eq$(Growable.scala:53)
   at scala.collection.mutable.ArrayBuffer.$plus$plus$eq(ArrayBuffer.scala:105)
   at scala.collection.mutable.ArrayBuffer.$plus$plus$eq(ArrayBuffer.scala:49)
   at scala.collection.TraversableOnce.to(TraversableOnce.scala:315)
   at scala.collection.TraversableOnce.to$(TraversableOnce.scala:313)
   at scala.collection.AbstractIterator.to(Iterator.scala:1429)
   at scala.collection.TraversableOnce.toBuffer(TraversableOnce.scala:307)
   at scala.collection.TraversableOnce.toBuffer$(TraversableOnce.scala:307)
   at scala.collection.AbstractIterator.toBuffer(Iterator.scala:1429)
   at scala.collection.TraversableOnce.toArray(TraversableOnce.scala:294)
   at scala.collection.TraversableOnce.toArray$(TraversableOnce.scala:288)
   at scala.collection.AbstractIterator.toArray(Iterator.scala:1429)
   at org.apache.spark.rdd.RDD.$anonfun$collect$2(RDD.scala:1030)
   at org.apache.spark.SparkContext.$anonfun$runJob$5(SparkContext.scala:2341)
   at org.apache.spark.scheduler.ResultTask.runTask(ResultTask.scala:90)
   at org.apache.spark.scheduler.Task.run(Task.scala:131)
   at org.apache.spark.executor.Executor$TaskRunner.$anonfun$run$3(Executor.scala:497)
   at org.apache.spark.util.Utils$.tryWithSafeFinally(Utils.scala:1483)
   at org.apache.spark.executor.Executor$TaskRunner.run(Executor.scala:500)
   at java.util.concurrent.ThreadPoolExecutor.runWorker(ThreadPoolExecutor.java:1149)
   at java.util.concurrent.ThreadPoolExecutor$Worker.run(ThreadPoolExecutor.java:624)
   at java.lang.Thread.run(Thread.java:750)
   Caused by: org.apache.hadoop.ipc.RemoteException(java.io.IOException): Unexpected BlockUCState: BP-914671314-9.138.19.7-1623767356429:blk_13321110200_13089176187 is UNDER_RECOVERY but not UNDER_CONSTRUCTION
   at org.apache.hadoop.hdfs.server.namenode.FSNamesystem.checkUCBlock(FSNamesystem.java:5282)
   at org.apache.hadoop.hdfs.server.namenode.FSNamesystem.updatePipelineInternal(FSNamesystem.java:5409)
   at org.apache.hadoop.hdfs.server.namenode.FSNamesystem.updatePipeline(FSNamesystem.java:5394)
   at org.apache.hadoop.hdfs.server.namenode.NameNodeRpcServer.updatePipeline(NameNodeRpcServer.java:1049)
   at org.apache.hadoop.hdfs.protocolPB.ClientNamenodeProtocolServerSideTranslatorPB.updatePipeline(ClientNamenodeProtocolServerSideTranslatorPB.java:975)
   at org.apache.hadoop.hdfs.protocol.proto.ClientNamenodeProtocolProtos$ClientNamenodeProtocol$2.callBlockingMethod(ClientNamenodeProtocolProtos.java)
   at org.apache.hadoop.ipc.ProtobufRpcEngine$Server$ProtoBufRpcInvoker.call(ProtobufRpcEngine.java:529)
   at org.apache.hadoop.ipc.RPC$Server.call(RPC.java:1073)
   at org.apache.hadoop.ipc.Server$RpcCall.run(Server.java:1039)
   at org.apache.hadoop.ipc.Server$RpcCall.run(Server.java:963)
   at java.base/java.security.AccessController.doPrivileged(Native Method)
   at java.base/javax.security.auth.Subject.doAs(Subject.java:423)
   at org.apache.hadoop.security.UserGroupInformation.doAs(UserGroupInformation.java:2065)
   at org.apache.hadoop.ipc.Server$Handler.run(Server.java:3047)
   
   at org.apache.hadoop.ipc.Client.getRpcResponse(Client.java:1564)
   at org.apache.hadoop.ipc.Client.call(Client.java:1510)
   at org.apache.hadoop.ipc.Client.call(Client.java:1417)
   at org.apache.hadoop.ipc.ProtobufRpcEngine$Invoker.invoke(ProtobufRpcEngine.java:232)
   at org.apache.hadoop.ipc.ProtobufRpcEngine$Invoker.invoke(ProtobufRpcEngine.java:118)
   at com.sun.proxy.$Proxy22.updatePipeline(Unknown Source)
   at org.apache.hadoop.hdfs.protocolPB.ClientNamenodeProtocolTranslatorPB.updatePipeline(ClientNamenodeProtocolTranslatorPB.java:967)
   at sun.reflect.NativeMethodAccessorImpl.invoke0(Native Method)
   at sun.reflect.NativeMethodAccessorImpl.invoke(NativeMethodAccessorImpl.java:62)
   at sun.reflect.DelegatingMethodAccessorImpl.invoke(DelegatingMethodAccessorImpl.java:43)
   at java.lang.reflect.Method.invoke(Method.java:498)
   at org.apache.hadoop.io.retry.RetryInvocationHandler.invokeMethod(RetryInvocationHandler.java:411)
   at org.apache.hadoop.io.retry.RetryInvocationHandler$Call.invokeMethod(RetryInvocationHandler.java:163)
   at org.apache.hadoop.io.retry.RetryInvocationHandler$Call.invoke(RetryInvocationHandler.java:155)
   at org.apache.hadoop.io.retry.RetryInvocationHandler$Call.invokeOnce(RetryInvocationHandler.java:95)
   at org.apache.hadoop.io.retry.RetryInvocationHandler.invoke(RetryInvocationHandler.java:346)
   at com.sun.proxy.$Proxy23.updatePipeline(Unknown Source)
   at org.apache.hadoop.hdfs.DataStreamer.updatePipeline(DataStreamer.java:1646)
   at org.apache.hadoop.hdfs.DataStreamer.setupPipelineInternal(DataStreamer.java:1522)
   at org.apache.hadoop.hdfs.DataStreamer.setupPipelineForAppendOrRecovery(DataStreamer.java:1487)
   at org.apache.hadoop.hdfs.DataStreamer.run(DataStreamer.java:726)
   ```
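
   Reading the trace bottom-up: the executor fails in `BaseRollbackHelper.maybeDeleteAndCollectStats` while appending a rollback block to an existing Hudi log file, and the HDFS append pipeline (`DataStreamer.setupPipelineForAppendOrRecovery` -> `updatePipeline`) finds the file's last block already `UNDER_RECOVERY`. That state usually means another writer, or a previous writer whose lease was never released, triggered block recovery on the same log file. A minimal diagnostic sketch, assuming HDFS client access, uses `DistributedFileSystem.recoverLease()` to close the file before retrying the job (the path argument is a placeholder; the affected log file is not named in the report):
   
   ```java
   import org.apache.hadoop.conf.Configuration;
   import org.apache.hadoop.fs.FileSystem;
   import org.apache.hadoop.fs.Path;
   import org.apache.hadoop.hdfs.DistributedFileSystem;
   
   // Hypothetical diagnostic, not from the issue: ask the NameNode to recover
   // the lease on the log file the rollback could not append to, so a retried
   // rollback appends to a closed, consistent file.
   public class RecoverHudiLogLease {
     public static void main(String[] args) throws Exception {
       Path logFile = new Path(args[0]); // placeholder: the affected .log file
       FileSystem fs = FileSystem.get(logFile.toUri(), new Configuration());
       if (fs instanceof DistributedFileSystem) {
         // recoverLease() returns true once the file is closed and consistent;
         // it may need to be polled if recovery is still in progress
         boolean closed = ((DistributedFileSystem) fs).recoverLease(logFile);
         System.out.println("lease recovered, file closed: " + closed);
       } else {
         System.err.println("not an HDFS path: " + logFile);
       }
     }
   }
   ```
   
   The same recovery is available from the shell via `hdfs debug recoverLease -path <file> -retries <n>`. If several writers can touch this table concurrently, lazy failed-writes cleaning (`hoodie.cleaner.policy.failed.writes=LAZY`) together with a lock provider may also be worth checking, so that an eager rollback does not race an in-flight writer's log file.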

