imzhouhao commented on issue #6115:
URL: https://github.com/apache/paimon/issues/6115#issuecomment-3213011879

   This happened again at 2025-08-22 12:01:55:
   java.lang.Exception: Could not perform checkpoint 15 for operator Source: Custom Source -> Map -> *anonymous_datastream_source$1*[1] -> Calc[2] -> Map -> Writer : incremental_dw_kafka2paimon_test_paralism4 (70/80)#0.
        at org.apache.flink.streaming.runtime.tasks.StreamTask.triggerCheckpointAsyncInMailbox(StreamTask.java:1171)
        at org.apache.flink.streaming.runtime.tasks.StreamTask.lambda$triggerCheckpointAsync$12(StreamTask.java:1118)
        at org.apache.flink.streaming.runtime.tasks.StreamTaskActionExecutor$SynchronizedStreamTaskActionExecutor.runThrowing(StreamTaskActionExecutor.java:93)
        at org.apache.flink.streaming.runtime.tasks.mailbox.Mail.run(Mail.java:90)
        at org.apache.flink.streaming.runtime.tasks.mailbox.MailboxProcessor.runMail(MailboxProcessor.java:398)
        at org.apache.flink.streaming.runtime.tasks.mailbox.MailboxProcessor.processMailsWhenDefaultActionUnavailable(MailboxProcessor.java:367)
        at org.apache.flink.streaming.runtime.tasks.mailbox.MailboxProcessor.processMail(MailboxProcessor.java:352)
        at org.apache.flink.streaming.runtime.tasks.mailbox.MailboxProcessor.runMailboxLoop(MailboxProcessor.java:229)
        at org.apache.flink.streaming.runtime.tasks.StreamTask.runMailboxLoop(StreamTask.java:836)
        at org.apache.flink.streaming.runtime.tasks.StreamTask.invoke(StreamTask.java:785)
        at org.apache.flink.runtime.taskmanager.Task.runWithSystemExitMonitoring(Task.java:937)
        at org.apache.flink.runtime.taskmanager.Task.restoreAndInvoke(Task.java:916)
        at org.apache.flink.runtime.taskmanager.Task.doRun(Task.java:730)
        at org.apache.flink.runtime.taskmanager.Task.run(Task.java:550)
        at java.lang.Thread.run(Thread.java:748)
   Caused by: java.io.IOException: org.apache.hadoop.ipc.RemoteException(org.apache.hadoop.ipc.RetriableException): Acquire lock failed, retry later. Id: 1403180, inode tracing: paimon/default.db/incremental_dw_kafka2paimon_test_paralism4/dt=20250822/hour=11/ctime=2025082211/bucket-0/data-433c350c-30dd-4ba7-b0bd-7a04bba14526-36.orc
        at org.apache.hadoop.hdfs.server.namenode.FSDirectory.fromINodeId(FSDirectory.java:956)
        at org.apache.hadoop.hdfs.server.namenode.FSDirectory.resolvePath(FSDirectory.java:920)
        at org.apache.hadoop.hdfs.server.namenode.FSNamesystem.updateBlockForPipeline(FSNamesystem.java:6757)
        at org.apache.hadoop.hdfs.server.namenode.NameNodeRpcServer.updateBlockForPipeline(NameNodeRpcServer.java:1070)
        at org.apache.hadoop.hdfs.protocolPB.ClientNamenodeProtocolServerSideTranslatorPB.updateBlockForPipeline(ClientNamenodeProtocolServerSideTranslatorPB.java:1178)
        at org.apache.hadoop.hdfs.protocol.proto.ClientNamenodeProtocolProtos$ClientNamenodeProtocol$2.callBlockingMethod(ClientNamenodeProtocolProtos.java)
        at org.apache.hadoop.ipc.ProtobufRpcEngine$Server$ProtoBufRpcInvoker.call(ProtobufRpcEngine.java:713)
        at org.apache.hadoop.ipc.RPC$Server.call(RPC.java:975)
        at org.apache.hadoop.ipc.Server$RpcCall.run(Server.java:1008)
        at org.apache.hadoop.ipc.Server$RpcCall.run(Server.java:929)
        at java.security.AccessController.doPrivileged(Native Method)
        at javax.security.auth.Subject.doAs(Subject.java:422)
        at org.apache.hadoop.security.UserGroupInformation.doAs(UserGroupInformation.java:1726)
        at org.apache.hadoop.ipc.Server$Handler.run(Server.java:2797)

        at org.apache.paimon.flink.sink.StoreSinkWriteImpl.prepareCommit(StoreSinkWriteImpl.java:221)
        at org.apache.paimon.flink.sink.TableWriteOperator.prepareCommit(TableWriteOperator.java:128)
        at org.apache.paimon.flink.sink.RowDataStoreWriteOperator.prepareCommit(RowDataStoreWriteOperator.java:205)
        at org.apache.paimon.flink.sink.PrepareCommitOperator.emitCommittables(PrepareCommitOperator.java:104)
        at org.apache.paimon.flink.sink.PrepareCommitOperator.prepareSnapshotPreBarrier(PrepareCommitOperator.java:84)
        at org.apache.flink.streaming.runtime.tasks.RegularOperatorChain.prepareSnapshotPreBarrier(RegularOperatorChain.java:89)
        at org.apache.flink.streaming.runtime.tasks.SubtaskCheckpointCoordinatorImpl.checkpointState(SubtaskCheckpointCoordinatorImpl.java:334)
        at org.apache.flink.streaming.runtime.tasks.StreamTask.lambda$performCheckpoint$13(StreamTask.java:1286)
        at org.apache.flink.streaming.runtime.tasks.StreamTaskActionExecutor$SynchronizedStreamTaskActionExecutor.runThrowing(StreamTaskActionExecutor.java:93)
        at org.apache.flink.streaming.runtime.tasks.StreamTask.performCheckpoint(StreamTask.java:1274)
        at org.apache.flink.streaming.runtime.tasks.StreamTask.triggerCheckpointAsyncInMailbox(StreamTask.java:1159)
        ... 14 more
   Caused by: org.apache.hadoop.ipc.RemoteException(org.apache.hadoop.ipc.RetriableException): Acquire lock failed, retry later. Id: 1403180, inode tracing: paimon/default.db/incremental_dw_kafka2paimon_test_paralism4/dt=20250822/hour=11/ctime=2025082211/bucket-0/data-433c350c-30dd-4ba7-b0bd-7a04bba14526-36.orc
        at org.apache.hadoop.hdfs.server.namenode.FSDirectory.fromINodeId(FSDirectory.java:956)
        at org.apache.hadoop.hdfs.server.namenode.FSDirectory.resolvePath(FSDirectory.java:920)
        at org.apache.hadoop.hdfs.server.namenode.FSNamesystem.updateBlockForPipeline(FSNamesystem.java:6757)
        at org.apache.hadoop.hdfs.server.namenode.NameNodeRpcServer.updateBlockForPipeline(NameNodeRpcServer.java:1070)
        at org.apache.hadoop.hdfs.protocolPB.ClientNamenodeProtocolServerSideTranslatorPB.updateBlockForPipeline(ClientNamenodeProtocolServerSideTranslatorPB.java:1178)
        at org.apache.hadoop.hdfs.protocol.proto.ClientNamenodeProtocolProtos$ClientNamenodeProtocol$2.callBlockingMethod(ClientNamenodeProtocolProtos.java)
        at org.apache.hadoop.ipc.ProtobufRpcEngine$Server$ProtoBufRpcInvoker.call(ProtobufRpcEngine.java:713)
        at org.apache.hadoop.ipc.RPC$Server.call(RPC.java:975)
        at org.apache.hadoop.ipc.Server$RpcCall.run(Server.java:1008)
        at org.apache.hadoop.ipc.Server$RpcCall.run(Server.java:929)
        at java.security.AccessController.doPrivileged(Native Method)
        at javax.security.auth.Subject.doAs(Subject.java:422)
        at org.apache.hadoop.security.UserGroupInformation.doAs(UserGroupInformation.java:1726)
        at org.apache.hadoop.ipc.Server$Handler.run(Server.java:2797)

        at org.apache.hadoop.ipc.Client.call(Client.java:1603)
        at org.apache.hadoop.ipc.Client.call(Client.java:1524)
        at org.apache.hadoop.ipc.ProtobufRpcEngine$Invoker.invoke(ProtobufRpcEngine.java:228)
        at com.sun.proxy.$Proxy38.updateBlockForPipeline(Unknown Source)
        at org.apache.hadoop.hdfs.protocolPB.ClientNamenodeProtocolTranslatorPB.updateBlockForPipeline(ClientNamenodeProtocolTranslatorPB.java:1078)
        at sun.reflect.NativeMethodAccessorImpl.invoke0(Native Method)
        at sun.reflect.NativeMethodAccessorImpl.invoke(NativeMethodAccessorImpl.java:62)
        at sun.reflect.DelegatingMethodAccessorImpl.invoke(DelegatingMethodAccessorImpl.java:43)
        at java.lang.reflect.Method.invoke(Method.java:498)
        at org.apache.hadoop.io.retry.RetryInvocationHandler.invokeMethod(RetryInvocationHandler.java:252)
        at org.apache.hadoop.io.retry.RetryInvocationHandler.invoke(RetryInvocationHandler.java:104)
        at com.sun.proxy.$Proxy39.updateBlockForPipeline(Unknown Source)
        at sun.reflect.NativeMethodAccessorImpl.invoke0(Native Method)
        at sun.reflect.NativeMethodAccessorImpl.invoke(NativeMethodAccessorImpl.java:62)
        at sun.reflect.DelegatingMethodAccessorImpl.invoke(DelegatingMethodAccessorImpl.java:43)
        at java.lang.reflect.Method.invoke(Method.java:498)
        at org.apache.hadoop.hdfs.RpcResponseHandler.invoke(RpcResponseHandler.java:55)
        at com.sun.proxy.$Proxy39.updateBlockForPipeline(Unknown Source)
        at org.apache.hadoop.hdfs.DFSOutputStream$DataStreamer.setupPipelineForAppendOrRecovery(DFSOutputStream.java:1566)
        at org.apache.hadoop.hdfs.DFSOutputStream$DataStreamer.processDatanodeError(DFSOutputStream.java:1249)
        at org.apache.hadoop.hdfs.DFSOutputStream$DataStreamer.run(DFSOutputStream.java:741)
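   Not part of the original log, just a sketch: while the NameNode-side "Acquire lock failed, retry later" contention is investigated, one possible stopgap is to let the job tolerate a few transient checkpoint failures instead of failing the task outright. A minimal, untested example assuming a plain DataStream job and standard Flink checkpointing APIs (the class name, interval, and threshold below are illustrative, not taken from this job):

    import org.apache.flink.streaming.api.environment.StreamExecutionEnvironment;

    public class CheckpointToleranceSketch {
        public static void main(String[] args) throws Exception {
            StreamExecutionEnvironment env = StreamExecutionEnvironment.getExecutionEnvironment();

            // Checkpoint interval in ms; the actual interval of this job is not known.
            env.enableCheckpointing(60_000L);

            // Tolerate a few consecutive checkpoint failures (e.g. the transient
            // RetriableException above) before failing the job. The default in
            // recent Flink versions is 0, so a single failed checkpoint can fail the job.
            env.getCheckpointConfig().setTolerableCheckpointFailureNumber(3);

            // ... build the Kafka -> Paimon pipeline as before and call env.execute() ...
        }
    }

   This only masks the symptom; the underlying issue still looks like the NameNode rejecting updateBlockForPipeline during DataStreamer pipeline recovery, which would need to be addressed on the HDFS side.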
   

