MorningGlow opened a new issue, #9672:
URL: https://github.com/apache/hudi/issues/9672
**Versions used:**
flink 1.15.2 hudi 0.12.3 hive 3.1.2 hadoop 3.2.4
CREATE TABLE WF_UNITINFOTRAVEL_HUDI
( `ID` STRING
,`WORKORDER` STRING COMMENT '工单'
,`UNITID` STRING COMMENT '序号'
,`PARTID` STRING COMMENT '成品料号ID'
,`PARTNAME` STRING
,`ROUTEID` STRING COMMENT '流程ID'
,`ROUTENAME` STRING
,`LINEID` STRING COMMENT '线别ID'
,`LINENAME` STRING
,`CURPROCESSID` STRING COMMENT '当前制程ID'
,`CURPROCESSNAME` STRING
,`EQPID` STRING COMMENT '工作站(设备ID)'
,`NEXTPROCESSID` STRING COMMENT 'backing,input'
,`NEXTPROCESSNAME` STRING
,`CURRENTSTATUS` STRING COMMENT '当前状态(Pss,Fail,Scrap)'
,`INPROCESSTIME` STRING COMMENT '进入制程时间'
,`OUTPROCESSTIME` STRING COMMENT '离开制程时间'
,`INPDLINETIME` STRING COMMENT '进入生产线时间'
,`OUTPDLINETIME` STRING COMMENT '离开生产线时间'
,`PALLETNO` STRING COMMENT '栈板号'
,`CONTAINER` STRING COMMENT '货柜'
,`QCNO` STRING COMMENT '抽验编号'
,`QCRESULT` STRING COMMENT '抽验结果'
,`REWORKWO` STRING COMMENT '重工号'
,`BOXNO` STRING COMMENT '包装盒/袋子'
,`PANELNO` STRING COMMENT '大板序号'
,`BOARDNO` DECIMAL(10,0) COMMENT '穴位'
,`CARRIER` STRING
,`COVER` STRING
,`BASE` STRING
,`MAGAZINE` STRING
,`ACTION` STRING COMMENT '操作动作:如 InDryBox、OutDryBox、InOvenBox、OutOvenBox'
,`OPT2` STRING
,`OPT3` STRING
,`OPT4` STRING
,`OPT5` STRING
,`SORTCODE` DECIMAL(20,0)
,`ENABLED` DECIMAL(1,0)
,`CREATEDATE` STRING
,`CREATEUSERID` STRING
,`CREATEUSERNAME` STRING
,`MODIFYDATE` STRING
,`MODIFYUSERID` STRING
,`MODIFYUSERNAME` STRING
,`REMARK` STRING
,`RULENAME` STRING
,`CARTONNO` STRING COMMENT '箱号'
,`XFLAG` STRING COMMENT 'X板标识:OK/NG'
,`PASSCOUNT` DECIMAL(10,0) COMMENT '当前制程过站成功数量'
,`SPLITFLAG` STRING COMMENT '分板标识:Y/N'
,`LEDBIN` STRING
,`PROCESSGRADE` STRING
,`HOLDREASON` STRING
,`UNITID56` STRING
,`ELAPSEDMILLISECONDS` DECIMAL(19,0) COMMENT '耗时(ms)'
,`BATCHID` STRING COMMENT '批次ID'
,`DT` STRING
,primary key (ID) not enforced)
PARTITIONED BY (`DT`)
with(
-- 'read.streaming.enabled' = 'true',
'path' =
'hdfs://ks2p-hadoop01:9000/data/hive/warehouse/test.db/TEST_WF_UNITINFOTRAVEL',
'hoodie.parquet.small.file.limit' = '125829120',
'hoodie.parquet.max.file.size' = '134217728',
'hive_sync.enable' = 'true',
'connector' = 'hudi',
'read.streaming.check-interval' = '3',
'hive_sync.metastore.uris' = 'thrift://ks2p-hadoop01:9083',
'hive_sync.table' = 'TEST_WF_UNITINFOTRAVEL',
'hive_sync.db' = 'demo',
'compaction.trigger.strategy' = 'num_commits',
'changelog.enabled' = 'true',
'write.rate.limit' = '90000',
'hive_sync.support_timestamp' = 'true',
'compaction.async.enabled' = 'true',
'write.operation' = 'upsert',
'hoodie.datasource.write.recordkey.field' = 'ID',
'hoodie.datasource.write.precombine.field' = 'MODIFYDATE',
'compaction.delta_commits' = '2',
'table.type' = 'MERGE_ON_READ',
'hive_sync.mode' = 'hms',
'hoodie.metadata.index.bloom.filter.enable'='true',
'hoodie.metadata.enable'='true',
'hoodie.metadata.index.column.stats.enable'='true',
'hoodie.enable.data.skipping'='true',
'hoodie.metadata.compact.max.delta.commits'='2',
'hoodie.metadata.index.column.stats.file.group.count'='8',
'hoodie.write.concurrency.mode'='optimistic_concurrency_control',
'hoodie.cleaner.policy.failed.writes'='LAZY',
'hoodie.write.lock.provider'='org.apache.hudi.client.transaction.lock.InProcessLockProvider'
-- 'hoodie.metadata.index.column.stats.column.list'='ID,WORKORDER'
);
**Error:**
```
java.io.FileNotFoundException: File does not exist:
/data/hive/warehouse/test.db/TEST_WF_UNITINFOTRAVEL/.hoodie/metadata/column_stats/.hoodie_partition_metadata_0
(inode 44976335) [Lease. Holder: DFSClient_NONMAPREDUCE_-1936765705_74,
pending creates: 1]
at
org.apache.hadoop.hdfs.server.namenode.FSNamesystem.checkLease(FSNamesystem.java:2929)
at
org.apache.hadoop.hdfs.server.namenode.FSDirWriteFileOp.analyzeFileState(FSDirWriteFileOp.java:599)
at
org.apache.hadoop.hdfs.server.namenode.FSDirWriteFileOp.validateAddBlock(FSDirWriteFileOp.java:171)
at
org.apache.hadoop.hdfs.server.namenode.FSNamesystem.getAdditionalBlock(FSNamesystem.java:2808)
at
org.apache.hadoop.hdfs.server.namenode.NameNodeRpcServer.addBlock(NameNodeRpcServer.java:910)
at
org.apache.hadoop.hdfs.protocolPB.ClientNamenodeProtocolServerSideTranslatorPB.addBlock(ClientNamenodeProtocolServerSideTranslatorPB.java:577)
at
org.apache.hadoop.hdfs.protocol.proto.ClientNamenodeProtocolProtos$ClientNamenodeProtocol$2.callBlockingMethod(ClientNamenodeProtocolProtos.java)
at
org.apache.hadoop.ipc.ProtobufRpcEngine$Server$ProtoBufRpcInvoker.call(ProtobufRpcEngine.java:549)
at
org.apache.hadoop.ipc.ProtobufRpcEngine$Server$ProtoBufRpcInvoker.call(ProtobufRpcEngine.java:518)
at org.apache.hadoop.ipc.RPC$Server.call(RPC.java:1086)
at org.apache.hadoop.ipc.Server$RpcCall.run(Server.java:1035)
at org.apache.hadoop.ipc.Server$RpcCall.run(Server.java:963)
at java.security.AccessController.doPrivileged(Native Method)
at javax.security.auth.Subject.doAs(Subject.java:422)
at
org.apache.hadoop.security.UserGroupInformation.doAs(UserGroupInformation.java:1762)
at org.apache.hadoop.ipc.Server$Handler.run(Server.java:2960)
at sun.reflect.NativeConstructorAccessorImpl.newInstance0(Native
Method) ~[?:1.8.0_261]
at
sun.reflect.NativeConstructorAccessorImpl.newInstance(NativeConstructorAccessorImpl.java:62)
~[?:1.8.0_261]
at
sun.reflect.DelegatingConstructorAccessorImpl.newInstance(DelegatingConstructorAccessorImpl.java:45)
~[?:1.8.0_261]
at java.lang.reflect.Constructor.newInstance(Constructor.java:423)
~[?:1.8.0_261]
at
org.apache.hadoop.ipc.RemoteException.instantiateException(RemoteException.java:121)
~[flink-shaded-hadoop-2-uber-2.8.3-10.0.jar:2.8.3-10.0]
at
org.apache.hadoop.ipc.RemoteException.unwrapRemoteException(RemoteException.java:88)
~[flink-shaded-hadoop-2-uber-2.8.3-10.0.jar:2.8.3-10.0]
at
org.apache.hadoop.hdfs.DataStreamer.locateFollowingBlock(DataStreamer.java:1842)
~[flink-shaded-hadoop-2-uber-2.8.3-10.0.jar:2.8.3-10.0]
at
org.apache.hadoop.hdfs.DataStreamer.nextBlockOutputStream(DataStreamer.java:1638)
~[flink-shaded-hadoop-2-uber-2.8.3-10.0.jar:2.8.3-10.0]
at org.apache.hadoop.hdfs.DataStreamer.run(DataStreamer.java:704)
~[flink-shaded-hadoop-2-uber-2.8.3-10.0.jar:2.8.3-10.0]
Caused by: org.apache.hadoop.ipc.RemoteException: File does not exist:
/data/hive/warehouse/test.db/TEST_WF_UNITINFOTRAVEL/.hoodie/metadata/column_stats/.hoodie_partition_metadata_0
(inode 44976335) [Lease. Holder: DFSClient_NONMAPREDUCE_-1936765705_74,
pending creates: 1]
at
org.apache.hadoop.hdfs.server.namenode.FSNamesystem.checkLease(FSNamesystem.java:2929)
at
org.apache.hadoop.hdfs.server.namenode.FSDirWriteFileOp.analyzeFileState(FSDirWriteFileOp.java:599)
at
org.apache.hadoop.hdfs.server.namenode.FSDirWriteFileOp.validateAddBlock(FSDirWriteFileOp.java:171)
at
org.apache.hadoop.hdfs.server.namenode.FSNamesystem.getAdditionalBlock(FSNamesystem.java:2808)
at
org.apache.hadoop.hdfs.server.namenode.NameNodeRpcServer.addBlock(NameNodeRpcServer.java:910)
at
org.apache.hadoop.hdfs.protocolPB.ClientNamenodeProtocolServerSideTranslatorPB.addBlock(ClientNamenodeProtocolServerSideTranslatorPB.java:577)
at
org.apache.hadoop.hdfs.protocol.proto.ClientNamenodeProtocolProtos$ClientNamenodeProtocol$2.callBlockingMethod(ClientNamenodeProtocolProtos.java)
at
org.apache.hadoop.ipc.ProtobufRpcEngine$Server$ProtoBufRpcInvoker.call(ProtobufRpcEngine.java:549)
at
org.apache.hadoop.ipc.ProtobufRpcEngine$Server$ProtoBufRpcInvoker.call(ProtobufRpcEngine.java:518)
at org.apache.hadoop.ipc.RPC$Server.call(RPC.java:1086)
at org.apache.hadoop.ipc.Server$RpcCall.run(Server.java:1035)
at org.apache.hadoop.ipc.Server$RpcCall.run(Server.java:963)
at java.security.AccessController.doPrivileged(Native Method)
at javax.security.auth.Subject.doAs(Subject.java:422)
at
org.apache.hadoop.security.UserGroupInformation.doAs(UserGroupInformation.java:1762)
at org.apache.hadoop.ipc.Server$Handler.run(Server.java:2960)
at org.apache.hadoop.ipc.Client.getRpcResponse(Client.java:1489)
~[flink-shaded-hadoop-2-uber-2.8.3-10.0.jar:2.8.3-10.0]
at org.apache.hadoop.ipc.Client.call(Client.java:1435)
~[flink-shaded-hadoop-2-uber-2.8.3-10.0.jar:2.8.3-10.0]
at org.apache.hadoop.ipc.Client.call(Client.java:1345)
~[flink-shaded-hadoop-2-uber-2.8.3-10.0.jar:2.8.3-10.0]
at
org.apache.hadoop.ipc.ProtobufRpcEngine$Invoker.invoke(ProtobufRpcEngine.java:227)
~[flink-shaded-hadoop-2-uber-2.8.3-10.0.jar:2.8.3-10.0]
at
org.apache.hadoop.ipc.ProtobufRpcEngine$Invoker.invoke(ProtobufRpcEngine.java:116)
~[flink-shaded-hadoop-2-uber-2.8.3-10.0.jar:2.8.3-10.0]
at com.sun.proxy.$Proxy40.addBlock(Unknown Source) ~[?:?]
at
org.apache.hadoop.hdfs.protocolPB.ClientNamenodeProtocolTranslatorPB.addBlock(ClientNamenodeProtocolTranslatorPB.java:444)
~[flink-shaded-hadoop-2-uber-2.8.3-10.0.jar:2.8.3-10.0]
at sun.reflect.GeneratedMethodAccessor13.invoke(Unknown Source) ~[?:?]
at
sun.reflect.DelegatingMethodAccessorImpl.invoke(DelegatingMethodAccessorImpl.java:43)
~[?:1.8.0_261]
at java.lang.reflect.Method.invoke(Method.java:498) ~[?:1.8.0_261]
```



If possible, could you please suggest a solution or workaround? Thank you.
--
This is an automated message from the Apache Git Service.
To respond to the message, please log on to GitHub and use the
URL above to go to the specific comment.
To unsubscribe, e-mail: [email protected]
For queries about this service, please contact Infrastructure at:
[email protected]