# 主要原因为Orc在新版本后使用的WriterVersion为ORC_517
# 导致低版本的Hive解析不了
# 自实现OrcFile类,修改回旧版本
// Static initializer of a self-implemented copy of ORC's OrcFile class.
// Newer ORC releases default to WriterVersion.ORC_517, which older Hive
// versions cannot parse, so the writer version is pinned back to the
// legacy HIVE_13083 value for backward compatibility.
static {
// Force the old writer version so files remain readable by low-version Hive.
CURRENT_WRITER = WriterVersion.HIVE_13083;
// NOTE(review): memoryManager is reset to null here — presumably it is
// lazily (re)initialized elsewhere in this class; confirm against the
// upstream org.apache.orc.OrcFile implementation before relying on it.
memoryManager = null;
}
------------------------------------------------------------------
发件人:大罗 <[email protected]>
发送时间:2020年9月8日(星期二) 16:55
收件人:user-zh <[email protected]>
主 题:Re: flink sql 1.11.1 could not insert hive orc record
Hi,我例子中的 Hive ORC 表,不是事务表,建表语句如下:
createtab_stmt
-- Hive ORC table definition (verbatim SHOW CREATE TABLE output), quoted to
-- show the table is a plain, non-transactional ORC table partitioned by `ts`.
CREATE TABLE `dest_orc`(
`i` int)
PARTITIONED BY (
`ts` string)
ROW FORMAT SERDE
'org.apache.hadoop.hive.ql.io.orc.OrcSerde'
STORED AS INPUTFORMAT
'org.apache.hadoop.hive.ql.io.orc.OrcInputFormat'
OUTPUTFORMAT
'org.apache.hadoop.hive.ql.io.orc.OrcOutputFormat'
LOCATION
'hdfs://nameservice1/opt/user/hive/warehouse/dw.db/dest_orc'
-- NOTE(review): 'is_generic'='false' looks like a marker written by Flink's
-- HiveCatalog (Hive-compatible vs. generic table) — confirm against the Flink
-- Hive connector docs. ORC stripes are SNAPPY-compressed; no transactional
-- properties are present, consistent with the email's claim.
TBLPROPERTIES (
'is_generic'='false',
'orc.compress'='SNAPPY',
'transient_lastDdlTime'='1599555226')
--
Sent from: http://apache-flink.147419.n8.nabble.com/