用 1.1.0 试了一下也没复现。你 insert 语句中的中文如果换成英文,再试试看解析能不能通过呢?
On Mon, Aug 2, 2021 at 3:05 PM Asahi Lee <[email protected]> wrote: > hive 1.1.0版本 > > > > > ------------------ 原始邮件 ------------------ > 发件人: > "user-zh" > < > [email protected]>; > 发送时间: 2021年8月2日(星期一) 中午12:23 > 收件人: "user-zh"<[email protected]>; > > 主题: Re: flink 1.13.1 使用hive方言,执行hive sql解析报错 > > > > 我本地试了一下没有复现你的问题,你的hive版本是什么呢? > > On Fri, Jul 30, 2021 at 3:00 PM Asahi Lee <[email protected]> > wrote: > > > CREATE TABLE `cosldatacenter.ods_emp_maindata_iadc_paramvalue`( > > &nbsp; `paramvalue_id` string COMMENT '',&nbsp; > > &nbsp; `platform_id` string COMMENT '',&nbsp; > > &nbsp; `equipment_id` string COMMENT '',&nbsp; > > &nbsp; `param_id` string COMMENT '',&nbsp; > > &nbsp; `param_value` string COMMENT '',&nbsp; > > &nbsp; `remark` string COMMENT '',&nbsp; > > &nbsp; `create_time` string COMMENT '',&nbsp; > > &nbsp; `creator` string COMMENT '',&nbsp; > > &nbsp; `update_time` string COMMENT '',&nbsp; > > &nbsp; `update_person` string COMMENT '',&nbsp; > > &nbsp; `record_flag` double COMMENT '',&nbsp; > > &nbsp; `subject_id` string COMMENT '',&nbsp; > > &nbsp; `output_unit` string COMMENT '',&nbsp; > > &nbsp; `show_seq` double COMMENT '') > > COMMENT '' > > ROW FORMAT SERDE&nbsp; > > &nbsp; 'org.apache.hadoop.hive.ql.io.orc.OrcSerde'&nbsp; > > WITH SERDEPROPERTIES (&nbsp; > > &nbsp; 'field.delim'=',',&nbsp; > > &nbsp; 'serialization.format'=',')&nbsp; > > STORED AS INPUTFORMAT&nbsp; > > &nbsp; 'org.apache.hadoop.hive.ql.io > .orc.OrcInputFormat'&nbsp; > > OUTPUTFORMAT&nbsp; > > &nbsp; 'org.apache.hadoop.hive.ql.io.orc.OrcOutputFormat' > > LOCATION > > &nbsp; > > > 'hdfs://hadoop02:8020/user/hive/warehouse/cosldatacenter.db/ods_emp_maindata_iadc_paramvalue' > > TBLPROPERTIES ( > > &nbsp; 'COLUMN_STATS_ACCURATE'='false',&nbsp; > > &nbsp; 'last_modified_by'='root',&nbsp; > > &nbsp; 'last_modified_time'='1621834335',&nbsp; > > &nbsp; 'numFiles'='0',&nbsp; > > &nbsp; 'numRows'='-1',&nbsp; > > &nbsp; 'rawDataSize'='-1',&nbsp; > > &nbsp; 'totalSize'='0',&nbsp; > > 
&nbsp; 'transient_lastDdlTime'='1621834335') > > > > > > > > CREATE TABLE `cosldatacenter.ods_emp_md_large_equip`( > > &nbsp; `large_equip_id` string COMMENT '',&nbsp; > > &nbsp; `equip_name` string COMMENT '',&nbsp; > > &nbsp; `equip_type` string COMMENT '',&nbsp; > > &nbsp; `equip_function` string COMMENT '',&nbsp; > > &nbsp; `equip_board` string COMMENT '',&nbsp; > > &nbsp; `ship_yard` string COMMENT '',&nbsp; > > &nbsp; `manufacturer_date` string COMMENT '',&nbsp; > > &nbsp; `enqueue_date` string COMMENT '',&nbsp; > > &nbsp; `dockrepair_date` string COMMENT '',&nbsp; > > &nbsp; `scrap_date` string COMMENT '',&nbsp; > > &nbsp; `enqueue_mode` string COMMENT '',&nbsp; > > &nbsp; `work_for_org` string COMMENT '',&nbsp; > > &nbsp; `work_in_org` string COMMENT '',&nbsp; > > &nbsp; `old_age` string COMMENT '',&nbsp; > > &nbsp; `create_time` date COMMENT '',&nbsp; > > &nbsp; `creator` string COMMENT '',&nbsp; > > &nbsp; `update_time` date COMMENT '',&nbsp; > > &nbsp; `update_person` string COMMENT '',&nbsp; > > &nbsp; `record_flag` double COMMENT '',&nbsp; > > &nbsp; `data_timestamp` string COMMENT '',&nbsp; > > &nbsp; `work_unit_id` string COMMENT '',&nbsp; > > &nbsp; `work_status` string COMMENT '',&nbsp; > > &nbsp; `work_location` string COMMENT '',&nbsp; > > &nbsp; `work_area` string COMMENT '',&nbsp; > > &nbsp; `equip_code` string COMMENT '',&nbsp; > > &nbsp; `shi_main_power` double COMMENT '',&nbsp; > > &nbsp; `shi_total_len` double COMMENT '',&nbsp; > > &nbsp; `shi_type_width` double COMMENT '',&nbsp; > > &nbsp; `shi_type_depth` double COMMENT '',&nbsp; > > &nbsp; `shi_design_draft` double COMMENT '',&nbsp; > > &nbsp; `shi_total_tonnage` double COMMENT '',&nbsp; > > &nbsp; `shi_load_tonnage` double COMMENT '',&nbsp; > > &nbsp; `remark` string COMMENT '',&nbsp; > > &nbsp; `unit_classification1` string COMMENT '',&nbsp; > > &nbsp; `unit_classification2` string COMMENT '') > > COMMENT '' > > ROW FORMAT SERDE&nbsp; > > &nbsp; 
'org.apache.hadoop.hive.ql.io.orc.OrcSerde'&nbsp; > > WITH SERDEPROPERTIES (&nbsp; > > &nbsp; 'field.delim'=',',&nbsp; > > &nbsp; 'serialization.format'=',')&nbsp; > > STORED AS INPUTFORMAT&nbsp; > > &nbsp; 'org.apache.hadoop.hive.ql.io > .orc.OrcInputFormat'&nbsp; > > OUTPUTFORMAT&nbsp; > > &nbsp; 'org.apache.hadoop.hive.ql.io.orc.OrcOutputFormat' > > LOCATION > > &nbsp; > > > 'hdfs://hadoop02:8020/user/hive/warehouse/cosldatacenter.db/ods_emp_md_large_equip' > > TBLPROPERTIES ( > > &nbsp; 'COLUMN_STATS_ACCURATE'='false',&nbsp; > > &nbsp; 'last_modified_by'='root',&nbsp; > > &nbsp; 'last_modified_time'='1621834338',&nbsp; > > &nbsp; 'numFiles'='0',&nbsp; > > &nbsp; 'numRows'='-1',&nbsp; > > &nbsp; 'rawDataSize'='-1',&nbsp; > > &nbsp; 'totalSize'='0',&nbsp; > > &nbsp; 'transient_lastDdlTime'='1621834338') > > > > > > > > CREATE TABLE `cosldatacenter.ods_emp_maindata_iadc_paramdef`( > > &nbsp; `param_id` string COMMENT '',&nbsp; > > &nbsp; `iadc_id` string COMMENT '',&nbsp; > > &nbsp; `param_code` string COMMENT '',&nbsp; > > &nbsp; `param_en` string COMMENT '',&nbsp; > > &nbsp; `param_cn` string COMMENT '',&nbsp; > > &nbsp; `output_standard` string COMMENT '',&nbsp; > > &nbsp; `output_unit` string COMMENT '',&nbsp; > > &nbsp; `param_type` string COMMENT '',&nbsp; > > &nbsp; `param_value` string COMMENT '',&nbsp; > > &nbsp; `remark` string COMMENT '',&nbsp; > > &nbsp; `create_time` string COMMENT '',&nbsp; > > &nbsp; `creator` string COMMENT '',&nbsp; > > &nbsp; `update_time` string COMMENT '',&nbsp; > > &nbsp; `update_person` string COMMENT '',&nbsp; > > &nbsp; `record_flag` double COMMENT '') > > COMMENT '' > > ROW FORMAT SERDE&nbsp; > > &nbsp; 'org.apache.hadoop.hive.ql.io.orc.OrcSerde'&nbsp; > > WITH SERDEPROPERTIES (&nbsp; > > &nbsp; 'field.delim'=',',&nbsp; > > &nbsp; 'serialization.format'=',')&nbsp; > > STORED AS INPUTFORMAT&nbsp; > > &nbsp; 'org.apache.hadoop.hive.ql.io > .orc.OrcInputFormat'&nbsp; > > OUTPUTFORMAT&nbsp; > > &nbsp; 
'org.apache.hadoop.hive.ql.io.orc.OrcOutputFormat' > > LOCATION > > &nbsp; > > > 'hdfs://hadoop02:8020/user/hive/warehouse/cosldatacenter.db/ods_emp_maindata_iadc_paramdef' > > TBLPROPERTIES ( > > &nbsp; 'COLUMN_STATS_ACCURATE'='false',&nbsp; > > &nbsp; 'last_modified_by'='root',&nbsp; > > &nbsp; 'last_modified_time'='1621834335',&nbsp; > > &nbsp; 'numFiles'='0',&nbsp; > > &nbsp; 'numRows'='-1',&nbsp; > > &nbsp; 'rawDataSize'='-1',&nbsp; > > &nbsp; 'totalSize'='0',&nbsp; > > &nbsp; 'transient_lastDdlTime'='1621834335') > > > > > > > > CREATE TABLE `cosldatacenter.dw_riginfoparam`( > > &nbsp; `large_equip_id` string,&nbsp; > > &nbsp; `equip_code` string,&nbsp; > > &nbsp; `equip_name` string,&nbsp; > > &nbsp; `enqueue_date` string,&nbsp; > > &nbsp; `shi_total_len` double,&nbsp; > > &nbsp; `shi_type_width` double,&nbsp; > > &nbsp; `shi_type_depth` double,&nbsp; > > &nbsp; `moonpool` string,&nbsp; > > &nbsp; `maxwindvelocity` string,&nbsp; > > &nbsp; `maxwaveheight` string,&nbsp; > > &nbsp; `airgap` string,&nbsp; > > &nbsp; `maxopewaterdepth` string,&nbsp; > > &nbsp; `drilldepthcap` string,&nbsp; > > &nbsp; `drillvl` string,&nbsp; > > &nbsp; `drillwater` string,&nbsp; > > &nbsp; `potablewater` string) > > ROW FORMAT SERDE&nbsp; > > &nbsp; 'org.apache.hadoop.hive.ql.io.orc.OrcSerde'&nbsp; > > WITH SERDEPROPERTIES (&nbsp; > > &nbsp; 'field.delim'=',',&nbsp; > > &nbsp; 'serialization.format'=',')&nbsp; > > STORED AS INPUTFORMAT&nbsp; > > &nbsp; 'org.apache.hadoop.hive.ql.io > .orc.OrcInputFormat'&nbsp; > > OUTPUTFORMAT&nbsp; > > &nbsp; 'org.apache.hadoop.hive.ql.io.orc.OrcOutputFormat' > > LOCATION > > &nbsp; > > > 'hdfs://hadoop02:8020/user/hive/warehouse/cosldatacenter.db/dw_riginfoparam' > > TBLPROPERTIES ( > > &nbsp; 'COLUMN_STATS_ACCURATE'='false',&nbsp; > > &nbsp; 'numFiles'='1',&nbsp; > > &nbsp; 'numRows'='-1',&nbsp; > > &nbsp; 'rawDataSize'='-1',&nbsp; > > &nbsp; 'totalSize'='1564',&nbsp; > > &nbsp; 'transient_lastDdlTime'='1627353556') > > > > > > > > > > > > > > 
> > ------------------&nbsp;原始邮件&nbsp;------------------ > > 发件人: > > > "user-zh" > > > < > > [email protected]&gt;; > > 发送时间:&nbsp;2021年7月30日(星期五) 中午11:18 > > 收件人:&nbsp;"user-zh"<[email protected]&gt;; > > > > 主题:&nbsp;Re: flink 1.13.1 使用hive方言,执行hive sql解析报错 > > > > > > > > 你好, > > > > 能不能把你insert语句中使用到的表的DDL发一下?贴一下show create table的结果就可以了。 > > > > On Thu, Jul 29, 2021 at 9:03 PM Asahi Lee <[email protected] > &gt; > > wrote: > > > > &gt; hi! > > &gt; 我验证了,不是else的问题,下面的sql也报同样的问题?Invalid table alias or column > reference > > 'u' > > &gt; ,我的sql里面没有'u'的名称! > > &gt; CREATE CATALOG `tempo_df_hive_default_catalog` WITH( > > &gt; &amp;nbsp; &amp;nbsp; 'type' = 'hive', > > &gt; &amp;nbsp; &amp;nbsp; 'default-database' = 'default' > > &gt; ); > > &gt; USE CATALOG tempo_df_hive_default_catalog; > > &gt; CREATE TABLE IF NOT EXISTS `default`.`tempo_blackhole_table` > ( > > &gt; &amp;nbsp; &amp;nbsp;f0 INT > > &gt; ); > > &gt; insert into cosldatacenter.dw_riginfoparam > > &gt; select&amp;nbsp; > > &gt; c.LARGE_EQUIP_ID, > > &gt; c.EQUIP_CODE, > > &gt; c.EQUIP_NAME, > > &gt; c.ENQUEUE_DATE, > > &gt; c.SHI_TOTAL_LEN, > > &gt; c.SHI_TYPE_WIDTH, > > &gt; c.SHI_TYPE_DEPTH, > > &gt; case when b.param_cn = '月池尺寸' then a.param_value else null > end as > > Moonpool, > > &gt; case when b.param_cn = '最大风速' then a.param_value else null > end as > > &gt; MaxWindvelocity, > > &gt; case when b.param_cn = '最大波浪高度' then a.param_value else null > end as > > &gt; MaxWaveheight, > > &gt; case when b.param_cn = '气隙' then a.param_value else null end > as > > Airgap, > > &gt; case when b.param_cn = '设计最大作业水深' then a.param_value else > null end as > > &gt; MaxOpeWaterdepth, > > &gt; case when b.param_cn = '额定钻井深度' then a.param_value else null > end as > > &gt; DrilldepthCap, > > &gt; case when b.param_cn = '钻井可变载荷' then a.param_value else null > end as > > &gt; DrillVL, > > &gt; case when b.param_cn = '钻井水' then a.param_value else null > end as > > &gt; DrillWater, > > &gt; case when b.param_cn = 
'生活水' then a.param_value else null > end as > > &gt; PotableWater > > &gt; from cosldatacenter.ods_emp_maindata_iadc_paramvalue > a&amp;nbsp; > > &gt; inner join cosldatacenter.ods_emp_maindata_iadc_paramdef b on > > a.param_id = > > &gt; b.param_id > > &gt; inner join cosldatacenter.ods_emp_md_large_equip c on > > &gt; a.SUBJECT_ID=c.LARGE_EQUIP_ID; > > &gt; INSERT INTO `default`.`tempo_blackhole_table` SELECT 1 ; > > &gt; > > &gt; > > &gt; > > &gt; > > &gt; > > &gt; org.apache.hadoop.hive.ql.parse.SemanticException: Line > 2:178 Invalid > > &gt; table alias or column reference 'u': (possible column names > are: > > &gt; a.paramvalue_id, a.platform_id, a.equipment_id, a.param_id, > > a.param_value, > > &gt; a.remark, a.create_time, a.creator, a.update_time, > a.update_person, > > &gt; a.record_flag, a.subject_id, a.output_unit, a.show_seq, > b.param_id, > > &gt; b.iadc_id, b.param_code, b.param_en, b.param_cn, > b.output_standard, > > &gt; b.output_unit, b.param_type, b.param_value, b.remark, > b.create_time, > > &gt; b.creator, b.update_time, b.update_person, b.record_flag, > > c.large_equip_id, > > &gt; c.equip_name, c.equip_type, c.equip_function, c.equip_board, > > c.ship_yard, > > &gt; c.manufacturer_date, c.enqueue_date, c.dockrepair_date, > c.scrap_date, > > &gt; c.enqueue_mode, c.work_for_org, c.work_in_org, c.old_age, > > c.create_time, > > &gt; c.creator, c.update_time, c.update_person, c.record_flag, > > c.data_timestamp, > > &gt; c.work_unit_id, c.work_status, c.work_location, c.work_area, > > c.equip_code, > > &gt; c.shi_main_power, c.shi_total_len, c.shi_type_width, > c.shi_type_depth, > > &gt; c.shi_design_draft, c.shi_total_tonnage, c.shi_load_tonnage, > c.remark, > > &gt; c.unit_classification1, c.unit_classification2) > > &gt; > > &gt; > > &gt; > > &gt; > > &gt; > ------------------&amp;nbsp;原始邮件&amp;nbsp;------------------ > > &gt; 发件人: > > > 
&gt;&nbsp;&nbsp;&nbsp;&nbsp;&nbsp;&nbsp;&nbsp;&nbsp;&nbsp;&nbsp;&nbsp;&nbsp;&nbsp;&nbsp;&nbsp;&nbsp;&nbsp;&nbsp;&nbsp;&nbsp;&nbsp;&nbsp;&nbsp;&nbsp;&nbsp;&nbsp;&nbsp;&nbsp;&nbsp;&nbsp;&nbsp;&nbsp;&nbsp;&nbsp;&nbsp;&nbsp;&nbsp;&nbsp;&nbsp;&nbsp;&nbsp;&nbsp;&nbsp;&nbsp;&nbsp;&nbsp;&nbsp;&nbsp;&nbsp;&nbsp; > > "user-zh" > > > &gt;&nbsp;&nbsp;&nbsp;&nbsp;&nbsp;&nbsp;&nbsp;&nbsp;&nbsp;&nbsp;&nbsp;&nbsp;&nbsp;&nbsp;&nbsp;&nbsp;&nbsp;&nbsp;&nbsp;&nbsp;&nbsp;&nbsp;&nbsp;&nbsp;&nbsp;&nbsp;&nbsp;&nbsp;&nbsp;&nbsp;&nbsp;&nbsp;&nbsp;&nbsp;&nbsp;&nbsp;&nbsp;&nbsp;&nbsp;&nbsp;&nbsp;&nbsp;&nbsp;&nbsp;&nbsp;&nbsp;&nbsp;&nbsp;&nbsp;&nbsp;&nbsp;&nbsp;&nbsp;&nbsp;&nbsp;&nbsp;&nbsp;&nbsp;&nbsp;&nbsp;&nbsp;&nbsp;&nbsp;&nbsp;&nbsp;&nbsp;&nbsp;&nbsp; > > < > > &gt; [email protected]&amp;gt;; > > &gt; 发送时间:&amp;nbsp;2021年7月29日(星期四) 下午3:32 > > &gt; 收件人:&amp;nbsp;"user-zh"<[email protected] > &amp;gt;; > > &gt; > > &gt; 主题:&amp;nbsp;Re: flink 1.13.1 使用hive方言,执行hive sql解析报错 > > &gt; > > &gt; > > &gt; > > &gt; 看起来是sql语法报错,这里面的ELSE呢? > > &gt; > > &gt; 祝好, > > &gt; Leonard > > &gt; > > &gt; > > &gt; &amp;gt; 在 2021年7月27日,20:04,Asahi Lee > <[email protected]&amp;gt; > > 写道: > > &gt; &amp;gt; > > &gt; &amp;gt; CASE > > &gt; > > > &amp;gt;&nbsp;&nbsp;&nbsp;&nbsp;&nbsp;&nbsp;&nbsp;&nbsp;&nbsp;&nbsp;&nbsp; > > WHEN mipd.`param_cn` = '月池尺寸' THEN > > &gt; > > > &amp;gt;&nbsp;&nbsp;&nbsp;&nbsp;&nbsp;&nbsp;&nbsp;&nbsp;&nbsp;&nbsp;&nbsp; > > mipv.`param_value`&amp;amp;nbsp; > > &gt; &amp;gt;&nbsp;&nbsp;&nbsp; END AS `Moonpool` > > > > > > > > -- > > Best regards! > > Rui Li > > > > -- > Best regards! > Rui Li -- Best regards! Rui Li
