set table.dynamic-table-options.enabled=true;

SQL Client 的话这样配置,key 和 value 不要加引号


在 2021-12-21 20:20:11,"Fei Han" <[email protected]> 写道:
>@all:
>大家好!
>  我在实时读取hive的时候动态参数不生效,另外flink是否可以通过流读方式读取hive的普通表呢?
>版本如下:
>    Flink版本1.13.3
>   Hive版本hive2.1.1-CDH6.2.0
>设置的参数是set 'table.dynamic-table-options.enabled'='true'
>报错如下:
>INSERT INTO qhc_catalog.qhc_hms.qhc_ods_assassin_dept
>select * from qhc_catalog.qhc_assassin_ods.assassin_dept /*+ 
>OPTIONS('streaming-source.enable'='true', 
>'streaming-source.partition-order'='create-time') */
>2021-12-21 19:56:45,198 ERROR com.flink.streaming.core.JobApplication          
>            [] - 任务执行失败:
>org.apache.flink.table.api.ValidationException: The 'OPTIONS' hint is allowed 
>only when the config option 'table.dynamic-table-options.enabled' is set to 
>true.
>       at 
> org.apache.flink.table.planner.plan.schema.CatalogSourceTable.createFinalCatalogTable(CatalogSourceTable.java:104)
>  ~[flink-table-blink_2.12-1.13.3.jar:1.13.3]
>       at 
> org.apache.flink.table.planner.plan.schema.CatalogSourceTable.toRel(CatalogSourceTable.java:79)
>  ~[flink-table-blink_2.12-1.13.3.jar:1.13.3]
>       at 
> org.apache.calcite.sql2rel.SqlToRelConverter.toRel(SqlToRelConverter.java:3585)
>  ~[flink-table_2.12-1.13.3.jar:1.13.3]
>       at 
> org.apache.calcite.sql2rel.SqlToRelConverter.convertIdentifier(SqlToRelConverter.java:2507)
>  ~[flink-table_2.12-1.13.3.jar:1.13.3]
>       at 
> org.apache.calcite.sql2rel.SqlToRelConverter.convertFrom(SqlToRelConverter.java:2140)
>  ~[flink-table_2.12-1.13.3.jar:1.13.3]
>       at 
> org.apache.calcite.sql2rel.SqlToRelConverter.convertFrom(SqlToRelConverter.java:2050)
>  ~[flink-table_2.12-1.13.3.jar:1.13.3]
>       at 
> org.apache.calcite.sql2rel.SqlToRelConverter.convertSelectImpl(SqlToRelConverter.java:663)
>  ~[flink-table_2.12-1.13.3.jar:1.13.3]
>       at 
> org.apache.calcite.sql2rel.SqlToRelConverter.convertSelect(SqlToRelConverter.java:644)
>  ~[flink-table_2.12-1.13.3.jar:1.13.3]
>       at 
> org.apache.calcite.sql2rel.SqlToRelConverter.convertQueryRecursive(SqlToRelConverter.java:3438)
>  ~[flink-table_2.12-1.13.3.jar:1.13.3]
>       at 
> org.apache.calcite.sql2rel.SqlToRelConverter.convertQuery(SqlToRelConverter.java:570)
>  ~[flink-table_2.12-1.13.3.jar:1.13.3]
>       at 
> org.apache.flink.table.planner.calcite.FlinkPlannerImpl.org$apache$flink$table$planner$calcite$FlinkPlannerImpl$$rel(FlinkPlannerImpl.scala:169)
>  ~[flink-table-blink_2.12-1.13.3.jar:1.13.3]
>       at 
> org.apache.flink.table.planner.calcite.FlinkPlannerImpl.rel(FlinkPlannerImpl.scala:161)
>  ~[flink-table-blink_2.12-1.13.3.jar:1.13.3]
>       at 
> org.apache.flink.table.planner.operations.SqlToOperationConverter.toQueryOperation(SqlToOperationConverter.java:989)
>  ~[flink-table-blink_2.12-1.13.3.jar:1.13.3]
>       at 
> org.apache.flink.table.planner.operations.SqlToOperationConverter.convertSqlQuery(SqlToOperationConverter.java:958)
>  ~[flink-table-blink_2.12-1.13.3.jar:1.13.3]
>       at 
> org.apache.flink.table.planner.operations.SqlToOperationConverter.convert(SqlToOperationConverter.java:283)
>  ~[flink-table-blink_2.12-1.13.3.jar:1.13.3]
>       at 
> org.apache.flink.table.planner.operations.SqlToOperationConverter.convertSqlInsert(SqlToOperationConverter.java:603)
>  ~[flink-table-blink_2.12-1.13.3.jar:1.13.3]
>       at 
> org.apache.flink.table.planner.operations.SqlToOperationConverter.convert(SqlToOperationConverter.java:272)
>  ~[flink-table-blink_2.12-1.13.3.jar:1.13.3]
>       at 
> org.apache.flink.table.planner.delegation.ParserImpl.parse(ParserImpl.java:101)
>  ~[flink-table-blink_2.12-1.13.3.jar:1.13.3]
>       at 
> org.apache.flink.table.api.internal.StatementSetImpl.addInsertSql(StatementSetImpl.java:51)
>  ~[flink-table_2.12-1.13.3.jar:1.13.3]
>       at 
> com.flink.streaming.core.execute.ExecuteSql.exeSql(ExecuteSql.java:38) 
> ~[flink-streaming-core-1.3.0.RELEASE.jar:1.3.0.RELEASE]
>       at com.flink.streaming.core.JobApplication.main(JobApplication.java:80) 
> ~[flink-streaming-core-1.3.0.RELEASE.jar:1.3.0.RELEASE]
>       at sun.reflect.NativeMethodAccessorImpl.invoke0(Native Method) 
> ~[?:1.8.0_211]
>       at 
> sun.reflect.NativeMethodAccessorImpl.invoke(NativeMethodAccessorImpl.java:62) 
> ~[?:1.8.0_211]
>       at 
> sun.reflect.DelegatingMethodAccessorImpl.invoke(DelegatingMethodAccessorImpl.java:43)
>  ~[?:1.8.0_211]
>       at java.lang.reflect.Method.invoke(Method.java:498) ~[?:1.8.0_211]
>       at 
> org.apache.flink.client.program.PackagedProgram.callMainMethod(PackagedProgram.java:355)
>  ~[flink-dist_2.12-1.13.3.jar:1.13.3]
>       at 
> org.apache.flink.client.program.PackagedProgram.invokeInteractiveModeForExecution(PackagedProgram.java:222)
>  ~[flink-dist_2.12-1.13.3.jar:1.13.3]
>       at 
> org.apache.flink.client.ClientUtils.executeProgram(ClientUtils.java:114) 
> ~[flink-dist_2.12-1.13.3.jar:1.13.3]
>       at 
> org.apache.flink.client.cli.CliFrontend.executeProgram(CliFrontend.java:812) 
> ~[flink-dist_2.12-1.13.3.jar:1.13.3]
>       at org.apache.flink.client.cli.CliFrontend.run(CliFrontend.java:246) 
> ~[flink-dist_2.12-1.13.3.jar:1.13.3]
>       at 
> org.apache.flink.client.cli.CliFrontend.parseAndRun(CliFrontend.java:1054) 
> ~[flink-dist_2.12-1.13.3.jar:1.13.3]
>       at 
> org.apache.flink.client.cli.CliFrontend.lambda$main$10(CliFrontend.java:1132) 
> ~[flink-dist_2.12-1.13.3.jar:1.13.3]
>       at java.security.AccessController.doPrivileged(Native Method) 
> ~[?:1.8.0_211]
>       at javax.security.auth.Subject.doAs(Subject.java:422) [?:1.8.0_211]
>       at 
> org.apache.hadoop.security.UserGroupInformation.doAs(UserGroupInformation.java:1898)
>  [flink-shaded-hadoop-3-uber-3.1.1.7.2.8.0-224-9.0.jar:3.1.1.7.2.8.0-224-9.0]
>       at 
> org.apache.flink.runtime.security.contexts.HadoopSecurityContext.runSecured(HadoopSecurityContext.java:41)
>  [flink-dist_2.12-1.13.3.jar:1.13.3]
>       at org.apache.flink.client.cli.CliFrontend.main(CliFrontend.java:1132) 
> [flink-dist_2.12-1.13.3.jar:1.13.3]
>

回复