Hi all, the following code runs fine in the development environment, but fails with an error after packaging and deployment:
Code:
CREATE VIEW used_num_common
(toolName, region, type, flavor, used_num)
AS
select info.toolName as toolName, r.regionName as region, f.type, f.flavor, count(1) as used_num
from tbl_schedule_job /*+ OPTIONS('server-id'='1001-1031') */ job
join tbl_schedule_task /*+ OPTIONS('server-id'='2001-2031') */ task
on job.jobId = task.jobId
join tbl_broker_node /*+ OPTIONS('server-id'='3001-3031') */ node
on task.nodeId = node.id
join tbl_app_info /*+ OPTIONS('server-id'='4001-4031') */ info
on job.appId = info.appId
join tbl_region /*+ OPTIONS('server-id'='5001-5031') */ r
on node.region = r.region
join tbl_flavor /*+ OPTIONS('server-id'='6001-6031') */ f
on node.resourcesSpec = f.flavor
where job.jobStatus in ('RUNNING','ERROR','INITING')
and task.taskStatus in ('RUNNING','ERROR','INITING')
and node.machineStatus <> 'DELETED'
and info.toolName is not null
group by info.toolName, r.regionName, f.type, f.flavor
…
The error after packaging and deployment is as follows:
The main method caused an error: class org.apache.calcite.sql.SqlSyntax$6: 
SPECIAL
2022-02-08 13:33:39,350 WARN 
org.apache.flink.client.deployment.application.DetachedApplicationRunner [] - 
Could not execute application:
org.apache.flink.client.program.ProgramInvocationException: The main method 
caused an error: class org.apache.calcite.sql.SqlSyntax$6: SPECIAL
    at 
org.apache.flink.client.program.PackagedProgram.callMainMethod(PackagedProgram.java:372)
 ~[flink-dist_2.11-1.13.0.jar:1.13.0]
    at 
org.apache.flink.client.program.PackagedProgram.invokeInteractiveModeForExecution(PackagedProgram.java:222)
 ~[flink-dist_2.11-1.13.0.jar:1.13.0]
    at org.apache.flink.client.ClientUtils.executeProgram(ClientUtils.java:114) 
~[flink-dist_2.11-1.13.0.jar:1.13.0]
    at 
org.apache.flink.client.deployment.application.DetachedApplicationRunner.tryExecuteJobs(DetachedApplicationRunner.java:84)
 ~[flink-dist_2.11-1.13.0.jar:1.13.0]
    at 
org.apache.flink.client.deployment.application.DetachedApplicationRunner.run(DetachedApplicationRunner.java:70)
 ~[flink-dist_2.11-1.13.0.jar:1.13.0]
    at 
org.apache.flink.runtime.webmonitor.handlers.JarRunHandler.lambda$handleRequest$0(JarRunHandler.java:102)
 ~[flink-dist_2.11-1.13.0.jar:1.13.0]
    at 
java.util.concurrent.CompletableFuture$AsyncSupply.run(CompletableFuture.java:1700)
 [?:?]
    at java.util.concurrent.Executors$RunnableAdapter.call(Executors.java:515) 
[?:?]
    at java.util.concurrent.FutureTask.run(FutureTask.java:264) [?:?]
    at 
java.util.concurrent.ScheduledThreadPoolExecutor$ScheduledFutureTask.run(ScheduledThreadPoolExecutor.java:304)
 [?:?]
    at 
java.util.concurrent.ThreadPoolExecutor.runWorker(ThreadPoolExecutor.java:1128) 
[?:?]
    at 
java.util.concurrent.ThreadPoolExecutor$Worker.run(ThreadPoolExecutor.java:628) 
[?:?]
    at java.lang.Thread.run(Thread.java:829) [?:?]
Caused by: java.lang.UnsupportedOperationException: class 
org.apache.calcite.sql.SqlSyntax$6: SPECIAL
    at org.apache.calcite.util.Util.needToImplement(Util.java:1075) 
~[flink-table_2.11-1.13.0.jar:1.13.0]
    at org.apache.calcite.sql.SqlSyntax$6.unparse(SqlSyntax.java:116) 
~[flink-table_2.11-1.13.0.jar:1.13.0]
    at org.apache.calcite.sql.SqlOperator.unparse(SqlOperator.java:329) 
~[flink-table_2.11-1.13.0.jar:1.13.0]
    at org.apache.calcite.sql.SqlDialect.unparseCall(SqlDialect.java:453) 
~[flink-table_2.11-1.13.0.jar:1.13.0]
    at org.apache.calcite.sql.SqlCall.unparse(SqlCall.java:101) 
~[flink-table_2.11-1.13.0.jar:1.13.0]
    at org.apache.calcite.sql.SqlJoin$SqlJoinOperator.unparse(SqlJoin.java:199) 
~[flink-table_2.11-1.13.0.jar:1.13.0]
    at org.apache.calcite.sql.SqlDialect.unparseCall(SqlDialect.java:453) 
~[flink-table_2.11-1.13.0.jar:1.13.0]
    at org.apache.calcite.sql.SqlCall.unparse(SqlCall.java:104) 
~[flink-table_2.11-1.13.0.jar:1.13.0]
    at org.apache.calcite.sql.SqlJoin$SqlJoinOperator.unparse(SqlJoin.java:199) 
~[flink-table_2.11-1.13.0.jar:1.13.0]
    at org.apache.calcite.sql.SqlDialect.unparseCall(SqlDialect.java:453) 
~[flink-table_2.11-1.13.0.jar:1.13.0]
    at org.apache.calcite.sql.SqlCall.unparse(SqlCall.java:104) 
~[flink-table_2.11-1.13.0.jar:1.13.0]
    at org.apache.calcite.sql.SqlJoin$SqlJoinOperator.unparse(SqlJoin.java:199) 
~[flink-table_2.11-1.13.0.jar:1.13.0]
    at org.apache.calcite.sql.SqlDialect.unparseCall(SqlDialect.java:453) 
~[flink-table_2.11-1.13.0.jar:1.13.0]
    at org.apache.calcite.sql.SqlCall.unparse(SqlCall.java:104) 
~[flink-table_2.11-1.13.0.jar:1.13.0]
    at org.apache.calcite.sql.SqlJoin$SqlJoinOperator.unparse(SqlJoin.java:199) 
~[flink-table_2.11-1.13.0.jar:1.13.0]
    at org.apache.calcite.sql.SqlDialect.unparseCall(SqlDialect.java:453) 
~[flink-table_2.11-1.13.0.jar:1.13.0]
    at org.apache.calcite.sql.SqlCall.unparse(SqlCall.java:104) 
~[flink-table_2.11-1.13.0.jar:1.13.0]
    at 
org.apache.calcite.sql.SqlSelectOperator.unparse(SqlSelectOperator.java:176) 
~[flink-table_2.11-1.13.0.jar:1.13.0]
    at org.apache.calcite.sql.SqlDialect.unparseCall(SqlDialect.java:453) 
~[flink-table_2.11-1.13.0.jar:1.13.0]
    at org.apache.calcite.sql.SqlSelect.unparse(SqlSelect.java:246) 
~[flink-table_2.11-1.13.0.jar:1.13.0]
    at org.apache.calcite.sql.SqlNode.toSqlString(SqlNode.java:154) 
~[flink-table_2.11-1.13.0.jar:1.13.0]
    at org.apache.calcite.sql.SqlNode.toSqlString(SqlNode.java:176) 
~[flink-table_2.11-1.13.0.jar:1.13.0]
    at org.apache.calcite.sql.SqlNode.toSqlString(SqlNode.java:185) 
~[flink-table_2.11-1.13.0.jar:1.13.0]
    at 
org.apache.flink.table.planner.operations.SqlToOperationConverter.getQuotedSqlString(SqlToOperationConverter.java:962)
 ~[flink-table-blink_2.11-1.13.0.jar:1.13.0]
    at 
org.apache.flink.table.planner.utils.Expander$Expanded.substitute(Expander.java:183)
 ~[flink-table-blink_2.11-1.13.0.jar:1.13.0]
    at 
org.apache.flink.table.planner.operations.SqlToOperationConverter.convertViewQuery(SqlToOperationConverter.java:846)
 ~[flink-table-blink_2.11-1.13.0.jar:1.13.0]
    at 
org.apache.flink.table.planner.operations.SqlToOperationConverter.convertCreateView(SqlToOperationConverter.java:815)
 ~[flink-table-blink_2.11-1.13.0.jar:1.13.0]
    at 
org.apache.flink.table.planner.operations.SqlToOperationConverter.convert(SqlToOperationConverter.java:246)
 ~[flink-table-blink_2.11-1.13.0.jar:1.13.0]
    at 
org.apache.flink.table.planner.delegation.ParserImpl.parse(ParserImpl.java:99) 
~[flink-table-blink_2.11-1.13.0.jar:1.13.0]
    at 
org.apache.flink.table.api.internal.TableEnvironmentImpl.executeSql(TableEnvironmentImpl.java:722)
 ~[flink-table_2.11-1.13.0.jar:1.13.0]
    at com.pro.TemporalJoinTest.main(TemporalJoinTest.java:182) ~[?:?]
    at jdk.internal.reflect.NativeMethodAccessorImpl.invoke0(Native Method) 
~[?:?]
    at 
jdk.internal.reflect.NativeMethodAccessorImpl.invoke(NativeMethodAccessorImpl.java:62)
 ~[?:?]
    at 
jdk.internal.reflect.DelegatingMethodAccessorImpl.invoke(DelegatingMethodAccessorImpl.java:43)
 ~[?:?]
    at java.lang.reflect.Method.invoke(Method.java:566) ~[?:?]
    at 
org.apache.flink.client.program.PackagedProgram.callMainMethod(PackagedProgram.java:355)
 ~[flink-dist_2.11-1.13.0.jar:1.13.0]
    ... 12 more
2022-02-08 13:33:39,352 ERROR 
org.apache.flink.runtime.webmonitor.handlers.JarRunHandler [] - Exception 
occurred in REST handler: Could not execute application.

Judging from the message this looks like a syntax problem, but if it really were one, the statement should also fail locally. I found a similar report online, but no solution was posted:
https://www.mail-archive.com/user-zh@flink.apache.org/msg10502.html
In that thread, Flink SQL 1.12 did not support creating a view that streams from a Hive partitioned table, and the error was: [ERROR] Could not execute SQL statement. Reason: java.lang.UnsupportedOperationException: class org.apache.calcite.sql.SqlSyntax$6: SPECIAL
If I place the OPTIONS hints in the INSERT … SELECT … statement instead, the packaged job runs successfully. However, because the view joins several tables, specifying the OPTIONS directly in the INSERT … SELECT statement causes MySQL CDC server-id conflicts; the sketch below shows the placement I mean.
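A minimal sketch of that hint placement, assuming a hypothetical sink table used_num_sink and keeping only two of the CDC source tables for brevity:

-- Hints attached to the source tables inside the INSERT ... SELECT itself,
-- instead of inside the CREATE VIEW (used_num_sink is illustrative only).
INSERT INTO used_num_sink
SELECT info.toolName AS toolName, count(1) AS used_num
FROM tbl_schedule_job /*+ OPTIONS('server-id'='1001-1031') */ job
JOIN tbl_app_info /*+ OPTIONS('server-id'='4001-4031') */ info
  ON job.appId = info.appId
WHERE job.jobStatus IN ('RUNNING', 'ERROR', 'INITING')
GROUP BY info.toolName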
The JARs added in the local development environment and in the deployment environment are identical: Flink 1.13, MySQL CDC 2.1.1.

Dependencies used locally; when packaging for deployment, the dependencies under the "not packaged" comments are excluded:
<!--        flink-table related dependencies (not packaged) -->
        <dependency>
            <groupId>org.apache.flink</groupId>
            <artifactId>flink-streaming-java_${scala.binary.version}</artifactId>
            <version>${flink.version}</version>
<!--            <scope>provided</scope>-->
        </dependency>
        <!-- Flink runtime web UI -->
        <dependency>
            <groupId>org.apache.flink</groupId>
            <artifactId>flink-runtime-web_${scala.binary.version}</artifactId>
            <version>${flink.version}</version>
<!--            <scope>provided</scope>-->
        </dependency>
        <dependency>
            <groupId>org.apache.flink</groupId>
            <artifactId>flink-streaming-scala_${scala.binary.version}</artifactId>
            <version>${flink.version}</version>
<!--            <scope>provided</scope>-->
        </dependency>
        <dependency>
            <groupId>org.apache.flink</groupId>
            <artifactId>flink-table-planner-blink_${scala.binary.version}</artifactId>
            <version>${flink.version}</version>
<!--            <scope>provided</scope>-->
        </dependency>
        <dependency>
            <groupId>org.apache.flink</groupId>
            <artifactId>flink-table-api-java-bridge_${scala.binary.version}</artifactId>
            <version>${flink.version}</version>
<!--            <scope>provided</scope>-->
        </dependency>
        <dependency>
            <groupId>org.apache.flink</groupId>
            <artifactId>flink-core</artifactId>
            <version>${flink.version}</version>
<!--            <scope>provided</scope>-->
        </dependency>
        <dependency>
            <groupId>org.apache.flink</groupId>
            <artifactId>flink-json</artifactId>
            <version>${flink.version}</version>
<!--            <scope>provided</scope>-->
        </dependency>


        <!-- Flink connector clients -->
        <dependency>
            <groupId>mysql</groupId>
            <artifactId>mysql-connector-java</artifactId>
            <version>8.0.19</version>
        </dependency>
        <dependency>
            <groupId>org.postgresql</groupId>
            <artifactId>postgresql</artifactId>
            <version>9.2-1004-jdbc4</version>
<!--            <scope>provided</scope>-->
        </dependency>
        <!-- must be packaged -->
        <dependency>
            <groupId>org.apache.flink</groupId>
            <artifactId>flink-connector-kafka_${scala.binary.version}</artifactId>
            <version>${flink.version}</version>
<!--            <scope>provided</scope>-->
        </dependency>
        <!-- must be packaged -->
        <dependency>
            <groupId>org.apache.kafka</groupId>
            <artifactId>kafka-clients</artifactId>
            <version>0.11.0.2</version>
<!--            <scope>provided</scope>-->
        </dependency>
        <dependency>
            <groupId>org.apache.flink</groupId>
            <artifactId>flink-connector-jdbc_${scala.binary.version}</artifactId>
            <version>${flink.version}</version>
            <!--<scope>provided</scope>-->
        </dependency>
        <!-- must be packaged -->

<!--        Flink SQL related dependencies (not packaged) -->
        <dependency>
            <groupId>org.apache.flink</groupId>
            <artifactId>flink-clients_${scala.binary.version}</artifactId>
            <version>${flink.version}</version>
<!--            <scope>provided</scope>-->
        </dependency>
        <dependency>
            <groupId>org.apache.flink</groupId>
            <artifactId>flink-sql-client_${scala.binary.version}</artifactId>
            <version>${flink.version}</version>
<!--            <scope>provided</scope>-->
        </dependency>
        <dependency>
            <groupId>com.ververica</groupId>
            <artifactId>flink-connector-mysql-cdc</artifactId>
            <!-- the dependency is available only for stable releases. -->
            <version>2.1.1</version>
<!--            <scope>provided</scope>-->
        </dependency>

<!--        storage settings -->
        <dependency>
            <groupId>org.apache.flink</groupId>
            <artifactId>flink-statebackend-rocksdb_2.11</artifactId>
            <version>${flink.version}</version>
<!--            <scope>provided</scope>-->
        </dependency>


<!--        utility dependencies -->
        <dependency>
            <groupId>com.github.jsqlparser</groupId>
            <artifactId>jsqlparser</artifactId>
            <version>1.2</version>
        </dependency>

        <dependency>
            <groupId>org.slf4j</groupId>
            <artifactId>slf4j-api</artifactId>
            <version>1.7.25</version>
            <!--<scope>provided</scope>-->
        </dependency>
        <dependency>
            <groupId>org.slf4j</groupId>
            <artifactId>slf4j-simple</artifactId>
            <version>1.7.25</version>
            <!--<scope>provided</scope>-->
        </dependency>
    </dependencies>
JARs on the deployment machine:
[attached screenshot: image001.png]
Thank you for reading, and for any answers.
