[
https://issues.apache.org/jira/browse/FLINK-18571?page=com.atlassian.jira.plugin.system.issuetabpanels:all-tabpanel
]
JasonLee closed FLINK-18571.
----------------------------
Resolution: Fixed
> When I use sql-client to write to Hive, an exception is thrown: "This is a bug.
> Please consider filing an issue"
> ----------------------------------------------------------------------------------------------------------
>
> Key: FLINK-18571
> URL: https://issues.apache.org/jira/browse/FLINK-18571
> Project: Flink
> Issue Type: Bug
> Components: Table SQL / Client
> Affects Versions: 1.11.0
> Reporter: JasonLee
> Priority: Major
> Fix For: 1.12.0
>
>
> When I use sql-client to write to Hive, the following exception is thrown:
> Exception in thread "main" org.apache.flink.table.client.SqlClientException: Unexpected exception. This is a bug. Please consider filing an issue.
>     at org.apache.flink.table.client.SqlClient.main(SqlClient.java:213)
> Caused by: java.lang.RuntimeException: Error running SQL job.
>     at org.apache.flink.table.client.gateway.local.LocalExecutor.lambda$executeUpdateInternal$14(LocalExecutor.java:598)
>     at org.apache.flink.table.client.gateway.local.ExecutionContext.wrapClassLoader(ExecutionContext.java:255)
>     at org.apache.flink.table.client.gateway.local.LocalExecutor.executeUpdateInternal(LocalExecutor.java:592)
>     at org.apache.flink.table.client.gateway.local.LocalExecutor.executeUpdate(LocalExecutor.java:515)
>     at org.apache.flink.table.client.cli.CliClient.callInsert(CliClient.java:596)
>     at org.apache.flink.table.client.cli.CliClient.callCommand(CliClient.java:315)
>     at java.util.Optional.ifPresent(Optional.java:159)
>     at org.apache.flink.table.client.cli.CliClient.open(CliClient.java:212)
>     at org.apache.flink.table.client.SqlClient.openCli(SqlClient.java:142)
>     at org.apache.flink.table.client.SqlClient.start(SqlClient.java:114)
>     at org.apache.flink.table.client.SqlClient.main(SqlClient.java:201)
> Caused by: java.util.concurrent.ExecutionException: org.apache.flink.runtime.client.JobSubmissionException: Failed to submit JobGraph.
>     at java.util.concurrent.CompletableFuture.reportGet(CompletableFuture.java:357)
>     at java.util.concurrent.CompletableFuture.get(CompletableFuture.java:1895)
>     at org.apache.flink.table.client.gateway.local.LocalExecutor.lambda$executeUpdateInternal$14(LocalExecutor.java:595)
>     ... 10 more
> Caused by: org.apache.flink.runtime.client.JobSubmissionException: Failed to submit JobGraph.
>     at org.apache.flink.client.program.rest.RestClusterClient.lambda$submitJob$7(RestClusterClient.java:366)
>     at java.util.concurrent.CompletableFuture.uniExceptionally(CompletableFuture.java:870)
>     at java.util.concurrent.CompletableFuture$UniExceptionally.tryFire(CompletableFuture.java:852)
>     at java.util.concurrent.CompletableFuture.postComplete(CompletableFuture.java:474)
>     at java.util.concurrent.CompletableFuture.completeExceptionally(CompletableFuture.java:1977)
>     at org.apache.flink.runtime.concurrent.FutureUtils.lambda$retryOperationWithDelay$8(FutureUtils.java:292)
>     at java.util.concurrent.CompletableFuture.uniWhenComplete(CompletableFuture.java:760)
>     at java.util.concurrent.CompletableFuture$UniWhenComplete.tryFire(CompletableFuture.java:736)
>     at java.util.concurrent.CompletableFuture.postComplete(CompletableFuture.java:474)
>     at java.util.concurrent.CompletableFuture.postFire(CompletableFuture.java:561)
>     at java.util.concurrent.CompletableFuture$UniCompose.tryFire(CompletableFuture.java:929)
>     at java.util.concurrent.CompletableFuture$Completion.run(CompletableFuture.java:442)
>     at java.util.concurrent.ThreadPoolExecutor.runWorker(ThreadPoolExecutor.java:1142)
>     at java.util.concurrent.ThreadPoolExecutor$Worker.run(ThreadPoolExecutor.java:617)
>     at java.lang.Thread.run(Thread.java:745)
> Caused by: org.apache.flink.runtime.rest.util.RestClientException: [org.apache.flink.runtime.rest.handler.RestHandlerException: Failed to deserialize JobGraph.
>     at org.apache.flink.runtime.rest.handler.job.JobSubmitHandler.lambda$loadJobGraph$2(JobSubmitHandler.java:127)
>     at java.util.concurrent.CompletableFuture$AsyncSupply.run(CompletableFuture.java:1590)
>     at java.util.concurrent.Executors$RunnableAdapter.call(Executors.java:511)
>     at java.util.concurrent.FutureTask.run(FutureTask.java:266)
>     at java.util.concurrent.ScheduledThreadPoolExecutor$ScheduledFutureTask.access$201(ScheduledThreadPoolExecutor.java:180)
>     at java.util.concurrent.ScheduledThreadPoolExecutor$ScheduledFutureTask.run(ScheduledThreadPoolExecutor.java:293)
>     at java.util.concurrent.ThreadPoolExecutor.runWorker(ThreadPoolExecutor.java:1142)
>     at java.util.concurrent.ThreadPoolExecutor$Worker.run(ThreadPoolExecutor.java:617)
>     at java.lang.Thread.run(Thread.java:745)
> Caused by: java.lang.ClassCastException: cannot assign instance of java.util.Collections$UnmodifiableList to field org.apache.flink.runtime.jobgraph.JobVertex.operatorIDs of type java.util.ArrayList in instance of org.apache.flink.runtime.jobgraph.JobVertex
>     at java.io.ObjectStreamClass$FieldReflector.setObjFieldValues(ObjectStreamClass.java:2133)
>     at java.io.ObjectStreamClass.setObjFieldValues(ObjectStreamClass.java:1305)
>     at java.io.ObjectInputStream.defaultReadFields(ObjectInputStream.java:2024)
>     at java.io.ObjectInputStream.readSerialData(ObjectInputStream.java:1942)
>     at java.io.ObjectInputStream.readOrdinaryObject(ObjectInputStream.java:1808)
>     at java.io.ObjectInputStream.readObject0(ObjectInputStream.java:1353)
>     at java.io.ObjectInputStream.defaultReadFields(ObjectInputStream.java:2018)
>     at java.io.ObjectInputStream.readSerialData(ObjectInputStream.java:1942)
>     at java.io.ObjectInputStream.readOrdinaryObject(ObjectInputStream.java:1808)
>     at java.io.ObjectInputStream.readObject0(ObjectInputStream.java:1353)
>     at java.io.ObjectInputStream.defaultReadFields(ObjectInputStream.java:2018)
>     at java.io.ObjectInputStream.readSerialData(ObjectInputStream.java:1942)
>     at java.io.ObjectInputStream.readOrdinaryObject(ObjectInputStream.java:1808)
>     at java.io.ObjectInputStream.readObject0(ObjectInputStream.java:1353)
>     at java.io.ObjectInputStream.readObject(ObjectInputStream.java:373)
>     at java.util.ArrayList.readObject(ArrayList.java:791)
>     at sun.reflect.GeneratedMethodAccessor23.invoke(Unknown Source)
>     at sun.reflect.DelegatingMethodAccessorImpl.invoke(DelegatingMethodAccessorImpl.java:43)
>     at java.lang.reflect.Method.invoke(Method.java:498)
>     at java.io.ObjectStreamClass.invokeReadObject(ObjectStreamClass.java:1058)
>     at java.io.ObjectInputStream.readSerialData(ObjectInputStream.java:1909)
>     at java.io.ObjectInputStream.readOrdinaryObject(ObjectInputStream.java:1808)
>     at java.io.ObjectInputStream.readObject0(ObjectInputStream.java:1353)
>     at java.io.ObjectInputStream.defaultReadFields(ObjectInputStream.java:2018)
>     at java.io.ObjectInputStream.readSerialData(ObjectInputStream.java:1942)
>     at java.io.ObjectInputStream.readOrdinaryObject(ObjectInputStream.java:1808)
>     at java.io.ObjectInputStream.readObject0(ObjectInputStream.java:1353)
>     at java.io.ObjectInputStream.readObject(ObjectInputStream.java:373)
>     at java.util.HashMap.readObject(HashMap.java:1404)
>     at sun.reflect.GeneratedMethodAccessor24.invoke(Unknown Source)
>     at sun.reflect.DelegatingMethodAccessorImpl.invoke(DelegatingMethodAccessorImpl.java:43)
>     at java.lang.reflect.Method.invoke(Method.java:498)
>     at java.io.ObjectStreamClass.invokeReadObject(ObjectStreamClass.java:1058)
>     at java.io.ObjectInputStream.readSerialData(ObjectInputStream.java:1909)
>     at java.io.ObjectInputStream.readOrdinaryObject(ObjectInputStream.java:1808)
>     at java.io.ObjectInputStream.readObject0(ObjectInputStream.java:1353)
>     at java.io.ObjectInputStream.defaultReadFields(ObjectInputStream.java:2018)
>     at java.io.ObjectInputStream.readSerialData(ObjectInputStream.java:1942)
>     at java.io.ObjectInputStream.readOrdinaryObject(ObjectInputStream.java:1808)
>     at java.io.ObjectInputStream.readObject0(ObjectInputStream.java:1353)
>     at java.io.ObjectInputStream.readObject(ObjectInputStream.java:373)
>     at org.apache.flink.runtime.rest.handler.job.JobSubmitHandler.lambda$loadJobGraph$2(JobSubmitHandler.java:125)
>     ... 8 more]
>     at org.apache.flink.runtime.rest.RestClient.parseResponse(RestClient.java:390)
>     at org.apache.flink.runtime.rest.RestClient.lambda$submitRequest$3(RestClient.java:374)
>     at java.util.concurrent.CompletableFuture.uniCompose(CompletableFuture.java:952)
>     at java.util.concurrent.CompletableFuture$UniCompose.tryFire(CompletableFuture.java:926)
>     ... 4 more
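>
> For illustration only (an assumption about the mechanism, not Flink code): the ClassCastException at the bottom of the trace is the JVM refusing to assign a value of one concrete class to a field declared with a different concrete class while the JobGraph is deserialized on the cluster side, which can happen when the SQL client and the cluster are not running the same Flink build. A minimal, self-contained Java sketch of that constraint, using reflection and hypothetical class and field names, is:
>
> import java.lang.reflect.Field;
> import java.util.ArrayList;
> import java.util.Collections;
> import java.util.List;
>
> // Hypothetical demo class, not part of Flink.
> public class FieldTypeMismatchDemo {
>
>     // Like JobVertex.operatorIDs in the trace above, the field is declared
>     // with the concrete type ArrayList rather than the List interface.
>     static class Holder {
>         ArrayList<String> ids = new ArrayList<>();
>     }
>
>     public static void main(String[] args) throws Exception {
>         Holder holder = new Holder();
>         // An unmodifiable wrapper, analogous to the Collections$UnmodifiableList
>         // that the serialized JobGraph apparently contained for operatorIDs.
>         List<String> wrapped = Collections.unmodifiableList(new ArrayList<>());
>
>         Field field = Holder.class.getDeclaredField("ids");
>         field.setAccessible(true);
>         try {
>             // The JVM rejects the assignment because the runtime class of the
>             // value does not match the declared field type. Plain reflection
>             // reports IllegalArgumentException; ObjectInputStream's field
>             // reflector surfaces the same mismatch as the ClassCastException
>             // seen in the trace.
>             field.set(holder, wrapped);
>         } catch (IllegalArgumentException e) {
>             System.out.println("Rejected: " + e.getMessage());
>         }
>     }
> }
>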
> The SQL statements are as follows:
>
> CREATE TABLE kafka_table (
> name VARCHAR COMMENT 'name',
> age int COMMENT 'age',
> ts BIGINT COMMENT 'timestamp',
> t as TO_TIMESTAMP(FROM_UNIXTIME(ts/1000,'yyyy-MM-dd HH:mm:ss')),
> proctime as PROCTIME(),
> WATERMARK FOR t AS t - INTERVAL '5' SECOND
> )
> WITH (
> 'connector.type' = 'kafka', -- use the kafka connector
> 'connector.version' = 'universal', -- kafka version
> 'connector.topic' = 'jason_flink', -- kafka topic
> 'connector.startup-mode' = 'latest-offset', -- start reading from the latest offset
> 'connector.properties.bootstrap.servers' = 'master:9092,storm1:9092,storm2:9092', -- broker connection info
> 'connector.properties.group.id' = 'jason_flink_test',
> 'update-mode' = 'append',
> 'format.type' = 'json', -- the source data format is json
> 'format.derive-schema' = 'true' -- derive the json parsing rules from the DDL schema
> );
> CREATE TABLE fs_table (
> name STRING,
> age int,
> dt STRING
> ) PARTITIONED BY (dt) WITH (
> 'connector'='filesystem',
> 'path'='/home/jason/bigdata/',
> 'format'='parquet',
> 'sink.partition-commit.delay'='1s',
> 'sink.partition-commit.policy.kind'='success-file'
> );
> INSERT INTO fs_table SELECT name, age, DATE_FORMAT(t, 'yyyy-MM-dd') FROM
> kafka_table;
> I can run the same statements locally (in the IDE), but I cannot run them in sql-client.
>
>
--
This message was sent by Atlassian Jira
(v8.3.4#803005)