[
https://issues.apache.org/jira/browse/FLINK-19957?page=com.atlassian.jira.plugin.system.issuetabpanels:comment-tabpanel&focusedCommentId=17446403#comment-17446403
]
XiangQianLiu commented on FLINK-19957:
--------------------------------------
h3. The IDEA (local IDE) can run the Flink-on-Hive task, but it fails on the cluster
* flink on yarn log
*
{code:java}
//代码占位符
java.lang.ExceptionInInitializerError
at sun.reflect.NativeConstructorAccessorImpl.newInstance0(Native Method)
at
sun.reflect.NativeConstructorAccessorImpl.newInstance(NativeConstructorAccessorImpl.java:62)
at
sun.reflect.DelegatingConstructorAccessorImpl.newInstance(DelegatingConstructorAccessorImpl.java:45)
at java.lang.reflect.Constructor.newInstance(Constructor.java:423)
at
org.apache.hive.common.util.ReflectionUtil.newInstance(ReflectionUtil.java:83)
at
org.apache.hadoop.hive.ql.exec.Registry.registerUDAF(Registry.java:238)
at
org.apache.hadoop.hive.ql.exec.Registry.registerUDAF(Registry.java:231)
at
org.apache.hadoop.hive.ql.exec.FunctionRegistry.<clinit>(FunctionRegistry.java:430)
at
org.apache.hadoop.hive.ql.metadata.Hive.reloadFunctions(Hive.java:243)
at
org.apache.hadoop.hive.ql.metadata.Hive.registerAllFunctionsOnce(Hive.java:227)
at org.apache.hadoop.hive.ql.metadata.Hive.<init>(Hive.java:384)
at org.apache.hadoop.hive.ql.metadata.Hive.create(Hive.java:328)
at org.apache.hadoop.hive.ql.metadata.Hive.getInternal(Hive.java:308)
at org.apache.hadoop.hive.ql.metadata.Hive.get(Hive.java:284)
at
org.apache.hadoop.hive.ql.session.SessionState.start(SessionState.java:581)
at
org.apache.hadoop.hive.ql.session.SessionState.start(SessionState.java:544)
at
org.apache.flink.table.planner.delegation.hive.HiveParser.startSessionState(HiveParser.java:348)
at
org.apache.flink.table.planner.delegation.hive.HiveParser.parse(HiveParser.java:215)
at
org.apache.flink.table.api.internal.TableEnvironmentImpl.executeSql(TableEnvironmentImpl.java:724)
at
org.explore.exec.Runner.flinkonhive.FlinkSqlKafkaOnHive.main(FlinkSqlKafkaOnHive.java:92)
at sun.reflect.NativeMethodAccessorImpl.invoke0(Native Method)
at
sun.reflect.NativeMethodAccessorImpl.invoke(NativeMethodAccessorImpl.java:62)
at
sun.reflect.DelegatingMethodAccessorImpl.invoke(DelegatingMethodAccessorImpl.java:43)
at java.lang.reflect.Method.invoke(Method.java:498)
at
org.apache.flink.client.program.PackagedProgram.callMainMethod(PackagedProgram.java:355)
at
org.apache.flink.client.program.PackagedProgram.invokeInteractiveModeForExecution(PackagedProgram.java:222)
at
org.apache.flink.client.ClientUtils.executeProgram(ClientUtils.java:114)
at
org.apache.flink.client.cli.CliFrontend.executeProgram(CliFrontend.java:812)
at org.apache.flink.client.cli.CliFrontend.run(CliFrontend.java:246)
at
org.apache.flink.client.cli.CliFrontend.parseAndRun(CliFrontend.java:1054)
at
org.apache.flink.client.cli.CliFrontend.lambda$main$10(CliFrontend.java:1132)
at java.security.AccessController.doPrivileged(Native Method)
at javax.security.auth.Subject.doAs(Subject.java:422)
at
org.apache.hadoop.security.UserGroupInformation.doAs(UserGroupInformation.java:1875)
at
org.apache.flink.runtime.security.contexts.HadoopSecurityContext.runSecured(HadoopSecurityContext.java:41)
at org.apache.flink.client.cli.CliFrontend.main(CliFrontend.java:1132)
Caused by: java.lang.RuntimeException: java.lang.IllegalArgumentException:
Unrecognized Hadoop major version number: 3.0.0-cdh6.3.2
at
org.apache.hadoop.hive.shims.ShimLoader.getHadoopShims(ShimLoader.java:102)
at
org.apache.hadoop.hive.ql.udf.UDAFPercentile.<clinit>(UDAFPercentile.java:51)
... 36 more
Caused by: java.lang.IllegalArgumentException: Unrecognized Hadoop major
version number: 3.0.0-cdh6.3.2
at
org.apache.hadoop.hive.shims.ShimLoader.getMajorVersion(ShimLoader.java:177)
at
org.apache.hadoop.hive.shims.ShimLoader.loadShims(ShimLoader.java:144)
at
org.apache.hadoop.hive.shims.ShimLoader.getHadoopShims(ShimLoader.java:99)
... 37 more{code}
* lib catalogue
*
{code:java}
//代码占位符
-rw-r--r-- 1 root root 167761 11月 19 11:35 antlr-runtime-3.5.2.jar
-rw-r--r-- 1 root root 7786583 11月 15 12:54
flink-connector-hive_2.11-1.13.2.jar
-rw-r--r-- 1 root root 355553 11月 15 16:23
flink-connector-kafka_2.11-1.13.2.jar
-rw-r--r-- 1 root root 115016309 11月 15 15:09 flink-dist_2.11-1.13.2.jar
-rw-r--r-- 1 root root 148126 11月 15 15:10 flink-json-1.13.2.jar
-rw-r--r-- 1 root root 42224163 11月 13 16:32
flink-sql-connector-hive-2.2.0_2.11-1.13.3.jar
-rw-r--r-- 1 root root 36420572 7月 23 19:37 flink-table_2.11-1.13.2.jar
-rw-r--r-- 1 root root 509451 11月 15 15:12 flink-table-api-java-1.13.2.jar
-rw-r--r-- 1 root root 125882 11月 15 15:10
flink-table-api-java-bridge_2.11-1.13.2.jar
-rw-r--r-- 1 root root 40981118 7月 23 19:37 flink-table-blink_2.11-1.13.2.jar
-rw-r--r-- 1 root root 918203 11月 15 15:12 flink-table-common-1.13.2.jar
-rw-r--r-- 1 root root 37223607 11月 15 15:13
flink-table-planner-blink_2.11-1.13.2.jar
-rw-r--r-- 1 root root 1367591 11月 15 15:14
flink-table-runtime-blink_2.11-1.13.2.jar
-rw-r--r-- 1 root root 35803898 11月 9 2019 hive-exec-2.1.1-cdh6.3.2.jar
-rw-r--r-- 1 root root 67114 3月 25 2020 log4j-1.2-api-2.12.1.jar
-rw-r--r-- 1 root root 276771 3月 25 2020 log4j-api-2.12.1.jar
-rw-r--r-- 1 root root 1674433 3月 25 2020 log4j-core-2.12.1.jar
-rw-r--r-- 1 root root 23518 3月 25 2020 log4j-slf4j-impl-2.12.1.jar {code}
- pom as follows
{code:java}
//代码占位符 pom
<properties>
<maven.compiler.source>8</maven.compiler.source>
<maven.compiler.target>8</maven.compiler.target>
<encoding>UTF-8</encoding>
<scala.version>2.11.12</scala.version>
<scala.binary.version>2.11</scala.binary.version>
<flink.version>1.13.2</flink.version>
<hadoop.version>3.0.0-cdh6.3.2</hadoop.version>
<mysql.connector.version>5.1.47</mysql.connector.version>
<flink.mysql.cdc.version>2.0.2</flink.mysql.cdc.version>
<fastjson.version>1.2.68</fastjson.version>
<lombok.version>1.18.12</lombok.version>
<hive.version>2.1.1-cdh6.3.2</hive.version>
</properties>
<dependencies>
<dependency>
<groupId>org.apache.flink</groupId>
<artifactId>flink-clients_${scala.binary.version}</artifactId>
<version>${flink.version}</version>
<exclusions>
<exclusion>
<artifactId>commons-io</artifactId>
<groupId>commons-io</groupId>
</exclusion>
</exclusions>
</dependency>
<dependency>
<groupId>org.apache.flink</groupId>
<artifactId>flink-json</artifactId>
<version>${flink.version}</version>
<!-- <scope>provided</scope>-->
</dependency>
<dependency>
<groupId>org.apache.flink</groupId>
<artifactId>flink-table-planner-blink_${scala.binary.version}</artifactId>
<version>${flink.version}</version>
<!-- <scope>provided</scope>-->
<exclusions>
</exclusions>
</dependency>
<dependency>
<groupId>org.apache.flink</groupId>
<artifactId>flink-connector-kafka_${scala.binary.version}</artifactId>
<version>${flink.version}</version>
</dependency>
<dependency>
<groupId>org.apache.flink</groupId>
<artifactId>flink-connector-hive_${scala.binary.version}</artifactId>
<version>${flink.version}</version>
<!-- <scope>provided</scope>-->
</dependency>
<dependency>
<groupId>org.apache.hive</groupId>
<artifactId>hive-exec</artifactId>
<version>${hive.version}</version>
<exclusions>
<exclusion>
<artifactId>hive-shims</artifactId>
<groupId>org.apache.hive</groupId>
</exclusion>
</exclusions>
</dependency>
<dependency>
<groupId>org.antlr</groupId>
<artifactId>antlr-runtime</artifactId>
<version>3.5.2</version>
</dependency>
<!-- https://mvnrepository.com/artifact/commons-io/commons-io -->
<dependency>
<groupId>commons-io</groupId>
<artifactId>commons-io</artifactId>
<version>2.7</version>
</dependency>
<dependency>
<groupId>org.apache.flink</groupId>
<artifactId>flink-parquet_2.11</artifactId>
<version>${flink.version}</version>
</dependency>
<dependency>
<groupId>org.apache.flink</groupId>
<artifactId>flink-avro</artifactId>
<version>${flink.version}</version>
</dependency>
<dependency>
<groupId>org.apache.flink</groupId>
<artifactId>flink-orc_2.11</artifactId>
<version>${flink.version}</version>
</dependency>
<dependency>
<groupId>org.apache.hadoop</groupId>
<artifactId>hadoop-client</artifactId>
<version>${hadoop.version}</version>
</dependency>
</dependencies>
<build>
<plugins>
<plugin>
<groupId>org.apache.maven.plugins</groupId>
<artifactId>maven-compiler-plugin</artifactId>
<version>3.8.0</version>
<configuration>
<source>1.8</source>
<target>1.8</target>
</configuration>
</plugin>
<plugin>
<artifactId>maven-assembly-plugin</artifactId>
<configuration>
<appendAssemblyId>false</appendAssemblyId>
<descriptorRefs>
<descriptorRef>jar-with-dependencies</descriptorRef>
</descriptorRefs>
<archive>
<manifest>
<!-- main class -->
<mainClass>xx</mainClass>
</manifest>
</archive>
</configuration>
<executions>
<execution>
<id>make-assembly</id>
<phase>package</phase>
<goals>
<goal>assembly</goal>
</goals>
</execution>
</executions>
</plugin>
</plugins>
</build>{code}
> flink-sql-connector-hive incompatible with cdh6
> -----------------------------------------------
>
> Key: FLINK-19957
> URL: https://issues.apache.org/jira/browse/FLINK-19957
> Project: Flink
> Issue Type: Bug
> Components: Connectors / Hive
> Affects Versions: 1.11.2
> Environment: Flink 1.11.2
> Hadoop 3.0.0-cdh6.3.1
> Hive 2.1.1-cdh6.3.1
> Reporter: Cheng Pan
> Priority: Minor
> Labels: auto-deprioritized-critical, auto-deprioritized-major
>
> According to Flink docs, we should use flink-sql-connector-hive-2.2.0 (which
> should be compatible with Hive 2.0.0 - 2.2.0); actually, we got an exception:
> Unrecognized Hadoop major version number: 3.0.0-cdh6.3.1;
> If we use flink-sql-connector-hive-2.3.6 (which should be compatible with 2.3.0 -
> 2.3.6), we encounter another exception: org.apache.thrift.TApplicationException:
> Invalid method name: 'get_table_req'
> If we copy flink-connector-hive_2.11-1.11.2.jar and hive-exec-2.1.1-cdh6.3.1.jar
> to flink/lib, it still does not work. Caused by: java.lang.ClassNotFoundException:
> com.facebook.fb303.FacebookService$Iface
--
This message was sent by Atlassian Jira
(v8.20.1#820001)