[ 
https://issues.apache.org/jira/browse/FLINK-28903?page=com.atlassian.jira.plugin.system.issuetabpanels:comment-tabpanel&focusedCommentId=17578291#comment-17578291
 ] 

Nicholas Jiang commented on FLINK-28903:
----------------------------------------

[~lzljs3620320], when the value of hive.metastore.use.SSL or 
hive.metastore.sasl.enabled is set to true, the above exception occurs for 
Hive 2.x, including Hive 2.3.

> flink-table-store-hive-catalog could not shade hive-shims-0.23
> --------------------------------------------------------------
>
>                 Key: FLINK-28903
>                 URL: https://issues.apache.org/jira/browse/FLINK-28903
>             Project: Flink
>          Issue Type: Bug
>          Components: Table Store
>    Affects Versions: table-store-0.3.0
>            Reporter: Nicholas Jiang
>            Priority: Major
>              Labels: pull-request-available
>             Fix For: table-store-0.3.0
>
>
> flink-table-store-hive-catalog could not shade hive-shims-0.23 because 
> artifactSet doesn't include hive-shims-0.23 and the minimizeJar is set to 
> true. The exception is as follows:
> {code:java}
> Caused by: java.lang.RuntimeException: Unable to instantiate 
> org.apache.flink.table.store.shaded.org.apache.hadoop.hive.metastore.HiveMetaStoreClient
>     at 
> org.apache.flink.table.store.shaded.org.apache.hadoop.hive.metastore.MetaStoreUtils.newInstance(MetaStoreUtils.java:1708)
>  ~[flink-table-store-dist-0.3-SNAPSHOT.jar:0.3-SNAPSHOT]
>     at 
> org.apache.flink.table.store.shaded.org.apache.hadoop.hive.metastore.RetryingMetaStoreClient.<init>(RetryingMetaStoreClient.java:83)
>  ~[flink-table-store-dist-0.3-SNAPSHOT.jar:0.3-SNAPSHOT]
>     at 
> org.apache.flink.table.store.shaded.org.apache.hadoop.hive.metastore.RetryingMetaStoreClient.getProxy(RetryingMetaStoreClient.java:133)
>  ~[flink-table-store-dist-0.3-SNAPSHOT.jar:0.3-SNAPSHOT]
>     at 
> org.apache.flink.table.store.shaded.org.apache.hadoop.hive.metastore.RetryingMetaStoreClient.getProxy(RetryingMetaStoreClient.java:104)
>  ~[flink-table-store-dist-0.3-SNAPSHOT.jar:0.3-SNAPSHOT]
>     at 
> org.apache.flink.table.store.shaded.org.apache.hadoop.hive.metastore.RetryingMetaStoreClient.getProxy(RetryingMetaStoreClient.java:97)
>  ~[flink-table-store-dist-0.3-SNAPSHOT.jar:0.3-SNAPSHOT]
>     at 
> org.apache.flink.table.store.hive.HiveCatalog.createClient(HiveCatalog.java:380)
>  ~[flink-table-store-dist-0.3-SNAPSHOT.jar:0.3-SNAPSHOT]
>     at 
> org.apache.flink.table.store.hive.HiveCatalog.<init>(HiveCatalog.java:80) 
> ~[flink-table-store-dist-0.3-SNAPSHOT.jar:0.3-SNAPSHOT]
>     at 
> org.apache.flink.table.store.hive.HiveCatalogFactory.create(HiveCatalogFactory.java:51)
>  ~[flink-table-store-dist-0.3-SNAPSHOT.jar:0.3-SNAPSHOT]
>     at 
> org.apache.flink.table.store.file.catalog.CatalogFactory.createCatalog(CatalogFactory.java:93)
>  ~[flink-table-store-dist-0.3-SNAPSHOT.jar:0.3-SNAPSHOT]
>     at 
> org.apache.flink.table.store.connector.FlinkCatalogFactory.createCatalog(FlinkCatalogFactory.java:62)
>  ~[flink-table-store-dist-0.3-SNAPSHOT.jar:0.3-SNAPSHOT]
>     at 
> org.apache.flink.table.store.connector.FlinkCatalogFactory.createCatalog(FlinkCatalogFactory.java:57)
>  ~[flink-table-store-dist-0.3-SNAPSHOT.jar:0.3-SNAPSHOT]
>     at 
> org.apache.flink.table.store.connector.FlinkCatalogFactory.createCatalog(FlinkCatalogFactory.java:31)
>  ~[flink-table-store-dist-0.3-SNAPSHOT.jar:0.3-SNAPSHOT]
>     at 
> org.apache.flink.table.factories.FactoryUtil.createCatalog(FactoryUtil.java:428)
>  ~[flink-table-api-java-uber-1.15.1.jar:1.15.1]
>     at 
> org.apache.flink.table.api.internal.TableEnvironmentImpl.createCatalog(TableEnvironmentImpl.java:1356)
>  ~[flink-table-api-java-uber-1.15.1.jar:1.15.1]
>     at 
> org.apache.flink.table.api.internal.TableEnvironmentImpl.executeInternal(TableEnvironmentImpl.java:1111)
>  ~[flink-table-api-java-uber-1.15.1.jar:1.15.1]
>     at 
> org.apache.flink.table.client.gateway.local.LocalExecutor.lambda$executeOperation$3(LocalExecutor.java:209)
>  ~[flink-sql-client-1.15.1.jar:1.15.1]
>     at 
> org.apache.flink.table.client.gateway.context.ExecutionContext.wrapClassLoader(ExecutionContext.java:88)
>  ~[flink-sql-client-1.15.1.jar:1.15.1]
>     at 
> org.apache.flink.table.client.gateway.local.LocalExecutor.executeOperation(LocalExecutor.java:209)
>  ~[flink-sql-client-1.15.1.jar:1.15.1]
>     ... 10 more
> Caused by: java.lang.reflect.InvocationTargetException
>     at sun.reflect.NativeConstructorAccessorImpl.newInstance0(Native Method) 
> ~[?:1.8.0_181]
>     at 
> sun.reflect.NativeConstructorAccessorImpl.newInstance(NativeConstructorAccessorImpl.java:62)
>  ~[?:1.8.0_181]
>     at 
> sun.reflect.DelegatingConstructorAccessorImpl.newInstance(DelegatingConstructorAccessorImpl.java:45)
>  ~[?:1.8.0_181]
>     at java.lang.reflect.Constructor.newInstance(Constructor.java:423) 
> ~[?:1.8.0_181]
>     at 
> org.apache.flink.table.store.shaded.org.apache.hadoop.hive.metastore.MetaStoreUtils.newInstance(MetaStoreUtils.java:1706)
>  ~[flink-table-store-dist-0.3-SNAPSHOT.jar:0.3-SNAPSHOT]
>     at 
> org.apache.flink.table.store.shaded.org.apache.hadoop.hive.metastore.RetryingMetaStoreClient.<init>(RetryingMetaStoreClient.java:83)
>  ~[flink-table-store-dist-0.3-SNAPSHOT.jar:0.3-SNAPSHOT]
>     at 
> org.apache.flink.table.store.shaded.org.apache.hadoop.hive.metastore.RetryingMetaStoreClient.getProxy(RetryingMetaStoreClient.java:133)
>  ~[flink-table-store-dist-0.3-SNAPSHOT.jar:0.3-SNAPSHOT]
>     at 
> org.apache.flink.table.store.shaded.org.apache.hadoop.hive.metastore.RetryingMetaStoreClient.getProxy(RetryingMetaStoreClient.java:104)
>  ~[flink-table-store-dist-0.3-SNAPSHOT.jar:0.3-SNAPSHOT]
>     at 
> org.apache.flink.table.store.shaded.org.apache.hadoop.hive.metastore.RetryingMetaStoreClient.getProxy(RetryingMetaStoreClient.java:97)
>  ~[flink-table-store-dist-0.3-SNAPSHOT.jar:0.3-SNAPSHOT]
>     at 
> org.apache.flink.table.store.hive.HiveCatalog.createClient(HiveCatalog.java:380)
>  ~[flink-table-store-dist-0.3-SNAPSHOT.jar:0.3-SNAPSHOT]
>     at 
> org.apache.flink.table.store.hive.HiveCatalog.<init>(HiveCatalog.java:80) 
> ~[flink-table-store-dist-0.3-SNAPSHOT.jar:0.3-SNAPSHOT]
>     at 
> org.apache.flink.table.store.hive.HiveCatalogFactory.create(HiveCatalogFactory.java:51)
>  ~[flink-table-store-dist-0.3-SNAPSHOT.jar:0.3-SNAPSHOT]
>     at 
> org.apache.flink.table.store.file.catalog.CatalogFactory.createCatalog(CatalogFactory.java:93)
>  ~[flink-table-store-dist-0.3-SNAPSHOT.jar:0.3-SNAPSHOT]
>     at 
> org.apache.flink.table.store.connector.FlinkCatalogFactory.createCatalog(FlinkCatalogFactory.java:62)
>  ~[flink-table-store-dist-0.3-SNAPSHOT.jar:0.3-SNAPSHOT]
>     at 
> org.apache.flink.table.store.connector.FlinkCatalogFactory.createCatalog(FlinkCatalogFactory.java:57)
>  ~[flink-table-store-dist-0.3-SNAPSHOT.jar:0.3-SNAPSHOT]
>     at 
> org.apache.flink.table.store.connector.FlinkCatalogFactory.createCatalog(FlinkCatalogFactory.java:31)
>  ~[flink-table-store-dist-0.3-SNAPSHOT.jar:0.3-SNAPSHOT]
>     at 
> org.apache.flink.table.factories.FactoryUtil.createCatalog(FactoryUtil.java:428)
>  ~[flink-table-api-java-uber-1.15.1.jar:1.15.1]
>     at 
> org.apache.flink.table.api.internal.TableEnvironmentImpl.createCatalog(TableEnvironmentImpl.java:1356)
>  ~[flink-table-api-java-uber-1.15.1.jar:1.15.1]
>     at 
> org.apache.flink.table.api.internal.TableEnvironmentImpl.executeInternal(TableEnvironmentImpl.java:1111)
>  ~[flink-table-api-java-uber-1.15.1.jar:1.15.1]
>     at 
> org.apache.flink.table.client.gateway.local.LocalExecutor.lambda$executeOperation$3(LocalExecutor.java:209)
>  ~[flink-sql-client-1.15.1.jar:1.15.1]
>     at 
> org.apache.flink.table.client.gateway.context.ExecutionContext.wrapClassLoader(ExecutionContext.java:88)
>  ~[flink-sql-client-1.15.1.jar:1.15.1]
>     at 
> org.apache.flink.table.client.gateway.local.LocalExecutor.executeOperation(LocalExecutor.java:209)
>  ~[flink-sql-client-1.15.1.jar:1.15.1]
>     ... 10 more
> Caused by: java.lang.RuntimeException: Could not load shims in class 
> org.apache.flink.table.store.shaded.org.apache.hadoop.hive.thrift.HadoopThriftAuthBridge23
>     at 
> org.apache.flink.table.store.shaded.org.apache.hadoop.hive.shims.ShimLoader.createShim(ShimLoader.java:132)
>  ~[flink-table-store-dist-0.3-SNAPSHOT.jar:0.3-SNAPSHOT]
>     at 
> org.apache.flink.table.store.shaded.org.apache.hadoop.hive.shims.ShimLoader.loadShims(ShimLoader.java:124)
>  ~[flink-table-store-dist-0.3-SNAPSHOT.jar:0.3-SNAPSHOT]
>     at 
> org.apache.flink.table.store.shaded.org.apache.hadoop.hive.shims.ShimLoader.getHadoopThriftAuthBridge(ShimLoader.java:108)
>  ~[flink-table-store-dist-0.3-SNAPSHOT.jar:0.3-SNAPSHOT]
>     at 
> org.apache.flink.table.store.shaded.org.apache.hadoop.hive.metastore.HiveMetaStoreClient.open(HiveMetaStoreClient.java:414)
>  ~[flink-table-store-dist-0.3-SNAPSHOT.jar:0.3-SNAPSHOT]
>     at 
> org.apache.flink.table.store.shaded.org.apache.hadoop.hive.metastore.HiveMetaStoreClient.<init>(HiveMetaStoreClient.java:247)
>  ~[flink-table-store-dist-0.3-SNAPSHOT.jar:0.3-SNAPSHOT]
>     at sun.reflect.NativeConstructorAccessorImpl.newInstance0(Native Method) 
> ~[?:1.8.0_181]
>     at 
> sun.reflect.NativeConstructorAccessorImpl.newInstance(NativeConstructorAccessorImpl.java:62)
>  ~[?:1.8.0_181]
>     at 
> sun.reflect.DelegatingConstructorAccessorImpl.newInstance(DelegatingConstructorAccessorImpl.java:45)
>  ~[?:1.8.0_181]
>     at java.lang.reflect.Constructor.newInstance(Constructor.java:423) 
> ~[?:1.8.0_181]
>     at 
> org.apache.flink.table.store.shaded.org.apache.hadoop.hive.metastore.MetaStoreUtils.newInstance(MetaStoreUtils.java:1706)
>  ~[flink-table-store-dist-0.3-SNAPSHOT.jar:0.3-SNAPSHOT]
>     at 
> org.apache.flink.table.store.shaded.org.apache.hadoop.hive.metastore.RetryingMetaStoreClient.<init>(RetryingMetaStoreClient.java:83)
>  ~[flink-table-store-dist-0.3-SNAPSHOT.jar:0.3-SNAPSHOT]
>     at 
> org.apache.flink.table.store.shaded.org.apache.hadoop.hive.metastore.RetryingMetaStoreClient.getProxy(RetryingMetaStoreClient.java:133)
>  ~[flink-table-store-dist-0.3-SNAPSHOT.jar:0.3-SNAPSHOT]
>     at 
> org.apache.flink.table.store.shaded.org.apache.hadoop.hive.metastore.RetryingMetaStoreClient.getProxy(RetryingMetaStoreClient.java:104)
>  ~[flink-table-store-dist-0.3-SNAPSHOT.jar:0.3-SNAPSHOT]
>     at 
> org.apache.flink.table.store.shaded.org.apache.hadoop.hive.metastore.RetryingMetaStoreClient.getProxy(RetryingMetaStoreClient.java:97)
>  ~[flink-table-store-dist-0.3-SNAPSHOT.jar:0.3-SNAPSHOT]
>     at 
> org.apache.flink.table.store.hive.HiveCatalog.createClient(HiveCatalog.java:380)
>  ~[flink-table-store-dist-0.3-SNAPSHOT.jar:0.3-SNAPSHOT]
>     at 
> org.apache.flink.table.store.hive.HiveCatalog.<init>(HiveCatalog.java:80) 
> ~[flink-table-store-dist-0.3-SNAPSHOT.jar:0.3-SNAPSHOT]
>     at 
> org.apache.flink.table.store.hive.HiveCatalogFactory.create(HiveCatalogFactory.java:51)
>  ~[flink-table-store-dist-0.3-SNAPSHOT.jar:0.3-SNAPSHOT]
>     at 
> org.apache.flink.table.store.file.catalog.CatalogFactory.createCatalog(CatalogFactory.java:93)
>  ~[flink-table-store-dist-0.3-SNAPSHOT.jar:0.3-SNAPSHOT]
>     at 
> org.apache.flink.table.store.connector.FlinkCatalogFactory.createCatalog(FlinkCatalogFactory.java:62)
>  ~[flink-table-store-dist-0.3-SNAPSHOT.jar:0.3-SNAPSHOT]
>     at 
> org.apache.flink.table.store.connector.FlinkCatalogFactory.createCatalog(FlinkCatalogFactory.java:57)
>  ~[flink-table-store-dist-0.3-SNAPSHOT.jar:0.3-SNAPSHOT]
>     at 
> org.apache.flink.table.store.connector.FlinkCatalogFactory.createCatalog(FlinkCatalogFactory.java:31)
>  ~[flink-table-store-dist-0.3-SNAPSHOT.jar:0.3-SNAPSHOT]
>     at 
> org.apache.flink.table.factories.FactoryUtil.createCatalog(FactoryUtil.java:428)
>  ~[flink-table-api-java-uber-1.15.1.jar:1.15.1]
>     at 
> org.apache.flink.table.api.internal.TableEnvironmentImpl.createCatalog(TableEnvironmentImpl.java:1356)
>  ~[flink-table-api-java-uber-1.15.1.jar:1.15.1]
>     at 
> org.apache.flink.table.api.internal.TableEnvironmentImpl.executeInternal(TableEnvironmentImpl.java:1111)
>  ~[flink-table-api-java-uber-1.15.1.jar:1.15.1]
>     at 
> org.apache.flink.table.client.gateway.local.LocalExecutor.lambda$executeOperation$3(LocalExecutor.java:209)
>  ~[flink-sql-client-1.15.1.jar:1.15.1]
>     at 
> org.apache.flink.table.client.gateway.context.ExecutionContext.wrapClassLoader(ExecutionContext.java:88)
>  ~[flink-sql-client-1.15.1.jar:1.15.1]
>     at 
> org.apache.flink.table.client.gateway.local.LocalExecutor.executeOperation(LocalExecutor.java:209)
>  ~[flink-sql-client-1.15.1.jar:1.15.1]
>     ... 10 more
> Caused by: java.lang.ClassNotFoundException: 
> org.apache.flink.table.store.shaded.org.apache.hadoop.hive.thrift.HadoopThriftAuthBridge23
>     at java.net.URLClassLoader.findClass(URLClassLoader.java:381) 
> ~[?:1.8.0_181]
>     at java.lang.ClassLoader.loadClass(ClassLoader.java:424) ~[?:1.8.0_181]
>     at sun.misc.Launcher$AppClassLoader.loadClass(Launcher.java:349) 
> ~[?:1.8.0_181]
>     at java.lang.ClassLoader.loadClass(ClassLoader.java:357) ~[?:1.8.0_181]
>     at java.lang.Class.forName0(Native Method) ~[?:1.8.0_181]
>     at java.lang.Class.forName(Class.java:264) ~[?:1.8.0_181]
>     at 
> org.apache.flink.table.store.shaded.org.apache.hadoop.hive.shims.ShimLoader.createShim(ShimLoader.java:129)
>  ~[flink-table-store-dist-0.3-SNAPSHOT.jar:0.3-SNAPSHOT]
>     at 
> org.apache.flink.table.store.shaded.org.apache.hadoop.hive.shims.ShimLoader.loadShims(ShimLoader.java:124)
>  ~[flink-table-store-dist-0.3-SNAPSHOT.jar:0.3-SNAPSHOT]
>     at 
> org.apache.flink.table.store.shaded.org.apache.hadoop.hive.shims.ShimLoader.getHadoopThriftAuthBridge(ShimLoader.java:108)
>  ~[flink-table-store-dist-0.3-SNAPSHOT.jar:0.3-SNAPSHOT]
>     at 
> org.apache.flink.table.store.shaded.org.apache.hadoop.hive.metastore.HiveMetaStoreClient.open(HiveMetaStoreClient.java:414)
>  ~[flink-table-store-dist-0.3-SNAPSHOT.jar:0.3-SNAPSHOT]
>     at 
> org.apache.flink.table.store.shaded.org.apache.hadoop.hive.metastore.HiveMetaStoreClient.<init>(HiveMetaStoreClient.java:247)
>  ~[flink-table-store-dist-0.3-SNAPSHOT.jar:0.3-SNAPSHOT]
>     at sun.reflect.NativeConstructorAccessorImpl.newInstance0(Native Method) 
> ~[?:1.8.0_181]
>     at 
> sun.reflect.NativeConstructorAccessorImpl.newInstance(NativeConstructorAccessorImpl.java:62)
>  ~[?:1.8.0_181]
>     at 
> sun.reflect.DelegatingConstructorAccessorImpl.newInstance(DelegatingConstructorAccessorImpl.java:45)
>  ~[?:1.8.0_181]
>     at java.lang.reflect.Constructor.newInstance(Constructor.java:423) 
> ~[?:1.8.0_181]
>     at 
> org.apache.flink.table.store.shaded.org.apache.hadoop.hive.metastore.MetaStoreUtils.newInstance(MetaStoreUtils.java:1706)
>  ~[flink-table-store-dist-0.3-SNAPSHOT.jar:0.3-SNAPSHOT]
>     at 
> org.apache.flink.table.store.shaded.org.apache.hadoop.hive.metastore.RetryingMetaStoreClient.<init>(RetryingMetaStoreClient.java:83)
>  ~[flink-table-store-dist-0.3-SNAPSHOT.jar:0.3-SNAPSHOT]
>     at 
> org.apache.flink.table.store.shaded.org.apache.hadoop.hive.metastore.RetryingMetaStoreClient.getProxy(RetryingMetaStoreClient.java:133)
>  ~[flink-table-store-dist-0.3-SNAPSHOT.jar:0.3-SNAPSHOT]
>     at 
> org.apache.flink.table.store.shaded.org.apache.hadoop.hive.metastore.RetryingMetaStoreClient.getProxy(RetryingMetaStoreClient.java:104)
>  ~[flink-table-store-dist-0.3-SNAPSHOT.jar:0.3-SNAPSHOT]
>     at 
> org.apache.flink.table.store.shaded.org.apache.hadoop.hive.metastore.RetryingMetaStoreClient.getProxy(RetryingMetaStoreClient.java:97)
>  ~[flink-table-store-dist-0.3-SNAPSHOT.jar:0.3-SNAPSHOT]
>     at 
> org.apache.flink.table.store.hive.HiveCatalog.createClient(HiveCatalog.java:380)
>  ~[flink-table-store-dist-0.3-SNAPSHOT.jar:0.3-SNAPSHOT]
>     at 
> org.apache.flink.table.store.hive.HiveCatalog.<init>(HiveCatalog.java:80) 
> ~[flink-table-store-dist-0.3-SNAPSHOT.jar:0.3-SNAPSHOT]
>     at 
> org.apache.flink.table.store.hive.HiveCatalogFactory.create(HiveCatalogFactory.java:51)
>  ~[flink-table-store-dist-0.3-SNAPSHOT.jar:0.3-SNAPSHOT]
>     at 
> org.apache.flink.table.store.file.catalog.CatalogFactory.createCatalog(CatalogFactory.java:93)
>  ~[flink-table-store-dist-0.3-SNAPSHOT.jar:0.3-SNAPSHOT]
>     at 
> org.apache.flink.table.store.connector.FlinkCatalogFactory.createCatalog(FlinkCatalogFactory.java:62)
>  ~[flink-table-store-dist-0.3-SNAPSHOT.jar:0.3-SNAPSHOT]
>     at 
> org.apache.flink.table.store.connector.FlinkCatalogFactory.createCatalog(FlinkCatalogFactory.java:57)
>  ~[flink-table-store-dist-0.3-SNAPSHOT.jar:0.3-SNAPSHOT]
>     at 
> org.apache.flink.table.store.connector.FlinkCatalogFactory.createCatalog(FlinkCatalogFactory.java:31)
>  ~[flink-table-store-dist-0.3-SNAPSHOT.jar:0.3-SNAPSHOT]
>     at 
> org.apache.flink.table.factories.FactoryUtil.createCatalog(FactoryUtil.java:428)
>  ~[flink-table-api-java-uber-1.15.1.jar:1.15.1]
>     at 
> org.apache.flink.table.api.internal.TableEnvironmentImpl.createCatalog(TableEnvironmentImpl.java:1356)
>  ~[flink-table-api-java-uber-1.15.1.jar:1.15.1]
>     at 
> org.apache.flink.table.api.internal.TableEnvironmentImpl.executeInternal(TableEnvironmentImpl.java:1111)
>  ~[flink-table-api-java-uber-1.15.1.jar:1.15.1]
>     at 
> org.apache.flink.table.client.gateway.local.LocalExecutor.lambda$executeOperation$3(LocalExecutor.java:209)
>  ~[flink-sql-client-1.15.1.jar:1.15.1]
>     at 
> org.apache.flink.table.client.gateway.context.ExecutionContext.wrapClassLoader(ExecutionContext.java:88)
>  ~[flink-sql-client-1.15.1.jar:1.15.1]
>     at 
> org.apache.flink.table.client.gateway.local.LocalExecutor.executeOperation(LocalExecutor.java:209)
>  ~[flink-sql-client-1.15.1.jar:1.15.1]
>     ... 10 more {code}



--
This message was sent by Atlassian Jira
(v8.20.10#820010)

Reply via email to