[
https://issues.apache.org/jira/browse/HIVE-28241?page=com.atlassian.jira.plugin.system.issuetabpanels:comment-tabpanel&focusedCommentId=17842834#comment-17842834
]
manoj kumar edited comment on HIVE-28241 at 5/3/24 1:57 PM:
------------------------------------------------------------
hive-site.xml
---------------
<?xml version="1.0" encoding="UTF-8"?>
<?xml-stylesheet type="text/xsl" href="configuration.xsl"?>
<!--
Licensed under the Apache License, Version 2.0 (the "License");
you may not use this file except in compliance with the License.
You may obtain a copy of the License at
http://www.apache.org/licenses/LICENSE-2.0
Unless required by applicable law or agreed to in writing, software
distributed under the License is distributed on an "AS IS" BASIS,
WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
See the License for the specific language governing permissions and
limitations under the License. See accompanying LICENSE file.
-->
<!-- Put site-specific property overrides in this file. -->
<configuration>
<property>
<name>system:java.io.tmpdir</name>
<value>/tmp/hive/java</value>
</property>
<property>
<name>system:user.name</name>
<value>${user.name}</value>
</property>
<property>
<name>hive.exec.scratchdir</name>
<value>/tmp/hive</value>
</property>
<property>
<name>hive.querylog.location</name>
<value>/tmp/hive</value>
</property>
<property>
<name>hive.downloaded.resources.dir</name>
<value>/tmp/hive</value>
</property>
<property>
<name>hive.exec.local.scratchdir</name>
<value>/tmp/hive/${system:user.name}</value>
</property>
<property>
<name>hive.downloaded.resources.dir</name>
<value>/tmp/${user.name}_resources</value>
</property>
<property>
<name>hive.zookeeper.quorum</name>
<value>mk1.example.com:2181,mk3.example.com:2181,mk5.example.com:2181</value>
</property>
<property>
<name>hive.zookeeper.client.port</name>
<value>2181</value>
</property>
<property>
<name>hive.zookeeper.namespace</name>
<value>hive_zookeeper_namespace</value>
</property>
<property>
<name>datanucleus.autoCreateSchema</name>
<value>false</value>
</property>
<property>
<name>datanucleus.cache.level2.type</name>
<value>none</value>
</property>
<property>
<name>datanucleus.fixedDatastore</name>
<value>true</value>
</property>
<property>
<name>hive.server2.thrift.port</name>
<value>10000</value>
</property>
<property>
<name>javax.jdo.option.ConnectionDriverName</name>
<value>com.mysql.cj.jdbc.Driver</value>
</property>
<property>
<name>javax.jdo.option.ConnectionURL</name>
<value>jdbc:mysql://mk5.example.com/hive</value>
</property>
<property>
<name>javax.jdo.option.ConnectionUserName</name>
<value>hive</value>
</property>
<property>
<name>javax.jdo.option.ConnectionPassword</name>
<value>hive@123</value>
</property>
<property>
<name>hive.metastore.try.direct.sql.ddl</name>
<value>true</value>
</property>
<property>
<name>hive.metastore.try.direct.sql</name>
<value>true</value>
</property>
<property>
<name>hive.metastore.warehouse.dir</name>
<value>/user/hive/warehouse</value>
</property>
<property>
<name>hive.metastore.uris</name>
<value>thrift://mk5.example.com:9083,thrift://mk1.example.com:9083,thrift://mk3.example.com:9083,thrift://mk4.example.com:9083</value>
</property>
<property>
<name>hive.metastore.schema.verification</name>
<value>false</value>
</property>
<property>
<name>datanucleus.autoStartMechanism</name>
<value>SchemaTable</value>
</property>
<property>
<name>datanucleus.schema.autoCreateAll</name>
<value>false</value>
</property>
<property>
<name>hive.server2.logging.operation.enabled</name>
<value>true</value>
</property>
<property>
<name>hive.server2.logging.operation.log.location</name>
<value>/app/hadoop/logs/hive</value>
</property>
<property>
<name>hive.metastore.server.min.threads</name>
<value>200</value>
</property>
<property>
<name>hive.metastore.server.max.threads</name>
<value>100000</value>
</property>
<property>
<name>datanucleus.connectionPool.maxPoolSize</name>
<value>50</value>
</property>
<property>
<name>hive.auto.convert.join</name>
<value>true</value>
</property>
<property>
<name>hive.auto.convert.join.noconditionaltask</name>
<value>true</value>
</property>
<property>
<name>hive.auto.convert.join.noconditionaltask.size</name>
<value>1145324612</value>
</property>
<property>
<name>hive.auto.convert.sortmerge.join</name>
<value>true</value>
</property>
<property>
<name>hive.auto.convert.sortmerge.join.to.mapjoin</name>
<value>false</value>
</property>
<property>
<name>hive.cbo.enable</name>
<value>true</value>
</property>
<property>
<name>hive.cli.print.header</name>
<value>false</value>
</property>
<property>
<name>hive.cluster.delegation.token.store.class</name>
<value>org.apache.hadoop.hive.thrift.MemoryTokenStore</value>
</property>
<property>
<name>hive.cluster.delegation.token.store.zookeeper.connectString</name>
<value>mk1.example.com:2181,mk3.example.com:2181,mk5.example.com:2181</value>
</property>
<property>
<name>hive.cluster.delegation.token.store.zookeeper.znode</name>
<value>/hive/cluster/delegation/</value>
</property>
<property>
<name>hive.compactor.abortedtxn.threshold</name>
<value>1000</value>
</property>
<property>
<name>hive.compactor.check.interval</name>
<value>300s</value>
</property>
<property>
<name>hive.server2.thrift.min.worker.threads</name>
<value>5</value>
</property>
<property>
<name>hive.conf.restricted.list</name>
<value>hive.security.authenticator.manager,hive.security.authorization.manager,hive.users.in.admin.role</value>
</property>
<property>
<name>hive.security.authenticator.manager</name>
<value>org.apache.hadoop.hive.ql.security.ProxyUserAuthenticator</value>
</property>
<property>
<name>hive.default.fileformat</name>
<value>TextFile</value>
</property>
<property>
<name>hive.enforce.bucketing</name>
<value>true</value>
</property>
<property>
<name>hive.exec.dynamic.partition</name>
<value>true</value>
</property>
<property>
<name>hive.exec.dynamic.partition.mode</name>
<value>strict</value>
</property>
<property>
<name>hive.exec.failure.hooks</name>
<value>org.apache.hadoop.hive.ql.hooks.ATSHook</value>
</property>
<property>
<name>hive.metastore.pre.event.listeners</name>
<value>org.apache.hadoop.hive.ql.security.authorization.AuthorizationPreEventListener</value>
</property>
<property>
<name>hive.security.authorization.manager</name>
<value>org.apache.hadoop.hive.ql.security.authorization.plugin.sqlstd.SQLStdConfOnlyAuthorizerFactory</value>
</property>
<property>
<name>hive.security.authorization.sqlstd.confwhitelist.append</name>
<value>mapred\.max\.split\.size|tez\.grouping\.min-size|tez\.grouping\.max-size|hive\.optimize\.ppd|hive\.optimize\.ppd\.storage|hive\.vect
orized\.execution\.enabled|hive\.vectorized\.execution\.reduce\.enabled|hive\.vectorized\.execution\.reduce\.groupby\.enabled|hive\.cbo\.enable
was (Author: JIRAUSER305274):
hive-site.xml
---------------
<?xml version="1.0" encoding="UTF-8"?>
<?xml-stylesheet type="text/xsl" href="configuration.xsl"?>
<!--
Licensed under the Apache License, Version 2.0 (the "License");
you may not use this file except in compliance with the License.
You may obtain a copy of the License at
http://www.apache.org/licenses/LICENSE-2.0
Unless required by applicable law or agreed to in writing, software
distributed under the License is distributed on an "AS IS" BASIS,
WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
See the License for the specific language governing permissions and
limitations under the License. See accompanying LICENSE file.
-->
<!-- Put site-specific property overrides in this file. -->
<configuration>
<property>
<name>system:java.io.tmpdir</name>
<value>/tmp/hive/java</value>
</property>
<property>
<name>system:user.name</name>
<value>${user.name}</value>
</property>
<property>
<name>hive.exec.scratchdir</name>
<value>/tmp/hive</value>
</property>
<property>
<name>hive.querylog.location</name>
<value>/tmp/hive</value>
</property>
<property>
<name>hive.downloaded.resources.dir</name>
<value>/tmp/hive</value>
</property>
<property>
<name>hive.exec.local.scratchdir</name>
<value>/tmp/hive/${system:user.name}</value>
</property>
<property>
<name>hive.downloaded.resources.dir</name>
<value>/tmp/${user.name}_resources</value>
</property>
<property>
<name>hive.zookeeper.quorum</name>
<value>mk1.example.com:2181,mk3.example.com:2181,mk5.example.com:2181</value>
</property>
<property>
<name>hive.zookeeper.client.port</name>
<value>2181</value>
</property>
<property>
<name>hive.zookeeper.namespace</name>
<value>hive_zookeeper_namespace</value>
</property>
<property>
<name>datanucleus.autoCreateSchema</name>
<value>false</value>
</property>
<property>
<name>datanucleus.cache.level2.type</name>
<value>none</value>
</property>
<property>
<name>datanucleus.fixedDatastore</name>
<value>true</value>
</property>
<property>
<name>hive.server2.thrift.port</name>
<value>10000</value>
</property>
<property>
<name>javax.jdo.option.ConnectionDriverName</name>
<value>com.mysql.cj.jdbc.Driver</value>
</property>
<property>
<name>javax.jdo.option.ConnectionURL</name>
<value>jdbc:mysql://mk5.example.com/hive</value>
</property>
<property>
<name>javax.jdo.option.ConnectionUserName</name>
<value>hive</value>
</property>
<property>
<name>javax.jdo.option.ConnectionPassword</name>
<value>hive@123</value>
</property>
<property>
<name>hive.metastore.try.direct.sql.ddl</name>
<value>true</value>
</property>
<property>
<name>hive.metastore.try.direct.sql</name>
<value>true</value>
</property>
<property>
<name>hive.metastore.warehouse.dir</name>
<value>/user/hive/warehouse</value>
</property>
<property>
<name>hive.metastore.uris</name>
<value>thrift://mk5.example.com:9083,thrift://mk1.example.com:9083,thrift://mk3.example.com:9083,thrift://mk4.example.com:9083</value>
</property>
<property>
<name>hive.metastore.schema.verification</name>
<value>false</value>
</property>
<property>
<name>datanucleus.autoStartMechanism</name>
<value>SchemaTable</value>
</property>
<property>
<name>datanucleus.schema.autoCreateAll</name>
<value>false</value>
</property>
<property>
<name>hive.server2.logging.operation.enabled</name>
<value>true</value>
</property>
<property>
<name>hive.server2.logging.operation.log.location</name>
<value>/app/hadoop/logs/hive</value>
</property>
<property>
<name>hive.metastore.server.min.threads</name>
<value>200</value>
</property>
<property>
<name>hive.metastore.server.max.threads</name>
<value>100000</value>
</property>
<property>
<name>datanucleus.connectionPool.maxPoolSize</name>
<value>50</value>
</property>
<property>
<name>hive.auto.convert.join</name>
<value>true</value>
</property>
<property>
<name>hive.auto.convert.join.noconditionaltask</name>
<value>true</value>
</property>
<property>
<name>hive.auto.convert.join.noconditionaltask.size</name>
<value>1145324612</value>
</property>
<property>
<name>hive.auto.convert.sortmerge.join</name>
<value>true</value>
</property>
<property>
<name>hive.auto.convert.sortmerge.join.to.mapjoin</name>
<value>false</value>
</property>
<property>
<name>hive.cbo.enable</name>
<value>true</value>
</property>
<property>
<name>hive.cli.print.header</name>
<value>false</value>
</property>
<property>
<name>hive.cluster.delegation.token.store.class</name>
<value>org.apache.hadoop.hive.thrift.MemoryTokenStore</value>
</property>
<property>
<name>hive.cluster.delegation.token.store.zookeeper.connectString</name>
<value>mk1.example.com:2181,mk3.example.com:2181,mk5.example.com:2181</value>
</property>
<property>
<name>hive.cluster.delegation.token.store.zookeeper.znode</name>
<value>/hive/cluster/delegation/</value>
</property>
<property>
<name>hive.compactor.abortedtxn.threshold</name>
<value>1000</value>
</property>
<property>
<name>hive.compactor.check.interval</name>
<value>300s</value>
</property>
<property>
<name>hive.server2.thrift.min.worker.threads</name>
<value>5</value>
</property>
<property>
<name>hive.conf.restricted.list</name>
<value>hive.security.authenticator.manager,hive.security.authorization.manager,hive.users.in.admin.role</value>
</property>
<property>
<name>hive.security.authenticator.manager</name>
<value>org.apache.hadoop.hive.ql.security.ProxyUserAuthenticator</value>
</property>
<property>
<name>hive.default.fileformat</name>
<value>TextFile</value>
</property>
<property>
<name>hive.enforce.bucketing</name>
<value>true</value>
</property>
<property>
<name>hive.exec.dynamic.partition</name>
<value>true</value>
</property>
<property>
<name>hive.exec.dynamic.partition.mode</name>
<value>strict</value>
</property>
<property>
<name>hive.exec.failure.hooks</name>
<value>org.apache.hadoop.hive.ql.hooks.ATSHook</value>
</property>
<property>
<name>hive.metastore.pre.event.listeners</name>
<value>org.apache.hadoop.hive.ql.security.authorization.AuthorizationPreEventListener</value>
</property>
<property>
<name>hive.security.authorization.manager</name>
<value>org.apache.hadoop.hive.ql.security.authorization.plugin.sqlstd.SQLStdConfOnlyAuthorizerFactory</value>
</property>
<property>
<name>hive.security.authorization.sqlstd.confwhitelist.append</name>
<value>mapred\.max\.split\.size|tez\.grouping\.min-size|tez\.grouping\.max-size|hive\.optimize\.ppd|hive\.optimize\.ppd\.storage|hive\.vect
orized\.execution\.enabled|hive\.vectorized\.execution\.reduce\.enabled|hive\.vectorized\.execution\.reduce\.groupby\.enabled|hive\.cbo\.enable
|hive\.compute\.query\.using\.stats|hive\.stats\.fetch\.column\.stats|hive\.stats\.fetch\.partition\.stats|hive\.tez\.auto\.reducer\.parallelis
m|hive\.tez\.max\.partition\.factor|hive\.exec\.reducers\.bytes\.per\.reducer|tez\.grouping\.split-waves|hive\.explain\.user|hive\.merge\.tezfi
les|hive\.merge\.smallfiles\.avgsize|hive\.merge\.size\.per\.task|hive\.auto\.convert\.join\.noconditionaltask|hive\.auto\.convert\.join\.nocon
ditionaltask\.size|hive\.exec\.parallel|hive\.exec\.parallel\.thread\.number|hive\.execution\.engine|hive\.support\.concurrency|hive\.exec\.dyn
amic\.partition\.mode|hive\.compactor\.initiator\.on|hive\.compactor\.worker\.threads|tez\.grouping\.split-waves|tez\.grouping\.min-size|tez\.g
rouping\.max-size|mapred\.min\.split\.size|mapred\.max\.split\.size|tez\.grouping\.split-count|hive\.txn\.manager|tez\.container\.max\.java\.he
ap\.fraction|hive\.llap\.io\.memory\.size|hive\.support\.concurrency|hive\.compactor\.initiator\.on|hive\.compactor\.worker\.threads|io\.seqfil
e\.compression\.type|hive\.hadoop\.supports\.splittable\.combineinputformat|tez\.task\.resource\.memory\.mb|tez\.am\.resource\.memory\.mb</valu
e>
</property>
<property>
<name>hive.security.metastore.authenticator.manager</name>
<value>org.apache.hadoop.hive.ql.security.HadoopDefaultMetastoreAuthenticator</value>
</property>
<property>
<name>hive.security.metastore.authorization.auth.reads</name>
<value>true</value>
</property>
<property>
<name>hive.security.metastore.authorization.manager</name>
<value>org.apache.hadoop.hive.ql.security.authorization.StorageBasedAuthorizationProvider</value>
</property>
<property>
<name>hive.server2.allow.user.substitution</name>
<value>true</value>
</property>
<property>
<name>hive.server2.enable.doAs</name>
<value>true</value>
</property>
<property>
<name>hive.server2.logging.operation.enabled</name>
<value>true</value>
</property>
<property>
<name>hive.server2.logging.operation.log.location</name>
<value>/tmp/hive/operation_logs</value>
</property>
<property>
<name>hive.server2.thrift.http.port</name>
<value>10001</value>
</property>
<property>
<name>hive.server2.thrift.http.path</name>
<value>cliservice</value>
</property>
<property>
<name>hive.server2.thrift.max.worker.threads</name>
<value>500</value>
</property>
<property>
<name>hive.server2.thrift.port</name>
<value>10000</value>
</property>
<property>
<name>hive.server2.transport.mode</name>
<value>binary</value>
</property>
<property>
<name>hive.server2.use.SSL</name>
<value>false</value>
</property>
<property>
<name>hive.server2.webui.port</name>
<value>10002</value>
</property>
<property>
<name>hive.server2.zookeeper.namespace</name>
<value>hiveserver2</value>
</property>
<property>
<name>hive.server2.zookeeper.quorum</name>
<value>mk1.example.com:2181,mk3.example.com:2181,mk5.example.com:2181</value>
</property>
<property>
<name>hive.tez.java.opts</name>
<value>-server -Xmx2048m -Djava.net.preferIPv4Stack=true -XX:NewRatio=8
-XX:+UseNUMA -XX:+UseParallelGC -XX:+PrintGCDetails -verbose:gc -XX
:+PrintGCTimeStamps</value>
</property>
<property>
<name>hive.txn.manager</name>
<value>org.apache.hadoop.hive.ql.lockmgr.DummyTxnManager</value>
</property>
<property>
<name>hive.server2.support.dynamic.service.discovery</name>
<value>true</value>
</property>
<property>
<name>hive.execution.engine</name>
<value>tez</value>
</property>
<property>
<name>hive.merge.tezfiles</name>
<value>true</value>
</property>
<property>
<name>hive.server2.tez.default.queues</name>
<value>default</value>
</property>
<property>
<name>hive.server2.tez.initialize.default.sessions</name>
<value>false</value>
</property>
<property>
<name>hive.server2.tez.sessions.per.default.queue</name>
<value>1</value>
</property>
<property>
<name>hive.tez.auto.reducer.parallelism</name>
<value>true</value>
</property>
<property>
<name>hive.tez.container.size</name>
<value>2048</value>
</property>
<property>
<name>hive.tez.cpu.vcores</name>
<value>-1</value>
</property>
<property>
<name>hive.tez.dynamic.partition.pruning</name>
<value>true</value>
</property>
<property>
<name>hive.tez.dynamic.partition.pruning.max.event.size</name>
<value>1048576</value>
</property>
<property>
<name>hive.tez.input.format</name>
<value>org.apache.hadoop.hive.ql.io.HiveInputFormat</value>
</property>
<property>
<name>hive.tez.java.opts</name>
<value>-server -Xmx4096m -Djava.net.preferIPv4Stack=true -XX:NewRatio=8
-XX:+UseNUMA -XX:+UseParallelGC -XX:+PrintGCDetails -verbose:gc -XX
:+PrintGCTimeStamps</value>
</property>
<property>
<name>hive.tez.log.level</name>
<value>INFO</value>
</property>
<property>
<name>hive.tez.max.partition.factor</name>
<value>2.0</value>
</property>
<property>
<name>hive.tez.min.partition.factor</name>
<value>0.25</value>
</property>
<property>
<name>hive.tez.smb.number.waves</name>
<value>0.5</value>
</property>
<property>
<name>hive.convert.join.bucket.mapjoin.tez</name>
<value>false</value>
</property>
<property>
<name>hive.server2.authentication</name>
<value>KERBEROS</value>
</property>
<property>
<name>hive.server2.authentication.kerberos.principal</name>
<value>hive/[email protected]</value>
</property>
<property>
<name>hive.server2.authentication.kerberos.keytab</name>
<value>/etc/security/keytabs/hive.service.keytab</value>
</property>
<property>
<name>hive.metastore.sasl.enabled</name>
<value>true</value>
</property>
<property>
<name>hive.metastore.kerberos.principal</name>
<value>hive/[email protected]</value>
</property>
<property>
<name>hive.metastore.kerberos.keytab.file</name>
<value>/etc/security/keytabs/hive.service.keytab</value>
</property>
<property>
<name>hive.server2.authentication.spnego.keytab</name>
<value>/etc/security/keytabs/spnego.service.keytab</value>
</property>
<property>
<name>hive.server2.authentication.spnego.principal</name>
<value>HTTP/[email protected]</value>
</property>
<property>
<name>hive.server2.enable.doAs</name>
<value>true</value>
</property>
</configuration>
> beeline does not connect to hive in kerberos environment hadoop3.3.6
> hive3.1.3
> -------------------------------------------------------------------------------
>
> Key: HIVE-28241
> URL: https://issues.apache.org/jira/browse/HIVE-28241
> Project: Hive
> Issue Type: Bug
> Components: Beeline
> Affects Versions: 3.1.3
> Reporter: manoj kumar
> Priority: Major
>
> 24/04/30 12:52:03 [main]: DEBUG security.HadoopThriftAuthBridge: Current
> authMethod = KERBEROS
> 24/04/30 12:52:03 [main]: DEBUG security.HadoopThriftAuthBridge: Not setting
> UGI conf as passed-in authMethod of kerberos = current.
> 24/04/30 12:52:03 [main]: DEBUG security.UserGroupInformation:
> PrivilegedAction [as: hive/[email protected] (auth:KERBEROS)][action:
> org.apache.hadoop.hive.metastore.security.HadoopThriftAuthBridge$Client$1@61a1ea2c]
> java.lang.Exception: null
> at
> org.apache.hadoop.security.UserGroupInformation.doAs(UserGroupInformation.java:1896)
> [hadoop-common-3.3.6.jar:?]
> at
> org.apache.hadoop.hive.metastore.security.HadoopThriftAuthBridge$Client.createClientTransport(HadoopThriftAuthBridge.java:234)
> [hive-exec-3.1.3.jar:3.1.3]
> at
> org.apache.hive.service.auth.KerberosSaslHelper.getKerberosTransport(KerberosSaslHelper.java:56)
> [hive-service-3.1.3.jar:3.1.3]
> at
> org.apache.hive.jdbc.HiveConnection.createBinaryTransport(HiveConnection.java:601)
> [hive-jdbc-3.1.3.jar:3.1.3]
> at
> org.apache.hive.jdbc.HiveConnection.openTransport(HiveConnection.java:341)
> [hive-jdbc-3.1.3.jar:3.1.3]
> at
> org.apache.hive.jdbc.HiveConnection.<init>(HiveConnection.java:228)
> [hive-jdbc-3.1.3.jar:3.1.3]
> at org.apache.hive.jdbc.HiveDriver.connect(HiveDriver.java:107)
> [hive-jdbc-3.1.3.jar:3.1.3]
> at java.sql.DriverManager.getConnection(DriverManager.java:664)
> [?:1.8.0_371]
> at java.sql.DriverManager.getConnection(DriverManager.java:208)
> [?:1.8.0_371]
> at
> org.apache.hive.beeline.DatabaseConnection.connect(DatabaseConnection.java:145)
> [hive-beeline-3.1.3.jar:3.1.3]
> at
> org.apache.hive.beeline.DatabaseConnection.getConnection(DatabaseConnection.java:209)
> [hive-beeline-3.1.3.jar:3.1.3]
> at org.apache.hive.beeline.Commands.connect(Commands.java:1641)
> [hive-beeline-3.1.3.jar:3.1.3]
> at org.apache.hive.beeline.Commands.connect(Commands.java:1536)
> [hive-beeline-3.1.3.jar:3.1.3]
> at sun.reflect.NativeMethodAccessorImpl.invoke0(Native Method)
> ~[?:1.8.0_371]
> at
> sun.reflect.NativeMethodAccessorImpl.invoke(NativeMethodAccessorImpl.java:62)
> ~[?:1.8.0_371]
> at
> sun.reflect.DelegatingMethodAccessorImpl.invoke(DelegatingMethodAccessorImpl.java:43)
> ~[?:1.8.0_371]
> at java.lang.reflect.Method.invoke(Method.java:498) ~[?:1.8.0_371]
> at
> org.apache.hive.beeline.ReflectiveCommandHandler.execute(ReflectiveCommandHandler.java:56)
> [hive-beeline-3.1.3.jar:3.1.3]
> at
> org.apache.hive.beeline.BeeLine.execCommandWithPrefix(BeeLine.java:1384)
> [hive-beeline-3.1.3.jar:3.1.3]
> at org.apache.hive.beeline.BeeLine.dispatch(BeeLine.java:1423)
> [hive-beeline-3.1.3.jar:3.1.3]
> at org.apache.hive.beeline.BeeLine.execute(BeeLine.java:1287)
> [hive-beeline-3.1.3.jar:3.1.3]
> at org.apache.hive.beeline.BeeLine.begin(BeeLine.java:1071)
> [hive-beeline-3.1.3.jar:3.1.3]
> at
> org.apache.hive.beeline.BeeLine.mainWithInputRedirection(BeeLine.java:538)
> [hive-beeline-3.1.3.jar:3.1.3]
> at org.apache.hive.beeline.BeeLine.main(BeeLine.java:520)
> [hive-beeline-3.1.3.jar:3.1.3]
> at sun.reflect.NativeMethodAccessorImpl.invoke0(Native Method)
> ~[?:1.8.0_371]
> at
> sun.reflect.NativeMethodAccessorImpl.invoke(NativeMethodAccessorImpl.java:62)
> ~[?:1.8.0_371]
> at
> sun.reflect.DelegatingMethodAccessorImpl.invoke(DelegatingMethodAccessorImpl.java:43)
> ~[?:1.8.0_371]
> at java.lang.reflect.Method.invoke(Method.java:498) ~[?:1.8.0_371]
> at org.apache.hadoop.util.RunJar.run(RunJar.java:328)
> [hadoop-common-3.3.6.jar:?]
> at org.apache.hadoop.util.RunJar.main(RunJar.java:241)
> [hadoop-common-3.3.6.jar:?]
> 24/04/30 12:52:03 [main]: DEBUG security.UserGroupInformation:
> PrivilegedAction [as: hive/[email protected] (auth:KERBEROS)][action:
> org.apache.hadoop.hive.metastore.security.TUGIAssumingTransport$1@149debbb]
> java.lang.Exception: null
> at
> org.apache.hadoop.security.UserGroupInformation.doAs(UserGroupInformation.java:1896)
> [hadoop-common-3.3.6.jar:?]
> at
> org.apache.hadoop.hive.metastore.security.TUGIAssumingTransport.open(TUGIAssumingTransport.java:48)
> [hive-exec-3.1.3.jar:3.1.3]
> at
> org.apache.hive.jdbc.HiveConnection.openTransport(HiveConnection.java:343)
> [hive-jdbc-3.1.3.jar:3.1.3]
> at
> org.apache.hive.jdbc.HiveConnection.<init>(HiveConnection.java:228)
> [hive-jdbc-3.1.3.jar:3.1.3]
> at org.apache.hive.jdbc.HiveDriver.connect(HiveDriver.java:107)
> [hive-jdbc-3.1.3.jar:3.1.3]
> at java.sql.DriverManager.getConnection(DriverManager.java:664)
> [?:1.8.0_371]
> at java.sql.DriverManager.getConnection(DriverManager.java:208)
> [?:1.8.0_371]
> at
> org.apache.hive.beeline.DatabaseConnection.connect(DatabaseConnection.java:145)
> [hive-beeline-3.1.3.jar:3.1.3]
> at
> org.apache.hive.beeline.DatabaseConnection.getConnection(DatabaseConnection.java:209)
> [hive-beeline-3.1.3.jar:3.1.3]
> at org.apache.hive.beeline.Commands.connect(Commands.java:1641)
> [hive-beeline-3.1.3.jar:3.1.3]
> at org.apache.hive.beeline.Commands.connect(Commands.java:1536)
> [hive-beeline-3.1.3.jar:3.1.3]
> at sun.reflect.NativeMethodAccessorImpl.invoke0(Native Method)
> ~[?:1.8.0_371]
> at
> sun.reflect.NativeMethodAccessorImpl.invoke(NativeMethodAccessorImpl.java:62)
> ~[?:1.8.0_371]
> at
> sun.reflect.DelegatingMethodAccessorImpl.invoke(DelegatingMethodAccessorImpl.java:43)
> ~[?:1.8.0_371]
> at java.lang.reflect.Method.invoke(Method.java:498) ~[?:1.8.0_371]
> at
> org.apache.hive.beeline.ReflectiveCommandHandler.execute(ReflectiveCommandHandler.java:56)
> [hive-beeline-3.1.3.jar:3.1.3]
> at
> org.apache.hive.beeline.BeeLine.execCommandWithPrefix(BeeLine.java:1384)
> [hive-beeline-3.1.3.jar:3.1.3]
> at org.apache.hive.beeline.BeeLine.dispatch(BeeLine.java:1423)
> [hive-beeline-3.1.3.jar:3.1.3]
> at org.apache.hive.beeline.BeeLine.execute(BeeLine.java:1287)
> [hive-beeline-3.1.3.jar:3.1.3]
> at org.apache.hive.beeline.BeeLine.begin(BeeLine.java:1071)
> [hive-beeline-3.1.3.jar:3.1.3]
> at
> org.apache.hive.beeline.BeeLine.mainWithInputRedirection(BeeLine.java:538)
> [hive-beeline-3.1.3.jar:3.1.3]
> at org.apache.hive.beeline.BeeLine.main(BeeLine.java:520)
> [hive-beeline-3.1.3.jar:3.1.3]
> at sun.reflect.NativeMethodAccessorImpl.invoke0(Native Method)
> ~[?:1.8.0_371]
> at
> sun.reflect.NativeMethodAccessorImpl.invoke(NativeMethodAccessorImpl.java:62)
> ~[?:1.8.0_371]
> at
> sun.reflect.DelegatingMethodAccessorImpl.invoke(DelegatingMethodAccessorImpl.java:43)
> ~[?:1.8.0_371]
> at java.lang.reflect.Method.invoke(Method.java:498) ~[?:1.8.0_371]
> at org.apache.hadoop.util.RunJar.run(RunJar.java:328)
> [hadoop-common-3.3.6.jar:?]
> at org.apache.hadoop.util.RunJar.main(RunJar.java:241)
> [hadoop-common-3.3.6.jar:?]
> 24/04/30 12:52:03 [main]: DEBUG transport.TSaslTransport: opening transport
> org.apache.thrift.transport.TSaslClientTransport@25cd49a4
> 24/04/30 12:52:03 [main-EventThread]: INFO zookeeper.ClientCnxn: EventThread
> shut down
> 24/04/30 12:52:03 [main]: DEBUG transport.TSaslClientTransport: Sending
> mechanism name GSSAPI and initial response of length 621
> 24/04/30 12:52:03 [main]: DEBUG transport.TSaslTransport: CLIENT: Writing
> message with status START and payload length 6
> 24/04/30 12:52:03 [main]: DEBUG transport.TSaslTransport: CLIENT: Writing
> message with status OK and payload length 621
> 24/04/30 12:52:03 [main]: DEBUG transport.TSaslTransport: CLIENT: Start
> message handled
> 24/04/30 12:52:03 [main]: WARN jdbc.HiveConnection: Failed to connect to
> mk3.example.com:10000
> 24/04/30 12:52:03 [main]: INFO imps.CuratorFrameworkImpl: Starting
> 24/04/30 12:52:03 [main]: DEBUG curator.CuratorZookeeperClient: Starting
> 24/04/30 12:52:03 [main]: DEBUG curator.ConnectionState: Starting
> 24/04/30 12:52:03 [main]: DEBUG curator.ConnectionState: reset
> 24/04/30 12:52:03 [main]: INFO zookeeper.ZooKeeper: Initiating client
> connection, connectString=10.161.171.36:2181 sessionTimeout=60000
> watcher=org.apache.curator.ConnectionState@e9dc4d0
> 24/04/30 12:52:03 [main-SendThread(10.161.171.36:2181)]: INFO
> zookeeper.ClientCnxn: Opening socket connection to server
> 10.161.171.36/10.161.171.36:2181. Will not attempt to authenticate using SASL
> (unknown error)
> 24/04/30 12:52:03 [main-SendThread(10.161.171.36:2181)]: INFO
> zookeeper.ClientCnxn: Socket connection established to
> 10.161.171.36/10.161.171.36:2181, initiating session
> 24/04/30 12:52:03 [main-SendThread(10.161.171.36:2181)]: DEBUG
> zookeeper.ClientCnxn: Session establishment request sent on
> 10.161.171.36/10.161.171.36:2181
> 24/04/30 12:52:03 [main-SendThread(10.161.171.36:2181)]: INFO
> zookeeper.ClientCnxn: Session establishment complete on server
> 10.161.171.36/10.161.171.36:2181, sessionid = 0x300001b577d001d, negotiated
> timeout = 40000
> 24/04/30 12:52:03 [main-EventThread]: INFO state.ConnectionStateManager:
> State change: CONNECTED
> 24/04/30 12:52:03 [main-SendThread(10.161.171.36:2181)]: DEBUG
> zookeeper.ClientCnxn: Reading reply sessionid:0x300001b577d001d, packet::
> clientPath:null serverPath:null finished:false header:: 1,12 replyHeader::
> 1,12884902102,0 request:: '/hiveserver2,F response::
> v\{'serverUri=mk5.example.com:10000;version=3.1.3;sequence=0000000004,'serverUri=mk1.example.com:10000;version=3.1.3;sequence=0000000006,'serverUri=mk3.example.com:10000;version=3.1.3;sequence=0000000005},s\{8589935031,8589935031,1714458097538,1714458097538,0,13,0,0,13,3,12884902062}
> 24/04/30 12:52:03 [main]: DEBUG imps.CuratorFrameworkImpl: Closing
> 24/04/30 12:52:03 [Curator-Framework-0]: INFO imps.CuratorFrameworkImpl:
> backgroundOperationsLoop exiting
> 24/04/30 12:52:03 [main]: DEBUG curator.CuratorZookeeperClient: Closing
> 24/04/30 12:52:03 [main]: DEBUG curator.ConnectionState: Closing
> 24/04/30 12:52:03 [main]: DEBUG zookeeper.ZooKeeper: Closing session:
> 0x300001b577d001d
> 24/04/30 12:52:03 [main]: DEBUG zookeeper.ClientCnxn: Closing client for
> session: 0x300001b577d001d
> 24/04/30 12:52:03 [main-SendThread(10.161.171.36:2181)]: DEBUG
> zookeeper.ClientCnxn: Reading reply sessionid:0x300001b577d001d, packet::
> clientPath:null serverPath:null finished:false header:: 2,-11 replyHeader::
> 2,12884902103,0 request:: null response:: null
> 24/04/30 12:52:03 [main-SendThread(10.161.171.36:2181)]: DEBUG
> zookeeper.ClientCnxn: An exception was thrown while closing send thread for
> session 0x300001b577d001d : Unable to read additional data from server
> sessionid 0x300001b577d001d, likely server has closed socket
> 24/04/30 12:52:03 [main]: DEBUG zookeeper.ClientCnxn: Disconnecting client
> for session: 0x300001b577d001d
> 24/04/30 12:52:03 [main]: INFO zookeeper.ZooKeeper: Session:
> 0x300001b577d001d closed
> 24/04/30 12:52:03 [main]: ERROR jdbc.Utils: Unable to read HiveServer2
> configs from ZooKeeper
> 24/04/30 12:52:03 [main-EventThread]: INFO zookeeper.ClientCnxn: EventThread
> shut down
> Unknown HS2 problem when communicating with Thrift server.
> Error: Could not open client transport for any of the Server URI's in
> ZooKeeper: Peer indicated failure: Unsupported mechanism type GSSAPI
> (state=08S01,code=0)
> beeline>
--
This message was sent by Atlassian Jira
(v8.20.10#820010)