[ 
https://issues.apache.org/jira/browse/DRILL-5433?page=com.atlassian.jira.plugin.system.issuetabpanels:comment-tabpanel&focusedCommentId=15969055#comment-15969055
 ] 

Parag Darji commented on DRILL-5433:
------------------------------------

Here is the hive-site.xml file:
{code}
ambari.hive.db.schema.name =    hive
atlas.hook.hive.maxThreads =    1
atlas.hook.hive.minThreads =    1
datanucleus.autoCreateSchema =  FALSE
datanucleus.cache.level2.type =         none
datanucleus.fixedDatastore =    TRUE
hive.auto.convert.join =        TRUE
hive.auto.convert.join.noconditionaltask =      TRUE
hive.auto.convert.join.noconditionaltask.size =         572662306
hive.auto.convert.sortmerge.join =      TRUE
hive.auto.convert.sortmerge.join.to.mapjoin =   FALSE
hive.cbo.enable =       TRUE
hive.cli.print.header =         FALSE
hive.cluster.delegation.token.store.class =     
org.apache.hadoop.hive.thrift.ZooKeeperTokenStore
hive.cluster.delegation.token.store.zookeeper.connectString =   
host02.fqdn:2181,host01.fqdn:2181,host03.fqdn:2181
hive.cluster.delegation.token.store.zookeeper.znode =   /hive/cluster/delegation
hive.compactor.abortedtxn.threshold =   1000
hive.compactor.check.interval =         300L
hive.compactor.delta.num.threshold =    10
hive.compactor.delta.pct.threshold =    0.1f
hive.compactor.initiator.on =   FALSE
hive.compactor.worker.threads =         0
hive.compactor.worker.timeout =         86400L
hive.compute.query.using.stats =        TRUE
hive.conf.restricted.list =     
hive.security.authenticator.manager,hive.security.authorization.manager,hive.users.in.admin.role
hive.convert.join.bucket.mapjoin.tez =  FALSE
hive.default.fileformat =       TextFile
hive.default.fileformat.managed =       TextFile
hive.enforce.bucketing =        FALSE
hive.enforce.sorting =  TRUE
hive.enforce.sortmergebucketmapjoin =   TRUE
hive.exec.compress.intermediate =       FALSE
hive.exec.compress.output =     FALSE
hive.exec.dynamic.partition =   TRUE
hive.exec.dynamic.partition.mode =      strict
hive.exec.failure.hooks =       org.apache.hadoop.hive.ql.hooks.ATSHook
hive.exec.max.created.files =   100000
hive.exec.max.dynamic.partitions =      5000
hive.exec.max.dynamic.partitions.pernode =      2000
hive.exec.orc.compression.strategy =    SPEED
hive.exec.orc.default.compress =        ZLIB
hive.exec.orc.default.stripe.size =     67108864
hive.exec.orc.encoding.strategy =       SPEED
hive.exec.parallel =    FALSE
hive.exec.parallel.thread.number =      8
hive.exec.post.hooks =  org.apache.hadoop.hive.ql.hooks.ATSHook
hive.exec.pre.hooks =   org.apache.hadoop.hive.ql.hooks.ATSHook
hive.exec.reducers.bytes.per.reducer =  67108864
hive.exec.reducers.max =        1009
hive.exec.scratchdir =  /tmp/hive
hive.exec.submit.local.task.via.child =         TRUE
hive.exec.submitviachild =      FALSE
hive.execution.engine =         tez
hive.fetch.task.aggr =  FALSE
hive.fetch.task.conversion =    more
hive.fetch.task.conversion.threshold =  1073741824
hive.limit.optimize.enable =    TRUE
hive.limit.pushdown.memory.usage =      0.04
hive.map.aggr =         TRUE
hive.map.aggr.hash.force.flush.memory.threshold =       0.9
hive.map.aggr.hash.min.reduction =      0.5
hive.map.aggr.hash.percentmemory =      0.5
hive.mapjoin.bucket.cache.size =        10000
hive.mapjoin.optimized.hashtable =      TRUE
hive.mapred.reduce.tasks.speculative.execution =        FALSE
hive.merge.mapfiles =   TRUE
hive.merge.mapredfiles =        FALSE
hive.merge.orcfile.stripe.level =       TRUE
hive.merge.rcfile.block.level =         TRUE
hive.merge.size.per.task =      256000000
hive.merge.smallfiles.avgsize =         16000000
hive.merge.tezfiles =   FALSE
hive.metastore.authorization.storage.checks =   FALSE
hive.metastore.cache.pinobjtypes =      Table,Database,Type,FieldSchema,Order
hive.metastore.client.connect.retry.delay =     5s
hive.metastore.client.socket.timeout =  1800s
hive.metastore.connect.retries =        24
hive.metastore.execute.setugi =         TRUE
hive.metastore.failure.retries =        24
hive.metastore.kerberos.keytab.file =   
/etc/security/keytabs/hive.service.keytab
hive.metastore.kerberos.principal =     hive/[email protected]
hive.metastore.pre.event.listeners =    
org.apache.hadoop.hive.ql.security.authorization.AuthorizationPreEventListener
hive.metastore.sasl.enabled =   TRUE
hive.metastore.server.max.threads =     100000
hive.metastore.uris =   thrift://host02.fqdn:9083
hive.metastore.warehouse.dir =  /apps/hive/warehouse
hive.optimize.bucketmapjoin =   TRUE
hive.optimize.bucketmapjoin.sortedmerge =       FALSE
hive.optimize.constant.propagation =    TRUE
hive.optimize.index.filter =    TRUE
hive.optimize.metadataonly =    TRUE
hive.optimize.null.scan =       TRUE
hive.optimize.reducededuplication =     TRUE
hive.optimize.reducededuplication.min.reducer =         4
hive.optimize.sort.dynamic.partition =  FALSE
hive.orc.compute.splits.num.threads =   10
hive.orc.splits.include.file.footer =   FALSE
hive.prewarm.enabled =  FALSE
hive.prewarm.numcontainers =    3
hive.security.authenticator.manager =   
org.apache.hadoop.hive.ql.security.ProxyUserAuthenticator
hive.security.authorization.enabled =   TRUE
hive.security.authorization.manager =   
org.apache.hadoop.hive.ql.security.authorization.plugin.sqlstd.SQLStdConfOnlyAuthorizerFactory
hive.security.metastore.authenticator.manager =         
org.apache.hadoop.hive.ql.security.HadoopDefaultMetastoreAuthenticator
hive.security.metastore.authorization.auth.reads =      TRUE
hive.security.metastore.authorization.manager =         
org.apache.hadoop.hive.ql.security.authorization.StorageBasedAuthorizationProvider
hive.server2.allow.user.substitution =  TRUE
hive.server2.authentication =   KERBEROS
hive.server2.authentication.kerberos.keytab =   
/etc/security/keytabs/hive.service.keytab
hive.server2.authentication.kerberos.principal =        hive/[email protected]
hive.server2.authentication.spnego.keytab =     
/etc/security/keytabs/spnego.service.keytab
hive.server2.authentication.spnego.principal =  HTTP/[email protected]
hive.server2.enable.doAs =      FALSE
hive.server2.logging.operation.enabled =        TRUE
hive.server2.logging.operation.log.location =   /tmp/hive/operation_logs
hive.server2.max.start.attempts =       5
hive.server2.support.dynamic.service.discovery =        TRUE
hive.server2.table.type.mapping =       CLASSIC
hive.server2.tez.default.queues =       default,llap
hive.server2.tez.initialize.default.sessions =  FALSE
hive.server2.tez.sessions.per.default.queue =   1
hive.server2.thrift.http.path =         cliservice
hive.server2.thrift.http.port =         10001
hive.server2.thrift.max.worker.threads =        500
hive.server2.thrift.port =      10010
hive.server2.thrift.sasl.qop =  auth
hive.server2.transport.mode =   binary
hive.server2.use.SSL =  FALSE
hive.server2.zookeeper.namespace =      hiveserver2
hive.smbjoin.cache.rows =       10000
hive.stats.autogather =         TRUE
hive.stats.dbclass =    fs
hive.stats.fetch.column.stats =         TRUE
hive.stats.fetch.partition.stats =      TRUE
hive.support.concurrency =      FALSE
hive.tez.auto.reducer.parallelism =     TRUE
hive.tez.container.size =       2048
hive.tez.cpu.vcores =   -1
hive.tez.dynamic.partition.pruning =    TRUE
hive.tez.dynamic.partition.pruning.max.data.size =      104857600
hive.tez.dynamic.partition.pruning.max.event.size =     1048576
hive.tez.input.format =         org.apache.hadoop.hive.ql.io.HiveInputFormat
hive.tez.java.opts =    -server -Xmx819m -Djava.net.preferIPv4Stack=true 
-XX:NewRatio=8 -XX:+UseNUMA -XX:+UseParallelGC -XX:+PrintGCDetails -verbose:gc 
-XX:+PrintGCTimeStamps
hive.tez.log.level =    INFO
hive.tez.max.partition.factor =         2
hive.tez.min.partition.factor =         0.25
hive.tez.smb.number.waves =     0.5
hive.txn.manager =      org.apache.hadoop.hive.ql.lockmgr.DummyTxnManager
hive.txn.max.open.batch =       1000
hive.txn.timeout =      300
hive.user.install.directory =   /user/
hive.vectorized.execution.enabled =     TRUE
hive.vectorized.execution.reduce.enabled =      FALSE
hive.vectorized.groupby.checkinterval =         4096
hive.vectorized.groupby.flush.percent =         0.1
hive.vectorized.groupby.maxentries =    100000
hive.zookeeper.client.port =    2181
hive.zookeeper.namespace =      hive_zookeeper_namespace
hive.zookeeper.quorum =         
host02.fqdn:2181,host01.fqdn:2181,host03.fqdn:2181
javax.jdo.option.ConnectionDriverName =         com.mysql.jdbc.Driver
javax.jdo.option.ConnectionURL =        
jdbc:mysql://host02.fqdn/hive?createDatabaseIfNotExist=true
javax.jdo.option.ConnectionUserName =   hive
{code}

> Authentication failed: Server requires authentication using [kerberos, plain]
> -----------------------------------------------------------------------------
>
>                 Key: DRILL-5433
>                 URL: https://issues.apache.org/jira/browse/DRILL-5433
>             Project: Apache Drill
>          Issue Type: Task
>          Components: Functions - Drill
>    Affects Versions: 1.10.0
>         Environment: OS: Redhat Linux 6.7, HDP 2.5.3, Kerberos enabled, 
> Hardware: VmWare
>            Reporter: Parag Darji
>            Priority: Minor
>              Labels: newbie, security
>             Fix For: 1.10.0
>
>   Original Estimate: 168h
>  Remaining Estimate: 168h
>
> I've set up Apache Drill 1.10.0 on RHEL 6.7, HDP 2.5.3, kerberos enabled
> I'm getting below error while running "drill-conf" or sqlline as user "drill" 
> which is configured in the "drill-override.conf" file. 
> {code}
> drill@host:/opt/drill/bin>  drill-conf
> Error: Failure in connecting to Drill: 
> org.apache.drill.exec.rpc.NonTransientRpcException: 
> javax.security.sasl.SaslException: Authentication failed: Server requires 
> authentication using [kerberos, plain]. Insufficient credentials? [Caused by 
> javax.security.sasl.SaslException: Server requires authentication using 
> [kerberos, plain]. Insufficient credentials?] (state=,code=0)
> java.sql.SQLException: Failure in connecting to Drill: 
> org.apache.drill.exec.rpc.NonTransientRpcException: 
> javax.security.sasl.SaslException: Authentication failed: Server requires 
> authentication using [kerberos, plain]. Insufficient credentials? [Caused by 
> javax.security.sasl.SaslException: Server requires authentication using 
> [kerberos, plain]. Insufficient credentials?]
>         at 
> org.apache.drill.jdbc.impl.DrillConnectionImpl.<init>(DrillConnectionImpl.java:166)
>         at 
> org.apache.drill.jdbc.impl.DrillJdbc41Factory.newDrillConnection(DrillJdbc41Factory.java:72)
>         at 
> org.apache.drill.jdbc.impl.DrillFactory.newConnection(DrillFactory.java:69)
>         at 
> org.apache.calcite.avatica.UnregisteredDriver.connect(UnregisteredDriver.java:143)
>         at org.apache.drill.jdbc.Driver.connect(Driver.java:72)
>         at sqlline.DatabaseConnection.connect(DatabaseConnection.java:167)
>         at 
> sqlline.DatabaseConnection.getConnection(DatabaseConnection.java:213)
>         at sqlline.Commands.connect(Commands.java:1083)
>         at sqlline.Commands.connect(Commands.java:1015)
>         at sun.reflect.NativeMethodAccessorImpl.invoke0(Native Method)
>         at 
> sun.reflect.NativeMethodAccessorImpl.invoke(NativeMethodAccessorImpl.java:57)
>         at 
> sun.reflect.DelegatingMethodAccessorImpl.invoke(DelegatingMethodAccessorImpl.java:43)
>         at java.lang.reflect.Method.invoke(Method.java:606)
>         at 
> sqlline.ReflectiveCommandHandler.execute(ReflectiveCommandHandler.java:36)
>         at sqlline.SqlLine.dispatch(SqlLine.java:742)
>         at sqlline.SqlLine.initArgs(SqlLine.java:528)
>         at sqlline.SqlLine.begin(SqlLine.java:596)
>         at sqlline.SqlLine.start(SqlLine.java:375)
>         at sqlline.SqlLine.main(SqlLine.java:268)
> Caused by: org.apache.drill.exec.rpc.NonTransientRpcException: 
> javax.security.sasl.SaslException: Authentication failed: Server requires 
> authentication using [kerberos, plain]. Insufficient credentials? [Caused by 
> javax.security.sasl.SaslException: Server requires authentication using 
> [kerberos, plain]. Insufficient credentials?]
>         at 
> org.apache.drill.exec.rpc.user.UserClient.connect(UserClient.java:157)
>         at 
> org.apache.drill.exec.client.DrillClient.connect(DrillClient.java:432)
>         at 
> org.apache.drill.exec.client.DrillClient.connect(DrillClient.java:379)
>         at 
> org.apache.drill.jdbc.impl.DrillConnectionImpl.<init>(DrillConnectionImpl.java:157)
>         ... 18 more
> Caused by: javax.security.sasl.SaslException: Authentication failed: Server 
> requires authentication using [kerberos, plain]. Insufficient credentials? 
> [Caused by javax.security.sasl.SaslException: Server requires authentication 
> using [kerberos, plain]. Insufficient credentials?]
>         at 
> org.apache.drill.exec.rpc.user.UserClient$3.mapException(UserClient.java:204)
>         at 
> org.apache.drill.exec.rpc.user.UserClient$3.mapException(UserClient.java:197)
>         at 
> com.google.common.util.concurrent.AbstractCheckedFuture.checkedGet(AbstractCheckedFuture.java:85)
>         at 
> org.apache.drill.exec.rpc.user.UserClient.connect(UserClient.java:155)
>         ... 21 more
> Caused by: javax.security.sasl.SaslException: Server requires authentication 
> using [kerberos, plain]. Insufficient credentials?
>         at 
> org.apache.drill.exec.rpc.user.UserClient.getAuthenticatorFactory(UserClient.java:285)
>         at 
> org.apache.drill.exec.rpc.user.UserClient.authenticate(UserClient.java:216)
>         ... 22 more
> apache drill 1.10.0
> "this isn't your grandfather's sql"
> {code}
> Same error when running below command:
> {code}
> sqlline --maxWidth=10000 -u 
> "jdbc:drill:drillbit=host1.fqdn;auth=kerberos;principal=drill/[email protected]"
> {code}
> "Drill" user has a valid keytab/ticket.
> The Drill UI is working fine with local authentication.
> drill-override.conf file:
> {code}
> drill.exec: {
>   cluster-id: "drillbits1",
>   zk.connect: "host1.fqdn:2181,host2.fqdn:2181,host3.fqdn:2181",
>   security: {
>           user.auth.enabled: true,
>           user.auth.impl: "pam",
>           user.auth.pam_profiles: [ "sudo", "login" ],
>           packages += "org.apache.drill.exec.rpc.user.security",
>           auth.mechanisms: ["KERBEROS","PLAIN"],
>           auth.principal: "drill/[email protected]",
>           auth.keytab: "/opt/drill/.keytab/drill.keytab"
>         }
> }
> {code}
> {code}
> cat drill-env.sh | egrep -v '^#|^$'
> export DRILLBIT_JAVA_OPTS="-Djava.library.path=/opt/pam/JPam-1.1/"
> {code}



--
This message was sent by Atlassian JIRA
(v6.3.15#6346)

Reply via email to