Repository: drill Updated Branches: refs/heads/master 149928da5 -> a52889cdb
DRILL-3413: When SASL is enabled use DIGEST mechanism in creating HiveMetaStoreClient for proxy users. Project: http://git-wip-us.apache.org/repos/asf/drill/repo Commit: http://git-wip-us.apache.org/repos/asf/drill/commit/a52889cd Tree: http://git-wip-us.apache.org/repos/asf/drill/tree/a52889cd Diff: http://git-wip-us.apache.org/repos/asf/drill/diff/a52889cd Branch: refs/heads/master Commit: a52889cdbfc210501fae0f92d3dcb095272cb4f5 Parents: 149928d Author: vkorukanti <[email protected]> Authored: Sun Jun 28 11:26:33 2015 -0700 Committer: vkorukanti <[email protected]> Committed: Sun Jun 28 18:12:48 2015 -0700 ---------------------------------------------------------------------- .../store/hive/DrillHiveMetaStoreClient.java | 43 ++++++++++++++++++-- .../store/hive/schema/HiveSchemaFactory.java | 22 +++++----- 2 files changed, 49 insertions(+), 16 deletions(-) ---------------------------------------------------------------------- http://git-wip-us.apache.org/repos/asf/drill/blob/a52889cd/contrib/storage-hive/core/src/main/java/org/apache/drill/exec/store/hive/DrillHiveMetaStoreClient.java ---------------------------------------------------------------------- diff --git a/contrib/storage-hive/core/src/main/java/org/apache/drill/exec/store/hive/DrillHiveMetaStoreClient.java b/contrib/storage-hive/core/src/main/java/org/apache/drill/exec/store/hive/DrillHiveMetaStoreClient.java index ef70b2e..aa1d074 100644 --- a/contrib/storage-hive/core/src/main/java/org/apache/drill/exec/store/hive/DrillHiveMetaStoreClient.java +++ b/contrib/storage-hive/core/src/main/java/org/apache/drill/exec/store/hive/DrillHiveMetaStoreClient.java @@ -33,6 +33,7 @@ import org.apache.hadoop.hive.metastore.api.Partition; import org.apache.hadoop.hive.metastore.api.Table; import org.apache.hadoop.hive.metastore.api.UnknownTableException; import org.apache.hadoop.hive.ql.security.authorization.plugin.HiveAccessControlException; +import org.apache.hadoop.hive.shims.ShimLoader; import 
org.apache.hadoop.security.UserGroupInformation; import org.apache.thrift.TException; @@ -58,6 +59,8 @@ public abstract class DrillHiveMetaStoreClient extends HiveMetaStoreClient { * 1. Drill impersonation is enabled and * 2. either storage (in remote HiveMetaStore server) or SQL standard based authorization (in Hive storage plugin) * is enabled + * @param processUserMetaStoreClient MetaStoreClient of process user. Useful for generating the delegation tokens when + * SASL (KERBEROS or custom SASL implementations) is enabled. * @param hiveConf Conf including authorization configuration * @param hiveConfigOverride * @param userName User who is trying to access the Hive metadata @@ -67,10 +70,12 @@ public abstract class DrillHiveMetaStoreClient extends HiveMetaStoreClient { * @return * @throws MetaException */ - public static DrillHiveMetaStoreClient createClientWithAuthz(final HiveConf hiveConf, - final Map<String, String> hiveConfigOverride, final String userName, final boolean ignoreAuthzErrors) - throws MetaException { + public static DrillHiveMetaStoreClient createClientWithAuthz(final DrillHiveMetaStoreClient processUserMetaStoreClient, + final HiveConf hiveConf, final Map<String, String> hiveConfigOverride, final String userName, + final boolean ignoreAuthzErrors) throws MetaException { try { + boolean delegationTokenGenerated = false; + final UserGroupInformation ugiForRpc; // UGI credentials to use for RPC communication with Hive MetaStore server if (!hiveConf.getBoolVar(ConfVars.HIVE_SERVER2_ENABLE_DOAS)) { // If the user impersonation is disabled in Hive storage plugin (not Drill impersonation), use the process @@ -78,11 +83,32 @@ public abstract class DrillHiveMetaStoreClient extends HiveMetaStoreClient { ugiForRpc = ImpersonationUtil.getProcessUserUGI(); } else { ugiForRpc = ImpersonationUtil.createProxyUgi(userName); + if (hiveConf.getBoolVar(ConfVars.METASTORE_USE_THRIFT_SASL)) { + // When SASL is enabled for proxy user create a delegation token. 
Currently HiveMetaStoreClient can create + // client transport for proxy users only when the authentication mechanism is DIGEST (through use of + // delegation tokens). + String delegationToken = processUserMetaStoreClient.getDelegationToken(userName, userName); + try { + ShimLoader.getHadoopShims().setTokenStr(ugiForRpc, delegationToken, HiveClientWithAuthz.DRILL2HMS_TOKEN); + } catch (IOException e) { + throw new DrillRuntimeException("Couldn't setup delegation token in the UGI for Hive MetaStoreClient", e); + } + delegationTokenGenerated = true; + } + } + + final HiveConf hiveConfForClient; + if (delegationTokenGenerated) { + hiveConfForClient = new HiveConf(hiveConf); + hiveConfForClient.set("hive.metastore.token.signature", HiveClientWithAuthz.DRILL2HMS_TOKEN); + } else { + hiveConfForClient = hiveConf; } + return ugiForRpc.doAs(new PrivilegedExceptionAction<DrillHiveMetaStoreClient>() { @Override public DrillHiveMetaStoreClient run() throws Exception { - return new HiveClientWithAuthz(hiveConf, hiveConfigOverride, ugiForRpc, userName, ignoreAuthzErrors); + return new HiveClientWithAuthz(hiveConfForClient, hiveConfigOverride, ugiForRpc, userName, ignoreAuthzErrors); } }); } catch (final Exception e) { @@ -197,6 +223,8 @@ public abstract class DrillHiveMetaStoreClient extends HiveMetaStoreClient { * credentials and check authorization privileges if set. 
*/ private static class HiveClientWithAuthz extends DrillHiveMetaStoreClient { + public static final String DRILL2HMS_TOKEN = "DrillDelegationTokenForHiveMetaStoreServer"; + private final UserGroupInformation ugiForRpc; private final boolean ignoreAuthzErrors; private HiveAuthorizationHelper authorizer; @@ -322,6 +350,13 @@ public abstract class DrillHiveMetaStoreClient extends HiveMetaStoreClient { } @Override + public String getDelegationToken(String owner, String renewerKerberosPrincipalName) throws TException { + synchronized (this) { + return super.getDelegationToken(owner, renewerKerberosPrincipalName); + } + } + + @Override public void close() { // No-op. } http://git-wip-us.apache.org/repos/asf/drill/blob/a52889cd/contrib/storage-hive/core/src/main/java/org/apache/drill/exec/store/hive/schema/HiveSchemaFactory.java ---------------------------------------------------------------------- diff --git a/contrib/storage-hive/core/src/main/java/org/apache/drill/exec/store/hive/schema/HiveSchemaFactory.java b/contrib/storage-hive/core/src/main/java/org/apache/drill/exec/store/hive/schema/HiveSchemaFactory.java index c8f2490..73e7bf7 100644 --- a/contrib/storage-hive/core/src/main/java/org/apache/drill/exec/store/hive/schema/HiveSchemaFactory.java +++ b/contrib/storage-hive/core/src/main/java/org/apache/drill/exec/store/hive/schema/HiveSchemaFactory.java @@ -46,7 +46,8 @@ import com.google.common.collect.Sets; public class HiveSchemaFactory implements SchemaFactory { static final org.slf4j.Logger logger = org.slf4j.LoggerFactory.getLogger(HiveSchemaFactory.class); - private final DrillHiveMetaStoreClient globalMetastoreClient; + // MetaStoreClient created using process user credentials + private final DrillHiveMetaStoreClient processUserMetastoreClient; private final HiveStoragePlugin plugin; private final Map<String, String> hiveConfigOverride; private final String schemaName; @@ -72,14 +73,11 @@ public class HiveSchemaFactory implements SchemaFactory { isHS2DoAsSet 
= hiveConf.getBoolVar(ConfVars.HIVE_SERVER2_ENABLE_DOAS); isDrillImpersonationEnabled = plugin.getContext().getConfig().getBoolean(ExecConstants.IMPERSONATION_ENABLED); - if (!isDrillImpersonationEnabled) { - try { - globalMetastoreClient = DrillHiveMetaStoreClient.createNonCloseableClientWithCaching(hiveConf, hiveConfigOverride); - } catch (MetaException e) { - throw new ExecutionSetupException("Failure setting up Hive metastore client.", e); - } - } else { - globalMetastoreClient = null; + try { + processUserMetastoreClient = + DrillHiveMetaStoreClient.createNonCloseableClientWithCaching(hiveConf, hiveConfigOverride); + } catch (MetaException e) { + throw new ExecutionSetupException("Failure setting up Hive metastore client.", e); } } @@ -93,11 +91,11 @@ public class HiveSchemaFactory implements SchemaFactory { @Override public void registerSchemas(SchemaConfig schemaConfig, SchemaPlus parent) throws IOException { - DrillHiveMetaStoreClient mClientForSchemaTree = globalMetastoreClient; + DrillHiveMetaStoreClient mClientForSchemaTree = processUserMetastoreClient; if (isDrillImpersonationEnabled) { try { - mClientForSchemaTree = DrillHiveMetaStoreClient.createClientWithAuthz(hiveConf, hiveConfigOverride, - schemaConfig.getUserName(), schemaConfig.getIgnoreAuthErrors()); + mClientForSchemaTree = DrillHiveMetaStoreClient.createClientWithAuthz(processUserMetastoreClient, hiveConf, + hiveConfigOverride, schemaConfig.getUserName(), schemaConfig.getIgnoreAuthErrors()); } catch (final TException e) { throw new IOException("Failure setting up Hive metastore client.", e); }
