SENTRY-498: Sentry integration with Hive authorization framework V2 (Dapeng Sun, reviewed by Colin Ma)
Project: http://git-wip-us.apache.org/repos/asf/incubator-sentry/repo Commit: http://git-wip-us.apache.org/repos/asf/incubator-sentry/commit/5e58f3fe Tree: http://git-wip-us.apache.org/repos/asf/incubator-sentry/tree/5e58f3fe Diff: http://git-wip-us.apache.org/repos/asf/incubator-sentry/diff/5e58f3fe Branch: refs/heads/master Commit: 5e58f3fe6aab426e58085bb93fcf6efc9380bd5f Parents: b2d71a8 Author: Sun Dapeng <s...@apache.org> Authored: Thu Nov 26 10:21:02 2015 +0800 Committer: Sun Dapeng <s...@apache.org> Committed: Thu Nov 26 10:32:57 2015 +0800 ---------------------------------------------------------------------- pom.xml | 6 + sentry-binding/pom.xml | 1 + sentry-binding/sentry-binding-hive-v2/pom.xml | 158 ++++++ .../binding/hive/v2/HiveAuthzBindingHookV2.java | 94 ++++ .../hive/v2/HiveAuthzBindingSessionHookV2.java | 107 ++++ .../hive/v2/SentryAuthorizerFactory.java | 164 ++++++ ...entryHiveAuthorizationTaskFactoryImplV2.java | 64 +++ .../hive/v2/SentryHivePrivilegeObject.java | 32 ++ .../DefaultSentryAccessController.java | 558 +++++++++++++++++++ .../v2/authorizer/DefaultSentryValidator.java | 481 ++++++++++++++++ .../authorizer/SentryHiveAccessController.java | 200 +++++++ .../SentryHiveAuthorizationValidator.java | 58 ++ .../v2/authorizer/SentryHiveAuthorizer.java | 195 +++++++ .../v2/metastore/AuthorizingObjectStoreV2.java | 413 ++++++++++++++ .../v2/metastore/MetastoreAuthzBindingV2.java | 54 ++ .../SentryMetastorePostEventListenerV2.java | 73 +++ .../hive/v2/util/SentryAuthorizerUtil.java | 362 ++++++++++++ .../hive/v2/util/SimpleSemanticAnalyzer.java | 369 ++++++++++++ .../v2/DummyHiveAuthenticationProvider.java | 63 +++ .../ql/exec/SentryHivePrivilegeObjectDesc.java | 4 + .../binding/hive/authz/SentryConfigTool.java | 5 +- .../metastore/AuthorizingObjectStore.java | 4 +- .../metastore/MetastoreAuthzBinding.java | 6 +- .../metastore/SentryMetaStoreFilterHook.java | 2 +- .../SentryMetastorePostEventListener.java | 2 +- 
.../org/apache/sentry/core/model/db/Column.java | 2 + 26 files changed, 3468 insertions(+), 9 deletions(-) ---------------------------------------------------------------------- http://git-wip-us.apache.org/repos/asf/incubator-sentry/blob/5e58f3fe/pom.xml ---------------------------------------------------------------------- diff --git a/pom.xml b/pom.xml index bf3a94d..a369621 100644 --- a/pom.xml +++ b/pom.xml @@ -69,6 +69,7 @@ limitations under the License. <derby.version>10.10.2.0</derby.version> <commons-cli.version>1.2</commons-cli.version> <hive.version>1.1.0</hive.version> + <hive-v2.version>1.3.0-SNAPSHOT</hive-v2.version> <hadoop.version>2.6.0</hadoop.version> <fest.reflect.version>1.4.1</fest.reflect.version> <guava.version>11.0.2</guava.version> @@ -382,6 +383,11 @@ limitations under the License. </dependency> <dependency> <groupId>org.apache.sentry</groupId> + <artifactId>sentry-binding-hive-v2</artifactId> + <version>${project.version}</version> + </dependency> + <dependency> + <groupId>org.apache.sentry</groupId> <artifactId>sentry-binding-solr</artifactId> <version>${project.version}</version> </dependency> http://git-wip-us.apache.org/repos/asf/incubator-sentry/blob/5e58f3fe/sentry-binding/pom.xml ---------------------------------------------------------------------- diff --git a/sentry-binding/pom.xml b/sentry-binding/pom.xml index 15a962f..4283edb 100644 --- a/sentry-binding/pom.xml +++ b/sentry-binding/pom.xml @@ -31,6 +31,7 @@ limitations under the License. 
<modules> <module>sentry-binding-hive</module> + <module>sentry-binding-hive-v2</module> <module>sentry-binding-solr</module> <module>sentry-binding-sqoop</module> </modules> http://git-wip-us.apache.org/repos/asf/incubator-sentry/blob/5e58f3fe/sentry-binding/sentry-binding-hive-v2/pom.xml ---------------------------------------------------------------------- diff --git a/sentry-binding/sentry-binding-hive-v2/pom.xml b/sentry-binding/sentry-binding-hive-v2/pom.xml new file mode 100644 index 0000000..ef6048c --- /dev/null +++ b/sentry-binding/sentry-binding-hive-v2/pom.xml @@ -0,0 +1,158 @@ +<?xml version="1.0"?> +<!-- +Licensed to the Apache Software Foundation (ASF) under one or more +contributor license agreements. See the NOTICE file distributed with +this work for additional information regarding copyright ownership. +The ASF licenses this file to You under the Apache License, Version 2.0 +(the "License"); you may not use this file except in compliance with +the License. You may obtain a copy of the License at + + http://www.apache.org/licenses/LICENSE-2.0 + +Unless required by applicable law or agreed to in writing, software +distributed under the License is distributed on an "AS IS" BASIS, +WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +See the License for the specific language governing permissions and +limitations under the License. 
+--> +<project xsi:schemaLocation="http://maven.apache.org/POM/4.0.0 http://maven.apache.org/xsd/maven-4.0.0.xsd" xmlns="http://maven.apache.org/POM/4.0.0" + xmlns:xsi="http://www.w3.org/2001/XMLSchema-instance"> + <modelVersion>4.0.0</modelVersion> + + <parent> + <groupId>org.apache.sentry</groupId> + <artifactId>sentry-binding</artifactId> + <version>1.7.0-incubating-SNAPSHOT</version> + </parent> + + <artifactId>sentry-binding-hive-v2</artifactId> + <name>Sentry Binding v2 for Hive</name> + + <dependencies> + <dependency> + <groupId>org.apache.sentry</groupId> + <artifactId>sentry-binding-hive</artifactId> + <exclusions> + <exclusion> + <groupId>org.apache.httpcomponents</groupId> + <artifactId>httpclient</artifactId> + </exclusion> + <exclusion> + <groupId>org.apache.httpcomponents</groupId> + <artifactId>httpcore</artifactId> + </exclusion> + </exclusions> + </dependency> + <dependency> + <groupId>org.apache.thrift</groupId> + <artifactId>libthrift</artifactId> + <exclusions> + <exclusion> + <groupId>org.apache.httpcomponents</groupId> + <artifactId>httpclient</artifactId> + </exclusion> + <exclusion> + <groupId>org.apache.httpcomponents</groupId> + <artifactId>httpcore</artifactId> + </exclusion> + </exclusions> + </dependency> + <dependency> + <groupId>org.apache.derby</groupId> + <artifactId>derby</artifactId> + </dependency> + <dependency> + <groupId>junit</groupId> + <artifactId>junit</artifactId> + <scope>test</scope> + </dependency> + <dependency> + <groupId>org.apache.hive</groupId> + <artifactId>hive-exec</artifactId> + <version>${hive-v2.version}</version> + <scope>provided</scope> + </dependency> + <dependency> + <groupId>org.apache.hive</groupId> + <artifactId>hive-service</artifactId> + <version>${hive-v2.version}</version> + <scope>provided</scope> + </dependency> + <dependency> + <groupId>org.apache.hive</groupId> + <artifactId>hive-metastore</artifactId> + <version>${hive-v2.version}</version> + <scope>provided</scope> + </dependency> + 
<dependency> + <groupId>org.apache.hive</groupId> + <artifactId>hive-shims</artifactId> + <version>${hive-v2.version}</version> + <scope>provided</scope> + </dependency> + <dependency> + <groupId>org.apache.hive</groupId> + <artifactId>hive-serde</artifactId> + <version>${hive-v2.version}</version> + <scope>provided</scope> + </dependency> + <dependency> + <groupId>org.apache.hive</groupId> + <artifactId>hive-common</artifactId> + <version>${hive-v2.version}</version> + <scope>provided</scope> + </dependency> + <dependency> + <groupId>org.apache.sentry</groupId> + <artifactId>sentry-core-common</artifactId> + </dependency> + <dependency> + <groupId>org.apache.sentry</groupId> + <artifactId>sentry-core-model-db</artifactId> + </dependency> + <dependency> + <groupId>org.apache.sentry</groupId> + <artifactId>sentry-provider-common</artifactId> + </dependency> + <!-- required for SentryGrantRevokeTask --> + <dependency> + <groupId>org.apache.sentry</groupId> + <artifactId>sentry-provider-db</artifactId> + <exclusions> + <exclusion> + <groupId>org.apache.hive</groupId> + <artifactId>hive-beeline</artifactId> + </exclusion> + <exclusion> + <groupId>org.apache.hive</groupId> + <artifactId>hive-metastore</artifactId> + </exclusion> + </exclusions> + </dependency> + <dependency> + <groupId>org.apache.sentry</groupId> + <artifactId>sentry-provider-file</artifactId> + </dependency> + <dependency> + <groupId>org.apache.sentry</groupId> + <artifactId>sentry-policy-db</artifactId> + </dependency> + <dependency> + <groupId>org.apache.hadoop</groupId> + <artifactId>hadoop-common</artifactId> + <scope>provided</scope> + </dependency> + <dependency> + <groupId>org.apache.hadoop</groupId> + <artifactId>hadoop-client</artifactId> + <version>${hadoop.version}</version> + <scope>provided</scope> + </dependency> + <dependency> + <groupId>org.mockito</groupId> + <artifactId>mockito-all</artifactId> + <scope>test</scope> + </dependency> + </dependencies> + +</project> 
http://git-wip-us.apache.org/repos/asf/incubator-sentry/blob/5e58f3fe/sentry-binding/sentry-binding-hive-v2/src/main/java/org/apache/sentry/binding/hive/v2/HiveAuthzBindingHookV2.java ---------------------------------------------------------------------- diff --git a/sentry-binding/sentry-binding-hive-v2/src/main/java/org/apache/sentry/binding/hive/v2/HiveAuthzBindingHookV2.java b/sentry-binding/sentry-binding-hive-v2/src/main/java/org/apache/sentry/binding/hive/v2/HiveAuthzBindingHookV2.java new file mode 100644 index 0000000..67cf266 --- /dev/null +++ b/sentry-binding/sentry-binding-hive-v2/src/main/java/org/apache/sentry/binding/hive/v2/HiveAuthzBindingHookV2.java @@ -0,0 +1,94 @@ +/* + * Licensed to the Apache Software Foundation (ASF) under one or more + * contributor license agreements. See the NOTICE file distributed with + * this work for additional information regarding copyright ownership. + * The ASF licenses this file to You under the Apache License, Version 2.0 + * (the "License"); you may not use this file except in compliance with + * the License. You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. 
+ */ +package org.apache.sentry.binding.hive.v2; + +import java.io.Serializable; +import java.util.List; + +import org.apache.hadoop.hive.conf.HiveConf; +import org.apache.hadoop.hive.ql.exec.DDLTask; +import org.apache.hadoop.hive.ql.exec.SentryFilterDDLTask; +import org.apache.hadoop.hive.ql.exec.Task; +import org.apache.hadoop.hive.ql.parse.ASTNode; +import org.apache.hadoop.hive.ql.parse.AbstractSemanticAnalyzerHook; +import org.apache.hadoop.hive.ql.parse.HiveSemanticAnalyzerHookContext; +import org.apache.hadoop.hive.ql.parse.SemanticException; +import org.apache.hadoop.hive.ql.plan.DDLWork; +import org.apache.hadoop.hive.ql.plan.HiveOperation; +import org.apache.hadoop.hive.ql.session.SessionState; +import org.apache.sentry.binding.hive.HiveAuthzBindingHook; +import org.apache.sentry.binding.hive.authz.HiveAuthzBinding; +import org.apache.sentry.binding.hive.conf.HiveAuthzConf; +import org.apache.sentry.core.common.Subject; +import org.slf4j.Logger; +import org.slf4j.LoggerFactory; + +public class HiveAuthzBindingHookV2 extends AbstractSemanticAnalyzerHook { + private static final Logger LOG = LoggerFactory + .getLogger(HiveAuthzBindingHookV2.class); + private final HiveAuthzBinding hiveAuthzBinding; + private final HiveAuthzConf authzConf; + + public HiveAuthzBindingHookV2() throws Exception { + SessionState session = SessionState.get(); + if(session == null) { + throw new IllegalStateException("Session has not been started"); + } + + HiveConf hiveConf = session.getConf(); + if(hiveConf == null) { + throw new IllegalStateException("Session HiveConf is null"); + } + authzConf = HiveAuthzBindingHook.loadAuthzConf(hiveConf); + hiveAuthzBinding = new HiveAuthzBinding(hiveConf, authzConf); + } + + @Override + public ASTNode preAnalyze(HiveSemanticAnalyzerHookContext context, ASTNode ast) + throws SemanticException { + return ast; + } + + /** + * Post analyze hook that invokes hive auth bindings + */ + @Override + public void 
postAnalyze(HiveSemanticAnalyzerHookContext context, + List<Task<? extends Serializable>> rootTasks) throws SemanticException { + HiveOperation stmtOperation = getCurrentHiveStmtOp(); + Subject subject = new Subject(context.getUserName()); + for (int i = 0; i < rootTasks.size(); i++) { + Task<? extends Serializable> task = rootTasks.get(i); + if (task instanceof DDLTask) { + SentryFilterDDLTask filterTask = + new SentryFilterDDLTask(hiveAuthzBinding, subject, stmtOperation); + filterTask.setWork((DDLWork)task.getWork()); + rootTasks.set(i, filterTask); + } + } + } + + private HiveOperation getCurrentHiveStmtOp() { + SessionState sessState = SessionState.get(); + if (sessState == null) { + LOG.warn("SessionState is null"); + return null; + } + return sessState.getHiveOperation(); + } + +} http://git-wip-us.apache.org/repos/asf/incubator-sentry/blob/5e58f3fe/sentry-binding/sentry-binding-hive-v2/src/main/java/org/apache/sentry/binding/hive/v2/HiveAuthzBindingSessionHookV2.java ---------------------------------------------------------------------- diff --git a/sentry-binding/sentry-binding-hive-v2/src/main/java/org/apache/sentry/binding/hive/v2/HiveAuthzBindingSessionHookV2.java b/sentry-binding/sentry-binding-hive-v2/src/main/java/org/apache/sentry/binding/hive/v2/HiveAuthzBindingSessionHookV2.java new file mode 100644 index 0000000..3fbb626 --- /dev/null +++ b/sentry-binding/sentry-binding-hive-v2/src/main/java/org/apache/sentry/binding/hive/v2/HiveAuthzBindingSessionHookV2.java @@ -0,0 +1,107 @@ +/* + * Licensed to the Apache Software Foundation (ASF) under one or more + * contributor license agreements. See the NOTICE file distributed with + * this work for additional information regarding copyright ownership. + * The ASF licenses this file to You under the Apache License, Version 2.0 + * (the "License"); you may not use this file except in compliance with + * the License. 
You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. + */ +package org.apache.sentry.binding.hive.v2; + +import org.apache.hadoop.hive.conf.HiveConf; +import org.apache.hadoop.hive.conf.HiveConf.ConfVars; +import org.apache.hadoop.mapreduce.JobContext; +import org.apache.hive.service.cli.HiveSQLException; +import org.apache.hive.service.cli.session.HiveSessionHookContext; +import org.apache.sentry.binding.hive.HiveAuthzBindingHook; +import org.apache.sentry.binding.hive.conf.HiveAuthzConf; + +import com.google.common.base.Joiner; + +/** + * The session hook is the Session Hook for HiveAuthzBindingSessionHookV2, The configuration of + * session will update for Hive Authz v2. 
+ */ +public class HiveAuthzBindingSessionHookV2 implements + org.apache.hive.service.cli.session.HiveSessionHook { + public static final String SCRATCH_DIR_PERMISSIONS = "700"; + public static final String SEMANTIC_HOOK = HiveAuthzBindingHookV2.class.getName(); + public static final String ACCESS_RESTRICT_LIST = Joiner.on(",").join( + ConfVars.SEMANTIC_ANALYZER_HOOK.varname, ConfVars.PREEXECHOOKS.varname, + ConfVars.SCRATCHDIR.varname, ConfVars.LOCALSCRATCHDIR.varname, + ConfVars.METASTOREURIS.varname, ConfVars.METASTORECONNECTURLKEY.varname, + ConfVars.HADOOPBIN.varname, ConfVars.HIVESESSIONID.varname, ConfVars.HIVEAUXJARS.varname, + ConfVars.HIVESTATSDBCONNECTIONSTRING.varname, ConfVars.SCRATCHDIRPERMISSION.varname, + ConfVars.HIVE_SECURITY_COMMAND_WHITELIST.varname, + ConfVars.HIVE_AUTHORIZATION_TASK_FACTORY.varname, + ConfVars.HIVE_CAPTURE_TRANSFORM_ENTITY.varname, HiveAuthzConf.HIVE_ACCESS_CONF_URL, + HiveAuthzConf.HIVE_SENTRY_CONF_URL, HiveAuthzConf.HIVE_ACCESS_SUBJECT_NAME, + HiveAuthzConf.HIVE_SENTRY_SUBJECT_NAME, HiveAuthzConf.SENTRY_ACTIVE_ROLE_SET); + + /** + * The session hook for sentry authorization that sets the required session level configuration 1. + * Setup the sentry hooks - semantic, exec and filter hooks 2. Set additional config properties + * required for auth set HIVE_EXTENDED_ENITITY_CAPTURE = true set SCRATCHDIRPERMISSION = 700 3. 
+ * Add sensitive config parameters to the config restrict list so that they can't be overridden by + * users + */ + @Override + public void run(HiveSessionHookContext sessionHookContext) throws HiveSQLException { + // Add sentry hooks to the session configuration + HiveConf sessionConf = sessionHookContext.getSessionConf(); + + appendConfVar(sessionConf, ConfVars.SEMANTIC_ANALYZER_HOOK.varname, SEMANTIC_HOOK); + // enable sentry authorization V2 + sessionConf.setBoolean(HiveConf.ConfVars.HIVE_AUTHORIZATION_ENABLED.varname, true); + sessionConf.setBoolean(HiveConf.ConfVars.HIVE_SERVER2_ENABLE_DOAS.varname, false); + sessionConf.set(HiveConf.ConfVars.HIVE_AUTHENTICATOR_MANAGER.varname, + "org.apache.hadoop.hive.ql.security.SessionStateUserAuthenticator"); + + // grant all privileges for table to its owner + sessionConf.setVar(ConfVars.HIVE_AUTHORIZATION_TABLE_OWNER_GRANTS, ""); + + // Enable compiler to capture transform URI referred in the query + sessionConf.setBoolVar(ConfVars.HIVE_CAPTURE_TRANSFORM_ENTITY, true); + + // set security command list + HiveAuthzConf authzConf = HiveAuthzBindingHook.loadAuthzConf(sessionConf); + String commandWhitelist = + authzConf.get(HiveAuthzConf.HIVE_SENTRY_SECURITY_COMMAND_WHITELIST, + HiveAuthzConf.HIVE_SENTRY_SECURITY_COMMAND_WHITELIST_DEFAULT); + sessionConf.setVar(ConfVars.HIVE_SECURITY_COMMAND_WHITELIST, commandWhitelist); + + // set additional configuration properties required for auth + sessionConf.setVar(ConfVars.SCRATCHDIRPERMISSION, SCRATCH_DIR_PERMISSIONS); + + // setup restrict list + sessionConf.addToRestrictList(ACCESS_RESTRICT_LIST); + + // set user name + sessionConf.set(HiveAuthzConf.HIVE_ACCESS_SUBJECT_NAME, sessionHookContext.getSessionUser()); + sessionConf.set(HiveAuthzConf.HIVE_SENTRY_SUBJECT_NAME, sessionHookContext.getSessionUser()); + + // Set MR ACLs to session user + appendConfVar(sessionConf, JobContext.JOB_ACL_VIEW_JOB, sessionHookContext.getSessionUser()); + appendConfVar(sessionConf, 
JobContext.JOB_ACL_MODIFY_JOB, sessionHookContext.getSessionUser()); + } + + // Setup given sentry hooks + private void appendConfVar(HiveConf sessionConf, String confVar, String sentryConfVal) { + String currentValue = sessionConf.get(confVar, "").trim(); + if (currentValue.isEmpty()) { + currentValue = sentryConfVal; + } else { + currentValue = sentryConfVal + "," + currentValue; + } + sessionConf.set(confVar, currentValue); + } + +} http://git-wip-us.apache.org/repos/asf/incubator-sentry/blob/5e58f3fe/sentry-binding/sentry-binding-hive-v2/src/main/java/org/apache/sentry/binding/hive/v2/SentryAuthorizerFactory.java ---------------------------------------------------------------------- diff --git a/sentry-binding/sentry-binding-hive-v2/src/main/java/org/apache/sentry/binding/hive/v2/SentryAuthorizerFactory.java b/sentry-binding/sentry-binding-hive-v2/src/main/java/org/apache/sentry/binding/hive/v2/SentryAuthorizerFactory.java new file mode 100644 index 0000000..4a5cbcf --- /dev/null +++ b/sentry-binding/sentry-binding-hive-v2/src/main/java/org/apache/sentry/binding/hive/v2/SentryAuthorizerFactory.java @@ -0,0 +1,164 @@ +/** + * Licensed to the Apache Software Foundation (ASF) under one + * or more contributor license agreements. See the NOTICE file + * distributed with this work for additional information + * regarding copyright ownership. The ASF licenses this file + * to you under the Apache License, Version 2.0 (the + * "License"); you may not use this file except in compliance + * with the License. You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. 
+ */ +package org.apache.sentry.binding.hive.v2; + +import org.apache.hadoop.hive.conf.HiveConf; +import org.apache.hadoop.hive.conf.HiveConf.ConfVars; +import org.apache.hadoop.hive.ql.security.HiveAuthenticationProvider; +import org.apache.hadoop.hive.ql.security.authorization.plugin.HiveAuthorizer; +import org.apache.hadoop.hive.ql.security.authorization.plugin.HiveAuthorizerFactory; +import org.apache.hadoop.hive.ql.security.authorization.plugin.HiveAuthzPluginException; +import org.apache.hadoop.hive.ql.security.authorization.plugin.HiveAuthzSessionContext; +import org.apache.hadoop.hive.ql.security.authorization.plugin.HiveAuthzSessionContext.CLIENT_TYPE; +import org.apache.hadoop.hive.ql.security.authorization.plugin.HiveMetastoreClientFactory; +import org.apache.sentry.binding.hive.HiveAuthzBindingHook; +import org.apache.sentry.binding.hive.conf.HiveAuthzConf; +import org.apache.sentry.binding.hive.v2.authorizer.DefaultSentryAccessController; +import org.apache.sentry.binding.hive.v2.authorizer.DefaultSentryValidator; +import org.apache.sentry.binding.hive.v2.authorizer.SentryHiveAccessController; +import org.apache.sentry.binding.hive.v2.authorizer.SentryHiveAuthorizationValidator; +import org.apache.sentry.binding.hive.v2.authorizer.SentryHiveAuthorizer; + +import com.google.common.annotations.VisibleForTesting; + +public class SentryAuthorizerFactory implements HiveAuthorizerFactory { + public static final String HIVE_SENTRY_ACCESS_CONTROLLER = + "hive.security.sentry.access.controller"; + public static final String HIVE_SENTRY_AUTHORIZATION_CONTROLLER = + "hive.security.sentry.authorization.controller"; + private HiveAuthzConf authzConf; + + @Override + public HiveAuthorizer createHiveAuthorizer(HiveMetastoreClientFactory metastoreClientFactory, + HiveConf conf, HiveAuthenticationProvider authenticator, HiveAuthzSessionContext ctx) + throws HiveAuthzPluginException { + HiveAuthzSessionContext sessionContext; + try { + this.authzConf = 
HiveAuthzBindingHook.loadAuthzConf(conf); + sessionContext = applyTestSettings(ctx, conf); + assertHiveCliAuthDisabled(conf, sessionContext); + } catch (Exception e) { + throw new HiveAuthzPluginException(e); + } + SentryHiveAccessController accessController = + getAccessController(conf, authzConf, authenticator, sessionContext); + SentryHiveAuthorizationValidator authzValidator = + getAuthzValidator(conf, authzConf, authenticator); + + return new SentryHiveAuthorizer(accessController, authzValidator); + } + + private HiveAuthzSessionContext applyTestSettings(HiveAuthzSessionContext ctx, HiveConf conf) { + if (conf.getBoolVar(ConfVars.HIVE_TEST_AUTHORIZATION_SQLSTD_HS2_MODE) + && ctx.getClientType() == CLIENT_TYPE.HIVECLI) { + // create new session ctx object with HS2 as client type + HiveAuthzSessionContext.Builder ctxBuilder = new HiveAuthzSessionContext.Builder(ctx); + ctxBuilder.setClientType(CLIENT_TYPE.HIVESERVER2); + return ctxBuilder.build(); + } + return ctx; + } + + private void assertHiveCliAuthDisabled(HiveConf conf, HiveAuthzSessionContext ctx) + throws HiveAuthzPluginException { + if (ctx.getClientType() == CLIENT_TYPE.HIVECLI + && conf.getBoolVar(ConfVars.HIVE_AUTHORIZATION_ENABLED)) { + throw new HiveAuthzPluginException( + "SQL standards based authorization should not be enabled from hive cli" + + "Instead the use of storage based authorization in hive metastore is reccomended. 
Set " + + ConfVars.HIVE_AUTHORIZATION_ENABLED.varname + "=false to disable authz within cli"); + } + } + + /** + * just for testing + */ + @VisibleForTesting + protected HiveAuthorizer createHiveAuthorizer(HiveMetastoreClientFactory metastoreClientFactory, + HiveConf conf, HiveAuthzConf authzConf, HiveAuthenticationProvider authenticator, + HiveAuthzSessionContext ctx) throws HiveAuthzPluginException { + SentryHiveAccessController accessController = + getAccessController(conf, authzConf, authenticator, ctx); + SentryHiveAuthorizationValidator authzValidator = + getAuthzValidator(conf, authzConf, authenticator); + + return new SentryHiveAuthorizer(accessController, authzValidator); + } + + /** + * Get instance of SentryAccessController from configuration + * Default return DefaultSentryAccessController + * + * @param conf + * @param authzConf + * @param hiveAuthzBinding + * @param authenticator + * @throws HiveAuthzPluginException + */ + public static SentryHiveAccessController getAccessController(HiveConf conf, + HiveAuthzConf authzConf, HiveAuthenticationProvider authenticator, + HiveAuthzSessionContext ctx) throws HiveAuthzPluginException { + Class<? 
extends SentryHiveAccessController> clazz = + conf.getClass(HIVE_SENTRY_ACCESS_CONTROLLER, DefaultSentryAccessController.class, + SentryHiveAccessController.class); + + if (clazz == null) { + // should not happen as default value is set + throw new HiveAuthzPluginException("Configuration value " + HIVE_SENTRY_ACCESS_CONTROLLER + + " is not set to valid SentryAccessController subclass"); + } + + try { + return new DefaultSentryAccessController(conf, authzConf, authenticator, ctx); + } catch (Exception e) { + throw new HiveAuthzPluginException(e); + } + + } + + /** + * Get instance of SentryAuthorizationValidator from configuration + * Default return DefaultSentryAuthorizationValidator + * + * @param conf + * @param authzConf + * @param authenticator + * @throws HiveAuthzPluginException + */ + public static SentryHiveAuthorizationValidator getAuthzValidator(HiveConf conf, + HiveAuthzConf authzConf, HiveAuthenticationProvider authenticator) + throws HiveAuthzPluginException { + Class<? extends SentryHiveAuthorizationValidator> clazz = + conf.getClass(HIVE_SENTRY_AUTHORIZATION_CONTROLLER, DefaultSentryValidator.class, + SentryHiveAuthorizationValidator.class); + + if (clazz == null) { + // should not happen as default value is set + throw new HiveAuthzPluginException("Configuration value " + + HIVE_SENTRY_AUTHORIZATION_CONTROLLER + + " is not set to valid SentryAuthorizationValidator subclass"); + } + + try { + return new DefaultSentryValidator(conf, authzConf, authenticator); + } catch (Exception e) { + throw new HiveAuthzPluginException(e); + } + + } +} http://git-wip-us.apache.org/repos/asf/incubator-sentry/blob/5e58f3fe/sentry-binding/sentry-binding-hive-v2/src/main/java/org/apache/sentry/binding/hive/v2/SentryHiveAuthorizationTaskFactoryImplV2.java ---------------------------------------------------------------------- diff --git a/sentry-binding/sentry-binding-hive-v2/src/main/java/org/apache/sentry/binding/hive/v2/SentryHiveAuthorizationTaskFactoryImplV2.java 
b/sentry-binding/sentry-binding-hive-v2/src/main/java/org/apache/sentry/binding/hive/v2/SentryHiveAuthorizationTaskFactoryImplV2.java new file mode 100644 index 0000000..2d4bf64 --- /dev/null +++ b/sentry-binding/sentry-binding-hive-v2/src/main/java/org/apache/sentry/binding/hive/v2/SentryHiveAuthorizationTaskFactoryImplV2.java @@ -0,0 +1,64 @@ +/** + * Licensed to the Apache Software Foundation (ASF) under one or more contributor license + * agreements. See the NOTICE file distributed with this work for additional information regarding + * copyright ownership. The ASF licenses this file to you under the Apache License, Version 2.0 (the + * "License"); you may not use this file except in compliance with the License. You may obtain a + * copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software distributed under the License + * is distributed on an "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express + * or implied. See the License for the specific language governing permissions and limitations under + * the License. 
+ */ +package org.apache.sentry.binding.hive.v2; + +import org.apache.hadoop.hive.conf.HiveConf; +import org.apache.hadoop.hive.ql.exec.SentryHivePrivilegeObjectDesc; +import org.apache.hadoop.hive.ql.metadata.Hive; +import org.apache.hadoop.hive.ql.parse.ASTNode; +import org.apache.hadoop.hive.ql.parse.BaseSemanticAnalyzer; +import org.apache.hadoop.hive.ql.parse.DDLSemanticAnalyzer; +import org.apache.hadoop.hive.ql.parse.HiveParser; +import org.apache.hadoop.hive.ql.parse.SemanticException; +import org.apache.hadoop.hive.ql.parse.authorization.HiveAuthorizationTaskFactoryImpl; +import org.apache.hadoop.hive.ql.plan.PrivilegeObjectDesc; + +public class SentryHiveAuthorizationTaskFactoryImplV2 extends HiveAuthorizationTaskFactoryImpl { + + public SentryHiveAuthorizationTaskFactoryImplV2(HiveConf conf, Hive db) { + super(conf, db); + } + + @Override + protected PrivilegeObjectDesc parsePrivObject(ASTNode ast) throws SemanticException { + SentryHivePrivilegeObjectDesc subject = new SentryHivePrivilegeObjectDesc(); + ASTNode child = (ASTNode) ast.getChild(0); + ASTNode gchild = (ASTNode) child.getChild(0); + if (child.getType() == HiveParser.TOK_TABLE_TYPE) { + subject.setTable(true); + String[] qualified = BaseSemanticAnalyzer.getQualifiedTableName(gchild); + subject.setObject(BaseSemanticAnalyzer.getDotName(qualified)); + } else if (child.getType() == HiveParser.TOK_URI_TYPE) { + subject.setUri(true); + subject.setObject(gchild.getText()); + } else if (child.getType() == HiveParser.TOK_SERVER_TYPE) { + subject.setServer(true); + subject.setObject(gchild.getText()); + } else { + subject.setTable(false); + subject.setObject(BaseSemanticAnalyzer.unescapeIdentifier(gchild.getText())); + } + // if partition spec node is present, set partition spec + for (int i = 1; i < child.getChildCount(); i++) { + gchild = (ASTNode) child.getChild(i); + if (gchild.getType() == HiveParser.TOK_PARTSPEC) { + subject.setPartSpec(DDLSemanticAnalyzer.getPartSpec(gchild)); + } else if 
(gchild.getType() == HiveParser.TOK_TABCOLNAME) { + subject.setColumns(BaseSemanticAnalyzer.getColumnNames(gchild)); + } + } + return subject; + } +} http://git-wip-us.apache.org/repos/asf/incubator-sentry/blob/5e58f3fe/sentry-binding/sentry-binding-hive-v2/src/main/java/org/apache/sentry/binding/hive/v2/SentryHivePrivilegeObject.java ---------------------------------------------------------------------- diff --git a/sentry-binding/sentry-binding-hive-v2/src/main/java/org/apache/sentry/binding/hive/v2/SentryHivePrivilegeObject.java b/sentry-binding/sentry-binding-hive-v2/src/main/java/org/apache/sentry/binding/hive/v2/SentryHivePrivilegeObject.java new file mode 100644 index 0000000..6277385 --- /dev/null +++ b/sentry-binding/sentry-binding-hive-v2/src/main/java/org/apache/sentry/binding/hive/v2/SentryHivePrivilegeObject.java @@ -0,0 +1,32 @@ +/* + * Licensed to the Apache Software Foundation (ASF) under one or more contributor license + * agreements. See the NOTICE file distributed with this work for additional information regarding + * copyright ownership. The ASF licenses this file to You under the Apache License, Version 2.0 (the + * "License"); you may not use this file except in compliance with the License. You may obtain a + * copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software distributed under the License + * is distributed on an "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express + * or implied. See the License for the specific language governing permissions and limitations under + * the License. 
package org.apache.sentry.binding.hive.v2;

import org.apache.hadoop.hive.ql.security.authorization.plugin.HivePrivilegeObject;

/**
 * Sentry extension of Hive's {@link HivePrivilegeObject} that can additionally
 * represent Sentry-only scopes (server and URI).
 *
 * The constructor passes a null database name to the superclass because server
 * and URI objects are not database-scoped.
 */
public class SentryHivePrivilegeObject extends HivePrivilegeObject {

  // Package-private flags; NOTE(review): never assigned inside this class —
  // presumably mutated directly by same-package callers; confirm before changing
  // visibility or making them final.
  boolean isServer = false;

  boolean isUri = false;

  // NOTE(review): this field is not set by the constructor (the name is stored in
  // the superclass); it stays "" unless assigned externally — verify it is needed.
  String objectName = "";

  public SentryHivePrivilegeObject(HivePrivilegeObjectType type, String objectName) {
    super(type, null, objectName);
  }

}
package org.apache.sentry.binding.hive.v2.authorizer;

import java.util.ArrayList;
import java.util.HashSet;
import java.util.List;
import java.util.Set;

import org.apache.hadoop.hive.SentryHiveConstants;
import org.apache.hadoop.hive.conf.HiveConf;
import org.apache.hadoop.hive.conf.HiveConf.ConfVars;
import org.apache.hadoop.hive.ql.metadata.AuthorizationException;
import org.apache.hadoop.hive.ql.plan.HiveOperation;
import org.apache.hadoop.hive.ql.security.HiveAuthenticationProvider;
import org.apache.hadoop.hive.ql.security.authorization.plugin.HiveAccessControlException;
import org.apache.hadoop.hive.ql.security.authorization.plugin.HiveAuthzPluginException;
import org.apache.hadoop.hive.ql.security.authorization.plugin.HiveAuthzSessionContext;
import org.apache.hadoop.hive.ql.security.authorization.plugin.HiveAuthzSessionContext.CLIENT_TYPE;
import org.apache.hadoop.hive.ql.security.authorization.plugin.HivePrincipal;
import org.apache.hadoop.hive.ql.security.authorization.plugin.HivePrincipal.HivePrincipalType;
import org.apache.hadoop.hive.ql.security.authorization.plugin.HivePrivilege;
import org.apache.hadoop.hive.ql.security.authorization.plugin.HivePrivilegeInfo;
import org.apache.hadoop.hive.ql.security.authorization.plugin.HivePrivilegeObject;
import org.apache.hadoop.hive.ql.security.authorization.plugin.HiveRoleGrant;
import org.apache.hadoop.hive.ql.session.SessionState;
import org.apache.sentry.SentryUserException;
import org.apache.sentry.binding.hive.SentryOnFailureHookContext;
import org.apache.sentry.binding.hive.SentryOnFailureHookContextImpl;
import org.apache.sentry.binding.hive.authz.HiveAuthzBinding;
import org.apache.sentry.binding.hive.authz.HiveAuthzBinding.HiveHook;
import org.apache.sentry.binding.hive.conf.HiveAuthzConf;
import org.apache.sentry.binding.hive.conf.HiveAuthzConf.AuthzConfVars;
import org.apache.sentry.binding.hive.v2.util.SentryAuthorizerUtil;
import org.apache.sentry.core.common.ActiveRoleSet;
import org.apache.sentry.core.common.Authorizable;
import org.apache.sentry.core.model.db.AccessConstants;
import org.apache.sentry.core.model.db.DBModelAuthorizable;
import org.apache.sentry.core.model.db.Server;
import org.apache.sentry.provider.db.SentryAccessDeniedException;
import org.apache.sentry.provider.db.service.thrift.SentryPolicyServiceClient;
import org.apache.sentry.provider.db.service.thrift.TSentryPrivilege;
import org.apache.sentry.provider.db.service.thrift.TSentryRole;
import org.apache.sentry.service.thrift.SentryServiceClientFactory;
import org.slf4j.Logger;
import org.slf4j.LoggerFactory;

import com.google.common.base.Preconditions;
import com.google.common.collect.Sets;

/**
 * Default Sentry implementation of Hive V2 access control. Role and privilege
 * DDL (CREATE/DROP ROLE, GRANT/REVOKE, SHOW GRANT, SET ROLE, ...) is delegated
 * to the Sentry policy service via a short-lived {@link SentryPolicyServiceClient}
 * that is opened per call and closed in a finally block.
 *
 * NOTE(review): {@code sentryClient} and {@code hiveAuthzBinding} are mutable
 * instance fields shared across calls — this class does not look thread-safe;
 * confirm each authorizer instance is session-confined.
 */
public class DefaultSentryAccessController extends SentryHiveAccessController {

  public static final Logger LOG = LoggerFactory.getLogger(DefaultSentryAccessController.class);

  public static final String REQUIRED_AUTHZ_SERVER_NAME = "Config "
      + AuthzConfVars.AUTHZ_SERVER_NAME.getVar() + " is required";

  private HiveAuthenticationProvider authenticator;
  private String serverName;
  private HiveConf conf;
  private HiveAuthzConf authzConf;
  private HiveAuthzSessionContext ctx;

  private HiveHook hiveHook;
  private HiveAuthzBinding hiveAuthzBinding;
  protected SentryPolicyServiceClient sentryClient;


  /** Creates a controller bound to the HiveServer2 hook. */
  public DefaultSentryAccessController(HiveConf conf, HiveAuthzConf authzConf,
      HiveAuthenticationProvider authenticator, HiveAuthzSessionContext ctx) throws Exception {
    initilize(conf, authzConf, authenticator, ctx);
    this.hiveHook = HiveHook.HiveServer2;
  }

  /** Creates a controller bound to an explicit hook (e.g. metastore). */
  public DefaultSentryAccessController(HiveHook hiveHook, HiveConf conf, HiveAuthzConf authzConf,
      HiveAuthenticationProvider authenticator, HiveAuthzSessionContext ctx) throws Exception {
    initilize(conf, authzConf, authenticator, ctx);
    this.hiveHook = hiveHook;
  }

  /**
   * Validates and stores configuration, authenticator and session context.
   * The Sentry server name must be present in the authz configuration.
   */
  protected void initilize(HiveConf conf, HiveAuthzConf authzConf,
      HiveAuthenticationProvider authenticator, HiveAuthzSessionContext ctx) throws Exception {
    Preconditions.checkNotNull(conf, "HiveConf cannot be null");
    Preconditions.checkNotNull(authzConf, "HiveAuthzConf cannot be null");
    Preconditions.checkNotNull(authenticator, "Hive authenticator provider cannot be null");
    Preconditions.checkNotNull(ctx, "HiveAuthzSessionContext cannot be null");

    this.conf = conf;
    this.authzConf = authzConf;
    this.authenticator = authenticator;
    this.ctx = ctx;
    this.serverName =
        Preconditions.checkNotNull(authzConf.get(AuthzConfVars.AUTHZ_SERVER_NAME.getVar()),
            REQUIRED_AUTHZ_SERVER_NAME);
  }

  /**
   * Creates a role via Sentry. Reserved role names are rejected before the
   * service call; access denials run the on-failure hooks.
   */
  @Override
  public void createRole(String roleName, HivePrincipal adminGrantor)
      throws HiveAuthzPluginException, HiveAccessControlException {
    if (AccessConstants.RESERVED_ROLE_NAMES.contains(roleName.toUpperCase())) {
      String msg =
          "Roles cannot be one of the reserved roles: " + AccessConstants.RESERVED_ROLE_NAMES;
      throw new HiveAccessControlException(msg);
    }
    try {
      sentryClient = getSentryClient();
      sentryClient.createRole(authenticator.getUserName(), roleName);
    } catch (SentryAccessDeniedException e) {
      HiveOperation hiveOp = HiveOperation.CREATEROLE;
      executeOnFailureHooks(hiveOp, e);
    } catch (SentryUserException e) {
      String msg = "Error occurred when Sentry client creating role: " + e.getMessage();
      executeOnErrorHooks(msg, e);
    } finally {
      if (sentryClient != null) {
        sentryClient.close();
      }
    }
  }

  /**
   * Drops a role via Sentry. Reserved role names are rejected before the
   * service call; access denials run the on-failure hooks.
   */
  @Override
  public void dropRole(String roleName) throws HiveAuthzPluginException, HiveAccessControlException {
    if (AccessConstants.RESERVED_ROLE_NAMES.contains(roleName.toUpperCase())) {
      String msg =
          "Roles cannot be one of the reserved roles: " + AccessConstants.RESERVED_ROLE_NAMES;
      throw new HiveAccessControlException(msg);
    }
    try {
      sentryClient = getSentryClient();
      sentryClient.dropRole(authenticator.getUserName(), roleName);
    } catch (SentryAccessDeniedException e) {
      HiveOperation hiveOp = HiveOperation.DROPROLE;
      executeOnFailureHooks(hiveOp, e);
    } catch (SentryUserException e) {
      // FIX: previously said "creating role" (copy-paste from createRole).
      String msg = "Error occurred when Sentry client dropping role: " + e.getMessage();
      executeOnErrorHooks(msg, e);
    } finally {
      if (sentryClient != null) {
        sentryClient.close();
      }
    }
  }

  /** Returns all role names visible to the current user (empty list on denial path). */
  @Override
  public List<String> getAllRoles() throws HiveAccessControlException, HiveAuthzPluginException {
    List<String> roles = new ArrayList<String>();
    try {
      sentryClient = getSentryClient();
      roles = convert2RoleList(sentryClient.listRoles(authenticator.getUserName()));
    } catch (SentryAccessDeniedException e) {
      HiveOperation hiveOp = HiveOperation.SHOW_ROLES;
      executeOnFailureHooks(hiveOp, e);
    } catch (SentryUserException e) {
      String msg = "Error when sentryClient listRoles: " + e.getMessage();
      executeOnErrorHooks(msg, e);
    } finally {
      if (sentryClient != null) {
        sentryClient.close();
      }
    }
    return roles;
  }

  /** Grants privileges on an object to principals (delegates with isGrant=true). */
  @Override
  public void grantPrivileges(List<HivePrincipal> hivePrincipals,
      List<HivePrivilege> hivePrivileges, HivePrivilegeObject hivePrivObject,
      HivePrincipal grantorPrincipal, boolean grantOption) throws HiveAuthzPluginException,
      HiveAccessControlException {
    grantOrRevokePrivlegeOnRole(hivePrincipals, hivePrivileges, hivePrivObject, grantorPrincipal,
        grantOption, true);
  }

  /** Revokes privileges on an object from principals (delegates with isGrant=false). */
  @Override
  public void revokePrivileges(List<HivePrincipal> hivePrincipals,
      List<HivePrivilege> hivePrivileges, HivePrivilegeObject hivePrivObject,
      HivePrincipal grantorPrincipal, boolean grantOption) throws HiveAuthzPluginException,
      HiveAccessControlException {
    grantOrRevokePrivlegeOnRole(hivePrincipals, hivePrivileges, hivePrivObject, grantorPrincipal,
        grantOption, false);
  }

  /** Grants roles to group principals (delegates with isGrant=true). */
  @Override
  public void grantRole(List<HivePrincipal> hivePrincipals, List<String> roles,
      boolean grantOption, HivePrincipal grantorPrinc) throws HiveAuthzPluginException,
      HiveAccessControlException {
    grantOrRevokeRoleOnGroup(hivePrincipals, roles, grantOption, grantorPrinc, true);
  }

  /** Revokes roles from group principals (delegates with isGrant=false). */
  @Override
  public void revokeRole(List<HivePrincipal> hivePrincipals, List<String> roles,
      boolean grantOption, HivePrincipal grantorPrinc) throws HiveAuthzPluginException,
      HiveAccessControlException {
    grantOrRevokeRoleOnGroup(hivePrincipals, roles, grantOption, grantorPrinc, false);
  }


  /**
   * Shows privileges granted to a ROLE principal on an object hierarchy.
   * Only ROLE principals are supported by Sentry.
   */
  @Override
  public List<HivePrivilegeInfo> showPrivileges(HivePrincipal principal, HivePrivilegeObject privObj)
      throws HiveAuthzPluginException, HiveAccessControlException {
    if (principal.getType() != HivePrincipalType.ROLE) {
      String msg =
          SentryHiveConstants.GRANT_REVOKE_NOT_SUPPORTED_FOR_PRINCIPAL + principal.getType();
      throw new HiveAuthzPluginException(msg);
    }
    List<HivePrivilegeInfo> infoList = new ArrayList<HivePrivilegeInfo>();
    try {
      sentryClient = getSentryClient();
      List<List<DBModelAuthorizable>> authorizables =
          SentryAuthorizerUtil.getAuthzHierarchy(new Server(serverName), privObj);
      Set<TSentryPrivilege> tPrivilges = new HashSet<TSentryPrivilege>();
      if (authorizables != null && !authorizables.isEmpty()) {
        for (List<? extends Authorizable> authorizable : authorizables) {
          tPrivilges.addAll(sentryClient.listPrivilegesByRoleName(authenticator.getUserName(),
              principal.getName(), authorizable));
        }
      } else {
        // No hierarchy: list every privilege of the role.
        tPrivilges.addAll(sentryClient.listPrivilegesByRoleName(authenticator.getUserName(),
            principal.getName(), null));
      }

      if (tPrivilges != null && !tPrivilges.isEmpty()) {
        for (TSentryPrivilege privilege : tPrivilges) {
          infoList.add(SentryAuthorizerUtil.convert2HivePrivilegeInfo(privilege, principal));
        }
      }
    } catch (SentryAccessDeniedException e) {
      HiveOperation hiveOp = HiveOperation.SHOW_GRANT;
      executeOnFailureHooks(hiveOp, e);
    } catch (SentryUserException e) {
      String msg = "Error when sentryClient listPrivilegesByRoleName: " + e.getMessage();
      executeOnErrorHooks(msg, e);
    } finally {
      if (sentryClient != null) {
        sentryClient.close();
      }
    }
    return infoList;
  }

  /**
   * Sets the session's active role set via a fresh {@link HiveAuthzBinding}.
   * NOTE(review): denials are reported as GRANT_ROLE — presumably because no
   * dedicated SET ROLE HiveOperation exists here; confirm.
   */
  @Override
  public void setCurrentRole(String roleName) throws HiveAccessControlException,
      HiveAuthzPluginException {
    try {
      sentryClient = getSentryClient();
      hiveAuthzBinding = new HiveAuthzBinding(hiveHook, conf, authzConf);
      hiveAuthzBinding.setActiveRoleSet(roleName,
          sentryClient.listUserRoles(authenticator.getUserName()));
    } catch (SentryAccessDeniedException e) {
      HiveOperation hiveOp = HiveOperation.GRANT_ROLE;
      executeOnFailureHooks(hiveOp, e);
    } catch (Exception e) {
      String msg = "Error when sentryClient setCurrentRole: " + e.getMessage();
      executeOnErrorHooks(msg, e);
    } finally {
      if (sentryClient != null) {
        sentryClient.close();
      }
      if (hiveAuthzBinding != null) {
        hiveAuthzBinding.close();
      }
    }
  }

  /**
   * Returns the names of the currently active roles: all of the user's roles
   * when the active set is ALL, otherwise the explicitly activated subset.
   */
  @Override
  public List<String> getCurrentRoleNames() throws HiveAuthzPluginException {
    List<String> roles = new ArrayList<String>();
    try {
      sentryClient = getSentryClient();
      hiveAuthzBinding = new HiveAuthzBinding(hiveHook, conf, authzConf);
      ActiveRoleSet roleSet = hiveAuthzBinding.getActiveRoleSet();
      if (roleSet.isAll()) {
        roles = convert2RoleList(sentryClient.listUserRoles(authenticator.getUserName()));
      } else {
        roles.addAll(roleSet.getRoles());
      }
    } catch (Exception e) {
      String msg = "Error when sentryClient listUserRoles: " + e.getMessage();
      executeOnErrorHooks(msg, e);
    } finally {
      if (sentryClient != null) {
        sentryClient.close();
      }
      if (hiveAuthzBinding != null) {
        hiveAuthzBinding.close();
      }
    }
    return roles;
  }

  /** SHOW ROLE PRINCIPALS is not supported by Sentry yet. */
  @Override
  public List<HiveRoleGrant> getPrincipalGrantInfoForRole(String roleName)
      throws HiveAuthzPluginException {
    // TODO we will support in future
    throw new HiveAuthzPluginException("Not supported of SHOW_ROLE_PRINCIPALS in Sentry");
  }

  /** Lists roles granted to a GROUP principal; other principal types are rejected. */
  @Override
  public List<HiveRoleGrant> getRoleGrantInfoForPrincipal(HivePrincipal principal)
      throws HiveAccessControlException, HiveAuthzPluginException {
    List<HiveRoleGrant> hiveRoleGrants = new ArrayList<HiveRoleGrant>();
    try {
      sentryClient = getSentryClient();

      if (principal.getType() != HivePrincipalType.GROUP) {
        String msg =
            SentryHiveConstants.GRANT_REVOKE_NOT_SUPPORTED_FOR_PRINCIPAL + principal.getType();
        throw new HiveAuthzPluginException(msg);
      }
      Set<TSentryRole> roles =
          sentryClient.listRolesByGroupName(authenticator.getUserName(), principal.getName());
      if (roles != null && !roles.isEmpty()) {
        for (TSentryRole role : roles) {
          hiveRoleGrants.add(SentryAuthorizerUtil.convert2HiveRoleGrant(role));
        }
      }
    } catch (SentryAccessDeniedException e) {
      HiveOperation hiveOp = HiveOperation.SHOW_ROLE_GRANT;
      executeOnFailureHooks(hiveOp, e);
    } catch (SentryUserException e) {
      String msg = "Error when sentryClient listRolesByGroupName: " + e.getMessage();
      executeOnErrorHooks(msg, e);
    } finally {
      if (sentryClient != null) {
        sentryClient.close();
      }
    }
    return hiveRoleGrants;
  }

  /**
   * Rejects any session that is not a HiveServer2 session with authorization
   * enabled; Sentry's V2 binding only supports HiveServer2.
   */
  @Override
  public void applyAuthorizationConfigPolicy(HiveConf hiveConf) throws HiveAuthzPluginException {
    // Apply rest of the configuration only to HiveServer2
    if (ctx.getClientType() != CLIENT_TYPE.HIVESERVER2
        || !hiveConf.getBoolVar(ConfVars.HIVE_AUTHORIZATION_ENABLED)) {
      throw new HiveAuthzPluginException("Sentry just support for hiveserver2");
    }
  }

  /**
   * Grant(isGrant is true) or revoke(isGrant is false) db privileges to/from role via sentryClient,
   * which is a instance of SentryPolicyServiceClientV2
   *
   * @param hivePrincipals  target principals; must all be ROLEs
   * @param hivePrivileges  privileges (action + optional column list)
   * @param hivePrivObject  the object (server/db/table/URI) being granted on
   * @param grantorPrincipal unused for grant routing; grantor is the authenticated user
   * @param grantOption     WITH GRANT OPTION flag (only forwarded on grant)
   * @param isGrant         true to grant, false to revoke
   */
  private void grantOrRevokePrivlegeOnRole(List<HivePrincipal> hivePrincipals,
      List<HivePrivilege> hivePrivileges, HivePrivilegeObject hivePrivObject,
      HivePrincipal grantorPrincipal, boolean grantOption, boolean isGrant)
      throws HiveAuthzPluginException, HiveAccessControlException {
    try {
      sentryClient = getSentryClient();

      for (HivePrincipal principal : hivePrincipals) {
        // Sentry only support grant privilege to ROLE
        if (principal.getType() != HivePrincipalType.ROLE) {
          String msg =
              SentryHiveConstants.GRANT_REVOKE_NOT_SUPPORTED_FOR_PRINCIPAL + principal.getType();
          throw new HiveAuthzPluginException(msg);
        }
        for (HivePrivilege privilege : hivePrivileges) {
          String grantorName = authenticator.getUserName();
          String roleName = principal.getName();
          String action = SentryAuthorizerUtil.convert2SentryAction(privilege);
          List<String> columnNames = privilege.getColumns();
          // grantOp stays null on revoke: the grant-option flag is only
          // meaningful when granting.
          Boolean grantOp = null;
          if (isGrant) {
            grantOp = grantOption;
          }

          switch (hivePrivObject.getType()) {
            case GLOBAL:
              if (isGrant) {
                sentryClient.grantServerPrivilege(grantorName, roleName,
                    hivePrivObject.getObjectName(), action, grantOp);
              } else {
                sentryClient.revokeServerPrivilege(grantorName, roleName,
                    hivePrivObject.getObjectName(), action, grantOp);
              }
              break;
            case DATABASE:
              if (isGrant) {
                sentryClient.grantDatabasePrivilege(grantorName, roleName, serverName,
                    hivePrivObject.getDbname(), action, grantOp);
              } else {
                sentryClient.revokeDatabasePrivilege(grantorName, roleName, serverName,
                    hivePrivObject.getDbname(), action, grantOp);
              }
              break;
            case TABLE_OR_VIEW:
              // For column level security
              if (columnNames != null && !columnNames.isEmpty()) {
                // INSERT/ALL are not meaningful at column granularity.
                if (action.equalsIgnoreCase(AccessConstants.INSERT)
                    || action.equalsIgnoreCase(AccessConstants.ALL)) {
                  String msg =
                      SentryHiveConstants.PRIVILEGE_NOT_SUPPORTED + privilege.getName()
                          + " on Column";
                  throw new HiveAuthzPluginException(msg);
                }
                if (isGrant) {
                  sentryClient.grantColumnsPrivileges(grantorName, roleName, serverName,
                      hivePrivObject.getDbname(), hivePrivObject.getObjectName(), columnNames,
                      action, grantOp);
                } else {
                  sentryClient.revokeColumnsPrivilege(grantorName, roleName, serverName,
                      hivePrivObject.getDbname(), hivePrivObject.getObjectName(), columnNames,
                      action, grantOp);
                }
              } else {
                if (isGrant) {
                  sentryClient.grantTablePrivilege(grantorName, roleName, serverName,
                      hivePrivObject.getDbname(), hivePrivObject.getObjectName(), action, grantOp);
                } else {
                  sentryClient.revokeTablePrivilege(grantorName, roleName, serverName,
                      hivePrivObject.getDbname(), hivePrivObject.getObjectName(), action, grantOp);
                }
              }
              break;
            case LOCAL_URI:
            case DFS_URI:
              // Strip quoting that survives from the parsed statement.
              String uRIString = hivePrivObject.getObjectName().replace("'", "").replace("\"", "");
              if (isGrant) {
                sentryClient.grantURIPrivilege(grantorName, roleName, serverName,
                    uRIString, grantOp);
              } else {
                sentryClient.revokeURIPrivilege(grantorName, roleName, serverName,
                    uRIString, grantOp);
              }
              break;
            case FUNCTION:
            case PARTITION:
            case COLUMN:
            case COMMAND_PARAMS:
              // not support these type
              throw new HiveAuthzPluginException(hivePrivObject.getType().name()
                  + " are not supported in sentry");
            default:
              break;
          }
        }
      }
    } catch (SentryAccessDeniedException e) {
      HiveOperation hiveOp =
          isGrant ? HiveOperation.GRANT_PRIVILEGE : HiveOperation.REVOKE_PRIVILEGE;
      executeOnFailureHooks(hiveOp, e);
    } catch (SentryUserException e) {
      String msg = "Error when sentryClient grant/revoke privilege:" + e.getMessage();
      executeOnErrorHooks(msg, e);
    } finally {
      if (sentryClient != null) {
        sentryClient.close();
      }
    }
  }

  /**
   * Grant(isGrant is true) or revoke(isGrant is false) role to/from group via sentryClient, which
   * is a instance of SentryPolicyServiceClientV2
   *
   * @param hivePrincipals target principals; must all be GROUPs
   * @param roles          role names to (un)assign
   * @param grantOption    unused here (role-to-group has no grant option)
   * @param grantorPrinc   acts as the requesting user for the Sentry call
   * @param isGrant        true to grant, false to revoke
   */
  private void grantOrRevokeRoleOnGroup(List<HivePrincipal> hivePrincipals, List<String> roles,
      boolean grantOption, HivePrincipal grantorPrinc, boolean isGrant)
      throws HiveAuthzPluginException, HiveAccessControlException {
    try {
      sentryClient = getSentryClient();
      // get principals
      Set<String> groups = Sets.newHashSet();
      for (HivePrincipal principal : hivePrincipals) {
        if (principal.getType() != HivePrincipalType.GROUP) {
          String msg =
              SentryHiveConstants.GRANT_REVOKE_NOT_SUPPORTED_FOR_PRINCIPAL + principal.getType();
          throw new HiveAuthzPluginException(msg);
        }
        groups.add(principal.getName());
      }

      // grant/revoke role to/from principals
      for (String roleName : roles) {
        if (isGrant) {
          sentryClient.grantRoleToGroups(grantorPrinc.getName(), roleName, groups);
        } else {
          sentryClient.revokeRoleFromGroups(grantorPrinc.getName(), roleName, groups);
        }
      }

    } catch (SentryAccessDeniedException e) {
      HiveOperation hiveOp = isGrant ? HiveOperation.GRANT_ROLE : HiveOperation.REVOKE_ROLE;
      executeOnFailureHooks(hiveOp, e);
    } catch (SentryUserException e) {
      String msg = "Error when sentryClient grant/revoke role:" + e.getMessage();
      executeOnErrorHooks(msg, e);
    } finally {
      if (sentryClient != null) {
        sentryClient.close();
      }
    }
  }

  /**
   * Runs the configured on-failure hooks for an access denial, then rethrows
   * it as a {@link HiveAccessControlException}.
   */
  private void executeOnFailureHooks(HiveOperation hiveOp, SentryAccessDeniedException e)
      throws HiveAccessControlException {
    SentryOnFailureHookContext hookCtx =
        new SentryOnFailureHookContextImpl(SessionState.get().getCmd(), null, null, hiveOp, null,
            null, null, null, authenticator.getUserName(), null, new AuthorizationException(e),
            authzConf);
    SentryAuthorizerUtil.executeOnFailureHooks(hookCtx, authzConf);
    throw new HiveAccessControlException(e.getMessage(), e);
  }

  /** Logs the error (with cause) and rethrows as a plugin exception. */
  private void executeOnErrorHooks(String msg, Exception e) throws HiveAuthzPluginException {
    LOG.error(msg, e);
    throw new HiveAuthzPluginException(msg, e);
  }

  /** Converts a set of thrift roles into a list of role names (null-safe). */
  private List<String> convert2RoleList(Set<TSentryRole> roleSet) {
    List<String> roles = new ArrayList<String>();
    if (roleSet != null && !roleSet.isEmpty()) {
      for (TSentryRole tRole : roleSet) {
        roles.add(tRole.getRoleName());
      }
    }
    return roles;
  }

  /** Opens a new Sentry policy service client; callers must close it. */
  private SentryPolicyServiceClient getSentryClient() throws HiveAuthzPluginException {
    try {
      Preconditions.checkNotNull(authzConf, "HiveAuthConf cannot be null");
      return SentryServiceClientFactory.create(authzConf);
    } catch (Exception e) {
      String msg = "Error occurred when creating Sentry client: " + e.getMessage();
      throw new HiveAuthzPluginException(msg, e);
    }
  }


}
a/sentry-binding/sentry-binding-hive-v2/src/main/java/org/apache/sentry/binding/hive/v2/authorizer/DefaultSentryValidator.java b/sentry-binding/sentry-binding-hive-v2/src/main/java/org/apache/sentry/binding/hive/v2/authorizer/DefaultSentryValidator.java new file mode 100644 index 0000000..2bc8aad --- /dev/null +++ b/sentry-binding/sentry-binding-hive-v2/src/main/java/org/apache/sentry/binding/hive/v2/authorizer/DefaultSentryValidator.java @@ -0,0 +1,481 @@ +/** + * Licensed to the Apache Software Foundation (ASF) under one or more contributor license + * agreements. See the NOTICE file distributed with this work for additional information regarding + * copyright ownership. The ASF licenses this file to you under the Apache License, Version 2.0 (the + * "License"); you may not use this file except in compliance with the License. You may obtain a + * copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software distributed under the License + * is distributed on an "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express + * or implied. See the License for the specific language governing permissions and limitations under + * the License. 
+ */ +package org.apache.sentry.binding.hive.v2.authorizer; + +import static org.apache.hadoop.hive.metastore.MetaStoreUtils.DEFAULT_DATABASE_NAME; + +import java.security.CodeSource; +import java.util.ArrayList; +import java.util.EnumSet; +import java.util.List; +import java.util.Set; + +import org.apache.hadoop.hive.conf.HiveConf; +import org.apache.hadoop.hive.ql.metadata.AuthorizationException; +import org.apache.hadoop.hive.ql.plan.HiveOperation; +import org.apache.hadoop.hive.ql.security.HiveAuthenticationProvider; +import org.apache.hadoop.hive.ql.security.authorization.plugin.HiveAccessControlException; +import org.apache.hadoop.hive.ql.security.authorization.plugin.HiveAuthzContext; +import org.apache.hadoop.hive.ql.security.authorization.plugin.HiveAuthzPluginException; +import org.apache.hadoop.hive.ql.security.authorization.plugin.HiveOperationType; +import org.apache.hadoop.hive.ql.security.authorization.plugin.HivePrivilegeObject; +import org.apache.hadoop.hive.ql.security.authorization.plugin.HivePrivilegeObject.HivePrivilegeObjectType; +import org.apache.hadoop.hive.ql.session.SessionState; +import org.apache.sentry.binding.hive.SentryOnFailureHookContext; +import org.apache.sentry.binding.hive.SentryOnFailureHookContextImpl; +import org.apache.sentry.binding.hive.authz.HiveAuthzBinding; +import org.apache.sentry.binding.hive.authz.HiveAuthzBinding.HiveHook; +import org.apache.sentry.binding.hive.authz.HiveAuthzPrivileges; +import org.apache.sentry.binding.hive.authz.HiveAuthzPrivileges.HiveOperationScope; +import org.apache.sentry.binding.hive.authz.HiveAuthzPrivilegesMap; +import org.apache.sentry.binding.hive.conf.HiveAuthzConf; +import org.apache.sentry.binding.hive.v2.util.SentryAuthorizerUtil; +import org.apache.sentry.binding.hive.v2.util.SimpleSemanticAnalyzer; +import org.apache.sentry.core.common.Subject; +import org.apache.sentry.core.model.db.AccessURI; +import org.apache.sentry.core.model.db.Column; +import 
org.apache.sentry.core.model.db.DBModelAction; +import org.apache.sentry.core.model.db.DBModelAuthorizable; +import org.apache.sentry.core.model.db.DBModelAuthorizable.AuthorizableType; +import org.apache.sentry.core.model.db.Database; +import org.apache.sentry.core.model.db.Table; +import org.slf4j.Logger; +import org.slf4j.LoggerFactory; + +import com.google.common.annotations.VisibleForTesting; +import com.google.common.base.Preconditions; +import com.google.common.collect.Sets; + +/** + * This class used to do authorization. Check if current user has privileges to do the operation. + */ +public class DefaultSentryValidator extends SentryHiveAuthorizationValidator { + + public static final Logger LOG = LoggerFactory.getLogger(DefaultSentryValidator.class); + + protected HiveConf conf; + protected HiveAuthzConf authzConf; + protected HiveAuthenticationProvider authenticator; + + public DefaultSentryValidator(HiveConf conf, HiveAuthzConf authzConf, + HiveAuthenticationProvider authenticator) throws Exception { + initilize(conf, authzConf, authenticator); + this.hiveHook = HiveHook.HiveServer2; + } + + public DefaultSentryValidator(HiveHook hiveHook, HiveConf conf, HiveAuthzConf authzConf, + HiveAuthenticationProvider authenticator) throws Exception { + initilize(conf, authzConf, authenticator); + this.hiveHook = hiveHook; + } + + /** + * initialize authenticator and hiveAuthzBinding. 
+ */ + protected void initilize(HiveConf conf, HiveAuthzConf authzConf, + HiveAuthenticationProvider authenticator) throws Exception { + Preconditions.checkNotNull(conf, "HiveConf cannot be null"); + Preconditions.checkNotNull(authzConf, "HiveAuthzConf cannot be null"); + Preconditions.checkNotNull(authenticator, "Hive authenticator provider cannot be null"); + this.conf = conf; + this.authzConf = authzConf; + this.authenticator = authenticator; + } + + private HiveHook hiveHook; + + // all operations need to extend at DB scope + private static final Set<HiveOperation> EX_DB_ALL = Sets.newHashSet(HiveOperation.DROPDATABASE, + HiveOperation.CREATETABLE, HiveOperation.IMPORT, HiveOperation.DESCDATABASE, + HiveOperation.ALTERTABLE_RENAME, HiveOperation.LOCKDB, HiveOperation.UNLOCKDB); + // input operations need to extend at DB scope + private static final Set<HiveOperation> EX_DB_INPUT = Sets.newHashSet(HiveOperation.DROPDATABASE, + HiveOperation.DESCDATABASE, HiveOperation.ALTERTABLE_RENAME, HiveOperation.LOCKDB, + HiveOperation.UNLOCKDB); + + // all operations need to extend at Table scope + private static final Set<HiveOperation> EX_TB_ALL = Sets.newHashSet(HiveOperation.DROPTABLE, + HiveOperation.DROPVIEW, HiveOperation.DESCTABLE, HiveOperation.SHOW_TBLPROPERTIES, + HiveOperation.SHOWINDEXES, HiveOperation.ALTERTABLE_PROPERTIES, + HiveOperation.ALTERTABLE_SERDEPROPERTIES, HiveOperation.ALTERTABLE_CLUSTER_SORT, + HiveOperation.ALTERTABLE_FILEFORMAT, HiveOperation.ALTERTABLE_TOUCH, + HiveOperation.ALTERTABLE_PROTECTMODE, HiveOperation.ALTERTABLE_RENAMECOL, + HiveOperation.ALTERTABLE_ADDCOLS, HiveOperation.ALTERTABLE_REPLACECOLS, + HiveOperation.ALTERTABLE_RENAMEPART, HiveOperation.ALTERTABLE_ARCHIVE, + HiveOperation.ALTERTABLE_UNARCHIVE, HiveOperation.ALTERTABLE_SERIALIZER, + HiveOperation.ALTERTABLE_MERGEFILES, HiveOperation.ALTERTABLE_SKEWED, + HiveOperation.ALTERTABLE_DROPPARTS, HiveOperation.ALTERTABLE_ADDPARTS, + HiveOperation.ALTERTABLE_RENAME, 
HiveOperation.ALTERTABLE_LOCATION, + HiveOperation.ALTERVIEW_PROPERTIES, HiveOperation.ALTERPARTITION_FILEFORMAT, + HiveOperation.ALTERPARTITION_PROTECTMODE, HiveOperation.ALTERPARTITION_SERDEPROPERTIES, + HiveOperation.ALTERPARTITION_SERIALIZER, HiveOperation.ALTERPARTITION_MERGEFILES, + HiveOperation.ALTERPARTITION_LOCATION, HiveOperation.ALTERTBLPART_SKEWED_LOCATION, + HiveOperation.MSCK, HiveOperation.ALTERINDEX_REBUILD, HiveOperation.LOCKTABLE, + HiveOperation.UNLOCKTABLE, HiveOperation.SHOWCOLUMNS, HiveOperation.SHOW_TABLESTATUS, HiveOperation.LOAD); + // input operations need to extend at Table scope + private static final Set<HiveOperation> EX_TB_INPUT = Sets.newHashSet(HiveOperation.DROPTABLE, + HiveOperation.DROPVIEW, HiveOperation.SHOW_TBLPROPERTIES, HiveOperation.SHOWINDEXES, + HiveOperation.ALTERINDEX_REBUILD, HiveOperation.LOCKTABLE, HiveOperation.UNLOCKTABLE, + HiveOperation.SHOW_TABLESTATUS); + // metadata reads that get an extra column-level element (Column.SOME is appended in addExtendHierarchy) + private static final Set<HiveOperation> META_TB_INPUT = Sets.newHashSet(HiveOperation.DESCTABLE, + HiveOperation.SHOWCOLUMNS); + + /** + * Check if current user has privileges to perform given operation type hiveOpType on the given + * input and output objects + * + * @param hiveOpType the Hive operation being compiled + * @param inputHObjs objects the statement reads + * @param outputHObjs objects the statement writes + * @param context query context (raw command string, client IP) + * @throws HiveAuthzPluginException on errors while evaluating privileges + * @throws HiveAccessControlException if the user lacks the required privileges + */ + @Override + public void checkPrivileges(HiveOperationType hiveOpType, List<HivePrivilegeObject> inputHObjs, + List<HivePrivilegeObject> outputHObjs, HiveAuthzContext context) + throws HiveAuthzPluginException, HiveAccessControlException { + if (LOG.isDebugEnabled()) { + String msg = + "Checking privileges for operation " + hiveOpType + " by user " + + authenticator.getUserName() + " on " + " input objects " + inputHObjs + + " and output objects " + outputHObjs + ". 
Context Info: " + context; + LOG.debug(msg); + } + + // Map the V2 HiveOperationType name back to the V1 HiveOperation enum used by the binding + HiveOperation hiveOp = SentryAuthorizerUtil.convert2HiveOperation(hiveOpType.name()); + HiveAuthzPrivileges stmtAuthPrivileges = null; + // Plain DESCRIBE (no EXTENDED/FORMATTED in the command) is authorized with SHOWCOLUMNS privileges + if (HiveOperation.DESCTABLE.equals(hiveOp) && + !(context.getCommandString().contains("EXTENDED") || context.getCommandString().contains("FORMATTED")) ) { + stmtAuthPrivileges = HiveAuthzPrivilegesMap.getHiveAuthzPrivileges(HiveOperation.SHOWCOLUMNS); + } else { + stmtAuthPrivileges = HiveAuthzPrivilegesMap.getHiveAuthzPrivileges(hiveOp); + } + + HiveAuthzBinding hiveAuthzBinding = null; + try { + hiveAuthzBinding = getAuthzBinding(); + if (stmtAuthPrivileges == null) { + // We don't handle authorizing this statement + return; + } + + List<List<DBModelAuthorizable>> inputHierarchyList = + SentryAuthorizerUtil.convert2SentryPrivilegeList(hiveAuthzBinding.getAuthServer(), + inputHObjs); + List<List<DBModelAuthorizable>> outputHierarchyList = + SentryAuthorizerUtil.convert2SentryPrivilegeList(hiveAuthzBinding.getAuthServer(), + outputHObjs); + + // Workaround for metadata queries + addExtendHierarchy(hiveOp, stmtAuthPrivileges, inputHierarchyList, outputHierarchyList, + context.getCommandString(), hiveAuthzBinding); + + hiveAuthzBinding.authorize(hiveOp, stmtAuthPrivileges, + new Subject(authenticator.getUserName()), inputHierarchyList, outputHierarchyList); + } catch (AuthorizationException e) { + // Rebuild the objects involved so the on-failure hooks get database/table context + Database db = null; + Table tab = null; + AccessURI udfURI = null; + AccessURI partitionURI = null; + if (outputHObjs != null) { + for (HivePrivilegeObject obj : outputHObjs) { + switch (obj.getType()) { + case DATABASE: + db = new Database(obj.getObjectName()); + break; + case TABLE_OR_VIEW: + db = new Database(obj.getDbname()); + tab = new Table(obj.getObjectName()); + break; + case PARTITION: + db = new Database(obj.getDbname()); + tab = new Table(obj.getObjectName()); + // NOTE(review): no break here — PARTITION falls through into LOCAL_URI/DFS_URI, which are + // empty, so behavior is unchanged today, but a break would make this robust to future edits + case LOCAL_URI: + case DFS_URI: + } + } + } + String permsRequired = ""; + SentryOnFailureHookContext hookCtx = + 
new SentryOnFailureHookContextImpl(context.getCommandString(), null, null, hiveOp, db, + tab, udfURI, partitionURI, authenticator.getUserName(), context.getIpAddress(), e, + authzConf); + SentryAuthorizerUtil.executeOnFailureHooks(hookCtx, authzConf); + // Build a semicolon-separated list of the privileges the query was missing + // (NOTE(review): string concatenation in a loop — a StringBuilder would be cleaner) + for (String perm : hiveAuthzBinding.getLastQueryPrivilegeErrors()) { + permsRequired += perm + ";"; + } + SessionState.get().getConf().set(HiveAuthzConf.HIVE_SENTRY_AUTH_ERRORS, permsRequired); + String msg = + HiveAuthzConf.HIVE_SENTRY_PRIVILEGE_ERROR_MESSAGE + + "\n Required privileges for this query: " + permsRequired; + throw new HiveAccessControlException(msg, e); + } catch (Exception e) { + throw new HiveAuthzPluginException(e.getClass()+ ": " + e.getMessage(), e); + } finally { + if (hiveAuthzBinding != null) { + hiveAuthzBinding.close(); + } + } + + // Test-only hook: force an authorization failure when HIVE_SENTRY_MOCK_COMPILATION is "true" + if ("true".equalsIgnoreCase(SessionState.get().getConf() + .get(HiveAuthzConf.HIVE_SENTRY_MOCK_COMPILATION))) { + throw new HiveAccessControlException(HiveAuthzConf.HIVE_SENTRY_MOCK_ERROR + + " Mock query compilation aborted. Set " + HiveAuthzConf.HIVE_SENTRY_MOCK_COMPILATION + + " to 'false' for normal query processing"); + } + } + + // Creates a fresh binding on every call; callers are responsible for close() (see finally blocks) + @VisibleForTesting + public HiveAuthzBinding getAuthzBinding() throws Exception { + return new HiveAuthzBinding(hiveHook, conf, authzConf); + } + + /** + * Extend the input/output hierarchies for operations whose objects Hive does not fully report, + * by re-parsing the raw command text with SimpleSemanticAnalyzer. + */ + private void addExtendHierarchy(HiveOperation hiveOp, HiveAuthzPrivileges stmtAuthPrivileges, + List<List<DBModelAuthorizable>> inputHierarchyList, + List<List<DBModelAuthorizable>> outputHierarchyList, String command, + HiveAuthzBinding hiveAuthzBinding) throws HiveAuthzPluginException, + HiveAccessControlException { + String currDatabase = null; + switch (stmtAuthPrivileges.getOperationScope()) { + case SERVER: + // validate server level privileges if applicable. Eg create UDF,register jar etc .. 
+ List<DBModelAuthorizable> serverHierarchy = new ArrayList<DBModelAuthorizable>(); + serverHierarchy.add(hiveAuthzBinding.getAuthServer()); + inputHierarchyList.add(serverHierarchy); + break; + case DATABASE: + // workaround for metadata queries. + if (EX_DB_ALL.contains(hiveOp)) { + SimpleSemanticAnalyzer analyzer = new SimpleSemanticAnalyzer(hiveOp, command); + currDatabase = analyzer.getCurrentDb(); + + List<DBModelAuthorizable> externalAuthorizableHierarchy = + new ArrayList<DBModelAuthorizable>(); + externalAuthorizableHierarchy.add(hiveAuthzBinding.getAuthServer()); + externalAuthorizableHierarchy.add(new Database(currDatabase)); + + if (EX_DB_INPUT.contains(hiveOp)) { + inputHierarchyList.add(externalAuthorizableHierarchy); + } else { + outputHierarchyList.add(externalAuthorizableHierarchy); + } + } + break; + case TABLE: + case COLUMN: + // workaround for drop table/view. + if (EX_TB_ALL.contains(hiveOp)) { + SimpleSemanticAnalyzer analyzer = new SimpleSemanticAnalyzer(hiveOp, command); + currDatabase = analyzer.getCurrentDb(); + String currTable = analyzer.getCurrentTb(); + + List<DBModelAuthorizable> externalAuthorizableHierarchy = + new ArrayList<DBModelAuthorizable>(); + externalAuthorizableHierarchy.add(hiveAuthzBinding.getAuthServer()); + externalAuthorizableHierarchy.add(new Database(currDatabase)); + externalAuthorizableHierarchy.add(new Table(currTable)); + + if (EX_TB_INPUT.contains(hiveOp)) { + inputHierarchyList.add(externalAuthorizableHierarchy); + } else if (META_TB_INPUT.contains(hiveOp)) { + // metadata reads are checked at column level + externalAuthorizableHierarchy.add(Column.SOME); + inputHierarchyList.add(externalAuthorizableHierarchy); + } else { + outputHierarchyList.add(externalAuthorizableHierarchy); + } + } + break; + case FUNCTION: + if (hiveOp.equals(HiveOperation.CREATEFUNCTION)) { + SimpleSemanticAnalyzer analyzer = new SimpleSemanticAnalyzer(hiveOp, command); + currDatabase = analyzer.getCurrentDb(); + String udfClassName = analyzer.getCurrentTb(); + try { + // Resolve the jar providing the UDF class so URI-level privileges can be checked + CodeSource 
udfSrc = Class.forName(udfClassName).getProtectionDomain().getCodeSource(); + if (udfSrc == null) { + throw new HiveAuthzPluginException("Could not resolve the jar for UDF class " + + udfClassName); + } + String udfJar = udfSrc.getLocation().getPath(); + if (udfJar == null || udfJar.isEmpty()) { + // NOTE(review): message is missing a space before "to validate privileges" + throw new HiveAuthzPluginException("Could not find the jar for UDF class " + + udfClassName + "to validate privileges"); + } + AccessURI udfURI = SentryAuthorizerUtil.parseURI(udfSrc.getLocation().toString(), true); + List<DBModelAuthorizable> udfUriHierarchy = new ArrayList<DBModelAuthorizable>(); + udfUriHierarchy.add(hiveAuthzBinding.getAuthServer()); + udfUriHierarchy.add(udfURI); + inputHierarchyList.add(udfUriHierarchy); + } catch (Exception e) { + throw new HiveAuthzPluginException("Error retrieving udf class", e); + } + } + break; + case CONNECT: + /* + * The 'CONNECT' is an implicit privilege scope currently used for - USE <db> It's allowed + * when the user has any privilege on the current database. For application backward + * compatibility, we allow (optional) implicit connect permission on 'default' db. 
+ */ + List<DBModelAuthorizable> connectHierarchy = new ArrayList<DBModelAuthorizable>(); + connectHierarchy.add(hiveAuthzBinding.getAuthServer()); + if (hiveOp.equals(HiveOperation.SWITCHDATABASE)) { + // NOTE(review): naive parse of "USE <db>" — extra whitespace in the command would break this + currDatabase = command.split(" ")[1]; + } + // by default allow connect access to default db + Table currTbl = Table.ALL; + Database currDB = new Database(currDatabase); + Column currCol = Column.ALL; + // NOTE(review): currDatabase can still be null here for non-SWITCHDATABASE ops — confirm + // that Database(null) is acceptable to the binding + if ((DEFAULT_DATABASE_NAME.equalsIgnoreCase(currDatabase) && "false" + .equalsIgnoreCase(authzConf.get( + HiveAuthzConf.AuthzConfVars.AUTHZ_RESTRICT_DEFAULT_DB.getVar(), "false")))) { + currDB = Database.ALL; + currTbl = Table.SOME; + } + + connectHierarchy.add(currDB); + connectHierarchy.add(currTbl); + connectHierarchy.add(currCol); + + inputHierarchyList.add(connectHierarchy); + break; + } + } + + /** + * Filter SHOW DATABASES / SHOW TABLES results down to the objects the user has privileges on. + * Any failure is only debug-logged and the list is returned unfiltered. + */ + @Override + public List<HivePrivilegeObject> filterListCmdObjects(List<HivePrivilegeObject> listObjs, + HiveAuthzContext context) { + if (listObjs != null && listObjs.size() >= 1) { + // all objects in one listing share the same type, so inspect only the first + HivePrivilegeObjectType pType = listObjs.get(0).getType(); + HiveAuthzBinding hiveAuthzBinding = null; + try { + switch (pType) { + case DATABASE: + hiveAuthzBinding = getAuthzBinding(); + listObjs = filterShowDatabases(listObjs, authenticator.getUserName(), hiveAuthzBinding); + break; + case TABLE_OR_VIEW: + hiveAuthzBinding = getAuthzBinding(); + listObjs = filterShowTables(listObjs, authenticator.getUserName(), hiveAuthzBinding); + break; + } + } catch (Exception e) { + // NOTE(review): errors are swallowed here; on failure the original list goes back unfiltered + LOG.debug(e.getMessage(),e); + } finally { + if (hiveAuthzBinding != null) { + hiveAuthzBinding.close(); + } + } + } + return listObjs; + } + + // Keep only the tables/views from listObjs that userName can access (SELECT or INSERT) + private List<HivePrivilegeObject> filterShowTables(List<HivePrivilegeObject> listObjs, + String userName, HiveAuthzBinding hiveAuthzBinding) { + List<HivePrivilegeObject> filteredResult = new ArrayList<HivePrivilegeObject>(); + Subject subject = new Subject(userName); + HiveAuthzPrivileges tableMetaDataPrivilege = + new HiveAuthzPrivileges.AuthzPrivilegeBuilder() + 
.addInputObjectPriviledge(AuthorizableType.Column, + EnumSet.of(DBModelAction.SELECT, DBModelAction.INSERT)) + .setOperationScope(HiveOperationScope.TABLE) + .setOperationType( + org.apache.sentry.binding.hive.authz.HiveAuthzPrivileges.HiveOperationType.INFO) + .build(); + + // A table is listed only if the user holds SELECT or INSERT on it + for (HivePrivilegeObject obj : listObjs) { + // if user has privileges on table, add to filtered list, else discard + Table table = new Table(obj.getObjectName()); + Database database; + database = new Database(obj.getDbname()); + + List<List<DBModelAuthorizable>> inputHierarchy = new ArrayList<List<DBModelAuthorizable>>(); + List<List<DBModelAuthorizable>> outputHierarchy = new ArrayList<List<DBModelAuthorizable>>(); + List<DBModelAuthorizable> externalAuthorizableHierarchy = + new ArrayList<DBModelAuthorizable>(); + externalAuthorizableHierarchy.add(hiveAuthzBinding.getAuthServer()); + externalAuthorizableHierarchy.add(database); + externalAuthorizableHierarchy.add(table); + externalAuthorizableHierarchy.add(Column.ALL); + inputHierarchy.add(externalAuthorizableHierarchy); + + try { + hiveAuthzBinding.authorize(HiveOperation.SHOWTABLES, tableMetaDataPrivilege, subject, + inputHierarchy, outputHierarchy); + filteredResult.add(obj); + } catch (AuthorizationException e) { + // squash the exception, user doesn't have privileges, so the table is + // not added to + // filtered list. 
+ ; + } + } + return filteredResult; + } + + /** + * Keep only the databases the user has some privilege in; 'default' passes through unchecked + * when AUTHZ_RESTRICT_DEFAULT_DB is false. + */ + private List<HivePrivilegeObject> filterShowDatabases(List<HivePrivilegeObject> listObjs, + String userName, HiveAuthzBinding hiveAuthzBinding) { + List<HivePrivilegeObject> filteredResult = new ArrayList<HivePrivilegeObject>(); + Subject subject = new Subject(userName); + // any of these actions, on any table/column of the database, is enough to list it + HiveAuthzPrivileges anyPrivilege = + new HiveAuthzPrivileges.AuthzPrivilegeBuilder() + .addInputObjectPriviledge( + AuthorizableType.Column, + EnumSet.of(DBModelAction.SELECT, DBModelAction.INSERT, DBModelAction.ALTER, + DBModelAction.CREATE, DBModelAction.DROP, DBModelAction.INDEX, + DBModelAction.LOCK)) + .setOperationScope(HiveOperationScope.CONNECT) + .setOperationType( + org.apache.sentry.binding.hive.authz.HiveAuthzPrivileges.HiveOperationType.QUERY) + .build(); + + for (HivePrivilegeObject obj : listObjs) { + // if user has privileges on database, add to filtered list, else discard + Database database = null; + + // if default is not restricted, continue + if (DEFAULT_DATABASE_NAME.equalsIgnoreCase(obj.getObjectName()) + && "false".equalsIgnoreCase(hiveAuthzBinding.getAuthzConf().get( + HiveAuthzConf.AuthzConfVars.AUTHZ_RESTRICT_DEFAULT_DB.getVar(), "false"))) { + filteredResult.add(obj); + continue; + } + + database = new Database(obj.getObjectName()); + + List<List<DBModelAuthorizable>> inputHierarchy = new ArrayList<List<DBModelAuthorizable>>(); + List<List<DBModelAuthorizable>> outputHierarchy = new ArrayList<List<DBModelAuthorizable>>(); + List<DBModelAuthorizable> externalAuthorizableHierarchy = + new ArrayList<DBModelAuthorizable>(); + externalAuthorizableHierarchy.add(hiveAuthzBinding.getAuthServer()); + externalAuthorizableHierarchy.add(database); + externalAuthorizableHierarchy.add(Table.ALL); + externalAuthorizableHierarchy.add(Column.ALL); + inputHierarchy.add(externalAuthorizableHierarchy); + + try { + hiveAuthzBinding.authorize(HiveOperation.SHOWDATABASES, anyPrivilege, subject, + inputHierarchy, outputHierarchy); + 
filteredResult.add(obj); + } catch (AuthorizationException e) { + // squash the exception, user doesn't have privileges, so the database is + // not added to the + // filtered list. + ; + } + } + return filteredResult; + } +}