Repository: incubator-sentry Updated Branches: refs/heads/master b2d71a8c5 -> 5e58f3fe6
/**
 * Licensed to the Apache Software Foundation (ASF) under one or more contributor license
 * agreements. See the NOTICE file distributed with this work for additional information regarding
 * copyright ownership. The ASF licenses this file to you under the Apache License, Version 2.0 (the
 * "License"); you may not use this file except in compliance with the License. You may obtain a
 * copy of the License at
 *
 * http://www.apache.org/licenses/LICENSE-2.0
 *
 * Unless required by applicable law or agreed to in writing, software distributed under the License
 * is distributed on an "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express
 * or implied. See the License for the specific language governing permissions and limitations under
 * the License.
 */
package org.apache.sentry.binding.hive.v2.authorizer;

import java.util.List;

import org.apache.hadoop.hive.conf.HiveConf;
import org.apache.hadoop.hive.ql.security.authorization.plugin.HiveAccessControlException;
import org.apache.hadoop.hive.ql.security.authorization.plugin.HiveAccessController;
import org.apache.hadoop.hive.ql.security.authorization.plugin.HiveAuthzPluginException;
import org.apache.hadoop.hive.ql.security.authorization.plugin.HivePrincipal;
import org.apache.hadoop.hive.ql.security.authorization.plugin.HivePrivilege;
import org.apache.hadoop.hive.ql.security.authorization.plugin.HivePrivilegeInfo;
import org.apache.hadoop.hive.ql.security.authorization.plugin.HivePrivilegeObject;
import org.apache.hadoop.hive.ql.security.authorization.plugin.HiveRoleGrant;

/**
 * Base class for Sentry-backed access-control command handling: grant/revoke of privileges and
 * roles, and role creation/removal/inspection. Concrete subclasses implement each Hive DDL
 * statement listed on the corresponding method.
 */
public abstract class SentryHiveAccessController implements HiveAccessController {

  /**
   * Implements the Hive statement:
   * {@code GRANT priv_type [, priv_type] ... ON table_or_view_name TO principal_specification
   * [, principal_specification] ... [WITH GRANT OPTION];}
   * where {@code principal_specification : USER user | ROLE role} and
   * {@code priv_type : INSERT | SELECT | UPDATE | DELETE | ALL}.
   *
   * @param hivePrincipals principals (users or roles) receiving the privileges
   * @param hivePrivileges privileges being granted
   * @param hivePrivObject object (table/view/db) the privileges apply to
   * @param grantorPrincipal principal performing the grant
   * @param grantOption whether WITH GRANT OPTION was specified
   * @throws HiveAuthzPluginException on plugin-level failure
   * @throws HiveAccessControlException when the grantor lacks authority
   */
  @Override
  public abstract void grantPrivileges(List<HivePrincipal> hivePrincipals,
      List<HivePrivilege> hivePrivileges, HivePrivilegeObject hivePrivObject,
      HivePrincipal grantorPrincipal, boolean grantOption) throws HiveAuthzPluginException,
      HiveAccessControlException;

  /**
   * Implements the Hive statement:
   * {@code REVOKE priv_type [, priv_type] ... ON table_or_view_name FROM principal_specification
   * [, principal_specification] ...;}
   * where {@code principal_specification : USER user | ROLE role} and
   * {@code priv_type : INSERT | SELECT | UPDATE | DELETE | ALL}.
   *
   * @param hivePrincipals principals (users or roles) losing the privileges
   * @param hivePrivileges privileges being revoked
   * @param hivePrivObject object (table/view/db) the privileges apply to
   * @param grantorPrincipal principal performing the revoke
   * @param grantOption whether the grant option itself is being revoked
   * @throws HiveAuthzPluginException on plugin-level failure
   * @throws HiveAccessControlException when the grantor lacks authority
   */
  @Override
  public abstract void revokePrivileges(List<HivePrincipal> hivePrincipals,
      List<HivePrivilege> hivePrivileges, HivePrivilegeObject hivePrivObject,
      HivePrincipal grantorPrincipal, boolean grantOption) throws HiveAuthzPluginException,
      HiveAccessControlException;

  /**
   * Implements the Hive statement: {@code CREATE ROLE role_name;}
   *
   * @param roleName name of the role to create
   * @param adminGrantor principal that will administer the new role
   * @throws HiveAuthzPluginException on plugin-level failure
   * @throws HiveAccessControlException when the caller lacks authority
   */
  @Override
  public abstract void createRole(String roleName, HivePrincipal adminGrantor)
      throws HiveAuthzPluginException, HiveAccessControlException;

  /**
   * Implements the Hive statement: {@code DROP ROLE role_name;}
   *
   * @param roleName name of the role to drop
   * @throws HiveAuthzPluginException on plugin-level failure
   * @throws HiveAccessControlException when the caller lacks authority
   */
  @Override
  public abstract void dropRole(String roleName) throws HiveAuthzPluginException,
      HiveAccessControlException;

  /**
   * Implements the Hive statement:
   * {@code GRANT role_name [, role_name] ... TO principal_specification
   * [, principal_specification] ... [WITH ADMIN OPTION];}
   * where {@code principal_specification : USER user | ROLE role}.
   *
   * @param hivePrincipals principals receiving the roles
   * @param roles role names being granted
   * @param grantOption whether WITH ADMIN OPTION was specified
   * @param grantorPrinc principal performing the grant
   * @throws HiveAuthzPluginException on plugin-level failure
   * @throws HiveAccessControlException when the grantor lacks authority
   */
  @Override
  public abstract void grantRole(List<HivePrincipal> hivePrincipals, List<String> roles,
      boolean grantOption, HivePrincipal grantorPrinc) throws HiveAuthzPluginException,
      HiveAccessControlException;

  /**
   * Implements the Hive statement:
   * {@code REVOKE [ADMIN OPTION FOR] role_name [, role_name] ... FROM principal_specification
   * [, principal_specification] ...;}
   * where {@code principal_specification : USER user | ROLE role}.
   *
   * @param hivePrincipals principals losing the roles
   * @param roles role names being revoked
   * @param grantOption whether only the admin option is being revoked
   * @param grantorPrinc principal performing the revoke
   * @throws HiveAuthzPluginException on plugin-level failure
   * @throws HiveAccessControlException when the grantor lacks authority
   */
  @Override
  public abstract void revokeRole(List<HivePrincipal> hivePrincipals, List<String> roles,
      boolean grantOption, HivePrincipal grantorPrinc) throws HiveAuthzPluginException,
      HiveAccessControlException;

  /**
   * Implements the Hive statement: {@code SHOW ROLES;}
   *
   * @return all role names known to the authorization backend
   * @throws HiveAuthzPluginException on plugin-level failure
   * @throws HiveAccessControlException when the caller lacks authority
   */
  @Override
  public abstract List<String> getAllRoles() throws HiveAuthzPluginException,
      HiveAccessControlException;

  /**
   * Implements the Hive statement:
   * {@code SHOW GRANT [principal_name] ON (ALL | [TABLE] table_or_view_name);}
   *
   * @param principal principal whose grants are listed (may identify a user or role)
   * @param privObj object to list grants on
   * @return privilege descriptions matching the request
   * @throws HiveAuthzPluginException on plugin-level failure
   * @throws HiveAccessControlException when the caller lacks authority
   */
  @Override
  public abstract List<HivePrivilegeInfo> showPrivileges(HivePrincipal principal,
      HivePrivilegeObject privObj) throws HiveAuthzPluginException, HiveAccessControlException;

  /**
   * Implements the Hive statement: {@code SET ROLE (role_name | ALL);}
   *
   * @param roleName role to make current for the session
   * @throws HiveAuthzPluginException on plugin-level failure
   * @throws HiveAccessControlException when the caller lacks authority
   */
  @Override
  public abstract void setCurrentRole(String roleName) throws HiveAuthzPluginException,
      HiveAccessControlException;

  /**
   * Implements the Hive statement: {@code SHOW CURRENT ROLES;}
   *
   * @return the roles active for the current session
   * @throws HiveAuthzPluginException on plugin-level failure
   */
  @Override
  public abstract List<String> getCurrentRoleNames() throws HiveAuthzPluginException;

  /**
   * Implements the Hive statement: {@code SHOW PRINCIPALS role_name;}
   *
   * @param roleName role whose grantees are listed
   * @return grant records for every principal holding the role
   * @throws HiveAuthzPluginException on plugin-level failure
   * @throws HiveAccessControlException when the caller lacks authority
   */
  @Override
  public abstract List<HiveRoleGrant> getPrincipalGrantInfoForRole(String roleName)
      throws HiveAuthzPluginException, HiveAccessControlException;

  /**
   * Implements the Hive statement: {@code SHOW ROLE GRANT (USER|ROLE) principal_name;}
   *
   * @param principal principal whose role grants are listed
   * @return grant records for every role held by the principal
   * @throws HiveAuthzPluginException on plugin-level failure
   * @throws HiveAccessControlException when the caller lacks authority
   */
  @Override
  public abstract List<HiveRoleGrant> getRoleGrantInfoForPrincipal(HivePrincipal principal)
      throws HiveAuthzPluginException, HiveAccessControlException;

  /**
   * Applies configuration files for authorization V2.
   *
   * @param hiveConf Hive configuration to adjust
   * @throws HiveAuthzPluginException on plugin-level failure
   */
  @Override
  public abstract void applyAuthorizationConfigPolicy(HiveConf hiveConf)
      throws HiveAuthzPluginException;

}
/**
 * Licensed to the Apache Software Foundation (ASF) under one or more contributor license
 * agreements. See the NOTICE file distributed with this work for additional information regarding
 * copyright ownership. The ASF licenses this file to you under the Apache License, Version 2.0 (the
 * "License"); you may not use this file except in compliance with the License. You may obtain a
 * copy of the License at
 *
 * http://www.apache.org/licenses/LICENSE-2.0
 *
 * Unless required by applicable law or agreed to in writing, software distributed under the License
 * is distributed on an "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express
 * or implied. See the License for the specific language governing permissions and limitations under
 * the License.
 */
package org.apache.sentry.binding.hive.v2.authorizer;

import java.util.List;

import org.apache.hadoop.hive.ql.security.authorization.plugin.HiveAccessControlException;
import org.apache.hadoop.hive.ql.security.authorization.plugin.HiveAuthorizationValidator;
import org.apache.hadoop.hive.ql.security.authorization.plugin.HiveAuthzContext;
import org.apache.hadoop.hive.ql.security.authorization.plugin.HiveAuthzPluginException;
import org.apache.hadoop.hive.ql.security.authorization.plugin.HiveOperationType;
import org.apache.hadoop.hive.ql.security.authorization.plugin.HivePrivilegeObject;

/**
 * Base class for authorization validation: verifies that the current user may perform an
 * operation, and filters listing results down to objects the user can access.
 */
public abstract class SentryHiveAuthorizationValidator implements HiveAuthorizationValidator {

  /**
   * Checks whether the current user may perform the given operation type {@code hiveOpType}
   * against the given input and output objects.
   *
   * @param hiveOpType operation being authorized
   * @param inputHObjs objects read by the operation
   * @param outputHObjs objects written by the operation
   * @param context additional authorization context
   * @throws HiveAuthzPluginException on plugin-level failure
   * @throws HiveAccessControlException when the user lacks a required privilege
   */
  @Override
  public abstract void checkPrivileges(HiveOperationType hiveOpType,
      List<HivePrivilegeObject> inputHObjs, List<HivePrivilegeObject> outputHObjs,
      HiveAuthzContext context) throws HiveAuthzPluginException, HiveAccessControlException;

  /**
   * Filters listing results according to the current user's permissions, removing every object
   * the user holds no privilege on.
   *
   * @param listObjs candidate objects to filter
   * @param context additional authorization context
   * @return the subset of {@code listObjs} the current user may see
   */
  @Override
  public abstract List<HivePrivilegeObject> filterListCmdObjects(
      List<HivePrivilegeObject> listObjs, HiveAuthzContext context);
}
/**
 * Licensed to the Apache Software Foundation (ASF) under one or more contributor license
 * agreements. See the NOTICE file distributed with this work for additional information regarding
 * copyright ownership. The ASF licenses this file to you under the Apache License, Version 2.0 (the
 * "License"); you may not use this file except in compliance with the License. You may obtain a
 * copy of the License at
 *
 * http://www.apache.org/licenses/LICENSE-2.0
 *
 * Unless required by applicable law or agreed to in writing, software distributed under the License
 * is distributed on an "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express
 * or implied. See the License for the specific language governing permissions and limitations under
 * the License.
 */
package org.apache.sentry.binding.hive.v2.authorizer;

import java.util.List;

import org.apache.hadoop.hive.conf.HiveConf;
import org.apache.hadoop.hive.ql.exec.SentryHivePrivilegeObjectDesc;
import org.apache.hadoop.hive.ql.metadata.HiveException;
import org.apache.hadoop.hive.ql.plan.PrincipalDesc;
import org.apache.hadoop.hive.ql.plan.PrivilegeDesc;
import org.apache.hadoop.hive.ql.plan.PrivilegeObjectDesc;
import org.apache.hadoop.hive.ql.security.authorization.AuthorizationUtils;
import org.apache.hadoop.hive.ql.security.authorization.plugin.HiveAccessControlException;
import org.apache.hadoop.hive.ql.security.authorization.plugin.HiveAuthorizer;
import org.apache.hadoop.hive.ql.security.authorization.plugin.HiveAuthzContext;
import org.apache.hadoop.hive.ql.security.authorization.plugin.HiveAuthzPluginException;
import org.apache.hadoop.hive.ql.security.authorization.plugin.HiveOperationType;
import org.apache.hadoop.hive.ql.security.authorization.plugin.HivePrincipal;
import org.apache.hadoop.hive.ql.security.authorization.plugin.HivePrivilege;
import org.apache.hadoop.hive.ql.security.authorization.plugin.HivePrivilegeInfo;
import org.apache.hadoop.hive.ql.security.authorization.plugin.HivePrivilegeObject;
import org.apache.hadoop.hive.ql.security.authorization.plugin.HivePrivilegeObject.HivePrivilegeObjectType;
import org.apache.hadoop.hive.ql.security.authorization.plugin.HiveRoleGrant;
import org.apache.sentry.binding.hive.v2.SentryHivePrivilegeObject;

/**
 * Convenience implementation of HiveAuthorizer. You can customize the behavior by passing
 * different implementations of {@link SentryHiveAccessController} and
 * {@link SentryHiveAuthorizationValidator} to the constructor. Access-control commands are
 * delegated to the controller; privilege checks and result filtering to the validator.
 */
public class SentryHiveAuthorizer implements HiveAuthorizer {

  // Delegates are fixed for the lifetime of the authorizer; final guarantees safe publication
  // when the authorizer is shared across session threads.
  private final SentryHiveAccessController accessController;
  private final SentryHiveAuthorizationValidator authValidator;

  /**
   * @param accessController handles grant/revoke and role DDL
   * @param authValidator handles privilege checks and listing filters
   */
  public SentryHiveAuthorizer(SentryHiveAccessController accessController,
      SentryHiveAuthorizationValidator authValidator) {
    this.accessController = accessController;
    this.authValidator = authValidator;
  }

  /** Delegates GRANT privilege handling to the access controller. */
  @Override
  public void grantPrivileges(List<HivePrincipal> hivePrincipals,
      List<HivePrivilege> hivePrivileges, HivePrivilegeObject hivePrivObject,
      HivePrincipal grantorPrincipal, boolean grantOption) throws HiveAuthzPluginException,
      HiveAccessControlException {
    accessController.grantPrivileges(hivePrincipals, hivePrivileges, hivePrivObject,
        grantorPrincipal, grantOption);
  }

  /** Delegates REVOKE privilege handling to the access controller. */
  @Override
  public void revokePrivileges(List<HivePrincipal> hivePrincipals,
      List<HivePrivilege> hivePrivileges, HivePrivilegeObject hivePrivObject,
      HivePrincipal grantorPrincipal, boolean grantOption) throws HiveAuthzPluginException,
      HiveAccessControlException {
    accessController.revokePrivileges(hivePrincipals, hivePrivileges, hivePrivObject,
        grantorPrincipal, grantOption);
  }

  /** Delegates CREATE ROLE to the access controller. */
  @Override
  public void createRole(String roleName, HivePrincipal adminGrantor)
      throws HiveAuthzPluginException, HiveAccessControlException {
    accessController.createRole(roleName, adminGrantor);
  }

  /** Delegates DROP ROLE to the access controller. */
  @Override
  public void dropRole(String roleName) throws HiveAuthzPluginException, HiveAccessControlException {
    accessController.dropRole(roleName);
  }

  /** Delegates GRANT ROLE to the access controller. */
  @Override
  public void grantRole(List<HivePrincipal> hivePrincipals, List<String> roles,
      boolean grantOption, HivePrincipal grantorPrinc) throws HiveAuthzPluginException,
      HiveAccessControlException {
    accessController.grantRole(hivePrincipals, roles, grantOption, grantorPrinc);
  }

  /** Delegates REVOKE ROLE to the access controller. */
  @Override
  public void revokeRole(List<HivePrincipal> hivePrincipals, List<String> roles,
      boolean grantOption, HivePrincipal grantorPrinc) throws HiveAuthzPluginException,
      HiveAccessControlException {
    accessController.revokeRole(hivePrincipals, roles, grantOption, grantorPrinc);
  }

  /** Delegates the privilege check for an operation to the validator. */
  @Override
  public void checkPrivileges(HiveOperationType hiveOpType, List<HivePrivilegeObject> inputHObjs,
      List<HivePrivilegeObject> outputHObjs, HiveAuthzContext context)
      throws HiveAuthzPluginException, HiveAccessControlException {
    authValidator.checkPrivileges(hiveOpType, inputHObjs, outputHObjs, context);
  }

  /** Delegates SHOW ROLES to the access controller. */
  @Override
  public List<String> getAllRoles() throws HiveAuthzPluginException, HiveAccessControlException {
    return accessController.getAllRoles();
  }

  /** Delegates SHOW GRANT to the access controller. */
  @Override
  public List<HivePrivilegeInfo> showPrivileges(HivePrincipal principal, HivePrivilegeObject privObj)
      throws HiveAuthzPluginException, HiveAccessControlException {
    return accessController.showPrivileges(principal, privObj);
  }

  /** @return the authorization-plugin interface version this authorizer implements */
  @Override
  public VERSION getVersion() {
    return VERSION.V1;
  }

  /** Delegates SET ROLE to the access controller. */
  @Override
  public void setCurrentRole(String roleName) throws HiveAccessControlException,
      HiveAuthzPluginException {
    accessController.setCurrentRole(roleName);
  }

  /** Delegates SHOW CURRENT ROLES to the access controller. */
  @Override
  public List<String> getCurrentRoleNames() throws HiveAuthzPluginException {
    return accessController.getCurrentRoleNames();
  }

  /** Delegates SHOW PRINCIPALS to the access controller. */
  @Override
  public List<HiveRoleGrant> getPrincipalGrantInfoForRole(String roleName)
      throws HiveAuthzPluginException, HiveAccessControlException {
    return accessController.getPrincipalGrantInfoForRole(roleName);
  }

  /** Delegates SHOW ROLE GRANT to the access controller. */
  @Override
  public List<HiveRoleGrant> getRoleGrantInfoForPrincipal(HivePrincipal principal)
      throws HiveAuthzPluginException, HiveAccessControlException {
    return accessController.getRoleGrantInfoForPrincipal(principal);
  }

  /** Delegates authorization-policy configuration to the access controller. */
  @Override
  public void applyAuthorizationConfigPolicy(HiveConf hiveConf) throws HiveAuthzPluginException {
    accessController.applyAuthorizationConfigPolicy(hiveConf);
  }

  /** Delegates listing-result filtering to the validator. */
  @Override
  public List<HivePrivilegeObject> filterListCmdObjects(List<HivePrivilegeObject> listObjs,
      HiveAuthzContext context) throws HiveAuthzPluginException, HiveAccessControlException {
    return authValidator.filterListCmdObjects(listObjs, context);
  }

  /** Converts principal descriptors via Hive's standard utility. */
  @Override
  public List<HivePrincipal> getHivePrincipals(List<PrincipalDesc> principals) throws HiveException {
    return AuthorizationUtils.getHivePrincipals(principals);
  }

  /** Converts privilege descriptors via Hive's standard utility. */
  @Override
  public List<HivePrivilege> getHivePrivileges(List<PrivilegeDesc> privileges) {
    return AuthorizationUtils.getHivePrivileges(privileges);
  }

  /**
   * Converts a privilege-object descriptor, routing Sentry-specific descriptors (server/URI
   * extensions) to {@link SentryHivePrivilegeObject} and everything else to Hive's standard
   * conversion.
   */
  @Override
  public HivePrivilegeObject getHivePrivilegeObject(PrivilegeObjectDesc privSubjectDesc)
      throws HiveException {
    // Single combined check replaces the original null-flag dance: only a Sentry descriptor
    // that explicitly flags itself as a Sentry privilege object takes the Sentry path.
    if (privSubjectDesc instanceof SentryHivePrivilegeObjectDesc
        && ((SentryHivePrivilegeObjectDesc) privSubjectDesc).isSentryPrivObjectDesc()) {
      SentryHivePrivilegeObjectDesc sPrivSubjectDesc =
          (SentryHivePrivilegeObjectDesc) privSubjectDesc;
      HivePrivilegeObjectType objectType = getPrivObjectType(sPrivSubjectDesc);
      return new SentryHivePrivilegeObject(objectType, privSubjectDesc.getObject());
    }
    return AuthorizationUtils.getHivePrivilegeObject(privSubjectDesc);
  }

  /**
   * Maps a Sentry privilege-object descriptor to the Hive object type it denotes.
   *
   * @return {@code null} when the descriptor carries no object; otherwise GLOBAL for a server,
   *         LOCAL_URI for a URI, TABLE_OR_VIEW for a table, DATABASE otherwise
   */
  protected static HivePrivilegeObjectType getPrivObjectType(
      SentryHivePrivilegeObjectDesc privSubjectDesc) {
    if (privSubjectDesc.getObject() == null) {
      return null;
    }
    if (privSubjectDesc.getServer()) {
      return HivePrivilegeObjectType.GLOBAL;
    } else if (privSubjectDesc.getUri()) {
      return HivePrivilegeObjectType.LOCAL_URI;
    } else {
      return privSubjectDesc.getTable() ? HivePrivilegeObjectType.TABLE_OR_VIEW
          : HivePrivilegeObjectType.DATABASE;
    }
  }

}
/**
 * Licensed to the Apache Software Foundation (ASF) under one
 * or more contributor license agreements.  See the NOTICE file
 * distributed with this work for additional information
 * regarding copyright ownership.  The ASF licenses this file
 * to you under the Apache License, Version 2.0 (the
 * "License"); you may not use this file except in compliance
 * with the License.  You may obtain a copy of the License at
 *
 *     http://www.apache.org/licenses/LICENSE-2.0
 *
 * Unless required by applicable law or agreed to in writing, software
 * distributed under the License is distributed on an "AS IS" BASIS,
 * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
 * See the License for the specific language governing permissions and
 * limitations under the License.
 */

package org.apache.sentry.binding.hive.v2.metastore;

import java.io.IOException;
import java.net.MalformedURLException;
import java.net.URL;
import java.util.List;
import java.util.Set;

import javax.security.auth.login.LoginException;

import org.apache.hadoop.hive.conf.HiveConf;
import org.apache.hadoop.hive.metastore.ObjectStore;
import org.apache.hadoop.hive.metastore.api.ColumnStatistics;
import org.apache.hadoop.hive.metastore.api.Database;
import org.apache.hadoop.hive.metastore.api.Index;
import org.apache.hadoop.hive.metastore.api.InvalidObjectException;
import org.apache.hadoop.hive.metastore.api.MetaException;
import org.apache.hadoop.hive.metastore.api.NoSuchObjectException;
import org.apache.hadoop.hive.metastore.api.Partition;
import org.apache.hadoop.hive.metastore.api.Table;
import org.apache.hadoop.hive.metastore.api.UnknownDBException;
import org.apache.hadoop.hive.ql.parse.SemanticException;
import org.apache.hadoop.hive.ql.plan.HiveOperation;
import org.apache.hadoop.hive.shims.ShimLoader;
import org.apache.hadoop.hive.shims.Utils;
import org.apache.sentry.binding.hive.HiveAuthzBindingHook;
import org.apache.sentry.binding.hive.authz.HiveAuthzBinding;
import org.apache.sentry.binding.hive.conf.HiveAuthzConf;
import org.apache.sentry.binding.hive.conf.HiveAuthzConf.AuthzConfVars;

import com.google.common.collect.ImmutableSet;
import com.google.common.collect.Lists;
import com.google.common.collect.Sets;

/***
 * This class is the wrapper of ObjectStore which is the interface between the
 * application logic and the database store. It authorizes, or filters the
 * results of, metastore requests:
 * callers only receive back the objects they hold privileges on, and list
 * requests (like getAllTables()) have entries the requestor cannot access
 * removed before being returned.
 */
public class AuthorizingObjectStoreV2 extends ObjectStore {
  // Lazily-initialized, class-wide caches. All access goes through the synchronized
  // getters below: the metastore serves many threads and unsynchronized lazy init
  // would race (duplicate construction, unsafe publication).
  private static ImmutableSet<String> serviceUsers;
  private static HiveConf hiveConf;
  private static HiveAuthzConf authzConf;
  private static HiveAuthzBinding hiveAuthzBinding;

  // Constants: final (and the messages are part of the user-visible error contract).
  private static final String NO_ACCESS_MESSAGE_TABLE =
      "Table does not exist or insufficient privileges to access: ";
  private static final String NO_ACCESS_MESSAGE_DATABASE =
      "Database does not exist or insufficient privileges to access: ";

  /** Returns databases matching {@code pattern}, filtered to those the caller may see. */
  @Override
  public List<String> getDatabases(String pattern) throws MetaException {
    return filterDatabases(super.getDatabases(pattern));
  }

  /** Returns all databases, filtered to those the caller may see. */
  @Override
  public List<String> getAllDatabases() throws MetaException {
    return filterDatabases(super.getAllDatabases());
  }

  /**
   * Returns the named database, or throws NoSuchObjectException when it does not
   * exist or the caller may not access it (same message in both cases, so callers
   * cannot probe for existence).
   */
  @Override
  public Database getDatabase(String name) throws NoSuchObjectException {
    Database db = super.getDatabase(name);
    try {
      if (filterDatabases(Lists.newArrayList(name)).isEmpty()) {
        throw new NoSuchObjectException(getNoAccessMessageForDB(name));
      }
    } catch (MetaException e) {
      NoSuchObjectException ex = new NoSuchObjectException("Failed to authorized access to " + name
          + " : " + e.getMessage());
      ex.initCause(e); // keep the original failure for diagnostics
      throw ex;
    }
    return db;
  }

  /** Returns the table, or null when it does not exist or the caller may not access it. */
  @Override
  public Table getTable(String dbName, String tableName) throws MetaException {
    Table table = super.getTable(dbName, tableName);
    if (table == null
        || filterTables(dbName, Lists.newArrayList(tableName)).isEmpty()) {
      return null;
    }
    return table;
  }

  /** Returns a partition after verifying table access. */
  @Override
  public Partition getPartition(String dbName, String tableName,
      List<String> part_vals) throws MetaException, NoSuchObjectException {
    if (filterTables(dbName, Lists.newArrayList(tableName)).isEmpty()) {
      throw new NoSuchObjectException(getNoAccessMessageForTable(dbName, tableName));
    }
    return super.getPartition(dbName, tableName, part_vals);
  }

  /** Returns partitions after verifying table access. */
  @Override
  public List<Partition> getPartitions(String dbName, String tableName,
      int maxParts) throws MetaException, NoSuchObjectException {
    if (filterTables(dbName, Lists.newArrayList(tableName)).isEmpty()) {
      throw new MetaException(getNoAccessMessageForTable(dbName, tableName));
    }
    return super.getPartitions(dbName, tableName, maxParts);
  }

  /** Returns tables matching {@code pattern}, filtered to those the caller may see. */
  @Override
  public List<String> getTables(String dbName, String pattern)
      throws MetaException {
    return filterTables(dbName, super.getTables(dbName, pattern));
  }

  /** Returns table objects for the accessible subset of {@code tableNames}. */
  @Override
  public List<Table> getTableObjectsByName(String dbname, List<String> tableNames)
      throws MetaException, UnknownDBException {
    return super.getTableObjectsByName(dbname, filterTables(dbname, tableNames));
  }

  /** Returns all tables of the database, filtered to those the caller may see. */
  @Override
  public List<String> getAllTables(String dbName) throws MetaException {
    return filterTables(dbName, super.getAllTables(dbName));
  }

  /** Returns table names matching {@code filter}, filtered to those the caller may see. */
  @Override
  public List<String> listTableNamesByFilter(String dbName, String filter,
      short maxTables) throws MetaException {
    return filterTables(dbName,
        super.listTableNamesByFilter(dbName, filter, maxTables));
  }

  /** Lists partition names after verifying table access. */
  @Override
  public List<String> listPartitionNames(String dbName, String tableName,
      short max_parts) throws MetaException {
    if (filterTables(dbName, Lists.newArrayList(tableName)).isEmpty()) {
      throw new MetaException(getNoAccessMessageForTable(dbName, tableName));
    }
    return super.listPartitionNames(dbName, tableName, max_parts);
  }

  /** Lists partition names by filter after verifying table access. */
  @Override
  public List<String> listPartitionNamesByFilter(String dbName,
      String tableName, String filter, short max_parts) throws MetaException {
    if (filterTables(dbName, Lists.newArrayList(tableName)).isEmpty()) {
      throw new MetaException(getNoAccessMessageForTable(dbName, tableName));
    }
    return super.listPartitionNamesByFilter(dbName, tableName, filter,
        max_parts);
  }

  /** Returns an index after verifying access to its table. */
  @Override
  public Index getIndex(String dbName, String origTableName, String indexName)
      throws MetaException {
    if (filterTables(dbName, Lists.newArrayList(origTableName)).isEmpty()) {
      throw new MetaException(getNoAccessMessageForTable(dbName, origTableName));
    }
    return super.getIndex(dbName, origTableName, indexName);
  }

  /** Returns indexes after verifying access to their table. */
  @Override
  public List<Index> getIndexes(String dbName, String origTableName, int max)
      throws MetaException {
    if (filterTables(dbName, Lists.newArrayList(origTableName)).isEmpty()) {
      throw new MetaException(getNoAccessMessageForTable(dbName, origTableName));
    }
    return super.getIndexes(dbName, origTableName, max);
  }

  /** Lists index names after verifying access to their table. */
  @Override
  public List<String> listIndexNames(String dbName, String origTableName,
      short max) throws MetaException {
    if (filterTables(dbName, Lists.newArrayList(origTableName)).isEmpty()) {
      throw new MetaException(getNoAccessMessageForTable(dbName, origTableName));
    }
    return super.listIndexNames(dbName, origTableName, max);
  }

  /** Returns partitions by filter after verifying table access. */
  @Override
  public List<Partition> getPartitionsByFilter(String dbName,
      String tblName, String filter, short maxParts) throws MetaException,
      NoSuchObjectException {
    if (filterTables(dbName, Lists.newArrayList(tblName)).isEmpty()) {
      throw new MetaException(getNoAccessMessageForTable(dbName, tblName));
    }
    return super.getPartitionsByFilter(dbName, tblName, filter, maxParts);
  }

  /** Returns partitions by name after verifying table access. */
  @Override
  public List<Partition> getPartitionsByNames(String dbName, String tblName,
      List<String> partNames) throws MetaException, NoSuchObjectException {
    if (filterTables(dbName, Lists.newArrayList(tblName)).isEmpty()) {
      throw new MetaException(getNoAccessMessageForTable(dbName, tblName));
    }
    return super.getPartitionsByNames(dbName, tblName, partNames);
  }

  /** Returns a partition with auth info after verifying table access. */
  @Override
  public Partition getPartitionWithAuth(String dbName, String tblName,
      List<String> partVals, String user_name, List<String> group_names)
      throws MetaException, NoSuchObjectException, InvalidObjectException {
    if (filterTables(dbName, Lists.newArrayList(tblName)).isEmpty()) {
      throw new MetaException(getNoAccessMessageForTable(dbName, tblName));
    }
    return super.getPartitionWithAuth(dbName, tblName, partVals, user_name,
        group_names);
  }

  /** Returns partitions with auth info after verifying table access. */
  @Override
  public List<Partition> getPartitionsWithAuth(String dbName, String tblName,
      short maxParts, String userName, List<String> groupNames)
      throws MetaException, InvalidObjectException {
    if (filterTables(dbName, Lists.newArrayList(tblName)).isEmpty()) {
      throw new MetaException(getNoAccessMessageForTable(dbName, tblName));
    }
    return super.getPartitionsWithAuth(dbName, tblName, maxParts, userName,
        groupNames);
  }

  /** Lists partition names (partial spec) after verifying table access. */
  @Override
  public List<String> listPartitionNamesPs(String dbName, String tblName,
      List<String> part_vals, short max_parts) throws MetaException,
      NoSuchObjectException {
    if (filterTables(dbName, Lists.newArrayList(tblName)).isEmpty()) {
      throw new MetaException(getNoAccessMessageForTable(dbName, tblName));
    }
    return super.listPartitionNamesPs(dbName, tblName, part_vals, max_parts);
  }

  /** Lists partitions (partial spec, with auth) after verifying table access. */
  @Override
  public List<Partition> listPartitionsPsWithAuth(String dbName,
      String tblName, List<String> part_vals, short max_parts, String userName,
      List<String> groupNames) throws MetaException, InvalidObjectException,
      NoSuchObjectException {
    if (filterTables(dbName, Lists.newArrayList(tblName)).isEmpty()) {
      throw new MetaException(getNoAccessMessageForTable(dbName, tblName));
    }
    return super.listPartitionsPsWithAuth(dbName, tblName, part_vals,
        max_parts, userName, groupNames);
  }

  /** Returns table column statistics after verifying table access. */
  @Override
  public ColumnStatistics getTableColumnStatistics(String dbName,
      String tableName, List<String> colNames) throws MetaException,
      NoSuchObjectException {
    if (filterTables(dbName, Lists.newArrayList(tableName)).isEmpty()) {
      throw new MetaException(getNoAccessMessageForTable(dbName, tableName));
    }
    return super.getTableColumnStatistics(dbName, tableName, colNames);
  }

  /** Returns partition column statistics after verifying table access. */
  @Override
  public List<ColumnStatistics> getPartitionColumnStatistics(
      String dbName, String tblName, List<String> partNames,
      List<String> colNames) throws MetaException, NoSuchObjectException {
    if (filterTables(dbName, Lists.newArrayList(tblName)).isEmpty()) {
      throw new MetaException(getNoAccessMessageForTable(dbName, tblName));
    }
    return super.getPartitionColumnStatistics(dbName, tblName, partNames,
        colNames);
  }

  /**
   * Invokes Hive database filtering that removes the entries the user has no
   * privileges to access. Service users bypass filtering entirely.
   *
   * @param dbList candidate database names
   * @return the accessible subset of {@code dbList}
   * @throws MetaException when the authorization check itself fails
   */
  private List<String> filterDatabases(List<String> dbList)
      throws MetaException {
    if (needsAuthorization(getUserName())) {
      try {
        return HiveAuthzBindingHook.filterShowDatabases(getHiveAuthzBinding(),
            dbList, HiveOperation.SHOWDATABASES, getUserName());
      } catch (SemanticException e) {
        MetaException me = new MetaException("Error getting DB list " + e.getMessage());
        me.initCause(e); // preserve the underlying cause for debugging
        throw me;
      }
    } else {
      return dbList;
    }
  }

  /**
   * Invokes Hive table filtering that removes the entries the user has no
   * privileges to access. Service users bypass filtering entirely.
   *
   * @param dbName database containing the tables
   * @param tabList candidate table names
   * @return the accessible subset of {@code tabList}
   * @throws MetaException when the authorization check itself fails
   */
  protected List<String> filterTables(String dbName, List<String> tabList)
      throws MetaException {
    if (needsAuthorization(getUserName())) {
      try {
        return HiveAuthzBindingHook.filterShowTables(getHiveAuthzBinding(),
            tabList, HiveOperation.SHOWTABLES, getUserName(), dbName);
      } catch (SemanticException e) {
        MetaException me = new MetaException("Error getting Table list " + e.getMessage());
        me.initCause(e); // preserve the underlying cause for debugging
        throw me;
      }
    } else {
      return tabList;
    }
  }

  /**
   * Lazily loads the shared Hive auth provider. Synchronized: multiple metastore
   * threads may race to initialize the static binding otherwise.
   *
   * @return the shared HiveAuthzBinding
   * @throws MetaException when the binding cannot be constructed
   */
  private static synchronized HiveAuthzBinding getHiveAuthzBindingInternal(
      HiveConf conf, HiveAuthzConf authz) throws MetaException {
    if (hiveAuthzBinding == null) {
      try {
        hiveAuthzBinding = new HiveAuthzBinding(HiveAuthzBinding.HiveHook.HiveMetaStore,
            conf, authz);
      } catch (Exception e) {
        MetaException me = new MetaException("Failed to load Hive binding " + e.getMessage());
        me.initCause(e);
        throw me;
      }
    }
    return hiveAuthzBinding;
  }

  private HiveAuthzBinding getHiveAuthzBinding() throws MetaException {
    return getHiveAuthzBindingInternal(getHiveConf(), getAuthzConf());
  }

  /**
   * Lazily loads the trimmed set of service users that bypass authorization,
   * from AUTHZ_METASTORE_SERVICE_USERS. Synchronized against the static cache.
   */
  private synchronized ImmutableSet<String> getServiceUsers() throws MetaException {
    if (serviceUsers == null) {
      serviceUsers = ImmutableSet.copyOf(trimAll(Sets.newHashSet(getAuthzConf().getStrings(
          AuthzConfVars.AUTHZ_METASTORE_SERVICE_USERS.getVar(), new String[] { "" }))));
    }
    return serviceUsers;
  }

  /** Lazily builds the shared HiveConf from this store's configuration. */
  private synchronized HiveConf getHiveConf() {
    if (hiveConf == null) {
      hiveConf = new HiveConf(getConf(), this.getClass());
    }
    return hiveConf;
  }

  /**
   * Lazily loads the Sentry authorization configuration from the URL named by
   * HIVE_SENTRY_CONF_URL. Synchronized against the static cache.
   *
   * @throws MetaException when the key is unset, blank, or a malformed URL
   */
  private synchronized HiveAuthzConf getAuthzConf() throws MetaException {
    if (authzConf == null) {
      String hiveAuthzConf = getConf().get(HiveAuthzConf.HIVE_SENTRY_CONF_URL);
      if (hiveAuthzConf == null
          || (hiveAuthzConf = hiveAuthzConf.trim()).isEmpty()) {
        throw new MetaException("Configuration key "
            + HiveAuthzConf.HIVE_SENTRY_CONF_URL + " value '" + hiveAuthzConf
            + "' is invalid.");
      }
      try {
        authzConf = new HiveAuthzConf(new URL(hiveAuthzConf));
      } catch (MalformedURLException e) {
        MetaException me = new MetaException("Configuration key "
            + HiveAuthzConf.HIVE_SENTRY_CONF_URL
            + " specifies a malformed URL '" + hiveAuthzConf + "' "
            + e.getMessage());
        me.initCause(e);
        throw me;
      }
    }
    return authzConf;
  }

  /**
   * Extracts the user from the underlying auth subsystem.
   *
   * @return the short user name of the calling UGI
   * @throws MetaException when the login/UGI lookup fails
   */
  private String getUserName() throws MetaException {
    try {
      return Utils.getUGI().getShortUserName();
    } catch (LoginException e) {
      MetaException me = new MetaException("Failed to get username " + e.getMessage());
      me.initCause(e);
      throw me;
    } catch (IOException e) {
      MetaException me = new MetaException("Failed to get username " + e.getMessage());
      me.initCause(e);
      throw me;
    }
  }

  /**
   * Checks whether the given user must be validated; configured service users
   * are exempt from authorization.
   *
   * @param userName user to check (trimmed before comparison)
   * @return true when the user is NOT a service user
   */
  private boolean needsAuthorization(String userName) throws MetaException {
    return !getServiceUsers().contains(userName.trim());
  }

  /** Returns a new set with every element trimmed. */
  private static Set<String> trimAll(Set<String> s) {
    Set<String> result = Sets.newHashSet();
    for (String v : s) {
      result.add(v.trim());
    }
    return result;
  }

  /** Builds the uniform "no access" message for a table; also used by subclasses. */
  protected String getNoAccessMessageForTable(String dbName, String tableName) {
    return NO_ACCESS_MESSAGE_TABLE + "<" + dbName + ">.<" + tableName + ">";
  }

  /** Builds the uniform "no access" message for a database. */
  private String getNoAccessMessageForDB(String dbName) {
    return NO_ACCESS_MESSAGE_DATABASE + "<" + dbName + ">";
  }
}
 * You may obtain a copy of the License at
 *
 * http://www.apache.org/licenses/LICENSE-2.0
 *
 * Unless required by applicable law or agreed to in writing, software
 * distributed under the License is distributed on an "AS IS" BASIS,
 * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
 * See the License for the specific language governing permissions and
 * limitations under the License.
 */
package org.apache.sentry.binding.hive.v2.metastore;

import org.apache.hadoop.conf.Configuration;
import org.apache.hadoop.hive.metastore.api.InvalidOperationException;
import org.apache.hadoop.hive.metastore.api.MetaException;
import org.apache.hadoop.hive.metastore.events.PreDropPartitionEvent;
import org.apache.hadoop.hive.ql.plan.HiveOperation;
import org.apache.sentry.binding.metastore.MetastoreAuthzBinding;

/**
 * Sentry binding for Hive Metastore (Hive v2 authorizer API). The binding is
 * integrated into the Metastore via pre-event listeners which are fired prior
 * to executing the metadata action. At this point only metadata writes are
 * authorized, since the listeners are not fired for read events. Each action
 * builds an input and an output hierarchy for the objects used in the given
 * operation; this is then passed down to the Hive binding which handles the
 * authorization, ensuring that we follow the same privilege model and
 * policies.
 */
public class MetastoreAuthzBindingV2 extends MetastoreAuthzBinding {

  public MetastoreAuthzBindingV2(Configuration config) throws Exception {
    super(config);
  }

  /**
   * Authorize dropping a partition as an ALTER TABLE ... DROP PARTITION
   * against the owning table.
   *
   * NOTE(review): both the first (read) and the second (write) hierarchy are
   * built with addTableToOutput; this mirrors the v1 binding, but confirm the
   * first hierarchy should not use addTableToInput instead.
   */
  protected void authorizeDropPartition(PreDropPartitionEvent context)
      throws InvalidOperationException, MetaException {
    authorizeMetastoreAccess(
        HiveOperation.ALTERTABLE_DROPPARTS,
        new HierarcyBuilder().addTableToOutput(getAuthServer(),
            context.getTable().getDbName(),
            context.getTable().getTableName()).build(),
        new HierarcyBuilder().addTableToOutput(getAuthServer(),
            context.getTable().getDbName(),
            context.getTable().getTableName()).build());
  }

}

/**
 * Licensed to the Apache Software Foundation (ASF) under one
 * or more contributor license agreements.  See the NOTICE file
 * distributed with this work for additional information
 * regarding copyright ownership.  The ASF licenses this file
 * to you under the Apache License, Version 2.0 (the
 * "License"); you may not use this file except in compliance
 * with the License.
You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. + */ +package org.apache.sentry.binding.hive.v2.metastore; + +import java.util.Iterator; + +import org.apache.hadoop.conf.Configuration; +import org.apache.hadoop.hive.metastore.api.MetaException; +import org.apache.hadoop.hive.metastore.api.Partition; +import org.apache.hadoop.hive.metastore.events.AddPartitionEvent; +import org.apache.hadoop.hive.metastore.events.DropPartitionEvent; +import org.apache.sentry.binding.metastore.SentryMetastorePostEventListener; +import org.apache.sentry.provider.db.SentryMetastoreListenerPlugin; + +public class SentryMetastorePostEventListenerV2 extends SentryMetastorePostEventListener { + + public SentryMetastorePostEventListenerV2(Configuration config) { + super(config); + } + + @Override + public void onAddPartition(AddPartitionEvent partitionEvent) + throws MetaException { + if (partitionEvent != null && partitionEvent.getPartitionIterator() != null) { + Iterator<Partition> it = partitionEvent.getPartitionIterator(); + while (it.hasNext()) { + Partition part = it.next(); + if ((part.getSd() != null) && (part.getSd().getLocation() != null)) { + String authzObj = part.getDbName() + "." + part.getTableName(); + String path = part.getSd().getLocation(); + for (SentryMetastoreListenerPlugin plugin : sentryPlugins) { + plugin.addPath(authzObj, path); + } + } + } + } + } + + @Override + public void onDropPartition(DropPartitionEvent partitionEvent) + throws MetaException { + if (partitionEvent != null && partitionEvent.getPartitionIterator() != null) { + String authzObj = partitionEvent.getTable().getDbName() + "." 
+ + partitionEvent.getTable().getTableName(); + Iterator<Partition> it = partitionEvent.getPartitionIterator(); + while (it.hasNext()) { + Partition part = it.next(); + if ((part.getSd() != null) && (part.getSd().getLocation() != null)) { + String path = part.getSd().getLocation(); + for (SentryMetastoreListenerPlugin plugin : sentryPlugins) { + plugin.removePath(authzObj, path); + } + } + } + } + } + +} http://git-wip-us.apache.org/repos/asf/incubator-sentry/blob/5e58f3fe/sentry-binding/sentry-binding-hive-v2/src/main/java/org/apache/sentry/binding/hive/v2/util/SentryAuthorizerUtil.java ---------------------------------------------------------------------- diff --git a/sentry-binding/sentry-binding-hive-v2/src/main/java/org/apache/sentry/binding/hive/v2/util/SentryAuthorizerUtil.java b/sentry-binding/sentry-binding-hive-v2/src/main/java/org/apache/sentry/binding/hive/v2/util/SentryAuthorizerUtil.java new file mode 100644 index 0000000..35bd68c --- /dev/null +++ b/sentry-binding/sentry-binding-hive-v2/src/main/java/org/apache/sentry/binding/hive/v2/util/SentryAuthorizerUtil.java @@ -0,0 +1,362 @@ +/** + * Licensed to the Apache Software Foundation (ASF) under one or more contributor license + * agreements. See the NOTICE file distributed with this work for additional information regarding + * copyright ownership. The ASF licenses this file to you under the Apache License, Version 2.0 (the + * "License"); you may not use this file except in compliance with the License. You may obtain a + * copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software distributed under the License + * is distributed on an "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express + * or implied. See the License for the specific language governing permissions and limitations under + * the License. 
+ */ +package org.apache.sentry.binding.hive.v2.util; + +import java.net.URI; +import java.net.URISyntaxException; +import java.util.ArrayList; +import java.util.List; +import java.util.Set; + +import org.apache.hadoop.conf.Configuration; +import org.apache.hadoop.hive.common.JavaUtils; +import org.apache.hadoop.hive.conf.HiveConf; +import org.apache.hadoop.hive.conf.HiveConf.ConfVars; +import org.apache.hadoop.hive.metastore.api.PrincipalType; +import org.apache.hadoop.hive.ql.hooks.Hook; +import org.apache.hadoop.hive.ql.metadata.AuthorizationException; +import org.apache.hadoop.hive.ql.parse.SemanticException; +import org.apache.hadoop.hive.ql.plan.HiveOperation; +import org.apache.hadoop.hive.ql.security.authorization.PrivilegeType; +import org.apache.hadoop.hive.ql.security.authorization.plugin.HivePrincipal; +import org.apache.hadoop.hive.ql.security.authorization.plugin.HivePrincipal.HivePrincipalType; +import org.apache.hadoop.hive.ql.security.authorization.plugin.HivePrivilege; +import org.apache.hadoop.hive.ql.security.authorization.plugin.HivePrivilegeInfo; +import org.apache.hadoop.hive.ql.security.authorization.plugin.HivePrivilegeObject; +import org.apache.hadoop.hive.ql.security.authorization.plugin.HivePrivilegeObject.HivePrivilegeObjectType; +import org.apache.hadoop.hive.ql.security.authorization.plugin.HiveRoleGrant; +import org.apache.hadoop.hive.ql.session.SessionState; +import org.apache.sentry.binding.hive.SentryOnFailureHook; +import org.apache.sentry.binding.hive.SentryOnFailureHookContext; +import org.apache.sentry.binding.hive.conf.HiveAuthzConf; +import org.apache.sentry.core.common.utils.PathUtils; +import org.apache.sentry.core.model.db.AccessConstants; +import org.apache.sentry.core.model.db.AccessURI; +import org.apache.sentry.core.model.db.Column; +import org.apache.sentry.core.model.db.DBModelAuthorizable; +import org.apache.sentry.core.model.db.Database; +import org.apache.sentry.core.model.db.Server; +import 
org.apache.sentry.core.model.db.Table; +import org.apache.sentry.provider.db.service.thrift.TSentryGrantOption; +import org.apache.sentry.provider.db.service.thrift.TSentryPrivilege; +import org.apache.sentry.provider.db.service.thrift.TSentryRole; +import org.apache.sentry.service.thrift.ServiceConstants.PrivilegeScope; +import org.slf4j.Logger; +import org.slf4j.LoggerFactory; + +import com.google.common.base.Splitter; + +public class SentryAuthorizerUtil { + public static final Logger LOG = LoggerFactory.getLogger(SentryAuthorizerUtil.class); + public static String UNKONWN_GRANTOR = "--"; + + /** + * Convert string to URI + * + * @param uri + * @param isLocal + * @throws SemanticException + * @throws URISyntaxException + */ + public static AccessURI parseURI(String uri, boolean isLocal) throws URISyntaxException { + HiveConf conf = SessionState.get().getConf(); + String warehouseDir = conf.getVar(ConfVars.METASTOREWAREHOUSE); + return new AccessURI(PathUtils.parseURI(warehouseDir, uri, isLocal)); + } + + /** + * Convert HivePrivilegeObject to DBModelAuthorizable list Now hive 0.13 don't support column + * level + * + * @param server + * @param privilege + */ + public static List<List<DBModelAuthorizable>> getAuthzHierarchy(Server server, + HivePrivilegeObject privilege) { + List<DBModelAuthorizable> baseHierarchy = new ArrayList<DBModelAuthorizable>(); + List<List<DBModelAuthorizable>> objectHierarchy = new ArrayList<List<DBModelAuthorizable>>(); + boolean isLocal = false; + if (privilege.getType() != null) { + switch (privilege.getType()) { + case GLOBAL: + baseHierarchy.add(new Server(privilege.getObjectName())); + objectHierarchy.add(baseHierarchy); + break; + case DATABASE: + baseHierarchy.add(server); + baseHierarchy.add(new Database(privilege.getDbname())); + objectHierarchy.add(baseHierarchy); + break; + case TABLE_OR_VIEW: + baseHierarchy.add(server); + baseHierarchy.add(new Database(privilege.getDbname())); + baseHierarchy.add(new 
Table(privilege.getObjectName())); + if (privilege.getColumns() != null) { + for (String columnName : privilege.getColumns()) { + List<DBModelAuthorizable> columnHierarchy = + new ArrayList<DBModelAuthorizable>(baseHierarchy); + columnHierarchy.add(new Column(columnName)); + objectHierarchy.add(columnHierarchy); + } + } else { + objectHierarchy.add(baseHierarchy); + } + break; + case LOCAL_URI: + isLocal = true; + case DFS_URI: + if (privilege.getObjectName() == null) { + break; + } + try { + baseHierarchy.add(server); + baseHierarchy.add(parseURI(privilege.getObjectName(), isLocal)); + objectHierarchy.add(baseHierarchy); + } catch (Exception e) { + throw new AuthorizationException("Failed to get File URI", e); + } + break; + case FUNCTION: + case PARTITION: + case COLUMN: + case COMMAND_PARAMS: + // not support these type + break; + default: + break; + } + } + return objectHierarchy; + } + + /** + * Convert HivePrivilegeObject list to List<List<DBModelAuthorizable>> + * + * @param server + * @param privilges + */ + public static List<List<DBModelAuthorizable>> convert2SentryPrivilegeList(Server server, + List<HivePrivilegeObject> privilges) { + List<List<DBModelAuthorizable>> hierarchyList = new ArrayList<List<DBModelAuthorizable>>(); + if (privilges != null && !privilges.isEmpty()) { + for (HivePrivilegeObject p : privilges) { + hierarchyList.addAll(getAuthzHierarchy(server, p)); + } + } + return hierarchyList; + } + + /** + * Convert HiveOperationType to HiveOperation + * + * @param type + */ + public static HiveOperation convert2HiveOperation(String typeName) { + try { + return HiveOperation.valueOf(typeName); + } catch (Exception e) { + return null; + } + } + + /** + * Convert HivePrivilege to Sentry Action + * + * @param hivePrivilege + */ + public static String convert2SentryAction(HivePrivilege hivePrivilege) { + if (PrivilegeType.ALL.name().equals(hivePrivilege.getName())) { + return AccessConstants.ALL; + } else { + return hivePrivilege.getName(); + } + } 
+ + /** + * Convert Sentry Action to HivePrivilege + * + * @param hivePrivilege + */ + public static HivePrivilege convert2HivePrivilege(String action) { + return new HivePrivilege(action, null); + } + + /** + * Convert TSentryRole Set to String List + * + * @param roleSet + */ + public static List<String> convert2RoleList(Set<TSentryRole> roleSet) { + List<String> roles = new ArrayList<String>(); + if (roleSet != null && !roleSet.isEmpty()) { + for (TSentryRole tRole : roleSet) { + roles.add(tRole.getRoleName()); + } + } + return roles; + } + + /** + * Convert TSentryPrivilege to HivePrivilegeInfo + * + * @param tPrivilege + * @param principal + */ + public static HivePrivilegeInfo convert2HivePrivilegeInfo(TSentryPrivilege tPrivilege, + HivePrincipal principal) { + HivePrivilege hivePrivilege = convert2HivePrivilege(tPrivilege.getAction()); + HivePrivilegeObject hivePrivilegeObject = convert2HivePrivilegeObject(tPrivilege); + // now sentry don't show grantor of a privilege + HivePrincipal grantor = new HivePrincipal(UNKONWN_GRANTOR, HivePrincipalType.ROLE); + boolean grantOption = + tPrivilege.getGrantOption().equals(TSentryGrantOption.TRUE) ? 
true : false; + return new HivePrivilegeInfo(principal, hivePrivilege, hivePrivilegeObject, grantor, + grantOption, (int) tPrivilege.getCreateTime()); + } + + /** + * Convert TSentryPrivilege to HivePrivilegeObject + * + * @param tSentryPrivilege + */ + public static HivePrivilegeObject convert2HivePrivilegeObject(TSentryPrivilege tSentryPrivilege) { + HivePrivilegeObject privilege = null; + switch (PrivilegeScope.valueOf(tSentryPrivilege.getPrivilegeScope())) { + case SERVER: + privilege = new HivePrivilegeObject(HivePrivilegeObjectType.GLOBAL, "*", null); + break; + case DATABASE: + privilege = + new HivePrivilegeObject(HivePrivilegeObjectType.DATABASE, tSentryPrivilege.getDbName(), + null); + break; + case TABLE: + privilege = + new HivePrivilegeObject(HivePrivilegeObjectType.TABLE_OR_VIEW, + tSentryPrivilege.getDbName(), tSentryPrivilege.getTableName()); + break; + case COLUMN: + privilege = + new HivePrivilegeObject(HivePrivilegeObjectType.COLUMN, tSentryPrivilege.getDbName(), + tSentryPrivilege.getTableName(), null, tSentryPrivilege.getColumnName()); + break; + case URI: + String uriString = tSentryPrivilege.getURI(); + try { + uriString = uriString.replace("'", "").replace("\"", ""); + HivePrivilegeObjectType type = + isLocalUri(uriString) ? 
HivePrivilegeObjectType.LOCAL_URI + : HivePrivilegeObjectType.DFS_URI; + privilege = new HivePrivilegeObject(type, uriString, null); + } catch (URISyntaxException e1) { + throw new RuntimeException(uriString + "is not a URI"); + } + default: + LOG.warn("Unknown PrivilegeScope: " + + PrivilegeScope.valueOf(tSentryPrivilege.getPrivilegeScope())); + break; + } + return privilege; + } + + public static boolean isLocalUri(String uriString) throws URISyntaxException { + URI uri = new URI(uriString); + if (uri.getScheme().equalsIgnoreCase("file")) { + return true; + } + + return false; + } + + /** + * Convert TSentryRole to HiveRoleGrant + * + * @param role + */ + public static HiveRoleGrant convert2HiveRoleGrant(TSentryRole role) { + HiveRoleGrant hiveRoleGrant = new HiveRoleGrant(); + hiveRoleGrant.setRoleName(role.getRoleName()); + hiveRoleGrant.setPrincipalName(role.getRoleName()); + hiveRoleGrant.setPrincipalType(PrincipalType.ROLE.name()); + hiveRoleGrant.setGrantOption(false); + hiveRoleGrant.setGrantor(role.getGrantorPrincipal()); + hiveRoleGrant.setGrantorType(PrincipalType.USER.name()); + return hiveRoleGrant; + } + + /** + * Execute on failure hooks for e2e tests + * + * @param context + * @param conf + * @param hiveOp + */ + public static void executeOnFailureHooks(SentryOnFailureHookContext hookCtx, Configuration conf) { + String csHooks = + conf.get(HiveAuthzConf.AuthzConfVars.AUTHZ_ONFAILURE_HOOKS.getVar(), "").trim(); + + try { + for (Hook aofh : SentryAuthorizerUtil.getHooks(csHooks)) { + ((SentryOnFailureHook) aofh).run(hookCtx); + } + } catch (Exception ex) { + LOG.error("Error executing hook:", ex); + } + } + + /** + * Returns a set of hooks specified in a configuration variable. 
+ * + * See getHooks(HiveAuthzConf.AuthzConfVars hookConfVar, Class<T> clazz) + * + * @param hookConfVar + * @return + * @throws Exception + */ + public static List<Hook> getHooks(String csHooks) throws Exception { + return getHooks(csHooks, Hook.class); + } + + /** + * Returns the hooks specified in a configuration variable. The hooks are returned in a list in + * the order they were specified in the configuration variable. + * + * @param hookConfVar The configuration variable specifying a comma separated list of the hook + * class names. + * @param clazz The super type of the hooks. + * @return A list of the hooks cast as the type specified in clazz, in the order they are listed + * in the value of hookConfVar + * @throws Exception + */ + public static <T extends Hook> List<T> getHooks(String csHooks, Class<T> clazz) throws Exception { + + List<T> hooks = new ArrayList<T>(); + if (csHooks.isEmpty()) { + return hooks; + } + for (String hookClass : Splitter.on(",").omitEmptyStrings().trimResults().split(csHooks)) { + try { + @SuppressWarnings("unchecked") + T hook = (T) Class.forName(hookClass, true, JavaUtils.getClassLoader()).newInstance(); + hooks.add(hook); + } catch (ClassNotFoundException e) { + LOG.error(hookClass + " Class not found:" + e.getMessage()); + throw e; + } + } + + return hooks; + } +} http://git-wip-us.apache.org/repos/asf/incubator-sentry/blob/5e58f3fe/sentry-binding/sentry-binding-hive-v2/src/main/java/org/apache/sentry/binding/hive/v2/util/SimpleSemanticAnalyzer.java ---------------------------------------------------------------------- diff --git a/sentry-binding/sentry-binding-hive-v2/src/main/java/org/apache/sentry/binding/hive/v2/util/SimpleSemanticAnalyzer.java b/sentry-binding/sentry-binding-hive-v2/src/main/java/org/apache/sentry/binding/hive/v2/util/SimpleSemanticAnalyzer.java new file mode 100644 index 0000000..b50bbf4 --- /dev/null +++ 
b/sentry-binding/sentry-binding-hive-v2/src/main/java/org/apache/sentry/binding/hive/v2/util/SimpleSemanticAnalyzer.java @@ -0,0 +1,369 @@ +/** + * Licensed to the Apache Software Foundation (ASF) under one or more contributor license + * agreements. See the NOTICE file distributed with this work for additional information regarding + * copyright ownership. The ASF licenses this file to you under the Apache License, Version 2.0 (the + * "License"); you may not use this file except in compliance with the License. You may obtain a + * copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software distributed under the License + * is distributed on an "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express + * or implied. See the License for the specific language governing permissions and limitations under + * the License. + */ +package org.apache.sentry.binding.hive.v2.util; + +import java.util.HashMap; +import java.util.Map; +import java.util.regex.Matcher; +import java.util.regex.Pattern; + +import org.apache.hadoop.hive.ql.plan.HiveOperation; +import org.apache.hadoop.hive.ql.security.authorization.plugin.HiveAuthzPluginException; +import org.apache.hadoop.hive.ql.session.SessionState; +import org.apache.sentry.core.model.db.Table; + +/** + * Currently hive complier doesn't create read/write entities for some operations, e.g. create + * table, drop table. This class is a simple semantic analyzer using regex, it is a workaround + * approach to extract db_name and tb_name from those operations. + */ +public class SimpleSemanticAnalyzer { + private String currentDb; + private String currentTb; + + /** + * CREATE [TEMPORARY] [EXTERNAL] TABLE [IF NOT EXISTS] [db_name.]table_name ... + */ + private static final String CREATE_TABLE_REGEX = "^(CREATE)\\s+" + "(TEMPORARY\\s+)?" + + "(EXTERNAL\\s+)?" + "TABLE\\s+" + "(IF\\s+NOT\\s+EXISTS\\s+)?" 
+ "([A-Za-z0-9._]+)"; + + /** + * DROP (DATABASE|SCHEMA) [IF EXISTS] database_name [RESTRICT|CASCADE]; + */ + private static final String DROP_DB_REGEX = "^DROP\\s+" + "(DATABASE|SCHEMA)\\s+" + + "(IF\\s+EXISTS\\s+)?" + "([A-Za-z0-9_]+)"; + + /** + * DROP TABLE [IF EXISTS] table_name; + */ + private static final String DROP_TABLE_REGEX = "^DROP\\s+" + "TABLE\\s+" + "(IF\\s+EXISTS\\s+)?" + + "([A-Za-z0-9._]+)"; + + /** + * DROP VIEW [IF EXISTS] view_name; + */ + private static final String DROP_VIEW_REGEX = "^DROP\\s+" + "VIEW\\s+" + "(IF\\s+EXISTS\\s+)?" + + "([A-Za-z0-9_].+)"; + + /** + * DESCRIBE DATABASE|SCHEMA [EXTENDED] db_name; + */ + private static final String DESCRIBE_DB_REGEX = "^DESCRIBE\\s+" + "(DATABASE|SCHEMA)\\s+" + + "(EXTENDED\\s+)?" + "([A-Za-z0-9_]+)"; + + /** + * DESCRIBE [EXTENDED|FORMATTED] [db_name.]table_name[.col_name ( [.field_name] | [.'$elem$'] | + * [.'$key$'] | [.'$value$'] )* ]; + */ + private static final String DESCRIBE_TABLE_REGEX = "^DESCRIBE\\s+" + + "((EXTENDED|FORMATTED)\\s+)?" + "([A-Za-z0-9._]+)"; + + /** + * SHOW [FORMATTED] (INDEX|INDEXES) ON table_with_index [(FROM|IN) db_name]; + */ + private static final String SHOW_INDEX_REGEX = "^SHOW\\s+" + "(FORMATTED\\s+)?" + + "(INDEX|INDEXES)\\s+" + "ON\\s+" + "([A-Za-z0-9._]+)\\s*" + + "((FROM|IN)\\s+([A-Za-z0-9_]+))?"; + + /** + * SHOW TBLPROPERTIES tblname; + */ + private static final String SHOW_TBLPROPERTIES_REGEX = "^SHOW\\s+" + "TBLPROPERTIES\\s+" + + "([A-Za-z0-9._]+)"; + + /** + * ALTER TABLE table_name ... + */ + private static final String ALTER_TABLE_REGEX = "^ALTER\\s+" + "TABLE\\s+" + "([A-Za-z0-9._]+)"; + + /** + * ALTER VIEW view_name ... 
+ */ + private static final String ALTER_VIEW_REGEX = "^ALTER\\s+" + "VIEW\\s+" + "([A-Za-z0-9._]+)"; + + /** + * MSCK REPAIR TABLE table_name; + */ + private static final String MSCK_REGEX = "^MSCK\\s+" + "REPAIR\\s" + "TABLE\\s" + + "([A-Za-z0-9._]+)"; + + /** + * ALTER INDEX index_name ON table_name [PARTITION partition_spec] REBUILD; + */ + private static final String ALTER_INDEX_REGEX = "^ALTER\\s+" + "INDEX\\s+" + + "([A-Za-z0-9_]+)\\s+" + "ON\\s" + "([A-Za-z0-9._]+)"; + + /** + * CREATE FUNCTION [db_name.]function_name AS class_name [USING JAR|FILE|ARCHIVE 'file_uri' [, + * JAR|FILE|ARCHIVE 'file_uri'] ]; + */ + private static final String CREATE_FUNCTION_REGEX = "^CREATE\\s+" + "(TEMPORARY\\s+)?" + + "FUNCTION\\s+" + "([A-Za-z0-9._]+)\\s+" + "AS\\s" + "([A-Za-z0-9._']+)"; + + /** + * SHOW COLUMNS FROM table_name + */ + private static final String SHOWCOLUMNS = "^SHOW\\s+" + "COLUMNS\\s+" + "(FROM|IN)\\s+" + + "([A-Za-z0-9._]+)"; + + private static final String SHOW_TABLESTATUS = "^SHOW\\s+" + "TABLE\\s+" + "EXTENDED\\s+" + "IN\\s+" + + "([A-Za-z0-9._]+)"; + + private static final String LOAD = "^LOAD\\s+" + "DATA\\s+" + "(LOCAL\\s+)?" 
+ "INPATH\\s+" + + "([A-Za-z0-9._':///-]+)" +"\\s" + "INTO\\s" + "TABLE\\s" + "([A-Za-z0-9._]+)"; + + /** + * LOCK DATABASE dbname; + */ + private static final String LOCKDB = "^LOCK\\s+" + "DATABASE\\s+" + "([A-Za-z0-9._]+)"; + + /** + * UNLOCK DATABASE dbname; + */ + private static final String UNLOCKDB = "^UNLOCK\\s+" + "DATABASE\\s+" + "([A-Za-z0-9._]+)"; + + /** + * LOCK TABLE tblname; + */ + private static final String LOCKTABLE = "^LOCK\\s+" + "TABLE\\s+" + "([A-Za-z0-9._]+)"; + + /** + * UNLOCK TABLE tblname; + */ + private static final String UNLOCKTABLE = "^UNLOCK\\s+" + "TABLE\\s+" + "([A-Za-z0-9._]+)"; + + private static Map<HiveOperation, String> OP_REGEX_MAP = new HashMap<HiveOperation, String>(); + static { + // database metadata + OP_REGEX_MAP.put(HiveOperation.DROPDATABASE, DROP_DB_REGEX); + OP_REGEX_MAP.put(HiveOperation.DESCDATABASE, DESCRIBE_DB_REGEX); + + // table metadata + OP_REGEX_MAP.put(HiveOperation.CREATETABLE, CREATE_TABLE_REGEX); + OP_REGEX_MAP.put(HiveOperation.DROPTABLE, DROP_TABLE_REGEX); + OP_REGEX_MAP.put(HiveOperation.DROPVIEW, DROP_VIEW_REGEX); + OP_REGEX_MAP.put(HiveOperation.DESCTABLE, DESCRIBE_TABLE_REGEX); + OP_REGEX_MAP.put(HiveOperation.SHOW_TBLPROPERTIES, SHOW_TBLPROPERTIES_REGEX); + OP_REGEX_MAP.put(HiveOperation.ALTERTABLE_PROPERTIES, ALTER_TABLE_REGEX); + OP_REGEX_MAP.put(HiveOperation.ALTERTABLE_SERDEPROPERTIES, ALTER_TABLE_REGEX); + OP_REGEX_MAP.put(HiveOperation.ALTERTABLE_CLUSTER_SORT, ALTER_TABLE_REGEX); + OP_REGEX_MAP.put(HiveOperation.ALTERTABLE_FILEFORMAT, ALTER_TABLE_REGEX); + OP_REGEX_MAP.put(HiveOperation.ALTERTABLE_TOUCH, ALTER_TABLE_REGEX); + OP_REGEX_MAP.put(HiveOperation.ALTERTABLE_PROTECTMODE, ALTER_TABLE_REGEX); + OP_REGEX_MAP.put(HiveOperation.ALTERTABLE_RENAMECOL, ALTER_TABLE_REGEX); + OP_REGEX_MAP.put(HiveOperation.ALTERTABLE_ADDCOLS, ALTER_TABLE_REGEX); + OP_REGEX_MAP.put(HiveOperation.ALTERTABLE_REPLACECOLS, ALTER_TABLE_REGEX); + OP_REGEX_MAP.put(HiveOperation.ALTERTABLE_RENAMEPART, 
ALTER_TABLE_REGEX); + OP_REGEX_MAP.put(HiveOperation.ALTERTABLE_ARCHIVE, ALTER_TABLE_REGEX); + OP_REGEX_MAP.put(HiveOperation.ALTERTABLE_UNARCHIVE, ALTER_TABLE_REGEX); + OP_REGEX_MAP.put(HiveOperation.ALTERTABLE_SERIALIZER, ALTER_TABLE_REGEX); + OP_REGEX_MAP.put(HiveOperation.ALTERTABLE_MERGEFILES, ALTER_TABLE_REGEX); + OP_REGEX_MAP.put(HiveOperation.ALTERTABLE_SKEWED, ALTER_TABLE_REGEX); + OP_REGEX_MAP.put(HiveOperation.ALTERTABLE_DROPPARTS, ALTER_TABLE_REGEX); + OP_REGEX_MAP.put(HiveOperation.ALTERTABLE_ADDPARTS, ALTER_TABLE_REGEX); + OP_REGEX_MAP.put(HiveOperation.ALTERTABLE_RENAME, ALTER_TABLE_REGEX); + OP_REGEX_MAP.put(HiveOperation.ALTERTABLE_LOCATION, ALTER_TABLE_REGEX); + OP_REGEX_MAP.put(HiveOperation.ALTERPARTITION_FILEFORMAT, ALTER_TABLE_REGEX); + OP_REGEX_MAP.put(HiveOperation.ALTERPARTITION_PROTECTMODE, ALTER_TABLE_REGEX); + OP_REGEX_MAP.put(HiveOperation.ALTERPARTITION_SERDEPROPERTIES, ALTER_TABLE_REGEX); + OP_REGEX_MAP.put(HiveOperation.ALTERPARTITION_SERIALIZER, ALTER_TABLE_REGEX); + OP_REGEX_MAP.put(HiveOperation.ALTERPARTITION_MERGEFILES, ALTER_TABLE_REGEX); + OP_REGEX_MAP.put(HiveOperation.ALTERPARTITION_LOCATION, ALTER_TABLE_REGEX); + OP_REGEX_MAP.put(HiveOperation.ALTERTBLPART_SKEWED_LOCATION, ALTER_TABLE_REGEX); + OP_REGEX_MAP.put(HiveOperation.ALTERVIEW_PROPERTIES, ALTER_VIEW_REGEX); + OP_REGEX_MAP.put(HiveOperation.MSCK, MSCK_REGEX); + OP_REGEX_MAP.put(HiveOperation.ALTERINDEX_REBUILD, ALTER_INDEX_REGEX); + OP_REGEX_MAP.put(HiveOperation.ALTERINDEX_PROPS, ALTER_INDEX_REGEX); + OP_REGEX_MAP.put(HiveOperation.LOCKDB, LOCKDB); + OP_REGEX_MAP.put(HiveOperation.UNLOCKDB, UNLOCKDB); + OP_REGEX_MAP.put(HiveOperation.LOCKTABLE, LOCKTABLE); + OP_REGEX_MAP.put(HiveOperation.UNLOCKTABLE, UNLOCKTABLE); + OP_REGEX_MAP.put(HiveOperation.SHOWCOLUMNS, SHOWCOLUMNS); + OP_REGEX_MAP.put(HiveOperation.SHOW_TABLESTATUS, SHOW_TABLESTATUS); + } + + public SimpleSemanticAnalyzer(HiveOperation hiveOp, String cmd) throws HiveAuthzPluginException { + currentDb = 
SessionState.get().getCurrentDatabase(); + parse(hiveOp, cmd); + } + + private void parse(HiveOperation hiveOp, String cmd) throws HiveAuthzPluginException { + switch (hiveOp) { + case DROPDATABASE: + case DESCDATABASE: + case LOCKDB: + case UNLOCKDB: + parseDbMeta(cmd, OP_REGEX_MAP.get(hiveOp)); + break; + case DESCTABLE: + case CREATETABLE: + case DROPTABLE: + case DROPVIEW: + case SHOW_TBLPROPERTIES: + // alter table + case ALTERTABLE_PROPERTIES: + case ALTERTABLE_SERDEPROPERTIES: + case ALTERTABLE_CLUSTER_SORT: + case ALTERTABLE_FILEFORMAT: + case ALTERTABLE_TOUCH: + case ALTERTABLE_PROTECTMODE: + case ALTERTABLE_RENAMECOL: + case ALTERTABLE_ADDCOLS: + case ALTERTABLE_REPLACECOLS: + case ALTERTABLE_RENAMEPART: + case ALTERTABLE_ARCHIVE: + case ALTERTABLE_UNARCHIVE: + case ALTERTABLE_SERIALIZER: + case ALTERTABLE_MERGEFILES: + case ALTERTABLE_SKEWED: + case ALTERTABLE_DROPPARTS: + case ALTERTABLE_ADDPARTS: + case ALTERTABLE_RENAME: + case ALTERTABLE_LOCATION: + // alter view + case ALTERVIEW_PROPERTIES: + // alter partition + case ALTERPARTITION_FILEFORMAT: + case ALTERPARTITION_PROTECTMODE: + case ALTERPARTITION_SERDEPROPERTIES: + case ALTERPARTITION_SERIALIZER: + case ALTERPARTITION_MERGEFILES: + case ALTERPARTITION_LOCATION: + case ALTERTBLPART_SKEWED_LOCATION: + // MSCK + case MSCK: + // alter index + case ALTERINDEX_REBUILD: + case ALTERINDEX_PROPS: + case LOCKTABLE: + case UNLOCKTABLE: + case SHOWCOLUMNS: + parseTableMeta(cmd, OP_REGEX_MAP.get(hiveOp)); + break; + case SHOWINDEXES: + parseShowIndex(cmd, SHOW_INDEX_REGEX); + break; + case CREATEFUNCTION: + parseFunction(cmd, CREATE_FUNCTION_REGEX); + break; + case SHOW_TABLESTATUS: + parseTableExtend(cmd, SHOW_TABLESTATUS); + break; + case LOAD: + parseLoadTable(cmd, LOAD); + break; + default: + break; + } + } + + private void parseLoadTable(String cmd, String load) throws HiveAuthzPluginException { + Pattern pattern = Pattern.compile(load, Pattern.CASE_INSENSITIVE); + Matcher matcher = 
pattern.matcher(cmd); + if (matcher.find()) { + String tbName = matcher.group(matcher.groupCount()); + extractDbAndTb(tbName.trim()); + } else { + throw new HiveAuthzPluginException("this command " + cmd + " is not match table meta grammar"); + } + } + + private void parseTableExtend(String cmd, String showTablestatus) throws HiveAuthzPluginException { + Pattern pattern = Pattern.compile(showTablestatus, Pattern.CASE_INSENSITIVE); + Matcher matcher = pattern.matcher(cmd); + if (matcher.find()) { + String dbName = matcher.group(matcher.groupCount()); + currentDb = dbName; + currentTb = Table.SOME.getName(); + } else { + throw new HiveAuthzPluginException("this command " + cmd + " is not match table meta grammar"); + } + } + + private void extractDbAndTb(String tableName) { + if (tableName.contains(".")) { + String[] tb = tableName.split("\\."); + currentDb = tb[0]; + currentTb = tb[1]; + } else { + currentDb = SessionState.get().getCurrentDatabase(); + currentTb = tableName; + } + } + + private void parseDbMeta(String cmd, String regex) throws HiveAuthzPluginException { + Pattern pattern = Pattern.compile(regex, Pattern.CASE_INSENSITIVE); + Matcher matcher = pattern.matcher(cmd); + if (matcher.find()) { + currentDb = matcher.group(matcher.groupCount()); + } else { + throw new HiveAuthzPluginException("this command " + cmd + + " is not match database meta grammar"); + } + } + + private void parseTableMeta(String cmd, String regex) throws HiveAuthzPluginException { + Pattern pattern = Pattern.compile(regex, Pattern.CASE_INSENSITIVE); + Matcher matcher = pattern.matcher(cmd); + if (matcher.find()) { + String tbName = matcher.group(matcher.groupCount()); + extractDbAndTb(tbName.trim()); + } else { + throw new HiveAuthzPluginException("this command " + cmd + " is not match table meta grammar"); + } + } + + private void parseShowIndex(String cmd, String regex) throws HiveAuthzPluginException { + Pattern pattern = Pattern.compile(regex, Pattern.CASE_INSENSITIVE); + Matcher 
matcher = pattern.matcher(cmd); + if (matcher.find()) { + String dbName = matcher.group(matcher.groupCount()); + String tbName = matcher.group(3); + if (dbName != null) { + currentDb = dbName; + currentTb = tbName; + } else { + extractDbAndTb(tbName); + } + } else { + throw new HiveAuthzPluginException("this command " + cmd + " is not match show index grammar"); + } + } + + private void parseFunction(String cmd, String regex) throws HiveAuthzPluginException { + Pattern pattern = Pattern.compile(regex, Pattern.CASE_INSENSITIVE); + Matcher matcher = pattern.matcher(cmd); + if (matcher.find()) { + String udfClass = matcher.group(matcher.groupCount()); + if (udfClass.contains("'")) { + currentTb = udfClass.split("'")[1]; + } else { + currentTb = udfClass; + } + } else { + throw new HiveAuthzPluginException("this command " + cmd + + " is not match create function grammar"); + } + } + + public String getCurrentDb() { + return currentDb; + } + + public String getCurrentTb() { + return currentTb; + } + +} http://git-wip-us.apache.org/repos/asf/incubator-sentry/blob/5e58f3fe/sentry-binding/sentry-binding-hive-v2/src/test/java/org/apache/sentry/binding/hive/v2/DummyHiveAuthenticationProvider.java ---------------------------------------------------------------------- diff --git a/sentry-binding/sentry-binding-hive-v2/src/test/java/org/apache/sentry/binding/hive/v2/DummyHiveAuthenticationProvider.java b/sentry-binding/sentry-binding-hive-v2/src/test/java/org/apache/sentry/binding/hive/v2/DummyHiveAuthenticationProvider.java new file mode 100644 index 0000000..9335c37 --- /dev/null +++ b/sentry-binding/sentry-binding-hive-v2/src/test/java/org/apache/sentry/binding/hive/v2/DummyHiveAuthenticationProvider.java @@ -0,0 +1,63 @@ +/* + * Licensed to the Apache Software Foundation (ASF) under one or more contributor license + * agreements. See the NOTICE file distributed with this work for additional information regarding + * copyright ownership. 
The ASF licenses this file to You under the Apache License, Version 2.0 (the
 * "License"); you may not use this file except in compliance with the License. You may obtain a
 * copy of the License at
 *
 * http://www.apache.org/licenses/LICENSE-2.0
 *
 * Unless required by applicable law or agreed to in writing, software distributed under the License
 * is distributed on an "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express
 * or implied. See the License for the specific language governing permissions and limitations under
 * the License.
 */
package org.apache.sentry.binding.hive.v2;

import java.util.List;

import org.apache.hadoop.conf.Configuration;
import org.apache.hadoop.hive.ql.metadata.HiveException;
import org.apache.hadoop.hive.ql.security.HiveAuthenticationProvider;
import org.apache.hadoop.hive.ql.session.SessionState;

/**
 * Test-only {@link HiveAuthenticationProvider} that reports whatever user name
 * a test injects via {@link #setUserName(String)}; no real authentication is
 * performed and session state is ignored.
 */
public class DummyHiveAuthenticationProvider implements HiveAuthenticationProvider {

  // User name injected by the test; returned verbatim from getUserName().
  private String userName;
  // Configuration handed in by setConf(); exposed unchanged via getConf().
  private Configuration configuration;

  /** Records the user name subsequent {@link #getUserName()} calls report. */
  public void setUserName(String user) {
    this.userName = user;
  }

  @Override
  public void setConf(Configuration conf) {
    this.configuration = conf;
  }

  @Override
  public Configuration getConf() {
    return configuration;
  }

  @Override
  public String getUserName() {
    return userName;
  }

  @Override
  public List<String> getGroupNames() {
    // NOTE(review): intentionally returns null — the dummy performs no group
    // resolution. Confirm callers tolerate null before switching to an empty list.
    return null;
  }

  @Override
  public void destroy() throws HiveException {
    // Nothing to release in the dummy provider.
  }

  @Override
  public void setSessionState(SessionState ss) {
    // Session state is deliberately ignored.
  }

}
http://git-wip-us.apache.org/repos/asf/incubator-sentry/blob/5e58f3fe/sentry-binding/sentry-binding-hive/src/main/java/org/apache/hadoop/hive/ql/exec/SentryHivePrivilegeObjectDesc.java ---------------------------------------------------------------------- diff --git a/sentry-binding/sentry-binding-hive/src/main/java/org/apache/hadoop/hive/ql/exec/SentryHivePrivilegeObjectDesc.java
b/sentry-binding/sentry-binding-hive/src/main/java/org/apache/hadoop/hive/ql/exec/SentryHivePrivilegeObjectDesc.java index 18cdde2..8929357 100644 --- a/sentry-binding/sentry-binding-hive/src/main/java/org/apache/hadoop/hive/ql/exec/SentryHivePrivilegeObjectDesc.java +++ b/sentry-binding/sentry-binding-hive/src/main/java/org/apache/hadoop/hive/ql/exec/SentryHivePrivilegeObjectDesc.java @@ -47,4 +47,8 @@ public class SentryHivePrivilegeObjectDesc extends PrivilegeObjectDesc { this.isServer = isServer; } + public boolean isSentryPrivObjectDesc() { + return isServer || isUri; + } + } http://git-wip-us.apache.org/repos/asf/incubator-sentry/blob/5e58f3fe/sentry-binding/sentry-binding-hive/src/main/java/org/apache/sentry/binding/hive/authz/SentryConfigTool.java ---------------------------------------------------------------------- diff --git a/sentry-binding/sentry-binding-hive/src/main/java/org/apache/sentry/binding/hive/authz/SentryConfigTool.java b/sentry-binding/sentry-binding-hive/src/main/java/org/apache/sentry/binding/hive/authz/SentryConfigTool.java index d9bb42d..2e0f299 100644 --- a/sentry-binding/sentry-binding-hive/src/main/java/org/apache/sentry/binding/hive/authz/SentryConfigTool.java +++ b/sentry-binding/sentry-binding-hive/src/main/java/org/apache/sentry/binding/hive/authz/SentryConfigTool.java @@ -379,8 +379,9 @@ public class SentryConfigTool { // verify senty session hook is set private boolean isSentryEnabledOnHiveServer(Statement stmt) throws SQLException { - return HiveAuthzBindingSessionHook.class.getName().equalsIgnoreCase( - readConfig(stmt, HiveConf.ConfVars.HIVE_SERVER2_SESSION_HOOK.varname)); + String bindingString = readConfig(stmt, HiveConf.ConfVars.HIVE_SERVER2_SESSION_HOOK.varname).toUpperCase(); + return bindingString.contains("org.apache.sentry.binding.hive".toUpperCase()) + && bindingString.contains("HiveAuthzBindingSessionHook".toUpperCase()); } // read a config value using 'set' statement 
http://git-wip-us.apache.org/repos/asf/incubator-sentry/blob/5e58f3fe/sentry-binding/sentry-binding-hive/src/main/java/org/apache/sentry/binding/metastore/AuthorizingObjectStore.java ---------------------------------------------------------------------- diff --git a/sentry-binding/sentry-binding-hive/src/main/java/org/apache/sentry/binding/metastore/AuthorizingObjectStore.java b/sentry-binding/sentry-binding-hive/src/main/java/org/apache/sentry/binding/metastore/AuthorizingObjectStore.java index 5a0c950..9938373 100644 --- a/sentry-binding/sentry-binding-hive/src/main/java/org/apache/sentry/binding/metastore/AuthorizingObjectStore.java +++ b/sentry-binding/sentry-binding-hive/src/main/java/org/apache/sentry/binding/metastore/AuthorizingObjectStore.java @@ -303,7 +303,7 @@ public class AuthorizingObjectStore extends ObjectStore { * @return * @throws MetaException */ - private List<String> filterTables(String dbName, List<String> tabList) + protected List<String> filterTables(String dbName, List<String> tabList) throws MetaException { if (needsAuthorization(getUserName())) { try { @@ -403,7 +403,7 @@ public class AuthorizingObjectStore extends ObjectStore { return result; } - private String getNoAccessMessageForTable(String dbName, String tableName) { + protected String getNoAccessMessageForTable(String dbName, String tableName) { return NO_ACCESS_MESSAGE_TABLE + "<" + dbName + ">.<" + tableName + ">"; } http://git-wip-us.apache.org/repos/asf/incubator-sentry/blob/5e58f3fe/sentry-binding/sentry-binding-hive/src/main/java/org/apache/sentry/binding/metastore/MetastoreAuthzBinding.java ---------------------------------------------------------------------- diff --git a/sentry-binding/sentry-binding-hive/src/main/java/org/apache/sentry/binding/metastore/MetastoreAuthzBinding.java b/sentry-binding/sentry-binding-hive/src/main/java/org/apache/sentry/binding/metastore/MetastoreAuthzBinding.java index 5375f6a..f6b9c7a 100644 --- 
a/sentry-binding/sentry-binding-hive/src/main/java/org/apache/sentry/binding/metastore/MetastoreAuthzBinding.java +++ b/sentry-binding/sentry-binding-hive/src/main/java/org/apache/sentry/binding/metastore/MetastoreAuthzBinding.java @@ -79,7 +79,7 @@ public class MetastoreAuthzBinding extends MetaStorePreEventListener { /** * Build the set of object hierarchies ie fully qualified db model objects */ - private static class HierarcyBuilder { + protected static class HierarcyBuilder { private List<List<DBModelAuthorizable>> authHierarchy; public HierarcyBuilder() { @@ -337,7 +337,7 @@ public class MetastoreAuthzBinding extends MetaStorePreEventListener { } } - private void authorizeDropPartition(PreDropPartitionEvent context) + protected void authorizeDropPartition(PreDropPartitionEvent context) throws InvalidOperationException, MetaException { authorizeMetastoreAccess( HiveOperation.ALTERTABLE_DROPPARTS, @@ -392,7 +392,7 @@ public class MetastoreAuthzBinding extends MetaStorePreEventListener { * @param outputHierarchy * @throws InvalidOperationException */ - private void authorizeMetastoreAccess(HiveOperation hiveOp, + protected void authorizeMetastoreAccess(HiveOperation hiveOp, List<List<DBModelAuthorizable>> inputHierarchy, List<List<DBModelAuthorizable>> outputHierarchy) throws InvalidOperationException { http://git-wip-us.apache.org/repos/asf/incubator-sentry/blob/5e58f3fe/sentry-binding/sentry-binding-hive/src/main/java/org/apache/sentry/binding/metastore/SentryMetaStoreFilterHook.java ---------------------------------------------------------------------- diff --git a/sentry-binding/sentry-binding-hive/src/main/java/org/apache/sentry/binding/metastore/SentryMetaStoreFilterHook.java b/sentry-binding/sentry-binding-hive/src/main/java/org/apache/sentry/binding/metastore/SentryMetaStoreFilterHook.java index e8f21e5..9f33f3d 100644 --- a/sentry-binding/sentry-binding-hive/src/main/java/org/apache/sentry/binding/metastore/SentryMetaStoreFilterHook.java +++ 
b/sentry-binding/sentry-binding-hive/src/main/java/org/apache/sentry/binding/metastore/SentryMetaStoreFilterHook.java @@ -111,7 +111,7 @@ public class SentryMetaStoreFilterHook implements MetaStoreFilterHook { @Override public List<String> filterIndexNames(String dbName, String tblName, List<String> indexList) { - return null; + return indexList; } @Override http://git-wip-us.apache.org/repos/asf/incubator-sentry/blob/5e58f3fe/sentry-binding/sentry-binding-hive/src/main/java/org/apache/sentry/binding/metastore/SentryMetastorePostEventListener.java ---------------------------------------------------------------------- diff --git a/sentry-binding/sentry-binding-hive/src/main/java/org/apache/sentry/binding/metastore/SentryMetastorePostEventListener.java b/sentry-binding/sentry-binding-hive/src/main/java/org/apache/sentry/binding/metastore/SentryMetastorePostEventListener.java index 4924669..ecdfe1f 100644 --- a/sentry-binding/sentry-binding-hive/src/main/java/org/apache/sentry/binding/metastore/SentryMetastorePostEventListener.java +++ b/sentry-binding/sentry-binding-hive/src/main/java/org/apache/sentry/binding/metastore/SentryMetastorePostEventListener.java @@ -56,7 +56,7 @@ public class SentryMetastorePostEventListener extends MetaStoreEventListener { private final HiveAuthzConf authzConf; private final Server server; - private List<SentryMetastoreListenerPlugin> sentryPlugins = new ArrayList<SentryMetastoreListenerPlugin>(); + protected List<SentryMetastoreListenerPlugin> sentryPlugins = new ArrayList<SentryMetastoreListenerPlugin>(); public SentryMetastorePostEventListener(Configuration config) { super(config); http://git-wip-us.apache.org/repos/asf/incubator-sentry/blob/5e58f3fe/sentry-core/sentry-core-model-db/src/main/java/org/apache/sentry/core/model/db/Column.java ---------------------------------------------------------------------- diff --git a/sentry-core/sentry-core-model-db/src/main/java/org/apache/sentry/core/model/db/Column.java 
b/sentry-core/sentry-core-model-db/src/main/java/org/apache/sentry/core/model/db/Column.java index 89aabfc..305fd1f 100644 --- a/sentry-core/sentry-core-model-db/src/main/java/org/apache/sentry/core/model/db/Column.java +++ b/sentry-core/sentry-core-model-db/src/main/java/org/apache/sentry/core/model/db/Column.java @@ -23,6 +23,8 @@ public class Column implements DBModelAuthorizable { */ public static final Column ALL = new Column(AccessConstants.ALL); + public static final Column SOME = new Column(AccessConstants.SOME); + private final String name; public Column(String name) {