RANGER-203: replaced the Hive plugin implementation to use the pluggable-service model.
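In the new model, each Hive authorization check is expressed as a RangerHiveAccessRequest and evaluated by a Ranger plugin, with RangerHiveAuditHandler buffering audit events; this replaces the rule-matching HiveAuthDB/HiveAuthRule classes removed below. The sketch that follows is illustrative only and is not part of this commit: the RangerHiveAccessRequest constructor, RangerAccessResult.Result.ALLOWED, and the audit handler's flushAudit() appear in the diff, while the plugin.isAccessAllowed(request, auditHandler) call is an assumed RangerBasePlugin-style API.

// Illustrative sketch only (not part of this commit): how the new pluggable-service
// classes are intended to fit together. Assumes it lives alongside the new authorizer
// classes; plugin.isAccessAllowed(request, auditHandler) is an assumed call that is
// not shown in this diff.
package org.apache.ranger.authorization.hive.authorizer;

import java.util.Set;

import org.apache.hadoop.hive.ql.security.authorization.plugin.HiveAuthzContext;
import org.apache.hadoop.hive.ql.security.authorization.plugin.HiveAuthzSessionContext;
import org.apache.hadoop.hive.ql.security.authorization.plugin.HiveOperationType;
import org.apache.ranger.plugin.policyengine.RangerAccessResult;

public class PluggableServiceModelSketch {
    public boolean isAllowed(RangerHivePlugin        plugin,
                             RangerHiveResource      resource,
                             String                  user,
                             Set<String>             userGroups,
                             HiveOperationType       hiveOpType,
                             HiveAccessType          accessType,
                             HiveAuthzContext        context,
                             HiveAuthzSessionContext sessionContext) {
        // Build the request exactly as the RangerHiveAccessRequest constructor added in this
        // commit does: resource + user/groups + operation + access type + client/session info.
        RangerHiveAccessRequest request = new RangerHiveAccessRequest(resource, user, userGroups,
                                                                      hiveOpType, accessType,
                                                                      context, sessionContext);

        // The audit handler added in this commit buffers one audit event per evaluated policy.
        RangerHiveAuditHandler auditHandler = new RangerHiveAuditHandler();

        try {
            // Assumed plugin call: hand the request to the policy engine and let it invoke
            // auditHandler.logAudit(result) for audited resources.
            RangerAccessResult result = plugin.isAccessAllowed(request, auditHandler);

            return result != null && result.getResult() == RangerAccessResult.Result.ALLOWED;
        } finally {
            // Flush buffered audit events; denied events suppress allowed ones (see flushAudit()).
            auditHandler.flushAudit();
        }
    }
}

In the diff below, RangerHiveAuthorizer builds such requests per HivePrivilegeObject and evaluates them through a shared, lazily initialized RangerHivePlugin instance.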
Project: http://git-wip-us.apache.org/repos/asf/incubator-ranger/repo
Commit: http://git-wip-us.apache.org/repos/asf/incubator-ranger/commit/7758ed1c
Tree: http://git-wip-us.apache.org/repos/asf/incubator-ranger/tree/7758ed1c
Diff: http://git-wip-us.apache.org/repos/asf/incubator-ranger/diff/7758ed1c

Branch: refs/heads/stack
Commit: 7758ed1cabb2052d1d212bd3f118036dd2f89efb
Parents: 5a50f5f
Author: Madhan Neethiraj <[email protected]>
Authored: Mon Jan 26 17:56:07 2015 -0800
Committer: Madhan Neethiraj <[email protected]>
Committed: Mon Jan 26 17:56:07 2015 -0800

----------------------------------------------------------------------
 .../org/apache/ranger/pdp/hive/HiveAuthDB.java  | 306 -------------
 .../apache/ranger/pdp/hive/HiveAuthRule.java    | 222 ---------
 .../pdp/hive/HiveAuthorizationProviderBase.java |  64 ---
 .../ranger/pdp/hive/RangerAuthorizer.java       |  47 --
 .../apache/ranger/pdp/hive/URLBasedAuthDB.java  | 221 ---------
 hive-agent/pom.xml                              |   5 +
 .../hive/RangerHiveAccessContext.java           | 107 -----
 .../hive/RangerHiveAccessVerifier.java          |  29 --
 .../hive/RangerHiveAccessVerifierFactory.java   |  66 ---
 .../hive/RangerHiveObjectAccessInfo.java        | 270 -----
 .../authorizer/RangerHiveAccessRequest.java     |  92 ++++
 .../hive/authorizer/RangerHiveAuditHandler.java | 177 ++++++++
 .../hive/authorizer/RangerHiveAuthorizer.java   | 452 +++++++++----------
 .../authorizer/RangerHiveAuthorizerBase.java    |   6 -
 .../hive/authorizer/RangerHiveResource.java     | 185 ++++++++
 .../plugin/audit/RangerDefaultAuditHandler.java |   7 +-
 .../plugin/policyengine/RangerAccessResult.java |  37 ++
 17 files changed, 722 insertions(+), 1571 deletions(-)
----------------------------------------------------------------------

http://git-wip-us.apache.org/repos/asf/incubator-ranger/blob/7758ed1c/agents-impl/src/main/java/org/apache/ranger/pdp/hive/HiveAuthDB.java
----------------------------------------------------------------------
diff --git a/agents-impl/src/main/java/org/apache/ranger/pdp/hive/HiveAuthDB.java b/agents-impl/src/main/java/org/apache/ranger/pdp/hive/HiveAuthDB.java
deleted file mode 100644
index f9bdedf..0000000
--- a/agents-impl/src/main/java/org/apache/ranger/pdp/hive/HiveAuthDB.java
+++ /dev/null
@@ -1,306 +0,0 @@
-/*
- * Licensed to the Apache Software Foundation (ASF) under one
- * or more contributor license agreements. See the NOTICE file
- * distributed with this work for additional information
- * regarding copyright ownership. The ASF licenses this file
- * to you under the Apache License, Version 2.0 (the
- * "License"); you may not use this file except in compliance
- * with the License. You may obtain a copy of the License at
- *
- *     http://www.apache.org/licenses/LICENSE-2.0
- *
- * Unless required by applicable law or agreed to in writing,
- * software distributed under the License is distributed on an
- * "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
- * KIND, either express or implied. See the License for the
- * specific language governing permissions and limitations
- * under the License.
- */ - -package org.apache.ranger.pdp.hive; - -import java.util.ArrayList; -import java.util.List; - -import org.apache.commons.logging.Log; -import org.apache.commons.logging.LogFactory; -import org.apache.hadoop.security.UserGroupInformation; -import org.apache.ranger.authorization.hive.RangerHiveObjectAccessInfo; -import org.apache.ranger.authorization.hive.RangerHiveObjectAccessInfo.HiveAccessType; -import org.apache.ranger.authorization.hive.RangerHiveObjectAccessInfo.HiveObjectType; -import org.apache.ranger.authorization.utils.StringUtil; - -public class HiveAuthDB { - - private static final Log LOG = LogFactory.getLog(HiveAuthDB.class); - - private ArrayList<HiveAuthRule> allRuleList = null; - private ArrayList<HiveAuthRule> tblRuleList = null; - private ArrayList<HiveAuthRule> colRuleList = null; - - public HiveAuthDB() { - this(null) ; - } - - - public HiveAuthDB(ArrayList<HiveAuthRule> aRuleList) { - - if (aRuleList == null) { - aRuleList = new ArrayList<HiveAuthRule>() ; - } - - LOG.info("Number of Rules in the PolicyContainer: " + ((aRuleList == null) ? 0 : aRuleList.size()) ) ; - - allRuleList = new ArrayList<HiveAuthRule>() ; - colRuleList = new ArrayList<HiveAuthRule>(); - tblRuleList = new ArrayList<HiveAuthRule>() ; - - allRuleList = aRuleList ; - - for (HiveAuthRule rule : aRuleList) { - if (rule.isTableRule()) { - this.tblRuleList.add(rule); - } else { - this.colRuleList.add(rule); - } - } - - } - - public boolean isAccessAllowed(UserGroupInformation ugi, RangerHiveObjectAccessInfo objAccessInfo) { - boolean ret = false; - - if(objAccessInfo.getAccessType() == HiveAccessType.NONE || objAccessInfo.getObjectType() == HiveObjectType.NONE) { - return true; - } - - String accessType = objAccessInfo.getAccessType().name(); - - switch(objAccessInfo.getObjectType()) { - case DATABASE: - ret = isAccessAllowed(ugi, accessType, objAccessInfo.getDatabase()); - break; - - case TABLE: - case INDEX: - case PARTITION: - ret = isAccessAllowed(ugi, accessType, objAccessInfo.getDatabase(), objAccessInfo.getTable()); - break; - - case VIEW: - ret = isAccessAllowed(ugi, accessType, objAccessInfo.getDatabase(), objAccessInfo.getView()); - break; - - case COLUMN: - { - String deniedColumn = findDeniedColumn(ugi, accessType, objAccessInfo.getDatabase(), objAccessInfo.getTable(), objAccessInfo.getColumns()); - - ret = StringUtil.isEmpty(deniedColumn); - - if(! 
ret) { - objAccessInfo.setDeinedObjectName(RangerHiveObjectAccessInfo.getObjectName(objAccessInfo.getDatabase(), objAccessInfo.getTable(), deniedColumn)); - } - } - break; - - case FUNCTION: - ret = isUDFAccessAllowed(ugi, accessType, objAccessInfo.getDatabase(), objAccessInfo.getFunction()); - break; - - case URI: - // Handled in RangerHiveAuthorizer - break; - - case NONE: - break; - } - - return ret; - } - - public boolean isAudited(RangerHiveObjectAccessInfo objAccessInfo) { - boolean ret = false; - - if( objAccessInfo.getAccessType() == HiveAccessType.NONE - || objAccessInfo.getObjectType() == HiveObjectType.NONE - || objAccessInfo.getObjectType() == HiveObjectType.URI - ) { - return false; - } - - String database = null; - String table = null; - List<String> columns = null; - boolean isUDF = false; - - switch(objAccessInfo.getObjectType()) { - case DATABASE: - database = objAccessInfo.getDatabase(); - break; - - case TABLE: - case INDEX: - case PARTITION: - database = objAccessInfo.getDatabase(); - table = objAccessInfo.getTable(); - break; - - case VIEW: - database = objAccessInfo.getDatabase(); - table = objAccessInfo.getView(); - break; - - case COLUMN: - database = objAccessInfo.getDatabase(); - table = objAccessInfo.getTable(); - columns = objAccessInfo.getColumns(); - break; - - case FUNCTION: - database = objAccessInfo.getDatabase(); - table = objAccessInfo.getFunction(); - isUDF = true; - break; - - case NONE: - case URI: - break; - } - - if(StringUtil.isEmpty(columns)) { - for (HiveAuthRule rule : allRuleList) { - if(isUDF != rule.isUdf()) { - continue; - } - - if (rule.isTableMatch(database, table)) { - ret = rule.isAudited() ; - - if (ret) { - if (LOG.isDebugEnabled()) { - LOG.debug("isAudited(database=" + database + ", table=" + table + ", columns=" + StringUtil.toString(columns) + ") => [" + ret + "] as matched for rule: " + rule); - } - - break ; - } - } - } - } else { - // is audit enabled for any one column being accessed? 
- for(String colName : columns) { - for (HiveAuthRule rule : allRuleList) { - if(isUDF != rule.isUdf()) { - continue; - } - - ret = rule.isMatched(database, table, colName) && rule.isAudited(); - - if (ret) { - if (LOG.isDebugEnabled()) { - LOG.debug("isAudited(database=" + database + ", table=" + table + ", columns=" + StringUtil.toString(columns) + ") => [" + ret + "] as matched for rule: " + rule); - } - - break ; - } - } - - if(ret) { - break; - } - } - } - - return ret ; - } - - private boolean isAccessAllowed(UserGroupInformation ugi, String accessType, String database) { - boolean ret = false; - - for (HiveAuthRule rule : allRuleList) { - ret = rule.isMatched(database, ugi.getShortUserName(), ugi.getGroupNames(), accessType); - - if(ret) { - if (LOG.isDebugEnabled()) { - LOG.debug("isAccessAllowed(user=" + ugi.getShortUserName() + ", groups=" + StringUtil.toString(ugi.getGroupNames()) + ", accessType=" + accessType + ", database=" + database + ") => [" + ret + "] as matched for rule: " + rule); - } - - break; - } - } - - return ret; - } - - private boolean isAccessAllowed(UserGroupInformation ugi, String accessType, String database, String tableOrView) { - boolean ret = false; - - for (HiveAuthRule rule : tblRuleList) { - ret = rule.isMatched(database, tableOrView, ugi.getShortUserName(), ugi.getGroupNames(), accessType); - - if(ret) { - if (LOG.isDebugEnabled()) { - LOG.debug("isAccessAllowed(user=" + ugi.getShortUserName() + ", groups=" + StringUtil.toString(ugi.getGroupNames()) + ", accessType=" + accessType + ", database=" + database + ", tableOrView=" + tableOrView + ") => [" + ret + "] as matched for rule: " + rule); - } - - break; - } - } - - return ret; - } - - private String findDeniedColumn(UserGroupInformation ugi, String accessType, String database, String tableOrView, List<String> columns) { - String deinedColumn = null; - - boolean isAllowed = isAccessAllowed(ugi, accessType, database, tableOrView); // check if access is allowed at the table level - - if(!isAllowed && !StringUtil.isEmpty(columns)) { - for(String column : columns) { - for (HiveAuthRule rule : colRuleList) { - isAllowed = rule.isMatched(database, tableOrView, column, ugi.getShortUserName(), ugi.getGroupNames(), accessType); - - if(isAllowed) { - if (LOG.isDebugEnabled()) { - LOG.debug("isAccessAllowed(user=" + ugi.getShortUserName() + ", groups=" + StringUtil.toString(ugi.getGroupNames()) + ", accessType=" + accessType + ", database=" + database + ", tableOrView=" + tableOrView + ", column=" + column + ") => [" + isAllowed + "] as matched for rule: " + rule); - } - - break; - } - } - - if(!isAllowed) { - deinedColumn = column; - - if (LOG.isDebugEnabled()) { - LOG.debug("isAccessAllowed(user=" + ugi.getShortUserName() + ", groups=" + StringUtil.toString(ugi.getGroupNames()) + ", accessType=" + accessType + ", database=" + database + ", tableOrView=" + tableOrView + ", column=" + column + ") => [" + isAllowed + "]"); - } - break; - } - } - } - - return deinedColumn; - } - - private boolean isUDFAccessAllowed(UserGroupInformation ugi, String accessType, String database, String udfName) { - boolean ret = false; - - for (HiveAuthRule rule : tblRuleList) { - if(! 
rule.isUdf()) { - continue; - } - - ret = rule.isMatched(database, udfName, ugi.getShortUserName(), ugi.getGroupNames(), accessType); - - if(ret) { - if (LOG.isDebugEnabled()) { - LOG.debug("isAccessAllowed(user=" + ugi.getShortUserName() + ", groups=" + StringUtil.toString(ugi.getGroupNames()) + ", accessType=" + accessType + ", database=" + database + ", udfName=" + udfName + ") => [" + ret + "] as matched for rule: " + rule); - } - - break; - } - } - - return ret; - } -} http://git-wip-us.apache.org/repos/asf/incubator-ranger/blob/7758ed1c/agents-impl/src/main/java/org/apache/ranger/pdp/hive/HiveAuthRule.java ---------------------------------------------------------------------- diff --git a/agents-impl/src/main/java/org/apache/ranger/pdp/hive/HiveAuthRule.java b/agents-impl/src/main/java/org/apache/ranger/pdp/hive/HiveAuthRule.java deleted file mode 100644 index 21bd7c1..0000000 --- a/agents-impl/src/main/java/org/apache/ranger/pdp/hive/HiveAuthRule.java +++ /dev/null @@ -1,222 +0,0 @@ -/* - * Licensed to the Apache Software Foundation (ASF) under one - * or more contributor license agreements. See the NOTICE file - * distributed with this work for additional information - * regarding copyright ownership. The ASF licenses this file - * to you under the Apache License, Version 2.0 (the - * "License"); you may not use this file except in compliance - * with the License. You may obtain a copy of the License at - * - * http://www.apache.org/licenses/LICENSE-2.0 - * - * Unless required by applicable law or agreed to in writing, - * software distributed under the License is distributed on an - * "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY - * KIND, either express or implied. See the License for the - * specific language governing permissions and limitations - * under the License. 
- */ - -package org.apache.ranger.pdp.hive; - -import org.apache.commons.logging.Log; -import org.apache.commons.logging.LogFactory; -import org.apache.ranger.authorization.hive.RangerHiveObjectAccessInfo.HiveAccessType; -import org.apache.ranger.authorization.hive.constants.RangerHiveConstants; -import org.apache.ranger.authorization.utils.StringUtil; - - -public class HiveAuthRule { - - private static final Log LOG = LogFactory.getLog(HiveAuthRule.class) ; - - public static final String WILDCARD_OBJECT = ".*" ; - - private String databaseName; - private String tableName; - private String columnName; - private String accessType; - private String group; - private String user; - private boolean tableRule = false; - private boolean allGranted = false; - private boolean udf = false; - private boolean tableExcluded = false; - private boolean columnExcluded = false; - private boolean audited = false; - private boolean encrypted = false; - - public HiveAuthRule(String dbName, String tableName, String colName, String permission, String user, String group) { - this(false, dbName,tableName,colName,permission,user,group, false, false) ; - } - - public HiveAuthRule(boolean udfInd, String dbName, String tableName, String colName, String permission, String user, String group, boolean tableExclusionFlag, boolean columnExclusionFlag) { - this.udf = udfInd ; - this.databaseName = StringUtil.toLower(dbName); - this.tableName = StringUtil.toLower(tableName); - this.columnName = StringUtil.toLower(colName); - this.accessType = permission ; - this.user = user; - this.group = group ; - this.tableExcluded = tableExclusionFlag ; - this.columnExcluded = columnExclusionFlag ; - - this.allGranted = StringUtil.equalsIgnoreCase(HiveAccessType.ALL.name(), accessType); - - tableRule = StringUtil.isEmpty(columnName) || WILDCARD_OBJECT.matches(columnName) ; - } - - @Override - public String toString() { - return "db:" + databaseName + ", table: " + tableName + ", columnName: " + columnName + ", accessType: " + accessType + ",user: " + user + ", group: " + group + ",isTable:" + tableRule + ",audited:" + audited + ",encrypted:" + encrypted ; - } - - public boolean isMatched(String user, String[] groups, String accessType) { - String dbName = null; - String tblName = null; - String colName = null; - - return isMatched(dbName, tblName, colName, user, groups, accessType) ; - } - - public boolean isMatched(String dbName, String user, String[] groups, String accessType) { - String tblName = null; - String colName = null; - - return isMatched(dbName, tblName, colName, user, groups, accessType) ; - } - - public boolean isMatched(String dbName, String tblName, String user, String[] groups, String accessType) { - String colName = null; - - return isMatched(dbName, tblName, colName, user, groups, accessType) ; - } - - public boolean isMatched(String dbName, String tblName, String colName, String user, String[] groups, String accessType) { - boolean ret = isMatched(dbName, tblName, colName); - - if(ret) { - // does accessType match? - ret = StringUtil.equalsIgnoreCase(accessType, this.accessType); - - if(! ret && !StringUtil.equalsIgnoreCase(accessType, HiveAccessType.ADMIN.name())) { - ret = this.isAllGranted() || StringUtil.equalsIgnoreCase(accessType, "USE"); - } - - if(ret) { - // does user/group match? 
- ret = StringUtil.equals(user, this.user) || - StringUtil.equals(RangerHiveConstants.PUBLIC_ACCESS_ROLE, this.group) || - StringUtil.contains(groups, this.group); - } - } - - if(LOG.isDebugEnabled()) { - LOG.debug("isMatched(db=" + dbName + ", table=" + tblName + ", col=" + colName + ", user=" + user + ", groups=" + StringUtil.toString(groups) + ", accessType=" + accessType + ") => rule[" + this.databaseName + ":" + this.tableName + ":" + this.columnName + ":" + this.user + ":" + this.group + ":" + this.accessType + "] returns [" + ret + "]"); - } - - return ret ; - } - - public boolean isMatched(String dbName, String tblName, String colName) { - boolean ret = isTableMatch(dbName, tblName); - - if (ret) { - colName = StringUtil.toLower(colName); - - if (colName != null) { - ret = colName.matches(this.columnName); - - if (columnExcluded) { - ret = (! ret) ; - } - } - } - - if(LOG.isDebugEnabled()) { - LOG.debug("isMatched(db=" + dbName + ", table=" + tblName + ", col=" + colName + ") => rule[" + this.databaseName + ":" + this.tableName + ":" + this.columnName + "] returns [" + ret + "]"); - } - - return ret ; - } - - public boolean isTableMatch(String dbName, String tblName) { - boolean ret = isDBMatch(dbName); - - if(ret) { - tblName = StringUtil.toLower(tblName); - - if(tblName != null) { - ret = tblName.matches(this.tableName); - - if(tableExcluded) { - ret = !ret; - } - } - } - - return ret; - } - - public boolean isDBMatch(String dbName) { - boolean ret = false; - - dbName = StringUtil.toLower(dbName); - - ret = dbName == null || dbName.matches(this.databaseName); - - return ret; - } - - public String getDbName() { - return databaseName; - } - - public String getTableName() { - return tableName; - } - - public String getColumnName() { - return columnName; - } - - public String getAccessType() { - return accessType; - } - - public String getUser() { - return user; - } - - public String getGroup() { - return group; - } - - public boolean isTableRule() { - return tableRule; - } - - public boolean isAllGranted() { - return allGranted ; - } - - public boolean isUdf() { - return udf; - } - - public boolean isAudited() { - return audited; - } - - public void setAudited(boolean audited) { - this.audited = audited; - } - - public boolean isEncrypted() { - return encrypted; - } - - public void setEncrypted(boolean encrypted) { - this.encrypted = encrypted; - } -} http://git-wip-us.apache.org/repos/asf/incubator-ranger/blob/7758ed1c/agents-impl/src/main/java/org/apache/ranger/pdp/hive/HiveAuthorizationProviderBase.java ---------------------------------------------------------------------- diff --git a/agents-impl/src/main/java/org/apache/ranger/pdp/hive/HiveAuthorizationProviderBase.java b/agents-impl/src/main/java/org/apache/ranger/pdp/hive/HiveAuthorizationProviderBase.java deleted file mode 100644 index 894d2df..0000000 --- a/agents-impl/src/main/java/org/apache/ranger/pdp/hive/HiveAuthorizationProviderBase.java +++ /dev/null @@ -1,64 +0,0 @@ -/* - * Licensed to the Apache Software Foundation (ASF) under one - * or more contributor license agreements. See the NOTICE file - * distributed with this work for additional information - * regarding copyright ownership. The ASF licenses this file - * to you under the Apache License, Version 2.0 (the - * "License"); you may not use this file except in compliance - * with the License. 
You may obtain a copy of the License at - * - * http://www.apache.org/licenses/LICENSE-2.0 - * - * Unless required by applicable law or agreed to in writing, - * software distributed under the License is distributed on an - * "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY - * KIND, either express or implied. See the License for the - * specific language governing permissions and limitations - * under the License. - */ - -package org.apache.ranger.pdp.hive; - - -import org.apache.commons.logging.Log; -import org.apache.commons.logging.LogFactory; -import org.apache.hadoop.hive.ql.metadata.AuthorizationException; -import org.apache.hadoop.security.UserGroupInformation; -import org.apache.ranger.authorization.hive.RangerHiveAccessVerifier; -import org.apache.ranger.authorization.hive.RangerHiveObjectAccessInfo; - -public class HiveAuthorizationProviderBase implements RangerHiveAccessVerifier { - - private static final Log LOG = LogFactory.getLog(HiveAuthorizationProviderBase.class); - - protected HiveAuthDB authDB = new HiveAuthDB() ; - - - public HiveAuthDB getAuthDB() { - return authDB ; - } - - @Override - public boolean isAccessAllowed(UserGroupInformation ugi, RangerHiveObjectAccessInfo objAccessInfo) { - HiveAuthDB ldb = authDB ; - - if (ldb == null) { - throw new AuthorizationException("No Authorization Agent is available for AuthorizationCheck") ; - } - - boolean ret = ldb.isAccessAllowed(ugi, objAccessInfo); - - return ret; - } - - @Override - public boolean isAudited(RangerHiveObjectAccessInfo objAccessInfo) { - HiveAuthDB ldb = authDB ; - - if (ldb == null) { - throw new AuthorizationException("No Authorization Agent is available for AuthorizationCheck") ; - } - - return ldb.isAudited(objAccessInfo) ; - } -} http://git-wip-us.apache.org/repos/asf/incubator-ranger/blob/7758ed1c/agents-impl/src/main/java/org/apache/ranger/pdp/hive/RangerAuthorizer.java ---------------------------------------------------------------------- diff --git a/agents-impl/src/main/java/org/apache/ranger/pdp/hive/RangerAuthorizer.java b/agents-impl/src/main/java/org/apache/ranger/pdp/hive/RangerAuthorizer.java deleted file mode 100644 index fc4291c..0000000 --- a/agents-impl/src/main/java/org/apache/ranger/pdp/hive/RangerAuthorizer.java +++ /dev/null @@ -1,47 +0,0 @@ -/* - * Licensed to the Apache Software Foundation (ASF) under one - * or more contributor license agreements. See the NOTICE file - * distributed with this work for additional information - * regarding copyright ownership. The ASF licenses this file - * to you under the Apache License, Version 2.0 (the - * "License"); you may not use this file except in compliance - * with the License. You may obtain a copy of the License at - * - * http://www.apache.org/licenses/LICENSE-2.0 - * - * Unless required by applicable law or agreed to in writing, - * software distributed under the License is distributed on an - * "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY - * KIND, either express or implied. See the License for the - * specific language governing permissions and limitations - * under the License. 
- */ - - package org.apache.ranger.pdp.hive; - -import org.apache.hadoop.hive.ql.metadata.AuthorizationException; -import org.apache.hadoop.security.UserGroupInformation; -import org.apache.ranger.authorization.hive.RangerHiveAccessVerifier; -import org.apache.ranger.authorization.hive.RangerHiveObjectAccessInfo; - -public class RangerAuthorizer implements RangerHiveAccessVerifier { - - private RangerHiveAccessVerifier authDB = URLBasedAuthDB.getInstance() ; - - - @Override - public boolean isAccessAllowed(UserGroupInformation ugi, RangerHiveObjectAccessInfo objAccessInfo) { - if (authDB == null) { - throw new AuthorizationException("No Authorization Agent is available for AuthorizationCheck") ; - } - return authDB.isAccessAllowed(ugi, objAccessInfo); - } - - @Override - public boolean isAudited(RangerHiveObjectAccessInfo objAccessInfo) { - if (authDB == null) { - throw new AuthorizationException("No Authorization Agent is available for AuthorizationCheck") ; - } - return authDB.isAudited(objAccessInfo) ; - } -} http://git-wip-us.apache.org/repos/asf/incubator-ranger/blob/7758ed1c/agents-impl/src/main/java/org/apache/ranger/pdp/hive/URLBasedAuthDB.java ---------------------------------------------------------------------- diff --git a/agents-impl/src/main/java/org/apache/ranger/pdp/hive/URLBasedAuthDB.java b/agents-impl/src/main/java/org/apache/ranger/pdp/hive/URLBasedAuthDB.java deleted file mode 100644 index 9de2bf4..0000000 --- a/agents-impl/src/main/java/org/apache/ranger/pdp/hive/URLBasedAuthDB.java +++ /dev/null @@ -1,221 +0,0 @@ -/* - * Licensed to the Apache Software Foundation (ASF) under one - * or more contributor license agreements. See the NOTICE file - * distributed with this work for additional information - * regarding copyright ownership. The ASF licenses this file - * to you under the Apache License, Version 2.0 (the - * "License"); you may not use this file except in compliance - * with the License. You may obtain a copy of the License at - * - * http://www.apache.org/licenses/LICENSE-2.0 - * - * Unless required by applicable law or agreed to in writing, - * software distributed under the License is distributed on an - * "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY - * KIND, either express or implied. See the License for the - * specific language governing permissions and limitations - * under the License. 
- */ - -package org.apache.ranger.pdp.hive; - -import java.util.ArrayList; -import java.util.List; - -import org.apache.commons.logging.Log; -import org.apache.commons.logging.LogFactory; -import org.apache.ranger.authorization.hadoop.config.RangerConfiguration; -import org.apache.ranger.pdp.config.PolicyChangeListener; -import org.apache.ranger.pdp.config.PolicyRefresher; -import org.apache.ranger.pdp.constants.RangerConstants; -import org.apache.ranger.pdp.model.Policy; -import org.apache.ranger.pdp.model.PolicyContainer; -import org.apache.ranger.pdp.model.RolePermission; - -public class URLBasedAuthDB extends HiveAuthorizationProviderBase implements PolicyChangeListener { - - private static final Log LOG = LogFactory.getLog(URLBasedAuthDB.class) ; - - private static URLBasedAuthDB me = null ; - - private PolicyContainer policyContainer = null ; - - private PolicyRefresher refresher = null ; - - - public static URLBasedAuthDB getInstance() { - if (me == null) { - synchronized(URLBasedAuthDB.class) { - URLBasedAuthDB temp = me ; - if (temp == null) { - me = new URLBasedAuthDB() ; - me.init() ; - } - } - } - return me ; - } - - private URLBasedAuthDB() { - String url = RangerConfiguration.getInstance().get(RangerConstants.RANGER_HIVE_POLICYMGR_URL_PROP); - long refreshInMilli = RangerConfiguration.getInstance().getLong( - RangerConstants.RANGER_HIVE_POLICYMGR_URL_RELOAD_INTERVAL_IN_MILLIS_PROP , - RangerConstants.RANGER_HIVE_POLICYMGR_URL_RELOAD_INTERVAL_IN_MILLIS_DEFAULT); - - String lastStoredFileName = RangerConfiguration.getInstance().get(RangerConstants.RANGER_HIVE_LAST_SAVED_POLICY_FILE_PROP) ; - - String sslConfigFileName = RangerConfiguration.getInstance().get(RangerConstants.RANGER_HIVE_POLICYMGR_SSL_CONFIG_FILE_PROP) ; - refresher = new PolicyRefresher(url, refreshInMilli,sslConfigFileName,lastStoredFileName) ; - - String saveAsFileName = RangerConfiguration.getInstance().get(RangerConstants.RANGER_HIVE_POLICYMGR_URL_SAVE_FILE_PROP) ; - if (saveAsFileName != null) { - refresher.setSaveAsFileName(saveAsFileName) ; - } - - if (lastStoredFileName != null) { - refresher.setLastStoredFileName(lastStoredFileName); - } - - } - - private void init() { - refresher.setPolicyChangeListener(this); - } - - public PolicyContainer getPolicyContainer() { - return policyContainer; - } - - @Override - public void OnPolicyChange(PolicyContainer policyContainer) { - - LOG.debug("OnPolicyChange() has been called with new PolicyContainer .....") ; - - try { - - ArrayList<HiveAuthRule> ruleListTemp = new ArrayList<HiveAuthRule>(); - - this.policyContainer = policyContainer; - - if (LOG.isDebugEnabled()) { - LOG.debug("Number of acl found (before isEnabled check): " + ( policyContainer.getAcl() == null ? 0 : policyContainer.getAcl().size() ) ); - } - - for(Policy acl : policyContainer.getAcl()) { - - if (! acl.isEnabled()) { - LOG.debug("Diabled acl found [" + acl + "]. Skipping this acl ...") ; - continue ; - } - - if (LOG.isDebugEnabled()) { - LOG.debug("Number of database found in acl [" + acl + "] " + ( acl.getDatabaseList() == null ? 0 : acl.getDatabaseList().size() ) ); - LOG.debug("Number of Tables found in acl [" + acl + "] " + ( acl.getTableList() == null ? 0 : acl.getTableList().size() ) ); - LOG.debug("Number of Columns found in acl [" + acl + "] " + ( acl.getColumnList()== null ? 
0 : acl.getColumnList().size() ) ); - } - - boolean isUDF = false ; - - List<String> dbList = new ArrayList<String>() ; - String dbs = replaceFileBasedRegEx(acl.getDatabases()) ; - dbList.add(getRegExFormatted(dbs)) ; - - List<String> tableList = new ArrayList<String>() ; - String udfs = acl.getUdfs() ; - if (udfs != null) { - isUDF = true ; - dbList.clear(); - dbList.add(HiveAuthRule.WILDCARD_OBJECT) ; - tableList.clear(); - udfs = replaceFileBasedRegEx(udfs) ; - tableList.add(getRegExFormatted(udfs)) ; - } - else { - String tables = replaceFileBasedRegEx(acl.getTables()) ; - tableList.add(getRegExFormatted(tables)) ; - } - - List<String> columnList = new ArrayList<String>() ; - String columns = replaceFileBasedRegEx(acl.getColumns()) ; - columnList.add(getRegExFormatted(columns)) ; - - - boolean isAudited = (acl.getAuditInd() == 1) ; - - boolean isEncrypted = (acl.getEncryptInd() == 1) ; - - for(String db : dbList) { - - for(String table : tableList) { - - for(String col : columnList) { - - for(RolePermission rp : acl.getPermissions()) { - for (String accessLevel : rp.getAccess() ) { - for (String group : rp.getGroups()) { - HiveAuthRule rule = new HiveAuthRule(isUDF, db, table, col, accessLevel.toLowerCase(), null, group, acl.isTableSelectionExcluded(), acl.isColumnSelectionExcluded()); - rule.setAudited(isAudited); - rule.setEncrypted(isEncrypted); - LOG.debug("Adding rule [" + rule + "] to the authdb."); - ruleListTemp.add(rule); - } - for (String user : rp.getUsers()) { - HiveAuthRule rule = new HiveAuthRule(isUDF, db, table, col, accessLevel.toLowerCase(), user, null,acl.isTableSelectionExcluded(), acl.isColumnSelectionExcluded()); - rule.setAudited(isAudited); - rule.setEncrypted(isEncrypted); - LOG.debug("Adding rule [" + rule + "] to the authdb."); - ruleListTemp.add(rule); - } - } - } - - - } - } - } - } - HiveAuthDB authDBTemp = new HiveAuthDB(ruleListTemp); - authDB = authDBTemp; - } - catch(Throwable t) { - LOG.error("OnPolicyChange has failed with an exception", t); - } - } - - public static String getRegExFormatted(String userEnteredStr) { - - if (userEnteredStr == null || userEnteredStr.trim().length() == 0) { - return HiveAuthRule.WILDCARD_OBJECT ; - } - - StringBuilder sb = new StringBuilder() ; - - for(String s : userEnteredStr.split(",")) { - if (sb.length() == 0) { - sb.append("(") ; - } - else { - sb.append("|") ; - } - sb.append(s.trim()) ; - } - - if (sb.length() > 0) { - sb.append(")") ; - } - - return sb.toString() ; - } - - - public static String replaceFileBasedRegEx(String userEnteredStr) { - if (userEnteredStr != null) { - userEnteredStr = userEnteredStr.replaceAll("\\.", "\\.") - .replaceAll("\\?", "\\.") - .replaceAll("\\*", ".*") ; - } - return userEnteredStr ; - } - - -} http://git-wip-us.apache.org/repos/asf/incubator-ranger/blob/7758ed1c/hive-agent/pom.xml ---------------------------------------------------------------------- diff --git a/hive-agent/pom.xml b/hive-agent/pom.xml index 1b19025..c6d41be 100644 --- a/hive-agent/pom.xml +++ b/hive-agent/pom.xml @@ -108,5 +108,10 @@ <artifactId>ranger-plugins-audit</artifactId> <version>${project.version}</version> </dependency> + <dependency> + <groupId>org.apache.ranger</groupId> + <artifactId>plugin-common</artifactId> + <version>${project.version}</version> + </dependency> </dependencies> </project> http://git-wip-us.apache.org/repos/asf/incubator-ranger/blob/7758ed1c/hive-agent/src/main/java/org/apache/ranger/authorization/hive/RangerHiveAccessContext.java 
---------------------------------------------------------------------- diff --git a/hive-agent/src/main/java/org/apache/ranger/authorization/hive/RangerHiveAccessContext.java b/hive-agent/src/main/java/org/apache/ranger/authorization/hive/RangerHiveAccessContext.java deleted file mode 100644 index 6c0a2b0..0000000 --- a/hive-agent/src/main/java/org/apache/ranger/authorization/hive/RangerHiveAccessContext.java +++ /dev/null @@ -1,107 +0,0 @@ -/* - * Licensed to the Apache Software Foundation (ASF) under one - * or more contributor license agreements. See the NOTICE file - * distributed with this work for additional information - * regarding copyright ownership. The ASF licenses this file - * to you under the Apache License, Version 2.0 (the - * "License"); you may not use this file except in compliance - * with the License. You may obtain a copy of the License at - * - * http://www.apache.org/licenses/LICENSE-2.0 - * - * Unless required by applicable law or agreed to in writing, - * software distributed under the License is distributed on an - * "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY - * KIND, either express or implied. See the License for the - * specific language governing permissions and limitations - * under the License. - */ - -package org.apache.ranger.authorization.hive; - -import org.apache.commons.lang.builder.EqualsBuilder; -import org.apache.commons.lang.builder.HashCodeBuilder; -import org.apache.hadoop.hive.ql.security.authorization.plugin.HiveAuthzContext; -import org.apache.hadoop.hive.ql.security.authorization.plugin.HiveAuthzSessionContext; - -public class RangerHiveAccessContext { - private String mClientIpAddress; - private String mClientType; - private String mCommandString; - private String mSessionString; - - public RangerHiveAccessContext(HiveAuthzContext context, HiveAuthzSessionContext sessionContext) { - if(context != null) { - mClientIpAddress = context.getIpAddress(); - mCommandString = context.getCommandString(); - } - - if(sessionContext != null) { - mClientType = sessionContext.getClientType().name(); - mSessionString = sessionContext.getSessionString(); - } - } - - public String getClientIpAddress() { - return mClientIpAddress; - } - - public void setClientIpAddress(String clientIpAddress) { - this.mClientIpAddress = clientIpAddress; - } - - public String getClientType() { - return mClientType; - } - - public void setClientType(String clientType) { - this.mClientType = clientType; - } - - public String getCommandString() { - return mCommandString; - } - - public void setCommandString(String commandString) { - this.mCommandString = commandString; - } - - public String getSessionString() { - return mSessionString; - } - - public void setSessionString(String sessionString) { - this.mSessionString = sessionString; - } - - @Override - public boolean equals(Object obj) { - if (obj == null) { - return false; - } - if (obj == this) { - return true; - } - if (obj.getClass() != getClass()) { - return false; - } - RangerHiveAccessContext that = (RangerHiveAccessContext) obj; - return new EqualsBuilder() - .appendSuper(super.equals(obj)) - .append(mClientIpAddress, that.mClientIpAddress) - .append(mClientType, that.mClientType) - .append(mCommandString, that.mCommandString) - .append(mSessionString, that.mSessionString).isEquals(); - } - - @Override - public int hashCode() { - return new HashCodeBuilder(31, 37) - .appendSuper(41) - .append(mClientIpAddress) - .append(mClientType) - .append(mCommandString) - .append(mSessionString) - .toHashCode(); - } -} 
http://git-wip-us.apache.org/repos/asf/incubator-ranger/blob/7758ed1c/hive-agent/src/main/java/org/apache/ranger/authorization/hive/RangerHiveAccessVerifier.java ---------------------------------------------------------------------- diff --git a/hive-agent/src/main/java/org/apache/ranger/authorization/hive/RangerHiveAccessVerifier.java b/hive-agent/src/main/java/org/apache/ranger/authorization/hive/RangerHiveAccessVerifier.java deleted file mode 100644 index ef4ad56..0000000 --- a/hive-agent/src/main/java/org/apache/ranger/authorization/hive/RangerHiveAccessVerifier.java +++ /dev/null @@ -1,29 +0,0 @@ -/* - * Licensed to the Apache Software Foundation (ASF) under one - * or more contributor license agreements. See the NOTICE file - * distributed with this work for additional information - * regarding copyright ownership. The ASF licenses this file - * to you under the Apache License, Version 2.0 (the - * "License"); you may not use this file except in compliance - * with the License. You may obtain a copy of the License at - * - * http://www.apache.org/licenses/LICENSE-2.0 - * - * Unless required by applicable law or agreed to in writing, - * software distributed under the License is distributed on an - * "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY - * KIND, either express or implied. See the License for the - * specific language governing permissions and limitations - * under the License. - */ - - package org.apache.ranger.authorization.hive; - -import org.apache.hadoop.security.UserGroupInformation; - - -public interface RangerHiveAccessVerifier { - public boolean isAccessAllowed(UserGroupInformation ugi, RangerHiveObjectAccessInfo objAccessInfo) ; - - public boolean isAudited(RangerHiveObjectAccessInfo objAccessInfo) ; -} http://git-wip-us.apache.org/repos/asf/incubator-ranger/blob/7758ed1c/hive-agent/src/main/java/org/apache/ranger/authorization/hive/RangerHiveAccessVerifierFactory.java ---------------------------------------------------------------------- diff --git a/hive-agent/src/main/java/org/apache/ranger/authorization/hive/RangerHiveAccessVerifierFactory.java b/hive-agent/src/main/java/org/apache/ranger/authorization/hive/RangerHiveAccessVerifierFactory.java deleted file mode 100644 index f02bfe8..0000000 --- a/hive-agent/src/main/java/org/apache/ranger/authorization/hive/RangerHiveAccessVerifierFactory.java +++ /dev/null @@ -1,66 +0,0 @@ -/* - * Licensed to the Apache Software Foundation (ASF) under one - * or more contributor license agreements. See the NOTICE file - * distributed with this work for additional information - * regarding copyright ownership. The ASF licenses this file - * to you under the Apache License, Version 2.0 (the - * "License"); you may not use this file except in compliance - * with the License. You may obtain a copy of the License at - * - * http://www.apache.org/licenses/LICENSE-2.0 - * - * Unless required by applicable law or agreed to in writing, - * software distributed under the License is distributed on an - * "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY - * KIND, either express or implied. See the License for the - * specific language governing permissions and limitations - * under the License. 
- */ - - package org.apache.ranger.authorization.hive; - -import org.apache.commons.logging.Log; -import org.apache.commons.logging.LogFactory; -import org.apache.ranger.authorization.hadoop.config.RangerConfiguration; -import org.apache.ranger.authorization.hadoop.constants.RangerHadoopConstants; - -public class RangerHiveAccessVerifierFactory { - - private static final Log LOG = LogFactory.getLog(RangerHiveAccessVerifierFactory.class) ; - - private static RangerHiveAccessVerifier hiveAccessVerififer = null ; - - public static RangerHiveAccessVerifier getInstance() { - if (hiveAccessVerififer == null) { - synchronized(RangerHiveAccessVerifierFactory.class) { - RangerHiveAccessVerifier temp = hiveAccessVerififer ; - if (temp == null) { - String hiveAccessVerifierClassName = RangerConfiguration.getInstance().get(RangerHadoopConstants.HIVE_ACCESS_VERIFIER_CLASS_NAME_PROP, RangerHadoopConstants.HIVE_ACCESS_VERIFIER_CLASS_NAME_DEFAULT_VALUE ) ; - - if (hiveAccessVerifierClassName != null) { - LOG.info("Hive Access Verification class [" + hiveAccessVerifierClassName + "] - Being built"); - try { - hiveAccessVerififer = (RangerHiveAccessVerifier) (Class.forName(hiveAccessVerifierClassName).newInstance()) ; - LOG.info("Created a new instance of class: [" + hiveAccessVerifierClassName + "] for Hive Access verification."); - } catch (InstantiationException e) { - LOG.error("Unable to create HiveAccess Verifier: [" + hiveAccessVerifierClassName + "]", e); - } catch (IllegalAccessException e) { - LOG.error("Unable to create HiveAccess Verifier: [" + hiveAccessVerifierClassName + "]", e); - } catch (ClassNotFoundException e) { - LOG.error("Unable to create HiveAccess Verifier: [" + hiveAccessVerifierClassName + "]", e); - } catch (Throwable t) { - LOG.error("Unable to create HiveAccess Verifier: [" + hiveAccessVerifierClassName + "]", t); - } - finally { - LOG.info("Created a new instance of class: [" + hiveAccessVerifierClassName + "] for Hive Access verification. (" + hiveAccessVerififer + ")"); - } - } - } - else { - LOG.error("Unable to obtain hiveAccessVerifier [" + RangerHadoopConstants.HIVE_ACCESS_VERIFIER_CLASS_NAME_PROP + "]"); - } - } - } - return hiveAccessVerififer ; - } -} http://git-wip-us.apache.org/repos/asf/incubator-ranger/blob/7758ed1c/hive-agent/src/main/java/org/apache/ranger/authorization/hive/RangerHiveObjectAccessInfo.java ---------------------------------------------------------------------- diff --git a/hive-agent/src/main/java/org/apache/ranger/authorization/hive/RangerHiveObjectAccessInfo.java b/hive-agent/src/main/java/org/apache/ranger/authorization/hive/RangerHiveObjectAccessInfo.java deleted file mode 100644 index 61b45e2..0000000 --- a/hive-agent/src/main/java/org/apache/ranger/authorization/hive/RangerHiveObjectAccessInfo.java +++ /dev/null @@ -1,270 +0,0 @@ -/* - * Licensed to the Apache Software Foundation (ASF) under one - * or more contributor license agreements. See the NOTICE file - * distributed with this work for additional information - * regarding copyright ownership. The ASF licenses this file - * to you under the Apache License, Version 2.0 (the - * "License"); you may not use this file except in compliance - * with the License. You may obtain a copy of the License at - * - * http://www.apache.org/licenses/LICENSE-2.0 - * - * Unless required by applicable law or agreed to in writing, - * software distributed under the License is distributed on an - * "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY - * KIND, either express or implied. 
See the License for the - * specific language governing permissions and limitations - * under the License. - */ - - package org.apache.ranger.authorization.hive; - -import java.util.ArrayList; -import java.util.List; - -import org.apache.commons.lang.builder.EqualsBuilder; -import org.apache.commons.lang.builder.HashCodeBuilder; -import org.apache.ranger.authorization.utils.StringUtil; - -public class RangerHiveObjectAccessInfo { - public enum HiveObjectType { NONE, DATABASE, TABLE, VIEW, PARTITION, INDEX, COLUMN, FUNCTION, URI }; - public enum HiveAccessType { NONE, CREATE, ALTER, DROP, INDEX, LOCK, SELECT, UPDATE, USE, ALL, ADMIN }; - - private String mOperType = null; - private RangerHiveAccessContext mContext = null; - private HiveAccessType mAccessType = HiveAccessType.NONE; - private HiveObjectType mObjectType = HiveObjectType.NONE; - private String mDatabase = null; - private String mTable = null; - private String mView = null; - private String mPartition = null; - private String mIndex = null; - private List<String> mColumns = null; - private String mFunction = null; - private String mUri = null; - private String mDeniedObjectName = null; - - public RangerHiveObjectAccessInfo(String operType, RangerHiveAccessContext context, HiveAccessType accessType, String dbName) { - this(operType, context, accessType, dbName, null, HiveObjectType.DATABASE, dbName); - } - - public RangerHiveObjectAccessInfo(String operType, RangerHiveAccessContext context, HiveAccessType accessType, String dbName, String tblName) { - this(operType, context, accessType, dbName, tblName, HiveObjectType.TABLE, tblName); - } - - public RangerHiveObjectAccessInfo(String operType, RangerHiveAccessContext context, HiveAccessType accessType, String dbName, HiveObjectType objType, String objName) { - this(operType, context, accessType, dbName, null, objType, objName); - } - - public RangerHiveObjectAccessInfo(String operType, RangerHiveAccessContext context, HiveAccessType accessType, HiveObjectType objType, String objName) { - this(operType, context, accessType, null, null, objType, objName); - } - - public RangerHiveObjectAccessInfo(String operType, RangerHiveAccessContext context, HiveAccessType accessType, String dbName, String tblOrViewName, List<String> columns) { - mOperType = operType; - mContext = context; - mAccessType = accessType; - mObjectType = HiveObjectType.COLUMN; - mDatabase = dbName; - mTable = tblOrViewName; - mView = tblOrViewName; - mColumns = columns; - } - - public RangerHiveObjectAccessInfo(String operType, RangerHiveAccessContext context, HiveAccessType accessType, String dbName, String tblName, HiveObjectType objType, String objName) { - mOperType = operType; - mContext = context; - mAccessType = accessType; - mObjectType = objType; - mDatabase = dbName; - mTable = tblName; - mView = tblName; - - if(objName != null && ! 
objName.trim().isEmpty()) { - switch(objType) { - case DATABASE: - mDatabase = objName; - break; - - case TABLE: - mTable = objName; - break; - - case VIEW: - mView = objName; - break; - - case PARTITION: - mPartition = objName; - break; - - case INDEX: - mIndex = objName; - break; - - case COLUMN: - mColumns = new ArrayList<String>(); - mColumns.add(objName); - break; - - case FUNCTION: - mFunction = objName; - break; - - case URI: - mUri = objName; - break; - - case NONE: - break; - } - } - } - - public String getOperType() { - return mOperType; - } - - public RangerHiveAccessContext getContext() { - return mContext; - } - - public HiveAccessType getAccessType() { - return mAccessType; - } - - public HiveObjectType getObjectType() { - return mObjectType; - } - - public String getDatabase() { - return mDatabase; - } - - public String getTable() { - return mTable; - } - - public String getView() { - return mView; - } - - public String getPartition() { - return mPartition; - } - - public String getIndex() { - return mIndex; - } - - public List<String> getColumns() { - return mColumns; - } - - public String getFunction() { - return mFunction; - } - - public String getUri() { - return mUri; - } - - public void setDeinedObjectName(String deniedObjectName) { - mDeniedObjectName = deniedObjectName; - } - - public String getDeinedObjectName() { - return mDeniedObjectName; - } - - public String getObjectName() { - String objName = null; - - if(this.mObjectType == HiveObjectType.URI) { - objName = mUri; - } else { - String tblName = null; - String colName = null; - - if(! StringUtil.isEmpty(mTable)) - tblName = mTable; - else if(! StringUtil.isEmpty(mView)) - tblName = mView; - else if(! StringUtil.isEmpty(mFunction)) - tblName = mFunction; - - if(! StringUtil.isEmpty(mColumns)) - colName = StringUtil.toString(mColumns); - else if(! StringUtil.isEmpty(mIndex)) - colName = mIndex; - - objName = getObjectName(mDatabase, tblName, colName); - } - - return objName; - } - - public static String getObjectName(String dbName, String tblName, String colName) { - String objName = StringUtil.isEmpty(dbName) ? 
"" : dbName; - - if(!StringUtil.isEmpty(tblName)) { - objName += ("/" + tblName); - - if(!StringUtil.isEmpty(colName)) { - objName += ("/" + colName); - } - } - - return objName; - } - - @Override - public boolean equals(Object obj) { - if (obj == null) { - return false; - } - if (obj == this) { - return true; - } - if (obj.getClass() != getClass()) { - return false; - } - RangerHiveObjectAccessInfo that = (RangerHiveObjectAccessInfo) obj; - return new EqualsBuilder() - .appendSuper(super.equals(obj)) - .append(mAccessType, that.mAccessType) - .append(mColumns, that.mColumns) - .append(mContext, that.mContext) - .append(mDatabase, that.mDatabase) - .append(mDeniedObjectName, that.mDeniedObjectName) - .append(mFunction, that.mFunction) - .append(mIndex, that.mIndex) - .append(mObjectType, that.mObjectType) - .append(mOperType, that.mOperType) - .append(mPartition, that.mPartition) - .append(mTable, that.mTable) - .append(mUri, that.mUri) - .append(mView, that.mView) - .isEquals(); - } - - @Override - public int hashCode() { - return new HashCodeBuilder(37, 41) - .appendSuper(43) - .append(mAccessType) - .append(mColumns) - .append(mContext) - .append(mDatabase) - .append(mDeniedObjectName) - .append(mFunction) - .append(mIndex) - .append(mObjectType) - .append(mOperType) - .append(mPartition) - .append(mTable) - .append(mUri) - .append(mView) - .toHashCode(); - } -} http://git-wip-us.apache.org/repos/asf/incubator-ranger/blob/7758ed1c/hive-agent/src/main/java/org/apache/ranger/authorization/hive/authorizer/RangerHiveAccessRequest.java ---------------------------------------------------------------------- diff --git a/hive-agent/src/main/java/org/apache/ranger/authorization/hive/authorizer/RangerHiveAccessRequest.java b/hive-agent/src/main/java/org/apache/ranger/authorization/hive/authorizer/RangerHiveAccessRequest.java new file mode 100644 index 0000000..cb35eac --- /dev/null +++ b/hive-agent/src/main/java/org/apache/ranger/authorization/hive/authorizer/RangerHiveAccessRequest.java @@ -0,0 +1,92 @@ +/* + * Licensed to the Apache Software Foundation (ASF) under one + * or more contributor license agreements. See the NOTICE file + * distributed with this work for additional information + * regarding copyright ownership. The ASF licenses this file + * to you under the Apache License, Version 2.0 (the + * "License"); you may not use this file except in compliance + * with the License. You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, + * software distributed under the License is distributed on an + * "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY + * KIND, either express or implied. See the License for the + * specific language governing permissions and limitations + * under the License. 
+ */ + +package org.apache.ranger.authorization.hive.authorizer; + +import java.util.Set; + +import org.apache.hadoop.hive.ql.security.authorization.plugin.HiveAuthzContext; +import org.apache.hadoop.hive.ql.security.authorization.plugin.HiveAuthzSessionContext; +import org.apache.hadoop.hive.ql.security.authorization.plugin.HiveOperationType; +import org.apache.ranger.authorization.utils.StringUtil; +import org.apache.ranger.plugin.policyengine.RangerAccessRequestImpl; +import org.apache.ranger.plugin.policyengine.RangerPolicyEngine; + + +public class RangerHiveAccessRequest extends RangerAccessRequestImpl { + private HiveAccessType accessType = HiveAccessType.NONE; + + public RangerHiveAccessRequest() { + super(); + } + + public RangerHiveAccessRequest(RangerHiveResource resource, + String user, + Set<String> userGroups, + HiveOperationType hiveOpType, + HiveAccessType accessType, + HiveAuthzContext context, + HiveAuthzSessionContext sessionContext) { + this.setResource(resource); + this.setUser(user); + this.setUserGroups(userGroups); + this.setAccessTime(StringUtil.getUTCDate()); + this.setAction(hiveOpType.name()); + + if(context != null) { + this.setClientIPAddress(context.getIpAddress()); + this.setRequestData(context.getCommandString()); + } + + if(sessionContext != null) { + this.setClientType(sessionContext.getClientType() == null ? null : sessionContext.getClientType().toString()); + this.setSessionId(sessionContext.getSessionString()); + } + + this.accessType = accessType; + + if(accessType == HiveAccessType.USE) { + this.setAccessType(RangerPolicyEngine.ANY_ACCESS); + } else { + this.setAccessType(accessType.toString().toLowerCase()); + } + } + + public HiveAccessType getAccessType() { + return accessType; + } + + public RangerHiveAccessRequest copy() { + RangerHiveAccessRequest ret = new RangerHiveAccessRequest(); + + ret.setResource(getResource()); + ret.setAccessTypes(getAccessTypes()); + ret.setUser(getUser()); + ret.setUserGroups(getUserGroups()); + ret.setAccessTime(getAccessTime()); + ret.setAction(getAction()); + ret.setClientIPAddress(getClientIPAddress()); + ret.setRequestData(getRequestData()); + ret.setClientType(getClientType()); + ret.setSessionId(getSessionId()); + ret.accessType = accessType; + + return ret; + } +} http://git-wip-us.apache.org/repos/asf/incubator-ranger/blob/7758ed1c/hive-agent/src/main/java/org/apache/ranger/authorization/hive/authorizer/RangerHiveAuditHandler.java ---------------------------------------------------------------------- diff --git a/hive-agent/src/main/java/org/apache/ranger/authorization/hive/authorizer/RangerHiveAuditHandler.java b/hive-agent/src/main/java/org/apache/ranger/authorization/hive/authorizer/RangerHiveAuditHandler.java new file mode 100644 index 0000000..e24c094 --- /dev/null +++ b/hive-agent/src/main/java/org/apache/ranger/authorization/hive/authorizer/RangerHiveAuditHandler.java @@ -0,0 +1,177 @@ +/* + * Licensed to the Apache Software Foundation (ASF) under one + * or more contributor license agreements. See the NOTICE file + * distributed with this work for additional information + * regarding copyright ownership. The ASF licenses this file + * to you under the Apache License, Version 2.0 (the + * "License"); you may not use this file except in compliance + * with the License. 
You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, + * software distributed under the License is distributed on an + * "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY + * KIND, either express or implied. See the License for the + * specific language governing permissions and limitations + * under the License. + */ + +package org.apache.ranger.authorization.hive.authorizer; + +import java.util.ArrayList; +import java.util.Collection; +import java.util.HashMap; +import java.util.Map; + +import org.apache.ranger.audit.model.AuthzAuditEvent; +import org.apache.ranger.authorization.hadoop.config.RangerConfiguration; +import org.apache.ranger.authorization.hadoop.constants.RangerHadoopConstants; +import org.apache.ranger.authorization.utils.StringUtil; +import org.apache.ranger.plugin.audit.RangerDefaultAuditHandler; +import org.apache.ranger.plugin.policyengine.RangerAccessResult; +import org.apache.ranger.plugin.policyengine.RangerAccessResult.Result; + +public class RangerHiveAuditHandler extends RangerDefaultAuditHandler { + private static final String RangerModuleName = RangerConfiguration.getInstance().get(RangerHadoopConstants.AUDITLOG_RANGER_MODULE_ACL_NAME_PROP , RangerHadoopConstants.DEFAULT_RANGER_MODULE_ACL_NAME) ; + + Collection<AuthzAuditEvent> auditEvents = null; + boolean deniedExists = false; + + public RangerHiveAuditHandler() { + super(); + } + + @Override + public void logAudit(RangerAccessResult result) { + if(! result.getIsAudited()) { + return; + } + + AuthzAuditEvent auditEvent = new AuthzAuditEvent(); + + RangerHiveAccessRequest request = (RangerHiveAccessRequest)result.getAccessRequest(); + RangerHiveResource resource = (RangerHiveResource)request.getResource(); + boolean isAllowed = result.getResult() == Result.ALLOWED; + + auditEvent.setAclEnforcer(RangerModuleName); + auditEvent.setSessionId(request.getSessionId()); + auditEvent.setResourceType("@" + StringUtil.toLower(resource.getObjectType().name())); // to be consistent with earlier release + auditEvent.setAccessType(request.getAccessType().toString()); + auditEvent.setAction(request.getAction()); + auditEvent.setUser(request.getUser()); + auditEvent.setAccessResult((short)(isAllowed ? 1 : 0)); + auditEvent.setPolicyId(result.getPolicyId()); + auditEvent.setClientIP(request.getClientIPAddress()); + auditEvent.setClientType(request.getClientType()); + auditEvent.setEventTime(request.getAccessTime()); + auditEvent.setRepositoryType(result.getServiceType()); + auditEvent.setRepositoryName(result.getServiceName()) ; + auditEvent.setRequestData(request.getRequestData()); + auditEvent.setResourcePath(getResourceValueAsString(resource, result.getServiceDef())); + + addAuthzAuditEvent(auditEvent); + } + + /* + * This method is expected to be called ONLY to process the results for multiple-columns in a table. + * To ensure this, RangerHiveAuthorizer should call isAccessAllowed(Collection<requests>) only for this condition + */ + @Override + public void logAudit(Collection<RangerAccessResult> results) { + Map<Long, AuthzAuditEvent> auditEvents = new HashMap<Long, AuthzAuditEvent>(); + + for(RangerAccessResult result : results) { + if(! 
result.getIsAudited()) { + continue; + } + + RangerHiveAccessRequest request = (RangerHiveAccessRequest)result.getAccessRequest(); + RangerHiveResource resource = (RangerHiveResource)request.getResource(); + boolean isAllowed = result.getResult() == Result.ALLOWED; + AuthzAuditEvent auditEvent = auditEvents.get(result.getPolicyId()); + + if(auditEvent == null) { + auditEvent = new AuthzAuditEvent(); + auditEvents.put(result.getPolicyId(), auditEvent); + + auditEvent.setAclEnforcer(RangerModuleName); + auditEvent.setSessionId(request.getSessionId()); + auditEvent.setResourceType("@" + StringUtil.toLower(resource.getObjectType().name())); // to be consistent with earlier release + auditEvent.setAccessType(request.getAccessType().toString()); + auditEvent.setAction(request.getAction()); + auditEvent.setUser(request.getUser()); + auditEvent.setAccessResult((short)(isAllowed ? 1 : 0)); + auditEvent.setPolicyId(result.getPolicyId()); + auditEvent.setClientIP(request.getClientIPAddress()); + auditEvent.setClientType(request.getClientType()); + auditEvent.setEventTime(request.getAccessTime()); + auditEvent.setRepositoryType(result.getServiceType()); + auditEvent.setRepositoryName(result.getServiceName()) ; + auditEvent.setRequestData(request.getRequestData()); + auditEvent.setResourcePath(getResourceValueAsString(resource, result.getServiceDef())); + } else if(isAllowed){ + auditEvent.setResourcePath(auditEvent.getResourcePath() + "," + resource.getColumn()); + } else { + auditEvent.setResourcePath(getResourceValueAsString(resource, result.getServiceDef())); + } + + if(!isAllowed) { + auditEvent.setResourcePath(getResourceValueAsString(resource, result.getServiceDef())); + + break; + } + } + + for(AuthzAuditEvent auditEvent : auditEvents.values()) { + addAuthzAuditEvent(auditEvent); + } + } + + public void logAuditEventForDfs(String userName, String dfsCommand, boolean accessGranted, int repositoryType, String repositoryName) { + AuthzAuditEvent auditEvent = new AuthzAuditEvent(); + + auditEvent.setAclEnforcer(RangerModuleName); + auditEvent.setResourceType("@dfs"); // to be consistent with earlier release + auditEvent.setAccessType("DFS"); + auditEvent.setAction("DFS"); + auditEvent.setUser(userName); + auditEvent.setAccessResult((short)(accessGranted ? 
1 : 0)); + auditEvent.setEventTime(StringUtil.getUTCDate()); + auditEvent.setRepositoryType(repositoryType); + auditEvent.setRepositoryName(repositoryName) ; + auditEvent.setRequestData(dfsCommand); + + auditEvent.setResourcePath(dfsCommand); + + addAuthzAuditEvent(auditEvent); + } + + public void flushAudit() { + if(auditEvents == null) { + return; + } + + for(AuthzAuditEvent auditEvent : auditEvents) { + if(deniedExists && auditEvent.getAccessResult() != 0) { // if deny exists, skip logging for allowed results + continue; + } + + super.logAuthzAudit(auditEvent); + } + } + + private void addAuthzAuditEvent(AuthzAuditEvent auditEvent) { + if(auditEvent != null) { + if(auditEvents == null) { + auditEvents = new ArrayList<AuthzAuditEvent>(); + } + + auditEvents.add(auditEvent); + + if(auditEvent.getAccessResult() == 0) { + deniedExists = true; + } + } + } +} http://git-wip-us.apache.org/repos/asf/incubator-ranger/blob/7758ed1c/hive-agent/src/main/java/org/apache/ranger/authorization/hive/authorizer/RangerHiveAuthorizer.java ---------------------------------------------------------------------- diff --git a/hive-agent/src/main/java/org/apache/ranger/authorization/hive/authorizer/RangerHiveAuthorizer.java b/hive-agent/src/main/java/org/apache/ranger/authorization/hive/authorizer/RangerHiveAuthorizer.java index 0dcea7c..df19603 100644 --- a/hive-agent/src/main/java/org/apache/ranger/authorization/hive/authorizer/RangerHiveAuthorizer.java +++ b/hive-agent/src/main/java/org/apache/ranger/authorization/hive/authorizer/RangerHiveAuthorizer.java @@ -20,8 +20,11 @@ package org.apache.ranger.authorization.hive.authorizer; import java.util.ArrayList; +import java.util.Collection; import java.util.List; +import java.util.Set; +import org.apache.commons.lang.StringUtils; import org.apache.commons.logging.Log; import org.apache.commons.logging.LogFactory; import org.apache.hadoop.fs.FileStatus; @@ -45,27 +48,24 @@ import org.apache.hadoop.hive.ql.security.authorization.plugin.HivePrivilegeObje import org.apache.hadoop.security.UserGroupInformation; import org.apache.ranger.admin.client.RangerAdminRESTClient; import org.apache.ranger.admin.client.datatype.GrantRevokeData; -import org.apache.ranger.audit.model.EnumRepositoryType; -import org.apache.ranger.audit.model.AuthzAuditEvent; -import org.apache.ranger.audit.provider.AuditProviderFactory; import org.apache.ranger.authorization.hadoop.config.RangerConfiguration; import org.apache.ranger.authorization.hadoop.constants.RangerHadoopConstants; -import org.apache.ranger.authorization.hive.RangerHiveAccessContext; -import org.apache.ranger.authorization.hive.RangerHiveAccessVerifier; -import org.apache.ranger.authorization.hive.RangerHiveAccessVerifierFactory; -import org.apache.ranger.authorization.hive.RangerHiveObjectAccessInfo; -import org.apache.ranger.authorization.hive.RangerHiveObjectAccessInfo.HiveAccessType; -import org.apache.ranger.authorization.hive.RangerHiveObjectAccessInfo.HiveObjectType; import org.apache.ranger.authorization.utils.StringUtil; +import org.apache.ranger.plugin.policyengine.RangerAccessRequest; +import org.apache.ranger.plugin.policyengine.RangerAccessResult; +import org.apache.ranger.plugin.policyengine.RangerAccessResult.Result; +import org.apache.ranger.plugin.service.RangerBasePlugin; + +import com.google.common.collect.Sets; public class RangerHiveAuthorizer extends RangerHiveAuthorizerBase { private static final Log LOG = LogFactory.getLog(RangerHiveAuthorizer.class) ; - private static final String RangerModuleName = 
RangerConfiguration.getInstance().get(RangerHadoopConstants.AUDITLOG_RANGER_MODULE_ACL_NAME_PROP , RangerHadoopConstants.DEFAULT_RANGER_MODULE_ACL_NAME) ; - private static final String repositoryName = RangerConfiguration.getInstance().get(RangerHadoopConstants.AUDITLOG_REPOSITORY_NAME_PROP); + private static final char COLUMN_SEP = ','; + private static final boolean UpdateXaPoliciesOnGrantRevoke = RangerConfiguration.getInstance().getBoolean(RangerHadoopConstants.HIVE_UPDATE_RANGER_POLICIES_ON_GRANT_REVOKE_PROP, RangerHadoopConstants.HIVE_UPDATE_RANGER_POLICIES_ON_GRANT_REVOKE_DEFAULT_VALUE); - private RangerHiveAccessVerifier mHiveAccessVerifier = null ; + private static RangerHivePlugin hivePlugin = null ; public RangerHiveAuthorizer(HiveMetastoreClientFactory metastoreClientFactory, @@ -76,23 +76,32 @@ public class RangerHiveAuthorizer extends RangerHiveAuthorizerBase { LOG.debug("RangerHiveAuthorizer.RangerHiveAuthorizer()"); - mHiveAccessVerifier = RangerHiveAccessVerifierFactory.getInstance() ; - - if(!RangerConfiguration.getInstance().isAuditInitDone()) { - if(sessionContext != null) { - String appType = "unknown"; - - switch(sessionContext.getClientType()) { - case HIVECLI: - appType = "hiveCLI"; - break; + if(hivePlugin == null) { + synchronized(RangerHiveAuthorizer.class) { + if(hivePlugin == null) { + RangerHivePlugin temp = new RangerHivePlugin(); + temp.init(); + + if(!RangerConfiguration.getInstance().isAuditInitDone()) { + if(sessionContext != null) { + String appType = "unknown"; + + switch(sessionContext.getClientType()) { + case HIVECLI: + appType = "hiveCLI"; + break; + + case HIVESERVER2: + appType = "hiveServer2"; + break; + } + + RangerConfiguration.getInstance().initAudit(appType); + } + } - case HIVESERVER2: - appType = "hiveServer2"; - break; + hivePlugin = temp; } - - RangerConfiguration.getInstance().initAudit(appType); } } } @@ -119,8 +128,10 @@ public class RangerHiveAuthorizer extends RangerHiveAuthorizerBase { throw new HiveAuthzPluginException("GRANT/REVOKE not supported in Ranger HiveAuthorizer. Please use Ranger Security Admin to setup access control."); } + /* TODO: + * boolean isSuccess = false; - RangerHiveObjectAccessInfo objAccessInfo = getObjectAccessInfo(HiveOperationType.GRANT_PRIVILEGE, hivePrivObject, new RangerHiveAccessContext(null, getHiveAuthzSessionContext()), true); + RangerHiveObjectAccessInfo objAccessInfo = getHiveAccessRequests(HiveOperationType.GRANT_PRIVILEGE, hivePrivObject, new RangerHiveAccessContext(null, getHiveAuthzSessionContext()), true); try { GrantRevokeData grData = createGrantRevokeData(objAccessInfo, hivePrincipals, hivePrivileges, getGrantorUsername(grantorPrincipal), grantOption); @@ -144,6 +155,7 @@ public class RangerHiveAuthorizer extends RangerHiveAuthorizerBase { logAuditEvent(ugi, objAccessInfo, isSuccess); } } + */ } /** @@ -167,8 +179,10 @@ public class RangerHiveAuthorizer extends RangerHiveAuthorizerBase { throw new HiveAuthzPluginException("GRANT/REVOKE not supported in Ranger HiveAuthorizer. 
Please use Ranger Security Admin to setup access control."); } + /* TODO: + * boolean isSuccess = false; - RangerHiveObjectAccessInfo objAccessInfo = getObjectAccessInfo(HiveOperationType.REVOKE_PRIVILEGE, hivePrivObject, new RangerHiveAccessContext(null, getHiveAuthzSessionContext()), true); + RangerHiveObjectAccessInfo objAccessInfo = getHiveAccessRequests(HiveOperationType.REVOKE_PRIVILEGE, hivePrivObject, new RangerHiveAccessContext(null, getHiveAuthzSessionContext()), true); try { GrantRevokeData grData = createGrantRevokeData(objAccessInfo, hivePrincipals, hivePrivileges, getGrantorUsername(grantorPrincipal), grantOption); @@ -192,6 +206,7 @@ public class RangerHiveAuthorizer extends RangerHiveAuthorizerBase { logAuditEvent(ugi, objAccessInfo, isSuccess); } } + */ } /** @@ -209,136 +224,167 @@ public class RangerHiveAuthorizer extends RangerHiveAuthorizerBase { List<HivePrivilegeObject> outputHObjs, HiveAuthzContext context) throws HiveAuthzPluginException, HiveAccessControlException { - - UserGroupInformation ugi = this.getCurrentUserGroupInfo(); + UserGroupInformation ugi = getCurrentUserGroupInfo(); if(ugi == null) { throw new HiveAccessControlException("Permission denied: user information not available"); } - RangerHiveAccessContext hiveContext = this.getAccessContext(context); + RangerHiveAuditHandler auditHandler = new RangerHiveAuditHandler(); - if(LOG.isDebugEnabled()) { - LOG.debug(toString(hiveOpType, inputHObjs, outputHObjs, hiveContext)); - } - - if(hiveOpType == HiveOperationType.DFS) { - handleDfsCommand(hiveOpType, inputHObjs, outputHObjs, hiveContext); - - return; - } + try { + HiveAuthzSessionContext sessionContext = getHiveAuthzSessionContext(); + String user = ugi.getShortUserName(); + Set<String> groups = Sets.newHashSet(ugi.getGroupNames()); - List<RangerHiveObjectAccessInfo> objAccessList = getObjectAccessInfo(hiveOpType, inputHObjs, outputHObjs, hiveContext); + if(LOG.isDebugEnabled()) { + LOG.debug(toString(hiveOpType, inputHObjs, outputHObjs, context, sessionContext)); + } - for(RangerHiveObjectAccessInfo objAccessInfo : objAccessList) { - boolean ret = false; + if(hiveOpType == HiveOperationType.DFS) { + handleDfsCommand(hiveOpType, inputHObjs, outputHObjs, context, sessionContext, user, groups, auditHandler); - if(objAccessInfo.getObjectType() == HiveObjectType.URI) { - ret = isURIAccessAllowed(ugi, objAccessInfo.getAccessType(), objAccessInfo.getUri(), getHiveConf()); - } else if(objAccessInfo.getAccessType() != HiveAccessType.ADMIN) { - ret = mHiveAccessVerifier.isAccessAllowed(ugi, objAccessInfo); - } + return; + } - if(! 
ret) { - if(mHiveAccessVerifier.isAudited(objAccessInfo)) { - logAuditEvent(ugi, objAccessInfo, false); - } - - String deniedObjectName = objAccessInfo.getDeinedObjectName(); - - if(StringUtil.isEmpty(deniedObjectName)) { - deniedObjectName = objAccessInfo.getObjectName(); - } + List<RangerHiveAccessRequest> requests = new ArrayList<RangerHiveAccessRequest>(); - throw new HiveAccessControlException(String.format("Permission denied: user [%s] does not have [%s] privilege on [%s]", - ugi.getShortUserName(), objAccessInfo.getAccessType().name(), deniedObjectName)); - } - } + if(inputHObjs != null) { + for(HivePrivilegeObject hiveObj : inputHObjs) { + RangerHiveResource resource = getHiveResource(hiveOpType, hiveObj); - // access is allowed; audit all accesses - for(RangerHiveObjectAccessInfo objAccessInfo : objAccessList) { - if(mHiveAccessVerifier.isAudited(objAccessInfo)) { - logAuditEvent(ugi, objAccessInfo, true); - } - } - } - - private List<RangerHiveObjectAccessInfo> getObjectAccessInfo(HiveOperationType hiveOpType, - List<HivePrivilegeObject> inputsHObjs, - List<HivePrivilegeObject> outputHObjs, - RangerHiveAccessContext context) { - List<RangerHiveObjectAccessInfo> ret = new ArrayList<RangerHiveObjectAccessInfo>(); - - if(inputsHObjs != null) { - for(HivePrivilegeObject hiveObj : inputsHObjs) { - RangerHiveObjectAccessInfo hiveAccessObj = getObjectAccessInfo(hiveOpType, hiveObj, context, true); - - if( hiveAccessObj != null - && hiveAccessObj.getAccessType() != HiveAccessType.ADMIN // access check is performed at the Ranger policy server, as a part of updating the permissions - && !ret.contains(hiveAccessObj)) { - ret.add(hiveAccessObj); + if(resource.getObjectType() == HiveObjectType.URI) { + String path = hiveObj.getObjectName(); + FsAction permission = FsAction.READ; + + if(!isURIAccessAllowed(user, groups, permission, path, getHiveConf())) { + throw new HiveAccessControlException(String.format("Permission denied: user [%s] does not have [%s] privilege on [%s]", user, permission.name(), path)); + } + + continue; + } + + HiveAccessType accessType = getAccessType(hiveObj, hiveOpType, true); + + // ADMIN: access check is performed at the Ranger policy server, as a part of updating the permissions + if(accessType == HiveAccessType.ADMIN || accessType == HiveAccessType.NONE) { + continue; + } + + if(!existsByResourceAndAccessType(requests, resource, accessType)) { + RangerHiveAccessRequest request = new RangerHiveAccessRequest(resource, user, groups, hiveOpType, accessType, context, sessionContext); + + requests.add(request); + } } } - } - if(outputHObjs != null) { - for(HivePrivilegeObject hiveObj : outputHObjs) { - RangerHiveObjectAccessInfo hiveAccessObj = getObjectAccessInfo(hiveOpType, hiveObj, context, false); - - if( hiveAccessObj != null - && hiveAccessObj.getAccessType() != HiveAccessType.ADMIN // access check is performed at the Ranger policy server, as a part of updating the permissions - && !ret.contains(hiveAccessObj)) { - ret.add(hiveAccessObj); + if(outputHObjs != null) { + for(HivePrivilegeObject hiveObj : outputHObjs) { + RangerHiveResource resource = getHiveResource(hiveOpType, hiveObj); + + if(resource.getObjectType() == HiveObjectType.URI) { + String path = hiveObj.getObjectName(); + FsAction permission = FsAction.WRITE; + + if(!isURIAccessAllowed(user, groups, permission, path, getHiveConf())) { + throw new HiveAccessControlException(String.format("Permission denied: user [%s] does not have [%s] privilege on [%s]", user, permission.name(), path)); + } + + continue; 
+ } + + HiveAccessType accessType = getAccessType(hiveObj, hiveOpType, false); + + // ADMIN: access check is performed at the Ranger policy server, as a part of updating the permissions + if(accessType == HiveAccessType.ADMIN || accessType == HiveAccessType.NONE) { + continue; + } + + if(!existsByResourceAndAccessType(requests, resource, accessType)) { + RangerHiveAccessRequest request = new RangerHiveAccessRequest(resource, user, groups, hiveOpType, accessType, context, sessionContext); + + requests.add(request); + } } } - } - if(ret.size() == 0 && LOG.isDebugEnabled()) { - LOG.debug("getObjectAccessInfo(): no objects found for access check! " + toString(hiveOpType, inputsHObjs, outputHObjs, context)); + for(RangerHiveAccessRequest request : requests) { + RangerHiveResource resource = (RangerHiveResource)request.getResource(); + RangerAccessResult result = null; + + if(resource.getObjectType() == HiveObjectType.COLUMN && StringUtils.contains(resource.getColumn(), COLUMN_SEP)) { + List<RangerAccessRequest> colRequests = new ArrayList<RangerAccessRequest>(); + + String[] columns = StringUtils.split(resource.getColumn(), COLUMN_SEP); + + for(String column : columns) { + column = column == null ? null : column.trim(); + + if(StringUtils.isEmpty(column.trim())) { + continue; + } + + RangerHiveResource colResource = new RangerHiveResource(HiveObjectType.COLUMN, resource.getDatabase(), resource.getTableOrUdf(), column); + + RangerHiveAccessRequest colRequest = request.copy(); + colRequest.setResource(colResource); + + colRequests.add(colRequest); + } + + Collection<RangerAccessResult> colResults = hivePlugin.isAccessAllowed(colRequests, auditHandler); + + if(colResults != null) { + for(RangerAccessResult colResult : colResults) { + result = colResult; + + if(result.getResult() != Result.ALLOWED) { + break; + } + } + } + } else { + result = hivePlugin.isAccessAllowed(request, auditHandler); + } + + if(result != null && result.getResult() != Result.ALLOWED) { + String path = auditHandler.getResourceValueAsString(request.getResource(), result.getServiceDef()); + + throw new HiveAccessControlException(String.format("Permission denied: user [%s] does not have [%s] privilege on [%s]", + user, request.getAccessType().name(), path)); + } + } + } finally { + auditHandler.flushAudit(); } - - return ret; } - private RangerHiveObjectAccessInfo getObjectAccessInfo(HiveOperationType hiveOpType, HivePrivilegeObject hiveObj, RangerHiveAccessContext context, boolean isInput) { - RangerHiveObjectAccessInfo ret = null; + private RangerHiveResource getHiveResource(HiveOperationType hiveOpType, + HivePrivilegeObject hiveObj) { + RangerHiveResource ret = null; HiveObjectType objectType = getObjectType(hiveObj, hiveOpType); - HiveAccessType accessType = getAccessType(hiveObj, hiveOpType, isInput); - String operType = hiveOpType.name(); switch(objectType) { case DATABASE: - ret = new RangerHiveObjectAccessInfo(operType, context, accessType, hiveObj.getDbname()); + ret = new RangerHiveResource(objectType, hiveObj.getDbname()); break; case TABLE: - ret = new RangerHiveObjectAccessInfo(operType, context, accessType, hiveObj.getDbname(), HiveObjectType.TABLE, hiveObj.getObjectName()); - break; - case VIEW: - ret = new RangerHiveObjectAccessInfo(operType, context, accessType, hiveObj.getDbname(), HiveObjectType.VIEW, hiveObj.getObjectName()); - break; - case PARTITION: - ret = new RangerHiveObjectAccessInfo(operType, context, accessType, hiveObj.getDbname(), HiveObjectType.PARTITION, hiveObj.getObjectName()); - break; - 
case INDEX: - String indexName = "?"; // TODO: - ret = new RangerHiveObjectAccessInfo(operType, context, accessType, hiveObj.getDbname(), hiveObj.getObjectName(), HiveObjectType.INDEX, indexName); + case FUNCTION: + ret = new RangerHiveResource(objectType, hiveObj.getDbname(), hiveObj.getObjectName()); break; case COLUMN: - ret = new RangerHiveObjectAccessInfo(operType, context, accessType, hiveObj.getDbname(), hiveObj.getObjectName(), hiveObj.getColumns()); - break; - - case FUNCTION: - ret = new RangerHiveObjectAccessInfo(operType, context, accessType, hiveObj.getDbname(), HiveObjectType.FUNCTION, hiveObj.getObjectName()); + ret = new RangerHiveResource(objectType, hiveObj.getDbname(), hiveObj.getObjectName(), StringUtils.join(hiveObj.getColumns(), COLUMN_SEP)); break; case URI: - ret = new RangerHiveObjectAccessInfo(operType, context, accessType, HiveObjectType.URI, hiveObj.getObjectName()); + ret = new RangerHiveResource(objectType, hiveObj.getObjectName()); break; case NONE: @@ -555,32 +601,9 @@ public class RangerHiveAuthorizer extends RangerHiveAuthorizerBase { return accessType; } - private boolean isURIAccessAllowed(UserGroupInformation ugi, HiveAccessType accessType, String uri, HiveConf conf) { + private boolean isURIAccessAllowed(String userName, Set<String> groups, FsAction action, String uri, HiveConf conf) { boolean ret = false; - FsAction action = FsAction.NONE; - - switch(accessType) { - case ALTER: - case CREATE: - case UPDATE: - case DROP: - case INDEX: - case LOCK: - case ADMIN: - case ALL: - action = FsAction.WRITE; - break; - - case SELECT: - case USE: - action = FsAction.READ; - break; - - case NONE: - break; - } - if(action == FsAction.NONE) { ret = true; } else { @@ -589,7 +612,6 @@ public class RangerHiveAuthorizer extends RangerHiveAuthorizerBase { FileSystem fs = FileSystem.get(filePath.toUri(), conf); Path path = FileUtils.getPathOrParentThatExists(fs, filePath); FileStatus fileStatus = fs.getFileStatus(path); - String userName = ugi.getShortUserName(); if (FileUtils.isOwnerOfFileHierarchy(fs, fileStatus, userName)) { ret = true; @@ -607,7 +629,11 @@ public class RangerHiveAuthorizer extends RangerHiveAuthorizerBase { private void handleDfsCommand(HiveOperationType hiveOpType, List<HivePrivilegeObject> inputHObjs, List<HivePrivilegeObject> outputHObjs, - RangerHiveAccessContext context) + HiveAuthzContext context, + HiveAuthzSessionContext sessionContext, + String user, + Set<String> groups, + RangerHiveAuditHandler auditHandler) throws HiveAuthzPluginException, HiveAccessControlException { String dfsCommandParams = null; @@ -624,14 +650,42 @@ public class RangerHiveAuthorizer extends RangerHiveAuthorizerBase { } } - UserGroupInformation ugi = this.getCurrentUserGroupInfo(); + int serviceType = -1; + String serviceName = null; - logAuditEventForDfs(ugi, dfsCommandParams, false); + if(hivePlugin != null) { + if(hivePlugin.getPolicyEngine() != null && + hivePlugin.getPolicyEngine().getServiceDef() != null && + hivePlugin.getPolicyEngine().getServiceDef().getId() != null ) { + serviceType = hivePlugin.getPolicyEngine().getServiceDef().getId().intValue(); + } + + serviceName = hivePlugin.getServiceName(); + } + + auditHandler.logAuditEventForDfs(user, dfsCommandParams, false, serviceType, serviceName); throw new HiveAccessControlException(String.format("Permission denied: user [%s] does not have privilege for [%s] command", - ugi.getShortUserName(), hiveOpType.name())); + user, hiveOpType.name())); } - + + private boolean 
existsByResourceAndAccessType(Collection<RangerHiveAccessRequest> requests, RangerHiveResource resource, HiveAccessType accessType) { + boolean ret = false; + + if(requests != null && resource != null) { + for(RangerHiveAccessRequest request : requests) { + if(request.getAccessType() == accessType && request.getResource().equals(resource)) { + ret = true; + + break; + } + } + } + + return ret; + } + + /* private String getGrantorUsername(HivePrincipal grantorPrincipal) { String grantor = grantorPrincipal != null ? grantorPrincipal.getName() : null; @@ -714,82 +768,13 @@ public class RangerHiveAuthorizer extends RangerHiveAuthorizerBase { return grData; } - - private void logAuditEventForDfs(UserGroupInformation ugi, String dfsCommand, boolean accessGranted) { - AuthzAuditEvent auditEvent = new AuthzAuditEvent(); - - try { - auditEvent.setAclEnforcer(RangerModuleName); - auditEvent.setResourceType("@dfs"); // to be consistent with earlier release - auditEvent.setAccessType("DFS"); - auditEvent.setAction("DFS"); - auditEvent.setUser(ugi.getShortUserName()); - auditEvent.setAccessResult((short)(accessGranted ? 1 : 0)); - auditEvent.setEventTime(StringUtil.getUTCDate()); - auditEvent.setRepositoryType(EnumRepositoryType.HIVE); - auditEvent.setRepositoryName(repositoryName) ; - auditEvent.setRequestData(dfsCommand); - - auditEvent.setResourcePath(dfsCommand); - - if(LOG.isDebugEnabled()) { - LOG.debug("logAuditEvent [" + auditEvent + "] - START"); - } - - AuditProviderFactory.getAuditProvider().log(auditEvent); - - if(LOG.isDebugEnabled()) { - LOG.debug("logAuditEvent [" + auditEvent + "] - END"); - } - } - catch(Throwable t) { - LOG.error("ERROR logEvent [" + auditEvent + "]", t); - } - } - - private void logAuditEvent(UserGroupInformation ugi, RangerHiveObjectAccessInfo objAccessInfo, boolean accessGranted) { - AuthzAuditEvent auditEvent = new AuthzAuditEvent(); - - try { - auditEvent.setAclEnforcer(RangerModuleName); - auditEvent.setSessionId(objAccessInfo.getContext().getSessionString()); - auditEvent.setResourceType("@" + StringUtil.toLower(objAccessInfo.getObjectType().name())); // to be consistent with earlier release - auditEvent.setAccessType(objAccessInfo.getAccessType().toString()); - auditEvent.setAction(objAccessInfo.getOperType()); - auditEvent.setUser(ugi.getShortUserName()); - auditEvent.setAccessResult((short)(accessGranted ? 1 : 0)); - auditEvent.setClientIP(objAccessInfo.getContext().getClientIpAddress()); - auditEvent.setClientType(objAccessInfo.getContext().getClientType()); - auditEvent.setEventTime(StringUtil.getUTCDate()); - auditEvent.setRepositoryType(EnumRepositoryType.HIVE); - auditEvent.setRepositoryName(repositoryName) ; - auditEvent.setRequestData(objAccessInfo.getContext().getCommandString()); - - if(! 
accessGranted && !StringUtil.isEmpty(objAccessInfo.getDeinedObjectName())) { - auditEvent.setResourcePath(objAccessInfo.getDeinedObjectName()); - } else { - auditEvent.setResourcePath(objAccessInfo.getObjectName()); - } - - if(LOG.isDebugEnabled()) { - LOG.debug("logAuditEvent [" + auditEvent + "] - START"); - } - - AuditProviderFactory.getAuditProvider().log(auditEvent); - - if(LOG.isDebugEnabled()) { - LOG.debug("logAuditEvent [" + auditEvent + "] - END"); - } - } - catch(Throwable t) { - LOG.error("ERROR logEvent [" + auditEvent + "]", t); - } - } + */ private String toString(HiveOperationType hiveOpType, List<HivePrivilegeObject> inputHObjs, List<HivePrivilegeObject> outputHObjs, - RangerHiveAccessContext context) { + HiveAuthzContext context, + HiveAuthzSessionContext sessionContext) { StringBuilder sb = new StringBuilder(); sb.append("'checkPrivileges':{"); @@ -804,12 +789,10 @@ public class RangerHiveAuthorizer extends RangerHiveAuthorizerBase { sb.append("]"); sb.append(", 'context':{"); - if(context != null) { - sb.append("'clientType':").append(context.getClientType()); - sb.append(", 'commandString':").append(context.getCommandString()); - sb.append(", 'ipAddress':").append(context.getClientIpAddress()); - sb.append(", 'sessionString':").append(context.getSessionString()); - } + sb.append("'clientType':").append(sessionContext == null ? null : sessionContext.getClientType()); + sb.append(", 'commandString':").append(context == null ? null : context.getCommandString()); + sb.append(", 'ipAddress':").append(context == null ? null : context.getIpAddress()); + sb.append(", 'sessionString':").append(sessionContext == null ? null : sessionContext.getSessionString()); sb.append("}"); sb.append(", 'user':").append(this.getCurrentUserGroupInfo().getUserName()); @@ -847,3 +830,14 @@ public class RangerHiveAuthorizer extends RangerHiveAuthorizerBase { return sb; } } + +enum HiveObjectType { NONE, DATABASE, TABLE, VIEW, PARTITION, INDEX, COLUMN, FUNCTION, URI }; +enum HiveAccessType { NONE, CREATE, ALTER, DROP, INDEX, LOCK, SELECT, UPDATE, USE, ALL, ADMIN }; + +class RangerHivePlugin extends RangerBasePlugin { + public RangerHivePlugin() { + super("hive"); + } +} + +
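----------------------------------------------------------------------
For reference, the call pattern this commit introduces (lazy plugin bootstrap, per-request policy evaluation, buffered audit flush) can be condensed into a short sketch. This is illustrative only and not part of the commit: the wrapper class name, the literal database/table/column/group values, and the use of HiveOperationType.QUERY are hypothetical, while the Ranger and Hive types and calls (RangerHivePlugin, RangerHiveAccessRequest, RangerHiveResource, RangerHiveAuditHandler, isAccessAllowed, flushAudit) are the ones appearing in the diff above.

package org.apache.ranger.authorization.hive.authorizer;

import java.util.Set;

import org.apache.hadoop.hive.ql.security.authorization.plugin.HiveAccessControlException;
import org.apache.hadoop.hive.ql.security.authorization.plugin.HiveAuthzContext;
import org.apache.hadoop.hive.ql.security.authorization.plugin.HiveAuthzSessionContext;
import org.apache.hadoop.hive.ql.security.authorization.plugin.HiveOperationType;
import org.apache.ranger.plugin.policyengine.RangerAccessResult;
import org.apache.ranger.plugin.policyengine.RangerAccessResult.Result;

import com.google.common.collect.Sets;

// Hypothetical wrapper, same package as the authorizer so the package-private
// HiveObjectType/HiveAccessType enums declared at the end of the diff are visible.
class RangerHivePluginUsageSketch {
    private static RangerHivePlugin hivePlugin;

    static void evaluate(String user, HiveAuthzContext context, HiveAuthzSessionContext sessionContext)
            throws HiveAccessControlException {
        if (hivePlugin == null) {
            RangerHivePlugin plugin = new RangerHivePlugin(); // super("hive") per the diff
            plugin.init();                                    // load policies for the service
            hivePlugin = plugin;
        }

        // One audit handler per checkPrivileges() call; events are buffered and
        // emitted once, so a deny can suppress the allow events (see flushAudit()).
        RangerHiveAuditHandler auditHandler = new RangerHiveAuditHandler();

        try {
            Set<String> groups = Sets.newHashSet("finance");  // hypothetical group membership

            // SELECT on a single column: db "default", table "tbl", column "col1"
            RangerHiveResource resource =
                new RangerHiveResource(HiveObjectType.COLUMN, "default", "tbl", "col1");

            RangerHiveAccessRequest request = new RangerHiveAccessRequest(
                resource, user, groups, HiveOperationType.QUERY,
                HiveAccessType.SELECT, context, sessionContext);

            RangerAccessResult result = hivePlugin.isAccessAllowed(request, auditHandler);

            if (result == null || result.getResult() != Result.ALLOWED) {
                throw new HiveAccessControlException(
                    "Permission denied: user [" + user + "] does not have [SELECT] privilege on [default/tbl/col1]");
            }
        } finally {
            auditHandler.flushAudit();  // emit the buffered AuthzAuditEvents exactly once
        }
    }
}

Buffering in RangerHiveAuditHandler is what lets flushAudit() drop the allowed-column events once any column of the same statement is denied (the deniedExists flag), which is why checkPrivileges() wraps the evaluation in try/finally rather than logging each result immediately.
----------------------------------------------------------------------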
