Repository: incubator-sentry Updated Branches: refs/heads/hive_plugin_v2 fe5e4a9a6 -> 912057309
SENTRY-847: [column level privilege] if grant column level privilege to user, show columns in table shouldn't require extra table level privilege (Guoquan Shen, Reviewed by: Colin Ma) Project: http://git-wip-us.apache.org/repos/asf/incubator-sentry/repo Commit: http://git-wip-us.apache.org/repos/asf/incubator-sentry/commit/91205730 Tree: http://git-wip-us.apache.org/repos/asf/incubator-sentry/tree/91205730 Diff: http://git-wip-us.apache.org/repos/asf/incubator-sentry/diff/91205730 Branch: refs/heads/hive_plugin_v2 Commit: 91205730928e517598a3a49fd985d28aca669b1f Parents: fe5e4a9 Author: Guoquan Shen <guoquan.s...@intel.com> Authored: Fri Aug 28 18:06:25 2015 +0800 Committer: Sun Dapeng <s...@apache.org> Committed: Wed Oct 28 14:55:21 2015 +0800 ---------------------------------------------------------------------- .../hive/ql/exec/SentryFilterDDLTask.java | 138 +++++++++++++++++++ .../binding/hive/HiveAuthzBindingHook.java | 70 +++++++++- .../binding/hive/authz/HiveAuthzPrivileges.java | 3 +- .../hive/authz/HiveAuthzPrivilegesMap.java | 10 +- .../file/SimpleFileProviderBackend.java | 2 +- .../e2e/dbprovider/TestColumnEndToEnd.java | 82 +++++++++++ 6 files changed, 301 insertions(+), 4 deletions(-) ---------------------------------------------------------------------- http://git-wip-us.apache.org/repos/asf/incubator-sentry/blob/91205730/sentry-binding/sentry-binding-hive/src/main/java/org/apache/hadoop/hive/ql/exec/SentryFilterDDLTask.java ---------------------------------------------------------------------- diff --git a/sentry-binding/sentry-binding-hive/src/main/java/org/apache/hadoop/hive/ql/exec/SentryFilterDDLTask.java b/sentry-binding/sentry-binding-hive/src/main/java/org/apache/hadoop/hive/ql/exec/SentryFilterDDLTask.java new file mode 100644 index 0000000..d47ca3b --- /dev/null +++ b/sentry-binding/sentry-binding-hive/src/main/java/org/apache/hadoop/hive/ql/exec/SentryFilterDDLTask.java @@ -0,0 +1,138 @@ +/* + * Licensed to the Apache Software Foundation 
/*
 * Licensed to the Apache Software Foundation (ASF) under one or more
 * contributor license agreements.  See the NOTICE file distributed with
 * this work for additional information regarding copyright ownership.
 * The ASF licenses this file to You under the Apache License, Version 2.0
 * (the "License"); you may not use this file except in compliance with
 * the License.  You may obtain a copy of the License at
 *
 *     http://www.apache.org/licenses/LICENSE-2.0
 *
 * Unless required by applicable law or agreed to in writing, software
 * distributed under the License is distributed on an "AS IS" BASIS,
 * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
 * See the License for the specific language governing permissions and
 * limitations under the License.
 */
package org.apache.hadoop.hive.ql.exec;

import static org.apache.hadoop.util.StringUtils.stringifyException;

import java.io.DataOutputStream;
import java.io.IOException;
import java.util.List;

import org.apache.commons.logging.Log;
import org.apache.commons.logging.LogFactory;
import org.apache.hadoop.fs.FileSystem;
import org.apache.hadoop.fs.Path;
import org.apache.hadoop.hive.metastore.api.FieldSchema;
import org.apache.hadoop.hive.ql.DriverContext;
import org.apache.hadoop.hive.ql.ErrorMsg;
import org.apache.hadoop.hive.ql.metadata.Hive;
import org.apache.hadoop.hive.ql.metadata.HiveException;
import org.apache.hadoop.hive.ql.metadata.Table;
import org.apache.hadoop.hive.ql.metadata.formatting.MetaDataFormatUtils;
import org.apache.hadoop.hive.ql.plan.HiveOperation;
import org.apache.hadoop.hive.ql.plan.ShowColumnsDesc;
import org.apache.hadoop.hive.ql.session.SessionState;
import org.apache.hadoop.io.IOUtils;
import org.apache.sentry.binding.hive.HiveAuthzBindingHook;
import org.apache.sentry.binding.hive.authz.HiveAuthzBinding;
import org.apache.sentry.core.common.Subject;

import com.google.common.base.Preconditions;

/**
 * A {@link DDLTask} replacement that post-filters metadata command output so a
 * user only sees objects they hold Sentry privileges on (SENTRY-847).
 *
 * Currently only "SHOW COLUMNS IN &lt;table&gt;" is filtered: columns the
 * subject has no column-level privilege on are silently omitted from the
 * result, instead of requiring an extra table-level privilege. Every other
 * DDL operation falls through to the parent {@link DDLTask#execute}.
 */
public class SentryFilterDDLTask extends DDLTask {
  private static final long serialVersionUID = 1L;
  private static final Log LOG = LogFactory.getLog(SentryFilterDDLTask.class);

  // Binding used to authorize each column against the Sentry policy store.
  private HiveAuthzBinding hiveAuthzBinding;
  // The user the statement runs as.
  private Subject subject;
  // The Hive operation being authorized (e.g. SHOWCOLUMNS).
  private HiveOperation stmtOperation;

  public SentryFilterDDLTask(HiveAuthzBinding hiveAuthzBinding, Subject subject,
      HiveOperation stmtOperation) {
    Preconditions.checkNotNull(hiveAuthzBinding);
    Preconditions.checkNotNull(subject);
    Preconditions.checkNotNull(stmtOperation);

    this.hiveAuthzBinding = hiveAuthzBinding;
    this.subject = subject;
    this.stmtOperation = stmtOperation;
  }

  public HiveAuthzBinding getHiveAuthzBinding() {
    return hiveAuthzBinding;
  }

  public Subject getSubject() {
    return subject;
  }

  public HiveOperation getStmtOperation() {
    return stmtOperation;
  }

  @Override
  public int execute(DriverContext driverContext) {
    // Currently the SentryFilterDDLTask only supports filtering the
    // "show columns in table" command; everything else is delegated to DDLTask.
    ShowColumnsDesc showCols = work.getShowColumnsDesc();
    try {
      if (showCols != null) {
        return showFilterColumns(showCols);
      }
    } catch (Throwable e) {
      failed(e);
      return 1;
    }

    return super.execute(driverContext);
  }

  // Record a task failure: unwrap bare RuntimeException wrappers so the root
  // cause is reported (mirrors DDLTask.failed), then log and store it.
  private void failed(Throwable e) {
    while (e.getCause() != null && e.getClass() == RuntimeException.class) {
      e = e.getCause();
    }
    setException(e);
    LOG.error(stringifyException(e));
  }

  /**
   * Handle "show columns in table": write only the columns the subject is
   * authorized to see into the statement's result file.
   *
   * @param showCols descriptor carrying the table name and result file path
   * @return 0 on success; I/O problems are rethrown as {@link HiveException}
   */
  private int showFilterColumns(ShowColumnsDesc showCols) throws HiveException {
    Table table = Hive.get(conf).getTable(showCols.getTableName());

    // write the results in the file
    DataOutputStream outStream = null;
    try {
      Path resFile = new Path(showCols.getResFile());
      FileSystem fs = resFile.getFileSystem(conf);
      outStream = fs.create(resFile);

      List<FieldSchema> cols = table.getCols();
      cols.addAll(table.getPartCols());
      // In case the query is served by HiveServer2, don't pad it with spaces,
      // as HiveServer2 output is consumed by JDBC/ODBC clients.
      boolean isOutputPadded = !SessionState.get().isHiveServerQuery();
      outStream.writeBytes(MetaDataFormatUtils.getAllColumnsInformation(
          filterColumns(cols, table), false, isOutputPadded, null));
      outStream.close();
      outStream = null;
    } catch (IOException e) {
      throw new HiveException(e, ErrorMsg.GENERIC_ERROR);
    } finally {
      IOUtils.closeStream(outStream);
    }
    return 0;
  }

  // Keep only the columns the subject holds a Sentry privilege on.
  // (Renamed from the original typo "fiterColumns"; the method is private,
  // so the rename is interface-safe.)
  private List<FieldSchema> filterColumns(List<FieldSchema> cols, Table table) throws HiveException {
    return HiveAuthzBindingHook.filterShowColumns(getHiveAuthzBinding(),
        cols, getStmtOperation(), getSubject().getName(), table.getTableName(), table.getDbName());
  }
}
org.apache.hadoop.hive.ql.hooks.Entity; @@ -45,6 +48,7 @@ import org.apache.hadoop.hive.ql.parse.BaseSemanticAnalyzer; import org.apache.hadoop.hive.ql.parse.HiveParser; import org.apache.hadoop.hive.ql.parse.HiveSemanticAnalyzerHookContext; import org.apache.hadoop.hive.ql.parse.SemanticException; +import org.apache.hadoop.hive.ql.plan.DDLWork; import org.apache.hadoop.hive.ql.plan.HiveOperation; import org.apache.hadoop.hive.ql.session.SessionState; import org.apache.sentry.binding.hive.authz.HiveAuthzBinding; @@ -335,6 +339,22 @@ public class HiveAuthzBindingHook extends AbstractSemanticAnalyzerHook { // We don't handle authorizing this statement return; } + + /** + * Replace DDLTask using the SentryFilterDDLTask for protection, + * such as "show column" only allow show some column that user can access to. + * SENTRY-847 + */ + for (int i = 0; i < rootTasks.size(); i++) { + Task<? extends Serializable> task = rootTasks.get(i); + if (task instanceof DDLTask) { + SentryFilterDDLTask filterTask = + new SentryFilterDDLTask(hiveAuthzBinding, subject, stmtOperation); + filterTask.setWork((DDLWork)task.getWork()); + rootTasks.set(i, filterTask); + } + } + authorizeWithHiveBindings(context, stmtAuthObject, stmtOperation); } catch (AuthorizationException e) { executeOnFailureHooks(context, stmtOperation, e); @@ -506,7 +526,19 @@ public class HiveAuthzBindingHook extends AbstractSemanticAnalyzerHook { inputHierarchy.add(connectHierarchy); outputHierarchy.add(connectHierarchy); break; - + case COLUMN: + for (ReadEntity readEntity: inputs) { + if (readEntity.getAccessedColumns() != null && !readEntity.getAccessedColumns().isEmpty()) { + addColumnHierarchy(inputHierarchy, readEntity); + } else { + List<DBModelAuthorizable> entityHierarchy = new ArrayList<DBModelAuthorizable>(); + entityHierarchy.add(hiveAuthzBinding.getAuthServer()); + entityHierarchy.addAll(getAuthzHierarchyFromEntity(readEntity)); + entityHierarchy.add(Column.ALL); + inputHierarchy.add(entityHierarchy); + 
} + } + break; default: throw new AuthorizationException("Unknown operation scope type " + stmtAuthObject.getOperationScope().toString()); @@ -692,6 +724,42 @@ public class HiveAuthzBindingHook extends AbstractSemanticAnalyzerHook { return filteredResult; } + public static List<FieldSchema> filterShowColumns( + HiveAuthzBinding hiveAuthzBinding, List<FieldSchema> cols, + HiveOperation operation, String userName, String tableName, String dbName) + throws SemanticException { + List<FieldSchema> filteredResult = new ArrayList<FieldSchema>(); + Subject subject = new Subject(userName); + HiveAuthzPrivileges ColumnMetaDataPrivilege = + HiveAuthzPrivilegesMap.getHiveAuthzPrivileges(HiveOperation.SHOWCOLUMNS); + + Database database = new Database(dbName); + Table table = new Table(tableName); + for (FieldSchema col : cols) { + // if user has privileges on column, add to filtered list, else discard + List<List<DBModelAuthorizable>> inputHierarchy = new ArrayList<List<DBModelAuthorizable>>(); + List<List<DBModelAuthorizable>> outputHierarchy = new ArrayList<List<DBModelAuthorizable>>(); + List<DBModelAuthorizable> externalAuthorizableHierarchy = new ArrayList<DBModelAuthorizable>(); + externalAuthorizableHierarchy.add(hiveAuthzBinding.getAuthServer()); + externalAuthorizableHierarchy.add(database); + externalAuthorizableHierarchy.add(table); + externalAuthorizableHierarchy.add(new Column(col.getName())); + inputHierarchy.add(externalAuthorizableHierarchy); + + try { + hiveAuthzBinding.authorize(operation, ColumnMetaDataPrivilege, subject, + inputHierarchy, outputHierarchy); + filteredResult.add(col); + } catch (AuthorizationException e) { + // squash the exception, user doesn't have privileges, so the column is + // not added to + // filtered list. 
+ ; + } + } + return filteredResult; + } + public static List<String> filterShowDatabases( HiveAuthzBinding hiveAuthzBinding, List<String> queryResult, HiveOperation operation, String userName) throws SemanticException { http://git-wip-us.apache.org/repos/asf/incubator-sentry/blob/91205730/sentry-binding/sentry-binding-hive/src/main/java/org/apache/sentry/binding/hive/authz/HiveAuthzPrivileges.java ---------------------------------------------------------------------- diff --git a/sentry-binding/sentry-binding-hive/src/main/java/org/apache/sentry/binding/hive/authz/HiveAuthzPrivileges.java b/sentry-binding/sentry-binding-hive/src/main/java/org/apache/sentry/binding/hive/authz/HiveAuthzPrivileges.java index 8cd82ef..f164b30 100644 --- a/sentry-binding/sentry-binding-hive/src/main/java/org/apache/sentry/binding/hive/authz/HiveAuthzPrivileges.java +++ b/sentry-binding/sentry-binding-hive/src/main/java/org/apache/sentry/binding/hive/authz/HiveAuthzPrivileges.java @@ -52,7 +52,8 @@ public class HiveAuthzPrivileges { DATABASE, TABLE, FUNCTION, - CONNECT + CONNECT, + COLUMN } public static enum HiveExtendedOperation { http://git-wip-us.apache.org/repos/asf/incubator-sentry/blob/91205730/sentry-binding/sentry-binding-hive/src/main/java/org/apache/sentry/binding/hive/authz/HiveAuthzPrivilegesMap.java ---------------------------------------------------------------------- diff --git a/sentry-binding/sentry-binding-hive/src/main/java/org/apache/sentry/binding/hive/authz/HiveAuthzPrivilegesMap.java b/sentry-binding/sentry-binding-hive/src/main/java/org/apache/sentry/binding/hive/authz/HiveAuthzPrivilegesMap.java index 0291b6c..e721555 100644 --- a/sentry-binding/sentry-binding-hive/src/main/java/org/apache/sentry/binding/hive/authz/HiveAuthzPrivilegesMap.java +++ b/sentry-binding/sentry-binding-hive/src/main/java/org/apache/sentry/binding/hive/authz/HiveAuthzPrivilegesMap.java @@ -140,6 +140,12 @@ public class HiveAuthzPrivilegesMap { setOperationType(HiveOperationType.INFO). 
build(); + HiveAuthzPrivileges ColumnMetaDataPrivilege = new HiveAuthzPrivileges.AuthzPrivilegeBuilder(). + addInputObjectPriviledge(AuthorizableType.Column, EnumSet.of(DBModelAction.SELECT, DBModelAction.INSERT)). + setOperationScope(HiveOperationScope.COLUMN). + setOperationType(HiveOperationType.INFO). + build(); + HiveAuthzPrivileges dbImportPrivilege = new HiveAuthzPrivileges.AuthzPrivilegeBuilder(). addOutputObjectPriviledge(AuthorizableType.Db, EnumSet.of(DBModelAction.CREATE)). addInputObjectPriviledge(AuthorizableType.URI, EnumSet.of(DBModelAction.ALL)). @@ -255,9 +261,11 @@ public class HiveAuthzPrivilegesMap { hiveAuthzStmtPrivMap.put(HiveOperation.CREATEFUNCTION, functionPrivilege); hiveAuthzStmtPrivMap.put(HiveOperation.DROPFUNCTION, functionPrivilege); + // SHOWCOLUMNS + hiveAuthzStmtPrivMap.put(HiveOperation.SHOWCOLUMNS, ColumnMetaDataPrivilege); + // SHOWDATABASES // SHOWTABLES - hiveAuthzStmtPrivMap.put(HiveOperation.SHOWCOLUMNS, tableMetaDataPrivilege); hiveAuthzStmtPrivMap.put(HiveOperation.SHOW_TABLESTATUS, tableMetaDataPrivilege); hiveAuthzStmtPrivMap.put(HiveOperation.SHOW_TBLPROPERTIES, tableMetaDataPrivilege); hiveAuthzStmtPrivMap.put(HiveOperation.SHOW_CREATETABLE, tableMetaDataPrivilege); http://git-wip-us.apache.org/repos/asf/incubator-sentry/blob/91205730/sentry-provider/sentry-provider-file/src/main/java/org/apache/sentry/provider/file/SimpleFileProviderBackend.java ---------------------------------------------------------------------- diff --git a/sentry-provider/sentry-provider-file/src/main/java/org/apache/sentry/provider/file/SimpleFileProviderBackend.java b/sentry-provider/sentry-provider-file/src/main/java/org/apache/sentry/provider/file/SimpleFileProviderBackend.java index 526a0e0..1b83c0d 100644 --- a/sentry-provider/sentry-provider-file/src/main/java/org/apache/sentry/provider/file/SimpleFileProviderBackend.java +++ 
b/sentry-provider/sentry-provider-file/src/main/java/org/apache/sentry/provider/file/SimpleFileProviderBackend.java @@ -183,7 +183,7 @@ public class SimpleFileProviderBackend implements ProviderBackend { @Override public void close() { - groupRolePrivilegeTable.clear(); + // SENTRY-847 will use HiveAuthBinding again, so groupRolePrivilegeTable shouldn't clear itself } @Override http://git-wip-us.apache.org/repos/asf/incubator-sentry/blob/91205730/sentry-tests/sentry-tests-hive/src/test/java/org/apache/sentry/tests/e2e/dbprovider/TestColumnEndToEnd.java ---------------------------------------------------------------------- diff --git a/sentry-tests/sentry-tests-hive/src/test/java/org/apache/sentry/tests/e2e/dbprovider/TestColumnEndToEnd.java b/sentry-tests/sentry-tests-hive/src/test/java/org/apache/sentry/tests/e2e/dbprovider/TestColumnEndToEnd.java index 159b9d9..718a736 100644 --- a/sentry-tests/sentry-tests-hive/src/test/java/org/apache/sentry/tests/e2e/dbprovider/TestColumnEndToEnd.java +++ b/sentry-tests/sentry-tests-hive/src/test/java/org/apache/sentry/tests/e2e/dbprovider/TestColumnEndToEnd.java @@ -17,17 +17,22 @@ package org.apache.sentry.tests.e2e.dbprovider; +import static junit.framework.Assert.fail; import static org.junit.Assert.assertTrue; import java.io.File; import java.io.FileOutputStream; import java.sql.Connection; +import java.sql.ResultSet; import java.sql.SQLException; import java.sql.Statement; +import java.util.ArrayList; +import java.util.List; import org.apache.sentry.provider.db.SentryAccessDeniedException; import org.apache.sentry.provider.file.PolicyFile; import org.apache.sentry.tests.e2e.hive.AbstractTestWithStaticConfiguration; +import org.junit.Assert; import org.junit.Before; import org.junit.BeforeClass; import org.junit.Test; @@ -272,4 +277,81 @@ public class TestColumnEndToEnd extends AbstractTestWithStaticConfiguration { statement.close(); connection.close(); } + + @Test + public void testShowColumns() throws Exception { + // 
grant select on test_tb(s) to USER1_1 + Connection connection = context.createConnection(ADMIN1); + Statement statement = context.createStatement(connection); + statement.execute("CREATE database " + DB1); + statement.execute("use " + DB1); + statement.execute("CREATE TABLE test_tb (s string, i string)"); + statement.execute("CREATE ROLE user_role1"); + statement.execute("GRANT SELECT (s) ON TABLE test_tb TO ROLE user_role1"); + statement.execute("GRANT ROLE user_role1 TO GROUP " + USERGROUP1); + statement.close(); + connection.close(); + + // USER1_1 executes "show columns in test_tb" and gets the s column information + connection = context.createConnection(USER1_1); + statement = context.createStatement(connection); + statement.execute("use " + DB1); + ResultSet res = statement.executeQuery("show columns in test_tb"); + + List<String> expectedResult = new ArrayList<String>(); + List<String> returnedResult = new ArrayList<String>(); + expectedResult.add("s"); + while (res.next()) { + returnedResult.add(res.getString(1).trim()); + } + validateReturnedResult(expectedResult, returnedResult); + returnedResult.clear(); + expectedResult.clear(); + res.close(); + + statement.close(); + connection.close(); + + // grant select on test_tb(s, i) to USER2_1 + connection = context.createConnection(ADMIN1); + statement = context.createStatement(connection); + statement.execute("use " + DB1); + statement.execute("CREATE ROLE user_role2"); + statement.execute("GRANT SELECT(s, i) ON TABLE test_tb TO ROLE user_role2"); + statement.execute("GRANT ROLE user_role2 TO GROUP " + USERGROUP2); + statement.close(); + connection.close(); + + // USER2_1 executes "show columns in test_tb" and gets the s,i columns information + connection = context.createConnection(USER2_1); + statement = context.createStatement(connection); + statement.execute("use " + DB1); + res = statement.executeQuery("show columns in test_tb"); + + expectedResult.add("s"); + expectedResult.add("i"); + while (res.next()) 
{ + returnedResult.add(res.getString(1).trim()); + } + validateReturnedResult(expectedResult, returnedResult); + returnedResult.clear(); + expectedResult.clear(); + res.close(); + + statement.close(); + connection.close(); + + // USER3_1 executes "show columns in test_tb" and the exception will be thrown + connection = context.createConnection(USER3_1); + statement = context.createStatement(connection); + try { + // USER3_1 has no privilege on any column, so "show columns in test_tb" will throw an exception + statement.execute("show columns in db_1.test_tb"); + fail("No valid privileges exception should have been thrown"); + } catch (Exception e) { + } + + statement.close(); + connection.close(); + } }