SENTRY-1839: Fork files from sentry-binding-hive-common package to sentry-binding-hive and sentry-binding-hive-v2 packages. (kalyan kumar kalvagadda reviewed by Colm O hEigeartaigh)
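The fork duplicates the former sentry-binding-hive-common classes into each binding module under its own package, so consuming code now imports one of the two copies. A hedged sketch of the resulting imports, with package names taken from the file list below:

    // Hive v1 binding (sentry-binding-hive): the base hook moves under the authz subpackage
    import org.apache.sentry.binding.hive.authz.HiveAuthzBindingHookBase;

    // Hive v2 binding (sentry-binding-hive-v2): a forked copy with a V2 suffix
    import org.apache.sentry.binding.hive.v2.HiveAuthzBindingHookBaseV2;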
Project: http://git-wip-us.apache.org/repos/asf/sentry/repo
Commit: http://git-wip-us.apache.org/repos/asf/sentry/commit/b19cb01b
Tree: http://git-wip-us.apache.org/repos/asf/sentry/tree/b19cb01b
Diff: http://git-wip-us.apache.org/repos/asf/sentry/diff/b19cb01b

Branch: refs/heads/master
Commit: b19cb01b4df53a4aa2d1bc269ce3e24a8b265ba9
Parents: 3965d17
Author: Kalyan Kumar Kalvagadda <[email protected]>
Authored: Mon Jul 31 06:36:05 2017 -0500
Committer: Kalyan Kumar Kalvagadda <[email protected]>
Committed: Mon Jul 31 06:37:06 2017 -0500

----------------------------------------------------------------------
 .../hive/ql/exec/SentryFilterDDLTask.java       | 162 ----
 .../binding/hive/HiveAuthzBindingHookBase.java  | 880 -------------------
 .../binding/hive/authz/SentryConfigTool.java    | 643 --------------
 .../metastore/AuthorizingObjectStoreBase.java   | 412 ---------
 .../metastore/MetastoreAuthzBindingBase.java    | 453 ----------
 .../metastore/SentryHiveMetaStoreClient.java    | 161 ----
 .../metastore/SentryMetaStoreFilterHook.java    | 201 -----
 .../SentryMetastorePostEventListenerBase.java   | 409 ---------
 .../hive/ql/exec/SentryFilterDDLTask.java       | 162 ++++
 .../binding/hive/authz/SentryConfigTool.java    | 643 ++++++++++++++
 .../hive/v2/HiveAuthzBindingHookBaseV2.java     | 880 +++++++++++++++++++
 .../binding/hive/v2/HiveAuthzBindingHookV2.java |   4 +-
 .../hive/v2/HiveAuthzBindingSessionHookV2.java  |   4 +-
 .../hive/v2/SentryAuthorizerFactory.java        |   4 +-
 .../metastore/AuthorizingObjectStoreBaseV2.java | 412 +++++++++
 .../v2/metastore/AuthorizingObjectStoreV2.java  |   6 +-
 .../metastore/MetastoreAuthzBindingBaseV2.java  | 453 ++++++++++
 .../v2/metastore/MetastoreAuthzBindingV2.java   |   4 +-
 .../metastore/SentryHiveMetaStoreClientV2.java  | 161 ++++
 .../v2/metastore/SentryMetaStoreFilterHook.java | 201 +++++
 .../SentryMetastorePostEventListenerBaseV2.java | 409 +++++++++
 .../SentryMetastorePostEventListenerV2.java     |   4 +-
 .../hive/ql/exec/SentryFilterDDLTask.java       | 161 ++++
 .../hive/ql/exec/SentryGrantRevokeTask.java     |   2 +-
 .../binding/hive/HiveAuthzBindingHook.java      |   1 +
 .../hive/HiveAuthzBindingSessionHook.java       |   2 +-
 .../hive/authz/HiveAuthzBindingHookBase.java    | 880 +++++++++++++++
 .../binding/hive/authz/SentryConfigTool.java    | 642 ++++++++
 .../metastore/AuthorizingObjectStore.java       |   2 +-
 .../metastore/AuthorizingObjectStoreBase.java   | 410 +++++++++
 .../metastore/MetastoreAuthzBindingBase.java    | 451 ++++++++++
 .../metastore/SentryHiveMetaStoreClient.java    | 161 ++++
 .../metastore/SentryMetaStoreFilterHook.java    | 201 +++++
 .../SentryMetastorePostEventListenerBase.java   | 409 +++++++++
 .../org/apache/sentry/binding/hive/TestURI.java |   1 +
 .../tests/e2e/minisentry/InternalSentrySrv.java |   1 -
 36 files changed, 6654 insertions(+), 3338 deletions(-)
----------------------------------------------------------------------

http://git-wip-us.apache.org/repos/asf/sentry/blob/b19cb01b/sentry-binding/sentry-binding-hive-common/src/main/java/org/apache/hadoop/hive/ql/exec/SentryFilterDDLTask.java
----------------------------------------------------------------------
diff --git a/sentry-binding/sentry-binding-hive-common/src/main/java/org/apache/hadoop/hive/ql/exec/SentryFilterDDLTask.java b/sentry-binding/sentry-binding-hive-common/src/main/java/org/apache/hadoop/hive/ql/exec/SentryFilterDDLTask.java
deleted file mode 100644
index e257360..0000000
--- a/sentry-binding/sentry-binding-hive-common/src/main/java/org/apache/hadoop/hive/ql/exec/SentryFilterDDLTask.java
+++ /dev/null
@@ -1,162 +0,0 @@
-/*
- * Licensed to the Apache
Software Foundation (ASF) under one or more - * contributor license agreements. See the NOTICE file distributed with - * this work for additional information regarding copyright ownership. - * The ASF licenses this file to You under the Apache License, Version 2.0 - * (the "License"); you may not use this file except in compliance with - * the License. You may obtain a copy of the License at - * - * http://www.apache.org/licenses/LICENSE-2.0 - * - * Unless required by applicable law or agreed to in writing, software - * distributed under the License is distributed on an "AS IS" BASIS, - * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. - * See the License for the specific language governing permissions and - * limitations under the License. - */ -package org.apache.hadoop.hive.ql.exec; - -import static org.apache.hadoop.util.StringUtils.stringifyException; - -import java.io.DataOutputStream; -import java.io.IOException; -import java.util.List; - -import org.apache.commons.logging.Log; -import org.apache.commons.logging.LogFactory; -import org.apache.hadoop.fs.FileSystem; -import org.apache.hadoop.fs.Path; -import org.apache.hadoop.hive.metastore.api.FieldSchema; -import org.apache.hadoop.hive.ql.DriverContext; -import org.apache.hadoop.hive.ql.ErrorMsg; -import org.apache.hadoop.hive.ql.metadata.Hive; -import org.apache.hadoop.hive.ql.metadata.HiveException; -import org.apache.hadoop.hive.ql.metadata.Table; -import org.apache.hadoop.hive.ql.metadata.formatting.MetaDataFormatUtils; -import org.apache.hadoop.hive.ql.plan.HiveOperation; -import org.apache.hadoop.hive.ql.plan.ShowColumnsDesc; -import org.apache.hadoop.hive.ql.session.SessionState; -import org.apache.hadoop.io.IOUtils; -import org.apache.sentry.binding.hive.HiveAuthzBindingHookBase; -import org.apache.sentry.binding.hive.authz.HiveAuthzBinding; -import org.apache.sentry.core.common.Subject; - -import com.google.common.base.Preconditions; - -public class SentryFilterDDLTask extends DDLTask { - private static final long serialVersionUID = 1L; - private static final Log LOG = LogFactory.getLog(SentryFilterDDLTask.class); - - private HiveAuthzBinding hiveAuthzBinding; - private Subject subject; - private HiveOperation stmtOperation; - - public SentryFilterDDLTask(HiveAuthzBinding hiveAuthzBinding, Subject subject, - HiveOperation stmtOperation) { - Preconditions.checkNotNull(hiveAuthzBinding); - Preconditions.checkNotNull(subject); - Preconditions.checkNotNull(stmtOperation); - - this.hiveAuthzBinding = hiveAuthzBinding; - this.subject = subject; - this.stmtOperation = stmtOperation; - } - - public HiveAuthzBinding getHiveAuthzBinding() { - return hiveAuthzBinding; - } - - public Subject getSubject() { - return subject; - } - - public HiveOperation getStmtOperation() { - return stmtOperation; - } - - @Override - public int execute(DriverContext driverContext) { - // Currently the SentryFilterDDLTask only supports filter the "show columns in table " command. 
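How this filtering task gets installed is not shown in this file. A hypothetical wiring sketch, assuming the semantic-analyzer hook swaps it in for the planned DDLTask (the copyDDLTask helper defined at the bottom of this class exists to clone the original task's state):

    SentryFilterDDLTask filterTask =
        new SentryFilterDDLTask(hiveAuthzBinding, subject, stmtOperation);
    filterTask.copyDDLTask(originalDDLTask); // take over work, child tasks, ids, etc.
    // the query plan would then execute filterTask in place of originalDDLTask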
- ShowColumnsDesc showCols = work.getShowColumnsDesc(); - try { - if (showCols != null) { - return showFilterColumns(showCols); - } - } catch (Throwable e) { - failed(e); - return 1; - } - - return super.execute(driverContext); - } - - private void failed(Throwable e) { - // Get the cause of the exception if available - Throwable error = e; - while (error.getCause() != null && error.getClass() == RuntimeException.class) { - error = error.getCause(); - } - setException(error); - LOG.error(stringifyException(error)); - } - - /** - * Filter the command "show columns in table" - * - */ - private int showFilterColumns(ShowColumnsDesc showCols) throws HiveException { - Table table = Hive.get(conf).getTable(showCols.getTableName()); - - // write the results in the file - DataOutputStream outStream = null; - try { - Path resFile = new Path(showCols.getResFile()); - FileSystem fs = resFile.getFileSystem(conf); - outStream = fs.create(resFile); - - List<FieldSchema> cols = table.getCols(); - cols.addAll(table.getPartCols()); - // In case the query is served by HiveServer2, don't pad it with spaces, - // as HiveServer2 output is consumed by JDBC/ODBC clients. - boolean isOutputPadded = !SessionState.get().isHiveServerQuery(); - outStream.writeBytes(MetaDataFormatUtils.getAllColumnsInformation( - fiterColumns(cols, table), false, isOutputPadded, null)); - outStream.close(); - outStream = null; - } catch (IOException e) { - throw new HiveException(e, ErrorMsg.GENERIC_ERROR); - } finally { - IOUtils.closeStream(outStream); - } - return 0; - } - - private List<FieldSchema> fiterColumns(List<FieldSchema> cols, Table table) throws HiveException { - // filter some columns that the subject has privilege on - return HiveAuthzBindingHookBase.filterShowColumns(getHiveAuthzBinding(), - cols, getStmtOperation(), getSubject().getName(), table.getTableName(), table.getDbName()); - } - - public void copyDDLTask(DDLTask ddlTask) { - work = ddlTask.getWork(); - rootTask = ddlTask.isRootTask(); - childTasks = ddlTask.getChildTasks(); - parentTasks = ddlTask.getParentTasks(); - backupTask = ddlTask.getBackupTask(); - backupChildrenTasks = ddlTask.getBackupChildrenTasks(); - started = ddlTask.started(); - isdone = ddlTask.done(); - queued = ddlTask.getQueued(); - id = ddlTask.getId(); - taskCounters = ddlTask.getCounters(); - feedSubscribers = ddlTask.getFeedSubscribers(); - taskTag = ddlTask.getTaskTag(); - setLocalMode(ddlTask.isLocalMode()); - setRetryCmdWhenFail(ddlTask.ifRetryCmdWhenFail()); - queryPlan = ddlTask.getQueryPlan(); - jobID = ddlTask.getJobID(); - setException(ddlTask.getException()); - console = ddlTask.console; - setFetchSource(ddlTask.isFetchSource()); - } -} http://git-wip-us.apache.org/repos/asf/sentry/blob/b19cb01b/sentry-binding/sentry-binding-hive-common/src/main/java/org/apache/sentry/binding/hive/HiveAuthzBindingHookBase.java ---------------------------------------------------------------------- diff --git a/sentry-binding/sentry-binding-hive-common/src/main/java/org/apache/sentry/binding/hive/HiveAuthzBindingHookBase.java b/sentry-binding/sentry-binding-hive-common/src/main/java/org/apache/sentry/binding/hive/HiveAuthzBindingHookBase.java deleted file mode 100644 index c4daea1..0000000 --- a/sentry-binding/sentry-binding-hive-common/src/main/java/org/apache/sentry/binding/hive/HiveAuthzBindingHookBase.java +++ /dev/null @@ -1,880 +0,0 @@ -/* - * Licensed to the Apache Software Foundation (ASF) under one or more - * contributor license agreements. 
See the NOTICE file distributed with - * this work for additional information regarding copyright ownership. - * The ASF licenses this file to You under the Apache License, Version 2.0 - * (the "License"); you may not use this file except in compliance with - * the License. You may obtain a copy of the License at - * - * http://www.apache.org/licenses/LICENSE-2.0 - * - * Unless required by applicable law or agreed to in writing, software - * distributed under the License is distributed on an "AS IS" BASIS, - * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. - * See the License for the specific language governing permissions and - * limitations under the License. - */ -package org.apache.sentry.binding.hive; - -import static org.apache.hadoop.hive.metastore.MetaStoreUtils.DEFAULT_DATABASE_NAME; - -import java.io.Serializable; -import java.net.MalformedURLException; -import java.net.URI; -import java.net.URL; -import java.security.CodeSource; -import java.util.ArrayList; -import java.util.Arrays; -import java.util.EnumSet; -import java.util.List; -import java.util.Set; - -import com.google.common.base.Preconditions; -import org.apache.hadoop.fs.FileSystem; -import org.apache.hadoop.fs.Path; -import org.apache.hadoop.hive.common.JavaUtils; -import org.apache.hadoop.hive.conf.HiveConf; -import org.apache.hadoop.hive.conf.HiveConf.ConfVars; -import org.apache.hadoop.hive.metastore.api.FieldSchema; -import org.apache.hadoop.hive.ql.exec.FunctionRegistry; -import org.apache.hadoop.hive.ql.exec.Task; -import org.apache.hadoop.hive.ql.exec.Utilities; -import org.apache.hadoop.hive.ql.hooks.Entity; -import org.apache.hadoop.hive.ql.hooks.Entity.Type; -import org.apache.hadoop.hive.ql.hooks.Hook; -import org.apache.hadoop.hive.ql.hooks.ReadEntity; -import org.apache.hadoop.hive.ql.hooks.WriteEntity; -import org.apache.hadoop.hive.ql.metadata.AuthorizationException; -import org.apache.hadoop.hive.ql.parse.ASTNode; -import org.apache.hadoop.hive.ql.parse.AbstractSemanticAnalyzerHook; -import org.apache.hadoop.hive.ql.parse.BaseSemanticAnalyzer; -import org.apache.hadoop.hive.ql.parse.HiveParser; -import org.apache.hadoop.hive.ql.parse.HiveSemanticAnalyzerHookContext; -import org.apache.hadoop.hive.ql.parse.SemanticException; -import org.apache.hadoop.hive.ql.plan.HiveOperation; -import org.apache.hadoop.hive.ql.plan.PlanUtils; -import org.apache.hadoop.hive.ql.session.SessionState; -import org.apache.sentry.binding.hive.authz.HiveAuthzBinding; -import org.apache.sentry.binding.hive.authz.HiveAuthzPrivileges; -import org.apache.sentry.binding.hive.authz.HiveAuthzPrivileges.HiveOperationScope; -import org.apache.sentry.binding.hive.authz.HiveAuthzPrivileges.HiveOperationType; -import org.apache.sentry.binding.hive.conf.HiveAuthzConf; -import org.apache.sentry.core.common.Subject; -import org.apache.sentry.core.common.utils.PathUtils; -import org.apache.sentry.core.model.db.AccessURI; -import org.apache.sentry.core.model.db.Column; -import org.apache.sentry.core.model.db.DBModelAction; -import org.apache.sentry.core.model.db.DBModelAuthorizable; -import org.apache.sentry.core.model.db.DBModelAuthorizable.AuthorizableType; -import org.apache.sentry.core.model.db.Database; -import org.apache.sentry.core.model.db.Table; -import org.apache.sentry.provider.cache.PrivilegeCache; -import org.apache.sentry.provider.cache.SimplePrivilegeCache; -import org.apache.sentry.provider.common.AuthorizationProvider; -import org.slf4j.Logger; -import org.slf4j.LoggerFactory; - -import 
com.google.common.annotations.VisibleForTesting; -import com.google.common.base.Splitter; -import com.google.common.collect.ImmutableList; -import com.google.common.collect.Lists; -import com.google.common.collect.Sets; - -public abstract class HiveAuthzBindingHookBase extends AbstractSemanticAnalyzerHook { - private static final Logger LOG = LoggerFactory - .getLogger(HiveAuthzBindingHookBase.class); - protected final HiveAuthzBinding hiveAuthzBinding; - protected final HiveAuthzConf authzConf; - protected Database currDB = Database.ALL; - protected Table currTab; - protected List<AccessURI> udfURIs; - protected AccessURI serdeURI; - protected AccessURI partitionURI; - protected Table currOutTab = null; - protected Database currOutDB = null; - protected final List<String> serdeWhiteList; - protected boolean serdeURIPrivilegesEnabled; - - protected final static HiveAuthzPrivileges columnMetaDataPrivilege = - new HiveAuthzPrivileges.AuthzPrivilegeBuilder() - .addInputObjectPriviledge(AuthorizableType.Column, - EnumSet.of(DBModelAction.SELECT, DBModelAction.INSERT)) - .setOperationScope(HiveOperationScope.COLUMN).setOperationType(HiveOperationType.INFO) - .build(); - - // True if this is a basic DESCRIBE <table> operation. False for other DESCRIBE variants - // like DESCRIBE [FORMATTED|EXTENDED]. Required because Hive treats these stmts as the same - // HiveOperationType, but we want to enforces different privileges on each statement. - // Basic DESCRIBE <table> is allowed with only column-level privs, while the variants - // require table-level privileges. - protected boolean isDescTableBasic = false; - - public HiveAuthzBindingHookBase() throws Exception { - SessionState session = SessionState.get(); - if(session == null) { - throw new IllegalStateException("Session has not been started"); - } - // HACK: set a random classname to force the Auth V2 in Hive - SessionState.get().setAuthorizer(null); - - HiveConf hiveConf = session.getConf(); - if(hiveConf == null) { - throw new IllegalStateException("Session HiveConf is null"); - } - authzConf = loadAuthzConf(hiveConf); - udfURIs = Lists.newArrayList(); - hiveAuthzBinding = new HiveAuthzBinding(hiveConf, authzConf); - String serdeWhiteLists = - authzConf.get(HiveAuthzConf.HIVE_SENTRY_SERDE_WHITELIST, - HiveAuthzConf.HIVE_SENTRY_SERDE_WHITELIST_DEFAULT); - serdeWhiteList = Arrays.asList(serdeWhiteLists.split(",")); - serdeURIPrivilegesEnabled = - authzConf.getBoolean(HiveAuthzConf.HIVE_SENTRY_SERDE_URI_PRIVILIEGES_ENABLED, - HiveAuthzConf.HIVE_SENTRY_SERDE_URI_PRIVILIEGES_ENABLED_DEFAULT); - - FunctionRegistry.setupPermissionsForBuiltinUDFs("", HiveAuthzConf.HIVE_UDF_BLACK_LIST); - } - - public static HiveAuthzConf loadAuthzConf(HiveConf hiveConf) { - boolean depreicatedConfigFile = false; - HiveAuthzConf newAuthzConf = null; - String hiveAuthzConf = hiveConf.get(HiveAuthzConf.HIVE_SENTRY_CONF_URL); - if(hiveAuthzConf == null || (hiveAuthzConf = hiveAuthzConf.trim()).isEmpty()) { - hiveAuthzConf = hiveConf.get(HiveAuthzConf.HIVE_ACCESS_CONF_URL); - depreicatedConfigFile = true; - } - - if(hiveAuthzConf == null || (hiveAuthzConf = hiveAuthzConf.trim()).isEmpty()) { - throw new IllegalArgumentException("Configuration key " + HiveAuthzConf.HIVE_SENTRY_CONF_URL - + " value '" + hiveAuthzConf + "' is invalid."); - } - try { - newAuthzConf = new HiveAuthzConf(new URL(hiveAuthzConf)); - } catch (MalformedURLException e) { - if (depreicatedConfigFile) { - throw new IllegalArgumentException("Configuration key " + HiveAuthzConf.HIVE_ACCESS_CONF_URL - + 
" specifies a malformed URL '" + hiveAuthzConf + "'", e); - } else { - throw new IllegalArgumentException("Configuration key " + HiveAuthzConf.HIVE_SENTRY_CONF_URL - + " specifies a malformed URL '" + hiveAuthzConf + "'", e); - } - } - return newAuthzConf; - } - - @Override - public abstract ASTNode preAnalyze(HiveSemanticAnalyzerHookContext context, ASTNode ast) - throws SemanticException; - - /** - * Post analyze hook that invokes hive auth bindings - */ - @Override - public abstract void postAnalyze(HiveSemanticAnalyzerHookContext context, - List<Task<? extends Serializable>> rootTasks) throws SemanticException; - - protected void executeOnFailureHooks(HiveSemanticAnalyzerHookContext context, - HiveOperation hiveOp, AuthorizationException e) { - SentryOnFailureHookContext hookCtx = new SentryOnFailureHookContextImpl( - context.getCommand(), context.getInputs(), context.getOutputs(), - hiveOp, currDB, currTab, udfURIs, null, context.getUserName(), - context.getIpAddress(), e, context.getConf()); - String csHooks = authzConf.get( - HiveAuthzConf.AuthzConfVars.AUTHZ_ONFAILURE_HOOKS.getVar(), "").trim(); - - try { - for (Hook aofh : getHooks(csHooks)) { - ((SentryOnFailureHook)aofh).run(hookCtx); - } - } catch (Exception ex) { - LOG.error("Error executing hook:", ex); - } - } - - /** - * The command 'create function ... using jar <jar resources>' can create a function - * with the supplied jar resources in the command, which is translated into ASTNode being - * [functionName functionClass resourceList] and resourceList being [resourceType resourcePath]. - * This function collects all the jar paths for the supplied jar resources. - * - * @param ast the AST node for the command - * @return the jar path list if any or an empty list - */ - protected List<String> getFunctionJars(ASTNode ast) { - ASTNode resourcesNode = (ASTNode) ast.getFirstChildWithType(HiveParser.TOK_RESOURCE_LIST); - - List<String> resources = new ArrayList<String>(); - if (resourcesNode != null) { - for (int idx = 0; idx < resourcesNode.getChildCount(); ++idx) { - ASTNode resNode = (ASTNode) resourcesNode.getChild(idx); - ASTNode resTypeNode = (ASTNode) resNode.getChild(0); - ASTNode resUriNode = (ASTNode) resNode.getChild(1); - if (resTypeNode.getType() == HiveParser.TOK_JAR) { - resources.add(PlanUtils.stripQuotes(resUriNode.getText())); - } - } - } - - return resources; - } - - @VisibleForTesting - protected static AccessURI extractPartition(ASTNode ast) throws SemanticException { - for (int i = 0; i < ast.getChildCount(); i++) { - ASTNode child = (ASTNode)ast.getChild(i); - if (child.getToken().getType() == HiveParser.TOK_PARTITIONLOCATION && - child.getChildCount() == 1) { - return parseURI(BaseSemanticAnalyzer. - unescapeSQLString(child.getChild(0).getText())); - } - } - return null; - } - - @VisibleForTesting - protected static AccessURI parseURI(String uri) throws SemanticException { - return parseURI(uri, false); - } - - @VisibleForTesting - protected static AccessURI parseURI(String uri, boolean isLocal) - throws SemanticException { - try { - HiveConf conf = SessionState.get().getConf(); - String warehouseDir = conf.getVar(ConfVars.METASTOREWAREHOUSE); - Path warehousePath = new Path(warehouseDir); - - // If warehousePath is an absolute path and a scheme is null and authority is null as well, - // qualified it with default file system scheme and authority. 
- if (warehousePath.isAbsoluteAndSchemeAuthorityNull()) { - URI defaultUri = FileSystem.getDefaultUri(conf); - warehousePath = warehousePath.makeQualified(defaultUri, warehousePath); - warehouseDir = warehousePath.toUri().toString(); - } - return new AccessURI(PathUtils.parseURI(warehouseDir, uri, isLocal)); - } catch (Exception e) { - throw new SemanticException("Error parsing URI " + uri + ": " + - e.getMessage(), e); - } - } - - // Find the current database for session - protected Database getCanonicalDb() { - return new Database(SessionState.get().getCurrentDatabase()); - } - - protected void extractDbTableNameFromTOKTABLE(ASTNode astNode) throws SemanticException{ - String[] fqTableName = BaseSemanticAnalyzer.getQualifiedTableName(astNode); - Preconditions.checkArgument(fqTableName.length == 2, "BaseSemanticAnalyzer.getQualifiedTableName should return " + - "an array with dbName and tableName"); - currOutDB = new Database(fqTableName[0]); - currOutTab = new Table(fqTableName[1]); - } - - /*TODO: Deprecate */ - protected Database extractDatabase(ASTNode ast) throws SemanticException { - String tableName = BaseSemanticAnalyzer.getUnescapedName(ast); - if (tableName.contains(".")) { - return new Database(tableName.split("\\.")[0]); - } else { - return getCanonicalDb(); - } - } - /*TODO: Deprecate */ - protected Table extractTable(ASTNode ast) throws SemanticException { - String tableName = BaseSemanticAnalyzer.getUnescapedName(ast); - if (tableName.contains(".")) { - return new Table(tableName.split("\\.")[1]); - } else { - return new Table(tableName); - } - } - - public static void runFailureHook(SentryOnFailureHookContext hookContext, - String csHooks) { - try { - for (Hook aofh : getHooks(csHooks)) { - ((SentryOnFailureHook) aofh).run(hookContext); - } - } catch (Exception ex) { - LOG.error("Error executing hook:", ex); - } - } - /** - * Convert the input/output entities into authorizables. generate - * authorizables for cases like Database and metadata operations where the - * compiler doesn't capture entities. invoke the hive binding to validate - * permissions - * - * @param context - * @param stmtAuthObject - * @param stmtOperation - * @throws AuthorizationException - */ - protected void authorizeWithHiveBindings(HiveSemanticAnalyzerHookContext context, - HiveAuthzPrivileges stmtAuthObject, HiveOperation stmtOperation) throws AuthorizationException { - Set<ReadEntity> inputs = context.getInputs(); - Set<WriteEntity> outputs = context.getOutputs(); - List<List<DBModelAuthorizable>> inputHierarchy = new ArrayList<List<DBModelAuthorizable>>(); - List<List<DBModelAuthorizable>> outputHierarchy = new ArrayList<List<DBModelAuthorizable>>(); - - if(LOG.isDebugEnabled()) { - LOG.debug("stmtAuthObject.getOperationScope() = " + stmtAuthObject.getOperationScope()); - LOG.debug("context.getInputs() = " + context.getInputs()); - LOG.debug("context.getOutputs() = " + context.getOutputs()); - } - - // Workaround to allow DESCRIBE <table> to be executed with only column-level privileges, while - // still authorizing DESCRIBE [EXTENDED|FORMATTED] as table-level. - // This is done by treating DESCRIBE <table> the same as SHOW COLUMNS, which only requires column - // level privs. - if (isDescTableBasic) { - stmtAuthObject = columnMetaDataPrivilege; - } - - switch (stmtAuthObject.getOperationScope()) { - - case SERVER : - // validate server level privileges if applicable. Eg create UDF,register jar etc .. 
- List<DBModelAuthorizable> serverHierarchy = new ArrayList<DBModelAuthorizable>(); - serverHierarchy.add(hiveAuthzBinding.getAuthServer()); - inputHierarchy.add(serverHierarchy); - break; - case DATABASE: - // workaround for database scope statements (create/alter/drop db) - List<DBModelAuthorizable> dbHierarchy = new ArrayList<DBModelAuthorizable>(); - dbHierarchy.add(hiveAuthzBinding.getAuthServer()); - dbHierarchy.add(currDB); - inputHierarchy.add(dbHierarchy); - - if (currOutDB != null) { - List<DBModelAuthorizable> outputDbHierarchy = new ArrayList<DBModelAuthorizable>(); - outputDbHierarchy.add(hiveAuthzBinding.getAuthServer()); - outputDbHierarchy.add(currOutDB); - outputHierarchy.add(outputDbHierarchy); - } else { - outputHierarchy.add(dbHierarchy); - } - - getInputHierarchyFromInputs(inputHierarchy, inputs); - - if (serdeURI != null) { - List<DBModelAuthorizable> serdeUriHierarchy = new ArrayList<DBModelAuthorizable>(); - serdeUriHierarchy.add(hiveAuthzBinding.getAuthServer()); - serdeUriHierarchy.add(serdeURI); - outputHierarchy.add(serdeUriHierarchy); - } - break; - case TABLE: - // workaround for add partitions - if(partitionURI != null) { - inputHierarchy.add(ImmutableList.of(hiveAuthzBinding.getAuthServer(), partitionURI)); - } - - getInputHierarchyFromInputs(inputHierarchy, inputs); - for (WriteEntity writeEntity: outputs) { - if (filterWriteEntity(writeEntity)) { - continue; - } - List<DBModelAuthorizable> entityHierarchy = new ArrayList<DBModelAuthorizable>(); - entityHierarchy.add(hiveAuthzBinding.getAuthServer()); - entityHierarchy.addAll(getAuthzHierarchyFromEntity(writeEntity)); - outputHierarchy.add(entityHierarchy); - } - // workaround for metadata queries. - // Capture the table name in pre-analyze and include that in the input entity list - if (currTab != null) { - List<DBModelAuthorizable> externalAuthorizableHierarchy = new ArrayList<DBModelAuthorizable>(); - externalAuthorizableHierarchy.add(hiveAuthzBinding.getAuthServer()); - externalAuthorizableHierarchy.add(currDB); - externalAuthorizableHierarchy.add(currTab); - inputHierarchy.add(externalAuthorizableHierarchy); - } - - - - // workaround for DDL statements - // Capture the table name in pre-analyze and include that in the output entity list - if (currOutTab != null) { - List<DBModelAuthorizable> externalAuthorizableHierarchy = new ArrayList<DBModelAuthorizable>(); - externalAuthorizableHierarchy.add(hiveAuthzBinding.getAuthServer()); - externalAuthorizableHierarchy.add(currOutDB); - externalAuthorizableHierarchy.add(currOutTab); - outputHierarchy.add(externalAuthorizableHierarchy); - } - - if (serdeURI != null) { - List<DBModelAuthorizable> serdeUriHierarchy = new ArrayList<DBModelAuthorizable>(); - serdeUriHierarchy.add(hiveAuthzBinding.getAuthServer()); - serdeUriHierarchy.add(serdeURI); - outputHierarchy.add(serdeUriHierarchy); - } - - break; - case FUNCTION: - /* The 'FUNCTION' privilege scope currently used for - * - CREATE TEMP FUNCTION - * - DROP TEMP FUNCTION. 
- */ - if (!udfURIs.isEmpty()) { - List<DBModelAuthorizable> udfUriHierarchy = new ArrayList<DBModelAuthorizable>(); - udfUriHierarchy.add(hiveAuthzBinding.getAuthServer()); - udfUriHierarchy.addAll(udfURIs); - inputHierarchy.add(udfUriHierarchy); - for (WriteEntity writeEntity : outputs) { - List<DBModelAuthorizable> entityHierarchy = new ArrayList<DBModelAuthorizable>(); - entityHierarchy.add(hiveAuthzBinding.getAuthServer()); - entityHierarchy.addAll(getAuthzHierarchyFromEntity(writeEntity)); - outputHierarchy.add(entityHierarchy); - } - } - break; - case CONNECT: - /* The 'CONNECT' is an implicit privilege scope currently used for - * - USE <db> - * It's allowed when the user has any privilege on the current database. For application - * backward compatibility, we allow (optional) implicit connect permission on 'default' db. - */ - List<DBModelAuthorizable> connectHierarchy = new ArrayList<DBModelAuthorizable>(); - connectHierarchy.add(hiveAuthzBinding.getAuthServer()); - // by default allow connect access to default db - Table currTbl = Table.ALL; - Column currCol = Column.ALL; - if (DEFAULT_DATABASE_NAME.equalsIgnoreCase(currDB.getName()) && - "false".equalsIgnoreCase(authzConf. - get(HiveAuthzConf.AuthzConfVars.AUTHZ_RESTRICT_DEFAULT_DB.getVar(), "false"))) { - currDB = Database.ALL; - currTbl = Table.SOME; - } - - connectHierarchy.add(currDB); - connectHierarchy.add(currTbl); - connectHierarchy.add(currCol); - - inputHierarchy.add(connectHierarchy); - outputHierarchy.add(connectHierarchy); - break; - case COLUMN: - for (ReadEntity readEntity: inputs) { - if (readEntity.getAccessedColumns() != null && !readEntity.getAccessedColumns().isEmpty()) { - addColumnHierarchy(inputHierarchy, readEntity); - } else { - List<DBModelAuthorizable> entityHierarchy = new ArrayList<DBModelAuthorizable>(); - entityHierarchy.add(hiveAuthzBinding.getAuthServer()); - entityHierarchy.addAll(getAuthzHierarchyFromEntity(readEntity)); - entityHierarchy.add(Column.ALL); - inputHierarchy.add(entityHierarchy); - } - } - break; - default: - throw new AuthorizationException("Unknown operation scope type " + - stmtAuthObject.getOperationScope().toString()); - } - - HiveAuthzBinding binding = null; - try { - binding = getHiveBindingWithPrivilegeCache(hiveAuthzBinding, context.getUserName()); - } catch (SemanticException e) { - // Will use the original hiveAuthzBinding - binding = hiveAuthzBinding; - } - // validate permission - binding.authorize(stmtOperation, stmtAuthObject, getCurrentSubject(context), inputHierarchy, - outputHierarchy); - } - - // Build the hierarchy of authorizable object for the given entity type. 
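For orientation, each hierarchy assembled above is a list of Server -> Database -> Table -> Column paths. A minimal sketch with hypothetical object names:

    List<List<DBModelAuthorizable>> inputHierarchy = new ArrayList<>();
    inputHierarchy.add(ImmutableList.<DBModelAuthorizable>of(
        hiveAuthzBinding.getAuthServer(), // Server
        new Database("sales"),            // hypothetical database
        new Table("orders"),              // hypothetical table
        new Column("amount")));           // hypothetical column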
- private List<DBModelAuthorizable> getAuthzHierarchyFromEntity(Entity entity) { - List<DBModelAuthorizable> objectHierarchy = new ArrayList<DBModelAuthorizable>(); - switch (entity.getType()) { - case TABLE: - objectHierarchy.add(new Database(entity.getTable().getDbName())); - objectHierarchy.add(new Table(entity.getTable().getTableName())); - break; - case PARTITION: - case DUMMYPARTITION: - objectHierarchy.add(new Database(entity.getPartition().getTable().getDbName())); - objectHierarchy.add(new Table(entity.getPartition().getTable().getTableName())); - break; - case DFS_DIR: - case LOCAL_DIR: - try { - objectHierarchy.add(parseURI(entity.toString(), - entity.getType().equals(Entity.Type.LOCAL_DIR))); - } catch (Exception e) { - throw new AuthorizationException("Failed to get File URI", e); - } - break; - case DATABASE: - case FUNCTION: - // TODO use database entities from compiler instead of capturing from AST - break; - default: - throw new UnsupportedOperationException("Unsupported entity type " + - entity.getType().name()); - } - return objectHierarchy; - } - - /** - * Add column level hierarchy to inputHierarchy - * - * @param inputHierarchy - * @param entity - * @param sentryContext - */ - protected void addColumnHierarchy(List<List<DBModelAuthorizable>> inputHierarchy, - ReadEntity entity) { - List<DBModelAuthorizable> entityHierarchy = new ArrayList<DBModelAuthorizable>(); - entityHierarchy.add(hiveAuthzBinding.getAuthServer()); - entityHierarchy.addAll(getAuthzHierarchyFromEntity(entity)); - - switch (entity.getType()) { - case TABLE: - case PARTITION: - List<String> cols = entity.getAccessedColumns(); - for (String col : cols) { - List<DBModelAuthorizable> colHierarchy = new ArrayList<DBModelAuthorizable>(entityHierarchy); - colHierarchy.add(new Column(col)); - inputHierarchy.add(colHierarchy); - } - break; - default: - inputHierarchy.add(entityHierarchy); - } - } - - /** - * Get Authorizable from inputs and put into inputHierarchy - * - * @param inputHierarchy - * @param entity - * @param sentryContext - */ - protected void getInputHierarchyFromInputs(List<List<DBModelAuthorizable>> inputHierarchy, - Set<ReadEntity> inputs) { - for (ReadEntity readEntity: inputs) { - // skip the tables/view that are part of expanded view definition - // skip the Hive generated dummy entities created for queries like 'select <expr>' - if (isChildTabForView(readEntity) || isDummyEntity(readEntity)) { - continue; - } - if (readEntity.getAccessedColumns() != null && !readEntity.getAccessedColumns().isEmpty()) { - addColumnHierarchy(inputHierarchy, readEntity); - } else { - List<DBModelAuthorizable> entityHierarchy = new ArrayList<DBModelAuthorizable>(); - entityHierarchy.add(hiveAuthzBinding.getAuthServer()); - entityHierarchy.addAll(getAuthzHierarchyFromEntity(readEntity)); - inputHierarchy.add(entityHierarchy); - } - } - } - - // Check if this write entity needs to skipped - private boolean filterWriteEntity(WriteEntity writeEntity) - throws AuthorizationException { - // skip URI validation for session scratch file URIs - if (writeEntity.isTempURI()) { - return true; - } - try { - if (writeEntity.getTyp().equals(Type.DFS_DIR) - || writeEntity.getTyp().equals(Type.LOCAL_DIR)) { - HiveConf conf = SessionState.get().getConf(); - String warehouseDir = conf.getVar(ConfVars.METASTOREWAREHOUSE); - URI scratchURI = new URI(PathUtils.parseDFSURI(warehouseDir, - conf.getVar(HiveConf.ConfVars.SCRATCHDIR))); - URI requestURI = new URI(PathUtils.parseDFSURI(warehouseDir, - 
writeEntity.getLocation().getPath())); - LOG.debug("scratchURI = " + scratchURI + ", requestURI = " + requestURI); - if (PathUtils.impliesURI(scratchURI, requestURI)) { - return true; - } - URI localScratchURI = new URI(PathUtils.parseLocalURI(conf.getVar(HiveConf.ConfVars.LOCALSCRATCHDIR))); - URI localRequestURI = new URI(PathUtils.parseLocalURI(writeEntity.getLocation().getPath())); - LOG.debug("localScratchURI = " + localScratchURI + ", localRequestURI = " + localRequestURI); - if (PathUtils.impliesURI(localScratchURI, localRequestURI)) { - return true; - } - } - } catch (Exception e) { - throw new AuthorizationException("Failed to extract uri details", e); - } - return false; - } - - public static List<String> filterShowTables( - HiveAuthzBinding hiveAuthzBinding, List<String> queryResult, - HiveOperation operation, String userName, String dbName) - throws SemanticException { - List<String> filteredResult = new ArrayList<String>(); - Subject subject = new Subject(userName); - HiveAuthzPrivileges tableMetaDataPrivilege = new HiveAuthzPrivileges.AuthzPrivilegeBuilder(). - addInputObjectPriviledge(AuthorizableType.Column, EnumSet.of(DBModelAction.SELECT, DBModelAction.INSERT)). - setOperationScope(HiveOperationScope.TABLE). - setOperationType(HiveOperationType.INFO). - build(); - - HiveAuthzBinding hiveBindingWithPrivilegeCache = getHiveBindingWithPrivilegeCache(hiveAuthzBinding, userName); - - for (String tableName : queryResult) { - // if user has privileges on table, add to filtered list, else discard - Table table = new Table(tableName); - Database database; - database = new Database(dbName); - - List<List<DBModelAuthorizable>> inputHierarchy = new ArrayList<List<DBModelAuthorizable>>(); - List<List<DBModelAuthorizable>> outputHierarchy = new ArrayList<List<DBModelAuthorizable>>(); - List<DBModelAuthorizable> externalAuthorizableHierarchy = new ArrayList<DBModelAuthorizable>(); - externalAuthorizableHierarchy.add(hiveAuthzBinding.getAuthServer()); - externalAuthorizableHierarchy.add(database); - externalAuthorizableHierarchy.add(table); - externalAuthorizableHierarchy.add(Column.ALL); - inputHierarchy.add(externalAuthorizableHierarchy); - - try { - // do the authorization by new HiveAuthzBinding with PrivilegeCache - hiveBindingWithPrivilegeCache.authorize(operation, tableMetaDataPrivilege, subject, - inputHierarchy, outputHierarchy); - filteredResult.add(table.getName()); - } catch (AuthorizationException e) { - // squash the exception, user doesn't have privileges, so the table is - // not added to - // filtered list. 
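A hedged usage sketch of this filter, with a hypothetical user and database:

    List<String> visible = HiveAuthzBindingHookBase.filterShowTables(
        hiveAuthzBinding, allTableNames, HiveOperation.SHOWTABLES, "alice", "sales");
    // only tables on which "alice" holds SELECT or INSERT survive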
- } - } - return filteredResult; - } - - public static List<FieldSchema> filterShowColumns( - HiveAuthzBinding hiveAuthzBinding, List<FieldSchema> cols, - HiveOperation operation, String userName, String tableName, String dbName) - throws SemanticException { - List<FieldSchema> filteredResult = new ArrayList<FieldSchema>(); - Subject subject = new Subject(userName); - HiveAuthzBinding hiveBindingWithPrivilegeCache = getHiveBindingWithPrivilegeCache(hiveAuthzBinding, userName); - - Database database = new Database(dbName); - Table table = new Table(tableName); - for (FieldSchema col : cols) { - // if user has privileges on column, add to filtered list, else discard - List<List<DBModelAuthorizable>> inputHierarchy = new ArrayList<List<DBModelAuthorizable>>(); - List<List<DBModelAuthorizable>> outputHierarchy = new ArrayList<List<DBModelAuthorizable>>(); - List<DBModelAuthorizable> externalAuthorizableHierarchy = new ArrayList<DBModelAuthorizable>(); - externalAuthorizableHierarchy.add(hiveAuthzBinding.getAuthServer()); - externalAuthorizableHierarchy.add(database); - externalAuthorizableHierarchy.add(table); - externalAuthorizableHierarchy.add(new Column(col.getName())); - inputHierarchy.add(externalAuthorizableHierarchy); - - try { - // do the authorization by new HiveAuthzBinding with PrivilegeCache - hiveBindingWithPrivilegeCache.authorize(operation, columnMetaDataPrivilege, subject, - inputHierarchy, outputHierarchy); - filteredResult.add(col); - } catch (AuthorizationException e) { - // squash the exception, user doesn't have privileges, so the column is - // not added to - // filtered list. - } - } - return filteredResult; - } - - public static List<String> filterShowDatabases( - HiveAuthzBinding hiveAuthzBinding, List<String> queryResult, - HiveOperation operation, String userName) throws SemanticException { - List<String> filteredResult = new ArrayList<String>(); - Subject subject = new Subject(userName); - HiveAuthzBinding hiveBindingWithPrivilegeCache = getHiveBindingWithPrivilegeCache(hiveAuthzBinding, userName); - - HiveAuthzPrivileges anyPrivilege = new HiveAuthzPrivileges.AuthzPrivilegeBuilder(). - addInputObjectPriviledge(AuthorizableType.Column, EnumSet.of(DBModelAction.SELECT, DBModelAction.INSERT)). - addInputObjectPriviledge(AuthorizableType.URI, EnumSet.of(DBModelAction.SELECT)). - setOperationScope(HiveOperationScope.CONNECT). - setOperationType(HiveOperationType.QUERY). 
- build(); - - for (String dbName:queryResult) { - // if user has privileges on database, add to filtered list, else discard - Database database = null; - - // if default is not restricted, continue - if (DEFAULT_DATABASE_NAME.equalsIgnoreCase(dbName) && "false".equalsIgnoreCase( - hiveAuthzBinding.getAuthzConf().get( - HiveAuthzConf.AuthzConfVars.AUTHZ_RESTRICT_DEFAULT_DB.getVar(), - "false"))) { - filteredResult.add(DEFAULT_DATABASE_NAME); - continue; - } - - database = new Database(dbName); - - List<List<DBModelAuthorizable>> inputHierarchy = new ArrayList<List<DBModelAuthorizable>>(); - List<List<DBModelAuthorizable>> outputHierarchy = new ArrayList<List<DBModelAuthorizable>>(); - List<DBModelAuthorizable> externalAuthorizableHierarchy = new ArrayList<DBModelAuthorizable>(); - externalAuthorizableHierarchy.add(hiveAuthzBinding.getAuthServer()); - externalAuthorizableHierarchy.add(database); - externalAuthorizableHierarchy.add(Table.ALL); - externalAuthorizableHierarchy.add(Column.ALL); - inputHierarchy.add(externalAuthorizableHierarchy); - - try { - // do the authorization by new HiveAuthzBinding with PrivilegeCache - hiveBindingWithPrivilegeCache.authorize(operation, anyPrivilege, subject, - inputHierarchy, outputHierarchy); - filteredResult.add(database.getName()); - } catch (AuthorizationException e) { - // squash the exception, user doesn't have privileges, so the table is - // not added to - // filtered list. - } - } - - return filteredResult; - } - - /** - * Check if the given read entity is a table that has parents of type Table - * Hive compiler performs a query rewrite by replacing view with its definition. In the process, tt captures both - * the original view and the tables/view that it selects from . - * The access authorization is only interested in the top level views and not the underlying tables. - * @param readEntity - * @return - */ - private boolean isChildTabForView(ReadEntity readEntity) { - // If this is a table added for view, then we need to skip that - if (!readEntity.getType().equals(Type.TABLE) && !readEntity.getType().equals(Type.PARTITION)) { - return false; - } - if (readEntity.getParents() != null && readEntity.getParents().size() > 0) { - for (ReadEntity parentEntity : readEntity.getParents()) { - if (!parentEntity.getType().equals(Type.TABLE)) { - return false; - } - } - return true; - } else { - return false; - } - } - - /** - * Returns the hooks specified in a configuration variable. The hooks are returned in a list in - * the order they were specified in the configuration variable. - * - * @param hookConfVar The configuration variable specifying a comma separated list of the hook - * class names. - * @return A list of the hooks, in the order they are listed in the value of hookConfVar - * @throws Exception - */ - private static <T extends Hook> List<T> getHooks(String csHooks) throws Exception { - - List<T> hooks = new ArrayList<T>(); - if (csHooks.isEmpty()) { - return hooks; - } - for (String hookClass : Splitter.on(",").omitEmptyStrings().trimResults().split(csHooks)) { - try { - @SuppressWarnings("unchecked") - T hook = - (T) Class.forName(hookClass, true, JavaUtils.getClassLoader()).newInstance(); - hooks.add(hook); - } catch (ClassNotFoundException e) { - LOG.error(hookClass + " Class not found:" + e.getMessage()); - throw e; - } - } - - return hooks; - } - - // Check if the given entity is identified as dummy by Hive compilers. 
- private boolean isDummyEntity(Entity entity) { - return entity.isDummy(); - } - - // create hiveBinding with PrivilegeCache - private static HiveAuthzBinding getHiveBindingWithPrivilegeCache(HiveAuthzBinding hiveAuthzBinding, - String userName) throws SemanticException { - // get the original HiveAuthzBinding, and get the user's privileges by AuthorizationProvider - AuthorizationProvider authProvider = hiveAuthzBinding.getCurrentAuthProvider(); - Set<String> userPrivileges = - authProvider.getPolicyEngine().getPrivileges( - authProvider.getGroupMapping().getGroups(userName), Sets.newHashSet(userName), - hiveAuthzBinding.getActiveRoleSet(), hiveAuthzBinding.getAuthServer()); - - // create PrivilegeCache using user's privileges - PrivilegeCache privilegeCache = new SimplePrivilegeCache(userPrivileges); - try { - // create new instance of HiveAuthzBinding whose backend provider should be SimpleCacheProviderBackend - return new HiveAuthzBinding(HiveAuthzBinding.HiveHook.HiveServer2, hiveAuthzBinding.getHiveConf(), - hiveAuthzBinding.getAuthzConf(), privilegeCache); - } catch (Exception e) { - LOG.error("Can not create HiveAuthzBinding with privilege cache."); - throw new SemanticException(e); - } - } - - private static boolean hasPrefixMatch(List<String> prefixList, final String str) { - for (String prefix : prefixList) { - if (str.startsWith(prefix)) { - return true; - } - } - - return false; - } - - /** - * Set the Serde URI privileges. If the URI privileges are not set, which serdeURI will be null, - * the URI authorization checks will be skipped. - */ - protected void setSerdeURI(String serdeClassName) throws SemanticException { - if (!serdeURIPrivilegesEnabled) { - return; - } - - // WhiteList Serde Jar can be used by any users. WhiteList checking is - // done by comparing the Java package name. The assumption is cluster - // admin will ensure there is no Java namespace collision. - // e.g org.apache.hadoop.hive.serde2 is used by hive and cluster admin should - // ensure no custom Serde class is introduced under the same namespace. 
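A worked example of the prefix check that follows, assuming the default whitelist prefix org.apache.hadoop.hive.serde2 and hypothetical class names:

    // whitelisted package prefix: no URI privilege required
    hasPrefixMatch(serdeWhiteList, "org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe"); // true
    // custom serde: prefix miss, so its jar location must be authorized as a URI
    hasPrefixMatch(serdeWhiteList, "com.example.serde.CustomSerDe"); // false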
- if (!hasPrefixMatch(serdeWhiteList, serdeClassName)) { - try { - CodeSource serdeSrc = - Class.forName(serdeClassName, true, Utilities.getSessionSpecifiedClassLoader()) - .getProtectionDomain().getCodeSource(); - if (serdeSrc == null) { - throw new SemanticException("Could not resolve the jar for Serde class " + serdeClassName); - } - - String serdeJar = serdeSrc.getLocation().getPath(); - if (serdeJar == null || serdeJar.isEmpty()) { - throw new SemanticException("Could not find the jar for Serde class " + serdeClassName - + "to validate privileges"); - } - - serdeURI = parseURI(serdeSrc.getLocation().toString(), true); - } catch (ClassNotFoundException e) { - throw new SemanticException("Error retrieving Serde class:" + e.getMessage(), e); - } - } - } - - protected HiveOperation getCurrentHiveStmtOp() { - SessionState sessState = SessionState.get(); - if (sessState == null) { - // TODO: Warn - return null; - } - return sessState.getHiveOperation(); - } - - protected Subject getCurrentSubject(HiveSemanticAnalyzerHookContext context) { - // Extract the username from the hook context - return new Subject(context.getUserName()); - } - -} http://git-wip-us.apache.org/repos/asf/sentry/blob/b19cb01b/sentry-binding/sentry-binding-hive-common/src/main/java/org/apache/sentry/binding/hive/authz/SentryConfigTool.java ---------------------------------------------------------------------- diff --git a/sentry-binding/sentry-binding-hive-common/src/main/java/org/apache/sentry/binding/hive/authz/SentryConfigTool.java b/sentry-binding/sentry-binding-hive-common/src/main/java/org/apache/sentry/binding/hive/authz/SentryConfigTool.java deleted file mode 100644 index 8a5085b..0000000 --- a/sentry-binding/sentry-binding-hive-common/src/main/java/org/apache/sentry/binding/hive/authz/SentryConfigTool.java +++ /dev/null @@ -1,643 +0,0 @@ -/* - * Licensed to the Apache Software Foundation (ASF) under one or more - * contributor license agreements. See the NOTICE file distributed with - * this work for additional information regarding copyright ownership. - * The ASF licenses this file to You under the Apache License, Version 2.0 - * (the "License"); you may not use this file except in compliance with - * the License. You may obtain a copy of the License at - * - * http://www.apache.org/licenses/LICENSE-2.0 - * - * Unless required by applicable law or agreed to in writing, software - * distributed under the License is distributed on an "AS IS" BASIS, - * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. - * See the License for the specific language governing permissions and - * limitations under the License. 
- */ - -package org.apache.sentry.binding.hive.authz; - -import java.security.CodeSource; -import java.sql.Connection; -import java.sql.DriverManager; -import java.sql.ResultSet; -import java.sql.SQLException; -import java.sql.Statement; -import java.util.Map; -import java.util.Set; - -import org.apache.commons.cli.CommandLine; -import org.apache.commons.cli.GnuParser; -import org.apache.commons.cli.HelpFormatter; -import org.apache.commons.cli.Option; -import org.apache.commons.cli.OptionGroup; -import org.apache.commons.cli.Options; -import org.apache.commons.cli.ParseException; -import org.apache.commons.cli.Parser; -import org.apache.commons.lang3.StringUtils; -import org.apache.hadoop.hive.conf.HiveConf; -import org.apache.hadoop.hive.conf.HiveConf.ConfVars; -import org.apache.hadoop.hive.ql.Driver; -import org.apache.hadoop.hive.ql.parse.SemanticException; -import org.apache.hadoop.hive.ql.processors.CommandProcessorResponse; -import org.apache.hadoop.hive.ql.session.SessionState; -import org.apache.log4j.Level; -import org.apache.log4j.LogManager; -import org.apache.sentry.Command; -import org.apache.sentry.binding.hive.HiveAuthzBindingHookBase; -import org.apache.sentry.binding.hive.SentryPolicyFileFormatFactory; -import org.apache.sentry.binding.hive.SentryPolicyFileFormatter; -import org.apache.sentry.binding.hive.conf.HiveAuthzConf; -import org.apache.sentry.binding.hive.conf.HiveAuthzConf.AuthzConfVars; -import org.apache.sentry.core.common.exception.SentryConfigurationException; -import org.apache.sentry.core.common.Subject; -import org.apache.sentry.core.model.db.Server; -import org.apache.sentry.provider.common.AuthorizationProvider; -import org.apache.sentry.provider.db.service.thrift.SentryPolicyServiceClient; -import org.apache.sentry.service.thrift.SentryServiceClientFactory; - -/** - * set the required system property to be read by HiveConf and AuthzConf - * - * @throws Exception - */ -// Hack, hiveConf doesn't provide a reliable way check if it found a valid -// hive-site -// load auth provider -// get the configured sentry provider -// validate policy files -// import policy files -public class SentryConfigTool { - private String sentrySiteFile = null; - private String policyFile = null; - private String query = null; - private String jdbcURL = null; - private String user = null; - private String passWord = null; - private String importPolicyFilePath = null; - private String exportPolicyFilePath = null; - private String objectPath = null; - private boolean listPrivs = false; - private boolean validate = false; - private boolean importOverwriteRole = false; - private HiveConf hiveConf = null; - private HiveAuthzConf authzConf = null; - private AuthorizationProvider sentryProvider = null; - - public SentryConfigTool() { - - } - - public AuthorizationProvider getSentryProvider() { - return sentryProvider; - } - - public void setSentryProvider(AuthorizationProvider sentryProvider) { - this.sentryProvider = sentryProvider; - } - - public HiveConf getHiveConf() { - return hiveConf; - } - - public void setHiveConf(HiveConf hiveConf) { - this.hiveConf = hiveConf; - } - - public HiveAuthzConf getAuthzConf() { - return authzConf; - } - - public void setAuthzConf(HiveAuthzConf authzConf) { - this.authzConf = authzConf; - } - - public boolean isValidate() { - return validate; - } - - public void setValidate(boolean validate) { - this.validate = validate; - } - - public String getImportPolicyFilePath() { - return importPolicyFilePath; - } - - public void 
setImportPolicyFilePath(String importPolicyFilePath) { - this.importPolicyFilePath = importPolicyFilePath; - } - - public String getObjectPath() { - return objectPath; - } - - public void setObjectPath(String objectPath) { - this.objectPath = objectPath; - } - - public String getExportPolicyFilePath() { - return exportPolicyFilePath; - } - - public void setExportPolicyFilePath(String exportPolicyFilePath) { - this.exportPolicyFilePath = exportPolicyFilePath; - } - - public String getSentrySiteFile() { - return sentrySiteFile; - } - - public void setSentrySiteFile(String sentrySiteFile) { - this.sentrySiteFile = sentrySiteFile; - } - - public String getPolicyFile() { - return policyFile; - } - - public void setPolicyFile(String policyFile) { - this.policyFile = policyFile; - } - - public String getQuery() { - return query; - } - - public void setQuery(String query) { - this.query = query; - } - - public String getJdbcURL() { - return jdbcURL; - } - - public void setJdbcURL(String jdbcURL) { - this.jdbcURL = jdbcURL; - } - - public String getUser() { - return user; - } - - public void setUser(String user) { - this.user = user; - } - - public String getPassWord() { - return passWord; - } - - public void setPassWord(String passWord) { - this.passWord = passWord; - } - - public boolean isListPrivs() { - return listPrivs; - } - - public void setListPrivs(boolean listPrivs) { - this.listPrivs = listPrivs; - } - - public boolean isImportOverwriteRole() { - return importOverwriteRole; - } - - public void setImportOverwriteRole(boolean importOverwriteRole) { - this.importOverwriteRole = importOverwriteRole; - } - - /** - * set the required system property to be read by HiveConf and AuthzConf - * @throws Exception - */ - public void setupConfig() throws Exception { - System.out.println("Configuration: "); - CodeSource src = SentryConfigTool.class.getProtectionDomain() - .getCodeSource(); - if (src != null) { - System.out.println("Sentry package jar: " + src.getLocation()); - } - - if (getPolicyFile() != null) { - System.setProperty(AuthzConfVars.AUTHZ_PROVIDER_RESOURCE.getVar(), - getPolicyFile()); - } - System.setProperty(AuthzConfVars.SENTRY_TESTING_MODE.getVar(), "true"); - setHiveConf(new HiveConf(SessionState.class)); - getHiveConf().setVar(ConfVars.SEMANTIC_ANALYZER_HOOK, - HiveAuthzBindingHookBase.class.getName()); - try { - System.out.println("Hive config: " + HiveConf.getHiveSiteLocation()); - } catch (NullPointerException e) { - // Hack, hiveConf doesn't provide a reliable way check if it found a valid - // hive-site - throw new SentryConfigurationException("Didn't find a hive-site.xml"); - - } - - if (getSentrySiteFile() != null) { - getHiveConf() - .set(HiveAuthzConf.HIVE_SENTRY_CONF_URL, getSentrySiteFile()); - } - - setAuthzConf(HiveAuthzConf.getAuthzConf(getHiveConf())); - System.out.println("Sentry config: " - + getAuthzConf().getHiveAuthzSiteFile()); - System.out.println("Sentry Policy: " - + getAuthzConf().get(AuthzConfVars.AUTHZ_PROVIDER_RESOURCE.getVar())); - System.out.println("Sentry server: " - + getAuthzConf().get(AuthzConfVars.AUTHZ_SERVER_NAME.getVar())); - - setSentryProvider(getAuthorizationProvider()); - } - - // load auth provider - private AuthorizationProvider getAuthorizationProvider() - throws IllegalStateException, SentryConfigurationException { - String serverName = new Server(getAuthzConf().get( - AuthzConfVars.AUTHZ_SERVER_NAME.getVar())).getName(); - // get the configured sentry provider - try { - return HiveAuthzBinding.getAuthProvider(getHiveConf(), - 
authzConf, serverName); - } catch (SentryConfigurationException eC) { - printConfigErrors(eC); - throw eC; - } catch (Exception e) { - throw new IllegalStateException("Couldn't load sentry provider ", e); - } - } - - // validate policy files - public void validatePolicy() throws Exception { - try { - getSentryProvider().validateResource(true); - } catch (SentryConfigurationException e) { - printConfigErrors(e); - throw e; - } - System.out.println("No errors found in the policy file"); - } - - // import the sentry mapping data to database - public void importPolicy() throws Exception { - String requestorUserName = System.getProperty("user.name", ""); - // get the FileFormatter according to the configuration - SentryPolicyFileFormatter sentryPolicyFileFormatter = SentryPolicyFileFormatFactory - .createFileFormatter(authzConf); - // parse the input file, get the mapping data in map structure - Map<String, Map<String, Set<String>>> policyFileMappingData = sentryPolicyFileFormatter.parse( - importPolicyFilePath, authzConf); - // todo: here should be an validator to check the data's value, format, hierarchy - try(SentryPolicyServiceClient client = - SentryServiceClientFactory.create(getAuthzConf())) { - // import the mapping data to database - client.importPolicy(policyFileMappingData, requestorUserName, importOverwriteRole); - } - } - - // export the sentry mapping data to file - public void exportPolicy() throws Exception { - String requestorUserName = System.getProperty("user.name", ""); - try (SentryPolicyServiceClient client = - SentryServiceClientFactory.create(getAuthzConf())) { - // export the sentry mapping data from database to map structure - Map<String, Map<String, Set<String>>> policyFileMappingData = client - .exportPolicy(requestorUserName, objectPath); - // get the FileFormatter according to the configuration - SentryPolicyFileFormatter sentryPolicyFileFormatter = SentryPolicyFileFormatFactory - .createFileFormatter(authzConf); - // write the sentry mapping data to exportPolicyFilePath with the data in map structure - sentryPolicyFileFormatter.write(exportPolicyFilePath, policyFileMappingData); - } - } - - // list permissions for given user - public void listPrivs() throws Exception { - getSentryProvider().validateResource(true); - System.out.println("Available privileges for user " + getUser() + ":"); - Set<String> permList = getSentryProvider().listPrivilegesForSubject( - new Subject(getUser())); - for (String perms : permList) { - System.out.println("\t" + perms); - } - if (permList.isEmpty()) { - System.out.println("\t*** No permissions available ***"); - } - } - - // Verify the given query - public void verifyLocalQuery(String queryStr) throws Exception { - // setup Hive driver - SessionState session = new SessionState(getHiveConf()); - SessionState.start(session); - Driver driver = new Driver(session.getConf(), getUser()); - - // compile the query - CommandProcessorResponse compilerStatus = driver - .compileAndRespond(queryStr); - if (compilerStatus.getResponseCode() != 0) { - String errMsg = compilerStatus.getErrorMessage(); - if (errMsg.contains(HiveAuthzConf.HIVE_SENTRY_PRIVILEGE_ERROR_MESSAGE)) { - printMissingPerms(getHiveConf().get( - HiveAuthzConf.HIVE_SENTRY_AUTH_ERRORS)); - } - throw new SemanticException("Compilation error: " - + compilerStatus.getErrorMessage()); - } - driver.close(); - System.out - .println("User " + getUser() + " has privileges to run the query"); - } - - // connect to remote HS2 and run mock query - public void verifyRemoteQuery(String 
queryStr) throws Exception { - Class.forName("org.apache.hive.jdbc.HiveDriver"); - Connection conn = DriverManager.getConnection(getJdbcURL(), getUser(), - getPassWord()); - Statement stmt = conn.createStatement(); - if (!isSentryEnabledOnHiveServer(stmt)) { - throw new IllegalStateException("Sentry is not enabled on HiveServer2"); - } - stmt.execute("set " + HiveAuthzConf.HIVE_SENTRY_MOCK_COMPILATION + "=true"); - try { - stmt.execute(queryStr); - } catch (SQLException e) { - String errMsg = e.getMessage(); - if (errMsg.contains(HiveAuthzConf.HIVE_SENTRY_MOCK_ERROR)) { - System.out.println("User " - + readConfig(stmt, HiveAuthzConf.HIVE_SENTRY_SUBJECT_NAME) - + " has privileges to run the query"); - return; - } else if (errMsg - .contains(HiveAuthzConf.HIVE_SENTRY_PRIVILEGE_ERROR_MESSAGE)) { - printMissingPerms(readConfig(stmt, - HiveAuthzConf.HIVE_SENTRY_AUTH_ERRORS)); - throw e; - } else { - throw e; - } - } finally { - if (!stmt.isClosed()) { - stmt.close(); - } - conn.close(); - } - - } - - // verify the sentry session hook is set - private boolean isSentryEnabledOnHiveServer(Statement stmt) - throws SQLException { - String bindingString = readConfig(stmt, HiveConf.ConfVars.HIVE_SERVER2_SESSION_HOOK.varname).toUpperCase(); - return bindingString.contains("org.apache.sentry.binding.hive".toUpperCase()) - && bindingString.contains("HiveAuthzBindingSessionHook".toUpperCase()); - } - - // read a config value using the 'set' statement - private String readConfig(Statement stmt, String configKey) - throws SQLException { - try (ResultSet res = stmt.executeQuery("set " + configKey)) { - if (!res.next()) { - return null; - } - // parse the key=value result format - String result = res.getString(1); - return result.substring(result.indexOf("=") + 1); - } - } - - // print configuration/policy file errors and warnings - private void printConfigErrors(SentryConfigurationException configException) - throws SentryConfigurationException { - System.out.println(" *** Found configuration problems *** "); - for (String errMsg : configException.getConfigErrors()) { - System.out.println("ERROR: " + errMsg); - } - for (String warnMsg : configException.getConfigWarnings()) { - System.out.println("Warning: " + warnMsg); - } - } - - // extract the authorization errors from the config property and print them - private void printMissingPerms(String errMsg) { - if (errMsg == null || errMsg.isEmpty()) { - return; - } - System.out.println("*** Query compilation failed ***"); - String[] perms = errMsg.replaceFirst( - ".*" + HiveAuthzConf.HIVE_SENTRY_PRIVILEGE_ERROR_MESSAGE, "") - .split(";"); - System.out.println("Required privileges for given query:"); - for (int count = 0; count < perms.length; count++) { - System.out.println(" \t " + perms[count]); - } - } - - // print usage - private void usage(Options sentryOptions) { - HelpFormatter formatter = new HelpFormatter(); - formatter.printHelp("sentry --command config-tool", sentryOptions); - System.exit(-1); - } - - /** - * parse arguments - * - * <pre> - * -d,--debug Enable debug output - * -e,--query <arg> Query privilege verification, requires -u - * -h,--help Print usage - * -i,--policyIni <arg> Policy file path - * -j,--jdbcURL <arg> JDBC URL - * -l,--listPrivs,--listPerms List privileges for given user, requires -u - * -p,--password <arg> Password - * -s,--sentry-site <arg> sentry-site file path - * -u,--user <arg> user name - * -v,--validate Validate policy file - * -I,--import Import policy file - * -E,--export Export policy file - * -o,--overwrite Overwrite the existing role data when doing the import - * -b,--objectPath The path of the object whose privileges will be exported - * </pre> - * - * @param args - */ - private void parseArgs(String[] args) { - boolean enableDebug = false; - - Options sentryOptions = new Options(); - - Option helpOpt = new Option("h", "help", false, "Print usage"); - helpOpt.setRequired(false); - - Option validateOpt = new Option("v", "validate", false, - "Validate policy file"); - validateOpt.setRequired(false); - - Option queryOpt = new Option("e", "query", true, - "Query privilege verification, requires -u"); - queryOpt.setRequired(false); - - Option listPermsOpt = new Option("l", "listPerms", false, - "list permissions for given user, requires -u"); - listPermsOpt.setRequired(false); - Option listPrivsOpt = new Option("listPrivs", false, - "list privileges for given user, requires -u"); - listPrivsOpt.setRequired(false); - - Option importOpt = new Option("I", "import", true, - "Import policy file"); - importOpt.setRequired(false); - - Option exportOpt = new Option("E", "export", true, "Export policy file"); - exportOpt.setRequired(false); - // required args - OptionGroup sentryOptGroup = new OptionGroup(); - sentryOptGroup.addOption(helpOpt); - sentryOptGroup.addOption(validateOpt); - sentryOptGroup.addOption(queryOpt); - sentryOptGroup.addOption(listPermsOpt); - sentryOptGroup.addOption(listPrivsOpt); - sentryOptGroup.addOption(importOpt); - sentryOptGroup.addOption(exportOpt); - sentryOptGroup.setRequired(true); - sentryOptions.addOptionGroup(sentryOptGroup); - - // optional args - Option jdbcArg = new Option("j", "jdbcURL", true, "JDBC URL"); - jdbcArg.setRequired(false); - sentryOptions.addOption(jdbcArg); - - Option sentrySitePath = new Option("s", "sentry-site", true, - "sentry-site file path"); - sentrySitePath.setRequired(false); - sentryOptions.addOption(sentrySitePath); - - Option globalPolicyPath = new Option("i", "policyIni", true, - "Policy file path"); - globalPolicyPath.setRequired(false); - sentryOptions.addOption(globalPolicyPath); - - Option userOpt = new Option("u", "user", true, "user name"); - userOpt.setRequired(false); - sentryOptions.addOption(userOpt); - - Option passWordOpt = new Option("p", "password", true, "Password"); - passWordOpt.setRequired(false); - sentryOptions.addOption(passWordOpt); - - Option debugOpt = new Option("d", "debug", false, "enable debug output"); - debugOpt.setRequired(false); - sentryOptions.addOption(debugOpt); - - Option overwriteOpt = new Option("o", "overwrite", false, "enable import overwrite"); - overwriteOpt.setRequired(false); - sentryOptions.addOption(overwriteOpt); - - Option objectPathOpt = new Option("b", "objectPath", - true, "The path of the object whose privileges will be exported"); - objectPathOpt.setRequired(false); - sentryOptions.addOption(objectPathOpt); - - try { - Parser parser = new GnuParser(); - CommandLine cmd = parser.parse(sentryOptions, args); - - for (Option opt : cmd.getOptions()) { - if (opt.getOpt().equals("s")) { - setSentrySiteFile(opt.getValue()); - } else if (opt.getOpt().equals("i")) { - setPolicyFile(opt.getValue()); - } else if (opt.getOpt().equals("e")) { - setQuery(opt.getValue()); - } else if (opt.getOpt().equals("j")) { - setJdbcURL(opt.getValue()); - } else if (opt.getOpt().equals("u")) { - setUser(opt.getValue()); - } else if (opt.getOpt().equals("p")) { - setPassWord(opt.getValue()); - } else if (opt.getOpt().equals("l") || opt.getOpt().equals("listPrivs")) { - setListPrivs(true); - } else if (opt.getOpt().equals("v")) { - 
setValidate(true); - } else if (opt.getOpt().equals("I")) { - setImportPolicyFilePath(opt.getValue()); - } else if (opt.getOpt().equals("E")) { - setExportPolicyFilePath(opt.getValue()); - } else if (opt.getOpt().equals("h")) { - usage(sentryOptions); - } else if (opt.getOpt().equals("d")) { - enableDebug = true; - } else if (opt.getOpt().equals("o")) { - setImportOverwriteRole(true); - } else if (opt.getOpt().equals("b")) { - setObjectPath(opt.getValue()); - } - } - - if (isListPrivs() && getUser() == null) { - throw new ParseException("Can't use -l without -u "); - } - if (getQuery() != null && getUser() == null) { - throw new ParseException("Must use -u with -e "); - } - } catch (ParseException e1) { - usage(sentryOptions); - } - - if (!enableDebug) { - // turn off logging - LogManager.getRootLogger().setLevel(Level.OFF); - } - } - - public static class CommandImpl implements Command { - @Override - public void run(String[] args) throws Exception { - SentryConfigTool sentryTool = new SentryConfigTool(); - - try { - // parse arguments - sentryTool.parseArgs(args); - - // load configuration - sentryTool.setupConfig(); - - // validate configuration - if (sentryTool.isValidate()) { - sentryTool.validatePolicy(); - } - - if (!StringUtils.isEmpty(sentryTool.getImportPolicyFilePath())) { - sentryTool.importPolicy(); - } - - if (!StringUtils.isEmpty(sentryTool.getExportPolicyFilePath())) { - sentryTool.exportPolicy(); - } - - // list permissions for given user - if (sentryTool.isListPrivs()) { - sentryTool.listPrivs(); - } - - // verify given query - if (sentryTool.getQuery() != null) { - if (sentryTool.getJdbcURL() != null) { - sentryTool.verifyRemoteQuery(sentryTool.getQuery()); - } else { - sentryTool.verifyLocalQuery(sentryTool.getQuery()); - } - } - } catch (Exception e) { - System.out.println("Sentry tool reported errors: " + e.getMessage()); - e.printStackTrace(System.out); - System.exit(1); - } - } - } -} http://git-wip-us.apache.org/repos/asf/sentry/blob/b19cb01b/sentry-binding/sentry-binding-hive-common/src/main/java/org/apache/sentry/binding/metastore/AuthorizingObjectStoreBase.java ---------------------------------------------------------------------- diff --git a/sentry-binding/sentry-binding-hive-common/src/main/java/org/apache/sentry/binding/metastore/AuthorizingObjectStoreBase.java b/sentry-binding/sentry-binding-hive-common/src/main/java/org/apache/sentry/binding/metastore/AuthorizingObjectStoreBase.java deleted file mode 100644 index 196bd2b..0000000 --- a/sentry-binding/sentry-binding-hive-common/src/main/java/org/apache/sentry/binding/metastore/AuthorizingObjectStoreBase.java +++ /dev/null @@ -1,412 +0,0 @@ -/** - * Licensed to the Apache Software Foundation (ASF) under one - * or more contributor license agreements. See the NOTICE file - * distributed with this work for additional information - * regarding copyright ownership. The ASF licenses this file - * to you under the Apache License, Version 2.0 (the - * "License"); you may not use this file except in compliance - * with the License. You may obtain a copy of the License at - * - * http://www.apache.org/licenses/LICENSE-2.0 - * - * Unless required by applicable law or agreed to in writing, software - * distributed under the License is distributed on an "AS IS" BASIS, - * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. - * See the License for the specific language governing permissions and - * limitations under the License.
- */ - -package org.apache.sentry.binding.metastore; - -import java.io.IOException; -import java.net.MalformedURLException; -import java.net.URL; -import java.util.List; -import java.util.Set; - -import javax.security.auth.login.LoginException; - -import org.apache.hadoop.hive.conf.HiveConf; -import org.apache.hadoop.hive.metastore.ObjectStore; -import org.apache.hadoop.hive.metastore.api.ColumnStatistics; -import org.apache.hadoop.hive.metastore.api.Database; -import org.apache.hadoop.hive.metastore.api.Index; -import org.apache.hadoop.hive.metastore.api.InvalidObjectException; -import org.apache.hadoop.hive.metastore.api.MetaException; -import org.apache.hadoop.hive.metastore.api.NoSuchObjectException; -import org.apache.hadoop.hive.metastore.api.Partition; -import org.apache.hadoop.hive.metastore.api.Table; -import org.apache.hadoop.hive.metastore.api.UnknownDBException; -import org.apache.hadoop.hive.ql.parse.SemanticException; -import org.apache.hadoop.hive.ql.plan.HiveOperation; -import org.apache.hadoop.hive.shims.Utils; -import org.apache.sentry.binding.hive.HiveAuthzBindingHookBase; -import org.apache.sentry.binding.hive.authz.HiveAuthzBinding; -import org.apache.sentry.binding.hive.conf.HiveAuthzConf; -import org.apache.sentry.binding.hive.conf.HiveAuthzConf.AuthzConfVars; - -import com.google.common.collect.ImmutableSet; -import com.google.common.collect.Lists; -import com.google.common.collect.Sets; - -/*** - * This class is a wrapper around ObjectStore, which is the interface between the - * application logic and the database store. It does the authorization, or filters the - * results, when processing a metastore request. - * e.g.: - * Callers will only receive back the objects they have privileges to - * access. - * If there is a request for an object list (like getAllTables()), the result - * will be filtered to exclude objects the requestor doesn't have privileges to - * access.
- */ -public class AuthorizingObjectStoreBase extends ObjectStore { - private static ImmutableSet<String> serviceUsers; - private static HiveConf hiveConf; - private static HiveAuthzConf authzConf; - private static HiveAuthzBinding hiveAuthzBinding; - private static final String NO_ACCESS_MESSAGE_TABLE = "Table does not exist or insufficient privileges to access: "; - private static final String NO_ACCESS_MESSAGE_DATABASE = "Database does not exist or insufficient privileges to access: "; - - @Override - public List<String> getDatabases(String pattern) throws MetaException { - return filterDatabases(super.getDatabases(pattern)); - } - - @Override - public List<String> getAllDatabases() throws MetaException { - return filterDatabases(super.getAllDatabases()); - } - - @Override - public Database getDatabase(String name) throws NoSuchObjectException { - Database db = super.getDatabase(name); - try { - if (filterDatabases(Lists.newArrayList(name)).isEmpty()) { - throw new NoSuchObjectException(getNoAccessMessageForDB(name)); - } - } catch (MetaException e) { - throw new NoSuchObjectException("Failed to authorize access to " + name - + " : " + e.getMessage()); - } - return db; - } - - @Override - public Table getTable(String dbName, String tableName) throws MetaException { - Table table = super.getTable(dbName, tableName); - if (table == null - || filterTables(dbName, Lists.newArrayList(tableName)).isEmpty()) { - return null; - } - return table; - } - - @Override - public Partition getPartition(String dbName, String tableName, - List<String> part_vals) throws MetaException, NoSuchObjectException { - if (filterTables(dbName, Lists.newArrayList(tableName)).isEmpty()) { - throw new NoSuchObjectException(getNoAccessMessageForTable(dbName, tableName)); - } - return super.getPartition(dbName, tableName, part_vals); - } - - @Override - public List<Partition> getPartitions(String dbName, String tableName, - int maxParts) throws MetaException, NoSuchObjectException { - if (filterTables(dbName, Lists.newArrayList(tableName)).isEmpty()) { - throw new MetaException(getNoAccessMessageForTable(dbName, tableName)); - } - return super.getPartitions(dbName, tableName, maxParts); - } - - @Override - public List<String> getTables(String dbName, String pattern) - throws MetaException { - return filterTables(dbName, super.getTables(dbName, pattern)); - } - - @Override - public List<Table> getTableObjectsByName(String dbname, List<String> tableNames) - throws MetaException, UnknownDBException { - return super.getTableObjectsByName(dbname, filterTables(dbname, tableNames)); - } - - @Override - public List<String> getAllTables(String dbName) throws MetaException { - return filterTables(dbName, super.getAllTables(dbName)); - } - - @Override - public List<String> listTableNamesByFilter(String dbName, String filter, - short maxTables) throws MetaException { - return filterTables(dbName, - super.listTableNamesByFilter(dbName, filter, maxTables)); - } - - @Override - public List<String> listPartitionNames(String dbName, String tableName, - short max_parts) throws MetaException { - if (filterTables(dbName, Lists.newArrayList(tableName)).isEmpty()) { - throw new MetaException(getNoAccessMessageForTable(dbName, tableName)); - } - return super.listPartitionNames(dbName, tableName, max_parts); - } - - @Override - public List<String> listPartitionNamesByFilter(String dbName, - String tableName, String filter, short max_parts) throws MetaException { - if (filterTables(dbName, Lists.newArrayList(tableName)).isEmpty()) { - throw new 
MetaException(getNoAccessMessageForTable(dbName, tableName)); - } - return super.listPartitionNamesByFilter(dbName, tableName, filter, - max_parts); - } - - @Override - public Index getIndex(String dbName, String origTableName, String indexName) - throws MetaException { - if (filterTables(dbName, Lists.newArrayList(origTableName)).isEmpty()) { - throw new MetaException(getNoAccessMessageForTable(dbName, origTableName)); - } - return super.getIndex(dbName, origTableName, indexName); - } - - @Override - public List<Index> getIndexes(String dbName, String origTableName, int max) - throws MetaException { - if (filterTables(dbName, Lists.newArrayList(origTableName)).isEmpty()) { - throw new MetaException(getNoAccessMessageForTable(dbName, origTableName)); - } - return super.getIndexes(dbName, origTableName, max); - } - - @Override - public List<String> listIndexNames(String dbName, String origTableName, - short max) throws MetaException { - if (filterTables(dbName, Lists.newArrayList(origTableName)).isEmpty()) { - throw new MetaException(getNoAccessMessageForTable(dbName, origTableName)); - } - return super.listIndexNames(dbName, origTableName, max); - } - - @Override - public List<Partition> getPartitionsByFilter(String dbName, - String tblName, String filter, short maxParts) throws MetaException, - NoSuchObjectException { - if (filterTables(dbName, Lists.newArrayList(tblName)).isEmpty()) { - throw new MetaException(getNoAccessMessageForTable(dbName, tblName)); - } - return super.getPartitionsByFilter(dbName, tblName, filter, maxParts); - } - - @Override - public List<Partition> getPartitionsByNames(String dbName, String tblName, - List<String> partNames) throws MetaException, NoSuchObjectException { - if (filterTables(dbName, Lists.newArrayList(tblName)).isEmpty()) { - throw new MetaException(getNoAccessMessageForTable(dbName, tblName)); - } - return super.getPartitionsByNames(dbName, tblName, partNames); - } - - @Override - public Partition getPartitionWithAuth(String dbName, String tblName, - List<String> partVals, String user_name, List<String> group_names) - throws MetaException, NoSuchObjectException, InvalidObjectException { - if (filterTables(dbName, Lists.newArrayList(tblName)).isEmpty()) { - throw new MetaException(getNoAccessMessageForTable(dbName, tblName)); - } - return super.getPartitionWithAuth(dbName, tblName, partVals, user_name, - group_names); - } - - @Override - public List<Partition> getPartitionsWithAuth(String dbName, String tblName, - short maxParts, String userName, List<String> groupNames) - throws MetaException, NoSuchObjectException, InvalidObjectException { - if (filterTables(dbName, Lists.newArrayList(tblName)).isEmpty()) { - throw new MetaException(getNoAccessMessageForTable(dbName, tblName)); - } - return super.getPartitionsWithAuth(dbName, tblName, maxParts, userName, - groupNames); - } - - @Override - public List<String> listPartitionNamesPs(String dbName, String tblName, - List<String> part_vals, short max_parts) throws MetaException, - NoSuchObjectException { - if (filterTables(dbName, Lists.newArrayList(tblName)).isEmpty()) { - throw new MetaException(getNoAccessMessageForTable(dbName, tblName)); - } - return super.listPartitionNamesPs(dbName, tblName, part_vals, max_parts); - } - - @Override - public List<Partition> listPartitionsPsWithAuth(String dbName, - String tblName, List<String> part_vals, short max_parts, String userName, - List<String> groupNames) throws MetaException, InvalidObjectException, - NoSuchObjectException { - if (filterTables(dbName, 
Lists.newArrayList(tblName)).isEmpty()) { - throw new MetaException(getNoAccessMessageForTable(dbName, tblName)); - } - return super.listPartitionsPsWithAuth(dbName, tblName, part_vals, - max_parts, userName, groupNames); - } - - @Override - public ColumnStatistics getTableColumnStatistics(String dbName, - String tableName, List<String> colNames) throws MetaException, - NoSuchObjectException { - if (filterTables(dbName, Lists.newArrayList(tableName)).isEmpty()) { - throw new MetaException(getNoAccessMessageForTable(dbName, tableName)); - } - return super.getTableColumnStatistics(dbName, tableName, colNames); - } - - @Override - public List<ColumnStatistics> getPartitionColumnStatistics( - String dbName, String tblName, List<String> partNames, - List<String> colNames) throws MetaException, NoSuchObjectException { - if (filterTables(dbName, Lists.newArrayList(tblName)).isEmpty()) { - throw new MetaException(getNoAccessMessageForTable(dbName, tblName)); - } - return super.getPartitionColumnStatistics(dbName, tblName, partNames, - colNames); - } - - /** - * Invoke Hive database filtering that removes the entries which the user has no - * privileges to access - * @param dbList - * @return - * @throws MetaException - */ - private List<String> filterDatabases(List<String> dbList) - throws MetaException { - if (needsAuthorization(getUserName())) { - try { - return HiveAuthzBindingHookBase.filterShowDatabases(getHiveAuthzBinding(), - dbList, HiveOperation.SHOWDATABASES, getUserName()); - } catch (SemanticException e) { - throw new MetaException("Error getting DB list " + e.getMessage()); - } - } else { - return dbList; - } - } - - /** - * Invoke Hive table filtering that removes the entries which the user has no - * privileges to access - * @param dbName - * @param tabList - * @return - * @throws MetaException - */ - protected List<String> filterTables(String dbName, List<String> tabList) - throws MetaException { - if (needsAuthorization(getUserName())) { - try { - return HiveAuthzBindingHookBase.filterShowTables(getHiveAuthzBinding(), - tabList, HiveOperation.SHOWTABLES, getUserName(), dbName); - } catch (SemanticException e) { - throw new MetaException("Error getting Table list " + e.getMessage()); - } - } else { - return tabList; - } - } - - /** - * load the Hive authz binding - * - * @return - * @throws MetaException - */ - private HiveAuthzBinding getHiveAuthzBinding() throws MetaException { - if (hiveAuthzBinding == null) { - try { - hiveAuthzBinding = new HiveAuthzBinding(HiveAuthzBinding.HiveHook.HiveMetaStore, - getHiveConf(), getAuthzConf()); - } catch (Exception e) { - throw new MetaException("Failed to load Hive binding " + e.getMessage()); - } - } - return hiveAuthzBinding; - } - - private ImmutableSet<String> getServiceUsers() throws MetaException { - if (serviceUsers == null) { - serviceUsers = ImmutableSet.copyOf(toTrimmed(Sets.newHashSet(getAuthzConf().getStrings( - AuthzConfVars.AUTHZ_METASTORE_SERVICE_USERS.getVar(), new String[] { "" })))); - } - return serviceUsers; - } - - private HiveConf getHiveConf() { - if (hiveConf == null) { - hiveConf = new HiveConf(getConf(), this.getClass()); - } - return hiveConf; - } - - private HiveAuthzConf getAuthzConf() throws MetaException { - if (authzConf == null) { - String hiveAuthzConf = getConf().get(HiveAuthzConf.HIVE_SENTRY_CONF_URL); - if (hiveAuthzConf == null - || (hiveAuthzConf = hiveAuthzConf.trim()).isEmpty()) { - throw new MetaException("Configuration key " - + HiveAuthzConf.HIVE_SENTRY_CONF_URL + " value '" + hiveAuthzConf - + "' is invalid."); - } - try { - 
authzConf = new HiveAuthzConf(new URL(hiveAuthzConf)); - } catch (MalformedURLException e) { - throw new MetaException("Configuration key " - + HiveAuthzConf.HIVE_SENTRY_CONF_URL - + " specifies a malformed URL '" + hiveAuthzConf + "' " - + e.getMessage()); - } - } - return authzConf; - } - - /** - * Extract the user from the underlying auth subsystem - * @return - * @throws MetaException - */ - private String getUserName() throws MetaException { - try { - return Utils.getUGI().getShortUserName(); - } catch (LoginException e) { - throw new MetaException("Failed to get username " + e.getMessage()); - } catch (IOException e) { - throw new MetaException("Failed to get username " + e.getMessage()); - } - } - - /** - * Check if the given user needs to be validated. - * @param userName - * @return - */ - private boolean needsAuthorization(String userName) throws MetaException { - return !getServiceUsers().contains(userName.trim()); - } - - private static Set<String> toTrimmed(Set<String> s) { - Set<String> result = Sets.newHashSet(); - for (String v : s) { - result.add(v.trim()); - } - return result; - } - - protected String getNoAccessMessageForTable(String dbName, String tableName) { - return NO_ACCESS_MESSAGE_TABLE + "<" + dbName + ">.<" + tableName + ">"; - } - - private String getNoAccessMessageForDB(String dbName) { - return NO_ACCESS_MESSAGE_DATABASE + "<" + dbName + ">"; - } -}
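----------------------------------------------------------------------
AuthorizingObjectStoreBase (and the V2 copy this commit forks out of it) follows one authorization shape throughout: list-style metastore calls such as getAllTables() are silently narrowed to the objects the caller may access, while single-object calls such as listPartitionNames() on a filtered-out table fail with the combined "does not exist or insufficient privileges" message, so a caller cannot tell a missing object from a forbidden one. A minimal, self-contained Java sketch of that filter-or-throw pattern follows; every name in it is hypothetical, and only the control flow mirrors the class above (the real class delegates the privilege check to HiveAuthzBindingHookBase.filterShowTables()).

import java.util.ArrayList;
import java.util.Arrays;
import java.util.HashSet;
import java.util.List;
import java.util.Set;

// Hypothetical stand-in for the Sentry-backed metastore wrapper.
public class FilterOrThrowSketch {

  // narrow a candidate list down to the entries the user may access
  static List<String> filterTables(String db, List<String> tables,
      Set<String> accessible) {
    List<String> filtered = new ArrayList<>();
    for (String t : tables) {
      if (accessible.contains(db + "." + t)) {
        filtered.add(t);
      }
    }
    return filtered;
  }

  // list path: the caller simply sees fewer results
  static List<String> getAllTables(String db, List<String> allTables,
      Set<String> accessible) {
    return filterTables(db, allTables, accessible);
  }

  // single-object path: an empty filter result becomes an error that does
  // not reveal whether the table is missing or merely forbidden
  static List<String> listPartitionNames(String db, String table,
      Set<String> accessible) {
    if (filterTables(db, Arrays.asList(table), accessible).isEmpty()) {
      throw new IllegalStateException(
          "Table does not exist or insufficient privileges to access: <"
              + db + ">.<" + table + ">");
    }
    return Arrays.asList("ds=2017-07-31"); // stand-in for super.listPartitionNames()
  }

  public static void main(String[] args) {
    Set<String> accessible = new HashSet<>(Arrays.asList("db1.t1"));
    // prints [t1]; t2 is silently dropped
    System.out.println(getAllTables("db1", Arrays.asList("t1", "t2"), accessible));
    // throws: same message whether t2 is absent or just not authorized
    listPartitionNames("db1", "t2", accessible);
  }
}

Keeping the single-object failure message identical for "absent" and "forbidden" is the point of the shared NO_ACCESS_MESSAGE_* constants above.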

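----------------------------------------------------------------------
In the SentryConfigTool diff earlier in this message, the commons-cli wiring in parseArgs() is the part most worth noting: the "commands" (-h, -v, -e, -l, -I, -E) live in a required OptionGroup, so the parser enforces that exactly one command is chosen per invocation, while connection settings (-u, -j, -s, ...) remain plain optional Options. A trimmed-down, runnable sketch of that arrangement, using the same commons-cli classes the tool imports (the option set is shortened; this is an illustration, not the shipped tool):

import org.apache.commons.cli.CommandLine;
import org.apache.commons.cli.GnuParser;
import org.apache.commons.cli.Option;
import org.apache.commons.cli.OptionGroup;
import org.apache.commons.cli.Options;
import org.apache.commons.cli.ParseException;

public class CliSketch {
  public static void main(String[] args) throws ParseException {
    Options options = new Options();

    // mutually exclusive commands; the group being required means the
    // parser rejects both "no command" and "two commands"
    OptionGroup commands = new OptionGroup();
    commands.addOption(new Option("v", "validate", false, "Validate policy file"));
    commands.addOption(new Option("e", "query", true, "Query privilege verification"));
    commands.setRequired(true);
    options.addOptionGroup(commands);

    // plain optional settings
    options.addOption(new Option("u", "user", true, "user name"));
    options.addOption(new Option("j", "jdbcURL", true, "JDBC URL"));

    CommandLine cmd = new GnuParser().parse(options, args);
    // cross-option constraint commons-cli cannot express on its own
    if (cmd.hasOption("e") && !cmd.hasOption("u")) {
      throw new ParseException("Must use -u with -e");
    }
    System.out.println("command ok, query=" + cmd.getOptionValue("e"));
  }
}

Constraints between options, such as -e requiring -u, still have to be checked by hand after parse() returns, which is exactly what parseArgs() does before falling back to usage() on any ParseException.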
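A second detail of the same tool worth a sketch is verifyRemoteQuery(): it never executes the user's query. It flips the HS2 session into Sentry's mock-compilation mode, submits the query, and classifies the SQLException that comes back; the mock-error marker means compilation (and therefore authorization) succeeded, while the privilege-error marker means authorization failed. The sketch below assumes placeholder values for the two marker strings and for the mock-compilation config key, since their literal values are not shown in this diff:

import java.sql.Connection;
import java.sql.DriverManager;
import java.sql.SQLException;
import java.sql.Statement;

public class RemoteVerifySketch {
  // placeholders for HiveAuthzConf.HIVE_SENTRY_MOCK_ERROR and
  // HIVE_SENTRY_PRIVILEGE_ERROR_MESSAGE, whose literals are not in the diff
  static final String MOCK_ERROR_MARKER = "<mock-error-marker>";
  static final String PRIVILEGE_ERROR_MARKER = "<privilege-error-marker>";

  static boolean userCanRun(String jdbcUrl, String user, String password,
      String query) throws Exception {
    Class.forName("org.apache.hive.jdbc.HiveDriver");
    try (Connection conn = DriverManager.getConnection(jdbcUrl, user, password);
         Statement stmt = conn.createStatement()) {
      // ask HS2 to only compile (and authorize) the query, never run it;
      // the key below is a placeholder for HIVE_SENTRY_MOCK_COMPILATION
      stmt.execute("set hive.sentry.mock.compilation=true");
      try {
        stmt.execute(query);
        return true; // not expected in mock mode
      } catch (SQLException e) {
        if (e.getMessage().contains(MOCK_ERROR_MARKER)) {
          return true;  // authorization passed, then hit the mock stop
        }
        if (e.getMessage().contains(PRIVILEGE_ERROR_MARKER)) {
          return false; // authorization itself failed
        }
        throw e;        // unrelated failure
      }
    }
  }
}

The same marker-based classification drives printMissingPerms() in the tool, which strips everything up to the privilege marker and splits the remainder on ';' to list the privileges the query would need.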