http://git-wip-us.apache.org/repos/asf/sentry/blob/e358fde7/sentry-binding/sentry-binding-hive-v2/src/main/java/org/apache/sentry/binding/hive/v2/HiveAuthzBindingHookV2.java
----------------------------------------------------------------------
diff --git a/sentry-binding/sentry-binding-hive-v2/src/main/java/org/apache/sentry/binding/hive/v2/HiveAuthzBindingHookV2.java b/sentry-binding/sentry-binding-hive-v2/src/main/java/org/apache/sentry/binding/hive/v2/HiveAuthzBindingHookV2.java
deleted file mode 100644
index fac6ba3..0000000
--- a/sentry-binding/sentry-binding-hive-v2/src/main/java/org/apache/sentry/binding/hive/v2/HiveAuthzBindingHookV2.java
+++ /dev/null
@@ -1,157 +0,0 @@
-/*
- * Licensed to the Apache Software Foundation (ASF) under one or more
- * contributor license agreements.  See the NOTICE file distributed with
- * this work for additional information regarding copyright ownership.
- * The ASF licenses this file to You under the Apache License, Version 2.0
- * (the "License"); you may not use this file except in compliance with
- * the License.  You may obtain a copy of the License at
- *
- *      http://www.apache.org/licenses/LICENSE-2.0
- *
- * Unless required by applicable law or agreed to in writing, software
- * distributed under the License is distributed on an "AS IS" BASIS,
- * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
- * See the License for the specific language governing permissions and
- * limitations under the License.
- */
-package org.apache.sentry.binding.hive.v2;
-
-import java.io.Serializable;
-import java.security.CodeSource;
-import java.util.List;
-
-import org.apache.hadoop.hive.ql.exec.DDLTask;
-import org.apache.hadoop.hive.ql.exec.SentryFilterDDLTask;
-import org.apache.hadoop.hive.ql.exec.Task;
-import org.apache.hadoop.hive.ql.exec.Utilities;
-import org.apache.hadoop.hive.ql.lib.Node;
-import org.apache.hadoop.hive.ql.metadata.AuthorizationException;
-import org.apache.hadoop.hive.ql.parse.ASTNode;
-import org.apache.hadoop.hive.ql.parse.BaseSemanticAnalyzer;
-import org.apache.hadoop.hive.ql.parse.HiveParser;
-import org.apache.hadoop.hive.ql.parse.HiveSemanticAnalyzerHookContext;
-import org.apache.hadoop.hive.ql.parse.SemanticException;
-import org.apache.hadoop.hive.ql.plan.DDLWork;
-import org.apache.hadoop.hive.ql.plan.HiveOperation;
-import org.apache.hadoop.hive.ql.session.SessionState;
-import org.apache.sentry.binding.hive.HiveAuthzBindingHookBaseV2;
-import org.apache.sentry.binding.hive.authz.HiveAuthzPrivileges;
-import org.apache.sentry.binding.hive.conf.HiveAuthzConf;
-import org.apache.sentry.core.common.Subject;
-import org.apache.sentry.core.model.db.Database;
-import org.slf4j.Logger;
-import org.slf4j.LoggerFactory;
-
-public class HiveAuthzBindingHookV2 extends HiveAuthzBindingHookBaseV2 {
-  private static final Logger LOG = LoggerFactory
-      .getLogger(HiveAuthzBindingHookV2.class);
-
-  public HiveAuthzBindingHookV2() throws Exception {
-    super();
-  }
-
-  @Override
-  public ASTNode preAnalyze(HiveSemanticAnalyzerHookContext context, ASTNode ast)
-      throws SemanticException {
-    switch (ast.getToken().getType()) {
-    // Hive parser doesn't capture the database name in output entity, so we store it here for now
-      case HiveParser.TOK_CREATEFUNCTION:
-        String udfClassName = BaseSemanticAnalyzer.unescapeSQLString(ast.getChild(1).getText());
-        try {
-          CodeSource udfSrc =
-              Class.forName(udfClassName, true, Utilities.getSessionSpecifiedClassLoader())
-                  .getProtectionDomain().getCodeSource();
-          if (udfSrc == null) {
-            throw new SemanticException("Could not resolve the jar for UDF class " + udfClassName);
-          }
-          String udfJar = udfSrc.getLocation().getPath();
-          if (udfJar == null || udfJar.isEmpty()) {
-            throw new SemanticException("Could not find the jar for UDF class " + udfClassName +
-                "to validate privileges");
-          }
-          udfURIs.add(parseURI(udfSrc.getLocation().toString(), true));
-        } catch (ClassNotFoundException e) {
-          List<String> functionJars = getFunctionJars(ast);
-          if (functionJars.isEmpty()) {
-            throw new SemanticException("Error retrieving udf class", e);
-          } else {
-            // Add the jars from the command "Create function using jar" to the access list
-            // Defer to hive to check if the class is in the jars
-            for(String jar : functionJars) {
-              udfURIs.add(parseURI(jar, false));
-            }
-          }
-        }
-
-        // create/drop function is allowed with any database
-        currDB = Database.ALL;
-        break;
-      case HiveParser.TOK_DROPFUNCTION:
-        // create/drop function is allowed with any database
-        currDB = Database.ALL;
-        break;
-      case HiveParser.TOK_CREATETABLE:
-        for (Node childNode : ast.getChildren()) {
-          ASTNode childASTNode = (ASTNode) childNode;
-          if ("TOK_TABLESERIALIZER".equals(childASTNode.getText())) {
-            ASTNode serdeNode = (ASTNode) childASTNode.getChild(0);
-            String serdeClassName =
-                BaseSemanticAnalyzer.unescapeSQLString(serdeNode.getChild(0).getText());
-            setSerdeURI(serdeClassName);
-          }
-        }
-        break;
-    }
-    return ast;
-  }
-
-  /**
-   * Post analyze hook that invokes hive auth bindings
-   */
-  @Override
-  public void postAnalyze(HiveSemanticAnalyzerHookContext context,
-      List<Task<? extends Serializable>> rootTasks) throws SemanticException {
-    HiveOperation stmtOperation = getCurrentHiveStmtOp();
-    Subject subject = new Subject(context.getUserName());
-    for (int i = 0; i < rootTasks.size(); i++) {
-      Task<? extends Serializable> task = rootTasks.get(i);
-      if (task instanceof DDLTask) {
-        SentryFilterDDLTask filterTask =
-            new SentryFilterDDLTask(hiveAuthzBinding, subject, stmtOperation);
-        filterTask.setWork((DDLWork)task.getWork());
-        rootTasks.set(i, filterTask);
-      }
-    }
-    HiveAuthzPrivileges stmtAuthObject = HiveAuthzPrivilegesMapV2.getHiveAuthzPrivileges(stmtOperation);
-    if (stmtOperation.equals(HiveOperation.CREATEFUNCTION)
-        || stmtOperation.equals(HiveOperation.DROPFUNCTION)
-        || stmtOperation.equals(HiveOperation.CREATETABLE)) {
-      try {
-        if (stmtAuthObject == null) {
-          // We don't handle authorizing this statement
-          return;
-        }
-
-        authorizeWithHiveBindings(context, stmtAuthObject, stmtOperation);
-      } catch (AuthorizationException e) {
-        executeOnFailureHooks(context, stmtOperation, e);
-        String permsRequired = "";
-        for (String perm : hiveAuthzBinding.getLastQueryPrivilegeErrors()) {
-          permsRequired += perm + ";";
-        }
-        SessionState.get().getConf().set(HiveAuthzConf.HIVE_SENTRY_AUTH_ERRORS, permsRequired);
-        String msgForLog =
-            HiveAuthzConf.HIVE_SENTRY_PRIVILEGE_ERROR_MESSAGE
-                + "\n Required privileges for this query: " + permsRequired;
-        String msgForConsole =
-            HiveAuthzConf.HIVE_SENTRY_PRIVILEGE_ERROR_MESSAGE + "\n " + e.getMessage();
-        // AuthorizationException is not a real exception, use the info level to record this.
-        LOG.info(msgForLog);
-        throw new SemanticException(msgForConsole, e);
-      } finally {
-        hiveAuthzBinding.close();
-      }
-    }
-  }
-
-}
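
Note: the CREATE FUNCTION handling above resolves the UDF's jar through the standard java.security.CodeSource API (the hook uses the session-specified classloader; the sketch below uses the default one, and the class and method names are illustrative only, not part of this change):

    import java.security.CodeSource;

    public class UdfJarLookupSketch {
      // Resolve the jar (or directory) that provides a class, as preAnalyze() does
      // for CREATE FUNCTION; CodeSource is null for JDK bootstrap classes.
      static String jarFor(String udfClassName) throws ClassNotFoundException {
        CodeSource src = Class.forName(udfClassName)
            .getProtectionDomain().getCodeSource();
        if (src == null) {
          throw new IllegalStateException("Could not resolve the jar for " + udfClassName);
        }
        return src.getLocation().getPath();
      }

      public static void main(String[] args) throws ClassNotFoundException {
        // e.g. prints the path of the guava jar on the classpath
        System.out.println(jarFor("com.google.common.base.Joiner"));
      }
    }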

http://git-wip-us.apache.org/repos/asf/sentry/blob/e358fde7/sentry-binding/sentry-binding-hive-v2/src/main/java/org/apache/sentry/binding/hive/v2/HiveAuthzBindingSessionHookV2.java
----------------------------------------------------------------------
diff --git a/sentry-binding/sentry-binding-hive-v2/src/main/java/org/apache/sentry/binding/hive/v2/HiveAuthzBindingSessionHookV2.java b/sentry-binding/sentry-binding-hive-v2/src/main/java/org/apache/sentry/binding/hive/v2/HiveAuthzBindingSessionHookV2.java
deleted file mode 100644
index 5a47da8..0000000
--- a/sentry-binding/sentry-binding-hive-v2/src/main/java/org/apache/sentry/binding/hive/v2/HiveAuthzBindingSessionHookV2.java
+++ /dev/null
@@ -1,129 +0,0 @@
-/*
- * Licensed to the Apache Software Foundation (ASF) under one or more
- * contributor license agreements.  See the NOTICE file distributed with
- * this work for additional information regarding copyright ownership.
- * The ASF licenses this file to You under the Apache License, Version 2.0
- * (the "License"); you may not use this file except in compliance with
- * the License.  You may obtain a copy of the License at
- *
- *      http://www.apache.org/licenses/LICENSE-2.0
- *
- * Unless required by applicable law or agreed to in writing, software
- * distributed under the License is distributed on an "AS IS" BASIS,
- * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
- * See the License for the specific language governing permissions and
- * limitations under the License.
- */
-package org.apache.sentry.binding.hive.v2;
-
-import org.apache.hadoop.hive.conf.HiveConf;
-import org.apache.hadoop.hive.conf.HiveConf.ConfVars;
-import org.apache.hadoop.mapreduce.JobContext;
-import org.apache.hive.service.cli.HiveSQLException;
-import org.apache.hive.service.cli.session.HiveSessionHookContext;
-import org.apache.sentry.binding.hive.HiveAuthzBindingHookBaseV2;
-import org.apache.sentry.binding.hive.conf.HiveAuthzConf;
-
-import com.google.common.base.Joiner;
-
-/**
- * The session hook is the Session Hook for HiveAuthzBindingSessionHookV2, The configuration of
- * session will update for Hive Authz v2.
- */
-public class HiveAuthzBindingSessionHookV2 implements
-    org.apache.hive.service.cli.session.HiveSessionHook {
-  public static final String SCRATCH_DIR_PERMISSIONS = "700";
-  public static final String SEMANTIC_HOOK = HiveAuthzBindingHookV2.class.getName();
-  public static final String ACCESS_RESTRICT_LIST = Joiner.on(",").join(
-      ConfVars.SEMANTIC_ANALYZER_HOOK.varname, ConfVars.PREEXECHOOKS.varname,
-      ConfVars.SCRATCHDIR.varname, ConfVars.LOCALSCRATCHDIR.varname,
-      ConfVars.METASTOREURIS.varname, ConfVars.METASTORECONNECTURLKEY.varname,
-      ConfVars.HADOOPBIN.varname, ConfVars.HIVESESSIONID.varname, ConfVars.HIVEAUXJARS.varname,
-      ConfVars.SCRATCHDIRPERMISSION.varname,
-      ConfVars.HIVE_SECURITY_COMMAND_WHITELIST.varname,
-      ConfVars.HIVE_AUTHORIZATION_TASK_FACTORY.varname,
-      ConfVars.HIVE_CAPTURE_TRANSFORM_ENTITY.varname, HiveAuthzConf.HIVE_ACCESS_CONF_URL,
-      HiveAuthzConf.HIVE_SENTRY_CONF_URL, HiveAuthzConf.HIVE_ACCESS_SUBJECT_NAME,
-      HiveAuthzConf.HIVE_SENTRY_SUBJECT_NAME, HiveAuthzConf.SENTRY_ACTIVE_ROLE_SET);
-  public static final String WILDCARD_ACL_VALUE = "*";
-
-  /**
-   * The session hook for sentry authorization that sets the required session level configuration 1.
-   * Setup the sentry hooks - semantic, exec and filter hooks 2. Set additional config properties
-   * required for auth set HIVE_EXTENDED_ENITITY_CAPTURE = true set SCRATCHDIRPERMISSION = 700 3.
-   * Add sensitive config parameters to the config restrict list so that they can't be overridden by
-   * users
-   */
-  @Override
-  public void run(HiveSessionHookContext sessionHookContext) throws HiveSQLException {
-    // Add sentry hooks to the session configuration
-    HiveConf sessionConf = sessionHookContext.getSessionConf();
-
-    appendConfVar(sessionConf, ConfVars.SEMANTIC_ANALYZER_HOOK.varname, SEMANTIC_HOOK);
-    // enable sentry authorization V2
-    sessionConf.setBoolean(HiveConf.ConfVars.HIVE_AUTHORIZATION_ENABLED.varname, true);
-    sessionConf.setBoolean(HiveConf.ConfVars.HIVE_SERVER2_ENABLE_DOAS.varname, false);
-    sessionConf.set(HiveConf.ConfVars.HIVE_AUTHENTICATOR_MANAGER.varname,
-        "org.apache.hadoop.hive.ql.security.SessionStateUserAuthenticator");
-
-    // grant all privileges for table to its owner
-    sessionConf.setVar(ConfVars.HIVE_AUTHORIZATION_TABLE_OWNER_GRANTS, "");
-
-    // Enable compiler to capture transform URI referred in the query
-    sessionConf.setBoolVar(ConfVars.HIVE_CAPTURE_TRANSFORM_ENTITY, true);
-
-    // set security command list
-    HiveAuthzConf authzConf = HiveAuthzBindingHookBaseV2.loadAuthzConf(sessionConf);
-    String commandWhitelist =
-        authzConf.get(HiveAuthzConf.HIVE_SENTRY_SECURITY_COMMAND_WHITELIST,
-            HiveAuthzConf.HIVE_SENTRY_SECURITY_COMMAND_WHITELIST_DEFAULT);
-    sessionConf.setVar(ConfVars.HIVE_SECURITY_COMMAND_WHITELIST, commandWhitelist);
-
-    // set additional configuration properties required for auth
-    sessionConf.setVar(ConfVars.SCRATCHDIRPERMISSION, SCRATCH_DIR_PERMISSIONS);
-
-    // setup restrict list
-    sessionConf.addToRestrictList(ACCESS_RESTRICT_LIST);
-
-    // set user name
-    sessionConf.set(HiveAuthzConf.HIVE_ACCESS_SUBJECT_NAME, sessionHookContext.getSessionUser());
-    sessionConf.set(HiveAuthzConf.HIVE_SENTRY_SUBJECT_NAME, sessionHookContext.getSessionUser());
-
-    // Set MR ACLs to session user
-    updateJobACL(sessionConf, JobContext.JOB_ACL_VIEW_JOB, sessionHookContext.getSessionUser());
-    updateJobACL(sessionConf, JobContext.JOB_ACL_MODIFY_JOB, sessionHookContext.getSessionUser());
-  }
-
-  // Setup given sentry hooks
-  private void appendConfVar(HiveConf sessionConf, String confVar, String sentryConfVal) {
-    String currentValue = sessionConf.get(confVar, "").trim();
-    if (currentValue.isEmpty()) {
-      currentValue = sentryConfVal;
-    } else {
-      currentValue = sentryConfVal + "," + currentValue;
-    }
-    sessionConf.set(confVar, currentValue);
-  }
-
-  // Setup ACL to include session user
-  private void updateJobACL(HiveConf sessionConf, String aclName, String sessionUser) {
-    String aclString = sessionConf.get(aclName, "");
-    // An empty ACL, replace it with the user
-    if (aclString.isEmpty()) {
-      aclString = sessionUser;
-    } else {
-      // ACLs can start with a space if only groups are configured
-      if (aclString.startsWith(" ")) {
-        aclString = sessionUser + aclString;
-      } else {
-        // Do not replace the wildcard ACL, it would restrict access
-        boolean isWildcard = (aclString.contains(WILDCARD_ACL_VALUE) &&
-            aclString.trim().equals(WILDCARD_ACL_VALUE));
-        if (!isWildcard) {
-          aclString = sessionUser + "," + aclString;
-        }
-      }
-    }
-    sessionConf.set(aclName, aclString.trim());
-  }
-}
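
Note: the MapReduce ACL handling at the end of this hook follows a single merging rule; a condensed, standalone sketch of updateJobACL()'s behaviour (hypothetical class name, same logic as above):

    public class JobAclMergeSketch {
      // Same rule as updateJobACL(): empty ACL becomes the user, a groups-only ACL
      // (leading space) gets the user prepended, a wildcard ACL is left untouched,
      // any other ACL gets the user added in front.
      static String merge(String aclString, String sessionUser) {
        if (aclString.isEmpty()) {
          aclString = sessionUser;
        } else if (aclString.startsWith(" ")) {
          aclString = sessionUser + aclString;
        } else if (!aclString.trim().equals("*")) {
          aclString = sessionUser + "," + aclString;
        }
        return aclString.trim();
      }

      public static void main(String[] args) {
        System.out.println(merge("", "alice"));        // alice
        System.out.println(merge(" admins", "alice")); // alice admins
        System.out.println(merge("bob", "alice"));     // alice,bob
        System.out.println(merge("*", "alice"));       // * (wildcard must not be narrowed)
      }
    }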

http://git-wip-us.apache.org/repos/asf/sentry/blob/e358fde7/sentry-binding/sentry-binding-hive-v2/src/main/java/org/apache/sentry/binding/hive/v2/HiveAuthzPrivilegesMapV2.java
----------------------------------------------------------------------
diff --git a/sentry-binding/sentry-binding-hive-v2/src/main/java/org/apache/sentry/binding/hive/v2/HiveAuthzPrivilegesMapV2.java b/sentry-binding/sentry-binding-hive-v2/src/main/java/org/apache/sentry/binding/hive/v2/HiveAuthzPrivilegesMapV2.java
deleted file mode 100644
index 61278fe..0000000
--- a/sentry-binding/sentry-binding-hive-v2/src/main/java/org/apache/sentry/binding/hive/v2/HiveAuthzPrivilegesMapV2.java
+++ /dev/null
@@ -1,327 +0,0 @@
-/*
- * Licensed to the Apache Software Foundation (ASF) under one or more
- * contributor license agreements.  See the NOTICE file distributed with
- * this work for additional information regarding copyright ownership.
- * The ASF licenses this file to You under the Apache License, Version 2.0
- * (the "License"); you may not use this file except in compliance with
- * the License.  You may obtain a copy of the License at
- *
- *      http://www.apache.org/licenses/LICENSE-2.0
- *
- * Unless required by applicable law or agreed to in writing, software
- * distributed under the License is distributed on an "AS IS" BASIS,
- * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
- * See the License for the specific language governing permissions and
- * limitations under the License.
- */
-package org.apache.sentry.binding.hive.v2;
-
-import java.util.EnumSet;
-import java.util.HashMap;
-import java.util.Map;
-
-import org.apache.hadoop.hive.ql.plan.HiveOperation;
-import org.apache.sentry.binding.hive.authz.HiveAuthzPrivileges;
-import org.apache.sentry.binding.hive.authz.HiveAuthzPrivileges.HiveOperationScope;
-import org.apache.sentry.binding.hive.authz.HiveAuthzPrivileges.HiveOperationType;
-import org.apache.sentry.core.model.db.DBModelAction;
-import org.apache.sentry.core.model.db.DBModelAuthorizable.AuthorizableType;
-
-public class HiveAuthzPrivilegesMapV2 {
-  private static final Map <HiveOperation, HiveAuthzPrivileges> hiveAuthzStmtPrivMap =
-    new HashMap<HiveOperation, HiveAuthzPrivileges>();
-  static {
-    HiveAuthzPrivileges createServerPrivilege = new HiveAuthzPrivileges.AuthzPrivilegeBuilder().
-        addInputObjectPriviledge(AuthorizableType.Server, EnumSet.of(DBModelAction.CREATE)).
-        setOperationScope(HiveOperationScope.SERVER).
-        setOperationType(HiveOperationType.DDL).
-        build();
-
-    HiveAuthzPrivileges createMacroPrivilege = new HiveAuthzPrivileges.AuthzPrivilegeBuilder().
-        addOutputObjectPriviledge(AuthorizableType.Db, EnumSet.of(DBModelAction.CREATE)).
-        setOperationScope(HiveOperationScope.DATABASE).
-        setOperationType(HiveOperationType.DDL).
-        build();
-    HiveAuthzPrivileges dropMacroPrivilege = new HiveAuthzPrivileges.AuthzPrivilegeBuilder().
-        addOutputObjectPriviledge(AuthorizableType.Db, EnumSet.of(DBModelAction.DROP)).
-        setOperationScope(HiveOperationScope.DATABASE).
-        setOperationType(HiveOperationType.DDL).
-        build();
-
-    HiveAuthzPrivileges tableCreatePrivilege = new HiveAuthzPrivileges.AuthzPrivilegeBuilder().
-        addOutputObjectPriviledge(AuthorizableType.Db, EnumSet.of(DBModelAction.CREATE)).
-        addInputObjectPriviledge(AuthorizableType.URI, EnumSet.of(DBModelAction.ALL)).//TODO: make it optional
-        addOutputObjectPriviledge(AuthorizableType.URI, EnumSet.of(DBModelAction.ALL)).
-        setOperationScope(HiveOperationScope.DATABASE).
-        setOperationType(HiveOperationType.DDL).
-        build();
-    HiveAuthzPrivileges dropDbPrivilege = new HiveAuthzPrivileges.AuthzPrivilegeBuilder().
-        addInputObjectPriviledge(AuthorizableType.Db, EnumSet.of(DBModelAction.DROP)).
-        setOperationScope(HiveOperationScope.DATABASE).
-        setOperationType(HiveOperationType.DDL).
-        build();
-    HiveAuthzPrivileges alterDbPrivilege = new HiveAuthzPrivileges.AuthzPrivilegeBuilder().
-        addOutputObjectPriviledge(AuthorizableType.Db, EnumSet.of(DBModelAction.ALTER)).
-        setOperationScope(HiveOperationScope.DATABASE).
-        setOperationType(HiveOperationType.DDL).
-        build();
-
-    HiveAuthzPrivileges alterTablePrivilege = new HiveAuthzPrivileges.AuthzPrivilegeBuilder().
-        addOutputObjectPriviledge(AuthorizableType.Table, EnumSet.of(DBModelAction.ALTER)).
-        setOperationScope(HiveOperationScope.TABLE).
-        setOperationType(HiveOperationType.DDL).
-        build();
-    HiveAuthzPrivileges dropTablePrivilege = new HiveAuthzPrivileges.AuthzPrivilegeBuilder().
-        addInputObjectPriviledge(AuthorizableType.Table, EnumSet.of(DBModelAction.DROP)).
-        setOperationScope(HiveOperationScope.TABLE).
-        setOperationType(HiveOperationType.DDL).
-        build();
-    HiveAuthzPrivileges indexTablePrivilege = new HiveAuthzPrivileges.AuthzPrivilegeBuilder().
-        addInputObjectPriviledge(AuthorizableType.Table, EnumSet.of(DBModelAction.INDEX)).
-        setOperationScope(HiveOperationScope.TABLE).
-        setOperationType(HiveOperationType.DDL).
-        build();
-
-    HiveAuthzPrivileges alterTableAndUriPrivilege = new HiveAuthzPrivileges.AuthzPrivilegeBuilder().
-        addOutputObjectPriviledge(AuthorizableType.Table, EnumSet.of(DBModelAction.ALTER)).
-        addOutputObjectPriviledge(AuthorizableType.URI, EnumSet.of(DBModelAction.ALL)).
-        setOperationScope(HiveOperationScope.TABLE).
-        setOperationType(HiveOperationType.DDL).
-        build();
-    HiveAuthzPrivileges addPartitionPrivilege = new HiveAuthzPrivileges.AuthzPrivilegeBuilder().
-        addOutputObjectPriviledge(AuthorizableType.Table, EnumSet.of(DBModelAction.ALTER)).
-        //TODO: Uncomment this if we want to make it more restrictive
-        //addInputObjectPriviledge(AuthorizableType.Table, EnumSet.of(DBModelAction.CREATE)).
-        addInputObjectPriviledge(AuthorizableType.URI, EnumSet.of(DBModelAction.SELECT)).//TODO: make it optional
-        addOutputObjectPriviledge(AuthorizableType.URI, EnumSet.of(DBModelAction.ALL)).
-        setOperationScope(HiveOperationScope.TABLE).
-        setOperationType(HiveOperationType.DDL).
-        build();
-    HiveAuthzPrivileges dropPartitionPrivilege = new HiveAuthzPrivileges.AuthzPrivilegeBuilder().
-        addInputObjectPriviledge(AuthorizableType.Table, EnumSet.of(DBModelAction.ALTER)).
-        addInputObjectPriviledge(AuthorizableType.Table, EnumSet.of(DBModelAction.DROP)).
-        setOperationScope(HiveOperationScope.TABLE).
-        setOperationType(HiveOperationType.DDL).
-        build();
-
-    HiveAuthzPrivileges alterTableRenamePrivilege = new HiveAuthzPrivileges.AuthzPrivilegeBuilder().
-        addInputObjectPriviledge(AuthorizableType.Table, EnumSet.of(DBModelAction.ALTER)).
-        addInputObjectPriviledge(AuthorizableType.Db, EnumSet.of(DBModelAction.CREATE)).
-        setOperationScope(HiveOperationScope.DATABASE).
-        setOperationType(HiveOperationType.DDL).
-        build();
-
-    HiveAuthzPrivileges alterPartPrivilege = new HiveAuthzPrivileges.AuthzPrivilegeBuilder().
-        addInputObjectPriviledge(AuthorizableType.Table, EnumSet.of(DBModelAction.ALTER)).
-        setOperationScope(HiveOperationScope.TABLE).
-        setOperationType(HiveOperationType.INFO).
-        build();
-
-    /* Currently Hive treats select/insert/analyze as Query
-     * select = select on table
-     * insert = insert on table /all on uri
-     * analyze = select + insert on table
-     */
-    HiveAuthzPrivileges tableQueryPrivilege = new HiveAuthzPrivileges.AuthzPrivilegeBuilder().
-        addInputObjectPriviledge(AuthorizableType.Table, EnumSet.of(DBModelAction.SELECT)).
-        addInputObjectPriviledge(AuthorizableType.URI, EnumSet.of(DBModelAction.ALL)).
-        addInputObjectPriviledge(AuthorizableType.Column, EnumSet.of(DBModelAction.SELECT)).
-        addOutputObjectPriviledge(AuthorizableType.Table, EnumSet.of(DBModelAction.INSERT)).
-        addOutputObjectPriviledge(AuthorizableType.URI, EnumSet.of(DBModelAction.ALL)).
-        setOperationScope(HiveOperationScope.TABLE).
-        setOperationType(HiveOperationType.QUERY).
-        build();
-
-    HiveAuthzPrivileges tableLoadPrivilege = new HiveAuthzPrivileges.AuthzPrivilegeBuilder().
-        addInputObjectPriviledge(AuthorizableType.URI, EnumSet.of(DBModelAction.ALL)).
-        addOutputObjectPriviledge(AuthorizableType.Table, EnumSet.of(DBModelAction.INSERT)).
-        setOperationScope(HiveOperationScope.TABLE).
-        setOperationType(HiveOperationType.DATA_LOAD).
-        build();
-
-    HiveAuthzPrivileges tableExportPrivilege = new HiveAuthzPrivileges.AuthzPrivilegeBuilder().
-        addInputObjectPriviledge(AuthorizableType.Table, EnumSet.of(DBModelAction.SELECT)).
-        addOutputObjectPriviledge(AuthorizableType.URI, EnumSet.of(DBModelAction.ALL)).
-        setOperationScope(HiveOperationScope.TABLE).
-        setOperationType(HiveOperationType.DATA_UNLOAD).
-        build();
-
-    HiveAuthzPrivileges tableMetaDataPrivilege = new HiveAuthzPrivileges.AuthzPrivilegeBuilder().
-        addInputObjectPriviledge(AuthorizableType.Table, EnumSet.of(DBModelAction.SELECT, DBModelAction.INSERT)).
-        setOperationScope(HiveOperationScope.TABLE).
-        setOperationType(HiveOperationType.INFO).
-        build();
-
-    // Metadata statements which only require column-level privileges.
-    HiveAuthzPrivileges columnMetaDataPrivilege = new HiveAuthzPrivileges.AuthzPrivilegeBuilder().
-        addInputObjectPriviledge(AuthorizableType.Column, EnumSet.of(DBModelAction.SELECT, DBModelAction.INSERT)).
-        setOperationScope(HiveOperationScope.COLUMN).
-        setOperationType(HiveOperationType.INFO).
-        build();
-
-    HiveAuthzPrivileges dbImportPrivilege = new HiveAuthzPrivileges.AuthzPrivilegeBuilder().
-        addOutputObjectPriviledge(AuthorizableType.Db, EnumSet.of(DBModelAction.CREATE)).
-        addInputObjectPriviledge(AuthorizableType.URI, EnumSet.of(DBModelAction.ALL)).
-        setOperationScope(HiveOperationScope.DATABASE).
-        setOperationType(HiveOperationType.DDL).
-        build();
-
-    HiveAuthzPrivileges createViewPrivilege = new HiveAuthzPrivileges.AuthzPrivilegeBuilder().
-    addOutputObjectPriviledge(AuthorizableType.Db, EnumSet.of(DBModelAction.CREATE)).
-    addInputObjectPriviledge(AuthorizableType.Table, EnumSet.of(DBModelAction.SELECT)).
-    addInputObjectPriviledge(AuthorizableType.URI, EnumSet.of(DBModelAction.ALL)).//TODO: This should not be required
-    setOperationScope(HiveOperationScope.DATABASE).
-    setOperationType(HiveOperationType.DDL).
-    build();
-
-    HiveAuthzPrivileges dbMetaDataPrivilege = new HiveAuthzPrivileges.AuthzPrivilegeBuilder().
-      addInputObjectPriviledge(AuthorizableType.Db, EnumSet.of(DBModelAction.SELECT, DBModelAction.INSERT)).
-      setOperationScope(HiveOperationScope.DATABASE).
-      setOperationType(HiveOperationType.INFO).
-      build();
-
-    HiveAuthzPrivileges tableLockPrivilege = new HiveAuthzPrivileges.AuthzPrivilegeBuilder()
-        .addInputObjectPriviledge(AuthorizableType.Table, EnumSet.of(DBModelAction.LOCK)).
-        setOperationScope(HiveOperationScope.TABLE).
-        setOperationType(HiveOperationType.DML).
-        build();
-
-    HiveAuthzPrivileges dbLockPrivilege = new HiveAuthzPrivileges.AuthzPrivilegeBuilder()
-        .addInputObjectPriviledge(AuthorizableType.Db, EnumSet.of(DBModelAction.LOCK))
-        .setOperationScope(HiveOperationScope.DATABASE).setOperationType(HiveOperationType.DML)
-        .build();
-
-    HiveAuthzPrivileges functionPrivilege = new HiveAuthzPrivileges.AuthzPrivilegeBuilder().
-        addInputObjectPriviledge(AuthorizableType.URI, EnumSet.of(DBModelAction.ALL)).
-        addOutputObjectPriviledge(AuthorizableType.URI, EnumSet.of(DBModelAction.ALL)).
-        setOperationScope(HiveOperationScope.FUNCTION).
-        setOperationType(HiveOperationType.DATA_LOAD).
-        build();
-
-    HiveAuthzPrivileges anyPrivilege = new HiveAuthzPrivileges.AuthzPrivilegeBuilder().
-        addInputObjectPriviledge(AuthorizableType.Column, EnumSet.of(DBModelAction.SELECT,
-            DBModelAction.INSERT, DBModelAction.ALTER, DBModelAction.CREATE, DBModelAction.DROP,
-            DBModelAction.INDEX, DBModelAction.LOCK)).
-        setOperationScope(HiveOperationScope.CONNECT).
-        setOperationType(HiveOperationType.QUERY).
-        build();
-
-    HiveAuthzPrivileges truncateTablePrivilege = new HiveAuthzPrivileges.AuthzPrivilegeBuilder().
-        addOutputObjectPriviledge(AuthorizableType.Table, EnumSet.of(DBModelAction.DROP)).
-        setOperationScope(HiveOperationScope.TABLE).
-        setOperationType(HiveOperationType.DDL).
-        build();
-
-    hiveAuthzStmtPrivMap.put(HiveOperation.CREATEDATABASE, createServerPrivilege);
-    hiveAuthzStmtPrivMap.put(HiveOperation.DROPDATABASE, dropDbPrivilege);
-    hiveAuthzStmtPrivMap.put(HiveOperation.CREATETABLE, tableCreatePrivilege);
-    hiveAuthzStmtPrivMap.put(HiveOperation.ALTERDATABASE, alterDbPrivilege);
-    hiveAuthzStmtPrivMap.put(HiveOperation.ALTERDATABASE_OWNER, alterDbPrivilege);
-
-    hiveAuthzStmtPrivMap.put(HiveOperation.CREATEMACRO, createMacroPrivilege);
-    hiveAuthzStmtPrivMap.put(HiveOperation.DROPMACRO, dropMacroPrivilege);
-
-    hiveAuthzStmtPrivMap.put(HiveOperation.DROPTABLE, dropTablePrivilege);
-    hiveAuthzStmtPrivMap.put(HiveOperation.CREATEVIEW, createViewPrivilege);
-    hiveAuthzStmtPrivMap.put(HiveOperation.DROPVIEW, dropTablePrivilege);
-    hiveAuthzStmtPrivMap.put(HiveOperation.CREATEINDEX, indexTablePrivilege);
-    hiveAuthzStmtPrivMap.put(HiveOperation.DROPINDEX, indexTablePrivilege);
-    hiveAuthzStmtPrivMap.put(HiveOperation.ALTERINDEX_PROPS, indexTablePrivilege);//TODO: Needs test case
-    hiveAuthzStmtPrivMap.put(HiveOperation.ALTERINDEX_REBUILD, indexTablePrivilege);
-
-
-    hiveAuthzStmtPrivMap.put(HiveOperation.ALTERTABLE_PROPERTIES, alterTablePrivilege);
-    hiveAuthzStmtPrivMap.put(HiveOperation.ALTERTABLE_SERDEPROPERTIES, alterTablePrivilege);
-    hiveAuthzStmtPrivMap.put(HiveOperation.ALTERTABLE_CLUSTER_SORT, alterTablePrivilege);
-    hiveAuthzStmtPrivMap.put(HiveOperation.ALTERTABLE_FILEFORMAT, alterTablePrivilege);
-    hiveAuthzStmtPrivMap.put(HiveOperation.ALTERTABLE_TOUCH, alterTablePrivilege);
-
-    hiveAuthzStmtPrivMap.put(HiveOperation.ALTERTABLE_RENAMECOL, alterTablePrivilege);
-    hiveAuthzStmtPrivMap.put(HiveOperation.ALTERTABLE_ADDCOLS, alterTablePrivilege);
-    hiveAuthzStmtPrivMap.put(HiveOperation.ALTERTABLE_REPLACECOLS, alterTablePrivilege);
-    hiveAuthzStmtPrivMap.put(HiveOperation.ALTERTABLE_PARTCOLTYPE, alterPartPrivilege);
-    hiveAuthzStmtPrivMap.put(HiveOperation.ALTERTABLE_BUCKETNUM, alterPartPrivilege);
-    hiveAuthzStmtPrivMap.put(HiveOperation.ALTERPARTITION_BUCKETNUM, alterPartPrivilege);
-
-    hiveAuthzStmtPrivMap.put(HiveOperation.ALTERTABLE_RENAMEPART, alterTablePrivilege);
-    hiveAuthzStmtPrivMap.put(HiveOperation.ALTERTABLE_ARCHIVE, alterTablePrivilege);
-    hiveAuthzStmtPrivMap.put(HiveOperation.ALTERTABLE_UNARCHIVE, alterTablePrivilege);
-    hiveAuthzStmtPrivMap.put(HiveOperation.ALTERPARTITION_FILEFORMAT, alterTablePrivilege);
-    hiveAuthzStmtPrivMap.put(HiveOperation.ALTERPARTITION_SERDEPROPERTIES, alterTablePrivilege);
-
-    hiveAuthzStmtPrivMap.put(HiveOperation.ALTERTABLE_MERGEFILES, alterTablePrivilege);
-    hiveAuthzStmtPrivMap.put(HiveOperation.ALTERTABLE_SKEWED, alterTablePrivilege);
-
-    hiveAuthzStmtPrivMap.put(HiveOperation.ALTERPARTITION_SERIALIZER, alterTablePrivilege);
-    hiveAuthzStmtPrivMap.put(HiveOperation.ALTERPARTITION_MERGEFILES, alterTablePrivilege);
-
-    hiveAuthzStmtPrivMap.put(HiveOperation.ALTERVIEW_PROPERTIES, alterTablePrivilege);
-
-    hiveAuthzStmtPrivMap.put(HiveOperation.ALTERTABLE_DROPPARTS, dropPartitionPrivilege);
-    hiveAuthzStmtPrivMap.put(HiveOperation.ALTERTABLE_ADDPARTS, addPartitionPrivilege);
-    hiveAuthzStmtPrivMap.put(HiveOperation.ALTERTABLE_RENAME, alterTableRenamePrivilege);
-
-    hiveAuthzStmtPrivMap.put(HiveOperation.ALTERTABLE_SERIALIZER, alterTableAndUriPrivilege);
-    hiveAuthzStmtPrivMap.put(HiveOperation.ALTERTABLE_LOCATION, alterTableAndUriPrivilege);
-    hiveAuthzStmtPrivMap.put(HiveOperation.ALTERPARTITION_LOCATION, alterTableAndUriPrivilege);
-    hiveAuthzStmtPrivMap.put(HiveOperation.ALTERTBLPART_SKEWED_LOCATION, alterTableAndUriPrivilege);//TODO: Needs test case
-
-    // MSCK REPAIR TABLE <table name> / ALTER TABLE RECOVER PARTITIONS <tableName>
-    hiveAuthzStmtPrivMap.put(HiveOperation.MSCK, alterTablePrivilege);
-
-    hiveAuthzStmtPrivMap.put(HiveOperation.ANALYZE_TABLE, tableQueryPrivilege);
-
-    // SWITCHDATABASE
-    hiveAuthzStmtPrivMap.put(HiveOperation.SWITCHDATABASE, anyPrivilege);
-
-    // CREATEFUNCTION
-    // DROPFUNCTION
-    hiveAuthzStmtPrivMap.put(HiveOperation.CREATEFUNCTION, functionPrivilege);
-    hiveAuthzStmtPrivMap.put(HiveOperation.DROPFUNCTION, functionPrivilege);
-
-    // SHOWCOLUMNS
-    hiveAuthzStmtPrivMap.put(HiveOperation.SHOWCOLUMNS, columnMetaDataPrivilege);
-
-    // SHOWDATABASES
-    // SHOWTABLES
-    hiveAuthzStmtPrivMap.put(HiveOperation.SHOW_TABLESTATUS, tableMetaDataPrivilege);
-    hiveAuthzStmtPrivMap.put(HiveOperation.SHOW_TBLPROPERTIES, tableMetaDataPrivilege);
-    hiveAuthzStmtPrivMap.put(HiveOperation.SHOW_CREATETABLE, tableMetaDataPrivilege);
-    // SHOWFUNCTIONS
-    hiveAuthzStmtPrivMap.put(HiveOperation.SHOWINDEXES, tableMetaDataPrivilege);
-    hiveAuthzStmtPrivMap.put(HiveOperation.SHOWPARTITIONS, tableMetaDataPrivilege);
-    // SHOWLOCKS
-    hiveAuthzStmtPrivMap.put(HiveOperation.EXPORT, tableExportPrivilege);
-    hiveAuthzStmtPrivMap.put(HiveOperation.IMPORT, dbImportPrivilege);
-    hiveAuthzStmtPrivMap.put(HiveOperation.LOAD, tableLoadPrivilege);
-    hiveAuthzStmtPrivMap.put(HiveOperation.LOCKTABLE, tableLockPrivilege);
-    hiveAuthzStmtPrivMap.put(HiveOperation.UNLOCKTABLE, tableLockPrivilege);
-    hiveAuthzStmtPrivMap.put(HiveOperation.LOCKDB, dbLockPrivilege);
-    hiveAuthzStmtPrivMap.put(HiveOperation.UNLOCKDB, dbLockPrivilege);
-    // CREATEROLE
-    // DROPROLE
-    // GRANT_PRIVILEGE
-    // REVOKE_PRIVILEGE
-    // SHOW_GRANT
-    // GRANT_ROLE
-    // REVOKE_ROLE
-    // SHOW_ROLE_GRANT
-    hiveAuthzStmtPrivMap.put(HiveOperation.CREATETABLE_AS_SELECT,
-        new HiveAuthzPrivileges.AuthzPrivilegeBuilder().
-        addInputObjectPriviledge(AuthorizableType.Table, EnumSet.of(DBModelAction.SELECT)).
-        addInputObjectPriviledge(AuthorizableType.Column, EnumSet.of(DBModelAction.SELECT)).
-        addInputObjectPriviledge(AuthorizableType.URI,EnumSet.of(DBModelAction.ALL)).
-        addOutputObjectPriviledge(AuthorizableType.Db, EnumSet.of(DBModelAction.CREATE)).
-        setOperationScope(HiveOperationScope.DATABASE).
-        setOperationType(HiveOperationType.DDL).
-        build());
-    hiveAuthzStmtPrivMap.put(HiveOperation.QUERY, tableQueryPrivilege);
-    hiveAuthzStmtPrivMap.put(HiveOperation.DESCDATABASE, dbMetaDataPrivilege);
-    hiveAuthzStmtPrivMap.put(HiveOperation.DESCTABLE, tableMetaDataPrivilege);
-    hiveAuthzStmtPrivMap.put(HiveOperation.TRUNCATETABLE, truncateTablePrivilege);
-  }
-
-  public static HiveAuthzPrivileges getHiveAuthzPrivileges(HiveOperation hiveStmtOp) {
-    return hiveAuthzStmtPrivMap.get(hiveStmtOp);
-  }
-}
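
Note: callers such as the binding hook above looked privileges up through getHiveAuthzPrivileges() and treated a null result as "no Sentry privilege requirements registered for this statement". A small illustrative usage sketch (class name is hypothetical):

    import org.apache.hadoop.hive.ql.plan.HiveOperation;
    import org.apache.sentry.binding.hive.authz.HiveAuthzPrivileges;
    import org.apache.sentry.binding.hive.v2.HiveAuthzPrivilegesMapV2;

    public class PrivilegeLookupSketch {
      public static void main(String[] args) {
        // QUERY is registered in the map above, so a privilege template comes back.
        HiveAuthzPrivileges queryPrivs =
            HiveAuthzPrivilegesMapV2.getHiveAuthzPrivileges(HiveOperation.QUERY);
        System.out.println("QUERY mapped: " + (queryPrivs != null));     // true

        // CREATEROLE is deliberately absent; callers skip authorization for it here.
        HiveAuthzPrivileges rolePrivs =
            HiveAuthzPrivilegesMapV2.getHiveAuthzPrivileges(HiveOperation.CREATEROLE);
        System.out.println("CREATEROLE mapped: " + (rolePrivs != null)); // false
      }
    }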

http://git-wip-us.apache.org/repos/asf/sentry/blob/e358fde7/sentry-binding/sentry-binding-hive-v2/src/main/java/org/apache/sentry/binding/hive/v2/SentryAuthorizerFactory.java
----------------------------------------------------------------------
diff --git a/sentry-binding/sentry-binding-hive-v2/src/main/java/org/apache/sentry/binding/hive/v2/SentryAuthorizerFactory.java b/sentry-binding/sentry-binding-hive-v2/src/main/java/org/apache/sentry/binding/hive/v2/SentryAuthorizerFactory.java
deleted file mode 100644
index 8b56c49..0000000
--- a/sentry-binding/sentry-binding-hive-v2/src/main/java/org/apache/sentry/binding/hive/v2/SentryAuthorizerFactory.java
+++ /dev/null
@@ -1,164 +0,0 @@
-/**
- * Licensed to the Apache Software Foundation (ASF) under one
- * or more contributor license agreements.  See the NOTICE file
- * distributed with this work for additional information
- * regarding copyright ownership.  The ASF licenses this file
- * to you under the Apache License, Version 2.0 (the
- * "License"); you may not use this file except in compliance
- * with the License.  You may obtain a copy of the License at
- *
- *     http://www.apache.org/licenses/LICENSE-2.0
- *
- * Unless required by applicable law or agreed to in writing, software
- * distributed under the License is distributed on an "AS IS" BASIS,
- * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
- * See the License for the specific language governing permissions and
- * limitations under the License.
- */
-package org.apache.sentry.binding.hive.v2;
-
-import org.apache.hadoop.hive.conf.HiveConf;
-import org.apache.hadoop.hive.conf.HiveConf.ConfVars;
-import org.apache.hadoop.hive.ql.security.HiveAuthenticationProvider;
-import org.apache.hadoop.hive.ql.security.authorization.plugin.HiveAuthorizer;
-import org.apache.hadoop.hive.ql.security.authorization.plugin.HiveAuthorizerFactory;
-import org.apache.hadoop.hive.ql.security.authorization.plugin.HiveAuthzPluginException;
-import org.apache.hadoop.hive.ql.security.authorization.plugin.HiveAuthzSessionContext;
-import org.apache.hadoop.hive.ql.security.authorization.plugin.HiveAuthzSessionContext.CLIENT_TYPE;
-import org.apache.hadoop.hive.ql.security.authorization.plugin.HiveMetastoreClientFactory;
-import org.apache.sentry.binding.hive.HiveAuthzBindingHookBaseV2;
-import org.apache.sentry.binding.hive.conf.HiveAuthzConf;
-import org.apache.sentry.binding.hive.v2.authorizer.DefaultSentryAccessController;
-import org.apache.sentry.binding.hive.v2.authorizer.DefaultSentryValidator;
-import org.apache.sentry.binding.hive.v2.authorizer.SentryHiveAccessController;
-import org.apache.sentry.binding.hive.v2.authorizer.SentryHiveAuthorizationValidator;
-import org.apache.sentry.binding.hive.v2.authorizer.SentryHiveAuthorizer;
-
-import com.google.common.annotations.VisibleForTesting;
-
-public class SentryAuthorizerFactory implements HiveAuthorizerFactory {
-  public static final String HIVE_SENTRY_ACCESS_CONTROLLER =
-      "hive.security.sentry.access.controller";
-  public static final String HIVE_SENTRY_AUTHORIZATION_CONTROLLER =
-      "hive.security.sentry.authorization.controller";
-  private HiveAuthzConf authzConf;
-
-  @Override
-  public HiveAuthorizer createHiveAuthorizer(HiveMetastoreClientFactory metastoreClientFactory,
-      HiveConf conf, HiveAuthenticationProvider authenticator, HiveAuthzSessionContext ctx)
-          throws HiveAuthzPluginException {
-    HiveAuthzSessionContext sessionContext;
-    try {
-      this.authzConf = HiveAuthzBindingHookBaseV2.loadAuthzConf(conf);
-      sessionContext = applyTestSettings(ctx, conf);
-      assertHiveCliAuthDisabled(conf, sessionContext);
-    } catch (Exception e) {
-      throw new HiveAuthzPluginException(e);
-    }
-    SentryHiveAccessController accessController =
-        getAccessController(conf, authzConf, authenticator, sessionContext);
-    SentryHiveAuthorizationValidator authzValidator =
-        getAuthzValidator(conf, authzConf, authenticator);
-
-    return new SentryHiveAuthorizer(accessController, authzValidator);
-  }
-
-  private HiveAuthzSessionContext applyTestSettings(HiveAuthzSessionContext ctx, HiveConf conf) {
-    if (conf.getBoolVar(ConfVars.HIVE_TEST_AUTHORIZATION_SQLSTD_HS2_MODE)
-        && ctx.getClientType() == CLIENT_TYPE.HIVECLI) {
-      // create new session ctx object with HS2 as client type
-      HiveAuthzSessionContext.Builder ctxBuilder = new HiveAuthzSessionContext.Builder(ctx);
-      ctxBuilder.setClientType(CLIENT_TYPE.HIVESERVER2);
-      return ctxBuilder.build();
-    }
-    return ctx;
-  }
-
-  private void assertHiveCliAuthDisabled(HiveConf conf, HiveAuthzSessionContext ctx)
-      throws HiveAuthzPluginException {
-    if (ctx.getClientType() == CLIENT_TYPE.HIVECLI
-        && conf.getBoolVar(ConfVars.HIVE_AUTHORIZATION_ENABLED)) {
-      throw new HiveAuthzPluginException(
-          "SQL standards based authorization should not be enabled from hive cli"
-              + "Instead the use of storage based authorization in hive metastore is reccomended. Set "
-              + ConfVars.HIVE_AUTHORIZATION_ENABLED.varname + "=false to disable authz within cli");
-    }
-  }
-
-  /**
-   * just for testing
-   */
-  @VisibleForTesting
-  protected HiveAuthorizer createHiveAuthorizer(HiveMetastoreClientFactory metastoreClientFactory,
-      HiveConf conf, HiveAuthzConf authzConf, HiveAuthenticationProvider authenticator,
-      HiveAuthzSessionContext ctx) throws HiveAuthzPluginException {
-    SentryHiveAccessController accessController =
-        getAccessController(conf, authzConf, authenticator, ctx);
-    SentryHiveAuthorizationValidator authzValidator =
-        getAuthzValidator(conf, authzConf, authenticator);
-
-    return new SentryHiveAuthorizer(accessController, authzValidator);
-  }
-
-  /**
-   * Get instance of SentryAccessController from configuration
-   * Default return DefaultSentryAccessController
-   *
-   * @param conf
-   * @param authzConf
-   * @param hiveAuthzBinding
-   * @param authenticator
-   * @throws HiveAuthzPluginException
-   */
-  public static SentryHiveAccessController getAccessController(HiveConf conf,
-      HiveAuthzConf authzConf, HiveAuthenticationProvider authenticator,
-      HiveAuthzSessionContext ctx) throws HiveAuthzPluginException {
-    Class<? extends SentryHiveAccessController> clazz =
-        conf.getClass(HIVE_SENTRY_ACCESS_CONTROLLER, DefaultSentryAccessController.class,
-            SentryHiveAccessController.class);
-
-    if (clazz == null) {
-      // should not happen as default value is set
-      throw new HiveAuthzPluginException("Configuration value " + HIVE_SENTRY_ACCESS_CONTROLLER
-          + " is not set to valid SentryAccessController subclass");
-    }
-
-    try {
-      return new DefaultSentryAccessController(conf, authzConf, authenticator, ctx);
-    } catch (Exception e) {
-      throw new HiveAuthzPluginException(e);
-    }
-
-  }
-
-  /**
-   * Get instance of SentryAuthorizationValidator from configuration
-   * Default return DefaultSentryAuthorizationValidator
-   *
-   * @param conf
-   * @param authzConf
-   * @param authenticator
-   * @throws HiveAuthzPluginException
-   */
-  public static SentryHiveAuthorizationValidator getAuthzValidator(HiveConf conf,
-      HiveAuthzConf authzConf, HiveAuthenticationProvider authenticator)
-      throws HiveAuthzPluginException {
-    Class<? extends SentryHiveAuthorizationValidator> clazz =
-        conf.getClass(HIVE_SENTRY_AUTHORIZATION_CONTROLLER, DefaultSentryValidator.class,
-            SentryHiveAuthorizationValidator.class);
-
-    if (clazz == null) {
-      // should not happen as default value is set
-      throw new HiveAuthzPluginException("Configuration value "
-          + HIVE_SENTRY_AUTHORIZATION_CONTROLLER
-          + " is not set to valid SentryAuthorizationValidator subclass");
-    }
-
-    try {
-      return new DefaultSentryValidator(conf, authzConf, authenticator);
-    } catch (Exception e) {
-      throw new HiveAuthzPluginException(e);
-    }
-
-  }
-}
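
Note: the controller and validator classes are resolved with Hadoop's Configuration.getClass(), which falls back to the supplied default when the property is unset, which is why the factory's null checks are marked "should not happen". A minimal sketch of that resolution pattern (interface and class names below are illustrative only):

    import org.apache.hadoop.conf.Configuration;

    public class PluginResolutionSketch {
      interface Controller {}
      static class DefaultController implements Controller {}

      public static void main(String[] args) {
        Configuration conf = new Configuration();
        // Returns DefaultController unless the property names a Controller subclass.
        Class<? extends Controller> clazz =
            conf.getClass("hive.security.sentry.access.controller",
                DefaultController.class, Controller.class);
        System.out.println(clazz.getName());
      }
    }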

http://git-wip-us.apache.org/repos/asf/sentry/blob/e358fde7/sentry-binding/sentry-binding-hive-v2/src/main/java/org/apache/sentry/binding/hive/v2/SentryHiveAuthorizationTaskFactoryImplV2.java
----------------------------------------------------------------------
diff --git a/sentry-binding/sentry-binding-hive-v2/src/main/java/org/apache/sentry/binding/hive/v2/SentryHiveAuthorizationTaskFactoryImplV2.java b/sentry-binding/sentry-binding-hive-v2/src/main/java/org/apache/sentry/binding/hive/v2/SentryHiveAuthorizationTaskFactoryImplV2.java
deleted file mode 100644
index 2d4bf64..0000000
--- a/sentry-binding/sentry-binding-hive-v2/src/main/java/org/apache/sentry/binding/hive/v2/SentryHiveAuthorizationTaskFactoryImplV2.java
+++ /dev/null
@@ -1,64 +0,0 @@
-/**
- * Licensed to the Apache Software Foundation (ASF) under one or more contributor license
- * agreements. See the NOTICE file distributed with this work for additional information regarding
- * copyright ownership. The ASF licenses this file to you under the Apache License, Version 2.0 (the
- * "License"); you may not use this file except in compliance with the License. You may obtain a
- * copy of the License at
- *
- * http://www.apache.org/licenses/LICENSE-2.0
- *
- * Unless required by applicable law or agreed to in writing, software distributed under the License
- * is distributed on an "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express
- * or implied. See the License for the specific language governing permissions and limitations under
- * the License.
- */
-package org.apache.sentry.binding.hive.v2;
-
-import org.apache.hadoop.hive.conf.HiveConf;
-import org.apache.hadoop.hive.ql.exec.SentryHivePrivilegeObjectDesc;
-import org.apache.hadoop.hive.ql.metadata.Hive;
-import org.apache.hadoop.hive.ql.parse.ASTNode;
-import org.apache.hadoop.hive.ql.parse.BaseSemanticAnalyzer;
-import org.apache.hadoop.hive.ql.parse.DDLSemanticAnalyzer;
-import org.apache.hadoop.hive.ql.parse.HiveParser;
-import org.apache.hadoop.hive.ql.parse.SemanticException;
-import org.apache.hadoop.hive.ql.parse.authorization.HiveAuthorizationTaskFactoryImpl;
-import org.apache.hadoop.hive.ql.plan.PrivilegeObjectDesc;
-
-public class SentryHiveAuthorizationTaskFactoryImplV2 extends HiveAuthorizationTaskFactoryImpl {
-
-  public SentryHiveAuthorizationTaskFactoryImplV2(HiveConf conf, Hive db) {
-    super(conf, db);
-  }
-
-  @Override
-  protected PrivilegeObjectDesc parsePrivObject(ASTNode ast) throws SemanticException {
-    SentryHivePrivilegeObjectDesc subject = new SentryHivePrivilegeObjectDesc();
-    ASTNode child = (ASTNode) ast.getChild(0);
-    ASTNode gchild = (ASTNode) child.getChild(0);
-    if (child.getType() == HiveParser.TOK_TABLE_TYPE) {
-      subject.setTable(true);
-      String[] qualified = BaseSemanticAnalyzer.getQualifiedTableName(gchild);
-      subject.setObject(BaseSemanticAnalyzer.getDotName(qualified));
-    } else if (child.getType() == HiveParser.TOK_URI_TYPE) {
-      subject.setUri(true);
-      subject.setObject(gchild.getText());
-    } else if (child.getType() == HiveParser.TOK_SERVER_TYPE) {
-      subject.setServer(true);
-      subject.setObject(gchild.getText());
-    } else {
-      subject.setTable(false);
-      subject.setObject(BaseSemanticAnalyzer.unescapeIdentifier(gchild.getText()));
-    }
-    // if partition spec node is present, set partition spec
-    for (int i = 1; i < child.getChildCount(); i++) {
-      gchild = (ASTNode) child.getChild(i);
-      if (gchild.getType() == HiveParser.TOK_PARTSPEC) {
-        subject.setPartSpec(DDLSemanticAnalyzer.getPartSpec(gchild));
-      } else if (gchild.getType() == HiveParser.TOK_TABCOLNAME) {
-        subject.setColumns(BaseSemanticAnalyzer.getColumnNames(gchild));
-      }
-    }
-    return subject;
-  }
-}

http://git-wip-us.apache.org/repos/asf/sentry/blob/e358fde7/sentry-binding/sentry-binding-hive-v2/src/main/java/org/apache/sentry/binding/hive/v2/SentryHivePrivilegeObject.java
----------------------------------------------------------------------
diff --git a/sentry-binding/sentry-binding-hive-v2/src/main/java/org/apache/sentry/binding/hive/v2/SentryHivePrivilegeObject.java b/sentry-binding/sentry-binding-hive-v2/src/main/java/org/apache/sentry/binding/hive/v2/SentryHivePrivilegeObject.java
deleted file mode 100644
index 6277385..0000000
--- a/sentry-binding/sentry-binding-hive-v2/src/main/java/org/apache/sentry/binding/hive/v2/SentryHivePrivilegeObject.java
+++ /dev/null
@@ -1,32 +0,0 @@
-/*
- * Licensed to the Apache Software Foundation (ASF) under one or more contributor license
- * agreements. See the NOTICE file distributed with this work for additional information regarding
- * copyright ownership. The ASF licenses this file to You under the Apache License, Version 2.0 (the
- * "License"); you may not use this file except in compliance with the License. You may obtain a
- * copy of the License at
- *
- * http://www.apache.org/licenses/LICENSE-2.0
- *
- * Unless required by applicable law or agreed to in writing, software distributed under the License
- * is distributed on an "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express
- * or implied. See the License for the specific language governing permissions and limitations under
- * the License.
- */
-
-package org.apache.sentry.binding.hive.v2;
-
-import org.apache.hadoop.hive.ql.security.authorization.plugin.HivePrivilegeObject;
-
-public class SentryHivePrivilegeObject extends HivePrivilegeObject {
-
-  boolean isServer = false;
-
-  boolean isUri = false;
-
-  String objectName = "";
-
-  public SentryHivePrivilegeObject(HivePrivilegeObjectType type, String objectName) {
-    super(type, null, objectName);
-  }
-
-}

http://git-wip-us.apache.org/repos/asf/sentry/blob/e358fde7/sentry-binding/sentry-binding-hive-v2/src/main/java/org/apache/sentry/binding/hive/v2/authorizer/DefaultSentryAccessController.java
----------------------------------------------------------------------
diff --git a/sentry-binding/sentry-binding-hive-v2/src/main/java/org/apache/sentry/binding/hive/v2/authorizer/DefaultSentryAccessController.java b/sentry-binding/sentry-binding-hive-v2/src/main/java/org/apache/sentry/binding/hive/v2/authorizer/DefaultSentryAccessController.java
deleted file mode 100644
index f21f920..0000000
--- a/sentry-binding/sentry-binding-hive-v2/src/main/java/org/apache/sentry/binding/hive/v2/authorizer/DefaultSentryAccessController.java
+++ /dev/null
@@ -1,564 +0,0 @@
-/**
- * Licensed to the Apache Software Foundation (ASF) under one or more contributor license
- * agreements. See the NOTICE file distributed with this work for additional information regarding
- * copyright ownership. The ASF licenses this file to you under the Apache License, Version 2.0 (the
- * "License"); you may not use this file except in compliance with the License. You may obtain a
- * copy of the License at
- *
- * http://www.apache.org/licenses/LICENSE-2.0
- *
- * Unless required by applicable law or agreed to in writing, software distributed under the License
- * is distributed on an "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express
- * or implied. See the License for the specific language governing permissions and limitations under
- * the License.
- */
-package org.apache.sentry.binding.hive.v2.authorizer;
-
-import java.util.ArrayList;
-import java.util.HashSet;
-import java.util.List;
-import java.util.Set;
-
-import org.apache.hadoop.hive.SentryHiveConstants;
-import org.apache.hadoop.hive.conf.HiveConf;
-import org.apache.hadoop.hive.conf.HiveConf.ConfVars;
-import org.apache.hadoop.hive.ql.metadata.AuthorizationException;
-import org.apache.hadoop.hive.ql.plan.HiveOperation;
-import org.apache.hadoop.hive.ql.security.HiveAuthenticationProvider;
-import org.apache.hadoop.hive.ql.security.authorization.plugin.HiveAccessControlException;
-import org.apache.hadoop.hive.ql.security.authorization.plugin.HiveAuthzPluginException;
-import org.apache.hadoop.hive.ql.security.authorization.plugin.HiveAuthzSessionContext;
-import org.apache.hadoop.hive.ql.security.authorization.plugin.HiveAuthzSessionContext.CLIENT_TYPE;
-import org.apache.hadoop.hive.ql.security.authorization.plugin.HivePrincipal;
-import org.apache.hadoop.hive.ql.security.authorization.plugin.HivePrincipal.HivePrincipalType;
-import org.apache.hadoop.hive.ql.security.authorization.plugin.HivePrivilege;
-import org.apache.hadoop.hive.ql.security.authorization.plugin.HivePrivilegeInfo;
-import org.apache.hadoop.hive.ql.security.authorization.plugin.HivePrivilegeObject;
-import org.apache.hadoop.hive.ql.security.authorization.plugin.HiveRoleGrant;
-import org.apache.sentry.binding.hive.SentryOnFailureHookContext;
-import org.apache.sentry.binding.hive.SentryOnFailureHookContextImpl;
-import org.apache.sentry.binding.hive.authz.HiveAuthzBinding;
-import org.apache.sentry.binding.hive.authz.HiveAuthzBinding.HiveHook;
-import org.apache.sentry.binding.hive.conf.HiveAuthzConf;
-import org.apache.sentry.binding.hive.conf.HiveAuthzConf.AuthzConfVars;
-import org.apache.sentry.binding.hive.v2.util.SentryAuthorizerUtil;
-import org.apache.sentry.core.common.ActiveRoleSet;
-import org.apache.sentry.core.common.Authorizable;
-import org.apache.sentry.core.common.exception.SentryAccessDeniedException;
-import org.apache.sentry.core.common.exception.SentryUserException;
-import org.apache.sentry.core.model.db.AccessConstants;
-import org.apache.sentry.core.model.db.DBModelAuthorizable;
-import org.apache.sentry.core.model.db.Server;
-import org.apache.sentry.api.service.thrift.SentryPolicyServiceClient;
-import org.apache.sentry.api.service.thrift.TSentryPrivilege;
-import org.apache.sentry.api.service.thrift.TSentryRole;
-import org.apache.sentry.service.thrift.SentryServiceClientFactory;
-import org.slf4j.Logger;
-import org.slf4j.LoggerFactory;
-
-import com.google.common.base.Preconditions;
-import com.google.common.collect.Sets;
-
-public class DefaultSentryAccessController extends SentryHiveAccessController {
-
-  public static final Logger LOG = LoggerFactory.getLogger(DefaultSentryAccessController.class);
-
-  public static final String REQUIRED_AUTHZ_SERVER_NAME = "Config "
-      + AuthzConfVars.AUTHZ_SERVER_NAME.getVar() + " is required";
-
-  private HiveAuthenticationProvider authenticator;
-  private String serverName;
-  private HiveConf conf;
-  private HiveAuthzConf authzConf;
-  private HiveAuthzSessionContext ctx;
-
-  private HiveHook hiveHook;
-  private HiveAuthzBinding hiveAuthzBinding;
-  protected SentryPolicyServiceClient sentryClient;
-
-
-  public DefaultSentryAccessController(HiveConf conf, HiveAuthzConf authzConf,
-      HiveAuthenticationProvider authenticator, HiveAuthzSessionContext ctx) throws Exception {
-    initilize(conf, authzConf, authenticator, ctx);
-    this.hiveHook = HiveHook.HiveServer2;
-  }
-
-  public DefaultSentryAccessController(HiveHook hiveHook, HiveConf conf, HiveAuthzConf authzConf,
-      HiveAuthenticationProvider authenticator, HiveAuthzSessionContext ctx) throws Exception {
-    initilize(conf, authzConf, authenticator, ctx);
-    this.hiveHook = hiveHook;
-  }
-
-  /**
-   * initialize authenticator and hiveAuthzBinding.
-   */
-  protected void initilize(HiveConf conf, HiveAuthzConf authzConf,
-      HiveAuthenticationProvider authenticator, HiveAuthzSessionContext ctx) throws Exception {
-    Preconditions.checkNotNull(conf, "HiveConf cannot be null");
-    Preconditions.checkNotNull(authzConf, "HiveAuthzConf cannot be null");
-    Preconditions.checkNotNull(authenticator, "Hive authenticator provider cannot be null");
-    Preconditions.checkNotNull(ctx, "HiveAuthzSessionContext cannot be null");
-
-    this.conf = conf;
-    this.authzConf = authzConf;
-    this.authenticator = authenticator;
-    this.ctx = ctx;
-    this.serverName =
-        Preconditions.checkNotNull(authzConf.get(AuthzConfVars.AUTHZ_SERVER_NAME.getVar()),
-            REQUIRED_AUTHZ_SERVER_NAME);
-  }
-
-  @Override
-  public void createRole(String roleName, HivePrincipal adminGrantor)
-      throws HiveAuthzPluginException, HiveAccessControlException {
-    if (AccessConstants.RESERVED_ROLE_NAMES.contains(roleName.toUpperCase())) {
-      String msg =
-          "Roles cannot be one of the reserved roles: " + AccessConstants.RESERVED_ROLE_NAMES;
-      throw new HiveAccessControlException(msg);
-    }
-    try {
-      sentryClient = getSentryClient();
-      sentryClient.createRole(authenticator.getUserName(), roleName);
-    } catch (SentryAccessDeniedException e) {
-      HiveOperation hiveOp = HiveOperation.CREATEROLE;
-      executeOnFailureHooks(hiveOp, e);
-    } catch (SentryUserException e) {
-      String msg = "Sentry failed to create role: " + e.getMessage();
-      executeOnErrorHooks(msg, e);
-    } finally {
-      closeClient();
-    }
-  }
-
-  @Override
-  public void dropRole(String roleName) throws HiveAuthzPluginException, HiveAccessControlException {
-    if (AccessConstants.RESERVED_ROLE_NAMES.contains(roleName.toUpperCase())) {
-      String msg =
-          "Roles cannot be one of the reserved roles: " + AccessConstants.RESERVED_ROLE_NAMES;
-      throw new HiveAccessControlException(msg);
-    }
-    try {
-      sentryClient = getSentryClient();
-      sentryClient.dropRole(authenticator.getUserName(), roleName);
-    } catch (SentryAccessDeniedException e) {
-      HiveOperation hiveOp = HiveOperation.DROPROLE;
-      executeOnFailureHooks(hiveOp, e);
-    } catch (SentryUserException e) {
-      String msg = "Sentry failed to drop role: " + e.getMessage();
-      executeOnErrorHooks(msg, e);
-    } finally {
-      closeClient();
-    }
-  }
-
-  @Override
-  public List<String> getAllRoles() throws HiveAccessControlException, HiveAuthzPluginException {
-    List<String> roles = new ArrayList<String>();
-    try {
-      sentryClient = getSentryClient();
-      roles = convert2RoleList(sentryClient.listAllRoles(authenticator.getUserName()));
-    } catch (SentryAccessDeniedException e) {
-      HiveOperation hiveOp = HiveOperation.SHOW_ROLES;
-      executeOnFailureHooks(hiveOp, e);
-    } catch (SentryUserException e) {
-      String msg = "Error when sentryClient listRoles: " + e.getMessage();
-      executeOnErrorHooks(msg, e);
-    } finally {
-      closeClient();
-    }
-    return roles;
-  }
-
-  @Override
-  public void grantPrivileges(List<HivePrincipal> hivePrincipals,
-      List<HivePrivilege> hivePrivileges, HivePrivilegeObject hivePrivObject,
-      HivePrincipal grantorPrincipal, boolean grantOption) throws HiveAuthzPluginException,
-      HiveAccessControlException {
-    grantOrRevokePrivlegeOnRole(hivePrincipals, hivePrivileges, hivePrivObject, grantOption, true);
-  }
-
-  @Override
-  public void revokePrivileges(List<HivePrincipal> hivePrincipals,
-      List<HivePrivilege> hivePrivileges, HivePrivilegeObject hivePrivObject,
-      HivePrincipal grantorPrincipal, boolean grantOption) throws HiveAuthzPluginException,
-      HiveAccessControlException {
-    grantOrRevokePrivlegeOnRole(hivePrincipals, hivePrivileges, hivePrivObject, grantOption, false);
-  }
-
-  @Override
-  public void grantRole(List<HivePrincipal> hivePrincipals, List<String> roles,
-      boolean grantOption, HivePrincipal grantorPrinc) throws HiveAuthzPluginException,
-      HiveAccessControlException {
-    grantOrRevokeRoleOnGroup(hivePrincipals, roles, grantorPrinc, true);
-  }
-
-  @Override
-  public void revokeRole(List<HivePrincipal> hivePrincipals, List<String> roles,
-      boolean grantOption, HivePrincipal grantorPrinc) throws HiveAuthzPluginException,
-      HiveAccessControlException {
-    grantOrRevokeRoleOnGroup(hivePrincipals, roles, grantorPrinc, false);
-  }
-
-
-  @Override
-  public List<HivePrivilegeInfo> showPrivileges(HivePrincipal principal, HivePrivilegeObject privObj)
-      throws HiveAuthzPluginException, HiveAccessControlException {
-    if (principal.getType() != HivePrincipalType.ROLE) {
-      String msg =
-          SentryHiveConstants.GRANT_REVOKE_NOT_SUPPORTED_FOR_PRINCIPAL + principal.getType();
-      throw new HiveAuthzPluginException(msg);
-    }
-    List<HivePrivilegeInfo> infoList = new ArrayList<HivePrivilegeInfo>();
-    try {
-      sentryClient = getSentryClient();
-      List<List<DBModelAuthorizable>> authorizables =
-          SentryAuthorizerUtil.getAuthzHierarchy(new Server(serverName), privObj);
-      Set<TSentryPrivilege> tPrivilges = new HashSet<TSentryPrivilege>();
-      if (authorizables != null && !authorizables.isEmpty()) {
-        for (List<? extends Authorizable> authorizable : authorizables) {
-          tPrivilges.addAll(sentryClient.listPrivilegesByRoleName(authenticator.getUserName(),
-              principal.getName(), authorizable));
-        }
-      } else {
-        tPrivilges.addAll(sentryClient.listPrivilegesByRoleName(authenticator.getUserName(),
-            principal.getName(), null));
-      }
-
-      if (tPrivilges != null && !tPrivilges.isEmpty()) {
-        for (TSentryPrivilege privilege : tPrivilges) {
-          infoList.add(SentryAuthorizerUtil.convert2HivePrivilegeInfo(privilege, principal));
-        }
-      }
-    } catch (SentryAccessDeniedException e) {
-      HiveOperation hiveOp = HiveOperation.SHOW_GRANT;
-      executeOnFailureHooks(hiveOp, e);
-    } catch (SentryUserException e) {
-      String msg = "Error when sentryClient listPrivilegesByRoleName: " + e.getMessage();
-      executeOnErrorHooks(msg, e);
-    } finally {
-      closeClient();
-    }
-    return infoList;
-  }
-
-  @Override
-  public void setCurrentRole(String roleName) throws HiveAccessControlException,
-      HiveAuthzPluginException {
-    try {
-      sentryClient = getSentryClient();
-      hiveAuthzBinding = new HiveAuthzBinding(hiveHook, conf, authzConf);
-      hiveAuthzBinding.setActiveRoleSet(roleName,
-          sentryClient.listUserRoles(authenticator.getUserName()));
-    } catch (SentryAccessDeniedException e) {
-      HiveOperation hiveOp = HiveOperation.GRANT_ROLE;
-      executeOnFailureHooks(hiveOp, e);
-    } catch (Exception e) {
-      String msg = "Error when sentryClient setCurrentRole: " + e.getMessage();
-      executeOnErrorHooks(msg, e);
-    } finally {
-      closeClient();
-      if (hiveAuthzBinding != null) {
-        hiveAuthzBinding.close();
-      }
-    }
-  }
-
-  @Override
-  public List<String> getCurrentRoleNames() throws HiveAuthzPluginException {
-    List<String> roles = new ArrayList<String>();
-    try {
-      sentryClient = getSentryClient();
-      hiveAuthzBinding = new HiveAuthzBinding(hiveHook, conf, authzConf);
-      ActiveRoleSet roleSet = hiveAuthzBinding.getActiveRoleSet();
-      if (roleSet.isAll()) {
-        roles = convert2RoleList(sentryClient.listUserRoles(authenticator.getUserName()));
-      } else {
-        roles.addAll(roleSet.getRoles());
-      }
-    } catch (Exception e) {
-      String msg = "Error when sentryClient listUserRoles: " + e.getMessage();
-      executeOnErrorHooks(msg, e);
-    } finally {
-      closeClient();
-      if (hiveAuthzBinding != null) {
-        hiveAuthzBinding.close();
-      }
-    }
-    return roles;
-  }
-
-  @Override
-  public List<HiveRoleGrant> getPrincipalGrantInfoForRole(String roleName)
-      throws HiveAuthzPluginException {
-    // TODO we will support this in the future
-    throw new HiveAuthzPluginException("SHOW_ROLE_PRINCIPALS is not supported in Sentry");
-  }
-
-  @Override
-  public List<HiveRoleGrant> getRoleGrantInfoForPrincipal(HivePrincipal principal)
-      throws HiveAccessControlException, HiveAuthzPluginException {
-    List<HiveRoleGrant> hiveRoleGrants = new ArrayList<HiveRoleGrant>();
-    try {
-      sentryClient = getSentryClient();
-      Set<TSentryRole> roles = null;
-      if (principal.getType() == HivePrincipalType.GROUP) {
-        roles = sentryClient.listRolesByGroupName(authenticator.getUserName(), principal.getName());
-      } else if (principal.getType() == HivePrincipalType.USER) {
-        roles = sentryClient.listRolesByUserName(authenticator.getUserName(), principal.getName());
-      } else {
-        String msg =
-            SentryHiveConstants.GRANT_REVOKE_NOT_SUPPORTED_FOR_PRINCIPAL + principal.getType();
-        throw new HiveAuthzPluginException(msg);
-      }
-      if (roles != null && !roles.isEmpty()) {
-        for (TSentryRole role : roles) {
-          hiveRoleGrants.add(SentryAuthorizerUtil.convert2HiveRoleGrant(role));
-        }
-      }
-    } catch (SentryAccessDeniedException e) {
-      HiveOperation hiveOp = HiveOperation.SHOW_ROLE_GRANT;
-      executeOnFailureHooks(hiveOp, e);
-    } catch (SentryUserException e) {
-      String msg = "Error when sentryClient listRolesByGroupName: " + 
e.getMessage();
-      executeOnErrorHooks(msg, e);
-    } finally {
-      closeClient();
-    }
-    return hiveRoleGrants;
-  }
-
-  @Override
-  public void applyAuthorizationConfigPolicy(HiveConf hiveConf) throws HiveAuthzPluginException {
-    // Apply rest of the configuration only to HiveServer2
-    if (ctx.getClientType() != CLIENT_TYPE.HIVESERVER2
-        || !hiveConf.getBoolVar(ConfVars.HIVE_AUTHORIZATION_ENABLED)) {
-      throw new HiveAuthzPluginException("Sentry only supports hiveserver2");
-    }
-  }
-
-  /**
-   * Grant (isGrant is true) or revoke (isGrant is false) db privileges to/from a role via
-   * sentryClient, which is an instance of SentryPolicyServiceClientV2.
-   *
-   * @param hivePrincipals
-   * @param hivePrivileges
-   * @param hivePrivObject
-   * @param grantOption
-   * @param isGrant
-   */
-  private void grantOrRevokePrivlegeOnRole(List<HivePrincipal> hivePrincipals,
-      List<HivePrivilege> hivePrivileges, HivePrivilegeObject hivePrivObject, boolean grantOption,
-      boolean isGrant) throws HiveAuthzPluginException, HiveAccessControlException {
-    try {
-      sentryClient = getSentryClient();
-
-      for (HivePrincipal principal : hivePrincipals) {
-        // Sentry only supports granting privileges to a ROLE
-        if (principal.getType() != HivePrincipalType.ROLE) {
-          String msg =
-              SentryHiveConstants.GRANT_REVOKE_NOT_SUPPORTED_FOR_PRINCIPAL + principal.getType();
-          throw new HiveAuthzPluginException(msg);
-        }
-        for (HivePrivilege privilege : hivePrivileges) {
-          String grantorName = authenticator.getUserName();
-          String roleName = principal.getName();
-          String action = SentryAuthorizerUtil.convert2SentryAction(privilege);
-          List<String> columnNames = privilege.getColumns();
-          Boolean grantOp = null;
-          if (isGrant) {
-            grantOp = grantOption;
-          }
-
-          switch (hivePrivObject.getType()) {
-            case GLOBAL:
-              if (isGrant) {
-                sentryClient.grantServerPrivilege(grantorName, roleName,
-                    hivePrivObject.getObjectName(), action, grantOp);
-              } else {
-                sentryClient.revokeServerPrivilege(grantorName, roleName,
-                    hivePrivObject.getObjectName(), action, grantOp);
-              }
-              break;
-            case DATABASE:
-              if (isGrant) {
-                sentryClient.grantDatabasePrivilege(grantorName, roleName, serverName,
-                    hivePrivObject.getDbname(), action, grantOp);
-              } else {
-                sentryClient.revokeDatabasePrivilege(grantorName, roleName, serverName,
-                    hivePrivObject.getDbname(), action, grantOp);
-              }
-              break;
-            case TABLE_OR_VIEW:
-              // For column level security
-              if (columnNames != null && !columnNames.isEmpty()) {
-                if (action.equalsIgnoreCase(AccessConstants.INSERT)
-                    || action.equalsIgnoreCase(AccessConstants.ALL)) {
-                  String msg =
-                      SentryHiveConstants.PRIVILEGE_NOT_SUPPORTED + privilege.getName()
-                          + " on Column";
-                  throw new HiveAuthzPluginException(msg);
-                }
-                if (isGrant) {
-                  sentryClient.grantColumnsPrivileges(grantorName, roleName, serverName,
-                      hivePrivObject.getDbname(), hivePrivObject.getObjectName(), columnNames,
-                      action, grantOp);
-                } else {
-                  sentryClient.revokeColumnsPrivilege(grantorName, roleName, serverName,
-                      hivePrivObject.getDbname(), hivePrivObject.getObjectName(), columnNames,
-                      action, grantOp);
-                }
-              } else {
-                if (isGrant) {
-                  sentryClient.grantTablePrivilege(grantorName, roleName, serverName,
-                      hivePrivObject.getDbname(), hivePrivObject.getObjectName(), action, grantOp);
-                } else {
-                  sentryClient.revokeTablePrivilege(grantorName, roleName, serverName,
-                      hivePrivObject.getDbname(), hivePrivObject.getObjectName(), action, grantOp);
-                }
-              }
-              break;
-            case LOCAL_URI:
-            case DFS_URI:
-              String uRIString = hivePrivObject.getObjectName().replace("'", "").replace("\"", "");
-              if (isGrant) {
-                sentryClient.grantURIPrivilege(grantorName, roleName, serverName,
-                    uRIString, grantOp);
-              } else {
-                sentryClient.revokeURIPrivilege(grantorName, roleName, serverName,
-                    uRIString, grantOp);
-              }
-              break;
-            case FUNCTION:
-            case PARTITION:
-            case COLUMN:
-            case COMMAND_PARAMS:
-              // these object types are not supported
-              throw new HiveAuthzPluginException(hivePrivObject.getType().name()
-                  + " are not supported in sentry");
-            default:
-              break;
-          }
-        }
-      }
-    } catch (SentryAccessDeniedException e) {
-      HiveOperation hiveOp =
-          isGrant ? HiveOperation.GRANT_PRIVILEGE : HiveOperation.REVOKE_PRIVILEGE;
-      executeOnFailureHooks(hiveOp, e);
-    } catch (SentryUserException e) {
-      String msg = "Error when sentryClient grant/revoke privilege:" + e.getMessage();
-      executeOnErrorHooks(msg, e);
-    } finally {
-      closeClient();
-    }
-  }
-  /**
-   * Grant (isGrant is true) or revoke (isGrant is false) a role to/from a group via
-   * sentryClient, which is an instance of SentryPolicyServiceClientV2.
-   *
-   * @param hivePrincipals
-   * @param roles
-   * @param grantorPrinc
-   * @param isGrant
-   */
-  private void grantOrRevokeRoleOnGroup(List<HivePrincipal> hivePrincipals, List<String> roles,
-      HivePrincipal grantorPrinc, boolean isGrant) throws HiveAuthzPluginException,
-      HiveAccessControlException {
-    try {
-      sentryClient = getSentryClient();
-      // get principals
-      Set<String> groups = Sets.newHashSet();
-      Set<String> users = Sets.newHashSet();
-      for (HivePrincipal principal : hivePrincipals) {
-        if (principal.getType() == HivePrincipalType.GROUP) {
-          groups.add(principal.getName());
-        } else if (principal.getType() == HivePrincipalType.USER) {
-          users.add(principal.getName());
-        } else {
-          String msg =
-              SentryHiveConstants.GRANT_REVOKE_NOT_SUPPORTED_FOR_PRINCIPAL + principal.getType();
-          throw new HiveAuthzPluginException(msg);
-
-        }
-      }
-
-      // grant/revoke role to/from principals
-      for (String roleName : roles) {
-        if (isGrant) {
-          if (groups.size() > 0) {
-            sentryClient.grantRoleToGroups(grantorPrinc.getName(), roleName, groups);
-          }
-          if (users.size() > 0) {
-            sentryClient.grantRoleToUsers(grantorPrinc.getName(), roleName, users);
-          }
-        } else {
-          if (groups.size() > 0) {
-            sentryClient.revokeRoleFromGroups(grantorPrinc.getName(), roleName, groups);
-          }
-          if (users.size() > 0) {
-            sentryClient.revokeRoleFromUsers(grantorPrinc.getName(), roleName, users);
-          }
-        }
-      }
-
-    } catch (SentryAccessDeniedException e) {
-      HiveOperation hiveOp = isGrant ? HiveOperation.GRANT_ROLE : HiveOperation.REVOKE_ROLE;
-      executeOnFailureHooks(hiveOp, e);
-    } catch (SentryUserException e) {
-      String msg = "Error when sentryClient grant/revoke role:" + e.getMessage();
-      executeOnErrorHooks(msg, e);
-    } finally {
-      closeClient();
-    }
-  }
-
-  private void executeOnFailureHooks(HiveOperation hiveOp, SentryAccessDeniedException e)
-      throws HiveAccessControlException {
-
-    // With Hive 2.x, cmd information is not available from SessionState. Moreover, cmd information
-    // is not used in SentryOnFailureHookContextImpl. If this information is really needed, an issue
-    // should be raised with the Hive community to update the HiveAccessController interface to pass
-    // HiveSemanticAnalyzerHookContext, which has cmd information. For now, an empty string is used
-    // for cmd.
-    SentryOnFailureHookContext hookCtx =
-        new SentryOnFailureHookContextImpl("", null, null, hiveOp, null,
-            null, null, null, authenticator.getUserName(), null, new AuthorizationException(e),
-            authzConf);
-    SentryAuthorizerUtil.executeOnFailureHooks(hookCtx, authzConf);
-    throw new HiveAccessControlException(e.getMessage(), e);
-  }
-
-  private void executeOnErrorHooks(String msg, Exception e) throws HiveAuthzPluginException {
-    LOG.error(msg, e);
-    throw new HiveAuthzPluginException(msg, e);
-  }
-
-  private List<String> convert2RoleList(Set<TSentryRole> roleSet) {
-    List<String> roles = new ArrayList<String>();
-    if (roleSet != null && !roleSet.isEmpty()) {
-      for (TSentryRole tRole : roleSet) {
-        roles.add(tRole.getRoleName());
-      }
-    }
-    return roles;
-  }
-
-  private SentryPolicyServiceClient getSentryClient() throws HiveAuthzPluginException {
-    try {
-      Preconditions.checkNotNull(authzConf, "HiveAuthConf cannot be null");
-      return SentryServiceClientFactory.create(authzConf);
-    } catch (Exception e) {
-      String msg = "Error occurred when creating Sentry client: " + 
e.getMessage();
-      throw new HiveAuthzPluginException(msg, e);
-    }
-  }
-  private void closeClient() {
-    if (sentryClient != null) {
-      try {
-        sentryClient.close();
-      } catch (Exception e) {
-        LOG.error("Error while closing the connection with sentry server", e);
-      }
-    }
-  }
-
-}
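
For context: the removed DefaultSentryAccessController implements Hive's HiveAccessController plugin interface, so HiveServer2 invokes its methods whenever a session runs role- or grant-related SQL (CREATE ROLE, GRANT ROLE, GRANT ... ON TABLE, SHOW GRANT, and so on). The minimal Java sketch below is not part of the commit; it only illustrates that call pattern. The sentry-site.xml lookup, the role/group names, and the exact import packages for HiveHook and HiveAuthzConf are assumptions made for illustration and may differ in the v2 binding.

import java.util.Collections;

import org.apache.hadoop.hive.conf.HiveConf;
import org.apache.hadoop.hive.ql.security.SessionStateUserAuthenticator;
import org.apache.hadoop.hive.ql.security.authorization.plugin.HiveAccessController;
import org.apache.hadoop.hive.ql.security.authorization.plugin.HiveAuthzSessionContext;
import org.apache.hadoop.hive.ql.security.authorization.plugin.HiveAuthzSessionContext.CLIENT_TYPE;
import org.apache.hadoop.hive.ql.security.authorization.plugin.HivePrincipal;
import org.apache.hadoop.hive.ql.security.authorization.plugin.HivePrincipal.HivePrincipalType;
import org.apache.sentry.binding.hive.authz.HiveAuthzBinding.HiveHook;  // assumed package
import org.apache.sentry.binding.hive.conf.HiveAuthzConf;               // assumed package
// DefaultSentryAccessController import omitted; package as in the deleted file above.

public class SentryAccessControllerSketch {
  public static void main(String[] args) throws Exception {
    HiveConf hiveConf = new HiveConf();
    // Assumption: sentry-site.xml is available on the classpath.
    HiveAuthzConf authzConf = new HiveAuthzConf(hiveConf.getResource("sentry-site.xml"));
    HiveAuthzSessionContext ctx =
        new HiveAuthzSessionContext.Builder().setClientType(CLIENT_TYPE.HIVESERVER2).build();

    // Uses the five-argument constructor shown in the diff; in a real deployment
    // HiveServer2 builds the controller through the Sentry authorizer factory
    // rather than instantiating it directly.
    HiveAccessController controller = new DefaultSentryAccessController(
        HiveHook.HiveServer2, hiveConf, authzConf, new SessionStateUserAuthenticator(), ctx);

    // CREATE ROLE analyst;
    controller.createRole("analyst", null);

    // GRANT ROLE analyst TO GROUP analysts;
    controller.grantRole(
        Collections.singletonList(new HivePrincipal("analysts", HivePrincipalType.GROUP)),
        Collections.singletonList("analyst"),
        false /* grantOption */,
        new HivePrincipal("hive", HivePrincipalType.USER));
  }
}

Each of these calls follows the pattern visible throughout the deleted methods above: obtain a SentryPolicyServiceClient, perform the operation as the authenticated user, map SentryAccessDeniedException/SentryUserException onto the failure and error hooks, and close the client in the finally block.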
