http://git-wip-us.apache.org/repos/asf/sentry/blob/7a30c819/sentry-binding/sentry-binding-hive/src/main/java/org/apache/sentry/binding/hive/HiveAuthzBindingHook.java
----------------------------------------------------------------------
diff --git a/sentry-binding/sentry-binding-hive/src/main/java/org/apache/sentry/binding/hive/HiveAuthzBindingHook.java b/sentry-binding/sentry-binding-hive/src/main/java/org/apache/sentry/binding/hive/HiveAuthzBindingHook.java
index c425e06..d752b25 100644
--- a/sentry-binding/sentry-binding-hive/src/main/java/org/apache/sentry/binding/hive/HiveAuthzBindingHook.java
+++ b/sentry-binding/sentry-binding-hive/src/main/java/org/apache/sentry/binding/hive/HiveAuthzBindingHook.java
@@ -16,41 +16,22 @@
  */
 package org.apache.sentry.binding.hive;
 
-import static org.apache.hadoop.hive.metastore.MetaStoreUtils.DEFAULT_DATABASE_NAME;
-
 import java.io.Serializable;
 import java.net.MalformedURLException;
-import java.net.URI;
 import java.net.URL;
 import java.security.CodeSource;
-import java.util.ArrayList;
-import java.util.EnumSet;
 import java.util.List;
 import java.util.Set;
-import java.util.Arrays;
 
-import com.google.common.base.Preconditions;
-import org.apache.hadoop.fs.FileSystem;
-import org.apache.hadoop.fs.Path;
-import org.apache.hadoop.hive.common.JavaUtils;
 import org.apache.hadoop.hive.conf.HiveConf;
-import org.apache.hadoop.hive.conf.HiveConf.ConfVars;
-import org.apache.hadoop.hive.metastore.api.FieldSchema;
 import org.apache.hadoop.hive.ql.exec.DDLTask;
-import org.apache.hadoop.hive.ql.exec.FunctionRegistry;
 import org.apache.hadoop.hive.ql.exec.SentryFilterDDLTask;
 import org.apache.hadoop.hive.ql.exec.SentryGrantRevokeTask;
 import org.apache.hadoop.hive.ql.exec.Task;
 import org.apache.hadoop.hive.ql.exec.Utilities;
-import org.apache.hadoop.hive.ql.hooks.Entity;
-import org.apache.hadoop.hive.ql.hooks.Entity.Type;
-import org.apache.hadoop.hive.ql.hooks.Hook;
-import org.apache.hadoop.hive.ql.hooks.ReadEntity;
-import org.apache.hadoop.hive.ql.hooks.WriteEntity;
 import org.apache.hadoop.hive.ql.lib.Node;
 import org.apache.hadoop.hive.ql.metadata.AuthorizationException;
 import org.apache.hadoop.hive.ql.parse.ASTNode;
-import org.apache.hadoop.hive.ql.parse.AbstractSemanticAnalyzerHook;
 import org.apache.hadoop.hive.ql.parse.BaseSemanticAnalyzer;
 import org.apache.hadoop.hive.ql.parse.HiveParser;
 import org.apache.hadoop.hive.ql.parse.HiveSemanticAnalyzerHookContext;
@@ -58,52 +39,20 @@ import org.apache.hadoop.hive.ql.parse.SemanticException;
 import org.apache.hadoop.hive.ql.plan.DDLWork;
 import org.apache.hadoop.hive.ql.plan.HiveOperation;
 import org.apache.hadoop.hive.ql.session.SessionState;
-import org.apache.sentry.binding.hive.authz.HiveAuthzBinding;
 import org.apache.sentry.binding.hive.authz.HiveAuthzPrivileges;
-import org.apache.sentry.binding.hive.authz.HiveAuthzPrivileges.HiveOperationScope;
-import org.apache.sentry.binding.hive.authz.HiveAuthzPrivileges.HiveOperationType;
 import org.apache.sentry.binding.hive.authz.HiveAuthzPrivilegesMap;
 import org.apache.sentry.binding.hive.conf.HiveAuthzConf;
 import org.apache.sentry.core.common.Subject;
-import org.apache.sentry.core.common.utils.PathUtils;
-import org.apache.sentry.core.model.db.AccessURI;
-import org.apache.sentry.core.model.db.Column;
-import org.apache.sentry.core.model.db.DBModelAction;
-import org.apache.sentry.core.model.db.DBModelAuthorizable;
-import org.apache.sentry.core.model.db.DBModelAuthorizable.AuthorizableType;
 import org.apache.sentry.core.model.db.Database;
 import org.apache.sentry.core.model.db.Table;
-import org.apache.sentry.provider.cache.PrivilegeCache;
-import org.apache.sentry.provider.cache.SimplePrivilegeCache;
-import org.apache.sentry.provider.common.AuthorizationProvider;
 import org.slf4j.Logger;
 import org.slf4j.LoggerFactory;
 
-import com.google.common.annotations.VisibleForTesting;
-import com.google.common.base.Splitter;
-import com.google.common.collect.ImmutableList;
+import com.google.common.base.Preconditions;
 
-public class HiveAuthzBindingHook extends AbstractSemanticAnalyzerHook {
+public class HiveAuthzBindingHook extends HiveAuthzBindingHookBase {
   private static final Logger LOG = LoggerFactory
       .getLogger(HiveAuthzBindingHook.class);
-  private final HiveAuthzBinding hiveAuthzBinding;
-  private final HiveAuthzConf authzConf;
-  private Database currDB = Database.ALL;
-  private Table currTab;
-  private AccessURI udfURI;
-  private AccessURI serdeURI;
-  private AccessURI partitionURI;
-  private Table currOutTab = null;
-  private Database currOutDB = null;
-  private final List<String> serdeWhiteList;
-  private boolean serdeURIPrivilegesEnabled;
-
-  // True if this is a basic DESCRIBE <table> operation. False for other DESCRIBE variants
-  // like DESCRIBE [FORMATTED|EXTENDED]. Required because Hive treats these stmts as the same
-  // HiveOperationType, but we want to enforce different privileges on each statement.
-  // Basic DESCRIBE <table> is allowed with only column-level privs, while the variants
-  // require table-level privileges.
-  public boolean isDescTableBasic = false;
 
   public HiveAuthzBindingHook() throws Exception {
     SessionState session = SessionState.get();
@@ -117,16 +66,7 @@ public class HiveAuthzBindingHook extends AbstractSemanticAnalyzerHook {
     if(hiveConf == null) {
       throw new IllegalStateException("Session HiveConf is null");
     }
-    authzConf = loadAuthzConf(hiveConf);
-    hiveAuthzBinding = new HiveAuthzBinding(hiveConf, authzConf);
-
-    String serdeWhiteLists = authzConf.get(HiveAuthzConf.HIVE_SENTRY_SERDE_WHITELIST,
-        HiveAuthzConf.HIVE_SENTRY_SERDE_WHITELIST_DEFAULT);
-    serdeWhiteList = Arrays.asList(serdeWhiteLists.split(","));
-    serdeURIPrivilegesEnabled = authzConf.getBoolean(HiveAuthzConf.HIVE_SENTRY_SERDE_URI_PRIVILIEGES_ENABLED,
-        HiveAuthzConf.HIVE_SENTRY_SERDE_URI_PRIVILIEGES_ENABLED_DEFAULT);
 
-    FunctionRegistry.setupPermissionsForBuiltinUDFs("", HiveAuthzConf.HIVE_UDF_BLACK_LIST);
   }
 
   public static HiveAuthzConf loadAuthzConf(HiveConf hiveConf) {
@@ -324,64 +264,6 @@ public class HiveAuthzBindingHook extends AbstractSemanticAnalyzerHook {
     return ast;
   }
 
-  // Find the current database for session
-  private Database getCanonicalDb() {
-    return new Database(SessionState.get().getCurrentDatabase());
-  }
-
-  private Database extractDatabase(ASTNode ast) throws SemanticException {
-    String tableName = BaseSemanticAnalyzer.getUnescapedName(ast);
-    if (tableName.contains(".")) {
-      return new Database(tableName.split("\\.")[0]);
-    } else {
-      return getCanonicalDb();
-    }
-  }
-  private Table extractTable(ASTNode ast) throws SemanticException {
-    String tableName = BaseSemanticAnalyzer.getUnescapedName(ast);
-    if (tableName.contains(".")) {
-      return new Table(tableName.split("\\.")[1]);
-    } else {
-      return new Table(tableName);
-    }
-  }
-
-  @VisibleForTesting
-  protected static AccessURI extractPartition(ASTNode ast) throws SemanticException {
-    for (int i = 0; i < ast.getChildCount(); i++) {
-      ASTNode child = (ASTNode)ast.getChild(i);
-      if (child.getToken().getType() == HiveParser.TOK_PARTITIONLOCATION &&
-          child.getChildCount() == 1) {
-        return parseURI(BaseSemanticAnalyzer.
-          unescapeSQLString(child.getChild(0).getText()));
-      }
-    }
-    return null;
-  }
-
-  @VisibleForTesting
-  protected static AccessURI parseURI(String uri) throws SemanticException {
-    return parseURI(uri, false);
-  }
-
-  @VisibleForTesting
-  protected static AccessURI parseURI(String uri, boolean isLocal)
-      throws SemanticException {
-    try {
-      HiveConf conf = SessionState.get().getConf();
-      String warehouseDir = conf.getVar(ConfVars.METASTOREWAREHOUSE);
-      Path warehousePath = new Path(warehouseDir);
-      if (warehousePath.isAbsoluteAndSchemeAuthorityNull()) {
-        FileSystem fs = FileSystem.get(conf);
-        warehouseDir = fs.makeQualified(warehousePath).toUri().toString();
-      }
-      return new AccessURI(PathUtils.parseURI(warehouseDir, uri, isLocal));
-    } catch (Exception e) {
-      throw new SemanticException("Error parsing URI " + uri + ": " +
-        e.getMessage(), e);
-    }
-  }
-
   /**
    * Post analyze hook that invokes hive auth bindings
    */
@@ -459,598 +341,4 @@ public class HiveAuthzBindingHook extends AbstractSemanticAnalyzerHook {
     }
   }
 
-  private void executeOnFailureHooks(HiveSemanticAnalyzerHookContext context,
-      HiveOperation hiveOp, AuthorizationException e) {
-    SentryOnFailureHookContext hookCtx = new SentryOnFailureHookContextImpl(
-        context.getCommand(), context.getInputs(), context.getOutputs(),
-        hiveOp, currDB, currTab, udfURI, null, context.getUserName(),
-        context.getIpAddress(), e, context.getConf());
-    String csHooks = authzConf.get(
-        HiveAuthzConf.AuthzConfVars.AUTHZ_ONFAILURE_HOOKS.getVar(), "").trim();
-
-    try {
-      for (Hook aofh : getHooks(csHooks)) {
-        ((SentryOnFailureHook)aofh).run(hookCtx);
-      }
-    } catch (Exception ex) {
-      LOG.error("Error executing hook:", ex);
-    }
-  }
-
-  public static void runFailureHook(SentryOnFailureHookContext hookContext,
-      String csHooks) {
-    try {
-      for (Hook aofh : getHooks(csHooks)) {
-        ((SentryOnFailureHook) aofh).run(hookContext);
-      }
-    } catch (Exception ex) {
-      LOG.error("Error executing hook:", ex);
-    }
-  }
-  /**
-   * Convert the input/output entities into authorizables. generate
-   * authorizables for cases like Database and metadata operations where the
-   * compiler doesn't capture entities. invoke the hive binding to validate
-   * permissions
-   *
-   * @param context
-   * @param stmtAuthObject
-   * @param stmtOperation
-   * @throws AuthorizationException
-   */
-  private void authorizeWithHiveBindings(HiveSemanticAnalyzerHookContext context,
-      HiveAuthzPrivileges stmtAuthObject, HiveOperation stmtOperation) throws AuthorizationException {
-    Set<ReadEntity> inputs = context.getInputs();
-    Set<WriteEntity> outputs = context.getOutputs();
-    List<List<DBModelAuthorizable>> inputHierarchy = new ArrayList<List<DBModelAuthorizable>>();
-    List<List<DBModelAuthorizable>> outputHierarchy = new ArrayList<List<DBModelAuthorizable>>();
-
-    if(LOG.isDebugEnabled()) {
-      LOG.debug("stmtAuthObject.getOperationScope() = " + 
stmtAuthObject.getOperationScope());
-      LOG.debug("context.getInputs() = " + context.getInputs());
-      LOG.debug("context.getOutputs() = " + context.getOutputs());
-    }
-
-    // Workaround to allow DESCRIBE <table> to be executed with only column-level privileges, while
-    // still authorizing DESCRIBE [EXTENDED|FORMATTED] as table-level.
-    // This is done by treating DESCRIBE <table> the same as SHOW COLUMNS, which only requires column
-    // level privs.
-    if (isDescTableBasic) {
-      stmtAuthObject = HiveAuthzPrivilegesMap.getHiveAuthzPrivileges(HiveOperation.SHOWCOLUMNS);
-    }
-
-    switch (stmtAuthObject.getOperationScope()) {
-
-    case SERVER :
-      // validate server level privileges if applicable, e.g. create UDF, register jar, etc.
-      List<DBModelAuthorizable> serverHierarchy = new ArrayList<DBModelAuthorizable>();
-      serverHierarchy.add(hiveAuthzBinding.getAuthServer());
-      inputHierarchy.add(serverHierarchy);
-      break;
-    case DATABASE:
-      // workaround for database scope statements (create/alter/drop db)
-      List<DBModelAuthorizable> dbHierarchy = new ArrayList<DBModelAuthorizable>();
-      dbHierarchy.add(hiveAuthzBinding.getAuthServer());
-      dbHierarchy.add(currDB);
-      inputHierarchy.add(dbHierarchy);
-      outputHierarchy.add(dbHierarchy);
-
-      getInputHierarchyFromInputs(inputHierarchy, inputs);
-
-      if (serdeURI != null) {
-        List<DBModelAuthorizable> serdeUriHierarchy = new ArrayList<DBModelAuthorizable>();
-        serdeUriHierarchy.add(hiveAuthzBinding.getAuthServer());
-        serdeUriHierarchy.add(serdeURI);
-        outputHierarchy.add(serdeUriHierarchy);
-      }
-      break;
-    case TABLE:
-      // workaround for add partitions
-      if(partitionURI != null) {
-        inputHierarchy.add(ImmutableList.of(hiveAuthzBinding.getAuthServer(), partitionURI));
-      }
-
-      getInputHierarchyFromInputs(inputHierarchy, inputs);
-      for (WriteEntity writeEntity: outputs) {
-        if (filterWriteEntity(writeEntity)) {
-          continue;
-        }
-        List<DBModelAuthorizable> entityHierarchy = new ArrayList<DBModelAuthorizable>();
-        entityHierarchy.add(hiveAuthzBinding.getAuthServer());
-        entityHierarchy.addAll(getAuthzHierarchyFromEntity(writeEntity));
-        outputHierarchy.add(entityHierarchy);
-      }
-      // workaround for metadata queries.
-      // Capture the table name in pre-analyze and include that in the input entity list
-      if (currTab != null) {
-        List<DBModelAuthorizable> externalAuthorizableHierarchy = new ArrayList<DBModelAuthorizable>();
-        externalAuthorizableHierarchy.add(hiveAuthzBinding.getAuthServer());
-        externalAuthorizableHierarchy.add(currDB);
-        externalAuthorizableHierarchy.add(currTab);
-        inputHierarchy.add(externalAuthorizableHierarchy);
-      }
-
-
-
-      // workaround for DDL statements
-      // Capture the table name in pre-analyze and include that in the output entity list
-      if (currOutTab != null) {
-        List<DBModelAuthorizable> externalAuthorizableHierarchy = new ArrayList<DBModelAuthorizable>();
-        externalAuthorizableHierarchy.add(hiveAuthzBinding.getAuthServer());
-        externalAuthorizableHierarchy.add(currOutDB);
-        externalAuthorizableHierarchy.add(currOutTab);
-        outputHierarchy.add(externalAuthorizableHierarchy);
-      }
-
-      if (serdeURI != null) {
-        List<DBModelAuthorizable> serdeUriHierarchy = new ArrayList<DBModelAuthorizable>();
-        serdeUriHierarchy.add(hiveAuthzBinding.getAuthServer());
-        serdeUriHierarchy.add(serdeURI);
-        outputHierarchy.add(serdeUriHierarchy);
-      }
-
-      break;
-    case FUNCTION:
-      /* The 'FUNCTION' privilege scope currently used for
-       *  - CREATE TEMP FUNCTION
-       *  - DROP TEMP FUNCTION.
-       */
-      if (udfURI != null) {
-        List<DBModelAuthorizable> udfUriHierarchy = new ArrayList<DBModelAuthorizable>();
-        udfUriHierarchy.add(hiveAuthzBinding.getAuthServer());
-        udfUriHierarchy.add(udfURI);
-        inputHierarchy.add(udfUriHierarchy);
-        for (WriteEntity writeEntity : outputs) {
-          List<DBModelAuthorizable> entityHierarchy = new ArrayList<DBModelAuthorizable>();
-          entityHierarchy.add(hiveAuthzBinding.getAuthServer());
-          entityHierarchy.addAll(getAuthzHierarchyFromEntity(writeEntity));
-          outputHierarchy.add(entityHierarchy);
-        }
-      }
-      break;
-    case CONNECT:
-      /* The 'CONNECT' is an implicit privilege scope currently used for
-       *  - USE <db>
-       *  It's allowed when the user has any privilege on the current database. For application
-       *  backward compatibility, we allow (optional) implicit connect permission on 'default' db.
-       */
-      List<DBModelAuthorizable> connectHierarchy = new ArrayList<DBModelAuthorizable>();
-      connectHierarchy.add(hiveAuthzBinding.getAuthServer());
-      // by default allow connect access to default db
-      Table currTbl = Table.ALL;
-      Column currCol = Column.ALL;
-      if (DEFAULT_DATABASE_NAME.equalsIgnoreCase(currDB.getName()) &&
-          "false".equalsIgnoreCase(authzConf.
-              get(HiveAuthzConf.AuthzConfVars.AUTHZ_RESTRICT_DEFAULT_DB.getVar(), "false"))) {
-        currDB = Database.ALL;
-        currTbl = Table.SOME;
-      }
-
-      connectHierarchy.add(currDB);
-      connectHierarchy.add(currTbl);
-      connectHierarchy.add(currCol);
-
-      inputHierarchy.add(connectHierarchy);
-      outputHierarchy.add(connectHierarchy);
-      break;
-    case COLUMN:
-      for (ReadEntity readEntity: inputs) {
-        if (readEntity.getAccessedColumns() != null && !readEntity.getAccessedColumns().isEmpty()) {
-          addColumnHierarchy(inputHierarchy, readEntity);
-        } else {
-          List<DBModelAuthorizable> entityHierarchy = new ArrayList<DBModelAuthorizable>();
-          entityHierarchy.add(hiveAuthzBinding.getAuthServer());
-          entityHierarchy.addAll(getAuthzHierarchyFromEntity(readEntity));
-          entityHierarchy.add(Column.ALL);
-          inputHierarchy.add(entityHierarchy);
-        }
-      }
-      break;
-    default:
-      throw new AuthorizationException("Unknown operation scope type " +
-          stmtAuthObject.getOperationScope().toString());
-    }
-
-    HiveAuthzBinding binding = null;
-    try {
-      binding = getHiveBindingWithPrivilegeCache(hiveAuthzBinding, context.getUserName());
-    } catch (SemanticException e) {
-      // Will use the original hiveAuthzBinding
-      binding = hiveAuthzBinding;
-    }
-    // validate permission
-    binding.authorize(stmtOperation, stmtAuthObject, getCurrentSubject(context), inputHierarchy,
-        outputHierarchy);
-  }
-
-  private HiveOperation getCurrentHiveStmtOp() {
-    SessionState sessState = SessionState.get();
-    if (sessState == null) {
-      // TODO: Warn
-      return null;
-    }
-    return sessState.getHiveOperation();
-  }
-
-  private Subject getCurrentSubject(HiveSemanticAnalyzerHookContext context) {
-    // Extract the username from the hook context
-    return new Subject(context.getUserName());
-  }
-
-  // Build the hierarchy of authorizable object for the given entity type.
-  private List<DBModelAuthorizable> getAuthzHierarchyFromEntity(Entity entity) {
-    List<DBModelAuthorizable> objectHierarchy = new ArrayList<DBModelAuthorizable>();
-    switch (entity.getType()) {
-    case TABLE:
-      objectHierarchy.add(new Database(entity.getTable().getDbName()));
-      objectHierarchy.add(new Table(entity.getTable().getTableName()));
-      break;
-    case PARTITION:
-    case DUMMYPARTITION:
-      objectHierarchy.add(new Database(entity.getPartition().getTable().getDbName()));
-      objectHierarchy.add(new Table(entity.getPartition().getTable().getTableName()));
-      break;
-    case DFS_DIR:
-    case LOCAL_DIR:
-      try {
-        objectHierarchy.add(parseURI(entity.toString(),
-            entity.getType().equals(Entity.Type.LOCAL_DIR)));
-      } catch (Exception e) {
-        throw new AuthorizationException("Failed to get File URI", e);
-      }
-      break;
-    case DATABASE:
-    case FUNCTION:
-      // TODO use database entities from compiler instead of capturing from AST
-      break;
-    default:
-      throw new UnsupportedOperationException("Unsupported entity type " +
-          entity.getType().name());
-    }
-    return objectHierarchy;
-  }
-
-  /**
-   * Add column level hierarchy to inputHierarchy
-   *
-   * @param inputHierarchy
-   * @param entity
-   * @param sentryContext
-   */
-  private void addColumnHierarchy(List<List<DBModelAuthorizable>> inputHierarchy,
-      ReadEntity entity) {
-    List<DBModelAuthorizable> entityHierarchy = new ArrayList<DBModelAuthorizable>();
-    entityHierarchy.add(hiveAuthzBinding.getAuthServer());
-    entityHierarchy.addAll(getAuthzHierarchyFromEntity(entity));
-
-    switch (entity.getType()) {
-    case TABLE:
-    case PARTITION:
-      List<String> cols = entity.getAccessedColumns();
-      for (String col : cols) {
-        List<DBModelAuthorizable> colHierarchy = new ArrayList<DBModelAuthorizable>(entityHierarchy);
-        colHierarchy.add(new Column(col));
-        inputHierarchy.add(colHierarchy);
-      }
-      break;
-    default:
-      inputHierarchy.add(entityHierarchy);
-    }
-  }
-
-  /**
-   * Get Authorizable from inputs and put into inputHierarchy
-   *
-   * @param inputHierarchy
-   * @param entity
-   * @param sentryContext
-   */
-  private void getInputHierarchyFromInputs(List<List<DBModelAuthorizable>> inputHierarchy,
-      Set<ReadEntity> inputs) {
-    for (ReadEntity readEntity: inputs) {
-      // skip the tables/view that are part of expanded view definition
-      // skip the Hive generated dummy entities created for queries like 'select <expr>'
-      if (isChildTabForView(readEntity) || isDummyEntity(readEntity)) {
-        continue;
-      }
-      if (readEntity.getAccessedColumns() != null && !readEntity.getAccessedColumns().isEmpty()) {
-        addColumnHierarchy(inputHierarchy, readEntity);
-      } else {
-        List<DBModelAuthorizable> entityHierarchy = new ArrayList<DBModelAuthorizable>();
-        entityHierarchy.add(hiveAuthzBinding.getAuthServer());
-        entityHierarchy.addAll(getAuthzHierarchyFromEntity(readEntity));
-        inputHierarchy.add(entityHierarchy);
-      }
-    }
-  }
-
-  // Check if this write entity needs to be skipped
-  private boolean filterWriteEntity(WriteEntity writeEntity)
-      throws AuthorizationException {
-    // skip URI validation for session scratch file URIs
-    if (writeEntity.isTempURI()) {
-      return true;
-    }
-    try {
-      if (writeEntity.getTyp().equals(Type.DFS_DIR)
-          || writeEntity.getTyp().equals(Type.LOCAL_DIR)) {
-        HiveConf conf = SessionState.get().getConf();
-        String warehouseDir = conf.getVar(ConfVars.METASTOREWAREHOUSE);
-        URI scratchURI = new URI(PathUtils.parseDFSURI(warehouseDir,
-          conf.getVar(HiveConf.ConfVars.SCRATCHDIR)));
-        URI requestURI = new URI(PathUtils.parseDFSURI(warehouseDir,
-          writeEntity.getLocation().getPath()));
-        LOG.debug("scratchURI = " + scratchURI + ", requestURI = " + 
requestURI);
-        if (PathUtils.impliesURI(scratchURI, requestURI)) {
-          return true;
-        }
-        URI localScratchURI = new URI(PathUtils.parseLocalURI(conf.getVar(HiveConf.ConfVars.LOCALSCRATCHDIR)));
-        URI localRequestURI = new URI(PathUtils.parseLocalURI(writeEntity.getLocation().getPath()));
-        LOG.debug("localScratchURI = " + localScratchURI + ", localRequestURI = " + localRequestURI);
-        if (PathUtils.impliesURI(localScratchURI, localRequestURI)) {
-          return true;
-        }
-      }
-    } catch (Exception e) {
-      throw new AuthorizationException("Failed to extract uri details", e);
-    }
-    return false;
-  }
-
-  public static List<String> filterShowTables(
-      HiveAuthzBinding hiveAuthzBinding, List<String> queryResult,
-      HiveOperation operation, String userName, String dbName)
-          throws SemanticException {
-    List<String> filteredResult = new ArrayList<String>();
-    Subject subject = new Subject(userName);
-    HiveAuthzPrivileges tableMetaDataPrivilege = new HiveAuthzPrivileges.AuthzPrivilegeBuilder().
-        addInputObjectPriviledge(AuthorizableType.Column, EnumSet.of(DBModelAction.SELECT, DBModelAction.INSERT)).
-        setOperationScope(HiveOperationScope.TABLE).
-        setOperationType(HiveOperationType.INFO).
-        build();
-
-    HiveAuthzBinding hiveBindingWithPrivilegeCache = getHiveBindingWithPrivilegeCache(hiveAuthzBinding, userName);
-
-    for (String tableName : queryResult) {
-      // if user has privileges on table, add to filtered list, else discard
-      Table table = new Table(tableName);
-      Database database;
-      database = new Database(dbName);
-
-      List<List<DBModelAuthorizable>> inputHierarchy = new ArrayList<List<DBModelAuthorizable>>();
-      List<List<DBModelAuthorizable>> outputHierarchy = new ArrayList<List<DBModelAuthorizable>>();
-      List<DBModelAuthorizable> externalAuthorizableHierarchy = new ArrayList<DBModelAuthorizable>();
-      externalAuthorizableHierarchy.add(hiveAuthzBinding.getAuthServer());
-      externalAuthorizableHierarchy.add(database);
-      externalAuthorizableHierarchy.add(table);
-      externalAuthorizableHierarchy.add(Column.ALL);
-      inputHierarchy.add(externalAuthorizableHierarchy);
-
-      try {
-        // do the authorization by new HiveAuthzBinding with PrivilegeCache
-        hiveBindingWithPrivilegeCache.authorize(operation, tableMetaDataPrivilege, subject,
-            inputHierarchy, outputHierarchy);
-        filteredResult.add(table.getName());
-      } catch (AuthorizationException e) {
-        // squash the exception: the user doesn't have privileges, so the table
-        // is not added to the filtered list.
-      }
-    }
-    return filteredResult;
-  }
-
-  public static List<FieldSchema> filterShowColumns(
-      HiveAuthzBinding hiveAuthzBinding, List<FieldSchema> cols,
-      HiveOperation operation, String userName, String tableName, String dbName)
-          throws SemanticException {
-    List<FieldSchema> filteredResult = new ArrayList<FieldSchema>();
-    Subject subject = new Subject(userName);
-    HiveAuthzPrivileges columnMetaDataPrivilege =
-        HiveAuthzPrivilegesMap.getHiveAuthzPrivileges(HiveOperation.SHOWCOLUMNS);
-    HiveAuthzBinding hiveBindingWithPrivilegeCache = getHiveBindingWithPrivilegeCache(hiveAuthzBinding, userName);
-
-    Database database = new Database(dbName);
-    Table table = new Table(tableName);
-    for (FieldSchema col : cols) {
-      // if user has privileges on column, add to filtered list, else discard
-      List<List<DBModelAuthorizable>> inputHierarchy = new ArrayList<List<DBModelAuthorizable>>();
-      List<List<DBModelAuthorizable>> outputHierarchy = new ArrayList<List<DBModelAuthorizable>>();
-      List<DBModelAuthorizable> externalAuthorizableHierarchy = new ArrayList<DBModelAuthorizable>();
-      externalAuthorizableHierarchy.add(hiveAuthzBinding.getAuthServer());
-      externalAuthorizableHierarchy.add(database);
-      externalAuthorizableHierarchy.add(table);
-      externalAuthorizableHierarchy.add(new Column(col.getName()));
-      inputHierarchy.add(externalAuthorizableHierarchy);
-
-      try {
-        // do the authorization by new HiveAuthzBinding with PrivilegeCache
-        hiveBindingWithPrivilegeCache.authorize(operation, columnMetaDataPrivilege, subject,
-            inputHierarchy, outputHierarchy);
-        filteredResult.add(col);
-      } catch (AuthorizationException e) {
-        // squash the exception: the user doesn't have privileges, so the column
-        // is not added to the filtered list.
-      }
-    }
-    return filteredResult;
-  }
-
-  public static List<String> filterShowDatabases(
-      HiveAuthzBinding hiveAuthzBinding, List<String> queryResult,
-      HiveOperation operation, String userName) throws SemanticException {
-    List<String> filteredResult = new ArrayList<String>();
-    Subject subject = new Subject(userName);
-    HiveAuthzBinding hiveBindingWithPrivilegeCache = getHiveBindingWithPrivilegeCache(hiveAuthzBinding, userName);
-
-    HiveAuthzPrivileges anyPrivilege = new HiveAuthzPrivileges.AuthzPrivilegeBuilder().
-        addInputObjectPriviledge(AuthorizableType.Column, EnumSet.of(DBModelAction.SELECT, DBModelAction.INSERT)).
-        addInputObjectPriviledge(AuthorizableType.URI, EnumSet.of(DBModelAction.SELECT)).
-        setOperationScope(HiveOperationScope.CONNECT).
-        setOperationType(HiveOperationType.QUERY).
-        build();
-
-    for (String dbName:queryResult) {
-      // if user has privileges on database, add to filtered list, else discard
-      Database database = null;
-
-      // if default is not restricted, continue
-      if (DEFAULT_DATABASE_NAME.equalsIgnoreCase(dbName) && "false".equalsIgnoreCase(
-        hiveAuthzBinding.getAuthzConf().get(
-              HiveAuthzConf.AuthzConfVars.AUTHZ_RESTRICT_DEFAULT_DB.getVar(),
-              "false"))) {
-        filteredResult.add(DEFAULT_DATABASE_NAME);
-        continue;
-      }
-
-      database = new Database(dbName);
-
-      List<List<DBModelAuthorizable>> inputHierarchy = new ArrayList<List<DBModelAuthorizable>>();
-      List<List<DBModelAuthorizable>> outputHierarchy = new ArrayList<List<DBModelAuthorizable>>();
-      List<DBModelAuthorizable> externalAuthorizableHierarchy = new ArrayList<DBModelAuthorizable>();
-      externalAuthorizableHierarchy.add(hiveAuthzBinding.getAuthServer());
-      externalAuthorizableHierarchy.add(database);
-      externalAuthorizableHierarchy.add(Table.ALL);
-      externalAuthorizableHierarchy.add(Column.ALL);
-      inputHierarchy.add(externalAuthorizableHierarchy);
-
-      try {
-        // do the authorization by new HiveAuthzBinding with PrivilegeCache
-        hiveBindingWithPrivilegeCache.authorize(operation, anyPrivilege, subject,
-            inputHierarchy, outputHierarchy);
-        filteredResult.add(database.getName());
-      } catch (AuthorizationException e) {
-        // squash the exception: the user doesn't have privileges, so the database
-        // is not added to the filtered list.
-      }
-    }
-
-    return filteredResult;
-  }
-
-  /**
-   * Check if the given read entity is a table that has parents of type Table
-   * The Hive compiler performs a query rewrite by replacing a view with its definition. In the process, it captures both
-   * the original view and the tables/views that it selects from.
-   * The access authorization is only interested in the top level views and not the underlying tables.
-   * @param readEntity
-   * @return
-   */
-  private boolean isChildTabForView(ReadEntity readEntity) {
-    // If this is a table added for view, then we need to skip that
-    if (!readEntity.getType().equals(Type.TABLE) && !readEntity.getType().equals(Type.PARTITION)) {
-      return false;
-    }
-    if (readEntity.getParents() != null && readEntity.getParents().size() > 0) {
-      for (ReadEntity parentEntity : readEntity.getParents()) {
-        if (!parentEntity.getType().equals(Type.TABLE)) {
-          return false;
-        }
-      }
-      return true;
-    } else {
-      return false;
-    }
-  }
-
-  /**
-   * Returns the hooks specified in a configuration variable. The hooks are returned in a list in
-   * the order they were specified in the configuration variable.
-   *
-   * @param csHooks The configuration value specifying a comma separated list of the hook
-   *                class names.
-   * @return        A list of the hooks, in the order they are listed in the configuration value
-   * @throws Exception
-   */
-  private static <T extends Hook> List<T> getHooks(String csHooks) throws Exception {
-
-    List<T> hooks = new ArrayList<T>();
-    if (csHooks.isEmpty()) {
-      return hooks;
-    }
-    for (String hookClass : Splitter.on(",").omitEmptyStrings().trimResults().split(csHooks)) {
-      try {
-        @SuppressWarnings("unchecked")
-        T hook =
-            (T) Class.forName(hookClass, true, JavaUtils.getClassLoader()).newInstance();
-        hooks.add(hook);
-      } catch (ClassNotFoundException e) {
-        LOG.error(hookClass + " Class not found:" + e.getMessage());
-        throw e;
-      }
-    }
-
-    return hooks;
-  }
-
-  // Check if the given entity is identified as dummy by Hive compilers.
-  private boolean isDummyEntity(Entity entity) {
-    return entity.isDummy();
-  }
-
-  // create hiveBinding with PrivilegeCache
-  private static HiveAuthzBinding getHiveBindingWithPrivilegeCache(HiveAuthzBinding hiveAuthzBinding,
-      String userName) throws SemanticException {
-    // get the original HiveAuthzBinding, and get the user's privileges by AuthorizationProvider
-    AuthorizationProvider authProvider = hiveAuthzBinding.getCurrentAuthProvider();
-    Set<String> userPrivileges = authProvider.getPolicyEngine().getPrivileges(
-            authProvider.getGroupMapping().getGroups(userName), hiveAuthzBinding.getActiveRoleSet(),
-            hiveAuthzBinding.getAuthServer());
-
-    // create PrivilegeCache using user's privileges
-    PrivilegeCache privilegeCache = new SimplePrivilegeCache(userPrivileges);
-    try {
-      // create new instance of HiveAuthzBinding whose backend provider should be SimpleCacheProviderBackend
-      return new HiveAuthzBinding(HiveAuthzBinding.HiveHook.HiveServer2, hiveAuthzBinding.getHiveConf(),
-              hiveAuthzBinding.getAuthzConf(), privilegeCache);
-    } catch (Exception e) {
-      LOG.error("Can not create HiveAuthzBinding with privilege cache.");
-      throw new SemanticException(e);
-    }
-  }
-
-  private static boolean hasPrefixMatch(List<String> prefixList, final String str) {
-    for (String prefix : prefixList) {
-      if (str.startsWith(prefix)) {
-        return true;
-      }
-    }
-
-    return false;
-  }
-
-  /**
-   * Set the Serde URI privileges. If the URI privileges are not set, serdeURI will be null and
-   * the URI authorization checks will be skipped.
-   */
-  private void setSerdeURI(String serdeClassName) throws SemanticException {
-    if (!serdeURIPrivilegesEnabled) {
-      return;
-    }
-
-    // WhiteList Serde Jar can be used by any users. WhiteList checking is
-    // done by comparing the Java package name. The assumption is cluster
-    // admin will ensure there is no Java namespace collision.
-    // e.g. org.apache.hadoop.hive.serde2 is used by hive and cluster admin should
-    // ensure no custom Serde class is introduced under the same namespace.
-    if (!hasPrefixMatch(serdeWhiteList, serdeClassName)) {
-      try {
-        CodeSource serdeSrc = Class.forName(serdeClassName, true, Utilities.getSessionSpecifiedClassLoader()).getProtectionDomain().getCodeSource();
-        if (serdeSrc == null) {
-          throw new SemanticException("Could not resolve the jar for Serde 
class " + serdeClassName);
-        }
-
-        String serdeJar = serdeSrc.getLocation().getPath();
-        if (serdeJar == null || serdeJar.isEmpty()) {
-          throw new SemanticException("Could not find the jar for Serde class 
" + serdeClassName + "to validate privileges");
-        }
-
-        serdeURI = parseURI(serdeSrc.getLocation().toString(), true);
-      } catch (ClassNotFoundException e) {
-        throw new SemanticException("Error retrieving Serde class:" + 
e.getMessage(), e);
-      }
-    }
-  }
 }
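
[Note: the bulk of this diff removes logic shared across the Hive bindings (URI parsing, authorizable-hierarchy construction, metadata filtering, the privilege cache), which the hook now inherits from the HiveAuthzBindingHookBase superclass named in the new class declaration. For orientation, the removed filterShow* methods all authorize against a Server -> Database -> Table -> Column hierarchy; a minimal sketch of that pattern, mirroring the removed filterShowTables() and not part of the commit:

    // Build the input hierarchy probed for one candidate table. authorize()
    // throws AuthorizationException when no privilege matches, and the removed
    // filter methods squash that exception and drop the entry from the result.
    private static List<List<DBModelAuthorizable>> tableHierarchy(
        DBModelAuthorizable authServer, String dbName, String tableName) {
      List<DBModelAuthorizable> hierarchy = new ArrayList<DBModelAuthorizable>();
      hierarchy.add(authServer);            // Server, from getAuthServer()
      hierarchy.add(new Database(dbName));  // Database
      hierarchy.add(new Table(tableName));  // Table
      hierarchy.add(Column.ALL);            // any column privilege suffices
      List<List<DBModelAuthorizable>> inputHierarchy =
          new ArrayList<List<DBModelAuthorizable>>();
      inputHierarchy.add(hierarchy);
      return inputHierarchy;
    }
]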

http://git-wip-us.apache.org/repos/asf/sentry/blob/7a30c819/sentry-binding/sentry-binding-hive/src/main/java/org/apache/sentry/binding/hive/HiveAuthzBindingSessionHook.java
----------------------------------------------------------------------
diff --git a/sentry-binding/sentry-binding-hive/src/main/java/org/apache/sentry/binding/hive/HiveAuthzBindingSessionHook.java b/sentry-binding/sentry-binding-hive/src/main/java/org/apache/sentry/binding/hive/HiveAuthzBindingSessionHook.java
index 17b9003..6d9150f 100644
--- a/sentry-binding/sentry-binding-hive/src/main/java/org/apache/sentry/binding/hive/HiveAuthzBindingSessionHook.java
+++ b/sentry-binding/sentry-binding-hive/src/main/java/org/apache/sentry/binding/hive/HiveAuthzBindingSessionHook.java
@@ -104,7 +104,7 @@ public class HiveAuthzBindingSessionHook
 
     appendConfVar(sessionConf, ConfVars.SEMANTIC_ANALYZER_HOOK.varname,
         SEMANTIC_HOOK);
-    HiveAuthzConf authzConf = HiveAuthzBindingHook.loadAuthzConf(sessionConf);
+    HiveAuthzConf authzConf = HiveAuthzBindingHookBase.loadAuthzConf(sessionConf);
     String commandWhitelist =
         authzConf.get(HiveAuthzConf.HIVE_SENTRY_SECURITY_COMMAND_WHITELIST,
             HiveAuthzConf.HIVE_SENTRY_SECURITY_COMMAND_WHITELIST_DEFAULT);

http://git-wip-us.apache.org/repos/asf/sentry/blob/7a30c819/sentry-binding/sentry-binding-hive/src/main/java/org/apache/sentry/binding/hive/SentryIniPolicyFileFormatter.java
----------------------------------------------------------------------
diff --git a/sentry-binding/sentry-binding-hive/src/main/java/org/apache/sentry/binding/hive/SentryIniPolicyFileFormatter.java b/sentry-binding/sentry-binding-hive/src/main/java/org/apache/sentry/binding/hive/SentryIniPolicyFileFormatter.java
deleted file mode 100644
index 45747df..0000000
--- a/sentry-binding/sentry-binding-hive/src/main/java/org/apache/sentry/binding/hive/SentryIniPolicyFileFormatter.java
+++ /dev/null
@@ -1,161 +0,0 @@
-/**
- * Licensed to the Apache Software Foundation (ASF) under one
- * or more contributor license agreements.  See the NOTICE file
- * distributed with this work for additional information
- * regarding copyright ownership.  The ASF licenses this file
- * to you under the Apache License, Version 2.0 (the
- * "License"); you may not use this file except in compliance
- * with the License.  You may obtain a copy of the License at
- *
- *     http://www.apache.org/licenses/LICENSE-2.0
- *
- * Unless required by applicable law or agreed to in writing, software
- * distributed under the License is distributed on an "AS IS" BASIS,
- * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
- * See the License for the specific language governing permissions and
- * limitations under the License.
- */
-
-package org.apache.sentry.binding.hive;
-
-import java.io.File;
-import java.util.List;
-import java.util.Map;
-import java.util.Set;
-
-import org.apache.hadoop.conf.Configuration;
-import org.apache.sentry.policy.common.PolicyConstants;
-import org.apache.sentry.provider.common.PolicyFileConstants;
-import org.apache.sentry.provider.common.ProviderBackendContext;
-import org.apache.sentry.provider.file.SimpleFileProviderBackend;
-import org.slf4j.Logger;
-import org.slf4j.LoggerFactory;
-
-import com.google.common.base.Charsets;
-import com.google.common.base.Joiner;
-import com.google.common.collect.Lists;
-import com.google.common.collect.Maps;
-import com.google.common.collect.Sets;
-import com.google.common.collect.Table;
-import com.google.common.io.Files;
-
-/**
- * SentryIniPolicyFileFormatter parses and writes Sentry mapping data files in
- * ini format, e.g.:
- * [groups]
- * group1=role1
- * [roles]
- * role1=server=server1
- */
-public class SentryIniPolicyFileFormatter implements SentryPolicyFileFormatter {
-
-  private static final Logger LOGGER = LoggerFactory.getLogger(SentryIniPolicyFileFormatter.class);
-
-  private static final String NL = System.getProperty("line.separator", "\n");
-
-  /**
-   * Write the sentry mapping data to ini file.
-   * 
-   * @param resourcePath
-   *        The path of the output file
-   * @param sentryMappingData
-   *        The map for sentry mapping data, eg:
-   *        for the following mapping data:
-   *        group1=role1,role2
-   *        group2=role2,role3
-   *        role1=server=server1->db=db1
-   *        role2=server=server1->db=db1->table=tbl1,server=server1->db=db1->table=tbl2
-   *        role3=server=server1->url=hdfs://localhost/path
-   * 
-   *        The sentryMappingData will be input as:
-   *        {
-   *        groups={[group1={role1, role2}], group2=[role2, role3]},
-   *        roles={role1=[server=server1->db=db1],
-   *        role2=[server=server1->db=db1->table=tbl1,server=server1->db=db1->table=tbl2],
-   *        role3=[server=server1->url=hdfs://localhost/path]
-   *        }
-   *        }
-   */
-  @Override
-  public void write(String resourcePath, Map<String, Map<String, Set<String>>> sentryMappingData)
-      throws Exception {
-    File destFile = new File(resourcePath);
-    if (destFile.exists() && !destFile.delete()) {
-      throw new IllegalStateException("Unable to delete " + destFile);
-    }
-    String contents = Joiner
-        .on(NL)
-        .join(
-        generateSection(PolicyFileConstants.GROUPS,
-                sentryMappingData.get(PolicyFileConstants.GROUPS)),
-        generateSection(PolicyFileConstants.ROLES,
-                sentryMappingData.get(PolicyFileConstants.ROLES)),
-            "");
-    LOGGER.info("Writing policy file to " + destFile + ":\n" + contents);
-    Files.write(contents, destFile, Charsets.UTF_8);
-  }
-
-  /**
-   * parse the ini file and return a map with all data
-   * 
-   * @param resourcePath
-   *        The path of the input file
-   * @param conf
-   *        The configuration info
-   * @return the result of sentry mapping data in map structure.
-   */
-  @Override
-  public Map<String, Map<String, Set<String>>> parse(String resourcePath, Configuration conf)
-      throws Exception {
-    Map<String, Map<String, Set<String>>> resultMap = Maps.newHashMap();
-    // SimpleFileProviderBackend is used to parse the ini file
-    SimpleFileProviderBackend policyFileBackend = new SimpleFileProviderBackend(conf, resourcePath);
-    ProviderBackendContext context = new ProviderBackendContext();
-    context.setAllowPerDatabase(true);
-    // parse the ini file
-    policyFileBackend.initialize(context);
-
-    // SimpleFileProviderBackend parsed the input file and output the data in Table format.
-    Table<String, String, Set<String>> groupRolePrivilegeTable = policyFileBackend
-        .getGroupRolePrivilegeTable();
-    Map<String, Set<String>> groupRolesMap = Maps.newHashMap();
-    Map<String, Set<String>> rolePrivilegesMap = Maps.newHashMap();
-    for (String groupName : groupRolePrivilegeTable.rowKeySet()) {
-      for (String roleName : groupRolePrivilegeTable.columnKeySet()) {
-        // get the roles set for the current groupName
-        Set<String> tempRoles = groupRolesMap.get(groupName);
-        if (tempRoles == null) {
-          tempRoles = Sets.newHashSet();
-        }
-        Set<String> privileges = groupRolePrivilegeTable.get(groupName, roleName);
-        // if no privilege exists for [group, role], the [group, role] info
-        // will be discarded.
-        if (privileges != null) {
-          // update [group, role] mapping data
-          tempRoles.add(roleName);
-          groupRolesMap.put(groupName, tempRoles);
-          // update [role, privilege] mapping data
-          rolePrivilegesMap.put(roleName, privileges);
-        }
-      }
-    }
-    resultMap.put(PolicyFileConstants.GROUPS, groupRolesMap);
-    resultMap.put(PolicyFileConstants.ROLES, rolePrivilegesMap);
-    return resultMap;
-  }
-
-  // generate the ini section according to the mapping data.
-  private String generateSection(String name, Map<String, Set<String>> mappingData) {
-    if (mappingData.isEmpty()) {
-      return "";
-    }
-    List<String> lines = Lists.newArrayList();
-    lines.add("[" + name + "]");
-    for (Map.Entry<String, Set<String>> entry : mappingData.entrySet()) {
-      lines.add(PolicyConstants.KV_JOINER.join(entry.getKey(),
-          PolicyConstants.ROLE_JOINER.join(entry.getValue())));
-    }
-    return Joiner.on(NL).join(lines);
-  }
-
-}
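
[Note: the parse() method above flattens the Guava Table (group x role -> privileges) produced by SimpleFileProviderBackend into the two maps of the result. A standalone sketch of that conversion, not part of the commit; the class name and sample data are hypothetical:

    import java.util.Map;
    import java.util.Set;
    import com.google.common.collect.HashBasedTable;
    import com.google.common.collect.Maps;
    import com.google.common.collect.Sets;
    import com.google.common.collect.Table;

    public class TableFlattenSketch {
      public static void main(String[] args) {
        // group x role -> privileges, as returned by getGroupRolePrivilegeTable()
        Table<String, String, Set<String>> t = HashBasedTable.create();
        t.put("group1", "role1", Sets.newHashSet("server=server1->db=db1"));

        Map<String, Set<String>> groupRoles = Maps.newHashMap();
        Map<String, Set<String>> rolePrivileges = Maps.newHashMap();
        for (String group : t.rowKeySet()) {
          for (String role : t.columnKeySet()) {
            Set<String> privileges = t.get(group, role);
            if (privileges == null) {
              continue; // no privilege for this [group, role] pair: drop it
            }
            Set<String> roles = groupRoles.get(group);
            if (roles == null) {
              roles = Sets.newHashSet();
              groupRoles.put(group, roles);
            }
            roles.add(role);                    // record [group, role]
            rolePrivileges.put(role, privileges); // record [role, privileges]
          }
        }
        System.out.println(groupRoles);     // {group1=[role1]}
        System.out.println(rolePrivileges); // {role1=[server=server1->db=db1]}
      }
    }
]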

http://git-wip-us.apache.org/repos/asf/sentry/blob/7a30c819/sentry-binding/sentry-binding-hive/src/main/java/org/apache/sentry/binding/hive/SentryOnFailureHook.java
----------------------------------------------------------------------
diff --git a/sentry-binding/sentry-binding-hive/src/main/java/org/apache/sentry/binding/hive/SentryOnFailureHook.java b/sentry-binding/sentry-binding-hive/src/main/java/org/apache/sentry/binding/hive/SentryOnFailureHook.java
deleted file mode 100644
index 45a2925..0000000
--- a/sentry-binding/sentry-binding-hive/src/main/java/org/apache/sentry/binding/hive/SentryOnFailureHook.java
+++ /dev/null
@@ -1,38 +0,0 @@
-/**
- * Licensed to the Apache Software Foundation (ASF) under one
- * or more contributor license agreements.  See the NOTICE file
- * distributed with this work for additional information
- * regarding copyright ownership.  The ASF licenses this file
- * to you under the Apache License, Version 2.0 (the
- * "License"); you may not use this file except in compliance
- * with the License.  You may obtain a copy of the License at
- *
- *     http://www.apache.org/licenses/LICENSE-2.0
- *
- * Unless required by applicable law or agreed to in writing, software
- * distributed under the License is distributed on an "AS IS" BASIS,
- * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
- * See the License for the specific language governing permissions and
- * limitations under the License.
- */
-
-package org.apache.sentry.binding.hive;
-
-import org.apache.hadoop.hive.ql.hooks.Hook;
-
-/**
- *
- * SentryOnFailureHook allows Sentry to be extended
- * with custom logic to be executed upon authorization failure.
- *
- */
-public interface SentryOnFailureHook extends Hook {
-
-  /**
-   *
-   * @param context
-   *     The hook context passed to each hook.
-   * @throws Exception
-   */
-  void run(SentryOnFailureHookContext context) throws Exception;
-}
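
[Note: implementations of this deleted interface were loaded reflectively by the getHooks() helper removed from HiveAuthzBindingHook above and invoked on authorization failure. A minimal illustrative implementation, not part of the commit; the class is hypothetical and assumes the interface keeps this package after the move:

    package org.apache.sentry.binding.hive;

    // Hypothetical hook that logs every authorization failure.
    public class LoggingOnFailureHook implements SentryOnFailureHook {
      @Override
      public void run(SentryOnFailureHookContext context) throws Exception {
        System.err.println("Sentry denied user " + context.getUserName()
            + " for command: " + context.getCommand());
      }
    }
]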

http://git-wip-us.apache.org/repos/asf/sentry/blob/7a30c819/sentry-binding/sentry-binding-hive/src/main/java/org/apache/sentry/binding/hive/SentryOnFailureHookContext.java
----------------------------------------------------------------------
diff --git a/sentry-binding/sentry-binding-hive/src/main/java/org/apache/sentry/binding/hive/SentryOnFailureHookContext.java b/sentry-binding/sentry-binding-hive/src/main/java/org/apache/sentry/binding/hive/SentryOnFailureHookContext.java
deleted file mode 100644
index c101a4f..0000000
--- a/sentry-binding/sentry-binding-hive/src/main/java/org/apache/sentry/binding/hive/SentryOnFailureHookContext.java
+++ /dev/null
@@ -1,98 +0,0 @@
-/*
- * Licensed to the Apache Software Foundation (ASF) under one
- * or more contributor license agreements.  See the NOTICE file
- * distributed with this work for additional information
- * regarding copyright ownership.  The ASF licenses this file
- * to you under the Apache License, Version 2.0 (the
- * "License"); you may not use this file except in compliance
- * with the License.  You may obtain a copy of the License at
- *
- *     http://www.apache.org/licenses/LICENSE-2.0
- *
- * Unless required by applicable law or agreed to in writing, software
- * distributed under the License is distributed on an "AS IS" BASIS,
- * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
- * See the License for the specific language governing permissions and
- * limitations under the License.
- */
-
-package org.apache.sentry.binding.hive;
-
-import java.util.Set;
-
-import org.apache.hadoop.conf.Configuration;
-import org.apache.hadoop.hive.ql.hooks.ReadEntity;
-import org.apache.hadoop.hive.ql.hooks.WriteEntity;
-import org.apache.hadoop.hive.ql.metadata.AuthorizationException;
-import org.apache.hadoop.hive.ql.plan.HiveOperation;
-import org.apache.sentry.core.model.db.AccessURI;
-import org.apache.sentry.core.model.db.Database;
-import org.apache.sentry.core.model.db.Table;
-
-/**
- * Context information provided by Sentry to implementations
- * of SentryOnFailureHook
- */
-public interface SentryOnFailureHookContext  {
-
-  /**
-   * @return the command attempted by user
-   */
-  String getCommand();
-
-  /**
-    * @return the set of read entities
-    */
-  Set<ReadEntity> getInputs();
-
-  /**
-   * @return the set of write entities
-   */
-  Set<WriteEntity> getOutputs();
-
-  /**
-   * @return the operation
-   */
-  HiveOperation getHiveOp();
-
-  /**
-   * @return the user name
-   */
-  String getUserName();
-
-  /**
-   * @return the ip address
-   */
-  String getIpAddress();
-
-  /**
-   * @return the database object
-   */
-  Database getDatabase();
-
-  /**
-   * @return the table object
-   */
-  Table getTable();
-
-  /**
-   * @return the udf URI
-   */
-  AccessURI getUdfURI();
-
-  /**
-   * @return the partition URI
-   */
-  AccessURI getPartitionURI();
-
-  /**
-   * @return the authorization failure exception
-   */
-  AuthorizationException getException();
-
-  /**
-   * @return the config
-   */
-  Configuration getConf();
-
-}

http://git-wip-us.apache.org/repos/asf/sentry/blob/7a30c819/sentry-binding/sentry-binding-hive/src/main/java/org/apache/sentry/binding/hive/SentryOnFailureHookContextImpl.java
----------------------------------------------------------------------
diff --git a/sentry-binding/sentry-binding-hive/src/main/java/org/apache/sentry/binding/hive/SentryOnFailureHookContextImpl.java b/sentry-binding/sentry-binding-hive/src/main/java/org/apache/sentry/binding/hive/SentryOnFailureHookContextImpl.java
deleted file mode 100644
index f97d7f3..0000000
--- a/sentry-binding/sentry-binding-hive/src/main/java/org/apache/sentry/binding/hive/SentryOnFailureHookContextImpl.java
+++ /dev/null
@@ -1,125 +0,0 @@
-/*
- * Licensed to the Apache Software Foundation (ASF) under one
- * or more contributor license agreements.  See the NOTICE file
- * distributed with this work for additional information
- * regarding copyright ownership.  The ASF licenses this file
- * to you under the Apache License, Version 2.0 (the
- * "License"); you may not use this file except in compliance
- * with the License.  You may obtain a copy of the License at
- *
- *     http://www.apache.org/licenses/LICENSE-2.0
- *
- * Unless required by applicable law or agreed to in writing, software
- * distributed under the License is distributed on an "AS IS" BASIS,
- * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
- * See the License for the specific language governing permissions and
- * limitations under the License.
- */
-
-package org.apache.sentry.binding.hive;
-
-import java.util.Set;
-
-import org.apache.hadoop.conf.Configuration;
-import org.apache.hadoop.hive.ql.hooks.ReadEntity;
-import org.apache.hadoop.hive.ql.hooks.WriteEntity;
-import org.apache.hadoop.hive.ql.metadata.AuthorizationException;
-import org.apache.hadoop.hive.ql.plan.HiveOperation;
-import org.apache.sentry.core.model.db.AccessURI;
-import org.apache.sentry.core.model.db.Database;
-import org.apache.sentry.core.model.db.Table;
-
-public class SentryOnFailureHookContextImpl implements SentryOnFailureHookContext {
-
-  private final String command;
-  private final Set<ReadEntity> inputs;
-  private final Set<WriteEntity> outputs;
-  private final HiveOperation hiveOp;
-  private final String userName;
-  private final String ipAddress;
-  private final Database database;
-  private final Table table;
-  private final AccessURI udfURI;
-  private final AccessURI partitionURI;
-  private final AuthorizationException authException;
-  private final Configuration conf;
-
-  public SentryOnFailureHookContextImpl(String command,
-      Set<ReadEntity> inputs, Set<WriteEntity> outputs, HiveOperation hiveOp,
-      Database db, Table tab, AccessURI udfURI, AccessURI partitionURI,
-      String userName, String ipAddress, AuthorizationException e,
-      Configuration conf) {
-    this.command = command;
-    this.inputs = inputs;
-    this.outputs = outputs;
-    this.hiveOp = hiveOp;
-    this.userName = userName;
-    this.ipAddress = ipAddress;
-    this.database = db;
-    this.table = tab;
-    this.udfURI = udfURI;
-    this.partitionURI = partitionURI;
-    this.authException = e;
-    this.conf = conf;
-  }
-
-  @Override
-  public String getCommand() {
-    return command;
-  }
-
-  @Override
-  public Set<ReadEntity> getInputs() {
-    return inputs;
-  }
-
-  @Override
-  public Set<WriteEntity> getOutputs() {
-    return outputs;
-  }
-
-  @Override
-  public HiveOperation getHiveOp() {
-    return hiveOp;
-  }
-
-  @Override
-  public String getUserName() {
-    return userName;
-  }
-
-  @Override
-  public String getIpAddress() {
-    return ipAddress;
-  }
-
-  @Override
-  public Database getDatabase() {
-    return database;
-  }
-
-  @Override
-  public Table getTable() {
-    return table;
-  }
-
-  @Override
-  public AccessURI getUdfURI() {
-    return udfURI;
-  }
-
-  @Override
-  public AccessURI getPartitionURI() {
-    return partitionURI;
-  }
-
-  @Override
-  public AuthorizationException getException() {
-    return authException;
-  }
-
-  @Override
-  public Configuration getConf() {
-    return conf;
-  }
-}
\ No newline at end of file

http://git-wip-us.apache.org/repos/asf/sentry/blob/7a30c819/sentry-binding/sentry-binding-hive/src/main/java/org/apache/sentry/binding/hive/SentryPolicyFileFormatFactory.java
----------------------------------------------------------------------
diff --git a/sentry-binding/sentry-binding-hive/src/main/java/org/apache/sentry/binding/hive/SentryPolicyFileFormatFactory.java b/sentry-binding/sentry-binding-hive/src/main/java/org/apache/sentry/binding/hive/SentryPolicyFileFormatFactory.java
deleted file mode 100644
index d2c6072..0000000
--- a/sentry-binding/sentry-binding-hive/src/main/java/org/apache/sentry/binding/hive/SentryPolicyFileFormatFactory.java
+++ /dev/null
@@ -1,44 +0,0 @@
-/**
- * Licensed to the Apache Software Foundation (ASF) under one
- * or more contributor license agreements.  See the NOTICE file
- * distributed with this work for additional information
- * regarding copyright ownership.  The ASF licenses this file
- * to you under the Apache License, Version 2.0 (the
- * "License"); you may not use this file except in compliance
- * with the License.  You may obtain a copy of the License at
- *
- *     http://www.apache.org/licenses/LICENSE-2.0
- *
- * Unless required by applicable law or agreed to in writing, software
- * distributed under the License is distributed on an "AS IS" BASIS,
- * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
- * See the License for the specific language governing permissions and
- * limitations under the License.
- */
-
-package org.apache.sentry.binding.hive;
-
-import java.lang.reflect.Constructor;
-
-import org.apache.sentry.binding.hive.conf.HiveAuthzConf;
-import org.apache.sentry.binding.hive.conf.HiveAuthzConf.AuthzConfVars;
-
-/**
- * SentryPolicyFileFormatFactory is used to create a FileFormatter for a given file type according
- * to the configuration; the default FileFormatter is for ini files.
- */
-public class SentryPolicyFileFormatFactory {
-
-  public static SentryPolicyFileFormatter createFileFormatter(HiveAuthzConf conf) throws Exception {
-    // The default formatter is org.apache.sentry.binding.hive.SentryIniPolicyFileFormatter, for ini
-    // file.
-    String policyFileFormatterName = conf.get(AuthzConfVars.AUTHZ_POLICY_FILE_FORMATTER.getVar());
-    // load the policy file formatter class
-    Constructor<?> policyFileFormatterConstructor = Class.forName(policyFileFormatterName)
-        .getDeclaredConstructor();
-    policyFileFormatterConstructor.setAccessible(true);
-    SentryPolicyFileFormatter sentryPolicyFileFormatter = (SentryPolicyFileFormatter) policyFileFormatterConstructor
-        .newInstance();
-    return sentryPolicyFileFormatter;
-  }
-}
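
[Note: the factory instantiates whichever formatter class the AUTHZ_POLICY_FILE_FORMATTER setting names, via reflection. An illustrative usage sketch, not part of the commit; it assumes a HiveConf named hiveConf is at hand and that HiveAuthzConf can be passed where parse() expects a Hadoop Configuration, as the factory's own use of it suggests:

    // Load the configured formatter and round-trip a policy file:
    // parse to the nested-map form, then write it back out.
    HiveAuthzConf authzConf = HiveAuthzBindingHookBase.loadAuthzConf(hiveConf);
    SentryPolicyFileFormatter formatter =
        SentryPolicyFileFormatFactory.createFileFormatter(authzConf);
    Map<String, Map<String, Set<String>>> mapping =
        formatter.parse("/path/to/sentry-policy.ini", authzConf);
    formatter.write("/path/to/sentry-policy-copy.ini", mapping);
]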

http://git-wip-us.apache.org/repos/asf/sentry/blob/7a30c819/sentry-binding/sentry-binding-hive/src/main/java/org/apache/sentry/binding/hive/SentryPolicyFileFormatter.java
----------------------------------------------------------------------
diff --git a/sentry-binding/sentry-binding-hive/src/main/java/org/apache/sentry/binding/hive/SentryPolicyFileFormatter.java b/sentry-binding/sentry-binding-hive/src/main/java/org/apache/sentry/binding/hive/SentryPolicyFileFormatter.java
deleted file mode 100644
index 4f465b3..0000000
--- a/sentry-binding/sentry-binding-hive/src/main/java/org/apache/sentry/binding/hive/SentryPolicyFileFormatter.java
+++ /dev/null
@@ -1,39 +0,0 @@
-/**
- * Licensed to the Apache Software Foundation (ASF) under one
- * or more contributor license agreements.  See the NOTICE file
- * distributed with this work for additional information
- * regarding copyright ownership.  The ASF licenses this file
- * to you under the Apache License, Version 2.0 (the
- * "License"); you may not use this file except in compliance
- * with the License.  You may obtain a copy of the License at
- *
- *     http://www.apache.org/licenses/LICENSE-2.0
- *
- * Unless required by applicable law or agreed to in writing, software
- * distributed under the License is distributed on an "AS IS" BASIS,
- * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
- * See the License for the specific language governing permissions and
- * limitations under the License.
- */
-
-package org.apache.sentry.binding.hive;
-
-import java.util.Map;
-import java.util.Set;
-
-import org.apache.hadoop.conf.Configuration;
-
-/**
- * SentryPolicyFileFormatter parses and writes files containing Sentry mapping data.
- */
-public interface SentryPolicyFileFormatter {
-
-  // write the sentry mapping data to file
-  void write(String resourcePath, Map<String, Map<String, Set<String>>> sentryMappingData)
-      throws Exception;
-
-  // parse the sentry mapping data from file
-  Map<String, Map<String, Set<String>>> parse(String resourcePath, Configuration conf)
-      throws Exception;
-
-}
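
To make the contract above concrete, here is a hedged sketch of an alternative implementation using plain Java serialization. SerializedPolicyFileFormatter is a made-up name for illustration only; the formatter Sentry actually ships against this interface is SentryIniPolicyFileFormatter.

    package org.apache.sentry.binding.hive;

    import java.io.FileInputStream;
    import java.io.FileOutputStream;
    import java.io.ObjectInputStream;
    import java.io.ObjectOutputStream;
    import java.util.Map;
    import java.util.Set;

    import org.apache.hadoop.conf.Configuration;

    public class SerializedPolicyFileFormatter implements SentryPolicyFileFormatter {

      @Override
      public void write(String resourcePath, Map<String, Map<String, Set<String>>> sentryMappingData)
          throws Exception {
        // Dump the role -> group -> privilege mapping in one shot.
        try (ObjectOutputStream out = new ObjectOutputStream(new FileOutputStream(resourcePath))) {
          out.writeObject(sentryMappingData);
        }
      }

      @Override
      @SuppressWarnings("unchecked")
      public Map<String, Map<String, Set<String>>> parse(String resourcePath, Configuration conf)
          throws Exception {
        // The conf parameter is unused here; a real formatter might honor it.
        try (ObjectInputStream in = new ObjectInputStream(new FileInputStream(resourcePath))) {
          return (Map<String, Map<String, Set<String>>>) in.readObject();
        }
      }
    }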

http://git-wip-us.apache.org/repos/asf/sentry/blob/7a30c819/sentry-binding/sentry-binding-hive/src/main/java/org/apache/sentry/binding/hive/authz/HiveAuthzBinding.java
----------------------------------------------------------------------
diff --git a/sentry-binding/sentry-binding-hive/src/main/java/org/apache/sentry/binding/hive/authz/HiveAuthzBinding.java b/sentry-binding/sentry-binding-hive/src/main/java/org/apache/sentry/binding/hive/authz/HiveAuthzBinding.java
deleted file mode 100644
index 0a1d0e8..0000000
--- a/sentry-binding/sentry-binding-hive/src/main/java/org/apache/sentry/binding/hive/authz/HiveAuthzBinding.java
+++ /dev/null
@@ -1,407 +0,0 @@
-/*
- * Licensed to the Apache Software Foundation (ASF) under one or more
- * contributor license agreements.  See the NOTICE file distributed with
- * this work for additional information regarding copyright ownership.
- * The ASF licenses this file to You under the Apache License, Version 2.0
- * (the "License"); you may not use this file except in compliance with
- * the License.  You may obtain a copy of the License at
- *
- *      http://www.apache.org/licenses/LICENSE-2.0
- *
- * Unless required by applicable law or agreed to in writing, software
- * distributed under the License is distributed on an "AS IS" BASIS,
- * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
- * See the License for the specific language governing permissions and
- * limitations under the License.
- */
-package org.apache.sentry.binding.hive.authz;
-
-import java.lang.reflect.Constructor;
-import java.util.EnumSet;
-import java.util.HashSet;
-import java.util.List;
-import java.util.Map;
-import java.util.Set;
-
-import org.apache.hadoop.conf.Configuration;
-import org.apache.hadoop.fs.CommonConfigurationKeys;
-import org.apache.hadoop.hive.conf.HiveConf;
-import org.apache.hadoop.hive.conf.HiveConf.ConfVars;
-import org.apache.hadoop.hive.ql.metadata.AuthorizationException;
-import org.apache.hadoop.hive.ql.plan.HiveOperation;
-import org.apache.sentry.SentryUserException;
-import org.apache.sentry.binding.hive.conf.HiveAuthzConf;
-import org.apache.sentry.binding.hive.conf.HiveAuthzConf.AuthzConfVars;
-import org.apache.sentry.binding.hive.conf.InvalidConfigurationException;
-import org.apache.sentry.core.common.ActiveRoleSet;
-import org.apache.sentry.core.common.Subject;
-import org.apache.sentry.core.model.db.AccessConstants;
-import org.apache.sentry.core.model.db.DBModelAction;
-import org.apache.sentry.core.model.db.DBModelAuthorizable;
-import org.apache.sentry.core.model.db.DBModelAuthorizable.AuthorizableType;
-import org.apache.sentry.core.model.db.Server;
-import org.apache.sentry.policy.common.PolicyEngine;
-import org.apache.sentry.provider.cache.PrivilegeCache;
-import org.apache.sentry.provider.cache.SimpleCacheProviderBackend;
-import org.apache.sentry.provider.common.AuthorizationProvider;
-import org.apache.sentry.provider.common.ProviderBackend;
-import org.apache.sentry.provider.common.ProviderBackendContext;
-import org.apache.sentry.provider.db.service.thrift.TSentryRole;
-import org.slf4j.Logger;
-import org.slf4j.LoggerFactory;
-
-import com.google.common.base.Splitter;
-import com.google.common.base.Strings;
-import com.google.common.collect.Sets;
-
-public class HiveAuthzBinding {
-  private static final Logger LOG = LoggerFactory
-      .getLogger(HiveAuthzBinding.class);
-  private static final Splitter ROLE_SET_SPLITTER = Splitter.on(",").trimResults()
-      .omitEmptyStrings();
-  public static final String HIVE_BINDING_TAG = "hive.authz.bindings.tag";
-
-  private final HiveConf hiveConf;
-  private final Server authServer;
-  private final AuthorizationProvider authProvider;
-  private volatile boolean open;
-  private ActiveRoleSet activeRoleSet;
-  private HiveAuthzConf authzConf;
-
-  public static enum HiveHook {
-    HiveServer2,
-    HiveMetaStore
-    ;
-  }
-
-  public HiveAuthzBinding(HiveConf hiveConf, HiveAuthzConf authzConf) throws Exception {
-    this(HiveHook.HiveServer2, hiveConf, authzConf);
-  }
-
-  public HiveAuthzBinding(HiveHook hiveHook, HiveConf hiveConf, HiveAuthzConf authzConf)
-      throws Exception {
-    validateHiveConfig(hiveHook, hiveConf, authzConf);
-    this.hiveConf = hiveConf;
-    this.authzConf = authzConf;
-    this.authServer = new Server(authzConf.get(AuthzConfVars.AUTHZ_SERVER_NAME.getVar()));
-    this.authProvider = getAuthProvider(hiveConf, authzConf, authServer.getName());
-    this.open = true;
-    this.activeRoleSet = parseActiveRoleSet(hiveConf.get(HiveAuthzConf.SENTRY_ACTIVE_ROLE_SET,
-        authzConf.get(HiveAuthzConf.SENTRY_ACTIVE_ROLE_SET, "")).trim());
-  }
-
-  public HiveAuthzBinding(HiveHook hiveHook, HiveConf hiveConf, HiveAuthzConf authzConf,
-      PrivilegeCache privilegeCache) throws Exception {
-    validateHiveConfig(hiveHook, hiveConf, authzConf);
-    this.hiveConf = hiveConf;
-    this.authzConf = authzConf;
-    this.authServer = new Server(authzConf.get(AuthzConfVars.AUTHZ_SERVER_NAME.getVar()));
-    this.authProvider = getAuthProviderWithPrivilegeCache(authzConf, authServer.getName(),
-        privilegeCache);
-    this.open = true;
-    this.activeRoleSet = parseActiveRoleSet(hiveConf.get(HiveAuthzConf.SENTRY_ACTIVE_ROLE_SET,
-        authzConf.get(HiveAuthzConf.SENTRY_ACTIVE_ROLE_SET, "")).trim());
-  }
-
-  private static ActiveRoleSet parseActiveRoleSet(String name)
-      throws SentryUserException {
-    return parseActiveRoleSet(name, null);
-  }
-
-  private static ActiveRoleSet parseActiveRoleSet(String name,
-      Set<TSentryRole> allowedRoles) throws SentryUserException {
-    // if unset, then we choose the default of ALL
-    if (name.isEmpty()) {
-      return ActiveRoleSet.ALL;
-    } else if (AccessConstants.NONE_ROLE.equalsIgnoreCase(name)) {
-      return new ActiveRoleSet(new HashSet<String>());
-    } else if (AccessConstants.ALL_ROLE.equalsIgnoreCase(name)) {
-      return ActiveRoleSet.ALL;
-    } else if (AccessConstants.RESERVED_ROLE_NAMES.contains(name.toUpperCase())) {
-      String msg = "Role " + name + " is reserved";
-      throw new IllegalArgumentException(msg);
-    } else {
-      if (allowedRoles != null) {
-        // check if the user has been granted the role
-        boolean foundRole = false;
-        for (TSentryRole role : allowedRoles) {
-          if (role.getRoleName().equalsIgnoreCase(name)) {
-            foundRole = true;
-            break;
-          }
-        }
-        if (!foundRole) {
-          // set the reason for hive binding to pick up
-          throw new SentryUserException("Not authorized to set role " + name,
-              "Not authorized to set role " + name);
-        }
-      }
-      return new ActiveRoleSet(Sets.newHashSet(ROLE_SET_SPLITTER.split(name)));
-    }
-  }
-
-  private void validateHiveConfig(HiveHook hiveHook, HiveConf hiveConf, HiveAuthzConf authzConf)
-      throws InvalidConfigurationException {
-    if (hiveHook.equals(HiveHook.HiveMetaStore)) {
-      validateHiveMetaStoreConfig(hiveConf, authzConf);
-    } else if (hiveHook.equals(HiveHook.HiveServer2)) {
-      validateHiveServer2Config(hiveConf, authzConf);
-    }
-  }
-
-  private void validateHiveMetaStoreConfig(HiveConf hiveConf, HiveAuthzConf authzConf)
-      throws InvalidConfigurationException {
-    boolean isTestingMode = Boolean.parseBoolean(Strings.nullToEmpty(
-        authzConf.get(AuthzConfVars.SENTRY_TESTING_MODE.getVar())).trim());
-    LOG.debug("Testing mode is " + isTestingMode);
-    if (!isTestingMode) {
-      boolean sasl = hiveConf.getBoolVar(ConfVars.METASTORE_USE_THRIFT_SASL);
-      if (!sasl) {
-        throw new InvalidConfigurationException(
-            ConfVars.METASTORE_USE_THRIFT_SASL + " can't be false in non-testing mode");
-      }
-    } else {
-      boolean setUgi = hiveConf.getBoolVar(ConfVars.METASTORE_EXECUTE_SET_UGI);
-      if (!setUgi) {
-        throw new InvalidConfigurationException(
-            ConfVars.METASTORE_EXECUTE_SET_UGI.toString() + " can't be false in non-secure mode");
-      }
-    }
-  }
-
-  private void validateHiveServer2Config(HiveConf hiveConf, HiveAuthzConf authzConf)
-      throws InvalidConfigurationException {
-    boolean isTestingMode = Boolean.parseBoolean(Strings.nullToEmpty(
-        authzConf.get(AuthzConfVars.SENTRY_TESTING_MODE.getVar())).trim());
-    LOG.debug("Testing mode is " + isTestingMode);
-    if (!isTestingMode) {
-      String authMethod = Strings.nullToEmpty(hiveConf.getVar(ConfVars.HIVE_SERVER2_AUTHENTICATION)).trim();
-      if ("none".equalsIgnoreCase(authMethod)) {
-        throw new InvalidConfigurationException(ConfVars.HIVE_SERVER2_AUTHENTICATION +
-            " can't be none in non-testing mode");
-      }
-      boolean impersonation = hiveConf.getBoolVar(ConfVars.HIVE_SERVER2_ENABLE_DOAS);
-      boolean allowImpersonation = Boolean.parseBoolean(Strings.nullToEmpty(
-          authzConf.get(AuthzConfVars.AUTHZ_ALLOW_HIVE_IMPERSONATION.getVar())).trim());
-
-      if (impersonation && !allowImpersonation) {
-        LOG.error("Role based authorization does not work with HiveServer2 impersonation");
-        throw new InvalidConfigurationException(ConfVars.HIVE_SERVER2_ENABLE_DOAS +
-            " can't be set to true in non-testing mode");
-      }
-    }
-    String defaultUmask = hiveConf.get(CommonConfigurationKeys.FS_PERMISSIONS_UMASK_KEY);
-    if ("077".equalsIgnoreCase(defaultUmask)) {
-      LOG.error("HiveServer2 required a default umask of 077");
-      throw new InvalidConfigurationException(CommonConfigurationKeys.FS_PERMISSIONS_UMASK_KEY +
-          " should be 077 in non-testing mode");
-    }
-  }
-
-  // Instantiate the configured authz provider
-  public static AuthorizationProvider getAuthProvider(HiveConf hiveConf, HiveAuthzConf authzConf,
-        String serverName) throws Exception {
-    // get the provider class and resources from the authz config
-    String authProviderName = authzConf.get(AuthzConfVars.AUTHZ_PROVIDER.getVar());
-    String resourceName =
-        authzConf.get(AuthzConfVars.AUTHZ_PROVIDER_RESOURCE.getVar());
-    String providerBackendName = authzConf.get(AuthzConfVars.AUTHZ_PROVIDER_BACKEND.getVar());
-    String policyEngineName = authzConf.get(AuthzConfVars.AUTHZ_POLICY_ENGINE.getVar());
-
-    LOG.debug("Using authorization provider " + authProviderName +
-        " with resource " + resourceName + ", policy engine "
-        + policyEngineName + ", provider backend " + providerBackendName);
-    // load the provider backend class
-    Constructor<?> providerBackendConstructor =
-        Class.forName(providerBackendName).getDeclaredConstructor(Configuration.class, String.class);
-    providerBackendConstructor.setAccessible(true);
-    ProviderBackend providerBackend = (ProviderBackend) providerBackendConstructor
-        .newInstance(new Object[] {authzConf, resourceName});
-
-    // load the policy engine class
-    Constructor<?> policyConstructor =
-        Class.forName(policyEngineName).getDeclaredConstructor(String.class, ProviderBackend.class);
-    policyConstructor.setAccessible(true);
-    PolicyEngine policyEngine = (PolicyEngine) policyConstructor
-        .newInstance(new Object[] {serverName, providerBackend});
-
-    // load the authz provider class
-    Constructor<?> constructor =
-        Class.forName(authProviderName).getDeclaredConstructor(String.class, PolicyEngine.class);
-    constructor.setAccessible(true);
-    return (AuthorizationProvider) constructor.newInstance(new Object[] {resourceName, policyEngine});
-  }
-
-  // Instantiate the authz provider using PrivilegeCache; this method is used for the metadata
-  // filter function.
-  public static AuthorizationProvider getAuthProviderWithPrivilegeCache(HiveAuthzConf authzConf,
-      String serverName, PrivilegeCache privilegeCache) throws Exception {
-    // get the provider class and resources from the authz config
-    String authProviderName = authzConf.get(AuthzConfVars.AUTHZ_PROVIDER.getVar());
-    String resourceName =
-        authzConf.get(AuthzConfVars.AUTHZ_PROVIDER_RESOURCE.getVar());
-    String policyEngineName = authzConf.get(AuthzConfVars.AUTHZ_POLICY_ENGINE.getVar());
-
-    LOG.debug("Using authorization provider " + authProviderName +
-        " with resource " + resourceName + ", policy engine "
-        + policyEngineName + ", provider backend SimpleCacheProviderBackend");
-
-    ProviderBackend providerBackend = new SimpleCacheProviderBackend(authzConf, resourceName);
-    ProviderBackendContext context = new ProviderBackendContext();
-    context.setBindingHandle(privilegeCache);
-    providerBackend.initialize(context);
-
-    // load the policy engine class
-    Constructor<?> policyConstructor =
-        Class.forName(policyEngineName).getDeclaredConstructor(String.class, ProviderBackend.class);
-    policyConstructor.setAccessible(true);
-    PolicyEngine policyEngine = (PolicyEngine) policyConstructor
-        .newInstance(new Object[] {serverName, providerBackend});
-
-    // load the authz provider class
-    Constructor<?> constructor =
-        Class.forName(authProviderName).getDeclaredConstructor(String.class, PolicyEngine.class);
-    constructor.setAccessible(true);
-    return (AuthorizationProvider) constructor.newInstance(new Object[] {resourceName, policyEngine});
-  }
-
-  /**
-   * Validate the privileges of the given subject for the given operation
-   * @param hiveOp
-   * @param stmtAuthPrivileges
-   * @param subject
-   * @param inputHierarchyList
-   * @param outputHierarchyList
-   * @throws AuthorizationException
-   */
-  public void authorize(HiveOperation hiveOp, HiveAuthzPrivileges stmtAuthPrivileges,
-      Subject subject, List<List<DBModelAuthorizable>> inputHierarchyList,
-      List<List<DBModelAuthorizable>> outputHierarchyList)
-          throws AuthorizationException {
-    if (!open) {
-      throw new IllegalStateException("Binding has been closed");
-    }
-    boolean isDebug = LOG.isDebugEnabled();
-    if(isDebug) {
-      LOG.debug("Going to authorize statement " + hiveOp.name() +
-          " for subject " + subject.getName());
-    }
-
-    /* For each read and write entity captured by the compiler,
-     * check whether that object type is part of the input/output privilege list,
-     * and if it is, validate the access.
-     * Note that the hive compiler also gathers information on additional entities, such as
-     * partitions, which are not of interest at this point. Hence it is quite possible that
-     * we won't validate all the entities in the given list.
-     */
-
-    // Check read entities
-    Map<AuthorizableType, EnumSet<DBModelAction>> requiredInputPrivileges =
-        stmtAuthPrivileges.getInputPrivileges();
-    if(isDebug) {
-      LOG.debug("requiredInputPrivileges = " + requiredInputPrivileges);
-      LOG.debug("inputHierarchyList = " + inputHierarchyList);
-    }
-    Map<AuthorizableType, EnumSet<DBModelAction>> requiredOutputPrivileges =
-        stmtAuthPrivileges.getOutputPrivileges();
-    if(isDebug) {
-      LOG.debug("requiredOuputPrivileges = " + requiredOutputPrivileges);
-      LOG.debug("outputHierarchyList = " + outputHierarchyList);
-    }
-
-    boolean found = false;
-    for (Map.Entry<AuthorizableType, EnumSet<DBModelAction>> entry :
-        requiredInputPrivileges.entrySet()) {
-      AuthorizableType key = entry.getKey();
-      for (List<DBModelAuthorizable> inputHierarchy : inputHierarchyList) {
-        if (getAuthzType(inputHierarchy).equals(key)) {
-          found = true;
-          if (!authProvider.hasAccess(subject, inputHierarchy, entry.getValue(), activeRoleSet)) {
-            throw new AuthorizationException("User " + subject.getName() +
-                " does not have privileges for " + hiveOp.name());
-          }
-        }
-      }
-      if (!found && !key.equals(AuthorizableType.URI) && !hiveOp.equals(HiveOperation.QUERY)
-          && !hiveOp.equals(HiveOperation.CREATETABLE_AS_SELECT)) {
-        // URI privileges are optional for some privileges: anyPrivilege, tableDDLAndOptionalUriPrivilege.
-        // Query can mean select/insert/analyze, each of which has different required privileges.
-        // CreateTableAsSelect can have table/column privileges with select.
-        // For these alone we skip the check if there is no equivalent input privilege.
-        // TODO: Even this case should be handled to make sure we do not skip the privilege check
-        // if we did not build the input privileges correctly.
-        throw new AuthorizationException("Required privilege (" + key.name() +
-            ") not available in input privileges");
-      }
-      found = false;
-    }
-
-    for(AuthorizableType key: requiredOutputPrivileges.keySet()) {
-      for (List<DBModelAuthorizable> outputHierarchy : outputHierarchyList) {
-        if (getAuthzType(outputHierarchy).equals(key)) {
-          found = true;
-          if (!authProvider.hasAccess(subject, outputHierarchy, requiredOutputPrivileges.get(key),
-              activeRoleSet)) {
-            throw new AuthorizationException("User " + subject.getName() +
-                " does not have privileges for " + hiveOp.name());
-          }
-        }
-      }
-      if (!found && !key.equals(AuthorizableType.URI) && !hiveOp.equals(HiveOperation.QUERY)) {
-        // URI privileges are optional for some privileges: tableInsertPrivilege.
-        // Query can mean select/insert/analyze, each of which has different required privileges.
-        // For these alone we skip the check if there is no equivalent output privilege.
-        // TODO: Even this case should be handled to make sure we do not skip the privilege check
-        // if we did not build the output privileges correctly.
-        throw new AuthorizationException("Required privilege (" + key.name() +
-            ") not available in output privileges");
-      }
-      found = false;
-    }
-  }
-
-  public void setActiveRoleSet(String activeRoleSet,
-      Set<TSentryRole> allowedRoles) throws SentryUserException {
-    this.activeRoleSet = parseActiveRoleSet(activeRoleSet, allowedRoles);
-    hiveConf.set(HiveAuthzConf.SENTRY_ACTIVE_ROLE_SET, activeRoleSet);
-  }
-
-  public ActiveRoleSet getActiveRoleSet() {
-    return activeRoleSet;
-  }
-
-  public Set<String> getGroups(Subject subject) {
-    return authProvider.getGroupMapping().getGroups(subject.getName());
-  }
-
-  public Server getAuthServer() {
-    if (!open) {
-      throw new IllegalStateException("Binding has been closed");
-    }
-    return authServer;
-  }
-
-  public HiveAuthzConf getAuthzConf() {
-    return authzConf;
-  }
-
-  public HiveConf getHiveConf() {
-    return hiveConf;
-  }
-
-  private AuthorizableType getAuthzType(List<DBModelAuthorizable> hierarchy) {
-    return hierarchy.get(hierarchy.size() - 1).getAuthzType();
-  }
-
-  public List<String> getLastQueryPrivilegeErrors() {
-    if (!open) {
-      throw new IllegalStateException("Binding has been closed");
-    }
-    return authProvider.getLastFailedPrivileges();
-  }
-
-  public void close() {
-    authProvider.close();
-  }
-
-  public AuthorizationProvider getCurrentAuthProvider() {
-    return authProvider;
-  }
-}
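
Worth noting for reviewers: getAuthProvider and getAuthProviderWithPrivilegeCache repeat one reflective pattern three times over: read a class name from the config, look up a constructor with a known signature, force accessibility, and instantiate. A condensed generic sketch of that pattern follows (ReflectiveWiring and its signature are illustrative, not Sentry API):

    import java.lang.reflect.Constructor;

    public final class ReflectiveWiring {

      private ReflectiveWiring() {}

      // Resolve className, find the constructor matching the given signature,
      // and instantiate it with args, casting to the expected supertype.
      public static <T> T instantiate(String className, Class<T> expectedType,
          Class<?>[] signature, Object... args) throws Exception {
        Constructor<?> ctor = Class.forName(className).getDeclaredConstructor(signature);
        ctor.setAccessible(true); // tolerate non-public constructors, as the binding does
        return expectedType.cast(ctor.newInstance(args));
      }
    }

With a helper like this, each of the three wiring blocks above reduces to a single call, e.g. instantiate(policyEngineName, PolicyEngine.class, new Class<?>[] {String.class, ProviderBackend.class}, serverName, providerBackend).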

http://git-wip-us.apache.org/repos/asf/sentry/blob/7a30c819/sentry-binding/sentry-binding-hive/src/main/java/org/apache/sentry/binding/hive/authz/HiveAuthzPrivileges.java
----------------------------------------------------------------------
diff --git a/sentry-binding/sentry-binding-hive/src/main/java/org/apache/sentry/binding/hive/authz/HiveAuthzPrivileges.java b/sentry-binding/sentry-binding-hive/src/main/java/org/apache/sentry/binding/hive/authz/HiveAuthzPrivileges.java
deleted file mode 100644
index f164b30..0000000
--- a/sentry-binding/sentry-binding-hive/src/main/java/org/apache/sentry/binding/hive/authz/HiveAuthzPrivileges.java
+++ /dev/null
@@ -1,153 +0,0 @@
-/*
- * Licensed to the Apache Software Foundation (ASF) under one or more
- * contributor license agreements.  See the NOTICE file distributed with
- * this work for additional information regarding copyright ownership.
- * The ASF licenses this file to You under the Apache License, Version 2.0
- * (the "License"); you may not use this file except in compliance with
- * the License.  You may obtain a copy of the License at
- *
- *      http://www.apache.org/licenses/LICENSE-2.0
- *
- * Unless required by applicable law or agreed to in writing, software
- * distributed under the License is distributed on an "AS IS" BASIS,
- * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
- * See the License for the specific language governing permissions and
- * limitations under the License.
- */
-package org.apache.sentry.binding.hive.authz;
-
-import java.util.EnumSet;
-import java.util.HashMap;
-import java.util.Map;
-
-import org.apache.sentry.core.model.db.DBModelAction;
-import org.apache.sentry.core.model.db.DBModelAuthorizable.AuthorizableType;
-
-/**
- * Hive objects with required access privileges mapped to auth provider privileges
- */
-public class HiveAuthzPrivileges {
-
-  /**
-   * Operation type used for privilege granting
-   */
-  public static enum HiveOperationType {
-    UNKNOWN,
-    DDL,
-    DML,
-    DATA_LOAD,
-    DATA_UNLOAD,
-    QUERY,
-    INFO
-  };
-
-  /**
-   * Scope of the operation. The auth provider interface has different methods
-   * for some of these, hence we want to be able to identify the auth scope of
-   * a statement, e.g. server level or DB level.
-   */
-  public static enum HiveOperationScope {
-    UNKNOWN,
-    SERVER,
-    DATABASE,
-    TABLE,
-    FUNCTION,
-    CONNECT,
-    COLUMN
-  }
-
-  public static enum HiveExtendedOperation {
-    TRANSFORM,
-    RESOURCE
-  }
-
-  public static class AuthzPrivilegeBuilder {
-    private final Map<AuthorizableType, EnumSet<DBModelAction>> inputPrivileges =
-        new HashMap<AuthorizableType, EnumSet<DBModelAction>>();
-    private final Map<AuthorizableType, EnumSet<DBModelAction>> outputPrivileges =
-        new HashMap<AuthorizableType, EnumSet<DBModelAction>>();
-    private HiveOperationType operationType;
-    private HiveOperationScope operationScope;
-
-    public AuthzPrivilegeBuilder addInputObjectPriviledge(AuthorizableType inputObjectType,
-        EnumSet<DBModelAction> inputPrivilege) {
-      inputPrivileges.put(inputObjectType, inputPrivilege);
-      return this;
-    }
-
-    public AuthzPrivilegeBuilder addOutputEntityPriviledge(AuthorizableType outputEntityType,
-        EnumSet<DBModelAction> outputPrivilege) {
-      outputPrivileges.put(outputEntityType, outputPrivilege);
-      return this;
-    }
-
-    public AuthzPrivilegeBuilder addOutputObjectPriviledge(AuthorizableType outputObjectType,
-        EnumSet<DBModelAction> outputPrivilege) {
-      outputPrivileges.put(outputObjectType, outputPrivilege);
-      return this;
-    }
-
-    public AuthzPrivilegeBuilder setOperationType(HiveOperationType operationType) {
-      this.operationType = operationType;
-      return this;
-    }
-
-    public AuthzPrivilegeBuilder setOperationScope(HiveOperationScope operationScope) {
-      this.operationScope = operationScope;
-      return this;
-    }
-
-    public HiveAuthzPrivileges build() {
-      if (operationScope.equals(HiveOperationScope.UNKNOWN)) {
-        throw new UnsupportedOperationException("Operation scope is not set");
-      }
-
-      if (operationType.equals(HiveOperationType.UNKNOWN)) {
-        throw new UnsupportedOperationException("Operation type is not set");
-      }
-
-      return new HiveAuthzPrivileges(inputPrivileges, outputPrivileges,
-          operationType, operationScope);
-    }
-  }
-
-  private final Map<AuthorizableType, EnumSet<DBModelAction>> inputPrivileges =
-      new HashMap<AuthorizableType, EnumSet<DBModelAction>>();
-  private final Map<AuthorizableType, EnumSet<DBModelAction>> outputPrivileges =
-      new HashMap<AuthorizableType, EnumSet<DBModelAction>>();
-  private final HiveOperationType operationType;
-  private final HiveOperationScope operationScope;
-
-  protected HiveAuthzPrivileges(Map<AuthorizableType, EnumSet<DBModelAction>> inputPrivileges,
-      Map<AuthorizableType, EnumSet<DBModelAction>> outputPrivileges,
-      HiveOperationType operationType, HiveOperationScope operationScope) {
-    this.inputPrivileges.putAll(inputPrivileges);
-    this.outputPrivileges.putAll(outputPrivileges);
-    this.operationScope = operationScope;
-    this.operationType = operationType;
-  }
-
-  /**
-   * @return the inputPrivileges
-   */
-  public Map<AuthorizableType, EnumSet<DBModelAction>> getInputPrivileges() {
-    return inputPrivileges;
-  }
-
-  /**
-   * @return the outputPrivileges
-   */
-  public Map<AuthorizableType, EnumSet<DBModelAction>> getOutputPrivileges() {
-    return outputPrivileges;
-  }
-
-  /**
-   * @return the operationType
-   */
-  public HiveOperationType getOperationType() {
-    return operationType;
-  }
-
-  /**
-   * @return the operationScope
-   */
-  public HiveOperationScope getOperationScope() {
-    return operationScope;
-  }
-}
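
To see AuthzPrivilegeBuilder in use, here is a hedged sketch of assembling a table-scoped query privilege. The SELECT-only action set is an illustrative guess; the authoritative per-operation mappings live in HiveAuthzPrivilegesMap.

    import java.util.EnumSet;

    import org.apache.sentry.binding.hive.authz.HiveAuthzPrivileges;
    import org.apache.sentry.binding.hive.authz.HiveAuthzPrivileges.AuthzPrivilegeBuilder;
    import org.apache.sentry.binding.hive.authz.HiveAuthzPrivileges.HiveOperationScope;
    import org.apache.sentry.binding.hive.authz.HiveAuthzPrivileges.HiveOperationType;
    import org.apache.sentry.core.model.db.DBModelAction;
    import org.apache.sentry.core.model.db.DBModelAuthorizable.AuthorizableType;

    public class QueryPrivilegesDemo {
      public static void main(String[] args) {
        // A query over a table requires SELECT on the input table (illustrative set).
        HiveAuthzPrivileges tableQueryPrivilege = new AuthzPrivilegeBuilder()
            .addInputObjectPriviledge(AuthorizableType.Table, EnumSet.of(DBModelAction.SELECT))
            .setOperationScope(HiveOperationScope.TABLE)
            .setOperationType(HiveOperationType.QUERY)
            .build();
        System.out.println(tableQueryPrivilege.getOperationScope());
      }
    }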
