http://git-wip-us.apache.org/repos/asf/sentry/blob/7a30c819/sentry-binding/sentry-binding-hive-common/src/main/java/org/apache/sentry/binding/hive/authz/HiveAuthzBinding.java
----------------------------------------------------------------------
diff --git a/sentry-binding/sentry-binding-hive-common/src/main/java/org/apache/sentry/binding/hive/authz/HiveAuthzBinding.java b/sentry-binding/sentry-binding-hive-common/src/main/java/org/apache/sentry/binding/hive/authz/HiveAuthzBinding.java
new file mode 100644
index 0000000..0a1d0e8
--- /dev/null
+++ b/sentry-binding/sentry-binding-hive-common/src/main/java/org/apache/sentry/binding/hive/authz/HiveAuthzBinding.java
@@ -0,0 +1,407 @@
+/*
+ * Licensed to the Apache Software Foundation (ASF) under one or more
+ * contributor license agreements.  See the NOTICE file distributed with
+ * this work for additional information regarding copyright ownership.
+ * The ASF licenses this file to You under the Apache License, Version 2.0
+ * (the "License"); you may not use this file except in compliance with
+ * the License.  You may obtain a copy of the License at
+ *
+ *      http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+package org.apache.sentry.binding.hive.authz;
+
+import java.lang.reflect.Constructor;
+import java.util.EnumSet;
+import java.util.HashSet;
+import java.util.List;
+import java.util.Map;
+import java.util.Set;
+
+import org.apache.hadoop.conf.Configuration;
+import org.apache.hadoop.fs.CommonConfigurationKeys;
+import org.apache.hadoop.hive.conf.HiveConf;
+import org.apache.hadoop.hive.conf.HiveConf.ConfVars;
+import org.apache.hadoop.hive.ql.metadata.AuthorizationException;
+import org.apache.hadoop.hive.ql.plan.HiveOperation;
+import org.apache.sentry.SentryUserException;
+import org.apache.sentry.binding.hive.conf.HiveAuthzConf;
+import org.apache.sentry.binding.hive.conf.HiveAuthzConf.AuthzConfVars;
+import org.apache.sentry.binding.hive.conf.InvalidConfigurationException;
+import org.apache.sentry.core.common.ActiveRoleSet;
+import org.apache.sentry.core.common.Subject;
+import org.apache.sentry.core.model.db.AccessConstants;
+import org.apache.sentry.core.model.db.DBModelAction;
+import org.apache.sentry.core.model.db.DBModelAuthorizable;
+import org.apache.sentry.core.model.db.DBModelAuthorizable.AuthorizableType;
+import org.apache.sentry.core.model.db.Server;
+import org.apache.sentry.policy.common.PolicyEngine;
+import org.apache.sentry.provider.cache.PrivilegeCache;
+import org.apache.sentry.provider.cache.SimpleCacheProviderBackend;
+import org.apache.sentry.provider.common.AuthorizationProvider;
+import org.apache.sentry.provider.common.ProviderBackend;
+import org.apache.sentry.provider.common.ProviderBackendContext;
+import org.apache.sentry.provider.db.service.thrift.TSentryRole;
+import org.slf4j.Logger;
+import org.slf4j.LoggerFactory;
+
+import com.google.common.base.Splitter;
+import com.google.common.base.Strings;
+import com.google.common.collect.Sets;
+
+public class HiveAuthzBinding {
+  private static final Logger LOG = LoggerFactory
+      .getLogger(HiveAuthzBinding.class);
+  private static final Splitter ROLE_SET_SPLITTER = Splitter.on(",").trimResults()
+      .omitEmptyStrings();
+  public static final String HIVE_BINDING_TAG = "hive.authz.bindings.tag";
+
+  private final HiveConf hiveConf;
+  private final Server authServer;
+  private final AuthorizationProvider authProvider;
+  private volatile boolean open;
+  private ActiveRoleSet activeRoleSet;
+  private HiveAuthzConf authzConf;
+
+  public static enum HiveHook {
+    HiveServer2,
+    HiveMetaStore
+    ;
+  }
+
+  public HiveAuthzBinding (HiveConf hiveConf, HiveAuthzConf authzConf) throws Exception {
+    this(HiveHook.HiveServer2, hiveConf, authzConf);
+  }
+
+  public HiveAuthzBinding (HiveHook hiveHook, HiveConf hiveConf, HiveAuthzConf authzConf) throws Exception {
+    validateHiveConfig(hiveHook, hiveConf, authzConf);
+    this.hiveConf = hiveConf;
+    this.authzConf = authzConf;
+    this.authServer = new Server(authzConf.get(AuthzConfVars.AUTHZ_SERVER_NAME.getVar()));
+    this.authProvider = getAuthProvider(hiveConf, authzConf, authServer.getName());
+    this.open = true;
+    this.activeRoleSet = parseActiveRoleSet(hiveConf.get(HiveAuthzConf.SENTRY_ACTIVE_ROLE_SET,
+        authzConf.get(HiveAuthzConf.SENTRY_ACTIVE_ROLE_SET, "")).trim());
+  }
+
+  public HiveAuthzBinding (HiveHook hiveHook, HiveConf hiveConf, HiveAuthzConf authzConf,
+      PrivilegeCache privilegeCache) throws Exception {
+    validateHiveConfig(hiveHook, hiveConf, authzConf);
+    this.hiveConf = hiveConf;
+    this.authzConf = authzConf;
+    this.authServer = new Server(authzConf.get(AuthzConfVars.AUTHZ_SERVER_NAME.getVar()));
+    this.authProvider = getAuthProviderWithPrivilegeCache(authzConf, authServer.getName(), privilegeCache);
+    this.open = true;
+    this.activeRoleSet = parseActiveRoleSet(hiveConf.get(HiveAuthzConf.SENTRY_ACTIVE_ROLE_SET,
+        authzConf.get(HiveAuthzConf.SENTRY_ACTIVE_ROLE_SET, "")).trim());
+  }
+
+  private static ActiveRoleSet parseActiveRoleSet(String name)
+      throws SentryUserException {
+    return parseActiveRoleSet(name, null);
+  }
+
+  private static ActiveRoleSet parseActiveRoleSet(String name,
+      Set<TSentryRole> allowedRoles) throws SentryUserException {
+    // if unset, then we choose the default of ALL
+    if (name.isEmpty()) {
+      return ActiveRoleSet.ALL;
+    } else if (AccessConstants.NONE_ROLE.equalsIgnoreCase(name)) {
+      return new ActiveRoleSet(new HashSet<String>());
+    } else if (AccessConstants.ALL_ROLE.equalsIgnoreCase(name)) {
+      return ActiveRoleSet.ALL;
+    } else if (AccessConstants.RESERVED_ROLE_NAMES.contains(name.toUpperCase())) {
+      String msg = "Role " + name + " is reserved";
+      throw new IllegalArgumentException(msg);
+    } else {
+      if (allowedRoles != null) {
+        // check if the user has been granted the role
+        boolean foundRole = false;
+        for (TSentryRole role : allowedRoles) {
+          if (role.getRoleName().equalsIgnoreCase(name)) {
+            foundRole = true;
+            break;
+          }
+        }
+        if (!foundRole) {
+          //Set the reason for hive binding to pick up
+          throw new SentryUserException("Not authorized to set role " + name,
+              "Not authorized to set role " + name);
+        }
+      }
+      return new ActiveRoleSet(Sets.newHashSet(ROLE_SET_SPLITTER.split(name)));
+    }
+  }
+
+  private void validateHiveConfig(HiveHook hiveHook, HiveConf hiveConf, HiveAuthzConf authzConf)
+      throws InvalidConfigurationException {
+    if (hiveHook.equals(HiveHook.HiveMetaStore)) {
+      validateHiveMetaStoreConfig(hiveConf, authzConf);
+    } else if (hiveHook.equals(HiveHook.HiveServer2)) {
+      validateHiveServer2Config(hiveConf, authzConf);
+    }
+  }
+
+  private void validateHiveMetaStoreConfig(HiveConf hiveConf, HiveAuthzConf authzConf)
+      throws InvalidConfigurationException {
+    boolean isTestingMode = Boolean.parseBoolean(Strings.nullToEmpty(
+        authzConf.get(AuthzConfVars.SENTRY_TESTING_MODE.getVar())).trim());
+    LOG.debug("Testing mode is " + isTestingMode);
+    if (!isTestingMode) {
+      boolean sasl = hiveConf.getBoolVar(ConfVars.METASTORE_USE_THRIFT_SASL);
+      if (!sasl) {
+        throw new InvalidConfigurationException(
+            ConfVars.METASTORE_USE_THRIFT_SASL + " can't be false in non-testing mode");
+      }
+    } else {
+      boolean setUgi = hiveConf.getBoolVar(ConfVars.METASTORE_EXECUTE_SET_UGI);
+      if (!setUgi) {
+        throw new InvalidConfigurationException(
+            ConfVars.METASTORE_EXECUTE_SET_UGI.toString() + " can't be false in non-secure mode");
+      }
+    }
+  }
+
+  private void validateHiveServer2Config(HiveConf hiveConf, HiveAuthzConf authzConf)
+      throws InvalidConfigurationException {
+    boolean isTestingMode = Boolean.parseBoolean(Strings.nullToEmpty(
+        authzConf.get(AuthzConfVars.SENTRY_TESTING_MODE.getVar())).trim());
+    LOG.debug("Testing mode is " + isTestingMode);
+    if (!isTestingMode) {
+      String authMethod = Strings.nullToEmpty(hiveConf.getVar(ConfVars.HIVE_SERVER2_AUTHENTICATION)).trim();
+      if ("none".equalsIgnoreCase(authMethod)) {
+        throw new InvalidConfigurationException(ConfVars.HIVE_SERVER2_AUTHENTICATION +
+            " can't be none in non-testing mode");
+      }
+      boolean impersonation = hiveConf.getBoolVar(ConfVars.HIVE_SERVER2_ENABLE_DOAS);
+      boolean allowImpersonation = Boolean.parseBoolean(Strings.nullToEmpty(
+          authzConf.get(AuthzConfVars.AUTHZ_ALLOW_HIVE_IMPERSONATION.getVar())).trim());
+
+      if (impersonation && !allowImpersonation) {
+        LOG.error("Role based authorization does not work with HiveServer2 impersonation");
+        throw new InvalidConfigurationException(ConfVars.HIVE_SERVER2_ENABLE_DOAS +
+            " can't be set to true in non-testing mode");
+      }
+    }
+    String defaultUmask = hiveConf.get(CommonConfigurationKeys.FS_PERMISSIONS_UMASK_KEY);
+    // require the default umask to be 077, as both messages below state
+    if (!"077".equalsIgnoreCase(defaultUmask)) {
+      LOG.error("HiveServer2 requires a default umask of 077");
+      throw new InvalidConfigurationException(CommonConfigurationKeys.FS_PERMISSIONS_UMASK_KEY +
+          " should be 077 in non-testing mode");
+    }
+  }
+
+  // Instantiate the configured authz provider
+  public static AuthorizationProvider getAuthProvider(HiveConf hiveConf, HiveAuthzConf authzConf,
+        String serverName) throws Exception {
+    // get the provider class and resources from the authz config
+    String authProviderName = authzConf.get(AuthzConfVars.AUTHZ_PROVIDER.getVar());
+    String resourceName =
+        authzConf.get(AuthzConfVars.AUTHZ_PROVIDER_RESOURCE.getVar());
+    String providerBackendName = authzConf.get(AuthzConfVars.AUTHZ_PROVIDER_BACKEND.getVar());
+    String policyEngineName = authzConf.get(AuthzConfVars.AUTHZ_POLICY_ENGINE.getVar());
+
+    LOG.debug("Using authorization provider " + authProviderName +
+        " with resource " + resourceName + ", policy engine "
+        + policyEngineName + ", provider backend " + providerBackendName);
+
+    // load the provider backend class
+    Constructor<?> providerBackendConstructor =
+        Class.forName(providerBackendName).getDeclaredConstructor(Configuration.class, String.class);
+    providerBackendConstructor.setAccessible(true);
+    ProviderBackend providerBackend = (ProviderBackend) providerBackendConstructor
+        .newInstance(new Object[] {authzConf, resourceName});
+
+    // load the policy engine class
+    Constructor<?> policyConstructor =
+        Class.forName(policyEngineName).getDeclaredConstructor(String.class, ProviderBackend.class);
+    policyConstructor.setAccessible(true);
+    PolicyEngine policyEngine = (PolicyEngine) policyConstructor
+        .newInstance(new Object[] {serverName, providerBackend});
+
+    // load the authz provider class
+    Constructor<?> constructor =
+        Class.forName(authProviderName).getDeclaredConstructor(String.class, PolicyEngine.class);
+    constructor.setAccessible(true);
+    return (AuthorizationProvider) constructor.newInstance(new Object[] {resourceName, policyEngine});
+  }
+
+  // Instantiate the authz provider using PrivilegeCache; this method is used for the metadata filter function.
+  public static AuthorizationProvider getAuthProviderWithPrivilegeCache(HiveAuthzConf authzConf,
+      String serverName, PrivilegeCache privilegeCache) throws Exception {
+    // get the provider class and resources from the authz config
+    String authProviderName = authzConf.get(AuthzConfVars.AUTHZ_PROVIDER.getVar());
+    String resourceName =
+        authzConf.get(AuthzConfVars.AUTHZ_PROVIDER_RESOURCE.getVar());
+    String policyEngineName = authzConf.get(AuthzConfVars.AUTHZ_POLICY_ENGINE.getVar());
+
+    LOG.debug("Using authorization provider " + authProviderName +
+        " with resource " + resourceName + ", policy engine "
+        + policyEngineName + ", provider backend SimpleCacheProviderBackend");
+
+    ProviderBackend providerBackend = new SimpleCacheProviderBackend(authzConf, resourceName);
+    ProviderBackendContext context = new ProviderBackendContext();
+    context.setBindingHandle(privilegeCache);
+    providerBackend.initialize(context);
+
+    // load the policy engine class
+    Constructor<?> policyConstructor =
+        Class.forName(policyEngineName).getDeclaredConstructor(String.class, ProviderBackend.class);
+    policyConstructor.setAccessible(true);
+    PolicyEngine policyEngine = (PolicyEngine) policyConstructor
+        .newInstance(new Object[] {serverName, providerBackend});
+
+    // load the authz provider class
+    Constructor<?> constructor =
+        Class.forName(authProviderName).getDeclaredConstructor(String.class, PolicyEngine.class);
+    constructor.setAccessible(true);
+    return (AuthorizationProvider) constructor.newInstance(new Object[] {resourceName, policyEngine});
+  }
+
+  /**
+   * Validate the privilege for the given operation for the given subject
+   * @param hiveOp
+   * @param stmtAuthPrivileges
+   * @param subject
+   * @param inputHierarchyList
+   * @param outputHierarchyList
+   * @throws AuthorizationException
+   */
+  public void authorize(HiveOperation hiveOp, HiveAuthzPrivileges stmtAuthPrivileges,
+      Subject subject, List<List<DBModelAuthorizable>> inputHierarchyList,
+      List<List<DBModelAuthorizable>> outputHierarchyList)
+          throws AuthorizationException {
+    if (!open) {
+      throw new IllegalStateException("Binding has been closed");
+    }
+    boolean isDebug = LOG.isDebugEnabled();
+    if(isDebug) {
+      LOG.debug("Going to authorize statement " + hiveOp.name() +
+          " for subject " + subject.getName());
+    }
+
+    /* for each read and write entity captured by the compiler -
+     *    check if that object type is part of the input/output privilege list
+     *    If it is, then validate the access.
+     * Note: the hive compiler gathers information on additional entities like
+     * partitions, etc. which are not of interest at this point. Hence it is
+     * quite possible that we won't validate all the entities in the given list.
+     */
+
+    // Check read entities
+    Map<AuthorizableType, EnumSet<DBModelAction>> requiredInputPrivileges =
+        stmtAuthPrivileges.getInputPrivileges();
+    if(isDebug) {
+      LOG.debug("requiredInputPrivileges = " + requiredInputPrivileges);
+      LOG.debug("inputHierarchyList = " + inputHierarchyList);
+    }
+    Map<AuthorizableType, EnumSet<DBModelAction>> requiredOutputPrivileges =
+        stmtAuthPrivileges.getOutputPrivileges();
+    if(isDebug) {
+      LOG.debug("requiredOuputPrivileges = " + requiredOutputPrivileges);
+      LOG.debug("outputHierarchyList = " + outputHierarchyList);
+    }
+
+    boolean found = false;
+    for (Map.Entry<AuthorizableType, EnumSet<DBModelAction>> entry : requiredInputPrivileges.entrySet()) {
+      AuthorizableType key = entry.getKey();
+      for (List<DBModelAuthorizable> inputHierarchy : inputHierarchyList) {
+        if (getAuthzType(inputHierarchy).equals(key)) {
+          found = true;
+          if (!authProvider.hasAccess(subject, inputHierarchy, entry.getValue(), activeRoleSet)) {
+            throw new AuthorizationException("User " + subject.getName() +
+                " does not have privileges for " + hiveOp.name());
+          }
+        }
+      }
+      if (!found && !key.equals(AuthorizableType.URI) && !(hiveOp.equals(HiveOperation.QUERY))
+          && !(hiveOp.equals(HiveOperation.CREATETABLE_AS_SELECT))) {
+        //URI privileges are optional for some privileges: anyPrivilege, tableDDLAndOptionalUriPrivilege
+        //Query can mean select/insert/analyze, all of which have different required privileges.
+        //CreateAsSelect can have table/column privileges with select.
+        //For these alone we skip if there is no equivalent input privilege
+        //TODO: Even this case should be handled to make sure we do not skip the privilege check
+        //if we did not build the input privileges correctly
+        throw new AuthorizationException("Required privilege (" + key.name() +
+            ") not available in input privileges");
+      }
+      found = false;
+    }
+
+    for (AuthorizableType key : requiredOutputPrivileges.keySet()) {
+      for (List<DBModelAuthorizable> outputHierarchy : outputHierarchyList) {
+        if (getAuthzType(outputHierarchy).equals(key)) {
+          found = true;
+          if (!authProvider.hasAccess(subject, outputHierarchy, requiredOutputPrivileges.get(key), activeRoleSet)) {
+            throw new AuthorizationException("User " + subject.getName() +
+                " does not have privileges for " + hiveOp.name());
+          }
+        }
+      }
+      if (!found && !(key.equals(AuthorizableType.URI)) && !(hiveOp.equals(HiveOperation.QUERY))) {
+        //URI privileges are optional for some privileges: tableInsertPrivilege
+        //Query can mean select/insert/analyze, all of which have different required privileges.
+        //For these alone we skip if there is no equivalent output privilege
+        //TODO: Even this case should be handled to make sure we do not skip the privilege check
+        //if we did not build the output privileges correctly
+        throw new AuthorizationException("Required privilege (" + key.name() +
+            ") not available in output privileges");
+      }
+      found = false;
+    }
+
+  }
+
+  public void setActiveRoleSet(String activeRoleSet,
+      Set<TSentryRole> allowedRoles) throws SentryUserException {
+    this.activeRoleSet = parseActiveRoleSet(activeRoleSet, allowedRoles);
+    hiveConf.set(HiveAuthzConf.SENTRY_ACTIVE_ROLE_SET, activeRoleSet);
+  }
+
+  public ActiveRoleSet getActiveRoleSet() {
+    return activeRoleSet;
+  }
+
+  public Set<String> getGroups(Subject subject) {
+    return authProvider.getGroupMapping().getGroups(subject.getName());
+  }
+
+  public Server getAuthServer() {
+    if (!open) {
+      throw new IllegalStateException("Binding has been closed");
+    }
+    return authServer;
+  }
+
+  public HiveAuthzConf getAuthzConf() {
+    return authzConf;
+  }
+
+  public HiveConf getHiveConf() {
+    return hiveConf;
+  }
+
+  private AuthorizableType getAuthzType(List<DBModelAuthorizable> hierarchy) {
+    return hierarchy.get(hierarchy.size() - 1).getAuthzType();
+  }
+
+  public List<String> getLastQueryPrivilegeErrors() {
+    if (!open) {
+      throw new IllegalStateException("Binding has been closed");
+    }
+    return authProvider.getLastFailedPrivileges();
+  }
+
+  public void close() {
+    authProvider.close();
+  }
+
+  public AuthorizationProvider getCurrentAuthProvider() {
+    return authProvider;
+  }
+}
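
For context, here is a minimal sketch (not part of this patch) of how the binding is typically driven: a compile-time hook builds one authorizable hierarchy per read/write entity and checks it against a privilege template for the operation. The AuthorizeSketch wrapper, the db1/t1 names, and the QUERY template below are illustrative assumptions; the real per-operation templates are built elsewhere in this module (HiveAuthzPrivilegesMap).

import java.util.Arrays;
import java.util.Collections;
import java.util.EnumSet;
import java.util.List;

import org.apache.hadoop.hive.conf.HiveConf;
import org.apache.hadoop.hive.ql.plan.HiveOperation;
import org.apache.sentry.binding.hive.authz.HiveAuthzBinding;
import org.apache.sentry.binding.hive.authz.HiveAuthzPrivileges;
import org.apache.sentry.binding.hive.conf.HiveAuthzConf;
import org.apache.sentry.core.common.Subject;
import org.apache.sentry.core.model.db.DBModelAction;
import org.apache.sentry.core.model.db.DBModelAuthorizable;
import org.apache.sentry.core.model.db.DBModelAuthorizable.AuthorizableType;
import org.apache.sentry.core.model.db.Database;
import org.apache.sentry.core.model.db.Table;

public class AuthorizeSketch {
  public static void main(String[] args) throws Exception {
    HiveConf hiveConf = new HiveConf();                             // assumes hive-site.xml on the classpath
    HiveAuthzConf authzConf = HiveAuthzConf.getAuthzConf(hiveConf); // resolves hive.sentry.conf.url
    HiveAuthzBinding binding = new HiveAuthzBinding(hiveConf, authzConf);

    // Simplified privilege template: a query needs SELECT on its input table.
    HiveAuthzPrivileges queryPrivileges = new HiveAuthzPrivileges.AuthzPrivilegeBuilder()
        .addInputObjectPriviledge(AuthorizableType.Table, EnumSet.of(DBModelAction.SELECT))
        .setOperationScope(HiveAuthzPrivileges.HiveOperationScope.TABLE)
        .setOperationType(HiveAuthzPrivileges.HiveOperationType.QUERY)
        .build();

    // One hierarchy per read entity: Server -> Db -> Table.
    List<DBModelAuthorizable> input = Arrays.<DBModelAuthorizable>asList(
        binding.getAuthServer(), new Database("db1"), new Table("t1"));

    // Throws AuthorizationException if "alice" lacks SELECT on db1.t1.
    binding.authorize(HiveOperation.QUERY, queryPrivileges, new Subject("alice"),
        Collections.singletonList(input),
        Collections.<List<DBModelAuthorizable>>emptyList());
  }
}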

http://git-wip-us.apache.org/repos/asf/sentry/blob/7a30c819/sentry-binding/sentry-binding-hive-common/src/main/java/org/apache/sentry/binding/hive/authz/HiveAuthzPrivileges.java
----------------------------------------------------------------------
diff --git a/sentry-binding/sentry-binding-hive-common/src/main/java/org/apache/sentry/binding/hive/authz/HiveAuthzPrivileges.java b/sentry-binding/sentry-binding-hive-common/src/main/java/org/apache/sentry/binding/hive/authz/HiveAuthzPrivileges.java
new file mode 100644
index 0000000..f164b30
--- /dev/null
+++ b/sentry-binding/sentry-binding-hive-common/src/main/java/org/apache/sentry/binding/hive/authz/HiveAuthzPrivileges.java
@@ -0,0 +1,153 @@
+/*
+ * Licensed to the Apache Software Foundation (ASF) under one or more
+ * contributor license agreements.  See the NOTICE file distributed with
+ * this work for additional information regarding copyright ownership.
+ * The ASF licenses this file to You under the Apache License, Version 2.0
+ * (the "License"); you may not use this file except in compliance with
+ * the License.  You may obtain a copy of the License at
+ *
+ *      http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+package org.apache.sentry.binding.hive.authz;
+
+import java.util.EnumSet;
+import java.util.HashMap;
+import java.util.Map;
+
+import org.apache.sentry.core.model.db.DBModelAction;
+import org.apache.sentry.core.model.db.DBModelAuthorizable.AuthorizableType;
+
+/**
+ * Hive objects with required access privileges mapped to auth provider privileges
+ */
+public class HiveAuthzPrivileges {
+
+  /**
+   * Operation type used for privilege granting
+   */
+  public static enum HiveOperationType {
+    UNKNOWN,
+    DDL,
+    DML,
+    DATA_LOAD,
+    DATA_UNLOAD,
+    QUERY,
+    INFO
+  };
+
+  /**
+   * scope of the operation. The auth provider interface has different methods
+   * for some of these. Hence we want to be able to identify the auth scope of
+   * a statement, e.g. server level or DB level etc.
+   */
+  public static enum HiveOperationScope {
+    UNKNOWN,
+    SERVER,
+    DATABASE,
+    TABLE,
+    FUNCTION,
+    CONNECT,
+    COLUMN
+  }
+
+  public static enum HiveExtendedOperation {
+    TRANSFORM,
+    RESOURCE
+  }
+
+  public static class AuthzPrivilegeBuilder {
+    private final Map<AuthorizableType, EnumSet<DBModelAction>> inputPrivileges =
+        new HashMap<AuthorizableType, EnumSet<DBModelAction>>();
+    private final Map<AuthorizableType, EnumSet<DBModelAction>> outputPrivileges =
+        new HashMap<AuthorizableType, EnumSet<DBModelAction>>();
+    private HiveOperationType operationType;
+    private HiveOperationScope operationScope;
+
+    public AuthzPrivilegeBuilder addInputObjectPriviledge(AuthorizableType inputObjectType, EnumSet<DBModelAction> inputPrivilege) {
+      inputPrivileges.put(inputObjectType, inputPrivilege);
+      return this;
+    }
+
+    public AuthzPrivilegeBuilder addOutputEntityPriviledge(AuthorizableType outputEntityType, EnumSet<DBModelAction> outputPrivilege) {
+      outputPrivileges.put(outputEntityType, outputPrivilege);
+      return this;
+    }
+
+    public AuthzPrivilegeBuilder addOutputObjectPriviledge(AuthorizableType outputObjectType, EnumSet<DBModelAction> outputPrivilege) {
+      outputPrivileges.put(outputObjectType, outputPrivilege);
+      return this;
+    }
+
+    public AuthzPrivilegeBuilder setOperationType(HiveOperationType operationType) {
+      this.operationType = operationType;
+      return this;
+    }
+
+    public AuthzPrivilegeBuilder setOperationScope(HiveOperationScope operationScope) {
+      this.operationScope = operationScope;
+      return this;
+    }
+
+    public HiveAuthzPrivileges build() {
+      if (operationScope.equals(HiveOperationScope.UNKNOWN)) {
+        throw new UnsupportedOperationException("Operation scope is not set");
+      }
+
+      if (operationType.equals(HiveOperationType.UNKNOWN)) {
+        throw new UnsupportedOperationException("Operation type is not set");
+      }
+
+      return new HiveAuthzPrivileges(inputPrivileges, outputPrivileges, operationType, operationScope);
+    }
+  }
+
+  private final Map<AuthorizableType, EnumSet<DBModelAction>> inputPrivileges =
+      new HashMap<AuthorizableType, EnumSet<DBModelAction>>();
+  private final Map<AuthorizableType, EnumSet<DBModelAction>> outputPrivileges =
+      new HashMap<AuthorizableType, EnumSet<DBModelAction>>();
+  private final HiveOperationType operationType;
+  private final HiveOperationScope operationScope;
+
+  protected HiveAuthzPrivileges(Map<AuthorizableType, EnumSet<DBModelAction>> inputPrivileges,
+      Map<AuthorizableType, EnumSet<DBModelAction>> outputPrivileges, HiveOperationType operationType,
+      HiveOperationScope operationScope) {
+    this.inputPrivileges.putAll(inputPrivileges);
+    this.outputPrivileges.putAll(outputPrivileges);
+    this.operationScope = operationScope;
+    this.operationType = operationType;
+  }
+
+  /**
+   * @return the inputPrivileges
+   */
+  public Map<AuthorizableType, EnumSet<DBModelAction>> getInputPrivileges() {
+    return inputPrivileges;
+  }
+
+  /**
+   * @return the outputPrivileges
+   */
+  public Map<AuthorizableType, EnumSet<DBModelAction>> getOutputPrivileges() {
+    return outputPrivileges;
+  }
+
+  /**
+   * @return the operationType
+   */
+  public HiveOperationType getOperationType() {
+    return operationType;
+  }
+
+  /**
+   * @return the operationScope
+   */
+  public HiveOperationScope getOperationScope() {
+    return operationScope;
+  }
+}
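
The AuthzPrivilegeBuilder above is how per-operation privilege templates are declared. As a hypothetical example (the template name and action set are assumptions, not taken from this patch), a DML-style operation that reads one table and writes another could be declared like this, reusing the imports from the earlier sketch:

// Hypothetical template: SELECT on the source table, INSERT on the target.
HiveAuthzPrivileges insertPrivileges = new HiveAuthzPrivileges.AuthzPrivilegeBuilder()
    .addInputObjectPriviledge(AuthorizableType.Table, EnumSet.of(DBModelAction.SELECT))
    .addOutputObjectPriviledge(AuthorizableType.Table, EnumSet.of(DBModelAction.INSERT))
    .setOperationScope(HiveAuthzPrivileges.HiveOperationScope.TABLE)
    .setOperationType(HiveAuthzPrivileges.HiveOperationType.DML)
    .build();

Note that build() rejects templates whose scope or type is still UNKNOWN, so every declared operation is forced to state both.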

http://git-wip-us.apache.org/repos/asf/sentry/blob/7a30c819/sentry-binding/sentry-binding-hive-common/src/main/java/org/apache/sentry/binding/hive/authz/SentryConfigTool.java
----------------------------------------------------------------------
diff --git a/sentry-binding/sentry-binding-hive-common/src/main/java/org/apache/sentry/binding/hive/authz/SentryConfigTool.java b/sentry-binding/sentry-binding-hive-common/src/main/java/org/apache/sentry/binding/hive/authz/SentryConfigTool.java
new file mode 100644
index 0000000..73b0941
--- /dev/null
+++ b/sentry-binding/sentry-binding-hive-common/src/main/java/org/apache/sentry/binding/hive/authz/SentryConfigTool.java
@@ -0,0 +1,622 @@
+/*
+ * Licensed to the Apache Software Foundation (ASF) under one or more
+ * contributor license agreements.  See the NOTICE file distributed with
+ * this work for additional information regarding copyright ownership.
+ * The ASF licenses this file to You under the Apache License, Version 2.0
+ * (the "License"); you may not use this file except in compliance with
+ * the License.  You may obtain a copy of the License at
+ *
+ *      http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+
+package org.apache.sentry.binding.hive.authz;
+
+import java.security.CodeSource;
+import java.sql.Connection;
+import java.sql.DriverManager;
+import java.sql.ResultSet;
+import java.sql.SQLException;
+import java.sql.Statement;
+import java.util.Map;
+import java.util.Set;
+
+import org.apache.commons.cli.CommandLine;
+import org.apache.commons.cli.GnuParser;
+import org.apache.commons.cli.HelpFormatter;
+import org.apache.commons.cli.Option;
+import org.apache.commons.cli.OptionGroup;
+import org.apache.commons.cli.Options;
+import org.apache.commons.cli.ParseException;
+import org.apache.commons.cli.Parser;
+import org.apache.commons.lang3.StringUtils;
+import org.apache.hadoop.hive.conf.HiveConf;
+import org.apache.hadoop.hive.conf.HiveConf.ConfVars;
+import org.apache.hadoop.hive.ql.Driver;
+import org.apache.hadoop.hive.ql.parse.SemanticException;
+import org.apache.hadoop.hive.ql.processors.CommandProcessorResponse;
+import org.apache.hadoop.hive.ql.session.SessionState;
+import org.apache.log4j.Level;
+import org.apache.log4j.LogManager;
+import org.apache.sentry.Command;
+import org.apache.sentry.binding.hive.HiveAuthzBindingHookBase;
+import org.apache.sentry.binding.hive.SentryPolicyFileFormatFactory;
+import org.apache.sentry.binding.hive.SentryPolicyFileFormatter;
+import org.apache.sentry.binding.hive.conf.HiveAuthzConf;
+import org.apache.sentry.binding.hive.conf.HiveAuthzConf.AuthzConfVars;
+import org.apache.sentry.core.common.SentryConfigurationException;
+import org.apache.sentry.core.common.Subject;
+import org.apache.sentry.core.model.db.Server;
+import org.apache.sentry.provider.common.AuthorizationProvider;
+import org.apache.sentry.provider.db.service.thrift.SentryPolicyServiceClient;
+import org.apache.sentry.service.thrift.SentryServiceClientFactory;
+
+/**
+ * Command line tool for validating Sentry configuration and policy files,
+ * importing and exporting policy data, listing privileges for a user, and
+ * verifying queries against the policy.
+ */
+public class SentryConfigTool {
+  private String sentrySiteFile = null;
+  private String policyFile = null;
+  private String query = null;
+  private String jdbcURL = null;
+  private String user = null;
+  private String passWord = null;
+  private String importPolicyFilePath = null;
+  private String exportPolicyFilePath = null;
+  private boolean listPrivs = false;
+  private boolean validate = false;
+  private boolean importOverwriteRole = false;
+  private HiveConf hiveConf = null;
+  private HiveAuthzConf authzConf = null;
+  private AuthorizationProvider sentryProvider = null;
+
+  public SentryConfigTool() {
+
+  }
+
+  public AuthorizationProvider getSentryProvider() {
+    return sentryProvider;
+  }
+
+  public void setSentryProvider(AuthorizationProvider sentryProvider) {
+    this.sentryProvider = sentryProvider;
+  }
+
+  public HiveConf getHiveConf() {
+    return hiveConf;
+  }
+
+  public void setHiveConf(HiveConf hiveConf) {
+    this.hiveConf = hiveConf;
+  }
+
+  public HiveAuthzConf getAuthzConf() {
+    return authzConf;
+  }
+
+  public void setAuthzConf(HiveAuthzConf authzConf) {
+    this.authzConf = authzConf;
+  }
+
+  public boolean isValidate() {
+    return validate;
+  }
+
+  public void setValidate(boolean validate) {
+    this.validate = validate;
+  }
+
+  public String getImportPolicyFilePath() {
+    return importPolicyFilePath;
+  }
+
+  public void setImportPolicyFilePath(String importPolicyFilePath) {
+    this.importPolicyFilePath = importPolicyFilePath;
+  }
+
+  public String getExportPolicyFilePath() {
+    return exportPolicyFilePath;
+  }
+
+  public void setExportPolicyFilePath(String exportPolicyFilePath) {
+    this.exportPolicyFilePath = exportPolicyFilePath;
+  }
+
+  public String getSentrySiteFile() {
+    return sentrySiteFile;
+  }
+
+  public void setSentrySiteFile(String sentrySiteFile) {
+    this.sentrySiteFile = sentrySiteFile;
+  }
+
+  public String getPolicyFile() {
+    return policyFile;
+  }
+
+  public void setPolicyFile(String policyFile) {
+    this.policyFile = policyFile;
+  }
+
+  public String getQuery() {
+    return query;
+  }
+
+  public void setQuery(String query) {
+    this.query = query;
+  }
+
+  public String getJdbcURL() {
+    return jdbcURL;
+  }
+
+  public void setJdbcURL(String jdbcURL) {
+    this.jdbcURL = jdbcURL;
+  }
+
+  public String getUser() {
+    return user;
+  }
+
+  public void setUser(String user) {
+    this.user = user;
+  }
+
+  public String getPassWord() {
+    return passWord;
+  }
+
+  public void setPassWord(String passWord) {
+    this.passWord = passWord;
+  }
+
+  public boolean isListPrivs() {
+    return listPrivs;
+  }
+
+  public void setListPrivs(boolean listPrivs) {
+    this.listPrivs = listPrivs;
+  }
+
+  public boolean isImportOverwriteRole() {
+    return importOverwriteRole;
+  }
+
+  public void setImportOverwriteRole(boolean importOverwriteRole) {
+    this.importOverwriteRole = importOverwriteRole;
+  }
+
+  /**
+   * set the required system property to be read by HiveConf and AuthzConf
+   * @throws Exception
+   */
+  public void setupConfig() throws Exception {
+    System.out.println("Configuration: ");
+    CodeSource src = SentryConfigTool.class.getProtectionDomain()
+        .getCodeSource();
+    if (src != null) {
+      System.out.println("Sentry package jar: " + src.getLocation());
+    }
+
+    if (getPolicyFile() != null) {
+      System.setProperty(AuthzConfVars.AUTHZ_PROVIDER_RESOURCE.getVar(),
+          getPolicyFile());
+    }
+    System.setProperty(AuthzConfVars.SENTRY_TESTING_MODE.getVar(), "true");
+    setHiveConf(new HiveConf(SessionState.class));
+    getHiveConf().setVar(ConfVars.SEMANTIC_ANALYZER_HOOK,
+        HiveAuthzBindingHookBase.class.getName());
+    try {
+      System.out.println("Hive config: " + HiveConf.getHiveSiteLocation());
+    } catch (NullPointerException e) {
+      // Hack: hiveConf doesn't provide a reliable way to check if it found a
+      // valid hive-site
+      throw new SentryConfigurationException("Didn't find a hive-site.xml");
+    }
+
+    if (getSentrySiteFile() != null) {
+      getHiveConf()
+          .set(HiveAuthzConf.HIVE_SENTRY_CONF_URL, getSentrySiteFile());
+    }
+
+    setAuthzConf(HiveAuthzConf.getAuthzConf(getHiveConf()));
+    System.out.println("Sentry config: "
+        + getAuthzConf().getHiveAuthzSiteFile());
+    System.out.println("Sentry Policy: "
+        + getAuthzConf().get(AuthzConfVars.AUTHZ_PROVIDER_RESOURCE.getVar()));
+    System.out.println("Sentry server: "
+        + getAuthzConf().get(AuthzConfVars.AUTHZ_SERVER_NAME.getVar()));
+
+    setSentryProvider(getAuthorizationProvider());
+  }
+
+  // load auth provider
+  private AuthorizationProvider getAuthorizationProvider()
+      throws IllegalStateException, SentryConfigurationException {
+    String serverName = new Server(getAuthzConf().get(
+        AuthzConfVars.AUTHZ_SERVER_NAME.getVar())).getName();
+    // get the configured sentry provider
+    AuthorizationProvider sentryProvider = null;
+    try {
+      sentryProvider = HiveAuthzBinding.getAuthProvider(getHiveConf(),
+          authzConf, serverName);
+    } catch (SentryConfigurationException eC) {
+      printConfigErrors(eC);
+    } catch (Exception e) {
+      throw new IllegalStateException("Couldn't load sentry provider ", e);
+    }
+    return sentryProvider;
+  }
+
+  // validate policy files
+  public void validatePolicy() throws Exception {
+    try {
+      getSentryProvider().validateResource(true);
+    } catch (SentryConfigurationException e) {
+      printConfigErrors(e);
+    }
+    System.out.println("No errors found in the policy file");
+  }
+
+  // import the sentry mapping data to database
+  public void importPolicy() throws Exception {
+    String requestorUserName = System.getProperty("user.name", "");
+    // get the FileFormatter according to the configuration
+    SentryPolicyFileFormatter sentryPolicyFileFormatter = SentryPolicyFileFormatFactory
+        .createFileFormatter(authzConf);
+    // parse the input file, get the mapping data in map structure
+    Map<String, Map<String, Set<String>>> policyFileMappingData = sentryPolicyFileFormatter.parse(
+        importPolicyFilePath, authzConf);
+    // TODO: add a validator to check the data's values, format, and hierarchy
+    SentryPolicyServiceClient client = SentryServiceClientFactory.create(getAuthzConf());
+    // import the mapping data to database
+    client.importPolicy(policyFileMappingData, requestorUserName, importOverwriteRole);
+  }
+
+  // export the sentry mapping data to file
+  public void exportPolicy() throws Exception {
+    String requestorUserName = System.getProperty("user.name", "");
+    SentryPolicyServiceClient client = SentryServiceClientFactory.create(getAuthzConf());
+    // export the sentry mapping data from database to map structure
+    Map<String, Map<String, Set<String>>> policyFileMappingData = client
+        .exportPolicy(requestorUserName);
+    // get the FileFormatter according to the configuration
+    SentryPolicyFileFormatter sentryPolicyFileFormatter = SentryPolicyFileFormatFactory
+        .createFileFormatter(authzConf);
+    // write the sentry mapping data to exportPolicyFilePath in map structure
+    sentryPolicyFileFormatter.write(exportPolicyFilePath, policyFileMappingData);
+  }
+
+  // list permissions for given user
+  public void listPrivs() throws Exception {
+    getSentryProvider().validateResource(true);
+    System.out.println("Available privileges for user " + getUser() + ":");
+    Set<String> permList = getSentryProvider().listPrivilegesForSubject(
+        new Subject(getUser()));
+    for (String perms : permList) {
+      System.out.println("\t" + perms);
+    }
+    if (permList.isEmpty()) {
+      System.out.println("\t*** No permissions available ***");
+    }
+  }
+
+  // Verify the given query
+  public void verifyLocalQuery(String queryStr) throws Exception {
+    // setup Hive driver
+    SessionState session = new SessionState(getHiveConf());
+    SessionState.start(session);
+    Driver driver = new Driver(session.getConf(), getUser());
+
+    // compile the query
+    CommandProcessorResponse compilerStatus = driver
+        .compileAndRespond(queryStr);
+    if (compilerStatus.getResponseCode() != 0) {
+      String errMsg = compilerStatus.getErrorMessage();
+      if (errMsg.contains(HiveAuthzConf.HIVE_SENTRY_PRIVILEGE_ERROR_MESSAGE)) {
+        printMissingPerms(getHiveConf().get(
+            HiveAuthzConf.HIVE_SENTRY_AUTH_ERRORS));
+      }
+      throw new SemanticException("Compilation error: "
+          + compilerStatus.getErrorMessage());
+    }
+    driver.close();
+    System.out
+        .println("User " + getUser() + " has privileges to run the query");
+  }
+
+  // connect to remote HS2 and run mock query
+  public void verifyRemoteQuery(String queryStr) throws Exception {
+    Class.forName("org.apache.hive.jdbc.HiveDriver");
+    Connection conn = DriverManager.getConnection(getJdbcURL(), getUser(),
+        getPassWord());
+    Statement stmt = conn.createStatement();
+    if (!isSentryEnabledOnHiveServer(stmt)) {
+      throw new IllegalStateException("Sentry is not enabled on HiveServer2");
+    }
+    stmt.execute("set " + HiveAuthzConf.HIVE_SENTRY_MOCK_COMPILATION + 
"=true");
+    try {
+      stmt.execute(queryStr);
+    } catch (SQLException e) {
+      String errMsg = e.getMessage();
+      if (errMsg.contains(HiveAuthzConf.HIVE_SENTRY_MOCK_ERROR)) {
+        System.out.println("User "
+            + readConfig(stmt, HiveAuthzConf.HIVE_SENTRY_SUBJECT_NAME)
+            + " has privileges to run the query");
+        return;
+      } else if (errMsg
+          .contains(HiveAuthzConf.HIVE_SENTRY_PRIVILEGE_ERROR_MESSAGE)) {
+        printMissingPerms(readConfig(stmt,
+            HiveAuthzConf.HIVE_SENTRY_AUTH_ERRORS));
+        throw e;
+      } else {
+        throw e;
+      }
+    } finally {
+      if (!stmt.isClosed()) {
+        stmt.close();
+      }
+      conn.close();
+    }
+
+  }
+
+  // verify the sentry session hook is set
+  private boolean isSentryEnabledOnHiveServer(Statement stmt)
+      throws SQLException {
+    String bindingString = readConfig(stmt,
+        HiveConf.ConfVars.HIVE_SERVER2_SESSION_HOOK.varname).toUpperCase();
+    return bindingString.contains("org.apache.sentry.binding.hive".toUpperCase())
+        && bindingString.contains("HiveAuthzBindingSessionHook".toUpperCase());
+  }
+
+  // read a config value using 'set' statement
+  private String readConfig(Statement stmt, String configKey)
+      throws SQLException {
+    ResultSet res = stmt.executeQuery("set " + configKey);
+    if (!res.next()) {
+      return null;
+    }
+    // parse key=value result format
+    String result = res.getString(1);
+    res.close();
+    return result.substring(result.indexOf("=") + 1);
+  }
+
+  // print configuration/policy file errors and warnings
+  private void printConfigErrors(SentryConfigurationException configException)
+      throws SentryConfigurationException {
+    System.out.println(" *** Found configuration problems *** ");
+    for (String errMsg : configException.getConfigErrors()) {
+      System.out.println("ERROR: " + errMsg);
+    }
+    for (String warnMsg : configException.getConfigWarnings()) {
+      System.out.println("Warning: " + warnMsg);
+    }
+    throw configException;
+  }
+
+  // extract the authorization errors from config property and print
+  private void printMissingPerms(String errMsg) {
+    if (errMsg == null || errMsg.isEmpty()) {
+      return;
+    }
+    System.out.println("*** Query compilation failed ***");
+    String perms[] = errMsg.replaceFirst(
+        ".*" + HiveAuthzConf.HIVE_SENTRY_PRIVILEGE_ERROR_MESSAGE, "")
+        .split(";");
+    System.out.println("Required privileges for given query:");
+    for (int count = 0; count < perms.length; count++) {
+      System.out.println(" \t " + perms[count]);
+    }
+  }
+
+  // print usage
+  private void usage(Options sentryOptions) {
+    HelpFormatter formatter = new HelpFormatter();
+    formatter.printHelp("sentry --command config-tool", sentryOptions);
+    System.exit(-1);
+  }
+
+  /**
+   * parse arguments
+   *
+   * <pre>
+   *   -d,--debug                  Enable debug output
+   *   -e,--query <arg>            Query privilege verification, requires -u
+   *   -h,--help                   Print usage
+   *   -i,--policyIni <arg>        Policy file path
+   *   -j,--jdbcURL <arg>          JDBC URL
+   *   -l,--listPrivs,--listPerms  List privileges for given user, requires -u
+   *   -p,--password <arg>         Password
+   *   -s,--sentry-site <arg>      sentry-site file path
+   *   -u,--user <arg>             user name
+   *   -v,--validate               Validate policy file
+   *   -I,--import                 Import policy file
+   *   -E,--export                 Export policy file
+   *   -o,--overwrite              Overwrite the existing role data during import
+   * </pre>
+   *
+   * @param args
+   */
+  private void parseArgs(String[] args) {
+    boolean enableDebug = false;
+
+    Options sentryOptions = new Options();
+
+    Option helpOpt = new Option("h", "help", false, "Print usage");
+    helpOpt.setRequired(false);
+
+    Option validateOpt = new Option("v", "validate", false,
+        "Validate policy file");
+    validateOpt.setRequired(false);
+
+    Option queryOpt = new Option("e", "query", true,
+        "Query privilege verification, requires -u");
+    queryOpt.setRequired(false);
+
+    Option listPermsOpt = new Option("l", "listPerms", false,
+        "list permissions for given user, requires -u");
+    listPermsOpt.setRequired(false);
+    Option listPrivsOpt = new Option("listPrivs", false,
+        "list privileges for given user, requires -u");
+    listPrivsOpt.setRequired(false);
+
+    Option importOpt = new Option("I", "import", true,
+        "Import policy file");
+    importOpt.setRequired(false);
+
+    Option exportOpt = new Option("E", "export", true, "Export policy file");
+    exportOpt.setRequired(false);
+    // required args
+    OptionGroup sentryOptGroup = new OptionGroup();
+    sentryOptGroup.addOption(helpOpt);
+    sentryOptGroup.addOption(validateOpt);
+    sentryOptGroup.addOption(queryOpt);
+    sentryOptGroup.addOption(listPermsOpt);
+    sentryOptGroup.addOption(listPrivsOpt);
+    sentryOptGroup.addOption(importOpt);
+    sentryOptGroup.addOption(exportOpt);
+    sentryOptGroup.setRequired(true);
+    sentryOptions.addOptionGroup(sentryOptGroup);
+
+    // optional args
+    Option jdbcArg = new Option("j", "jdbcURL", true, "JDBC URL");
+    jdbcArg.setRequired(false);
+    sentryOptions.addOption(jdbcArg);
+
+    Option sentrySitePath = new Option("s", "sentry-site", true,
+        "sentry-site file path");
+    sentrySitePath.setRequired(false);
+    sentryOptions.addOption(sentrySitePath);
+
+    Option globalPolicyPath = new Option("i", "policyIni", true,
+        "Policy file path");
+    globalPolicyPath.setRequired(false);
+    sentryOptions.addOption(globalPolicyPath);
+
+    Option userOpt = new Option("u", "user", true, "user name");
+    userOpt.setRequired(false);
+    sentryOptions.addOption(userOpt);
+
+    Option passWordOpt = new Option("p", "password", true, "Password");
+    passWordOpt.setRequired(false);
+    sentryOptions.addOption(passWordOpt);
+
+    Option debugOpt = new Option("d", "debug", false, "enable debug output");
+    debugOpt.setRequired(false);
+    sentryOptions.addOption(debugOpt);
+
+    Option overwriteOpt = new Option("o", "overwrite", false, "enable import overwrite");
+    overwriteOpt.setRequired(false);
+    sentryOptions.addOption(overwriteOpt);
+
+    try {
+      Parser parser = new GnuParser();
+      CommandLine cmd = parser.parse(sentryOptions, args);
+
+      for (Option opt : cmd.getOptions()) {
+        if (opt.getOpt().equals("s")) {
+          setSentrySiteFile(opt.getValue());
+        } else if (opt.getOpt().equals("i")) {
+          setPolicyFile(opt.getValue());
+        } else if (opt.getOpt().equals("e")) {
+          setQuery(opt.getValue());
+        } else if (opt.getOpt().equals("j")) {
+          setJdbcURL(opt.getValue());
+        } else if (opt.getOpt().equals("u")) {
+          setUser(opt.getValue());
+        } else if (opt.getOpt().equals("p")) {
+          setPassWord(opt.getValue());
+        } else if (opt.getOpt().equals("l") || 
opt.getOpt().equals("listPrivs")) {
+          setListPrivs(true);
+        } else if (opt.getOpt().equals("v")) {
+          setValidate(true);
+        } else if (opt.getOpt().equals("I")) {
+          setImportPolicyFilePath(opt.getValue());
+        } else if (opt.getOpt().equals("E")) {
+          setExportPolicyFilePath(opt.getValue());
+        } else if (opt.getOpt().equals("h")) {
+          usage(sentryOptions);
+        } else if (opt.getOpt().equals("d")) {
+          enableDebug = true;
+        } else if (opt.getOpt().equals("o")) {
+          setImportOverwriteRole(true);
+        }
+      }
+
+      if (isListPrivs() && getUser() == null) {
+        throw new ParseException("Can't use -l without -u ");
+      }
+      if (getQuery() != null && getUser() == null) {
+        throw new ParseException("Must use -u with -e ");
+      }
+    } catch (ParseException e1) {
+      usage(sentryOptions);
+    }
+
+    if (!enableDebug) {
+      // turn off log
+      LogManager.getRootLogger().setLevel(Level.OFF);
+    }
+  }
+
+  public static class CommandImpl implements Command {
+    @Override
+    public void run(String[] args) throws Exception {
+      SentryConfigTool sentryTool = new SentryConfigTool();
+
+      try {
+        // parse arguments
+        sentryTool.parseArgs(args);
+
+        // load configuration
+        sentryTool.setupConfig();
+
+        // validate configuration
+        if (sentryTool.isValidate()) {
+          sentryTool.validatePolicy();
+        }
+
+        if (!StringUtils.isEmpty(sentryTool.getImportPolicyFilePath())) {
+          sentryTool.importPolicy();
+        }
+
+        if (!StringUtils.isEmpty(sentryTool.getExportPolicyFilePath())) {
+          sentryTool.exportPolicy();
+        }
+
+        // list permissions for given user
+        if (sentryTool.isListPrivs()) {
+          sentryTool.listPrivs();
+        }
+
+        // verify given query
+        if (sentryTool.getQuery() != null) {
+          if (sentryTool.getJdbcURL() != null) {
+            sentryTool.verifyRemoteQuery(sentryTool.getQuery());
+          } else {
+            sentryTool.verifyLocalQuery(sentryTool.getQuery());
+          }
+        }
+      } catch (Exception e) {
+        System.out.println("Sentry tool reported Errors: " + e.getMessage());
+        e.printStackTrace(System.out);
+        System.exit(1);
+      }
+    }
+  }
+}
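
To make the tool's entry point concrete, here is a sketch of driving it through its Command interface; the file paths are placeholders, not defaults shipped with this patch:

import org.apache.sentry.binding.hive.authz.SentryConfigTool;

public class ValidatePolicyExample {
  public static void main(String[] args) throws Exception {
    new SentryConfigTool.CommandImpl().run(new String[] {
        "-v",                                      // validate the policy file
        "-s", "/etc/sentry/conf/sentry-site.xml",  // sentry-site location
        "-i", "/path/to/sentry-provider.ini"       // policy file to validate
    });
  }
}

The same arguments work from the shell via the wrapper shown in usage() ("sentry --command config-tool"). Note that CommandImpl catches failures, prints the stack trace, and calls System.exit(1) itself.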

http://git-wip-us.apache.org/repos/asf/sentry/blob/7a30c819/sentry-binding/sentry-binding-hive-common/src/main/java/org/apache/sentry/binding/hive/conf/HiveAuthzConf.java
----------------------------------------------------------------------
diff --git a/sentry-binding/sentry-binding-hive-common/src/main/java/org/apache/sentry/binding/hive/conf/HiveAuthzConf.java b/sentry-binding/sentry-binding-hive-common/src/main/java/org/apache/sentry/binding/hive/conf/HiveAuthzConf.java
new file mode 100644
index 0000000..5a89af2
--- /dev/null
+++ b/sentry-binding/sentry-binding-hive-common/src/main/java/org/apache/sentry/binding/hive/conf/HiveAuthzConf.java
@@ -0,0 +1,269 @@
+/*
+ * Licensed to the Apache Software Foundation (ASF) under one or more
+ * contributor license agreements.  See the NOTICE file distributed with
+ * this work for additional information regarding copyright ownership.
+ * The ASF licenses this file to You under the Apache License, Version 2.0
+ * (the "License"); you may not use this file except in compliance with
+ * the License.  You may obtain a copy of the License at
+ *
+ *      http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+package org.apache.sentry.binding.hive.conf;
+
+import java.net.MalformedURLException;
+import java.net.URL;
+import java.util.HashMap;
+import java.util.Map;
+import java.util.Map.Entry;
+
+import org.apache.hadoop.conf.Configuration;
+import org.apache.hadoop.hive.conf.HiveConf;
+import org.slf4j.Logger;
+import org.slf4j.LoggerFactory;
+
+
+public class HiveAuthzConf extends Configuration {
+
+  /**
+   * Configuration key used in hive-site.xml to point at sentry-site.xml
+   */
+  public static final String HIVE_ACCESS_CONF_URL = "hive.access.conf.url";
+  public static final String HIVE_SENTRY_CONF_URL = "hive.sentry.conf.url";
+  public static final String HIVE_ACCESS_SUBJECT_NAME = "hive.access.subject.name";
+  public static final String HIVE_SENTRY_SUBJECT_NAME = "hive.sentry.subject.name";
+  public static final String HIVE_SENTRY_AUTH_ERRORS = "sentry.hive.authorization.errors";
+  public static final String HIVE_SENTRY_MOCK_COMPILATION = "sentry.hive.mock.compilation";
+  public static final String HIVE_SENTRY_MOCK_ERROR = "sentry.hive.mock.error";
+  public static final String HIVE_SENTRY_PRIVILEGE_ERROR_MESSAGE = "No valid privileges";
+  /**
+   * Property used to persist the role set in the session. This is not public 
for now.
+   */
+  public static final String SENTRY_ACTIVE_ROLE_SET = "hive.sentry.active.role.set";
+
+  public static final String HIVE_SENTRY_SECURITY_COMMAND_WHITELIST =
+      "hive.sentry.security.command.whitelist";
+  public static final String HIVE_SENTRY_SECURITY_COMMAND_WHITELIST_DEFAULT =
+      "set,reset,reload";
+
+  public static final String HIVE_SENTRY_SERDE_WHITELIST = "hive.sentry.serde.whitelist";
+  public static final String HIVE_SENTRY_SERDE_WHITELIST_DEFAULT = "org.apache.hadoop.hive.serde2";
+
+  // Disable the serde Uri privileges by default for backward compatibility.
+  public static final String HIVE_SENTRY_SERDE_URI_PRIVILIEGES_ENABLED = "hive.sentry.turn.on.serde.uri.privileges";
+  public static final boolean HIVE_SENTRY_SERDE_URI_PRIVILIEGES_ENABLED_DEFAULT = false;
+
+  public static final String HIVE_UDF_WHITE_LIST =
+      "concat,substr,substring,space,repeat,ascii,lpad,rpad,size,round,floor,sqrt,ceil," +
+      "ceiling,rand,abs,pmod,ln,log2,sin,asin,cos,acos,log10,log,exp,power,pow,sign,pi," +
+      "degrees,radians,atan,tan,e,conv,bin,hex,unhex,base64,unbase64,encode,decode,upper," +
+      "lower,ucase,lcase,trim,ltrim,rtrim,length,reverse,field,find_in_set,initcap,like," +
+      "rlike,regexp,regexp_replace,regexp_extract,parse_url,nvl,split,str_to_map,translate" +
+      ",positive,negative,day,dayofmonth,month,year,hour,minute,second,from_unixtime," +
+      "to_date,weekofyear,last_day,date_add,date_sub,datediff,add_months,get_json_object," +
+      "xpath_string,xpath_boolean,xpath_number,xpath_double,xpath_float,xpath_long," +
+      "xpath_int,xpath_short,xpath,+,-,*,/,%,div,&,|,^,~,current_database,isnull," +
+      "isnotnull,if,in,and,or,=,==,<=>,!=,<>,<,<=,>,>=,not,!,between,ewah_bitmap_and," +
+      "ewah_bitmap_or,ewah_bitmap_empty,boolean,tinyint,smallint,int,bigint,float,double," +
+      "string,date,timestamp,binary,decimal,varchar,char,max,min,sum,count,avg,std,stddev," +
+      "stddev_pop,stddev_samp,variance,var_pop,var_samp,covar_pop,covar_samp,corr," +
+      "histogram_numeric,percentile_approx,collect_set,collect_list,ngrams," +
+      "context_ngrams,ewah_bitmap,compute_stats,percentile," +
+      "array,assert_true,map,struct,named_struct,create_union,case,when,hash,coalesce," +
+      "index,in_file,instr,locate,elt,concat_ws,sort_array," +
+      "array_contains,sentences,map_keys,map_values,format_number,printf,greatest,least," +
+      "from_utc_timestamp,to_utc_timestamp,unix_timestamp,to_unix_timestamp,explode," +
+      "inline,json_tuple,parse_url_tuple,posexplode,stack,lead,lag,row_number,rank," +
+      "dense_rank,percent_rank,cume_dist,ntile,first_value,last_value,noop,noopwithmap," +
+      "noopstreaming,noopwithmapstreaming,windowingtablefunction,matchpath";
+
+  public static final String HIVE_UDF_BLACK_LIST = "reflect,reflect2,java_method";
+
+  /**
+   * Config setting definitions
+   */
+  public static enum AuthzConfVars {
+    AUTHZ_PROVIDER("sentry.provider",
+      
"org.apache.sentry.provider.common.HadoopGroupResourceAuthorizationProvider"),
+    AUTHZ_PROVIDER_RESOURCE("sentry.hive.provider.resource", ""),
+    AUTHZ_PROVIDER_BACKEND("sentry.hive.provider.backend", 
"org.apache.sentry.provider.file.SimpleFileProviderBackend"),
+    AUTHZ_POLICY_ENGINE("sentry.hive.policy.engine", 
"org.apache.sentry.policy.db.SimpleDBPolicyEngine"),
+    AUTHZ_POLICY_FILE_FORMATTER(
+        "sentry.hive.policy.file.formatter",
+        "org.apache.sentry.binding.hive.SentryIniPolicyFileFormatter"),
+    AUTHZ_SERVER_NAME("sentry.hive.server", ""),
+    AUTHZ_RESTRICT_DEFAULT_DB("sentry.hive.restrict.defaultDB", "false"),
+    SENTRY_TESTING_MODE("sentry.hive.testing.mode", "false"),
+    AUTHZ_ALLOW_HIVE_IMPERSONATION("sentry.hive.allow.hive.impersonation", 
"false"),
+    AUTHZ_ONFAILURE_HOOKS("sentry.hive.failure.hooks", ""),
+    AUTHZ_METASTORE_SERVICE_USERS("sentry.metastore.service.users", null),
+    AUTHZ_SYNC_ALTER_WITH_POLICY_STORE("sentry.hive.sync.alter", "true"),
+    AUTHZ_SYNC_CREATE_WITH_POLICY_STORE("sentry.hive.sync.create", "false"),
+    AUTHZ_SYNC_DROP_WITH_POLICY_STORE("sentry.hive.sync.drop", "true"),
+
+    AUTHZ_PROVIDER_DEPRECATED("hive.sentry.provider",
+      "org.apache.sentry.provider.file.ResourceAuthorizationProvider"),
+    AUTHZ_PROVIDER_RESOURCE_DEPRECATED("hive.sentry.provider.resource", ""),
+    AUTHZ_SERVER_NAME_DEPRECATED("hive.sentry.server", ""),
+    AUTHZ_RESTRICT_DEFAULT_DB_DEPRECATED("hive.sentry.restrict.defaultDB", 
"false"),
+    SENTRY_TESTING_MODE_DEPRECATED("hive.sentry.testing.mode", "false"),
+    
AUTHZ_ALLOW_HIVE_IMPERSONATION_DEPRECATED("hive.sentry.allow.hive.impersonation",
 "false"),
+    AUTHZ_ONFAILURE_HOOKS_DEPRECATED("hive.sentry.failure.hooks", "");
+
+    private final String varName;
+    private final String defaultVal;
+
+    AuthzConfVars(String varName, String defaultVal) {
+      this.varName = varName;
+      this.defaultVal = defaultVal;
+    }
+
+    public String getVar() {
+      return varName;
+    }
+
+    public String getDefault() {
+      return defaultVal;
+    }
+
+    public static String getDefault(String varName) {
+      for (AuthzConfVars oneVar : AuthzConfVars.values()) {
+        if(oneVar.getVar().equalsIgnoreCase(varName)) {
+          return oneVar.getDefault();
+        }
+      }
+      return null;
+    }
+  }
+
+  // map of current property names -> deprecated property names.
+  // The binding layer code should work if the deprecated property names are provided,
+  // as long as the new property names aren't also provided.  Since the binding code
+  // only calls the new property names, we require a map from current names to deprecated
+  // names in order to check if the deprecated name of a property was set.
+  private static final Map<String, AuthzConfVars> currentToDeprecatedProps =
+      new HashMap<String, AuthzConfVars>();
+  static {
+    currentToDeprecatedProps.put(AuthzConfVars.AUTHZ_PROVIDER.getVar(), AuthzConfVars.AUTHZ_PROVIDER_DEPRECATED);
+    currentToDeprecatedProps.put(AuthzConfVars.AUTHZ_PROVIDER_RESOURCE.getVar(), AuthzConfVars.AUTHZ_PROVIDER_RESOURCE_DEPRECATED);
+    currentToDeprecatedProps.put(AuthzConfVars.AUTHZ_SERVER_NAME.getVar(), AuthzConfVars.AUTHZ_SERVER_NAME_DEPRECATED);
+    currentToDeprecatedProps.put(AuthzConfVars.AUTHZ_RESTRICT_DEFAULT_DB.getVar(), AuthzConfVars.AUTHZ_RESTRICT_DEFAULT_DB_DEPRECATED);
+    currentToDeprecatedProps.put(AuthzConfVars.SENTRY_TESTING_MODE.getVar(), AuthzConfVars.SENTRY_TESTING_MODE_DEPRECATED);
+    currentToDeprecatedProps.put(AuthzConfVars.AUTHZ_ALLOW_HIVE_IMPERSONATION.getVar(), AuthzConfVars.AUTHZ_ALLOW_HIVE_IMPERSONATION_DEPRECATED);
+    currentToDeprecatedProps.put(AuthzConfVars.AUTHZ_ONFAILURE_HOOKS.getVar(), AuthzConfVars.AUTHZ_ONFAILURE_HOOKS_DEPRECATED);
+  };
+
+  private static final Logger LOG = LoggerFactory
+      .getLogger(HiveAuthzConf.class);
+  public static final String AUTHZ_SITE_FILE = "sentry-site.xml";
+  private final String hiveAuthzSiteFile;
+
+  public HiveAuthzConf(URL hiveAuthzSiteURL) {
+    super();
+    LOG.info("DefaultFS: " + super.get("fs.defaultFS"));
+    addResource(hiveAuthzSiteURL);
+    applySystemProperties();
+    LOG.info("DefaultFS: " + super.get("fs.defaultFS"));
+    this.hiveAuthzSiteFile = hiveAuthzSiteURL.toString();
+  }
+
+  /**
+   * Apply system properties to this object if the property name is defined in
+   * AuthzConfVars and the value is non-null and not an empty string.
+   */
+  private void applySystemProperties() {
+    Map<String, String> systemProperties = getConfSystemProperties();
+    for (Entry<String, String> systemProperty : systemProperties.entrySet()) {
+      this.set(systemProperty.getKey(), systemProperty.getValue());
+    }
+  }
+
+  /**
+   * Returns a mapping from config variable name to value for every config
+   * variable that has been set via a System property.
+   */
+  public static Map<String, String> getConfSystemProperties() {
+    Map<String, String> systemProperties = new HashMap<String, String>();
+
+    for (AuthzConfVars oneVar : AuthzConfVars.values()) {
+      String value = System.getProperty(oneVar.getVar());
+      if (value != null && value.length() > 0) {
+        systemProperties.put(oneVar.getVar(), value);
+      }
+    }
+    return systemProperties;
+  }
+
+  @Override
+  public String get(String varName) {
+    return get(varName, null);
+  }
+
+  @Override
+  public String get(String varName, String defaultVal) {
+    String retVal = super.get(varName);
+    if (retVal == null) {
+      // check if the deprecated value is set here
+      if (currentToDeprecatedProps.containsKey(varName)) {
+        retVal = super.get(currentToDeprecatedProps.get(varName).getVar());
+      }
+      if (retVal == null) {
+        retVal = AuthzConfVars.getDefault(varName);
+      } else {
+        LOG.warn("Using the deprecated config setting " + 
currentToDeprecatedProps.get(varName).getVar() +
+            " instead of " + varName);
+      }
+    }
+    if (retVal == null) {
+      retVal = defaultVal;
+    }
+    return retVal;
+  }
+
+  public String getHiveAuthzSiteFile() {
+    return hiveAuthzSiteFile;
+  }
+
+  /**
+   * Extract the authz config file path from the given hive conf and load the
+   * authz config
+   * @param hiveConf HiveConf carrying the sentry-site.xml URL
+   * @return the loaded HiveAuthzConf
+   * @throws IllegalArgumentException if the URL is missing or malformed
+   */
+  public static HiveAuthzConf getAuthzConf(HiveConf hiveConf)
+    throws IllegalArgumentException {
+    boolean deprecatedConfigFile = false;
+
+    String hiveAuthzConf = hiveConf.get(HiveAuthzConf.HIVE_SENTRY_CONF_URL);
+    if (hiveAuthzConf == null
+        || (hiveAuthzConf = hiveAuthzConf.trim()).isEmpty()) {
+      hiveAuthzConf = hiveConf.get(HiveAuthzConf.HIVE_ACCESS_CONF_URL);
+      deprecatedConfigFile = true;
+    }
+
+    if (hiveAuthzConf == null
+        || (hiveAuthzConf = hiveAuthzConf.trim()).isEmpty()) {
+      throw new IllegalArgumentException("Configuration key "
+          + HiveAuthzConf.HIVE_SENTRY_CONF_URL + " value '" + hiveAuthzConf
+          + "' is invalid.");
+    }
+
+    try {
+      return new HiveAuthzConf(new URL(hiveAuthzConf));
+    } catch (MalformedURLException e) {
+      if (deprecatedConfigFile) {
+        throw new IllegalArgumentException("Configuration key "
+            + HiveAuthzConf.HIVE_ACCESS_CONF_URL
+            + " specifies a malformed URL '" + hiveAuthzConf + "'", e);
+      } else {
+        throw new IllegalArgumentException("Configuration key "
+            + HiveAuthzConf.HIVE_SENTRY_CONF_URL
+            + " specifies a malformed URL '" + hiveAuthzConf + "'", e);
+      }
+    }
+  }
+}
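
The lookup order implemented by get() above is: the current property name, then its deprecated alias (with a warning), then the AuthzConfVars default, then the caller-supplied default. A minimal sketch of how a caller would observe this, assuming a reachable sentry-site.xml URL (the path below is a placeholder, not part of this patch):

    import java.net.URL;
    import org.apache.sentry.binding.hive.conf.HiveAuthzConf;
    import org.apache.sentry.binding.hive.conf.HiveAuthzConf.AuthzConfVars;

    public class AuthzConfLookupSketch {
      public static void main(String[] args) throws Exception {
        // Placeholder location for sentry-site.xml.
        HiveAuthzConf conf =
            new HiveAuthzConf(new URL("file:///etc/sentry/conf/sentry-site.xml"));

        // Resolves "sentry.hive.server" if set; otherwise falls back to the
        // deprecated "hive.sentry.server" (logging a warning); otherwise the
        // enum default ("") is returned.
        String server = conf.get(AuthzConfVars.AUTHZ_SERVER_NAME.getVar());

        // System properties set before construction also apply, e.g.
        // -Dsentry.hive.testing.mode=true is picked up by applySystemProperties().
        String testing = conf.get(AuthzConfVars.SENTRY_TESTING_MODE.getVar());
        System.out.println(server + " / " + testing);
      }
    }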

http://git-wip-us.apache.org/repos/asf/sentry/blob/7a30c819/sentry-binding/sentry-binding-hive-common/src/main/java/org/apache/sentry/binding/hive/conf/InvalidConfigurationException.java
----------------------------------------------------------------------
diff --git a/sentry-binding/sentry-binding-hive-common/src/main/java/org/apache/sentry/binding/hive/conf/InvalidConfigurationException.java b/sentry-binding/sentry-binding-hive-common/src/main/java/org/apache/sentry/binding/hive/conf/InvalidConfigurationException.java
new file mode 100644
index 0000000..b658922
--- /dev/null
+++ b/sentry-binding/sentry-binding-hive-common/src/main/java/org/apache/sentry/binding/hive/conf/InvalidConfigurationException.java
@@ -0,0 +1,31 @@
+/*
+ * Licensed to the Apache Software Foundation (ASF) under one or more
+ * contributor license agreements.  See the NOTICE file distributed with
+ * this work for additional information regarding copyright ownership.
+ * The ASF licenses this file to You under the Apache License, Version 2.0
+ * (the "License"); you may not use this file except in compliance with
+ * the License.  You may obtain a copy of the License at
+ *
+ *      http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+package org.apache.sentry.binding.hive.conf;
+
+public class InvalidConfigurationException extends Exception {
+  private static final long serialVersionUID = 1L;
+
+  // Parameterless constructor
+  public InvalidConfigurationException() {}
+
+  // Constructor that accepts a message
+  public InvalidConfigurationException(String message) {
+    super(message);
+  }
+}
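
This exception carries no behavior beyond its message; the binding layer raises it when it detects a configuration combination it refuses to run with. A hedged sketch of that pattern follows; validateImpersonation() is a hypothetical helper invented here for illustration, not part of the patch:

    import org.apache.sentry.binding.hive.conf.InvalidConfigurationException;

    public class ConfigValidationSketch {
      // Hypothetical check: impersonation must be explicitly allowed.
      static void validateImpersonation(boolean doAsEnabled, boolean allowImpersonation)
          throws InvalidConfigurationException {
        if (doAsEnabled && !allowImpersonation) {
          throw new InvalidConfigurationException(
              "hive.server2.enable.doAs is set but sentry.hive.allow.hive.impersonation is false");
        }
      }

      public static void main(String[] args) {
        try {
          validateImpersonation(true, false);
        } catch (InvalidConfigurationException e) {
          System.err.println("Refusing to start: " + e.getMessage());
        }
      }
    }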

http://git-wip-us.apache.org/repos/asf/sentry/blob/7a30c819/sentry-binding/sentry-binding-hive-common/src/main/java/org/apache/sentry/binding/metastore/AuthorizingObjectStoreBase.java
----------------------------------------------------------------------
diff --git a/sentry-binding/sentry-binding-hive-common/src/main/java/org/apache/sentry/binding/metastore/AuthorizingObjectStoreBase.java b/sentry-binding/sentry-binding-hive-common/src/main/java/org/apache/sentry/binding/metastore/AuthorizingObjectStoreBase.java
new file mode 100644
index 0000000..196bd2b
--- /dev/null
+++ b/sentry-binding/sentry-binding-hive-common/src/main/java/org/apache/sentry/binding/metastore/AuthorizingObjectStoreBase.java
@@ -0,0 +1,412 @@
+/**
+ * Licensed to the Apache Software Foundation (ASF) under one
+ * or more contributor license agreements.  See the NOTICE file
+ * distributed with this work for additional information
+ * regarding copyright ownership.  The ASF licenses this file
+ * to you under the Apache License, Version 2.0 (the
+ * "License"); you may not use this file except in compliance
+ * with the License.  You may obtain a copy of the License at
+ *
+ *     http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+
+package org.apache.sentry.binding.metastore;
+
+import java.io.IOException;
+import java.net.MalformedURLException;
+import java.net.URL;
+import java.util.List;
+import java.util.Set;
+
+import javax.security.auth.login.LoginException;
+
+import org.apache.hadoop.hive.conf.HiveConf;
+import org.apache.hadoop.hive.metastore.ObjectStore;
+import org.apache.hadoop.hive.metastore.api.ColumnStatistics;
+import org.apache.hadoop.hive.metastore.api.Database;
+import org.apache.hadoop.hive.metastore.api.Index;
+import org.apache.hadoop.hive.metastore.api.InvalidObjectException;
+import org.apache.hadoop.hive.metastore.api.MetaException;
+import org.apache.hadoop.hive.metastore.api.NoSuchObjectException;
+import org.apache.hadoop.hive.metastore.api.Partition;
+import org.apache.hadoop.hive.metastore.api.Table;
+import org.apache.hadoop.hive.metastore.api.UnknownDBException;
+import org.apache.hadoop.hive.ql.parse.SemanticException;
+import org.apache.hadoop.hive.ql.plan.HiveOperation;
+import org.apache.hadoop.hive.shims.Utils;
+import org.apache.sentry.binding.hive.HiveAuthzBindingHookBase;
+import org.apache.sentry.binding.hive.authz.HiveAuthzBinding;
+import org.apache.sentry.binding.hive.conf.HiveAuthzConf;
+import org.apache.sentry.binding.hive.conf.HiveAuthzConf.AuthzConfVars;
+
+import com.google.common.collect.ImmutableSet;
+import com.google.common.collect.Lists;
+import com.google.common.collect.Sets;
+
+/***
+ * This class wraps ObjectStore, the interface between the application logic
+ * and the database store, and performs authorization checks or filters the
+ * results while processing metastore requests.
+ * e.g.:
+ * Callers only receive back the objects they have privileges to access.
+ * If a request returns an object list (like getAllTables()), the result is
+ * filtered to exclude objects the requestor has no privilege to access.
+ */
+public class AuthorizingObjectStoreBase extends ObjectStore {
+  private static ImmutableSet<String> serviceUsers;
+  private static HiveConf hiveConf;
+  private static HiveAuthzConf authzConf;
+  private static HiveAuthzBinding hiveAuthzBinding;
+  private static final String NO_ACCESS_MESSAGE_TABLE =
+      "Table does not exist or insufficient privileges to access: ";
+  private static final String NO_ACCESS_MESSAGE_DATABASE =
+      "Database does not exist or insufficient privileges to access: ";
+
+  @Override
+  public List<String> getDatabases(String pattern) throws MetaException {
+    return filterDatabases(super.getDatabases(pattern));
+  }
+
+  @Override
+  public List<String> getAllDatabases() throws MetaException {
+    return filterDatabases(super.getAllDatabases());
+  }
+
+  @Override
+  public Database getDatabase(String name) throws NoSuchObjectException {
+    Database db = super.getDatabase(name);
+    try {
+      if (filterDatabases(Lists.newArrayList(name)).isEmpty()) {
+        throw new NoSuchObjectException(getNoAccessMessageForDB(name));
+      }
+    } catch (MetaException e) {
+      throw new NoSuchObjectException("Failed to authorized access to " + name
+          + " : " + e.getMessage());
+    }
+    return db;
+  }
+
+  @Override
+  public Table getTable(String dbName, String tableName) throws MetaException {
+    Table table = super.getTable(dbName, tableName);
+    if (table == null
+        || filterTables(dbName, Lists.newArrayList(tableName)).isEmpty()) {
+      return null;
+    }
+    return table;
+  }
+
+  @Override
+  public Partition getPartition(String dbName, String tableName,
+      List<String> part_vals) throws MetaException, NoSuchObjectException {
+    if (filterTables(dbName, Lists.newArrayList(tableName)).isEmpty()) {
+      throw new NoSuchObjectException(getNoAccessMessageForTable(dbName, tableName));
+    }
+    return super.getPartition(dbName, tableName, part_vals);
+  }
+
+  @Override
+  public List<Partition> getPartitions(String dbName, String tableName,
+      int maxParts) throws MetaException, NoSuchObjectException {
+    if (filterTables(dbName, Lists.newArrayList(tableName)).isEmpty()) {
+      throw new MetaException(getNoAccessMessageForTable(dbName, tableName));
+    }
+    return super.getPartitions(dbName, tableName, maxParts);
+  }
+
+  @Override
+  public List<String> getTables(String dbName, String pattern)
+      throws MetaException {
+    return filterTables(dbName, super.getTables(dbName, pattern));
+  }
+
+  @Override
+  public List<Table> getTableObjectsByName(String dbname, List<String> tableNames)
+      throws MetaException, UnknownDBException {
+    return super.getTableObjectsByName(dbname, filterTables(dbname, tableNames));
+  }
+
+  @Override
+  public List<String> getAllTables(String dbName) throws MetaException {
+    return filterTables(dbName, super.getAllTables(dbName));
+  }
+
+  @Override
+  public List<String> listTableNamesByFilter(String dbName, String filter,
+      short maxTables) throws MetaException {
+    return filterTables(dbName,
+        super.listTableNamesByFilter(dbName, filter, maxTables));
+  }
+
+  @Override
+  public List<String> listPartitionNames(String dbName, String tableName,
+      short max_parts) throws MetaException {
+    if (filterTables(dbName, Lists.newArrayList(tableName)).isEmpty()) {
+      throw new MetaException(getNoAccessMessageForTable(dbName, tableName));
+    }
+    return super.listPartitionNames(dbName, tableName, max_parts);
+  }
+
+  @Override
+  public List<String> listPartitionNamesByFilter(String dbName,
+      String tableName, String filter, short max_parts) throws MetaException {
+    if (filterTables(dbName, Lists.newArrayList(tableName)).isEmpty()) {
+      throw new MetaException(getNoAccessMessageForTable(dbName, tableName));
+    }
+    return super.listPartitionNamesByFilter(dbName, tableName, filter,
+        max_parts);
+  }
+
+  @Override
+  public Index getIndex(String dbName, String origTableName, String indexName)
+      throws MetaException {
+    if (filterTables(dbName, Lists.newArrayList(origTableName)).isEmpty()) {
+      throw new MetaException(getNoAccessMessageForTable(dbName, origTableName));
+    }
+    return super.getIndex(dbName, origTableName, indexName);
+  }
+
+  @Override
+  public List<Index> getIndexes(String dbName, String origTableName, int max)
+      throws MetaException {
+    if (filterTables(dbName, Lists.newArrayList(origTableName)).isEmpty()) {
+      throw new MetaException(getNoAccessMessageForTable(dbName, origTableName));
+    }
+    return super.getIndexes(dbName, origTableName, max);
+  }
+
+  @Override
+  public List<String> listIndexNames(String dbName, String origTableName,
+      short max) throws MetaException {
+    if (filterTables(dbName, Lists.newArrayList(origTableName)).isEmpty()) {
+      throw new MetaException(getNoAccessMessageForTable(dbName, origTableName));
+    }
+    return super.listIndexNames(dbName, origTableName, max);
+  }
+
+  @Override
+  public List<Partition> getPartitionsByFilter(String dbName,
+      String tblName, String filter, short maxParts) throws MetaException,
+      NoSuchObjectException {
+    if (filterTables(dbName, Lists.newArrayList(tblName)).isEmpty()) {
+      throw new MetaException(getNoAccessMessageForTable(dbName, tblName));
+    }
+    return super.getPartitionsByFilter(dbName, tblName, filter, maxParts);
+  }
+
+  @Override
+  public List<Partition> getPartitionsByNames(String dbName, String tblName,
+      List<String> partNames) throws MetaException, NoSuchObjectException {
+    if (filterTables(dbName, Lists.newArrayList(tblName)).isEmpty()) {
+      throw new MetaException(getNoAccessMessageForTable(dbName, tblName));
+    }
+    return super.getPartitionsByNames(dbName, tblName, partNames);
+  }
+
+  @Override
+  public Partition getPartitionWithAuth(String dbName, String tblName,
+      List<String> partVals, String user_name, List<String> group_names)
+      throws MetaException, NoSuchObjectException, InvalidObjectException {
+    if (filterTables(dbName, Lists.newArrayList(tblName)).isEmpty()) {
+      throw new MetaException(getNoAccessMessageForTable(dbName, tblName));
+    }
+    return super.getPartitionWithAuth(dbName, tblName, partVals, user_name,
+        group_names);
+  }
+
+  @Override
+  public List<Partition> getPartitionsWithAuth(String dbName, String tblName,
+      short maxParts, String userName, List<String> groupNames)
+      throws MetaException, NoSuchObjectException, InvalidObjectException {
+    if (filterTables(dbName, Lists.newArrayList(tblName)).isEmpty()) {
+      throw new MetaException(getNoAccessMessageForTable(dbName, tblName));
+    }
+    return super.getPartitionsWithAuth(dbName, tblName, maxParts, userName,
+        groupNames);
+  }
+
+  @Override
+  public List<String> listPartitionNamesPs(String dbName, String tblName,
+      List<String> part_vals, short max_parts) throws MetaException,
+      NoSuchObjectException {
+    if (filterTables(dbName, Lists.newArrayList(tblName)).isEmpty()) {
+      throw new MetaException(getNoAccessMessageForTable(dbName, tblName));
+    }
+    return super.listPartitionNamesPs(dbName, tblName, part_vals, max_parts);
+  }
+
+  @Override
+  public List<Partition> listPartitionsPsWithAuth(String dbName,
+      String tblName, List<String> part_vals, short max_parts, String userName,
+      List<String> groupNames) throws MetaException, InvalidObjectException,
+      NoSuchObjectException {
+    if (filterTables(dbName, Lists.newArrayList(tblName)).isEmpty()) {
+      throw new MetaException(getNoAccessMessageForTable(dbName, tblName));
+    }
+    return super.listPartitionsPsWithAuth(dbName, tblName, part_vals,
+        max_parts, userName, groupNames);
+  }
+
+  @Override
+  public ColumnStatistics getTableColumnStatistics(String dbName,
+      String tableName, List<String> colNames) throws MetaException,
+      NoSuchObjectException {
+    if (filterTables(dbName, Lists.newArrayList(tableName)).isEmpty()) {
+      throw new MetaException(getNoAccessMessageForTable(dbName, tableName));
+    }
+    return super.getTableColumnStatistics(dbName, tableName, colNames);
+  }
+
+  @Override
+  public List<ColumnStatistics> getPartitionColumnStatistics(
+      String dbName, String tblName, List<String> partNames,
+      List<String> colNames) throws MetaException, NoSuchObjectException {
+    if (filterTables(dbName, Lists.newArrayList(tblName)).isEmpty()) {
+      throw new MetaException(getNoAccessMessageForTable(dbName, tblName));
+    }
+    return super.getPartitionColumnStatistics(dbName, tblName, partNames,
+        colNames);
+  }
+
+  /**
+   * Invoke Hive database filtering to remove the entries the user has no
+   * privileges to access
+   * @param dbList the database names to filter
+   * @return the filtered database list
+   * @throws MetaException
+   */
+  private List<String> filterDatabases(List<String> dbList)
+      throws MetaException {
+    if (needsAuthorization(getUserName())) {
+      try {
+        return HiveAuthzBindingHookBase.filterShowDatabases(getHiveAuthzBinding(),
+            dbList, HiveOperation.SHOWDATABASES, getUserName());
+      } catch (SemanticException e) {
+        throw new MetaException("Error getting DB list " + e.getMessage());
+      }
+    } else {
+      return dbList;
+    }
+  }
+
+  /**
+   * Invoke Hive table filtering to remove the entries the user has no
+   * privileges to access
+   * @param dbName the database containing the tables
+   * @param tabList the table names to filter
+   * @return the filtered table list
+   * @throws MetaException
+   */
+  protected List<String> filterTables(String dbName, List<String> tabList)
+      throws MetaException {
+    if (needsAuthorization(getUserName())) {
+      try {
+        return HiveAuthzBindingHookBase.filterShowTables(getHiveAuthzBinding(),
+            tabList, HiveOperation.SHOWTABLES, getUserName(), dbName);
+      } catch (SemanticException e) {
+        throw new MetaException("Error getting Table list " + e.getMessage());
+      }
+    } else {
+      return tabList;
+    }
+  }
+
+  /**
+   * Load the Hive authz binding, creating and caching it on first use
+   * @return the cached HiveAuthzBinding
+   * @throws MetaException if the binding cannot be created
+   */
+  private HiveAuthzBinding getHiveAuthzBinding() throws MetaException {
+    if (hiveAuthzBinding == null) {
+      try {
+        hiveAuthzBinding = new HiveAuthzBinding(HiveAuthzBinding.HiveHook.HiveMetaStore,
+            getHiveConf(), getAuthzConf());
+      } catch (Exception e) {
+        throw new MetaException("Failed to load Hive binding " + e.getMessage());
+      }
+    }
+    return hiveAuthzBinding;
+  }
+
+  private ImmutableSet<String> getServiceUsers() throws MetaException {
+    if (serviceUsers == null) {
+      serviceUsers = ImmutableSet.copyOf(toTrimmed(Sets.newHashSet(getAuthzConf().getStrings(
+          AuthzConfVars.AUTHZ_METASTORE_SERVICE_USERS.getVar(), new String[] { "" }))));
+    }
+    return serviceUsers;
+  }
+
+  private HiveConf getHiveConf() {
+    if (hiveConf == null) {
+      hiveConf = new HiveConf(getConf(), this.getClass());
+    }
+    return hiveConf;
+  }
+
+  private HiveAuthzConf getAuthzConf() throws MetaException {
+    if (authzConf == null) {
+      String hiveAuthzConf = getConf().get(HiveAuthzConf.HIVE_SENTRY_CONF_URL);
+      if (hiveAuthzConf == null
+          || (hiveAuthzConf = hiveAuthzConf.trim()).isEmpty()) {
+        throw new MetaException("Configuration key "
+            + HiveAuthzConf.HIVE_SENTRY_CONF_URL + " value '" + hiveAuthzConf
+            + "' is invalid.");
+      }
+      try {
+        authzConf = new HiveAuthzConf(new URL(hiveAuthzConf));
+      } catch (MalformedURLException e) {
+        throw new MetaException("Configuration key "
+            + HiveAuthzConf.HIVE_SENTRY_CONF_URL
+            + " specifies a malformed URL '" + hiveAuthzConf + "' "
+            + e.getMessage());
+      }
+    }
+    return authzConf;
+  }
+
+  /**
+   * Extract the user name from the underlying auth subsystem
+   * @return the short user name of the current UGI
+   * @throws MetaException if the user cannot be determined
+   */
+  private String getUserName() throws MetaException {
+    try {
+      return Utils.getUGI().getShortUserName();
+    } catch (LoginException e) {
+      throw new MetaException("Failed to get username " + e.getMessage());
+    } catch (IOException e) {
+      throw new MetaException("Failed to get username " + e.getMessage());
+    }
+  }
+
+  /**
+   * Check if the given user needs to be authorized; users on the service
+   * user list are exempt.
+   * @param userName the user to check
+   * @return true if requests from this user must be authorized
+   */
+  private boolean needsAuthorization(String userName) throws MetaException {
+    return !getServiceUsers().contains(userName.trim());
+  }
+
+  private static Set<String> toTrimmed(Set<String> s) {
+    Set<String> result = Sets.newHashSet();
+    for (String v : s) {
+      result.add(v.trim());
+    }
+    return result;
+  }
+
+  protected String getNoAccessMessageForTable(String dbName, String tableName) {
+    return NO_ACCESS_MESSAGE_TABLE + "<" + dbName + ">.<" + tableName + ">";
+  }
+
+  private String getNoAccessMessageForDB(String dbName) {
+    return NO_ACCESS_MESSAGE_DATABASE + "<" + dbName + ">";
+  }
+}
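
Because every read path above funnels through filterDatabases()/filterTables(), a deployment only has to point the metastore's RawStore implementation at this class (or a subclass) to get filtered results. A sketch of that wiring, assuming the standard hive.metastore.rawstore.impl key; note that sentry.metastore.service.users is read from sentry-site.xml via getAuthzConf() above, not from hive-site.xml:

    import org.apache.hadoop.hive.conf.HiveConf;

    public class MetastoreWiringSketch {
      public static void main(String[] args) {
        HiveConf hiveConf = new HiveConf();
        // Route metastore object access through the authorizing wrapper
        // (a concrete subclass may be required depending on the Hive version).
        hiveConf.set("hive.metastore.rawstore.impl",
            "org.apache.sentry.binding.metastore.AuthorizingObjectStoreBase");
        System.out.println(hiveConf.get("hive.metastore.rawstore.impl"));

        // In sentry-site.xml, users listed under sentry.metastore.service.users
        // (e.g. "hive,impala") bypass filtering entirely; see needsAuthorization().
      }
    }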
