Change Hive version to Apache Hive 0.15 for authorization V2 (Prasad Mujumdar, reviewed by Lenni Kuff)


Project: http://git-wip-us.apache.org/repos/asf/incubator-sentry/repo
Commit: http://git-wip-us.apache.org/repos/asf/incubator-sentry/commit/66eef78f
Tree: http://git-wip-us.apache.org/repos/asf/incubator-sentry/tree/66eef78f
Diff: http://git-wip-us.apache.org/repos/asf/incubator-sentry/diff/66eef78f

Branch: refs/heads/master
Commit: 66eef78f2b3207c72cfb399918d0d09a9c3f72c8
Parents: d3ff8d0
Author: Prasad Mujumdar <pras...@cloudera.com>
Authored: Fri Dec 12 18:21:23 2014 -0800
Committer: Prasad Mujumdar <pras...@cloudera.com>
Committed: Fri Dec 12 18:21:23 2014 -0800

----------------------------------------------------------------------
 pom.xml                                         |  53 ++++-
 .../ql/exec/SentryHivePrivilegeObjectDesc.java  |   9 -
 .../binding/hive/HiveAuthzBindingHook.java      |  41 ++--
 .../hive/HiveAuthzBindingSessionHook.java       |   9 +-
 .../SentryHiveAuthorizationTaskFactoryImpl.java |  36 ++--
 .../hive/authz/HiveAuthzPrivilegesMap.java      |   6 +-
 .../binding/hive/authz/SentryConfigTool.java    |   2 +-
 .../sentry/binding/hive/conf/HiveAuthzConf.java |   4 +-
 .../metastore/AuthorizingObjectStore.java       |   4 +-
 .../metastore/MetastoreAuthzBinding.java        |   4 +-
 .../metastore/SentryMetaStoreFilterHook.java    | 196 +++++++++++++++++++
 .../binding/hive/TestHiveAuthzBindingHook.java  |  86 --------
 .../binding/hive/TestHiveAuthzBindings.java     |   4 +-
 .../TestSentryHiveAuthorizationTaskFactory.java |  46 +++++
 .../org/apache/sentry/hdfs/MetastorePlugin.java |   4 -
 sentry-provider/sentry-provider-db/.gitignore   |   1 +
 sentry-tests/sentry-tests-hive/pom.xml          | 109 +++++------
 .../apache/sentry/tests/e2e/hive/Context.java   |   8 +-
 .../e2e/hive/TestMetadataObjectRetrieval.java   |   9 +-
 .../sentry/tests/e2e/hive/TestOperations.java   |  10 +-
 .../e2e/hive/TestPrivilegesAtFunctionScope.java |   8 +-
 .../e2e/hive/TestPrivilegesAtTableScope.java    |  25 +--
 .../sentry/tests/e2e/hive/fs/MiniDFS.java       |  10 +-
 .../e2e/hive/hiveserver/HiveServerFactory.java  |  20 +-
 .../e2e/hive/hiveserver/InternalHiveServer.java |   9 +-
 .../hiveserver/InternalMetastoreServer.java     |   5 -
 ...actMetastoreTestWithStaticConfiguration.java |   3 +-
 .../metastore/TestAuthorizingObjectStore.java   |  85 ++++----
 .../e2e/metastore/TestMetaStoreWithPigHCat.java |   6 +
 sentry-tests/sentry-tests-hive/testutil/hadoop  | 107 ++++++++++
 30 files changed, 604 insertions(+), 315 deletions(-)
----------------------------------------------------------------------


http://git-wip-us.apache.org/repos/asf/incubator-sentry/blob/66eef78f/pom.xml
----------------------------------------------------------------------
diff --git a/pom.xml b/pom.xml
index 78859be..49bcf98 100644
--- a/pom.xml
+++ b/pom.xml
@@ -67,25 +67,28 @@ limitations under the License.
     <jdo-api.version>3.0.1</jdo-api.version>
     <derby.version>10.10.2.0</derby.version>
     <commons-cli.version>1.2</commons-cli.version>
-    <hive.version>0.13.1-cdh5.3.0-SNAPSHOT</hive.version>
+    <hive.version>0.15.0-SNAPSHOT</hive.version>
     <hadoop.version>2.5.0-cdh5.2.0-SNAPSHOT</hadoop.version>
     <fest.reflect.version>1.4.1</fest.reflect.version>
     <guava.version>11.0.2</guava.version>
     <junit.version>4.9</junit.version>
-    <libthrift.version>0.9.0-cdh4-1</libthrift.version>
-    <libfb303.version>0.9.0</libfb303.version>
+    <libthrift.version>0.9.2</libthrift.version>
+    <libfb303.version>0.9.2</libfb303.version>
+    <curator.version>2.6.0</curator.version>
     <log4j.version>1.2.16</log4j.version>
     <mockito.version>1.8.5</mockito.version>
     <shiro.version>1.2.1</shiro.version>
     <slf4j.version>1.6.1</slf4j.version>
     <solr.version>4.10.2</solr.version>
    <solr.sentry.handlers.version>4.4.0-cdh5.2.0-SNAPSHOT</solr.sentry.handlers.version>
-    <zookeeper.version>3.4.5-cdh5.1.0-SNAPSHOT</zookeeper.version>
-    <pig.version>0.12.0-cdh5.1.0-SNAPSHOT</pig.version>
+    <zookeeper.version>3.4.5</zookeeper.version>
+    <pig.version>0.12.0</pig.version>
     <jackson.version>1.8.8</jackson.version>
     <metrics.version>3.1.0</metrics.version>
     <jettyVersion>7.6.16.v20140903</jettyVersion>
     <curator.version>2.6.0</curator.version>
+    <joda-time.version>2.5</joda-time.version>
+    <test.sentry.hadoop.classpath>${maven.test.classpath}</test.sentry.hadoop.classpath>
   </properties>
 
   <dependencyManagement>
@@ -436,6 +439,27 @@ limitations under the License.
         <artifactId>curator-test</artifactId>
         <version>${curator.version}</version>
       </dependency>
+      <dependency>
+        <groupId>org.apache.curator</groupId>
+        <artifactId>curator-framework</artifactId>
+        <version>${curator.version}</version>
+      </dependency>
+      <dependency>
+        <groupId>org.apache.zookeeper</groupId>
+        <artifactId>zookeeper</artifactId>
+        <version>${zookeeper.version}</version>
+        <exclusions>
+          <exclusion>
+            <groupId>org.jboss.netty</groupId>
+            <artifactId>netty</artifactId>
+          </exclusion>
+        </exclusions>
+      </dependency>
+      <dependency>
+        <groupId>joda-time</groupId>
+        <artifactId>joda-time</artifactId>
+        <version>${joda-time.version}</version>
+      </dependency>
     </dependencies>
   </dependencyManagement>
 
@@ -471,6 +495,25 @@ limitations under the License.
          <sourceIncludes><include>src/gen/thrift/gen-javabean/**</include></sourceIncludes>
         </configuration>
       </plugin>
+      <plugin>
+        <groupId>org.apache.maven.plugins</groupId>
+        <artifactId>maven-antrun-plugin</artifactId>
+        <executions>
+          <execution>
+            <id>define-classpath</id>
+            <phase>process-resources</phase>
+            <goals>
+              <goal>run</goal>
+            </goals>
+            <configuration>
+              <exportAntProperties>true</exportAntProperties>
+              <target>
+                <property name="maven.test.classpath" refid="maven.test.classpath"/>
+              </target>
+            </configuration>
+          </execution>
+        </executions>
+      </plugin>
 
 <!---
       <plugin>

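Note: the antrun execution above materializes the resolved test classpath as the Ant property maven.test.classpath, which the new test.sentry.hadoop.classpath property references; sentry-tests-hive/pom.xml further down additionally passes it to tests as the SENTRY_HADOOP_TEST_CLASSPATH environment variable. A minimal sketch of reading either hand-off; which one a given test consumes is an assumption here:

    public class ClasspathProbe {
      public static void main(String[] args) {
        // Both names are defined by this commit; the hand-off used by any
        // particular test is an assumption.
        System.out.println("property: "
            + System.getProperty("test.sentry.hadoop.classpath"));
        System.out.println("env:      "
            + System.getenv("SENTRY_HADOOP_TEST_CLASSPATH"));
      }
    }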
http://git-wip-us.apache.org/repos/asf/incubator-sentry/blob/66eef78f/sentry-binding/sentry-binding-hive/src/main/java/org/apache/hadoop/hive/ql/exec/SentryHivePrivilegeObjectDesc.java
----------------------------------------------------------------------
diff --git a/sentry-binding/sentry-binding-hive/src/main/java/org/apache/hadoop/hive/ql/exec/SentryHivePrivilegeObjectDesc.java b/sentry-binding/sentry-binding-hive/src/main/java/org/apache/hadoop/hive/ql/exec/SentryHivePrivilegeObjectDesc.java
index 0c59abb..18cdde2 100644
--- a/sentry-binding/sentry-binding-hive/src/main/java/org/apache/hadoop/hive/ql/exec/SentryHivePrivilegeObjectDesc.java
+++ b/sentry-binding/sentry-binding-hive/src/main/java/org/apache/hadoop/hive/ql/exec/SentryHivePrivilegeObjectDesc.java
@@ -25,7 +25,6 @@ import org.apache.hadoop.hive.ql.plan.PrivilegeObjectDesc;
 public class SentryHivePrivilegeObjectDesc extends PrivilegeObjectDesc {
   private boolean isUri;
   private boolean isServer;
-  private List<String> columns = new ArrayList<String>();
 
   public SentryHivePrivilegeObjectDesc() {
     // reset table type which is on by default
@@ -48,12 +47,4 @@ public class SentryHivePrivilegeObjectDesc extends PrivilegeObjectDesc {
     this.isServer = isServer;
   }
 
-  public List<String> getColumns() {
-    return columns;
-  }
-
-  public void setColumns(List<String> columns) {
-    this.columns = columns;
-  }
-
 }
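Note: the columns field and its accessors can be deleted here even though the class still needs them -- SentryHiveAuthorizationTaskFactoryImpl below keeps calling subject.setColumns(...) -- so they are assumed to be inherited from Hive 0.15's PrivilegeObjectDesc. A hedged sketch of the resulting usage:

    import java.util.Arrays;
    import org.apache.hadoop.hive.ql.exec.SentryHivePrivilegeObjectDesc;

    public class ColumnsSketch {
      public static void main(String[] args) {
        SentryHivePrivilegeObjectDesc subject = new SentryHivePrivilegeObjectDesc();
        // Assumption: setColumns(...) now resolves to the accessor inherited
        // from Hive 0.15's PrivilegeObjectDesc rather than the deleted one.
        subject.setColumns(Arrays.asList("col_a", "col_b"));
      }
    }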

http://git-wip-us.apache.org/repos/asf/incubator-sentry/blob/66eef78f/sentry-binding/sentry-binding-hive/src/main/java/org/apache/sentry/binding/hive/HiveAuthzBindingHook.java
----------------------------------------------------------------------
diff --git a/sentry-binding/sentry-binding-hive/src/main/java/org/apache/sentry/binding/hive/HiveAuthzBindingHook.java b/sentry-binding/sentry-binding-hive/src/main/java/org/apache/sentry/binding/hive/HiveAuthzBindingHook.java
index 97ef3b8..d096551 100644
--- a/sentry-binding/sentry-binding-hive/src/main/java/org/apache/sentry/binding/hive/HiveAuthzBindingHook.java
+++ b/sentry-binding/sentry-binding-hive/src/main/java/org/apache/sentry/binding/hive/HiveAuthzBindingHook.java
@@ -31,6 +31,7 @@ import java.util.Set;
 import org.apache.hadoop.hive.common.JavaUtils;
 import org.apache.hadoop.hive.conf.HiveConf;
 import org.apache.hadoop.hive.conf.HiveConf.ConfVars;
+import org.apache.hadoop.hive.ql.exec.FunctionRegistry;
 import org.apache.hadoop.hive.ql.exec.SentryGrantRevokeTask;
 import org.apache.hadoop.hive.ql.exec.Task;
 import org.apache.hadoop.hive.ql.hooks.Entity;
@@ -78,6 +79,8 @@ public class HiveAuthzBindingHook extends AbstractSemanticAnalyzerHook {
   private Table currTab;
   private AccessURI udfURI;
   private AccessURI partitionURI;
+  private Table currOutTab = null;
+  private Database currOutDB = null;
 
   public HiveAuthzBindingHook() throws Exception {
     SessionState session = SessionState.get();
@@ -197,9 +200,10 @@ public class HiveAuthzBindingHook extends AbstractSemanticAnalyzerHook {
         break;
       case HiveParser.TOK_MSCK:
        // token name TOK_TABNAME is not properly set in this case and child(0) does
-         // not contain the table name.
-         currTab = extractTable((ASTNode)ast.getChild(1));
-        currDB = extractDatabase((ASTNode)ast.getChild(0));
+        // not contain the table name.
+        // TODO: Fix Hive to capture the table and DB name
+        currOutTab = extractTable((ASTNode)ast.getChild(1));
+        currOutDB  = extractDatabase((ASTNode)ast.getChild(0));
         break;
       case HiveParser.TOK_ALTERTABLE_ADDPARTS:
         /*
@@ -354,7 +358,7 @@ public class HiveAuthzBindingHook extends AbstractSemanticAnalyzerHook {
       HiveOperation hiveOp, AuthorizationException e) {
     SentryOnFailureHookContext hookCtx = new SentryOnFailureHookContextImpl(
         context.getCommand(), context.getInputs(), context.getOutputs(),
-        hiveOp, currDB, currTab, udfURI, partitionURI, context.getUserName(),
+        hiveOp, currDB, currTab, udfURI, null, context.getUserName(),
         context.getIpAddress(), e, context.getConf());
     String csHooks = authzConf.get(
         HiveAuthzConf.AuthzConfVars.AUTHZ_ONFAILURE_HOOKS.getVar(), "").trim();
@@ -438,7 +442,7 @@ public class HiveAuthzBindingHook extends AbstractSemanticAnalyzerHook {
         outputHierarchy.add(entityHierarchy);
       }
       // workaround for metadata queries.
-      // Capture the table name in pre-analyze and include that in the entity list
+      // Capture the table name in pre-analyze and include that in the input entity list
       if (currTab != null) {
        List<DBModelAuthorizable> externalAuthorizableHierarchy = new ArrayList<DBModelAuthorizable>();
         externalAuthorizableHierarchy.add(hiveAuthzBinding.getAuthServer());
@@ -446,6 +450,16 @@ public class HiveAuthzBindingHook extends AbstractSemanticAnalyzerHook {
         externalAuthorizableHierarchy.add(currTab);
         inputHierarchy.add(externalAuthorizableHierarchy);
       }
+
+      // workaround for DDL statements
+      // Capture the table name in pre-analyze and include that in the output entity list
+      if (currOutTab != null) {
+        List<DBModelAuthorizable> externalAuthorizableHierarchy = new ArrayList<DBModelAuthorizable>();
+        externalAuthorizableHierarchy.add(hiveAuthzBinding.getAuthServer());
+        externalAuthorizableHierarchy.add(currOutDB);
+        externalAuthorizableHierarchy.add(currOutTab);
+        outputHierarchy.add(externalAuthorizableHierarchy);
+      }
       break;
     case CONNECT:
       /* The 'CONNECT' is an implicit privilege scope currently used for
@@ -504,13 +518,7 @@ public class HiveAuthzBindingHook extends AbstractSemanticAnalyzerHook {
   }
 
   private boolean isUDF(ReadEntity readEntity) {
-    return readEntity.getType().equals(Type.UDF);
-  }
-
-  private boolean isBuiltinUDF(ReadEntity readEntity) {
-    return readEntity.getType().equals(Type.UDF) &&
-        readEntity.getUDF().isNative();
-
+    return readEntity.getType().equals(Type.FUNCTION);
   }
 
  private void checkUDFWhiteList(String queryUDF) throws AuthorizationException {
@@ -561,6 +569,7 @@ public class HiveAuthzBindingHook extends AbstractSemanticAnalyzerHook {
       }
       break;
     case DATABASE:
+    case FUNCTION:
       // TODO use database entities from compiler instead of capturing from AST
       break;
     default:
@@ -612,14 +621,6 @@ public class HiveAuthzBindingHook extends AbstractSemanticAnalyzerHook {
       if (isChildTabForView(readEntity)) {
         continue;
       }
-      // If this is a UDF, then check whether its allowed to be executed
-      // TODO: when we support execute privileges on UDF, this can be removed.
-      if (isUDF(readEntity)) {
-        if (isBuiltinUDF(readEntity)) {
-          checkUDFWhiteList(readEntity.getUDF().getDisplayName());
-        }
-        continue;
-      }
      if (readEntity.getAccessedColumns() != null && !readEntity.getAccessedColumns().isEmpty()) {
         addColumnHierarchy(inputHierarchy, readEntity);
       } else {

http://git-wip-us.apache.org/repos/asf/incubator-sentry/blob/66eef78f/sentry-binding/sentry-binding-hive/src/main/java/org/apache/sentry/binding/hive/HiveAuthzBindingSessionHook.java
----------------------------------------------------------------------
diff --git a/sentry-binding/sentry-binding-hive/src/main/java/org/apache/sentry/binding/hive/HiveAuthzBindingSessionHook.java b/sentry-binding/sentry-binding-hive/src/main/java/org/apache/sentry/binding/hive/HiveAuthzBindingSessionHook.java
index 716b8fb..cc7ef45 100644
--- a/sentry-binding/sentry-binding-hive/src/main/java/org/apache/sentry/binding/hive/HiveAuthzBindingSessionHook.java
+++ b/sentry-binding/sentry-binding-hive/src/main/java/org/apache/sentry/binding/hive/HiveAuthzBindingSessionHook.java
@@ -25,6 +25,7 @@ import org.apache.hadoop.hive.ql.security.authorization.plugin.HiveAuthorizer;
 import org.apache.hadoop.hive.ql.security.authorization.plugin.HiveAuthorizerFactory;
 import org.apache.hadoop.hive.ql.security.authorization.plugin.HiveAuthorizerImpl;
 import org.apache.hadoop.hive.ql.security.authorization.plugin.HiveAuthzPluginException;
+import org.apache.hadoop.hive.ql.security.authorization.plugin.HiveAuthzSessionContext;
 import org.apache.hadoop.hive.ql.security.authorization.plugin.HiveMetastoreClientFactory;
 import org.apache.hadoop.mapreduce.JobContext;
 import org.apache.hive.service.cli.HiveSQLException;
@@ -70,11 +71,9 @@ public class HiveAuthzBindingSessionHook
     @Override
     public HiveAuthorizer createHiveAuthorizer(
         HiveMetastoreClientFactory metastoreClientFactory, HiveConf conf,
-        HiveAuthenticationProvider hiveAuthenticator)
-        throws HiveAuthzPluginException {
-      // TODO Auto-generated method stub
-      return new SentryHiveAuthorizerImpl(null, null);
-    }
+        HiveAuthenticationProvider hiveAuthenticator,
+        HiveAuthzSessionContext ctx) throws HiveAuthzPluginException {
+      return new SentryHiveAuthorizerImpl(null, null);
+    }
   }
 
   public static class SentryHiveAuthorizerImpl extends HiveAuthorizerImpl {

http://git-wip-us.apache.org/repos/asf/incubator-sentry/blob/66eef78f/sentry-binding/sentry-binding-hive/src/main/java/org/apache/sentry/binding/hive/SentryHiveAuthorizationTaskFactoryImpl.java
----------------------------------------------------------------------
diff --git a/sentry-binding/sentry-binding-hive/src/main/java/org/apache/sentry/binding/hive/SentryHiveAuthorizationTaskFactoryImpl.java b/sentry-binding/sentry-binding-hive/src/main/java/org/apache/sentry/binding/hive/SentryHiveAuthorizationTaskFactoryImpl.java
index 9b44f15..5898b7e 100644
--- a/sentry-binding/sentry-binding-hive/src/main/java/org/apache/sentry/binding/hive/SentryHiveAuthorizationTaskFactoryImpl.java
+++ b/sentry-binding/sentry-binding-hive/src/main/java/org/apache/sentry/binding/hive/SentryHiveAuthorizationTaskFactoryImpl.java
@@ -34,6 +34,7 @@ import org.apache.hadoop.hive.ql.hooks.WriteEntity;
 import org.apache.hadoop.hive.ql.metadata.Hive;
 import org.apache.hadoop.hive.ql.parse.ASTNode;
 import org.apache.hadoop.hive.ql.parse.BaseSemanticAnalyzer;
+import org.apache.hadoop.hive.ql.parse.DDLSemanticAnalyzer;
 import org.apache.hadoop.hive.ql.parse.HiveParser;
 import org.apache.hadoop.hive.ql.parse.SemanticException;
 import org.apache.hadoop.hive.ql.parse.authorization.HiveAuthorizationTaskFactory;
@@ -218,7 +219,7 @@ public class SentryHiveAuthorizationTaskFactoryImpl implements HiveAuthorization
     }
 
     ShowGrantDesc showGrant = new ShowGrantDesc(resultFile.toString(),
-        principalDesc, privHiveObj, cols);
+        principalDesc, privHiveObj);
     return createTask(new DDLWork(inputs, outputs, showGrant));
   }
 
@@ -232,7 +233,7 @@ public class SentryHiveAuthorizationTaskFactoryImpl implements HiveAuthorization
      HashSet<ReadEntity> inputs, HashSet<WriteEntity> outputs) throws SemanticException {
     List<PrincipalDesc> principalDesc = analyzePrincipalListDef(
         (ASTNode) ast.getChild(0));
-    
+
     List<String> roles = new ArrayList<String>();
     for (int i = 1; i < ast.getChildCount(); i++) {
      roles.add(BaseSemanticAnalyzer.unescapeIdentifier(ast.getChild(i).getText()));
@@ -280,26 +281,33 @@ public class SentryHiveAuthorizationTaskFactoryImpl implements HiveAuthorization
   private SentryHivePrivilegeObjectDesc analyzePrivilegeObject(ASTNode ast)
       throws SemanticException {
    SentryHivePrivilegeObjectDesc subject = new SentryHivePrivilegeObjectDesc();
-    String privilegeObject = BaseSemanticAnalyzer.unescapeIdentifier(ast
-        .getChild(0).getText());
-    if (ast.getChildCount() > 1) {
-      for (int i = 1; i < ast.getChildCount(); i++) {
-        ASTNode astChild = (ASTNode) ast.getChild(i);
-        if (astChild.getToken().getType() == HiveParser.TOK_PARTSPEC) {
+    ASTNode astChild = (ASTNode) ast.getChild(0);
+    ASTNode gchild = (ASTNode) astChild.getChild(0);
+
+    String privilegeObject = BaseSemanticAnalyzer.unescapeIdentifier(gchild.getText());
+    subject.setObject(privilegeObject);
+      if (astChild.getToken().getType() == HiveParser.TOK_PARTSPEC) {
          throw new SemanticException(SentryHiveConstants.PARTITION_PRIVS_NOT_SUPPORTED);
-        } else if (astChild.getToken().getType() == HiveParser.TOK_TABCOLNAME) {
-          subject.setColumns(BaseSemanticAnalyzer.getColumnNames(astChild));
-        } else if (astChild.getToken().getType() == HiveParser.TOK_URI) {
+        } else if (astChild.getToken().getType() == HiveParser.TOK_URI_TYPE) {
+          privilegeObject = privilegeObject.replaceAll("'", "").replaceAll("\"", "");
+          subject.setObject(privilegeObject);
           subject.setUri(true);
-        } else if (astChild.getToken().getType() == HiveParser.TOK_SERVER) {
+        } else if (astChild.getToken().getType() == HiveParser.TOK_SERVER_TYPE) {
           subject.setServer(true);
+        } else if (astChild.getToken().getType() == HiveParser.TOK_TABLE_TYPE) {
           subject.setTable(true);
+          String[] qualified = BaseSemanticAnalyzer.getQualifiedTableName(gchild);
+          subject.setObject(qualified[1]);
+        }
+      for (int i = 1; i < astChild.getChildCount(); i++) {
+        gchild = (ASTNode) astChild.getChild(i);
+        if (gchild.getType() == HiveParser.TOK_PARTSPEC) {
+          throw new SemanticException(SentryHiveConstants.PARTITION_PRIVS_NOT_SUPPORTED);
+        } else if (gchild.getType() == HiveParser.TOK_TABCOLNAME) {
+          subject.setColumns(BaseSemanticAnalyzer.getColumnNames(gchild));
         }
       }
-    }
-    subject.setObject(privilegeObject);
+
     return subject;
   }
 

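Note: the rewrite above tracks a Hive 0.15 grammar change: the privilege object is now a typed AST node (TOK_URI_TYPE, TOK_SERVER_TYPE, TOK_TABLE_TYPE) whose own first child carries the object name, hence the new child(0)/getChild(0) walk. A hedged probe for inspecting the new tree shape, modeled on the parse helper in the deleted TestHiveAuthzBindingHook further down (the GRANT statement mirrors the new testGrantUri test):

    import org.apache.hadoop.hive.conf.HiveConf;
    import org.apache.hadoop.hive.ql.parse.ASTNode;
    import org.apache.hadoop.hive.ql.parse.ParseDriver;
    import org.apache.hadoop.hive.ql.parse.ParseUtils;
    import org.apache.hadoop.hive.ql.session.SessionState;

    public class GrantAstProbe {
      public static void main(String[] args) throws Exception {
        SessionState.start(new HiveConf());  // same bootstrap the tests use
        ParseDriver parseDriver = new ParseDriver();
        ASTNode ast = ParseUtils.findRootNonNullToken(
            parseDriver.parse("GRANT ALL ON URI '/tmp' TO ROLE role1"));
        // The typed privilege-object node and its name child should be
        // visible in the dumped tree.
        System.out.println(ast.toStringTree());
      }
    }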
http://git-wip-us.apache.org/repos/asf/incubator-sentry/blob/66eef78f/sentry-binding/sentry-binding-hive/src/main/java/org/apache/sentry/binding/hive/authz/HiveAuthzPrivilegesMap.java
----------------------------------------------------------------------
diff --git a/sentry-binding/sentry-binding-hive/src/main/java/org/apache/sentry/binding/hive/authz/HiveAuthzPrivilegesMap.java b/sentry-binding/sentry-binding-hive/src/main/java/org/apache/sentry/binding/hive/authz/HiveAuthzPrivilegesMap.java
index b5c7aff..3dedbc3 100644
--- a/sentry-binding/sentry-binding-hive/src/main/java/org/apache/sentry/binding/hive/authz/HiveAuthzPrivilegesMap.java
+++ b/sentry-binding/sentry-binding-hive/src/main/java/org/apache/sentry/binding/hive/authz/HiveAuthzPrivilegesMap.java
@@ -63,7 +63,7 @@ public class HiveAuthzPrivilegesMap {
         build();
 
    HiveAuthzPrivileges alterTablePrivilege = new HiveAuthzPrivileges.AuthzPrivilegeBuilder().
-        addInputObjectPriviledge(AuthorizableType.Table, EnumSet.of(DBModelAction.ALTER)).
+        addOutputObjectPriviledge(AuthorizableType.Table, EnumSet.of(DBModelAction.ALTER)).
         setOperationScope(HiveOperationScope.TABLE).
         setOperationType(HiveOperationType.DDL).
         build();
@@ -79,13 +79,13 @@ public class HiveAuthzPrivilegesMap {
         build();
 
    HiveAuthzPrivileges alterTableAndUriPrivilege = new HiveAuthzPrivileges.AuthzPrivilegeBuilder().
-        addInputObjectPriviledge(AuthorizableType.Table, EnumSet.of(DBModelAction.ALTER)).
+        addOutputObjectPriviledge(AuthorizableType.Table, EnumSet.of(DBModelAction.ALTER)).
        addOutputObjectPriviledge(AuthorizableType.URI, EnumSet.of(DBModelAction.ALL)).
        setOperationScope(HiveOperationScope.TABLE).
        setOperationType(HiveOperationType.DDL).
        build();
    HiveAuthzPrivileges addPartitionPrivilege = new HiveAuthzPrivileges.AuthzPrivilegeBuilder().
-        addInputObjectPriviledge(AuthorizableType.Table, EnumSet.of(DBModelAction.ALTER)).
+        addOutputObjectPriviledge(AuthorizableType.Table, EnumSet.of(DBModelAction.ALTER)).
        //TODO: Uncomment this if we want to make it more restrictive
        //addInputObjectPriviledge(AuthorizableType.Table, EnumSet.of(DBModelAction.CREATE)).
        addInputObjectPriviledge(AuthorizableType.URI, EnumSet.of(DBModelAction.SELECT)).//TODO: make it optional

http://git-wip-us.apache.org/repos/asf/incubator-sentry/blob/66eef78f/sentry-binding/sentry-binding-hive/src/main/java/org/apache/sentry/binding/hive/authz/SentryConfigTool.java
----------------------------------------------------------------------
diff --git a/sentry-binding/sentry-binding-hive/src/main/java/org/apache/sentry/binding/hive/authz/SentryConfigTool.java b/sentry-binding/sentry-binding-hive/src/main/java/org/apache/sentry/binding/hive/authz/SentryConfigTool.java
index 33bffa8..ecbd664 100644
--- a/sentry-binding/sentry-binding-hive/src/main/java/org/apache/sentry/binding/hive/authz/SentryConfigTool.java
+++ b/sentry-binding/sentry-binding-hive/src/main/java/org/apache/sentry/binding/hive/authz/SentryConfigTool.java
@@ -400,7 +400,7 @@ public class SentryConfigTool {
     // setup Hive driver
     SessionState session = new SessionState(getHiveConf());
     SessionState.start(session);
-    Driver driver = new Driver(session.getConf(), getUser(), null);
+    Driver driver = new Driver(session.getConf(), getUser());
 
     // compile the query
     CommandProcessorResponse compilerStatus = driver

http://git-wip-us.apache.org/repos/asf/incubator-sentry/blob/66eef78f/sentry-binding/sentry-binding-hive/src/main/java/org/apache/sentry/binding/hive/conf/HiveAuthzConf.java
----------------------------------------------------------------------
diff --git a/sentry-binding/sentry-binding-hive/src/main/java/org/apache/sentry/binding/hive/conf/HiveAuthzConf.java b/sentry-binding/sentry-binding-hive/src/main/java/org/apache/sentry/binding/hive/conf/HiveAuthzConf.java
index 93f19f3..0a3b509 100644
--- a/sentry-binding/sentry-binding-hive/src/main/java/org/apache/sentry/binding/hive/conf/HiveAuthzConf.java
+++ b/sentry-binding/sentry-binding-hive/src/main/java/org/apache/sentry/binding/hive/conf/HiveAuthzConf.java
@@ -39,8 +39,8 @@ public class HiveAuthzConf extends Configuration {
  public static final String HIVE_ACCESS_SUBJECT_NAME = "hive.access.subject.name";
  public static final String HIVE_SENTRY_SUBJECT_NAME = "hive.sentry.subject.name";
  public static final String HIVE_SENTRY_AUTH_ERRORS = "sentry.hive.authorization.errors";
-  public static final String HIVE_SENTRY_MOCK_COMPILATION = "hive.sentry.mock.compilation";
-  public static final String HIVE_SENTRY_MOCK_ERROR = "hive.sentry.mock.error";
+  public static final String HIVE_SENTRY_MOCK_COMPILATION = "sentry.hive.mock.compilation";
+  public static final String HIVE_SENTRY_MOCK_ERROR = "sentry.hive.mock.error";
  public static final String HIVE_SENTRY_PRIVILEGE_ERROR_MESSAGE = "No valid privileges";
  /**
   * Property used to persist the role set in the session. This is not public for now.

http://git-wip-us.apache.org/repos/asf/incubator-sentry/blob/66eef78f/sentry-binding/sentry-binding-hive/src/main/java/org/apache/sentry/binding/metastore/AuthorizingObjectStore.java
----------------------------------------------------------------------
diff --git a/sentry-binding/sentry-binding-hive/src/main/java/org/apache/sentry/binding/metastore/AuthorizingObjectStore.java b/sentry-binding/sentry-binding-hive/src/main/java/org/apache/sentry/binding/metastore/AuthorizingObjectStore.java
index fcd7292..5a0c950 100644
--- a/sentry-binding/sentry-binding-hive/src/main/java/org/apache/sentry/binding/metastore/AuthorizingObjectStore.java
+++ b/sentry-binding/sentry-binding-hive/src/main/java/org/apache/sentry/binding/metastore/AuthorizingObjectStore.java
@@ -40,6 +40,7 @@ import org.apache.hadoop.hive.metastore.api.UnknownDBException;
 import org.apache.hadoop.hive.ql.parse.SemanticException;
 import org.apache.hadoop.hive.ql.plan.HiveOperation;
 import org.apache.hadoop.hive.shims.ShimLoader;
+import org.apache.hadoop.hive.shims.Utils;
 import org.apache.sentry.binding.hive.HiveAuthzBindingHook;
 import org.apache.sentry.binding.hive.authz.HiveAuthzBinding;
 import org.apache.sentry.binding.hive.conf.HiveAuthzConf;
@@ -377,8 +378,7 @@ public class AuthorizingObjectStore extends ObjectStore {
    */
   private String getUserName() throws MetaException {
     try {
-      return ShimLoader.getHadoopShims().getUGIForConf(getHiveConf())
-          .getShortUserName();
+      return Utils.getUGI().getShortUserName();
     } catch (LoginException e) {
       throw new MetaException("Failed to get username " + e.getMessage());
     } catch (IOException e) {

http://git-wip-us.apache.org/repos/asf/incubator-sentry/blob/66eef78f/sentry-binding/sentry-binding-hive/src/main/java/org/apache/sentry/binding/metastore/MetastoreAuthzBinding.java
----------------------------------------------------------------------
diff --git a/sentry-binding/sentry-binding-hive/src/main/java/org/apache/sentry/binding/metastore/MetastoreAuthzBinding.java b/sentry-binding/sentry-binding-hive/src/main/java/org/apache/sentry/binding/metastore/MetastoreAuthzBinding.java
index e7f96c1..8d388d2 100644
--- a/sentry-binding/sentry-binding-hive/src/main/java/org/apache/sentry/binding/metastore/MetastoreAuthzBinding.java
+++ b/sentry-binding/sentry-binding-hive/src/main/java/org/apache/sentry/binding/metastore/MetastoreAuthzBinding.java
@@ -48,6 +48,7 @@ import org.apache.hadoop.hive.metastore.events.PreEventContext;
 import org.apache.hadoop.hive.ql.metadata.AuthorizationException;
 import org.apache.hadoop.hive.ql.plan.HiveOperation;
 import org.apache.hadoop.hive.shims.ShimLoader;
+import org.apache.hadoop.hive.shims.Utils;
 import org.apache.sentry.binding.hive.authz.HiveAuthzBinding;
 import org.apache.sentry.binding.hive.authz.HiveAuthzPrivilegesMap;
 import org.apache.sentry.binding.hive.conf.HiveAuthzConf;
@@ -430,8 +431,7 @@ public class MetastoreAuthzBinding extends MetaStorePreEventListener {
 
   private String getUserName() throws MetaException {
     try {
-      return ShimLoader.getHadoopShims().getUGIForConf(hiveConf)
-          .getShortUserName();
+      return Utils.getUGI().getShortUserName();
     } catch (LoginException e) {
       throw new MetaException("Failed to get username " + e.getMessage());
     } catch (IOException e) {

http://git-wip-us.apache.org/repos/asf/incubator-sentry/blob/66eef78f/sentry-binding/sentry-binding-hive/src/main/java/org/apache/sentry/binding/metastore/SentryMetaStoreFilterHook.java
----------------------------------------------------------------------
diff --git a/sentry-binding/sentry-binding-hive/src/main/java/org/apache/sentry/binding/metastore/SentryMetaStoreFilterHook.java b/sentry-binding/sentry-binding-hive/src/main/java/org/apache/sentry/binding/metastore/SentryMetaStoreFilterHook.java
new file mode 100644
index 0000000..5e26e83
--- /dev/null
+++ b/sentry-binding/sentry-binding-hive/src/main/java/org/apache/sentry/binding/metastore/SentryMetaStoreFilterHook.java
@@ -0,0 +1,196 @@
+/**
+ * Licensed to the Apache Software Foundation (ASF) under one
+ * or more contributor license agreements.  See the NOTICE file
+ * distributed with this work for additional information
+ * regarding copyright ownership.  The ASF licenses this file
+ * to you under the Apache License, Version 2.0 (the
+ * "License"); you may not use this file except in compliance
+ * with the License.  You may obtain a copy of the License at
+ *
+ *     http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+package org.apache.sentry.binding.metastore;
+
+import java.net.MalformedURLException;
+import java.net.URL;
+import java.util.ArrayList;
+import java.util.List;
+
+import org.apache.commons.logging.Log;
+import org.apache.commons.logging.LogFactory;
+import org.apache.hadoop.hive.conf.HiveConf;
+import org.apache.hadoop.hive.metastore.MetaStoreFilterHook;
+import org.apache.hadoop.hive.metastore.api.Database;
+import org.apache.hadoop.hive.metastore.api.Index;
+import org.apache.hadoop.hive.metastore.api.MetaException;
+import org.apache.hadoop.hive.metastore.api.NoSuchObjectException;
+import org.apache.hadoop.hive.metastore.api.Partition;
+import org.apache.hadoop.hive.metastore.api.PartitionSpec;
+import org.apache.hadoop.hive.metastore.api.Table;
+import org.apache.hadoop.hive.ql.parse.SemanticException;
+import org.apache.hadoop.hive.ql.plan.HiveOperation;
+import org.apache.hadoop.hive.ql.session.SessionState;
+import org.apache.sentry.binding.hive.HiveAuthzBindingHook;
+import org.apache.sentry.binding.hive.authz.HiveAuthzBinding;
+import org.apache.sentry.binding.hive.conf.HiveAuthzConf;
+
+import com.google.common.collect.Lists;
+
+public class SentryMetaStoreFilterHook implements MetaStoreFilterHook {
+
+  static final protected Log LOG = LogFactory.getLog(SentryMetaStoreFilterHook.class);
+
+  private HiveAuthzBinding hiveAuthzBinding;
+  private HiveAuthzConf authzConf;
+  private final HiveConf hiveConf;
+
+  public SentryMetaStoreFilterHook(HiveConf hiveConf) {
+    this.hiveConf = hiveConf;
+  }
+
+  @Override
+  public List<String> filterDatabases(List<String> dbList) {
+    return filterDb(dbList);
+  }
+
+  @Override
+  public Database filterDatabase(Database dataBase)
+      throws NoSuchObjectException {
+    return dataBase;
+  }
+
+  @Override
+  public List<String> filterTableNames(String dbName, List<String> tableList) {
+    return filterTab(dbName, tableList);
+  }
+
+  @Override
+  public Table filterTable(Table table) throws NoSuchObjectException {
+    return table;
+  }
+
+  @Override
+  public List<Table> filterTables(List<Table> tableList) {
+    return tableList;
+  }
+
+  @Override
+  public List<Partition> filterPartitions(List<Partition> partitionList) {
+    return partitionList;
+  }
+
+  @Override
+  public List<PartitionSpec> filterPartitionSpecs(
+      List<PartitionSpec> partitionSpecList) {
+    return partitionSpecList;
+  }
+
+  @Override
+  public Partition filterPartition(Partition partition)
+      throws NoSuchObjectException {
+    return partition;
+  }
+
+  @Override
+  public List<String> filterPartitionNames(String dbName, String tblName,
+      List<String> partitionNames) {
+    return partitionNames;
+  }
+
+  @Override
+  public Index filterIndex(Index index) throws NoSuchObjectException {
+    return index;
+  }
+
+  @Override
+  public List<String> filterIndexNames(String dbName, String tblName,
+      List<String> indexList) {
+    return indexList;
+  }
+
+  @Override
+  public List<Index> filterIndexes(List<Index> indexList) {
+    return indexList;
+  }
+
+  /**
+   * Invoke Hive database filtering that removes the entries which the user
+   * has no privileges to access
+   * @param dbList
+   * @return
+   * @throws MetaException
+   */
+  private List<String> filterDb(List<String> dbList) {
+    try {
+      return HiveAuthzBindingHook.filterShowDatabases(getHiveAuthzBinding(),
+          dbList, HiveOperation.SHOWDATABASES, getUserName());
+    } catch (Exception e) {
+      LOG.warn("Error getting DB list ", e);
+      return new ArrayList<String>();
+    }
+  }
+
+  /**
+   * Invoke Hive table filtering that removes the entries which the user has
+   * no privileges to access
+   * @param tabList
+   * @return
+   * @throws MetaException
+   */
+  private List<String> filterTab(String dbName, List<String> tabList) {
+    try {
+      return HiveAuthzBindingHook.filterShowTables(getHiveAuthzBinding(),
+          tabList, HiveOperation.SHOWTABLES, getUserName(), dbName);
+    } catch (Exception e) {
+      LOG.warn("Error getting Table list ", e);
+      return new ArrayList<String>();
+    }
+  }
+
+  private String getUserName() {
+    return getConf().get(HiveAuthzConf.HIVE_SENTRY_SUBJECT_NAME);
+  }
+
+  /**
+   * load Hive auth provider
+   * @return
+   * @throws MetaException
+   */
+  private HiveAuthzBinding getHiveAuthzBinding() throws MetaException {
+    if (hiveAuthzBinding == null) {
+      String hiveAuthzConf = getConf().get(HiveAuthzConf.HIVE_SENTRY_CONF_URL);
+      if (hiveAuthzConf == null
+          || (hiveAuthzConf = hiveAuthzConf.trim()).isEmpty()) {
+        throw new MetaException("Configuration key "
+            + HiveAuthzConf.HIVE_SENTRY_CONF_URL + " value '" + hiveAuthzConf
+            + "' is invalid.");
+      }
+      try {
+        authzConf = new HiveAuthzConf(new URL(hiveAuthzConf));
+      } catch (MalformedURLException e) {
+        throw new MetaException("Configuration key "
+            + HiveAuthzConf.HIVE_SENTRY_CONF_URL
+            + " specifies a malformed URL '" + hiveAuthzConf + "' "
+            + e.getMessage());
+      }
+      try {
+        hiveAuthzBinding = new HiveAuthzBinding(
+            HiveAuthzBinding.HiveHook.HiveMetaStore, getConf(), authzConf);
+      } catch (Exception e) {
+        throw new MetaException("Failed to load Hive binding " + e.getMessage());
+      }
+    }
+    return hiveAuthzBinding;
+  }
+
+  private HiveConf getConf() {
+    return SessionState.get().getConf();
+  }
+
+}
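Note: the new class implements Hive's MetaStoreFilterHook, so databases and tables the caller cannot access are dropped from metastore listings. A minimal wiring sketch; the hive.metastore.filter.hook property name is an assumption based on Hive 0.15's filter-hook support and is not set anywhere in this diff:

    import org.apache.hadoop.hive.conf.HiveConf;

    public class FilterHookWiring {
      public static void main(String[] args) {
        HiveConf conf = new HiveConf();
        // Assumed Hive property name; verify against the Hive version in use.
        conf.set("hive.metastore.filter.hook",
            "org.apache.sentry.binding.metastore.SentryMetaStoreFilterHook");
      }
    }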

http://git-wip-us.apache.org/repos/asf/incubator-sentry/blob/66eef78f/sentry-binding/sentry-binding-hive/src/test/java/org/apache/sentry/binding/hive/TestHiveAuthzBindingHook.java
----------------------------------------------------------------------
diff --git a/sentry-binding/sentry-binding-hive/src/test/java/org/apache/sentry/binding/hive/TestHiveAuthzBindingHook.java b/sentry-binding/sentry-binding-hive/src/test/java/org/apache/sentry/binding/hive/TestHiveAuthzBindingHook.java
deleted file mode 100644
index 9dd4774..0000000
--- a/sentry-binding/sentry-binding-hive/src/test/java/org/apache/sentry/binding/hive/TestHiveAuthzBindingHook.java
+++ /dev/null
@@ -1,86 +0,0 @@
-/*
- * Licensed to the Apache Software Foundation (ASF) under one or more
- * contributor license agreements.  See the NOTICE file distributed with
- * this work for additional information regarding copyright ownership.
- * The ASF licenses this file to You under the Apache License, Version 2.0
- * (the "License"); you may not use this file except in compliance with
- * the License.  You may obtain a copy of the License at
- *
- *      http://www.apache.org/licenses/LICENSE-2.0
- *
- * Unless required by applicable law or agreed to in writing, software
- * distributed under the License is distributed on an "AS IS" BASIS,
- * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
- * See the License for the specific language governing permissions and
- * limitations under the License.
- */
-package org.apache.sentry.binding.hive;
-
-import org.apache.hadoop.hive.conf.HiveConf;
-import org.apache.hadoop.hive.ql.parse.ASTNode;
-import org.apache.hadoop.hive.ql.parse.ParseDriver;
-import org.apache.hadoop.hive.ql.parse.ParseUtils;
-import org.apache.hadoop.hive.ql.session.SessionState;
-import org.apache.sentry.core.model.db.AccessURI;
-import org.junit.Assert;
-import org.junit.Before;
-import org.junit.Test;
-import org.slf4j.Logger;
-import org.slf4j.LoggerFactory;
-
-public class TestHiveAuthzBindingHook {
-  private static final Logger LOG = LoggerFactory
-      .getLogger(TestHiveAuthzBindingHook.class);
-
-  private ParseDriver parseDriver;
-  private HiveConf conf;
-
-  @Before
-  public void setupTest() throws Exception {
-    conf = new HiveConf();
-    SessionState.start(conf);
-    parseDriver = new ParseDriver();
-  }
-
-  @Test
-  public void testAddPartition() throws Exception {
-    ASTNode ast = parse("alter table parted add partition (day='Monday')");
-    LOG.info("AST: " + ast.toStringTree());
-    AccessURI partitionLocation = HiveAuthzBindingHook.extractPartition(ast);
-    Assert.assertNull("Query without part location should not return location",
-        partitionLocation);
-  }
-  @Test
-  public void testAddPartitionWithLocation() throws Exception {
-    ASTNode ast = parse("alter table parted add partition (day='Monday') location 'file:/'");
-    LOG.info("AST: " + ast.toStringTree());
-    AccessURI partitionLocation = HiveAuthzBindingHook.extractPartition(ast);
-    Assert.assertNotNull("Query with part location must return location",
-        partitionLocation);
-    Assert.assertEquals("file:///", partitionLocation.getName());
-  }
-
-  @Test
-  public void testAddPartitionIfNotExists() throws Exception {
-    ASTNode ast = parse("alter table parted add if not exists partition (day='Monday')");
-    LOG.info("AST: " + ast.toStringTree());
-    AccessURI partitionLocation = HiveAuthzBindingHook.extractPartition(ast);
-    Assert.assertNull("Query without part location should not return location",
-        partitionLocation);
-  }
-  @Test
-  public void testAddPartitionIfNotExistsWithLocation() throws Exception {
-    ASTNode ast = parse("alter table parted add if not exists partition (day='Monday')" +
-        " location 'file:/'");
-    LOG.info("AST: " + ast.toStringTree());
-    AccessURI partitionLocation = HiveAuthzBindingHook.extractPartition(ast);
-    Assert.assertNotNull("Query with part location must return location",
-        partitionLocation);
-    Assert.assertEquals("file:///", partitionLocation.getName());
-  }
-
-  private ASTNode parse(String command) throws Exception {
-    return ParseUtils.findRootNonNullToken(parseDriver.parse(command));
-  }
-
-}
\ No newline at end of file

http://git-wip-us.apache.org/repos/asf/incubator-sentry/blob/66eef78f/sentry-binding/sentry-binding-hive/src/test/java/org/apache/sentry/binding/hive/TestHiveAuthzBindings.java
----------------------------------------------------------------------
diff --git a/sentry-binding/sentry-binding-hive/src/test/java/org/apache/sentry/binding/hive/TestHiveAuthzBindings.java b/sentry-binding/sentry-binding-hive/src/test/java/org/apache/sentry/binding/hive/TestHiveAuthzBindings.java
index d41f6cf..b942678 100644
--- a/sentry-binding/sentry-binding-hive/src/test/java/org/apache/sentry/binding/hive/TestHiveAuthzBindings.java
+++ b/sentry-binding/sentry-binding-hive/src/test/java/org/apache/sentry/binding/hive/TestHiveAuthzBindings.java
@@ -204,7 +204,7 @@ public class TestHiveAuthzBindings {
    */
   @Test
   public void testMsckRepairTable() throws Exception {
-    inputTabHierarcyList.add(buildObjectHierarchy(SERVER1, JUNIOR_ANALYST_DB, PURCHASES_TAB));
+    outputTabHierarcyList.add(buildObjectHierarchy(SERVER1, JUNIOR_ANALYST_DB, PURCHASES_TAB));
     testAuth.authorize(HiveOperation.MSCK, alterTabPrivileges, MANAGER_SUBJECT,
       inputTabHierarcyList, outputTabHierarcyList);
 
@@ -224,7 +224,7 @@ public class TestHiveAuthzBindings {
    */
   @Test(expected=AuthorizationException.class)
   public void testMsckRepairTableRejection() throws Exception {
-       outputTabHierarcyList.add(buildObjectHierarchy(SERVER1, JUNIOR_ANALYST_DB, PURCHASES_TAB));
+       inputTabHierarcyList.add(buildObjectHierarchy(SERVER1, JUNIOR_ANALYST_DB, PURCHASES_TAB));
     testAuth.authorize(HiveOperation.MSCK, alterTabPrivileges,
         JUNIOR_ANALYST_SUBJECT, inputTabHierarcyList, outputTabHierarcyList);
   }

http://git-wip-us.apache.org/repos/asf/incubator-sentry/blob/66eef78f/sentry-binding/sentry-binding-hive/src/test/java/org/apache/sentry/binding/hive/TestSentryHiveAuthorizationTaskFactory.java
----------------------------------------------------------------------
diff --git a/sentry-binding/sentry-binding-hive/src/test/java/org/apache/sentry/binding/hive/TestSentryHiveAuthorizationTaskFactory.java b/sentry-binding/sentry-binding-hive/src/test/java/org/apache/sentry/binding/hive/TestSentryHiveAuthorizationTaskFactory.java
index 129a6b5..eebf419 100644
--- a/sentry-binding/sentry-binding-hive/src/test/java/org/apache/sentry/binding/hive/TestSentryHiveAuthorizationTaskFactory.java
+++ b/sentry-binding/sentry-binding-hive/src/test/java/org/apache/sentry/binding/hive/TestSentryHiveAuthorizationTaskFactory.java
@@ -18,11 +18,13 @@
 package org.apache.sentry.binding.hive;
 
 import junit.framework.Assert;
+
 import org.apache.hadoop.hive.SentryHiveConstants;
 import org.apache.hadoop.hive.conf.HiveConf;
 import org.apache.hadoop.hive.conf.HiveConf.ConfVars;
 import org.apache.hadoop.hive.metastore.api.PrincipalType;
 import org.apache.hadoop.hive.ql.Context;
+import org.apache.hadoop.hive.ql.exec.SentryHivePrivilegeObjectDesc;
 import org.apache.hadoop.hive.ql.exec.Task;
 import org.apache.hadoop.hive.ql.metadata.Hive;
 import org.apache.hadoop.hive.ql.metadata.Partition;
@@ -60,6 +62,8 @@ public class TestSentryHiveAuthorizationTaskFactory {
   private static final String GROUP = "group1";
   private static final String ROLE = "role1";
   private static final String USER = "user1";
+  private static final String SERVER = "server1";
+
 
   private ParseDriver parseDriver;
   private DDLSemanticAnalyzer analyzer;
@@ -73,6 +77,7 @@ public class TestSentryHiveAuthorizationTaskFactory {
   @Before
   public void setup() throws Exception {
     conf = new HiveConf();
+    SessionState.start(conf);
     conf.setVar(ConfVars.HIVE_AUTHORIZATION_TASK_FACTORY,
         SentryHiveAuthorizationTaskFactoryImpl.class.getName());
     db = Mockito.mock(Hive.class);
@@ -381,6 +386,47 @@ public class TestSentryHiveAuthorizationTaskFactory {
    Assert.assertEquals(RoleOperation.SHOW_CURRENT_ROLE, roleDDLDesc.getOperation());
   }
 
+  @Test
+  public void testGrantUri() throws Exception {
+    String uriPath = "/tmp";
+    DDLWork work = analyze(parse("GRANT " + ALL + " ON URI '" + uriPath
+        + "' TO ROLE " + ROLE));
+    GrantDesc grantDesc = work.getGrantDesc();
+    Assert.assertNotNull("Grant should not be null", grantDesc);
+    for (PrincipalDesc principal : assertSize(1, grantDesc.getPrincipals())) {
+      Assert.assertEquals(PrincipalType.ROLE, principal.getType());
+      Assert.assertEquals(ROLE, principal.getName());
+    }
+    for (PrivilegeDesc privilege : assertSize(1, grantDesc.getPrivileges())) {
+      Assert.assertEquals(Privilege.ALL, privilege.getPrivilege());
+    }
+    SentryHivePrivilegeObjectDesc privilegeDesc = (SentryHivePrivilegeObjectDesc)grantDesc.getPrivilegeSubjectDesc();
+    Assert.assertTrue("Expected uri", privilegeDesc.getUri());
+    Assert.assertEquals(uriPath, privilegeDesc.getObject());
+  }
+
+  /**
+   * GRANT ALL ON SERVER
+   */
+  @Test
+  public void testGrantServer() throws Exception {
+    DDLWork work = analyze(parse("GRANT " + ALL + " ON SERVER " + SERVER
+        + " TO ROLE " + ROLE));
+    GrantDesc grantDesc = work.getGrantDesc();
+    Assert.assertNotNull("Grant should not be null", grantDesc);
+    for (PrincipalDesc principal : assertSize(1, grantDesc.getPrincipals())) {
+      Assert.assertEquals(PrincipalType.ROLE, principal.getType());
+      Assert.assertEquals(ROLE, principal.getName());
+    }
+    for (PrivilegeDesc privilege : assertSize(1, grantDesc.getPrivileges())) {
+      Assert.assertEquals(Privilege.ALL, privilege.getPrivilege());
+    }
+    SentryHivePrivilegeObjectDesc privilegeDesc =
+        (SentryHivePrivilegeObjectDesc)grantDesc.getPrivilegeSubjectDesc();
+    Assert.assertTrue("Expected server", privilegeDesc.getServer());
+    Assert.assertEquals(SERVER, privilegeDesc.getObject());
+  }
+
  private void expectSemanticException(String command, String msg) throws Exception {
     try {
       analyze(parse(command));

http://git-wip-us.apache.org/repos/asf/incubator-sentry/blob/66eef78f/sentry-hdfs/sentry-hdfs-service/src/main/java/org/apache/sentry/hdfs/MetastorePlugin.java
----------------------------------------------------------------------
diff --git a/sentry-hdfs/sentry-hdfs-service/src/main/java/org/apache/sentry/hdfs/MetastorePlugin.java b/sentry-hdfs/sentry-hdfs-service/src/main/java/org/apache/sentry/hdfs/MetastorePlugin.java
index 82ab27d..cdd8d9c 100644
--- a/sentry-hdfs/sentry-hdfs-service/src/main/java/org/apache/sentry/hdfs/MetastorePlugin.java
+++ b/sentry-hdfs/sentry-hdfs-service/src/main/java/org/apache/sentry/hdfs/MetastorePlugin.java
@@ -101,10 +101,6 @@ public class MetastorePlugin extends SentryMetastoreListenerPlugin {
     public ProxyHMSHandler(String name, HiveConf conf) throws MetaException {
       super(name, conf);
     }
-    @Override
-    public String startFunction(String function, String extraLogInfo) {
-      return function;
-    }
   }
 
   public MetastorePlugin(Configuration conf) {

http://git-wip-us.apache.org/repos/asf/incubator-sentry/blob/66eef78f/sentry-provider/sentry-provider-db/.gitignore
----------------------------------------------------------------------
diff --git a/sentry-provider/sentry-provider-db/.gitignore b/sentry-provider/sentry-provider-db/.gitignore
index 55b8677..a2f1f96 100644
--- a/sentry-provider/sentry-provider-db/.gitignore
+++ b/sentry-provider/sentry-provider-db/.gitignore
@@ -1 +1,2 @@
 sentry_policy_db
+/target

http://git-wip-us.apache.org/repos/asf/incubator-sentry/blob/66eef78f/sentry-tests/sentry-tests-hive/pom.xml
----------------------------------------------------------------------
diff --git a/sentry-tests/sentry-tests-hive/pom.xml b/sentry-tests/sentry-tests-hive/pom.xml
index 17109b8..ed9c704 100644
--- a/sentry-tests/sentry-tests-hive/pom.xml
+++ b/sentry-tests/sentry-tests-hive/pom.xml
@@ -177,6 +177,8 @@ limitations under the License.
     <dependency>
       <groupId>org.apache.pig</groupId>
       <artifactId>pig</artifactId>
+      <version>${pig.version}</version>
+      <classifier>h2</classifier>
       <scope>test</scope>
     </dependency>
     <dependency>
@@ -251,6 +253,28 @@ limitations under the License.
           <scope>test</scope>
           <version>1.3</version>
       </dependency>
+      <dependency>
+        <groupId>org.apache.curator</groupId>
+        <artifactId>curator-framework</artifactId>
+        <scope>test</scope>
+      </dependency>
+      <dependency>
+        <groupId>org.apache.zookeeper</groupId>
+        <artifactId>zookeeper</artifactId>
+        <exclusions>
+          <exclusion>
+            <groupId>org.jboss.netty</groupId>
+            <artifactId>netty</artifactId>
+          </exclusion>
+        </exclusions>
+        <scope>test</scope>
+      </dependency>
+      <dependency>
+        <groupId>joda-time</groupId>
+        <artifactId>joda-time</artifactId>
+        <version>${joda-time.version}</version>
+        <scope>test</scope>
+      </dependency>
   </dependencies>
   <build>
     <plugins>
@@ -296,7 +320,7 @@ limitations under the License.
   </build>
   <profiles>
    <profile>
-     <id>download-hadoop</id>
+     <id>link-hadoop</id>
      <activation>
        <activeByDefault>false</activeByDefault>
        <property><name>!skipTests</name></property>
@@ -307,95 +331,54 @@ limitations under the License.
         <groupId>org.apache.maven.plugins</groupId>
         <artifactId>maven-antrun-plugin</artifactId>
         <configuration>
+          <exportAntProperties>true</exportAntProperties>
           <skipTests>true</skipTests>
         </configuration>
         <executions>
           <execution>
-            <id>download-hadoop</id>
+            <id>link-hadoop</id>
             <phase>generate-sources</phase>
             <goals>
               <goal>run</goal>
             </goals>
             <configuration>
               <target>
-                <echo file="target/download.sh">
+                <echo file="target/hadoop-setup.sh">
                   set -e
                   set -x
                   /bin/pwd
                   BASE_DIR=./target
-                  DOWNLOAD_DIR=./thirdparty
-                  download() {
-                    url=$1;
-                    tarName=$2
-                    finalName=$3
-                    rm -rf $BASE_DIR/$finalName
-                    if [[ ! -f $DOWNLOAD_DIR/$tarName ]]
-                    then
-                      wget -nv -O $DOWNLOAD_DIR/$tarName $url
-                    fi
-                    tar -zxf $DOWNLOAD_DIR/$tarName -C $BASE_DIR
-                    mv $BASE_DIR/${finalName}* $BASE_DIR/$finalName
+                  TEST_UTIL_DIR=./testutil
+                  setup_hadoop() {
+                    set -e
+                    set -x
+                    /bin/pwd
+                    cp -f $TEST_UTIL_DIR/* $BASE_DIR/.
+                    chmod 777 $BASE_DIR/hadoop
                   }
-                  mkdir -p $DOWNLOAD_DIR
-                  download "http://nightly.cloudera.com/cdh5/cdh/5/hadoop-latest.tar.gz" hadoop.tar.gz hadoop
-                  download "http://nightly.cloudera.com/cdh5/cdh/5/hive-latest.tar.gz" hive.tar.gz hive
+                  setup_hadoop
                 </echo>
                 <exec executable="bash" dir="${basedir}" failonerror="true">
-                  <arg line="target/download.sh"/>
+                  <arg line="target/hadoop-setup.sh"/>
                 </exec>
               </target>
             </configuration>
           </execution>
         </executions>
       </plugin>
+      <plugin>
+        <groupId>org.apache.maven.plugins</groupId>
+        <artifactId>maven-surefire-plugin</artifactId>
+        <configuration>
+          <environmentVariables>
+            <SENTRY_HADOOP_TEST_CLASSPATH>${maven.test.classpath}</SENTRY_HADOOP_TEST_CLASSPATH>
+          </environmentVariables>
+        </configuration>
+      </plugin>
      </plugins>
     </build>
    </profile>
    <profile>
-     <id>link-hadoop</id>
-      <activation>
-        <activeByDefault>false</activeByDefault>
-        <property><name>hadoop-dist</name></property>
-      </activation>
-     <build>
-      <plugins>
-        <plugin>
-          <groupId>org.apache.maven.plugins</groupId>
-          <artifactId>maven-antrun-plugin</artifactId>
-          <configuration>
-            <skipTests>false</skipTests>
-          </configuration>
-          <executions>
-            <execution>
-              <id>link-hadoop</id>
-              <phase>generate-sources</phase>
-              <goals>
-                <goal>run</goal>
-              </goals>
-              <configuration>
-                <target>
-                  <echo file="target/link_dist.sh">
-                    set -e
-                    set -x
-                    /bin/pwd
-                    BASE_DIR=./target
-                    rm -f $BASE_DIR/hadoop
-                    ln -s $1/hadoop $BASE_DIR/.
-                    rm -f $BASE_DIR/hive
-                    ln -s $2/hive $BASE_DIR/.
-                  </echo>
-                  <exec executable="bash" dir="${basedir}" failonerror="true">
-                    <arg line="target/link_dist.sh ${hadoop-dist} ${hive-dist}"/>
-                  </exec>
-                </target>
-              </configuration>
-            </execution>
-          </executions>
-        </plugin>
-      </plugins>
-     </build>
-   </profile>
-   <profile>
      <id>cluster-hadoop</id>
      <build>
       <plugins>

http://git-wip-us.apache.org/repos/asf/incubator-sentry/blob/66eef78f/sentry-tests/sentry-tests-hive/src/test/java/org/apache/sentry/tests/e2e/hive/Context.java
----------------------------------------------------------------------
diff --git a/sentry-tests/sentry-tests-hive/src/test/java/org/apache/sentry/tests/e2e/hive/Context.java b/sentry-tests/sentry-tests-hive/src/test/java/org/apache/sentry/tests/e2e/hive/Context.java
index f609c46..69743bc 100644
--- a/sentry-tests/sentry-tests-hive/src/test/java/org/apache/sentry/tests/e2e/hive/Context.java
+++ b/sentry-tests/sentry-tests-hive/src/test/java/org/apache/sentry/tests/e2e/hive/Context.java
@@ -256,8 +256,8 @@ public class Context {
   // TODO: Handle kerberos login
  public HiveMetaStoreClient getMetaStoreClient(String userName) throws Exception {
    UserGroupInformation clientUgi = UserGroupInformation.createRemoteUser(userName);
-    HiveMetaStoreClient client = (HiveMetaStoreClient)ShimLoader.getHadoopShims()
-        .doAs(clientUgi, new PrivilegedExceptionAction<Object> () {
+    HiveMetaStoreClient client = (HiveMetaStoreClient) clientUgi.
+        doAs(new PrivilegedExceptionAction<Object> () {
           @Override
           public HiveMetaStoreClient run() throws Exception {
             return new HiveMetaStoreClient(new HiveConf());
@@ -270,8 +270,8 @@ public class Context {
       throws Exception {
     UserGroupInformation clientUgi = UserGroupInformation
         .createRemoteUser(userName);
-    PigServer pigServer = (PigServer) ShimLoader.getHadoopShims().doAs(
-        clientUgi, new PrivilegedExceptionAction<Object>() {
+    PigServer pigServer = (PigServer) clientUgi.
+        doAs(new PrivilegedExceptionAction<Object>() {
       @Override
       public PigServer run() throws Exception {
         return new PigServer(exType, new HiveConf());

http://git-wip-us.apache.org/repos/asf/incubator-sentry/blob/66eef78f/sentry-tests/sentry-tests-hive/src/test/java/org/apache/sentry/tests/e2e/hive/TestMetadataObjectRetrieval.java
----------------------------------------------------------------------
diff --git a/sentry-tests/sentry-tests-hive/src/test/java/org/apache/sentry/tests/e2e/hive/TestMetadataObjectRetrieval.java b/sentry-tests/sentry-tests-hive/src/test/java/org/apache/sentry/tests/e2e/hive/TestMetadataObjectRetrieval.java
index e4d2333..fbfb031 100644
--- a/sentry-tests/sentry-tests-hive/src/test/java/org/apache/sentry/tests/e2e/hive/TestMetadataObjectRetrieval.java
+++ b/sentry-tests/sentry-tests-hive/src/test/java/org/apache/sentry/tests/e2e/hive/TestMetadataObjectRetrieval.java
@@ -375,23 +375,24 @@ public class TestMetadataObjectRetrieval extends AbstractTestWithStaticConfigura
     policyFile.removePermissionsFromRole(GROUP1_ROLE, SELECT_DB1_VIEW1)
     .addPermissionsToRole(GROUP1_ROLE, SELECT_DB1_TBL1);
     writePolicyFile(policyFile);
-    verifyIndex(statement, TBL1, INDEX1);
+    verifyIndex(statement, DB1, TBL1, INDEX1);
 
     policyFile.removePermissionsFromRole(GROUP1_ROLE, SELECT_DB1_TBL1)
     .addPermissionsToRole(GROUP1_ROLE, INSERT_DB1_TBL1);
     writePolicyFile(policyFile);
-    verifyIndex(statement, TBL1, INDEX1);
+    verifyIndex(statement, DB1, TBL1, INDEX1);
     statement.close();
     connection.close();
   }
 
-  private void verifyIndex(Statement statement, String table, String index) throws Exception {
+  private void verifyIndex(Statement statement, String dbName, String table, String index) throws Exception {
     ResultSet rs = statement.executeQuery("SHOW INDEX ON " + table);
     assertTrue(rs.next());
     assertEquals(index, rs.getString(1).trim());
     assertEquals(table, rs.getString(2).trim());
     assertEquals("value", rs.getString(3).trim());
-    assertEquals("db_1__tb_1_index_1__", rs.getString(4).trim());
+    assertEquals(dbName + "." + dbName + "__" + table + "_" + index + "__",
+        rs.getString(4).trim());
     assertEquals("compact", rs.getString(5).trim());
   }
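
The fourth column of SHOW INDEX is the index's backing table, which Hive names
after the database, base table, and index. The test now computes the expected
value instead of hard-coding "db_1__tb_1_index_1__". The rule, restated from
the assertion above (a description of the test's expectation, not an official
Hive API):

    // Expected backing-table name for an index, per verifyIndex():
    //   <db>.<db>__<table>_<index>__   e.g. "db_1.db_1__tb_1_index_1__"
    static String expectedIndexTableName(String db, String table, String index) {
      return db + "." + db + "__" + table + "_" + index + "__";
    }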
 

http://git-wip-us.apache.org/repos/asf/incubator-sentry/blob/66eef78f/sentry-tests/sentry-tests-hive/src/test/java/org/apache/sentry/tests/e2e/hive/TestOperations.java
----------------------------------------------------------------------
diff --git a/sentry-tests/sentry-tests-hive/src/test/java/org/apache/sentry/tests/e2e/hive/TestOperations.java b/sentry-tests/sentry-tests-hive/src/test/java/org/apache/sentry/tests/e2e/hive/TestOperations.java
index 6437d23..2fbdfa6 100644
--- a/sentry-tests/sentry-tests-hive/src/test/java/org/apache/sentry/tests/e2e/hive/TestOperations.java
+++ b/sentry-tests/sentry-tests-hive/src/test/java/org/apache/sentry/tests/e2e/hive/TestOperations.java
@@ -54,6 +54,7 @@ public class TestOperations extends AbstractTestWithStaticConfiguration {
     privileges.put("select_db1_tb1", "server=server1->db=" + DB1 + 
"->table=tb1->action=select");
     privileges.put("insert_db1_tb1", "server=server1->db=" + DB1 + 
"->table=tb1->action=insert");
     privileges.put("alter_db1_tb1", "server=server1->db=" + DB1 + 
"->table=tb1->action=alter");
+    privileges.put("alter_db1_ptab", "server=server1->db=" + DB1 + 
"->table=ptab->action=alter");
     privileges.put("index_db1_tb1", "server=server1->db=" + DB1 + 
"->table=tb1->action=index");
     privileges.put("lock_db1_tb1", "server=server1->db=" + DB1 + 
"->table=tb1->action=lock");
     privileges.put("drop_db1_tb1", "server=server1->db=" + DB1 + 
"->table=tb1->action=drop");
@@ -446,7 +447,8 @@ public class TestOperations extends AbstractTestWithStaticConfiguration {
     adminCreate(DB1, tableName, true);
     policyFile
         .addPermissionsToRole("alter_db1_tb1", privileges.get("alter_db1_tb1"))
-        .addRolesToGroup(USERGROUP1, "alter_db1_tb1")
+        .addPermissionsToRole("alter_db1_ptab", 
privileges.get("alter_db1_ptab"))
+        .addRolesToGroup(USERGROUP1, "alter_db1_tb1", "alter_db1_ptab")
         .addPermissionsToRole("insert_db1_tb1", 
privileges.get("insert_db1_tb1"))
         .addRolesToGroup(USERGROUP2, "insert_db1_tb1");
     writePolicyFile(policyFile);
@@ -459,7 +461,7 @@ public class TestOperations extends AbstractTestWithStaticConfiguration {
     statement.execute("Use " + DB1);
     statement.execute("ALTER TABLE tb1 ADD IF NOT EXISTS PARTITION (b = '10') 
");
     statement.execute("ALTER TABLE tb1 ADD IF NOT EXISTS PARTITION (b = '1') 
");
-
+    statement.execute("CREATE TABLE ptab (a int) STORED AS PARQUET");
     //Negative test cases
     connection = context.createConnection(USER2_1);
     statement = context.createStatement(connection);
@@ -483,7 +485,7 @@ public class TestOperations extends AbstractTestWithStaticConfiguration {
 
     assertSemanticException(statement, "ALTER TABLE tb1 CHANGE COLUMN a c 
int");
     assertSemanticException(statement, "ALTER TABLE tb1 ADD COLUMNS (a int)");
-    assertSemanticException(statement, "ALTER TABLE tb1 REPLACE COLUMNS (a 
int, c int)");
+    assertSemanticException(statement, "ALTER TABLE ptab REPLACE COLUMNS (a 
int, c int)");
     assertSemanticException(statement, "MSCK REPAIR TABLE tb1");
 
     //assertSemanticException(statement, "ALTER VIEW view1 SET TBLPROPERTIES ('comment' = 'new_comment')");
@@ -515,7 +517,7 @@ public class TestOperations extends AbstractTestWithStaticConfiguration {
 
     statement.execute("ALTER TABLE tb1 CHANGE COLUMN a c int");
     statement.execute("ALTER TABLE tb1 ADD COLUMNS (a int)");
-    statement.execute("ALTER TABLE tb1 REPLACE COLUMNS (a int, c int)");
+    statement.execute("ALTER TABLE ptab REPLACE COLUMNS (a int, c int)");
     statement.execute("MSCK REPAIR TABLE tb1");
 
     //statement.execute("ALTER VIEW view1 SET TBLPROPERTIES ('comment' = 
'new_comment')");
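
The REPLACE COLUMNS cases move from the partitioned tb1 to the new
Parquet-backed ptab; the privilege checks being exercised are unchanged. The
new alter_db1_ptab role uses the same arrow-delimited privilege grammar as the
rest of these tests; a tiny helper (hypothetical, not in the patch) makes the
shape of a table-level privilege string explicit:

    // server=<server>->db=<db>->table=<table>->action=<action>
    static String tablePrivilege(String server, String db, String table, String action) {
      return "server=" + server + "->db=" + db + "->table=" + table
          + "->action=" + action;
    }

    // tablePrivilege("server1", "db_1", "ptab", "alter")
    //   -> "server=server1->db=db_1->table=ptab->action=alter"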

http://git-wip-us.apache.org/repos/asf/incubator-sentry/blob/66eef78f/sentry-tests/sentry-tests-hive/src/test/java/org/apache/sentry/tests/e2e/hive/TestPrivilegesAtFunctionScope.java
----------------------------------------------------------------------
diff --git a/sentry-tests/sentry-tests-hive/src/test/java/org/apache/sentry/tests/e2e/hive/TestPrivilegesAtFunctionScope.java b/sentry-tests/sentry-tests-hive/src/test/java/org/apache/sentry/tests/e2e/hive/TestPrivilegesAtFunctionScope.java
index 2c0b02c..de9b2ed 100644
--- a/sentry-tests/sentry-tests-hive/src/test/java/org/apache/sentry/tests/e2e/hive/TestPrivilegesAtFunctionScope.java
+++ b/sentry-tests/sentry-tests-hive/src/test/java/org/apache/sentry/tests/e2e/hive/TestPrivilegesAtFunctionScope.java
@@ -79,13 +79,13 @@ public class TestPrivilegesAtFunctionScope extends AbstractTestWithStaticConfigu
     context.close();
 
     policyFile
-        .addRolesToGroup(USERGROUP1, "db1_all", "UDF_JAR")
+        .addRolesToGroup(USERGROUP1, "db1_all", "UDF_JAR", "data_read")
         .addRolesToGroup(USERGROUP2, "db1_tab1", "UDF_JAR")
         .addRolesToGroup(USERGROUP3, "db1_tab1")
         .addPermissionsToRole("db1_all", "server=server1->db=" + DB1)
         .addPermissionsToRole("db1_tab1", "server=server1->db=" + DB1 + 
"->table=" + tableName1)
-        .addPermissionsToRole("UDF_JAR", "server=server1->uri=file://" + 
udfLocation);
-
+        .addPermissionsToRole("UDF_JAR", "server=server1->uri=file://" + 
udfLocation)
+        .addPermissionsToRole("data_read", "server=server1->URI=" + 
"file:///tmp");
     writePolicyFile(policyFile);
 
     // user1 should be able create/drop temp functions
@@ -105,7 +105,7 @@ public class TestPrivilegesAtFunctionScope extends AbstractTestWithStaticConfigu
     // test perm UDF with 'using file' syntax
     statement
         .execute("CREATE FUNCTION printf_test_perm AS 'org.apache.hadoop.hive.ql.udf.generic.GenericUDFPrintf' "
-            + " using file 'file://" + udfLocation + "'");
+            + " using file 'file:///tmp'");
     statement.execute("DROP FUNCTION printf_test_perm");
 
     context.close();
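
The new data_read role gives USERGROUP1 a URI privilege on file:///tmp to
match the relocated 'using file' clause: Sentry authorizes the file referenced
by CREATE FUNCTION ... USING FILE against URI privileges, in addition to the
database and jar privileges. Condensed from the hunks above (connection setup
omitted):

    // Grant read access on the URI the function definition references.
    policyFile.addPermissionsToRole("data_read", "server=server1->URI=" + "file:///tmp");
    writePolicyFile(policyFile);
    // With the grant in place, the DDL passes authorization for USERGROUP1.
    statement.execute("CREATE FUNCTION printf_test_perm AS "
        + "'org.apache.hadoop.hive.ql.udf.generic.GenericUDFPrintf' "
        + "using file 'file:///tmp'");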

http://git-wip-us.apache.org/repos/asf/incubator-sentry/blob/66eef78f/sentry-tests/sentry-tests-hive/src/test/java/org/apache/sentry/tests/e2e/hive/TestPrivilegesAtTableScope.java
----------------------------------------------------------------------
diff --git a/sentry-tests/sentry-tests-hive/src/test/java/org/apache/sentry/tests/e2e/hive/TestPrivilegesAtTableScope.java b/sentry-tests/sentry-tests-hive/src/test/java/org/apache/sentry/tests/e2e/hive/TestPrivilegesAtTableScope.java
index 45a4b7c..bbac5c8 100644
--- a/sentry-tests/sentry-tests-hive/src/test/java/org/apache/sentry/tests/e2e/hive/TestPrivilegesAtTableScope.java
+++ b/sentry-tests/sentry-tests-hive/src/test/java/org/apache/sentry/tests/e2e/hive/TestPrivilegesAtTableScope.java
@@ -18,6 +18,7 @@
 package org.apache.sentry.tests.e2e.hive;
 
 import org.apache.sentry.provider.file.PolicyFile;
+
 import static org.junit.Assert.assertFalse;
 import static org.junit.Assert.assertNotNull;
 import static org.junit.Assert.assertTrue;
@@ -48,7 +49,15 @@ public class TestPrivilegesAtTableScope extends AbstractTestWithStaticConfigurat
 
   @Before
   public void setup() throws Exception {
-    policyFile = PolicyFile.setAdminOnServer1(ADMINGROUP);
+    policyFile = PolicyFile.setAdminOnServer1(ADMINGROUP)
+        .setUserGroupMapping(StaticUserGroup.getStaticMapping());
+    writePolicyFile(policyFile);
+    Connection connection = context.createConnection(ADMIN1);
+    Statement statement = context.createStatement(connection);
+    statement.execute("DROP DATABASE IF EXISTS DB_1 CASCADE");
+    statement.execute("CREATE DATABASE DB_1");
+    statement.close();
+    connection.close();
   }
 
   /*
@@ -76,8 +85,6 @@ public class TestPrivilegesAtTableScope extends AbstractTestWithStaticConfigurat
     // setup db objects needed by the test
     Connection connection = context.createConnection(ADMIN1);
     Statement statement = context.createStatement(connection);
-    statement.execute("DROP DATABASE IF EXISTS DB_1 CASCADE");
-    statement.execute("CREATE DATABASE DB_1");
     statement.execute("USE DB_1");
     statement.execute("CREATE TABLE TAB_1(A STRING)");
     statement.execute("LOAD DATA LOCAL INPATH '" + dataFile.getPath()
@@ -172,8 +179,6 @@ public class TestPrivilegesAtTableScope extends AbstractTestWithStaticConfigurat
     // setup db objects needed by the test
     Connection connection = context.createConnection(ADMIN1);
     Statement statement = context.createStatement(connection);
-    statement.execute("DROP DATABASE IF EXISTS DB_1 CASCADE");
-    statement.execute("CREATE DATABASE " + DB1);
     statement.execute("USE " + DB1);
     statement.execute("CREATE TABLE TAB_1(A STRING)");
     statement.execute("LOAD DATA LOCAL INPATH '" + dataFile.getPath()
@@ -261,8 +266,6 @@ public class TestPrivilegesAtTableScope extends AbstractTestWithStaticConfigurat
     Connection connection = context.createConnection(ADMIN1);
     Statement statement = context.createStatement(connection);
 
-    statement.execute("DROP DATABASE IF EXISTS DB_1 CASCADE");
-    statement.execute("CREATE DATABASE " + DB1);
     statement.execute("USE " + DB1);
     statement.execute("CREATE TABLE TAB_1(A STRING)");
     statement.execute("LOAD DATA LOCAL INPATH '" + dataFile.getPath()
@@ -349,8 +352,6 @@ public class TestPrivilegesAtTableScope extends AbstractTestWithStaticConfigurat
     Connection connection = context.createConnection(ADMIN1);
     Statement statement = context.createStatement(connection);
 
-    statement.execute("DROP DATABASE IF EXISTS DB_1 CASCADE");
-    statement.execute("CREATE DATABASE " + DB1);
     statement.execute("USE " + DB1);
     statement.execute("CREATE TABLE TAB_1(B INT, A STRING) "
         + " row format delimited fields terminated by '|'  stored as 
textfile");
@@ -427,8 +428,6 @@ public class TestPrivilegesAtTableScope extends AbstractTestWithStaticConfigurat
     Connection connection = context.createConnection(ADMIN1);
     Statement statement = context.createStatement(connection);
 
-    statement.execute("DROP DATABASE IF EXISTS DB_1 CASCADE");
-    statement.execute("CREATE DATABASE " + DB1);
     statement.execute("USE " + DB1);
     statement.execute("CREATE TABLE TAB_1(B INT, A STRING) "
         + " row format delimited fields terminated by '|'  stored as 
textfile");
@@ -512,8 +511,6 @@ public class TestPrivilegesAtTableScope extends AbstractTestWithStaticConfigurat
     Connection connection = context.createConnection(ADMIN1);
     Statement statement = context.createStatement(connection);
 
-    statement.execute("DROP DATABASE IF EXISTS DB_1 CASCADE");
-    statement.execute("CREATE DATABASE " + DB1);
     statement.execute("USE " + DB1);
     statement.execute("CREATE TABLE TAB_1(B INT, A STRING) "
         + " row format delimited fields terminated by '|'  stored as 
textfile");
@@ -613,8 +610,6 @@ public class TestPrivilegesAtTableScope extends AbstractTestWithStaticConfigurat
     Connection connection = context.createConnection(ADMIN1);
     Statement statement = context.createStatement(connection);
 
-    statement.execute("DROP DATABASE IF EXISTS DB_1 CASCADE");
-    statement.execute("CREATE DATABASE " + DB1);
     statement.execute("USE " + DB1);
     statement.execute("CREATE TABLE TAB_1(B INT, A STRING) "
         + " row format delimited fields terminated by '|'  stored as 
textfile");
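
All of the per-test DROP/CREATE DATABASE pairs removed above are now handled
once in setup(), so every test starts against a freshly created DB_1. An
equivalent formulation of the new fixture using try-with-resources
(behaviorally the same as the @Before hunk; shown only to make the resource
handling explicit):

    @Before
    public void setup() throws Exception {
      policyFile = PolicyFile.setAdminOnServer1(ADMINGROUP)
          .setUserGroupMapping(StaticUserGroup.getStaticMapping());
      writePolicyFile(policyFile);
      try (Connection connection = context.createConnection(ADMIN1);
           Statement statement = context.createStatement(connection)) {
        statement.execute("DROP DATABASE IF EXISTS DB_1 CASCADE");
        statement.execute("CREATE DATABASE DB_1");
      }
    }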

http://git-wip-us.apache.org/repos/asf/incubator-sentry/blob/66eef78f/sentry-tests/sentry-tests-hive/src/test/java/org/apache/sentry/tests/e2e/hive/fs/MiniDFS.java
----------------------------------------------------------------------
diff --git a/sentry-tests/sentry-tests-hive/src/test/java/org/apache/sentry/tests/e2e/hive/fs/MiniDFS.java b/sentry-tests/sentry-tests-hive/src/test/java/org/apache/sentry/tests/e2e/hive/fs/MiniDFS.java
index f632461..3ba14f1 100644
--- a/sentry-tests/sentry-tests-hive/src/test/java/org/apache/sentry/tests/e2e/hive/fs/MiniDFS.java
+++ b/sentry-tests/sentry-tests-hive/src/test/java/org/apache/sentry/tests/e2e/hive/fs/MiniDFS.java
@@ -26,6 +26,7 @@ import org.apache.hadoop.conf.Configuration;
 import org.apache.hadoop.fs.Path;
 import org.apache.hadoop.hdfs.MiniDFSCluster;
 import org.apache.hadoop.security.GroupMappingServiceProvider;
+import org.apache.hadoop.security.Groups;
 import org.apache.sentry.tests.e2e.hive.hiveserver.HiveServerFactory.HiveServer2Type;
 
 import com.google.common.collect.Lists;
@@ -54,10 +55,15 @@ public class MiniDFS extends AbstractDFS {
   private static MiniDFSCluster dfsCluster;
 
   MiniDFS(File baseDir, String serverType) throws Exception {
+    Configuration conf = new Configuration();
     if (HiveServer2Type.InternalMetastore.name().equalsIgnoreCase(serverType)) {
-      Configuration.addDefaultResource("core-site-for-sentry-test.xml");
+      // set the test group mapping that maps each user to a group of the same name
+      conf.set("hadoop.security.group.mapping",
+          "org.apache.sentry.tests.e2e.hive.fs.MiniDFS$PseudoGroupMappingService");
+      // relax the umask so the metastore test client can create tables in the warehouse dir
+      conf.set("fs.permissions.umask-mode", "000");
+      Groups.getUserToGroupsMappingServiceWithLoadedConfiguration(conf);
     }
-    Configuration conf = new Configuration();
     File dfsDir = assertCreateDir(new File(baseDir, "dfs"));
     conf.set(MiniDFSCluster.HDFS_MINIDFS_BASEDIR, dfsDir.getPath());
     conf.set("hadoop.security.group.mapping",

http://git-wip-us.apache.org/repos/asf/incubator-sentry/blob/66eef78f/sentry-tests/sentry-tests-hive/src/test/java/org/apache/sentry/tests/e2e/hive/hiveserver/HiveServerFactory.java
----------------------------------------------------------------------
diff --git a/sentry-tests/sentry-tests-hive/src/test/java/org/apache/sentry/tests/e2e/hive/hiveserver/HiveServerFactory.java b/sentry-tests/sentry-tests-hive/src/test/java/org/apache/sentry/tests/e2e/hive/hiveserver/HiveServerFactory.java
index abb39fd..934ceb8 100644
--- a/sentry-tests/sentry-tests-hive/src/test/java/org/apache/sentry/tests/e2e/hive/hiveserver/HiveServerFactory.java
+++ b/sentry-tests/sentry-tests-hive/src/test/java/org/apache/sentry/tests/e2e/hive/hiveserver/HiveServerFactory.java
@@ -66,7 +66,6 @@ public class HiveServerFactory {
   public static final String METASTORE_SETUGI = HiveConf.ConfVars.METASTORE_EXECUTE_SET_UGI.varname;
   public static final String METASTORE_BYPASS = AuthzConfVars.AUTHZ_METASTORE_SERVICE_USERS.getVar();
   public static final String METASTORE_CLIENT_TIMEOUT = HiveConf.ConfVars.METASTORE_CLIENT_SOCKET_TIMEOUT.varname;
-  public static final String METASTORE_CLIENT_IMPL = HiveConf.ConfVars.METASTORE_CLIENT_IMPL.varname;
   public static final String METASTORE_RAW_STORE_IMPL = HiveConf.ConfVars.METASTORE_RAW_STORE_IMPL.varname;
 
   static {
@@ -137,12 +136,12 @@ public class HiveServerFactory {
       properties.put(SUPPORT_CONCURRENCY, "false");
     }
     if(!properties.containsKey(HADOOPBIN)) {
-      properties.put(HADOOPBIN, "./target/hadoop/bin/hadoop");
+      properties.put(HADOOPBIN, "./target/hadoop");
     }
+    properties.put(METASTORE_RAW_STORE_IMPL,
+        "org.apache.sentry.binding.metastore.AuthorizingObjectStore");
     if (!properties.containsKey(METASTORE_URI)) {
       if (HiveServer2Type.InternalMetastore.equals(type)) {
-        properties.put(METASTORE_RAW_STORE_IMPL,
-            "org.apache.sentry.binding.metastore.AuthorizingObjectStore");
         // The configuration sentry.metastore.service.users is for the user who
         // has all access to get the metadata.
         properties.put(METASTORE_BYPASS, "accessAllMetaUser");
@@ -155,6 +154,13 @@ public class HiveServerFactory {
         properties.put(ConfVars.METASTORESERVERMINTHREADS.varname, "5");
       }
     }
+
+    // set the SentryMetaStoreFilterHook for HiveServer2 only, not for the metastore
+    if (!HiveServer2Type.InternalMetastore.equals(type)) {
+      properties.put(ConfVars.METASTORE_FILTER_HOOK.varname,
+          org.apache.sentry.binding.metastore.SentryMetaStoreFilterHook.class.getName());
+    }
+
     if (!properties.containsKey(METASTORE_BYPASS)) {
       properties.put(METASTORE_BYPASS, "hive,impala," + System.getProperty("user.name", ""));
     } else {
@@ -166,9 +172,8 @@ public class HiveServerFactory {
     properties.put(METASTORE_SETUGI, "true");
     properties.put(METASTORE_CLIENT_TIMEOUT, "100");
     properties.put(ConfVars.HIVE_WAREHOUSE_SUBDIR_INHERIT_PERMS.varname, "true");
-    properties.put(METASTORE_CLIENT_IMPL,
-        "org.apache.sentry.binding.metastore.SentryHiveMetaStoreClient");
 
+    properties.put(ConfVars.HIVE_SERVER2_BUILTIN_UDF_BLACKLIST.varname, "reflect,reflect2,java_method");
     properties.put(ConfVars.HIVESTATSAUTOGATHER.varname, "false");
     properties.put(ConfVars.HIVE_STATS_COLLECT_SCANCOLS.varname, "true");
     String hadoopBinPath = properties.get(HADOOPBIN);
@@ -203,8 +208,7 @@ public class HiveServerFactory {
     authzConf.writeXml(out);
     out.close();
     // points hive-site.xml at access-site.xml
-    hiveConf.set(HiveAuthzConf.HIVE_SENTRY_CONF_URL, accessSite.toURI().toURL()
-        .toExternalForm());
+    hiveConf.set(HiveAuthzConf.HIVE_SENTRY_CONF_URL, "file:///" + accessSite.getPath());
 
     if(!properties.containsKey(HiveConf.ConfVars.HIVE_SERVER2_SESSION_HOOK.varname)) {
       hiveConf.set(HiveConf.ConfVars.HIVE_SERVER2_SESSION_HOOK.varname,
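
Three behavioral changes ride along in this factory: the
AuthorizingObjectStore raw store is set unconditionally, the
reflect/reflect2/java_method UDFs are blacklisted, and
SentryMetaStoreFilterHook is wired up through hive.metastore.filter.hook for
HiveServer2 only (the standalone metastore relies on the raw store instead).
A filter hook of this kind prunes metadata listings before they reach the
client. A rough sketch of the pattern (the real hook implements Hive's
MetaStoreFilterHook interface, whose exact signatures are not reproduced
here):

    import java.util.ArrayList;
    import java.util.List;

    // Illustrative only: the filtering shape a metastore filter hook applies.
    // The actual privilege decision is delegated to an authorization check.
    public class FilterHookSketch {
      interface PrivilegeCheck { boolean canSee(String db, String table); }

      private final PrivilegeCheck check;
      public FilterHookSketch(PrivilegeCheck check) { this.check = check; }

      // Drop databases the caller holds no privileges on.
      public List<String> filterDatabases(List<String> dbNames) {
        List<String> visible = new ArrayList<String>();
        for (String db : dbNames) {
          if (check.canSee(db, null)) { visible.add(db); }
        }
        return visible;
      }

      // Drop tables within a database the same way.
      public List<String> filterTableNames(String db, List<String> tables) {
        List<String> visible = new ArrayList<String>();
        for (String t : tables) {
          if (check.canSee(db, t)) { visible.add(t); }
        }
        return visible;
      }
    }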

http://git-wip-us.apache.org/repos/asf/incubator-sentry/blob/66eef78f/sentry-tests/sentry-tests-hive/src/test/java/org/apache/sentry/tests/e2e/hive/hiveserver/InternalHiveServer.java
----------------------------------------------------------------------
diff --git a/sentry-tests/sentry-tests-hive/src/test/java/org/apache/sentry/tests/e2e/hive/hiveserver/InternalHiveServer.java b/sentry-tests/sentry-tests-hive/src/test/java/org/apache/sentry/tests/e2e/hive/hiveserver/InternalHiveServer.java
index 84b3481..a19cbd3 100644
--- a/sentry-tests/sentry-tests-hive/src/test/java/org/apache/sentry/tests/e2e/hive/hiveserver/InternalHiveServer.java
+++ b/sentry-tests/sentry-tests-hive/src/test/java/org/apache/sentry/tests/e2e/hive/hiveserver/InternalHiveServer.java
@@ -29,13 +29,6 @@ public class InternalHiveServer extends AbstractHiveServer {
 
   public InternalHiveServer(HiveConf conf) throws Exception {
     super(conf, getHostname(conf), getPort(conf));
-    // Fix for ACCESS-148. Resets a static field
-    // so the default database is created even
-    // though is has been created before in this JVM
-    Reflection.staticField("createDefaultDB")
-      .ofType(boolean.class)
-      .in(HiveMetaStore.HMSHandler.class)
-      .set(false);
     hiveServer2 = new HiveServer2();
     this.conf = conf;
   }
@@ -51,4 +44,4 @@ public class InternalHiveServer extends AbstractHiveServer {
   public synchronized void shutdown() throws Exception {
     hiveServer2.stop();
   }
-}
\ No newline at end of file
+}

http://git-wip-us.apache.org/repos/asf/incubator-sentry/blob/66eef78f/sentry-tests/sentry-tests-hive/src/test/java/org/apache/sentry/tests/e2e/hive/hiveserver/InternalMetastoreServer.java
----------------------------------------------------------------------
diff --git a/sentry-tests/sentry-tests-hive/src/test/java/org/apache/sentry/tests/e2e/hive/hiveserver/InternalMetastoreServer.java b/sentry-tests/sentry-tests-hive/src/test/java/org/apache/sentry/tests/e2e/hive/hiveserver/InternalMetastoreServer.java
index 50e0e4f..4f73954 100644
--- a/sentry-tests/sentry-tests-hive/src/test/java/org/apache/sentry/tests/e2e/hive/hiveserver/InternalMetastoreServer.java
+++ b/sentry-tests/sentry-tests-hive/src/test/java/org/apache/sentry/tests/e2e/hive/hiveserver/InternalMetastoreServer.java
@@ -35,11 +35,6 @@ public class InternalMetastoreServer extends AbstractHiveServer {
 
   public InternalMetastoreServer(HiveConf conf) throws Exception {
     super(conf, getMetastoreHostname(conf), getMetastorePort(conf));
-    // Fix for ACCESS-148. Resets a static field
-    // so the default database is created even
-    // though is has been created before in this JVM
-    Reflection.staticField("createDefaultDB").ofType(boolean.class)
-        .in(HiveMetaStore.HMSHandler.class).set(false);
     this.conf = conf;
   }
 

http://git-wip-us.apache.org/repos/asf/incubator-sentry/blob/66eef78f/sentry-tests/sentry-tests-hive/src/test/java/org/apache/sentry/tests/e2e/metastore/AbstractMetastoreTestWithStaticConfiguration.java
----------------------------------------------------------------------
diff --git a/sentry-tests/sentry-tests-hive/src/test/java/org/apache/sentry/tests/e2e/metastore/AbstractMetastoreTestWithStaticConfiguration.java b/sentry-tests/sentry-tests-hive/src/test/java/org/apache/sentry/tests/e2e/metastore/AbstractMetastoreTestWithStaticConfiguration.java
index d4be70a..9d15c95 100644
--- a/sentry-tests/sentry-tests-hive/src/test/java/org/apache/sentry/tests/e2e/metastore/AbstractMetastoreTestWithStaticConfiguration.java
+++ b/sentry-tests/sentry-tests-hive/src/test/java/org/apache/sentry/tests/e2e/metastore/AbstractMetastoreTestWithStaticConfiguration.java
@@ -38,6 +38,7 @@ import org.apache.hadoop.hive.ql.processors.CommandProcessorResponse;
 import org.apache.hadoop.hive.ql.session.SessionState;
 import org.apache.hadoop.hive.serde.serdeConstants;
 import org.apache.hadoop.hive.shims.ShimLoader;
+import org.apache.hadoop.hive.shims.Utils;
 import org.apache.hadoop.security.UserGroupInformation;
 import org.apache.pig.PigServer;
 import org.apache.sentry.provider.db.service.thrift.SentryPolicyServiceClient;
@@ -197,7 +198,7 @@ public abstract class AbstractMetastoreTestWithStaticConfiguration extends
       final String pigLatin) throws Exception {
     UserGroupInformation clientUgi = UserGroupInformation
         .createRemoteUser(userName);
-    ShimLoader.getHadoopShims().doAs(clientUgi,
+    clientUgi.doAs(
         new PrivilegedExceptionAction<Object>() {
           @Override
           public Void run() throws Exception {
